| code (string, 114 to 1.05M chars) | path (string, 3 to 312 chars) | quality_prob (float64, 0.5 to 0.99) | learning_prob (float64, 0.2 to 1) | filename (string, 3 to 168 chars) | kind (1 class: "starcoder") |
|---|---|---|---|---|---|
defprotocol BinFormat.Field do
@fallback_to_any true
@moduledoc """
Converts information about a field into snippets of Elixir AST that can be
used to define it in structs and to match against it in structs and binaries
as necessary.
If the field generates code for a given function, it should return a tuple
`{:ok, ast}` where `ast` is standard Elixir AST to be inserted with `unquote`.
If the field does not require any code for a given function because it should
be ignored there (e.g. a constant that only appears in the binary patterns) it
should return the atom `:undefined`.
"""
@doc """
The code needed to define the field in the struct definition.
If code should be inserted, return `{:ok, ast}` where `ast` is the quoted Elixir
code to insert into the definition. Struct definition elements are
represented as a two-element tuple containing the name (as an atom) and the
default value.
If no code should be inserted (because the field doesn't need to be in the
struct) the atom `:undefined` is returned.
"""
def struct_definition(field, module)
@doc """
The code used to insert this field into a struct.
If this field is part of the struct for the packet this function should
return `{:ok, ast}` where ast is the quoted Elixir code to insert when
building a struct to represent the packet. The code should assume any
variables needed were created by `struct_match_pattern/3` or
`bin_match_pattern/3`. If a prefix is supplied it should be appended to the
start of any user supplied part of the field variable name. The field names
used in the struct should match those generated in `struct_definition/2`.
`module` is the name, as an atom, of the module in which the variables should
be interpreted.
If no code should be inserted (because the field doesn't need to be in the
struct) the atom `:undefined` is returned.
"""
def struct_build_pattern(field, module, prefix \\ "")
@doc """
The code used to match for this field against a struct.
If this field is part of the struct for the packet this function should
return `{:ok, ast}` where ast is the quoted Elixir code to insert when
matching against a struct representing the packet. The code should create
any variables needed by `struct_build_pattern/3` or `bin_build_pattern/3`.
If a prefix is supplied it should be appended to the start of any user
supplied part of the field variable name. The field names used in the struct
should match those generated in `struct_definition/2`.
`module` is the name, as an atom, of the module in which the variables should
be interpreted.
If no code should be inserted (because this field doesn't need to be in the
struct) the atom `:undefined` is returned.
"""
def struct_match_pattern(field, module, prefix \\ "")
@doc """
The code used to insert this field into a binary.
If this field appears in the binary for the packet this function should
return `{:ok, ast}` where ast is the quoted Elixir code to insert when
building a binary representation of the packet. The code should assume any
variables needed were created by `struct_match_pattern/3` or
`bin_match_pattern/3`. If a prefix is supplied it should be appended to the
start of any user supplied part of the field variable name.
`module` is the name, as an atom, of the module in which the variables should
be interpreted.
If no code should be inserted (because the field doesn't need to be in the
binary) the atom `:undefined` is returned.
"""
def bin_build_pattern(field, module, prefix \\ "")
@doc """
The code used to match for this field against a binary.
If this field appears in the binary for the packet this function should
return `{:ok, ast}` where ast is the quoted Elixir code to insert when
matching against a binary representation of the packet. The code should
create any variables needed by `struct_build_pattern/3` or
`bin_build_pattern/3`. If a prefix is supplied it should be appended to the
start of any user supplied part of the field variable name.
`module` is the name, as an atom, of the module in which the variables should
be interpreted.
If no code should be inserted (because the field doesn't need to be in the
binary) the atom `:undefined` is returned.
"""
def bin_match_pattern(field, module, prefix \\ "")
end
# Default implementation allows other code in the defformat block
defimpl BinFormat.Field, for: Any do
def struct_definition(_, _) do
:undefined
end
def struct_build_pattern(_field, _module, _prefix) do
:undefined
end
def struct_match_pattern(_field, _module, _prefix) do
:undefined
end
def bin_build_pattern(_field, _module, _prefix) do
:undefined
end
def bin_match_pattern(_field, _module, _prefix) do
:undefined
end
end
| lib/bin_format/field.ex | 0.901391 | 0.77437 | field.ex | starcoder |
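A minimal sketch of a protocol implementation for the module above, assuming a hypothetical `MyFormat.Constant` field (the struct and its fields are not part of the library): it emits binary-segment AST but no struct code, exercising both the `{:ok, ast}` and `:undefined` return conventions.

```elixir
# Hypothetical field type: a fixed value that only ever appears in the binary.
defmodule MyFormat.Constant do
  defstruct [:value, :size]
end

defimpl BinFormat.Field, for: MyFormat.Constant do
  # The constant never appears in the struct, so no struct code is generated.
  def struct_definition(_field, _module), do: :undefined
  def struct_build_pattern(_field, _module, _prefix), do: :undefined
  def struct_match_pattern(_field, _module, _prefix), do: :undefined

  # It still has to be written into, and matched against, the binary.
  def bin_build_pattern(%MyFormat.Constant{value: value, size: size}, _module, _prefix) do
    {:ok,
     quote do
       unquote(value) :: size(unquote(size))
     end}
  end

  def bin_match_pattern(%MyFormat.Constant{value: value, size: size}, _module, _prefix) do
    {:ok,
     quote do
       unquote(value) :: size(unquote(size))
     end}
  end
end
```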
defmodule Cog.Controller.Helpers do
alias Phoenix.ConnTest
alias Plug.Conn
@doc """
Prepare and execute a Cog API request.
Arguments:
* `requestor` - a `%User{}` with an associated token. This token
will be added to the request via an `Authorization` header. All
request authentication and authorization will be done in terms of
this user.
* `method` - an atom representing one of the HTTP verbs: `:get`,
`:post`, etc.
* `path` - the path of the request,
e.g. "/v1/user/898c50f2-0523-4be3-a9a4-16dbc1677a59"
* `options` - a keyword list of options that may be used to further
customize the request. The supported options are:
`body` - a map representing the JSON payload of the
request. Defaults to `nil`
`endpoint` - the Phoenix Endpoint the request will be dispatched
to. Defaults to `Cog.Endpoint`. (Note: until such time as we
have multiple endpoints, you should use the default.)
Returns a `%Plug.Conn{}`.
Examples:
# List groups
api_request(user, :get, "/v1/groups")
# Retrieve a specific user
api_request(user, :get, "/v1/user/898c50f2-0523-4be3-a9a4-16dbc1677a59")
# Create a role
api_request(user, :post, "/v1/roles",
body: %{"role" => %{"name" => "admin"}})
"""
def api_request(requestor, method, path, options \\ []) do
# Process all options
defaults = [body: nil,
endpoint: Cog.Endpoint,
content_type: :json]
options = Keyword.merge(defaults, options)
body = Keyword.fetch!(options, :body)
endpoint = Keyword.fetch!(options, :endpoint)
# Obtain a token from the requestor; if more than one is
# associated with the requestor, we take the first one.
requestor = Cog.Repo.preload(requestor, :tokens)
token = hd(requestor.tokens).value
# Route the request, with appropriate headers in place
case Keyword.fetch!(options, :content_type) do
:json ->
ConnTest.build_conn()
|> Conn.put_req_header("accept", "application/json")
|> Conn.put_req_header("content-type", "application/json")
|> Conn.put_req_header("authorization", "token #{token}")
|> ConnTest.dispatch(endpoint, method, path, Poison.encode!(body))
:multipart ->
ConnTest.build_conn()
|> Conn.put_req_header("accept", "application/json")
|> Conn.put_req_header("content-type", "multipart/form-data")
|> Conn.put_req_header("authorization", "token #{token}")
|> ConnTest.dispatch(endpoint, method, path, body)
end
end
@doc """
Utility function to sort a list of structs or maps by the value of a
specified field. Useful to eliminate ordering issues when comparing
to API responses that are lists.
"""
def sort_by(things, field) do
Enum.sort_by(things, fn(t) -> Map.get(t, field) end)
end
end
| test/support/controller_helpers.ex | 0.84228 | 0.54256 | controller_helpers.ex | starcoder |
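A quick illustration of `sort_by/2` from the module above (not from the source, just a sketch): it works on the string-keyed maps you get from a decoded JSON response, since it reads the field with `Map.get/2`.

```elixir
iex> Cog.Controller.Helpers.sort_by([%{"name" => "ops"}, %{"name" => "admin"}], "name")
[%{"name" => "admin"}, %{"name" => "ops"}]
```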
defmodule Expwd do
use Bitwise
@type supported_algs :: :ripemd160 | :sha256 | :sha384 | :sha512
@supported_hash_algorithms [:ripemd160, :sha256, :sha384, :sha512]
def supported_hash_algorithms(), do: @supported_hash_algorithms
@doc """
Securely compares two strings in constant time.
Returns `true` if the strings are equal, `false` otherwise. One of the strings can be a
`t:Expwd.Hashed.Portable.t/0`.
## Example
```elixir
iex> Expwd.secure_compare("V01H2GjdTXE3iqDb+3j1VdbM65+/8QcXcjr9dVuMcYE", "V01H2GjdTXE3iqDb+3j1VdbM65+/8QcXcjr9dVuMcYE")
true
iex> Expwd.secure_compare("V01H2GjdTXE3iqDb+3j1VdbM65+/8QcXcjr9dVuMcYE", "ni8fN4rnwavBlbVpRrD/pYcAulaG4pW33fJ")
false
iex> Expwd.secure_compare("G9fE1eS9aW+/eap0GjSgZaeAKRK8XlhZDLDu6UV2Q1g", "expwd:sha256:10+X11gPkuoRwXHZ/5uva6bYP7inqfykJ/pMk9dXT8E")
true
iex> Expwd.secure_compare("expwd:sha256:aX5jyhAYXJGssY/DFj0PbCj5kj+SviA0d7egOTFrbBw", "3w8C85FRAnUSF68KPgArX6yfGDeS8AP6EpEzyd8UaJ8")
true
iex> Expwd.secure_compare("G9fE1eS9aW+/eap0GjSgZaeAKRK8XlhZDLDu6UV2Q1g", "expwd:sha256:10+X11gPkuoRwXHZ/5vva6bYP7inqfykJ/pMk9dXT8E")
false
```
"""
@spec secure_compare(binary(), binary()) :: boolean()
def secure_compare(left, "expwd:" <> _ = right) when is_binary(left) do
hashed_right = Expwd.Hashed.Portable.from_portable(right)
secure_compare(:crypto.hash(hashed_right.alg, left), hashed_right.hash, 0) == 0
end
def secure_compare("expwd:" <> _ = left, right) do
secure_compare(right, left)
end
def secure_compare(left, right) when is_binary(left) and is_binary(right) do
hashed_left = :crypto.hash(:sha256, left)
hashed_right = :crypto.hash(:sha256, right)
secure_compare(hashed_left, hashed_right, 0) == 0
end
@spec secure_compare(binary(), binary(), non_neg_integer) :: non_neg_integer
defp secure_compare(<<x, left :: binary>>, <<y, right :: binary>>, acc) do
secure_compare(left, right, acc ||| (x ^^^ y))
end
defp secure_compare(<<>>, <<>>, acc) do
acc
end
defmodule UnsupportedHashAlgorithm do
defexception [:message]
@type t :: %__MODULE__{message: String.t}
end
end
| lib/expwd.ex | 0.902612 | 0.727855 | expwd.ex | starcoder |
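A small usage sketch for the module above (the module name and stored value are assumptions): comparing a presented secret against a stored portable hash without ever handling the plaintext at rest.

```elixir
defmodule MyApp.APIKeyCheck do
  # Portable hash taken from the doctest above; in practice it would come from
  # configuration or a database rather than a module attribute.
  @stored "expwd:sha256:10+X11gPkuoRwXHZ/5uva6bYP7inqfykJ/pMk9dXT8E"

  def valid?(presented) when is_binary(presented) do
    Expwd.secure_compare(presented, @stored)
  end
end

MyApp.APIKeyCheck.valid?("G9fE1eS9aW+/eap0GjSgZaeAKRK8XlhZDLDu6UV2Q1g")
#=> true (this is the matching pair from the doctest)
```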
defmodule Vex.Validators.Uuid do
@moduledoc """
Ensure a value is a valid UUID string.
## Options
At least one of the following must be provided:
* `:format`: Required. An atom that defines the UUID format of the value:
  * `:default`: The value must be a string with the format `xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx`.
  * `:hex`: The value must be a string with the format `xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx`.
  * `:urn`: The value must be a string with the format `urn:uuid:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx`.
  * `:any`: The value must be a string in any of the supported formats (`:default`, `:hex` or `:urn`).
  * `:not_any`: The value must not be a valid UUID string.
*Note: each `x` is a hexadecimal digit.*
Optional:
* `:message`: A custom error message. May be in EEx format
and use the fields described in [Custom Error Messages](#module-custom-error-messages).
* `:allow_nil`: A boolean indicating whether to skip this validation for `nil` values.
* `:allow_blank`: A boolean indicating whether to skip this validation for blank values.
The value for `:format` can be provided instead of the options keyword list.
Additionally, if the options value is a boolean, then:
* `true`: Is the same as the `[format: :any]` option.
* `false`: Is the same as the `[format: :not_any]` option.
## Examples
Examples when using the `:any` or `true` options:
iex> Vex.Validators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a304", format: :any)
:ok
iex> Vex.Validators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a30", format: :any)
{:error, "must be a valid UUID string"}
iex> Vex.Validators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a304", true)
:ok
iex> Vex.Validators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a30", true)
{:error, "must be a valid UUID string"}
Examples when using the `:not_any` or `false` options:
iex> Vex.Validators.Uuid.validate("not_a_uuid", format: :not_any)
:ok
iex> Vex.Validators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a304", format: :not_any)
{:error, "must not be a valid UUID string"}
iex> Vex.Validators.Uuid.validate("not_a_uuid", false)
:ok
iex> Vex.Validators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a304", false)
{:error, "must not be a valid UUID string"}
Examples when using the `:default` option:
iex> Vex.Validators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a304", format: :default)
:ok
iex> Vex.Validators.Uuid.validate("02aa7f483ccd11e4b63e14109ff1a304", format: :default)
{:error, "must be a valid UUID string in default format"}
Examples when using the `:hex` option:
iex> Vex.Validators.Uuid.validate("02aa7f483ccd11e4b63e14109ff1a304", format: :hex)
:ok
iex> Vex.Validators.Uuid.validate("urn:uuid:02aa7f48-3ccd-11e4-b63e-14109ff1a304", format: :hex)
{:error, "must be a valid UUID string in hex format"}
Examples when using the `:urn` option:
iex> Vex.Validators.Uuid.validate("urn:uuid:02aa7f48-3ccd-11e4-b63e-14109ff1a304", format: :urn)
:ok
iex> Vex.Validators.Uuid.validate("02aa7f48-3ccd-11e4-b63e-14109ff1a304", format: :urn)
{:error, "must be a valid UUID string in urn format"}
## Custom Error Messages
Custom error messages (in EEx format), provided as `:message`, can use the following values:
iex> Vex.Validators.Uuid.__validator__(:message_fields)
[value: "Bad value", format: "The UUID format"]
An example:
iex> Vex.Validators.Uuid.validate("not_a_uuid", format: :any,
...> message: "<%= value %> should be <%= format %> UUID")
{:error, "not_a_uuid should be any UUID"}
"""
use Vex.Validator
@uuid_formats [:default, :hex, :urn]
@formats [:any, :not_any] ++ @uuid_formats
@urn_prefix "urn:uuid:"
@message_fields [value: "Bad value", format: "The UUID format"]
def validate(value, true), do: validate(value, format: :any)
def validate(value, false), do: validate(value, format: :not_any)
def validate(value, options) when options in @formats, do: validate(value, format: options)
def validate(value, options) when is_list(options) do
unless_skipping value, options do
format = options[:format]
case do_validate(value, format) do
:ok -> :ok
{:error, reason} -> {:error, message(options, reason, value: value, format: format)}
end
end
end
defp do_validate(<<_::64, ?-, _::32, ?-, _::32, ?-, _::32, ?-, _::96>>, :default) do
:ok
end
defp do_validate(<<_::256>>, :hex) do
:ok
end
defp do_validate(<<@urn_prefix, _::64, ?-, _::32, ?-, _::32, ?-, _::32, ?-, _::96>>, :urn) do
:ok
end
defp do_validate(_, format) when format in @uuid_formats do
{:error, "must be a valid UUID string in #{format} format"}
end
defp do_validate(value, :any) do
error = {:error, "must be a valid UUID string"}
Enum.reduce_while(@uuid_formats, error, fn format, _ ->
case do_validate(value, format) do
:ok -> {:halt, :ok}
_ -> {:cont, error}
end
end)
end
defp do_validate(value, :not_any) do
case do_validate(value, :any) do
:ok -> {:error, "must not be a valid UUID string"}
_ -> :ok
end
end
defp do_validate(_, format) do
raise "Invalid value #{inspect(format)} for option :format"
end
end
| lib/vex/validators/uuid.ex | 0.853715 | 0.741627 | uuid.ex | starcoder |
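The `do_validate/2` clauses above rely on binary size matching rather than hex validation: a canonical UUID is 36 bytes laid out as 8-4-4-4-12 hex characters, i.e. 64, 32, 32, 32 and 96 bits of text around the hyphens. Isolated for illustration:

```elixir
# Shape check only: length and hyphen positions, not that the characters are hex digits.
default_shape? = fn
  <<_::64, ?-, _::32, ?-, _::32, ?-, _::32, ?-, _::96>> -> true
  _ -> false
end

default_shape?.("02aa7f48-3ccd-11e4-b63e-14109ff1a304") #=> true
default_shape?.("02aa7f483ccd11e4b63e14109ff1a304")     #=> false
```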
defmodule AWS.KinesisAnalyticsV2 do
@moduledoc """
Amazon Kinesis Data Analytics is a fully managed service that you can use
to process and analyze streaming data using SQL or Java. The service
enables you to quickly author and run SQL or Java code against streaming
sources to perform time series analytics, feed real-time dashboards, and
create real-time metrics.
"""
@doc """
Adds an Amazon CloudWatch log stream to monitor application configuration
errors.
"""
def add_application_cloud_watch_logging_option(client, input, options \\ []) do
request(client, "AddApplicationCloudWatchLoggingOption", input, options)
end
@doc """
Adds a streaming source to your SQL-based Amazon Kinesis Data Analytics
application.
You can add a streaming source when you create an application, or you can
use this operation to add a streaming source after you create an
application. For more information, see `CreateApplication`.
Any configuration update, including adding a streaming source using this
operation, results in a new version of the application. You can use the
`DescribeApplication` operation to find the current application version.
"""
def add_application_input(client, input, options \\ []) do
request(client, "AddApplicationInput", input, options)
end
@doc """
Adds an `InputProcessingConfiguration` to an SQL-based Kinesis Data
Analytics application. An input processor pre-processes records on the
input stream before the application's SQL code executes. Currently, the
only input processor available is [AWS
Lambda](https://aws.amazon.com/documentation/lambda/).
"""
def add_application_input_processing_configuration(client, input, options \\ []) do
request(client, "AddApplicationInputProcessingConfiguration", input, options)
end
@doc """
Adds an external destination to your SQL-based Amazon Kinesis Data
Analytics application.
If you want Kinesis Data Analytics to deliver data from an in-application
stream within your application to an external destination (such as an
Kinesis data stream, a Kinesis Data Firehose delivery stream, or an AWS
Lambda function), you add the relevant configuration to your application
using this operation. You can configure one or more outputs for your
application. Each output configuration maps an in-application stream and an
external destination.
You can use one of the output configurations to deliver data from your
in-application error stream to an external destination so that you can
analyze the errors.
Any configuration update, including adding a streaming source using this
operation, results in a new version of the application. You can use the
`DescribeApplication` operation to find the current application version.
"""
def add_application_output(client, input, options \\ []) do
request(client, "AddApplicationOutput", input, options)
end
@doc """
Adds a reference data source to an existing SQL-based Amazon Kinesis Data
Analytics application.
Kinesis Data Analytics reads reference data (that is, an Amazon S3 object)
and creates an in-application table within your application. In the
request, you provide the source (S3 bucket name and object key name), name
of the in-application table to create, and the necessary mapping
information that describes how data in an Amazon S3 object maps to columns
in the resulting in-application table.
"""
def add_application_reference_data_source(client, input, options \\ []) do
request(client, "AddApplicationReferenceDataSource", input, options)
end
@doc """
Adds a Virtual Private Cloud (VPC) configuration to the application.
Applications can use VPCs to store and access resources securely.
Note the following about VPC configurations for Kinesis Data Analytics
applications:
* VPC configurations are not supported for SQL applications.
* When a VPC is added to a Kinesis Data Analytics application, the
application can no longer be accessed from the Internet directly. To enable
Internet access to the application, add an Internet gateway to your VPC.
"""
def add_application_vpc_configuration(client, input, options \\ []) do
request(client, "AddApplicationVpcConfiguration", input, options)
end
@doc """
Creates an Amazon Kinesis Data Analytics application. For information about
creating a Kinesis Data Analytics application, see [Creating an
Application](https://docs.aws.amazon.com/kinesisanalytics/latest/java/getting-started.html).
"""
def create_application(client, input, options \\ []) do
request(client, "CreateApplication", input, options)
end
@doc """
Creates a snapshot of the application's state data.
"""
def create_application_snapshot(client, input, options \\ []) do
request(client, "CreateApplicationSnapshot", input, options)
end
@doc """
Deletes the specified application. Kinesis Data Analytics halts application
execution and deletes the application.
"""
def delete_application(client, input, options \\ []) do
request(client, "DeleteApplication", input, options)
end
@doc """
Deletes an Amazon CloudWatch log stream from an Amazon Kinesis Data
Analytics application.
"""
def delete_application_cloud_watch_logging_option(client, input, options \\ []) do
request(client, "DeleteApplicationCloudWatchLoggingOption", input, options)
end
@doc """
Deletes an `InputProcessingConfiguration` from an input.
"""
def delete_application_input_processing_configuration(client, input, options \\ []) do
request(client, "DeleteApplicationInputProcessingConfiguration", input, options)
end
@doc """
Deletes the output destination configuration from your SQL-based Amazon
Kinesis Data Analytics application's configuration. Kinesis Data Analytics
will no longer write data from the corresponding in-application stream to
the external output destination.
"""
def delete_application_output(client, input, options \\ []) do
request(client, "DeleteApplicationOutput", input, options)
end
@doc """
Deletes a reference data source configuration from the specified SQL-based
Amazon Kinesis Data Analytics application's configuration.
If the application is running, Kinesis Data Analytics immediately removes
the in-application table that you created using the
`AddApplicationReferenceDataSource` operation.
"""
def delete_application_reference_data_source(client, input, options \\ []) do
request(client, "DeleteApplicationReferenceDataSource", input, options)
end
@doc """
Deletes a snapshot of application state.
"""
def delete_application_snapshot(client, input, options \\ []) do
request(client, "DeleteApplicationSnapshot", input, options)
end
@doc """
Removes a VPC configuration from a Kinesis Data Analytics application.
"""
def delete_application_vpc_configuration(client, input, options \\ []) do
request(client, "DeleteApplicationVpcConfiguration", input, options)
end
@doc """
Returns information about a specific Amazon Kinesis Data Analytics
application.
If you want to retrieve a list of all applications in your account, use the
`ListApplications` operation.
"""
def describe_application(client, input, options \\ []) do
request(client, "DescribeApplication", input, options)
end
@doc """
Returns information about a snapshot of application state data.
"""
def describe_application_snapshot(client, input, options \\ []) do
request(client, "DescribeApplicationSnapshot", input, options)
end
@doc """
Infers a schema for an SQL-based Amazon Kinesis Data Analytics application
by evaluating sample records on the specified streaming source (Kinesis
data stream or Kinesis Data Firehose delivery stream) or Amazon S3 object.
In the response, the operation returns the inferred schema and also the
sample records that the operation used to infer the schema.
You can use the inferred schema when configuring a streaming source for
your application. When you create an application using the Kinesis Data
Analytics console, the console uses this operation to infer a schema and
show it in the console user interface.
"""
def discover_input_schema(client, input, options \\ []) do
request(client, "DiscoverInputSchema", input, options)
end
@doc """
Lists information about the current application snapshots.
"""
def list_application_snapshots(client, input, options \\ []) do
request(client, "ListApplicationSnapshots", input, options)
end
@doc """
Returns a list of Amazon Kinesis Data Analytics applications in your
account. For each application, the response includes the application name,
Amazon Resource Name (ARN), and status.
If you want detailed information about a specific application, use
`DescribeApplication`.
"""
def list_applications(client, input, options \\ []) do
request(client, "ListApplications", input, options)
end
@doc """
Retrieves the list of key-value tags assigned to the application. For more
information, see [Using
Tagging](https://docs.aws.amazon.com/kinesisanalytics/latest/java/how-tagging.html).
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Starts the specified Amazon Kinesis Data Analytics application. After
creating an application, you must exclusively call this operation to start
your application.
"""
def start_application(client, input, options \\ []) do
request(client, "StartApplication", input, options)
end
@doc """
Stops the application from processing data. You can stop an application
only if it is in the running state. You can use the `DescribeApplication`
operation to find the application state.
"""
def stop_application(client, input, options \\ []) do
request(client, "StopApplication", input, options)
end
@doc """
Adds one or more key-value tags to a Kinesis Analytics application. Note
that the maximum number of application tags includes system tags. The
maximum number of user-defined application tags is 50. For more
information, see [Using
Tagging](https://docs.aws.amazon.com/kinesisanalytics/latest/java/how-tagging.html).
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Removes one or more tags from a Kinesis Analytics application. For more
information, see [Using
Tagging](https://docs.aws.amazon.com/kinesisanalytics/latest/java/how-tagging.html).
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Updates an existing Amazon Kinesis Data Analytics application. Using this
operation, you can update application code, input configuration, and output
configuration.
Kinesis Data Analytics updates the `ApplicationVersionId` each time you
update your application.
"""
def update_application(client, input, options \\ []) do
request(client, "UpdateApplication", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, Poison.Parser.t() | nil, Poison.Response.t()}
| {:error, Poison.Parser.t()}
| {:error, HTTPoison.Error.t()}
defp request(client, action, input, options) do
client = %{client | service: "kinesisanalytics"}
host = build_host("kinesisanalytics", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "KinesisAnalytics_20180523.#{action}"}
]
payload = Poison.Encoder.encode(input, %{})
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
{:ok, nil, response}
{:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
{:ok, Poison.Parser.parse!(body, %{}), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body, %{})
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
| lib/aws/kinesis_analytics_v2.ex | 0.914161 | 0.659967 | kinesis_analytics_v2.ex | starcoder |
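A usage sketch, not from the source: the `request/4` pipeline above only needs a client map carrying signing credentials plus the `region`, `endpoint`, `proto` and `port` fields read by `build_host/2` and `build_url/2`. Client construction varies across aws-elixir versions, so treat the field names here as assumptions.

```elixir
# All field names below are assumptions about the client map, not part of this module.
client = %{
  access_key_id: System.get_env("AWS_ACCESS_KEY_ID"),
  secret_access_key: System.get_env("AWS_SECRET_ACCESS_KEY"),
  region: "us-east-1",
  endpoint: "amazonaws.com",
  proto: "https",
  port: 443,
  service: "kinesisanalytics"
}

# List the applications in the account; the map is the JSON request body.
{:ok, result, _http_response} = AWS.KinesisAnalyticsV2.list_applications(client, %{})
```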
defmodule Apoc.Hazmat.MAC.HMAC256 do
@moduledoc """
Implementation of the HMAC construction
as described in [FIPS PUB 198-1](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.198-1.pdf)
"""
# This is needed for our API check
require :crypto
use Apoc.Adapter.MAC
defguard is_valid_key(key) when is_binary(key) and byte_size(key) >= 32
@doc """
Generates the HMAC signature (tag) for the given message under the given key.
This function returns only the signature itself and does not encode the
plaintext at all (unlike `Plug.Crypto.MessageVerifier`, which includes the
plaintext in the encoded return value).
SHA-256 is used as the hash function, so a 32 byte (256 bit)
key is recommended.
The key for HMAC-SHA256 should be at least 32 bytes long. Keys longer than 64 bytes,
while valid, are not necessary, as HMAC will hash them to generate a shorter key anyway.
See https://crypto.stackexchange.com/questions/34864/key-size-for-hmac-sha256
## Examples
iex> Apoc.Hazmat.MAC.HMAC256.sign("hello", Apoc.decode!("Of-znK3DYHWeV1u6XHXQ6QNotKMLdYleLUhc4-TMpxU"))
{:ok,
<<241, 135, 240, 239, 31, 202, 134, 189, 43, 55, 208, 89, 37, 208, 2, 87, 228,
236, 191, 9, 76, 82, 110, 190, 174, 78, 97, 103, 188, 14, 211, 146>>}
iex> Apoc.Hazmat.MAC.HMAC256.sign("hello", <<1, 2, 3>>)
{:error, "Invalid key size"}
"""
@impl Apoc.Adapter.MAC
def sign(message, key, opts \\ [])
def sign(message, key, _opts) when is_valid_key(key) do
tag =
if function_exported?(:crypto, :mac, 4) do
:crypto.mac(:hmac, :sha256, key, message)
else
:crypto.hmac(:sha256, key, message)
end
{:ok, tag}
end
def sign(_, _, _) do
{:error, "Invalid key size"}
end
@doc """
Similar to `c:sign/3` but either returns the tag directly
or raises `Apoc.Error` if something went wrong.
## Example
iex> "hello"
...> |> Apoc.Hazmat.MAC.HMAC256.sign!(Apoc.decode!("<KEY>"))
...> |> Apoc.encode
"8Yfw7x_Khr0rN9BZJdACV-TsvwlMUm6-rk5hZ7wO05I"
"""
@impl Apoc.Adapter.MAC
def sign!(message, key, opts \\ []) do
with {:ok, tag} <- sign(message, key, opts) do
tag
else
{:error, message} ->
raise Apoc.Error, message: message
end
end
@doc """
Verifies a tag generated by `Apoc.Hazmat.MAC.HMAC256.sign/3`.
## Examples
iex> key = Apoc.decode!("<KEY>")
iex> "8Yfw7x_Khr0rN9BZJdACV-TsvwlMUm6-rk5hZ7wO05I"
...> |> Apoc.decode!
...> |> Apoc.Hazmat.MAC.HMAC256.verify("hello", key)
true
"""
@impl Apoc.Adapter.MAC
def verify(tag, message, key, opts \\ []) when is_valid_key(key) do
with {:ok, challenge} <- sign(message, key, opts) do
Apoc.secure_compare(tag, challenge)
else
_ ->
false
end
end
@deprecated "Use `Apoc.sign/3` or `Apoc.sign!/3` instead"
def sign_hex(message, key, opts \\ []) do
message
|> sign!(key, opts)
|> Apoc.hex()
end
end
| lib/apoc/hazmat/mac/hmac256.ex | 0.842199 | 0.584953 | hmac256.ex | starcoder |
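A short round-trip sketch for the module above with a freshly generated key of the recommended size (32 bytes); the `is_valid_key/1` guard rejects anything shorter.

```elixir
key = :crypto.strong_rand_bytes(32)

{:ok, tag} = Apoc.Hazmat.MAC.HMAC256.sign("payload", key)
true = Apoc.Hazmat.MAC.HMAC256.verify(tag, "payload", key)

# A key shorter than 32 bytes falls through to the error clause:
{:error, "Invalid key size"} = Apoc.Hazmat.MAC.HMAC256.sign("payload", <<1, 2, 3>>)
```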
defmodule Zoneinfo.TimeZoneDatabase do
@moduledoc """
Calendar.TimeZoneDatabase implementation for Zoneinfo
Pass this module to the `DateTime` functions or set it as the default by
calling `Calendar.put_time_zone_database/1`
"""
@behaviour Calendar.TimeZoneDatabase
import Zoneinfo.Utils
@impl Calendar.TimeZoneDatabase
def time_zone_period_from_utc_iso_days(iso_days, time_zone) do
with {:ok, tzif} <- Zoneinfo.Cache.get(time_zone) do
iso_days_to_gregorian_seconds(iso_days)
|> find_period_for_utc_secs(tzif.periods)
end
end
@impl Calendar.TimeZoneDatabase
def time_zone_periods_from_wall_datetime(naive_datetime, time_zone) do
with {:ok, tzif} <- Zoneinfo.Cache.get(time_zone) do
{seconds, _micros} = NaiveDateTime.to_gregorian_seconds(naive_datetime)
find_period_for_wall_secs(seconds, tzif.periods)
end
end
defp find_period_for_utc_secs(secs, periods) do
period = Enum.find(periods, fn {time, _, _, _} -> secs >= time end)
{:ok, period_to_map(period)}
end
# Receives wall-clock Gregorian seconds (also referred to as the 'given timestamp' in the comments below)
# and the list of transitions.
defp find_period_for_wall_secs(_, [period]), do: {:ok, period_to_map(period)}
defp find_period_for_wall_secs(wall_secs, [
period = {utc_secs, utc_off, std_off, _},
prev_period = {_ts2, prev_utc_off, prev_std_off, _}
| tail
]) do
period_start_wall_secs = utc_secs + utc_off + std_off
prev_period_end_wall_secs = utc_secs + prev_utc_off + prev_std_off
case {wall_secs >= period_start_wall_secs, wall_secs >= prev_period_end_wall_secs} do
{false, false} ->
# Try next earlier period
find_period_for_wall_secs(wall_secs, [prev_period | tail])
{true, true} ->
# Contained in this period
{:ok, period_to_map(period)}
{false, true} ->
# Time leaped forward and this is in the gap between periods
{:gap,
{period_to_map(prev_period),
gregorian_seconds_to_naive_datetime(prev_period_end_wall_secs)},
{period_to_map(period), gregorian_seconds_to_naive_datetime(period_start_wall_secs)}}
{true, false} ->
# Time fell back and this is in both periods
{:ambiguous, period_to_map(prev_period), period_to_map(period)}
end
end
defp period_to_map({_timestamp, utc_off, std_off, abbr}) do
%{
utc_offset: utc_off,
std_offset: std_off,
zone_abbr: abbr
}
end
end
| lib/zoneinfo/time_zone_database.ex | 0.864754 | 0.453262 | time_zone_database.ex | starcoder |
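A usage sketch for the module above, assuming the `:zoneinfo` application is started (so `Zoneinfo.Cache` is running) and tzdata files are installed: registering the database and hitting the gap case that the `{false, true}` branch handles.

```elixir
Calendar.put_time_zone_database(Zoneinfo.TimeZoneDatabase)

# 02:30 on 2021-03-14 does not exist in America/New_York (DST starts at 02:00),
# so the wall-clock lookup falls into the gap branch above.
DateTime.from_naive(~N[2021-03-14 02:30:00], "America/New_York")
#=> {:gap, just_before, just_after}
```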
defmodule TflInterp do
@timeout 300000
@moduledoc """
Tensorflow Lite interpreter for Elixir.
Deep Learning inference framework for embedded devices.
## Installation
This module is designed for Poncho-style. Therefore, it cannot be installed
by adding this module to your project's dependency list. Follow the steps
below to install.
Download `tfl_interp` to a directory of your choice. I recommend that you put
it in the same hierarchy as your Deep Learning project directory.
```shell
$ cd parent-of-your-project
$ git clone https://github.com/shoz-f/tfl_interp.git
```
Then you need to download the Google Tensorflow file set and build the
`tfl_interp` executable (a Port extension called from Elixir) into ./priv.
Don't worry, the `mix_cmake` utility will help you.
```shell
$ cd tfl_interp
$ mix deps.get
$ mix cmake --config
;-) It takes a few minutes to download and build Tensorflow.
```
Now you are ready. The figure below shows the directory structure of tfl_interp.
```
+- your-project
|
+- tfl_interp
+- _build
| +- .cmake_build --- Tensorflow is downloaded here
+- deps
+- lib
+- priv
| +- tfl_interp --- Elixir Port extension
+- src/
+- test/
+- CMakeLists.txt --- Cmake configuration script
+- mix.exs --- includes parameter for mix-cmake task
+- msys2.patch --- Patch script for MSYS2/MinGW64
```
## Basic Usage
To use TflInterp in your project, you add the path to `tfl_interp` above to
the `mix.exs`:
```elixir:mix.exs
def deps do
[
{:tfl_interp, path: "../tfl_interp"},
]
end
```
Then you put your trained Tensorflow Lite model in ./priv.
```shell
$ cp your-trained-model.tflite ./priv
```
The remaining task is to create a module that will interface with your Deep
Learning model. The module will probably have pre-processing and post-processing
in addition to inference processing, as in the code example below. TflInterp
provides only inference processing.
You put `use TflInterp` at the beginning of your module and specify the model path
in the optional arguments. The inference section involves feeding data to the
model - `TflInterp.set_input_tensor/3`, executing it - `TflInterp.invoke/1`,
and extracting the results - `TflInterp.get_output_tensor/2`.
```elixir:your_model.ex
defmodule YourApp.YourModel do
use TflInterp, model: "priv/your-trained-model.tflite"
def predict(data) do
# preprocess
# to convert the data to be inferred to the input format of the model.
input_bin = convert-float32-binaries(data)
# inference
# typical I/O data for Tensorflow lite models is a serialized 32-bit float tensor.
output_bin =
__MODULE__
|> TflInterp.set_input_tensor(0, input_bin)
|> TflInterp.invoke()
|> TflInterp.get_output_tensor(0)
# postprocess
# add your post-processing here.
# you may need to reshape output_bin to tensor at first.
tensor = output_bin
|> Nx.from_binary({:f, 32})
|> Nx.reshape({size-x, size-y, :auto})
* your-postprocessing *
...
end
end
```
"""
defmacro __using__(opts) do
quote generated: true, location: :keep do
use GenServer
def start_link(opts) do
GenServer.start_link(__MODULE__, opts, name: __MODULE__)
end
def init(opts) do
executable = Application.app_dir(:tfl_interp, "priv/tfl_interp")
opts = Keyword.merge(unquote(opts), opts)
tfl_model = Keyword.get(opts, :model)
tfl_label = Keyword.get(opts, :label, "none")
tfl_opts = Keyword.get(opts, :opts, "")
port = Port.open({:spawn_executable, executable}, [
{:args, String.split(tfl_opts) ++ [tfl_model, tfl_label]},
{:packet, 4},
:binary
])
{:ok, %{port: port}}
end
def handle_call(cmd_line, _from, state) do
Port.command(state.port, cmd_line)
response = receive do
{_, {:data, <<result::binary>>}} -> {:ok, result}
after
Keyword.get(unquote(opts), :timeout, 300000) -> {:timeout}
end
{:reply, response, state}
end
def terminate(_reason, state) do
Port.close(state.port)
end
end
end
@doc """
Get the properties of the tflite model.
## Parameters
* mod - the module's name
"""
def info(mod) do
cmd = 0
case GenServer.call(mod, <<cmd::8>>, @timeout) do
{:ok, result} -> Poison.decode(result)
any -> any
end
end
@doc """
Stop the tflite interpreter.
## Parameters
* mod - the module's name
"""
def stop(mod) do
GenServer.stop(mod)
end
@doc """
Put a flat binary to the input tensor on the interpreter.
## Parameters
* mod - the module's name
* index - index of input tensor in the model
* bin - input data - flat binary, cf. serialized tensor
"""
def set_input_tensor(mod, index, bin) do
cmd = 1
case GenServer.call(mod, <<cmd::8, index::8, bin::binary>>, @timeout) do
{:ok, result} -> Poison.decode(result)
any -> any
end
mod
end
@doc """
Invoke prediction.
## Parameters
* mod - the module's name
"""
def invoke(mod) do
cmd = 2
case GenServer.call(mod, <<cmd::8>>, @timeout) do
{:ok, result} -> Poison.decode(result)
any -> any
end
mod
end
@doc """
Get the flat binary from the output tensor on the interpreter.
## Parameters
* mod - the module's name
* index - index of output tensor in the model
"""
def get_output_tensor(mod, index) do
cmd = 3
case GenServer.call(mod, <<cmd::8, index::8>>, @timeout) do
{:ok, result} -> result
any -> any
end
end
@doc """
Execute post processing: nms.
## Parameters
* mod - the module's name
* num_boxes - number of candidate boxes
* num_class - number of category class
* boxes - binaries, serialized boxes tensor[`num_boxes`][4]; dtype: float32
* scores - binaries, serialized score tensor[`num_boxes`][`num_class`]; dtype: float32
* iou_threshold - IOU threshold
* score_threshold - score cutoff threshold
* sigma - soft IOU parameter
"""
def non_max_suppression_multi_class(mod, {num_boxes, num_class}, boxes, scores, iou_threshold \\ 0.5, score_threshold \\ 0.25, sigma \\ 0.0) do
cmd = 4
case GenServer.call(mod, <<cmd::8, num_boxes::little-integer-32, num_class::little-integer-32, iou_threshold::little-float-32, score_threshold::little-float-32, sigma::little-float-32>> <> boxes <> scores, @timeout) do
{:ok, result} -> Poison.decode(result)
any -> any
end
end
end
| lib/tfl_interp.ex | 0.88129 | 0.838415 | tfl_interp.ex | starcoder |
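Since `use TflInterp` pulls in `use GenServer` (and thus a generated `child_spec/1` and the `start_link/1` shown above), the model module from the `@moduledoc` example can be supervised like any other process; a hypothetical sketch:

```elixir
# Hypothetical application supervisor using the module name from the
# @moduledoc example; the port process is started with the application.
children = [
  YourApp.YourModel
]

Supervisor.start_link(children, strategy: :one_for_one)
```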
defmodule Crux.Structs.Snowflake.Parts do
@moduledoc """
Custom non discord api struct representing a deconstructed Discord snowflake.
## Structure of the Parts
| Field | Bits | Number of Bits | Description |
| :-----------------: | :------: | :------------: | :------------------------------------------------------------------------: |
| Timestamp | 63 to 22 | 42 bits | Milliseconds since Discord Epoch (1420070400000) |
| Internal Worker ID | 21 to 17 | 5 bits | |
| Internal Process ID | 16 to 12 | 5 bits | |
| Increment | 11 to 0 | 12 bits | For every ID that is generated on that process, this number is incremented |
For more information see [Discord Docs](https://discord.com/developers/docs/reference#snowflakes).
"""
@moduledoc since: "0.2.1"
alias Crux.Structs.Snowflake
use Bitwise
@discord_epoch 1_420_070_400_000
@doc false
@doc since: "0.2.1"
@spec discord_epoch() :: non_neg_integer()
def discord_epoch(), do: @discord_epoch
# bits 63 to 22
@timestamp_bitmask 0xFFFF_FFFF_FFC0_0000
# bits 21 to 17
@worker_id_bitmask 0x3E_0000
# bits 16 to 12
@process_id_bitmask 0x1_F000
# bits 11 to 0
@increment_bitmask 0xFFF
@typedoc """
The parts of a `t:Crux.Structs.Snowflake.t/0`.
"""
@typedoc since: "0.2.1"
@type t :: %Snowflake.Parts{
timestamp: non_neg_integer,
worker_id: non_neg_integer,
process_id: non_neg_integer,
increment: non_neg_integer
}
defstruct timestamp: @discord_epoch,
worker_id: 0,
process_id: 0,
increment: 0
@doc false
@doc since: "0.2.1"
@spec deconstruct(Snowflake.t()) :: t
def deconstruct(snowflake) when is_integer(snowflake) and snowflake >= 0 do
%Snowflake.Parts{
timestamp: ((snowflake &&& @timestamp_bitmask) >>> 22) + @discord_epoch,
worker_id: (snowflake &&& @worker_id_bitmask) >>> 17,
process_id: (snowflake &&& @process_id_bitmask) >>> 12,
increment: (snowflake &&& @increment_bitmask) >>> 0
}
end
@doc false
@doc since: "0.2.1"
@spec construct(t | Keyword.t()) :: Snowflake.t()
def construct(%Snowflake.Parts{
timestamp: timestamp,
worker_id: worker_id,
process_id: process_id,
increment: increment
})
when timestamp >= @discord_epoch and worker_id >= 0 and process_id >= 0 and increment >= 0 do
timestamp = timestamp - @discord_epoch
0
|> bor(timestamp <<< 22 &&& @timestamp_bitmask)
|> bor(worker_id <<< 17 &&& @worker_id_bitmask)
|> bor(process_id <<< 12 &&& @process_id_bitmask)
|> bor(increment <<< 0 &&& @increment_bitmask)
end
def construct(opts) when is_list(opts) do
Snowflake.Parts
|> struct(opts)
|> construct()
end
end
| lib/structs/snowflake/parts.ex | 0.755952 | 0.4474 | parts.ex | starcoder |
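A round-trip sketch for the module above: for any 64-bit snowflake, `deconstruct/1` and `construct/1` are inverses, and `construct/1` also accepts a keyword list that is merged over the struct defaults (the snowflake value below is an arbitrary example, not a reference to a real entity).

```elixir
snowflake = 177888205536886784  # arbitrary example value

parts = Crux.Structs.Snowflake.Parts.deconstruct(snowflake)
true = Crux.Structs.Snowflake.Parts.construct(parts) == snowflake

# The keyword form relies on the struct defaults for the missing fields:
Crux.Structs.Snowflake.Parts.construct(timestamp: parts.timestamp, increment: parts.increment)
```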
defmodule Day12 do
@moduledoc """
Documentation for Day12.
"""
@north {1, 0}
@south {-1, 0}
@east {0, 1}
@west {0, -1}
@right_turns [@north, @east, @south, @west]
def part1 do
Day12.read_data("input.txt")
|> Day12.calc_distances()
|> Day12.sum_distances()
|> IO.inspect()
end
def part2 do
Day12.read_data("input.txt")
|> Day12.navigate()
|> IO.inspect()
end
def read_data(filename) do
File.stream!(filename)
|> Stream.map(&String.trim/1)
|> Stream.map(fn s -> String.split_at(s, 1) end)
|> Stream.map(fn {x, y} -> {x, String.to_integer(y)} end)
end
def move({x, y}, value) do
{x * value, y * value}
end
def turn_right(heading, value) do
Enum.at(
@right_turns,
rem(
Enum.find_index(@right_turns, fn i -> i == heading end) + floor(value / 90),
4
)
)
end
def calc_change({heading, {inst, value}}) do
case inst do
"N" -> {heading, move(@north, value)}
"S" -> {heading, move(@south, value)}
"E" -> {heading, move(@east, value)}
"W" -> {heading, move(@west, value)}
"F" -> {heading, move(heading, value)}
"L" -> calc_change({heading, {"R", 360 - value}})
"R" -> {turn_right(heading, value), {0, 0}}
end
end
def calc_distances(stream) do
stream
|> Stream.transform(@east, fn cmd, heading ->
{new_heading, distance} = calc_change({heading, cmd})
{[distance], new_heading}
end)
end
def sum_distances(stream) do
stream |> Enum.reduce({0, 0}, fn {dx, dy}, {tx, ty} -> {tx + dx, ty + dy} end)
end
def navigate(stream) do
stream
|> Stream.transform(%State{}, fn {inst, value},
%State{
ship: ship,
waypoint: waypoint
} = state ->
new_state =
case inst do
"N" -> %{state | waypoint: Position.move(waypoint, {0, 1}, value)}
"S" -> %{state | waypoint: Position.move(waypoint, {0, -1}, value)}
"E" -> %{state | waypoint: Position.move(waypoint, {1, 0}, value)}
"W" -> %{state | waypoint: Position.move(waypoint, {-1, 0}, value)}
"R" -> %{state | waypoint: Position.rotate(waypoint, value)}
"L" -> %{state | waypoint: Position.rotate(waypoint, 360 - value)}
"F" -> %{state | ship: Position.move(ship, Position.to_vector(waypoint), value)}
end
# IO.inspect([{inst, value}, state, new_state])
{[new_state], new_state}
end)
|> Enum.to_list()
|> List.last()
end
end
| day12/lib/day12.ex | 0.709019 | 0.511534 | day12.ex | starcoder |
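The `navigate/1` pipeline above depends on `State` and `Position` modules that live elsewhere in the project. A plausible sketch of what they could look like, under the assumption that positions are `{east, north}` tuples and the waypoint starts 10 units east and 1 north (per the Advent of Code 2020 day 12 puzzle):

```elixir
# Assumed representation: {east, north} tuples; rotations come in 90-degree steps.
defmodule Position do
  # Move a point along a unit vector, `times` steps.
  def move({x, y}, {dx, dy}, times), do: {x + dx * times, y + dy * times}

  def to_vector({x, y}), do: {x, y}

  # Clockwise rotation of the waypoint about the ship.
  def rotate(pos, 0), do: pos
  def rotate({x, y}, degrees) when degrees > 0, do: rotate({y, -x}, degrees - 90)
end

defmodule State do
  defstruct ship: {0, 0}, waypoint: {10, 1}
end
```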
defmodule Esperanto.Parsers.TopLevel do
require Logger
alias Esperanto.Walker
@behaviour Esperanto.Parser
@default_parsers [
{Esperanto.Parsers.PlainText, nil},
{Esperanto.Parsers.Br, nil},
{Esperanto.Parsers.Img, nil},
{Esperanto.Parsers.Link, nil},
{Esperanto.Parsers.IndentedCode, nil},
{Esperanto.Parsers.InlineFencedCode, nil},
{Esperanto.Parsers.FencedCode, nil},
{Esperanto.Parsers.Italic, nil},
{Esperanto.Parsers.Bold, nil}
]
@spec default_parsers :: list()
def default_parsers, do: @default_parsers
@moduledoc """
Top level parser.
This parser selects which parser will be used, based on the `should_parse` callback of the other modules.
"""
@impl Esperanto.Parser
def should_parse(_, _, _, _), do: true
@impl Esperanto.Parser
def parse(walker, nil, nil, opts) do
tree = NaryTree.new(NaryTree.Node.new())
parse(walker, tree, tree.root, opts)
end
def parse(walker, tree, parent_id, opts) do
opts = Keyword.merge([parsers: @default_parsers], opts)
astify(walker, tree, parent_id, opts, :find_parse)
end
# No more input; finish parsing
defp astify(
%Walker{input: "", rest: ""} = walker,
tree,
_parent_id,
_opts,
_selected_parser
) do
{tree, walker}
end
# Walker hit a barrier
defp astify(
%Walker{rest: :barried} = walker,
tree,
parent_id,
opts,
_
) do
# one last chance
case select_parse(walker, tree, parent_id, opts) do
{parser, _opts} -> parser.parse(walker, tree, parent_id, opts)
_ -> {tree, walker}
end
end
# Parser found, execute it
defp astify(input, tree, parent_id, opts, {parser, _opts}) do
Logger.debug("Using parse #{parser} #{readable_walker(input)}")
{tree, walker} = parser.parse(input, tree, parent_id, opts)
astify(walker, tree, parent_id, opts, :find_parse)
end
# Find parser
defp astify(walker, tree, parent_id, opts, :find_parse) do
selected_parsers = select_parse(walker, tree, parent_id, opts)
astify(walker, tree, parent_id, opts, selected_parsers)
end
# no parser found, walk
defp astify(walker, tree, parent_id, opts, :walk) do
walker = Walker.walk(walker)
# Logger.debug("No parse found for #{readable_walker(walker)} walking...")
astify(walker, tree, parent_id, opts, :find_parse)
end
# Only one parser found, ready to go
defp select_parse(_input, [parse]) do
parse
end
# no parser found, walk
defp select_parse(_input, []) do
:walk
end
# Error: more than one parser found. The grammar is ambiguous
defp select_parse(input, parsers) do
parsers =
parsers
|> Enum.map(&elem(&1, 0))
|> Enum.map(&Atom.to_string/1)
|> Enum.join(", ")
raise "Grammar is ambiguos! More then one parser found for input \"#{input.input}\": #{parsers}."
end
# find parsers that should be executed
defp select_parse(walker, tree, parent_id, opts) do
parsers = Keyword.get(opts, :parsers)
filtered_parsers =
Enum.filter(parsers, fn {parser, opts} ->
parser.should_parse(walker, tree, parent_id, opts)
end)
# if more than one parser is found, give priority to any parser other than PlainText
if Enum.count(filtered_parsers) > 1 do
filtered_parsers =
Enum.filter(filtered_parsers, fn {parser, _opts} ->
parser != Esperanto.Parsers.PlainText
end)
select_parse(walker, filtered_parsers)
else
select_parse(walker, filtered_parsers)
end
end
defp readable_walker(%Walker{input: input, rest: :barried}) do
"input: #{readable_string(input)} rest: :barried"
end
defp readable_walker(%Walker{input: input, rest: rest}) do
"input: #{readable_string(input)} rest: #{readable_string(rest)}"
end
defp readable_string(input) do
case String.length(input) > 10 do
true -> inspect(String.slice(input, 0..4) <> "..." <> String.slice(input, -5..-1))
_ -> inspect(input)
end
end
end
| apps/esperanto/lib/trybe/esperanto/parsers/top_level_parser.ex | 0.662251 | 0.462473 | top_level_parser.ex | starcoder |
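For reference, a minimal custom parser that `TopLevel` could dispatch to, inferred from how `should_parse/4` and `parse/4` are called above; the full `Esperanto.Parser` behaviour is not shown in this file, so treat the callback list and module names as assumptions.

```elixir
defmodule MyParsers.Noop do
  @behaviour Esperanto.Parser

  # Never claims the input, so TopLevel keeps walking.
  @impl Esperanto.Parser
  def should_parse(_walker, _tree, _parent_id, _opts), do: false

  # If it did claim the input, it would return the updated {tree, walker} pair,
  # which is exactly what TopLevel expects back from parser.parse/4.
  @impl Esperanto.Parser
  def parse(walker, tree, _parent_id, _opts), do: {tree, walker}
end

# Enabled by prepending it to the parser list:
# Esperanto.Parsers.TopLevel.parse(walker, nil, nil,
#   parsers: [{MyParsers.Noop, nil} | Esperanto.Parsers.TopLevel.default_parsers()])
```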
defmodule FusionAuth.Response do
@moduledoc """
The `FusionAuth.Response` module provides functions to format a `FusionAuth.result()` response.
## Example
```
iex> FusionAuth.client()
|> FusionAuth.Groups.get_groups()
|> FusionAuth.Response.format("groups", &atomize_keys/1)
{:ok, [%{id: "id", name: "name"}]}
```
"""
@type result :: FusionAuth.result()
@spec format(result()) :: {:error, any()}
def format({:error, error, _}), do: {:error, error}
@spec format(result()) :: {:ok, map() | list()}
def format({:ok, data, _}), do: {:ok, data}
@spec format(result(), String.t()) :: {:error, any()}
def format({:error, error, _}, _), do: {:error, error}
@spec format(result(), String.t()) :: {:ok, map() | list()}
def format({:ok, data, _}, payload_key), do: {:ok, Map.get(data, payload_key)}
@spec format(result(), String.t(), fun()) :: {:error, any()}
def format({:error, error, _}, _, _), do: {:error, error}
@spec format(result(), String.t(), fun()) :: {:ok, map() | list()}
def format({:ok, data, _}, payload_key, formatter) do
payload = Map.get(data, payload_key, nil)
{:ok, process(payload, formatter)}
end
@spec format(result(), String.t(), fun(), fun()) :: {:error, any()}
def format({:error, error, _}, _, _, _), do: {:error, error}
@spec format(result(), String.t(), fun(), fun()) :: {:ok, map() | list()}
def format({:ok, data, _}, payload_key, formatter, format_helper) do
payload = Map.get(data, payload_key, nil)
{:ok, process(payload, formatter, format_helper)}
end
defp process(nil, _), do: nil
defp process(data, formatter) when is_list(data),
do: Enum.map(data, &process(&1, formatter))
defp process(data, formatter) when is_map(data),
do: formatter.(data)
defp process(data, formatter, format_helper) when is_map(data),
do: formatter.(data, format_helper)
defp process(data, formatter, format_helper) when is_list(data),
do: Enum.map(data, &process(&1, formatter, format_helper))
end
| lib/fusion_auth/response.ex | 0.911367 | 0.706899 | response.ex | starcoder |
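The `&atomize_keys/1` formatter in the `@moduledoc` example above is not defined in this module; a minimal, non-recursive sketch of such a formatter (module and function names are assumptions):

```elixir
defmodule MyApp.Formatters do
  # Shallow conversion of string keys to atoms. Production code may prefer
  # String.to_existing_atom/1 to avoid creating atoms from untrusted input.
  def atomize_keys(map) when is_map(map) do
    Map.new(map, fn {key, value} -> {String.to_atom(key), value} end)
  end
end

# Used as in the @moduledoc example:
# |> FusionAuth.Response.format("groups", &MyApp.Formatters.atomize_keys/1)
```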
defmodule Scenic.Primitive.Transform do
@moduledoc """
Change the position, rotation, scale and more of a primitive.
Unlike html, which uses auto-layout to position items on the screen, Scenic moves primitives around using matrix transforms. This is common in video games and provides powerful control of your primitives.
A [matrix](https://en.wikipedia.org/wiki/Matrix_(mathematics)) is an array of numbers that can be used to change the positions, rotations, scale and more of locations.
**Don't worry!** You will not need to look at any matrices unless you want to get fancy. In Scenic, you will rarely (if ever) create matrices on your own (you can if you know what you are doing!), and will instead use the transform helpers.
Multiple transforms can be applied to any primitive. Transforms combine down the graph to create a very flexible way to manage your scene.
There are a fixed set of transform helpers that create matrices for you.
* [`Matrix`](Scenic.Primitive.Transform.Matrix.html) hand specify a matrix.
* [`Pin`](Scenic.Primitive.Transform.Pin.html) set a pin to rotate or scale around. Most primitives define a sensible default pin.
* [`Rotate`](Scenic.Primitive.Transform.Rotate.html) rotate around the pin.
* [`Scale`](Scenic.Primitive.Transform.Scale.html) scale larger or smaller. Centered around the pin.
* [`Translate`](Scenic.Primitive.Transform.Translate.html) move/translate horizontally and vertically.
### Specifying Transforms
You apply transforms to a primitive the same way you specify styles.
```elixir
graph =
Graph.build
|> circle( 100, fill: {:color, :green}, translate: {200, 200} )
|> ellipse( {40, 60}, fill: {:color, :red}, rotate: 0.4, translate: {100, 100} )
```
Don't worry about the order you apply transforms to a single object. Scenic will multiply them together in the correct way when it comes time to render them.
"""
alias Scenic.Math.Matrix
alias Scenic.Math.Vector2
alias Scenic.Primitive.Transform
@callback validate(data :: any) :: {:ok, data :: any} | {:error, String.t()}
# ===========================================================================
@opts_map %{
:pin => Transform.Pin,
:scale => Transform.Scale,
:rotate => Transform.Rotate,
:translate => Transform.Translate,
:matrix => Transform.Matrix,
:s => Transform.Scale,
:r => Transform.Rotate,
:t => Transform.Translate
}
@opts_schema [
# Note: Due to https://github.com/dashbitco/nimble_options/issues/68 any
# `:rename_to` entries need to come before the keys that they are renaming
t: [rename_to: :translate],
s: [rename_to: :scale],
r: [rename_to: :rotate],
translate: [type: {:custom, Transform.Translate, :validate, []}],
scale: [type: {:custom, Transform.Scale, :validate, []}],
rotate: [type: {:custom, Transform.Rotate, :validate, []}],
pin: [type: {:custom, Transform.Pin, :validate, []}],
matrix: [type: {:custom, Transform.Matrix, :validate, []}]
]
@primitive_transforms [
:pin,
:scale,
:rotate,
:translate,
:matrix
]
@doc false
def opts_map(), do: @opts_map
@doc false
def opts_schema(), do: @opts_schema
# ===========================================================================
# defmacro __using__([type_code: type_code]) when is_integer(type_code) do
defmacro __using__(_opts) do
quote do
@behaviour Scenic.Primitive.Transform
end
end
@doc false
def valid(), do: @primitive_transforms
# ============================================================================
# transform helper functions
# --------------------------------------------------------
@doc """
Given a Map describing the transforms on a primitive, calculate the combined matrix
that should be applied.
This is trickier than just multiplying them together. Rotations, translations and scale
need to be applied in the right order, which is why this function is provided.
You will not normally need to use this function. It is used internally by the input system.
"""
def combine(txs)
def combine(nil), do: nil
def combine(txs) when txs == %{}, do: nil
def combine(%{pin: _} = txs) do
# look for case where only the pin is set
case Enum.count(txs) do
1 -> nil
_ -> do_combine(txs)
end
end
def combine(txs), do: do_combine(txs)
defp do_combine(txs) do
# start with identity - which is like multiplying by 1
Matrix.identity()
|> multiply_partial(:matrix, txs[:matrix])
|> multiply_partial(:translate, txs[:translate])
|> rotate_and_scale(txs)
end
# --------------------------------------------------------
defp multiply_partial(mx, type, value)
defp multiply_partial(mx, _, nil), do: mx
defp multiply_partial(mx, :pin, point), do: Matrix.translate(mx, point)
defp multiply_partial(mx, :scale, pct), do: Matrix.scale(mx, pct)
defp multiply_partial(mx, :rotate, rot), do: Matrix.rotate(mx, rot)
defp multiply_partial(mx, :translate, trns), do: Matrix.translate(mx, trns)
defp multiply_partial(mx, :matrix, dev_mx), do: Matrix.mul(mx, dev_mx)
defp multiply_partial(mx, :inv_pin, point) do
Matrix.translate(mx, Vector2.invert(point))
end
# --------------------------------------------------------
defp rotate_and_scale(mx, txs) do
# don't do any work if neither rotate nor scale is set
# don't need to translate twice for no reason
case txs[:rotate] || txs[:scale] do
nil ->
mx
_ ->
mx
|> multiply_partial(:pin, txs[:pin])
|> multiply_partial(:rotate, txs[:rotate])
|> multiply_partial(:scale, txs[:scale])
|> multiply_partial(:inv_pin, txs[:pin])
end
end
end
| lib/scenic/primitive/transform/transform.ex | 0.930829 | 0.960212 | transform.ex | starcoder |
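A small sketch of `combine/1` from the module above on its own, mirroring the ordering it documents (the pin translation wraps the rotate/scale step); the exact matrix representation is whatever `Scenic.Math.Matrix` uses internally.

```elixir
txs = %{translate: {10, 20}, rotate: 0.5, scale: 1.5, pin: {40, 40}}
mx = Scenic.Primitive.Transform.combine(txs)
# mx is the combined matrix Scenic applies when rendering the primitive

# A transform map containing only a pin combines to nothing at all:
Scenic.Primitive.Transform.combine(%{pin: {40, 40}})
#=> nil
```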
defmodule Nosedrum.Converters do
@moduledoc """
Conversion from command arguments to various types.
This module provides an interface to the individual converter modules.
Most converter functions related to Discord itself take a `guild_id`
which is used for loading the guild from the cache. If the guild could
not be loaded from the cache, implementations will usually attempt to
fetch the relevant data from the API.
"""
alias Nostrum.Struct.{Channel, Guild}
alias Nostrum.Struct.Guild.{Member, Role}
@doc """
Convert the given `text` to a `t:Nostrum.Struct.Channel.t/0`.
Lookup is attempted in the following order:
- by direct ID, such as `9999`
- by mention, such as `<#9999>`
- by name, such as `mod-log`
"""
@spec to_channel(String.t(), Guild.id()) :: {:ok, Channel.t()} | {:error, String.t()}
defdelegate to_channel(text, guild_id), to: __MODULE__.Channel, as: :into
@doc """
Convert the given `text` to a `t:Nostrum.Struct.Guild.Member.t/0`.
Lookup is attempted in the following order:
- by direct ID, such as `1231321`
- by mention, such as `<@1231321>`, `<@!1231321>`
- by name#discrim combination, such as `Jimmy#9999`
- by name, such as `Jimmy`
- by nickname, such as `SuperJimmy`
Note that name and nickname lookups may not be 100% accurate: if there are
multiple users with the same name on the server, the first one found will be
used.
"""
@spec to_member(String.t(), Guild.id()) :: {:ok, Member.t()} | {:error, String.t()}
defdelegate to_member(text, guild_id), to: __MODULE__.Member, as: :into
@doc """
Convert the given `text` to a `t:Nostrum.Struct.Guild.Role.t/0`.
Lookup is attempted in the following order:
- by direct ID, such as `5555`
- by mention, such as `<@&5555>`
- by name, such as `Bots`
The optional `ilike` argument determines whether the role name
search should be case-insensitive. This is useful if your users
are lazy and you want to save them from holding down an extra
button on their keyboard.
"""
@spec to_role(String.t(), Guild.id(), boolean()) :: {:ok, Role.t()} | {:error, String.t()}
defdelegate to_role(text, guild_id, ilike \\ false), to: __MODULE__.Role, as: :into
end
| lib/nosedrum/converters.ex | 0.827863 | 0.551393 | converters.ex | starcoder |
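A hypothetical command sketch for the module above showing the intended flow: take whatever text the user supplied, let the converter resolve it, and branch on the tagged result (the module and its shape are assumptions, not part of Nosedrum).

```elixir
defmodule MyBot.Commands.Kick do
  # Resolve whatever the user typed (ID, mention, name#discrim, name, nickname)
  # to a member, then branch on the tagged result.
  def execute(target_text, guild_id) do
    case Nosedrum.Converters.to_member(target_text, guild_id) do
      {:ok, member} -> {:ok, member}
      {:error, reason} -> {:error, "Could not resolve member: " <> reason}
    end
  end
end

# MyBot.Commands.Kick.execute("Jimmy#9999", guild_id)
```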
defmodule Base32Crockford do
@moduledoc ~S"""
Base32-Crockford: base-32 encoding for expressing integer numbers
in a form that can be conveniently and accurately transmitted
between humans and computer systems.
[https://www.crockford.com/wrmg/base32.html](https://www.crockford.com/wrmg/base32.html)
A symbol set of 10 digits and 22 letters is used:
`0123456789ABCDEFGHJKMNPQRSTVWXYZ`
It does not include 4 of the 26 letters: I L O U.
A check symbol can be appended to a symbol string. 5 additional symbols
`*~$=U` are used only for encoding or decoding the check symbol.
When decoding, upper and lower case letters are accepted,
and i and l will be treated as 1 and o will be treated as 0.
When encoding, only upper case letters are used.
"""
@doc ~S"""
Encodes an integer number into base32-crockford encoded string.
Checksum can be added to the end of the string if the
`:checksum` option is set to true.
For better readability the resulting string can be partitioned by hyphens
if the `:partitions` option is provided.
## Options
* `:checksum` (boolean) - the check symbol will be added to the end
of the string. The check symbol encodes the number modulo 37,
37 being the least prime number greater than 32.
* `:partitions` (positive integer) - hyphens (-) will be inserted into
symbol strings to partition a string into manageable pieces,
improving readability by helping to prevent confusion.
## Examples
iex> Base32Crockford.encode(973_113_317)
"X011Z5"
To add a check symbol to the end of the string:
iex> Base32Crockford.encode(973_113_317, checksum: true)
"X011Z5$"
To partition a resulting string into pieces:
iex> Base32Crockford.encode(973_113_317, partitions: 2)
"X01-1Z5"
iex> Base32Crockford.encode(973_113_317, partitions: 3)
"X0-11-Z5"
iex> Base32Crockford.encode(973_113_317, partitions: 4)
"X-0-11-Z5"
"""
@spec encode(integer, keyword) :: binary
def encode(number, opts \\ []) when is_integer(number) do
init_encoding(number, opts)
|> base10to32(number)
|> to_string
|> partition(opts)
end
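# Sketch combining both options (values follow from the doctests above; the check
# symbol "$" is appended before the string is partitioned):
#
#     Base32Crockford.encode(973_113_317, checksum: true, partitions: 2)
#     #=> "X01-1Z5$"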
@doc ~S"""
Decodes base32-crockford encoded string into integer number.
Upper and lower case letters are accepted, and i and l will be treated as 1
and o will be treated as 0.
Hyphens are ignored during decoding.
## Options
* `:checksum` (boolean) - the last symbol will be considered as check symbol
and extracted from the encoded string before decoding. It then will be
compared with a check symbol calculated from a decoded number.
## Examples
iex> Base32Crockford.decode("X011Z5")
{:ok, 973113317}
iex> Base32Crockford.decode("XoIlZ5")
{:ok, 973113317}
iex> Base32Crockford.decode("X01-1Z5")
{:ok, 973113317}
iex> Base32Crockford.decode("X011Z5$", checksum: true)
{:ok, 973113317}
iex> Base32Crockford.decode("X011Z5=", checksum: true)
:error
"""
@spec decode(binary, keyword) :: {:ok, integer} | :error
def decode(binary, opts \\ []) when is_binary(binary) do
{chars, checksum} = binary
|> String.replace("-", "")
|> String.upcase
|> String.reverse
|> String.to_charlist
|> init_decoding(opts)
values = chars
|> Enum.with_index
|> Enum.map(&base32to10/1)
case Enum.filter(values, &(&1 == :error)) do
[] ->
Enum.sum(values)
|> check(checksum)
_ -> :error
end
end
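# Sketch: hyphens and the check symbol can be combined when decoding as well
# (values follow from the doctests above):
#
#     Base32Crockford.decode("X01-1Z5$", checksum: true)
#     #=> {:ok, 973113317}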
@doc ~S"""
Similar to `decode/2` but raises `ArgumentError` if a checksum is invalid or
an invalid character is present in the string.
## Options
Accepts the same options as `decode/2`.
## Examples
iex> Base32Crockford.decode!("X011Z5")
973113317
"""
@spec decode!(binary, keyword) :: integer
def decode!(binary, opts \\ []) when is_binary(binary) do
case decode(binary, opts) do
{:ok, number} -> number
:error ->
raise ArgumentError, "contains invalid character or checksum does not match"
end
end
defp init_encoding(number, opts) do
if Keyword.get(opts, :checksum, false) do
[calculate_checksum(number)]
else
[]
end
end
defp init_decoding(chars, opts) do
if Keyword.get(opts, :checksum, false) do
[checksum | chars] = chars
{chars, checksum}
else
{chars, nil}
end
end
defp base10to32([], 0), do: '0'
defp base10to32('0', 0), do: '00'
defp base10to32(chars, 0), do: chars
defp base10to32(chars, number) do
remainder = rem(number, 32)
chars = [enc(remainder) | chars]
number = div(number, 32)
base10to32(chars, number)
end
defp base32to10({char, power}) do
with {:ok, value} <- dec(char) do
round(value * :math.pow(32, power))
end
end
defp check(number, nil), do: {:ok, number}
defp check(number, checksum) do
case calculate_checksum(number) do
^checksum ->
{:ok, number}
_ -> :error
end
end
defp partition(binary, opts) do
case Keyword.get(opts, :partitions, 0) do
count when count in [0, 1] ->
binary
count ->
split([], binary, count)
|> Enum.reverse
|> Enum.join("-")
end
end
defp split(parts, binary, 1), do: [binary | parts]
defp split(parts, binary, count) do
len = div(String.length(binary), count)
{part, rest} = String.split_at(binary, len)
split([part | parts], rest, count - 1)
end
defp calculate_checksum(number) do
remainder = rem(number, 37)
enc(remainder)
end
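# Worked example tying this to the encode/2 doctest above:
#
#     rem(973_113_317, 37) #=> 34
#
# Value 34 falls into the check-symbol range (32..36 map onto '*~$=U'), so enc(34)
# returns ?$, which is exactly the "$" appended in "X011Z5$".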
encoding_symbols = '0123456789ABCDEFGHJKMNPQRSTVWXYZ'
check_symbols = '*~$=U'
encoding_alphabet = Enum.with_index(encoding_symbols ++ check_symbols)
for {encoding, value} <- encoding_alphabet do
defp enc(unquote(value)), do: unquote(encoding)
end
decoding_alphabet = Enum.with_index(encoding_symbols)
for {encoding, value} <- decoding_alphabet do
defp dec(unquote(encoding)), do: {:ok, unquote(value)}
end
defp dec(79), do: {:ok, 0} # O
defp dec(73), do: {:ok, 1} # I
defp dec(76), do: {:ok, 1} # L
defp dec(_), do: :error
end
|
lib/base32_crockford.ex
| 0.913148
| 0.676179
|
base32_crockford.ex
|
starcoder
|
defmodule MacroX do
@moduledoc ~S"""
`Macro` extension module.
"""
@doc ~S"""
Converts the given atom or binary to snake_case format.
If an atom is given, it is assumed to be an Elixir module,
so it is converted to a binary and then processed.
This function was designed to snakize language identifiers/tokens,
that's why it belongs to the `MacroX` module. Do not use it as a general
mechanism for underscoring strings as it does not support Unicode or
characters that are not valid in Elixir identifiers.
## Examples
```elixir
iex> MacroX.snakize("FooBar")
"foo_bar"
iex> MacroX.snakize("Foo.Bar")
"foo/bar"
iex> MacroX.snakize(Foo.Bar)
"foo/bar"
iex> MacroX.snakize(:FooBar)
:foo_bar
```
In general, `snakize` can be thought of as the reverse of
`pascalize`, however, in some cases formatting may be lost:
```elixir
iex> MacroX.snakize("SAPExample")
"sap_example"
iex> MacroX.pascalize("sap_example")
"SapExample"
iex> MacroX.pascalize("hello_10")
"Hello10"
```
"""
@spec snakize(String.t() | atom) :: String.t() | atom
def snakize(atom) when is_atom(atom) do
case Atom.to_string(atom) do
"Elixir." <> rest -> snakize(rest)
atomize -> atomize |> snakize() |> String.to_atom()
end
end
def snakize(<<h, t::binary>>), do: <<to_lower_char(h)>> <> do_snakize(t, h)
def snakize(""), do: ""
defp do_snakize(<<h, t, rest::binary>>, _)
when h >= ?A and h <= ?Z and not (t >= ?A and t <= ?Z) and t != ?. and t != ?_ do
<<?_, to_lower_char(h), t>> <> do_snakize(rest, t)
end
defp do_snakize(<<h, t::binary>>, prev)
when h >= ?A and h <= ?Z and not (prev >= ?A and prev <= ?Z) and prev != ?_ do
<<?_, to_lower_char(h)>> <> do_snakize(t, h)
end
defp do_snakize(<<?., t::binary>>, _), do: <<?/>> <> snakize(t)
defp do_snakize(<<h, t::binary>>, _), do: <<to_lower_char(h)>> <> do_snakize(t, h)
defp do_snakize(<<>>, _), do: <<>>
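# Illustrative trace for snakize("SAPExample"), matching the doctest above: "S" is
# lowercased to "s"; "A" and "P" fall through to the catch-all clause because the
# surrounding characters are also uppercase; "E" followed by the lowercase "x"
# matches the first clause and emits "_ex"; the remainder is lowercased, giving
# "sap_example".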
@doc ~S"""
Alias for `snakize/1`.
## Example
```elixir
iex> MacroX.underscore("PascalCase")
"pascal_case"
```
"""
@spec underscore(String.t() | atom) :: String.t() | atom
def underscore(data), do: snakize(data)
@doc ~S"""
Converts the given string to PascalCase format.
This function was designed to pascalize language identifiers/tokens,
that's why it belongs to the `MacroX` module. Do not use it as a general
mechanism for pascalizing strings as it does not support Unicode or
characters that are not valid in Elixir identifiers.
## Examples
```elixir
iex> MacroX.pascalize("foo_bar")
"FooBar"
iex> MacroX.pascalize(:foo_bar)
FooBar
iex> MacroX.pascalize("cluster_ip")
"ClusterIP"
```
If uppercase characters are present, they are not modified in any way
as a mechanism to preserve acronyms:
```
iex> MacroX.pascalize("API.V1")
"API.V1"
iex> MacroX.pascalize("API_SPEC")
"API_SPEC"
```
"""
@spec pascalize(String.t() | atom) :: String.t() | atom
def pascalize(data) when is_atom(data),
do: String.to_atom("Elixir." <> pascalize(to_string(data)))
def pascalize(data), do: Macro.camelize(Regex.replace(~r/(^|_)ip(_|$)/, data, "\\1iP\\2"))
@doc ~S"""
Properly converts atoms and strings to camelCase.
Unlike `Macro.camelize/1`, which converts only strings to PascalCase.
## Examples
```elixir
iex> MacroX.camelize(:my_atom)
:myAtom
iex> MacroX.camelize("my_string")
"myString"
iex> MacroX.camelize("my_ip_address")
"myIPAddress"
```
"""
@spec camelize(atom | String.t()) :: atom | String.t()
def camelize(h) when is_atom(h), do: String.to_atom(camelize(to_string(h)))
def camelize(data), do: pre_camelize(Regex.replace(~r/(^|_)ip(_|$)/, data, "\\1IP\\2"))
defp pre_camelize(<<h, t::binary>>), do: <<h>> <> do_camelize(t)
defp pre_camelize(<<>>), do: <<>>
defp do_camelize(<<?_, ?_, t::binary>>), do: do_camelize(<<?_, t::binary>>)
defp do_camelize(<<?_, h, t::binary>>) when h >= ?a and h <= ?z,
do: <<to_upper_char(h)>> <> do_camelize(t)
defp do_camelize(<<?_>>), do: <<>>
defp do_camelize(<<?_, t::binary>>), do: do_camelize(t)
defp do_camelize(<<h, t::binary>>), do: <<h>> <> do_camelize(t)
defp do_camelize(<<>>), do: <<>>
defp to_upper_char(char) when char >= ?a and char <= ?z, do: char - 32
# defp to_upper_char(char), do: char
defp to_lower_char(char) when char >= ?A and char <= ?Z, do: char + 32
defp to_lower_char(char), do: char
end
|
lib/common_x/macro_x.ex
| 0.853379
| 0.818084
|
macro_x.ex
|
starcoder
|
defmodule PatternMatching do
use Koans
@intro "PatternMatching"
koan "One matches one" do
assert match?(1, 1)
end
koan "Patterns can be used to pull things apart" do
[head | tail] = [1, 2, 3, 4]
assert head == 1
assert tail == [2, 3, 4]
end
koan "And then put them back together" do
head = 1
tail = [2, 3, 4]
assert [1, 2, 3, 4] == [head | tail]
end
koan "Some values can be ignored" do
[_first, _second, third, _fourth] = [1, 2, 3, 4]
assert third == 3
end
koan "Strings come apart just as easily" do
"Shopping list: " <> items = "Shopping list: eggs, milk"
assert items == "eggs, milk"
end
koan "Maps support partial pattern matching" do
%{make: make} = %{type: "car", year: 2016, make: "Honda", color: "black"}
assert make == "Honda"
end
koan "Lists must match exactly" do
assert_raise MatchError, fn ->
[_a, _b] = [1, 2, 3]
end
end
koan "So does the keyword lists" do
kw_list = [type: "car", year: 2016, make: "Honda"]
[_type | [_year | [tuple]]] = kw_list
assert tuple == {:make, "Honda"}
end
koan "The pattern can make assertions about what it expects" do
assert match?([1, _second, _third], [1, "foo", "bar"])
end
def make_noise(%{type: "cat"}), do: "Meow"
def make_noise(%{type: "dog"}), do: "Woof"
def make_noise(_anything), do: "Eh?"
koan "Functions perform pattern matching on their arguments" do
cat = %{type: "cat"}
dog = %{type: "dog"}
snake = %{type: "snake"}
assert make_noise(cat) == "Meow"
assert make_noise(dog) == "Woof"
assert make_noise(snake) == "Eh?"
end
koan "And they will only run the code that matches the argument" do
name = fn
("duck") -> "Donald"
("mouse") -> "Mickey"
(_other) -> "I need a name!"
end
assert name.("mouse") == "Mickey"
assert name.("duck") == "Donald"
assert name.("donkey") == "I need a name!"
end
koan "Errors are shaped differently than successful results" do
dog = %{type: "dog"}
result = case Map.fetch(dog, :type) do
{:ok, value} -> value
:error -> "not present"
end
assert result == "dog"
end
defmodule Animal do
defstruct [:kind, :name]
end
koan "You can pattern match into the fields of a struct" do
%Animal{name: name} = %Animal{kind: "dog", name: "Max"}
assert name == "Max"
end
defmodule Plane do
defstruct passengers: 0, maker: :boeing
end
def plane?(%Plane{}), do: true
def plane?(_), do: false
koan "...or onto the type of the struct itself" do
assert plane?(%Plane{passengers: 417, maker: :boeing}) == true
assert plane?(%Animal{}) == false
end
koan "Structs will even match with a regular map" do
%{name: name} = %Animal{kind: "dog", name: "Max"}
assert name == "Max"
end
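# Note that the reverse does not hold: a struct pattern such as %Animal{name: name}
# will not match a plain map like %{name: "Max"}, because the map has no __struct__
# key, so `%Animal{name: _} = %{name: "Max"}` raises MatchError.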
koan "A value can be bound to a variable" do
a = 1
assert a == 1
end
koan "A variable can be rebound" do
a = 1
a = 2
assert a == 2
end
koan "A variable can be pinned to use its value when matching instead of binding to a new value" do
pinned_variable = 1
example = fn
(^pinned_variable) -> "The number One"
(2) -> "The number Two"
(number) -> "The number #{number}"
end
assert example.(1) == "The number One"
assert example.(2) == "The number Two"
assert example.(3) == "The number 3"
end
koan "Pinning works anywhere one would match, including 'case'" do
pinned_variable = 1
result = case 1 do
^pinned_variable -> "same"
other -> "different #{other}"
end
assert result == "same"
end
koan "Trying to rebind a pinned variable will result in an error" do
a = 1
assert_raise MatchError, fn() ->
^a = 2
end
end
end
|
lib/koans/12_pattern_matching.ex
| 0.863607
| 0.720811
|
12_pattern_matching.ex
|
starcoder
|
defmodule Envelope do
@moduledoc ~S"""
A library for calculating envelopes of geometries and tools to compare them.
This is most useful as an approximation of spatial relationships between more
complicated geometries.
iex> Envelope.from_geo( %Geo.Polygon{coordinates: [[{2, -2}, {20, -2}, {11, 11}, {2, -2}]]} )
%Envelope{ min_x: 2, min_y: -2, max_x: 20, max_y: 11 }
iex> Envelope.from_geo( %Geo.LineString{coordinates: [{1, 3}, {2, -1}, {0, -1}, {1, 3}]} )
%Envelope{ min_x: 0, min_y: -1, max_x: 2, max_y: 3 }
You can also expand an existing Envelope with a geometry or another Envelope
iex> a = Envelope.from_geo( %Geo.Polygon{coordinates: [[{2, -2}, {20, -2}, {11, 11}, {2, -2}]]} )
...> b = %Geo.LineString{coordinates: [{1, 3}, {2, -1}, {0, -1}, {1, 3}]}
...> Envelope.expand(a, b)
%Envelope{ min_x: 0, min_y: -2, max_x: 20, max_y: 11 }
"""
defstruct min_x: 0, min_y: 0, max_x: 0, max_y: 0
@type t() :: %__MODULE__{
min_x: number() | nil,
min_y: number() | nil,
max_x: number() | nil,
max_y: number() | nil
}
@type point :: {number, number}
@type points ::
point
| list
| %{coordinates: list}
| %Geo.Point{}
| %Geo.MultiPoint{}
| %Geo.LineString{}
| %Geo.MultiLineString{}
| %Geo.Polygon{}
| %Geo.MultiPolygon{}
alias Distance.GreatCircle
@doc ~S"""
Returns an `Envelope` that represents the extent of the geometry or
coordinates.
## Examples
iex> Envelope.from_geo %{coordinates: [{11, 10}, {4, 2.5}, {16, 2.5}, {11, 10}]}
%Envelope{ max_x: 16, max_y: 10, min_x: 4, min_y: 2.5 }
iex> Envelope.from_geo [{11, 10}, {4, 2.5}, {16, 2.5}, {11, 10}]
%Envelope{ max_x: 16, max_y: 10, min_x: 4, min_y: 2.5 }
iex> Envelope.from_geo %Geo.Polygon{coordinates: [[{1, 3}, {2, -1}, {0, -1}, {1, 3}]]}
%Envelope{ min_x: 0, min_y: -1, max_x: 2, max_y: 3 }
iex> Envelope.from_geo {1, 3}
%Envelope{ min_x: 1, min_y: 3, max_x: 1, max_y: 3 }
"""
@spec from_geo(points()) :: t()
def from_geo({x, y}) when is_number(x) and is_number(y),
do: %Envelope{min_x: x, min_y: y, max_x: x, max_y: y}
def from_geo(%Geo.Point{coordinates: {x, y}}),
do: %Envelope{min_x: x, min_y: y, max_x: x, max_y: y}
def from_geo(%{coordinates: coordinates}), do: from_geo(coordinates)
def from_geo(coordinates) when is_list(coordinates) do
coordinates
|> List.flatten()
|> Enum.reduce(Envelope.empty(), &expand(&2, &1))
end
@doc ~S"""
Returns a `Geo.Polygon`, `Geo.LineString`, or `Geo.Point` that is equal to
the area covered by the given `Envelope`.
Note that the exact type of the Geometry returned will depend on the nature
of the Envelope:
- `Geo.Point` will be returned when an envelope has zero area and all
extents are equal.
- `Geo.LineString` will be returned when an envelope has zero area
and it extends along only one axis.
- `Geo.Polygon` will be returned when an envelope has non-zero area
## Examples
iex> Envelope.to_geo %Envelope{ max_x: 16, max_y: 10, min_x: 4, min_y: 2.5 }
%Geo.Polygon{coordinates: [[{4, 2.5}, {16, 2.5}, {16, 10}, {4, 10}, {4, 2.5}]]}
iex> Envelope.to_geo %Envelope{ min_x: 1, min_y: 3, max_x: 1, max_y: 5 }
%Geo.LineString{coordinates: [{1, 3}, {1, 5}]}
iex> Envelope.to_geo %Envelope{ min_x: 1, min_y: 3, max_x: 4, max_y: 3 }
%Geo.LineString{coordinates: [{1, 3}, {4, 3}]}
iex> Envelope.to_geo %Envelope{ min_x: 1, min_y: 3, max_x: 1, max_y: 3 }
%Geo.Point{coordinates: {1, 3}}
"""
@spec to_geo(t()) :: %Geo.Polygon{} | %Geo.Point{} | %Geo.LineString{}
def to_geo(%Envelope{min_x: x, min_y: y, max_x: x, max_y: y}),
do: %Geo.Point{coordinates: {x, y}}
def to_geo(%Envelope{min_x: x, min_y: min_y, max_x: x, max_y: max_y}),
do: %Geo.LineString{coordinates: [{x, min_y}, {x, max_y}]}
def to_geo(%Envelope{min_x: min_x, min_y: y, max_x: max_x, max_y: y}),
do: %Geo.LineString{coordinates: [{min_x, y}, {max_x, y}]}
def to_geo(%Envelope{} = env),
do: %Geo.Polygon{
coordinates: [
[
{env.min_x, env.min_y},
{env.max_x, env.min_y},
{env.max_x, env.max_y},
{env.min_x, env.max_y},
{env.min_x, env.min_y}
]
]
}
@doc ~S"""
Returns an `Envelope` that represents no extent at all. This is primarily
a convenient starting point for an Envelope that will be expanded. Internally,
"empty" Envelopes are represented with `nil` values for all extents.
Note that there is an important distinction between an empty Envelope and
an Envelope around a single Point (where the min and max for each axis are
real numbers but may represent zero area).
## Examples
iex> Envelope.empty
%Envelope{max_x: nil, max_y: nil, min_x: nil, min_y: nil}
iex> Envelope.empty |> Envelope.empty?
true
"""
@spec empty() :: t()
def empty, do: %Envelope{min_x: nil, min_y: nil, max_x: nil, max_y: nil}
@doc ~S"""
Returns `true` if the given envelope is empty (has non-existent extent),
otherwise `false`
## Examples
iex> Envelope.empty |> Envelope.empty?
true
iex> %Envelope{ min_x: 0, min_y: -1, max_x: 2, max_y: 3 } |> Envelope.empty?
false
"""
@spec empty?(t()) :: boolean()
def empty?(%Envelope{min_x: nil, min_y: nil, max_x: nil, max_y: nil}), do: true
def empty?(%Envelope{}), do: false
@doc ~S"""
Returns a new Envelope that is expanded to include an additional geometry.
## Examples
iex> a = Envelope.from_geo(%Geo.Polygon{coordinates: [[{2, -2}, {20, -2}, {11, 11}, {2, -2}]]})
...> b = %Geo.LineString{coordinates: [{1, 3}, {2, -1}, {0, -1}, {1, 3}]}
...> Envelope.expand(a, b)
%Envelope{ min_x: 0, min_y: -2, max_x: 20, max_y: 11 }
iex> a = %Envelope{ min_x: 0, min_y: -2, max_x: 20, max_y: 11 }
...> b = %Envelope{ min_x: 2, min_y: -3, max_x: 12, max_y: -2 }
...> Envelope.expand(a, b)
%Envelope{ min_x: 0, min_y: -3, max_x: 20, max_y: 11 }
iex> Envelope.empty
...> |> Envelope.expand(%Envelope{ min_x: 0, min_y: -2, max_x: 12, max_y: 11 })
...> |> Envelope.expand(%Geo.Polygon{coordinates: [[{2, -2}, {20, -2}, {11, 11}, {2, -2}]]})
...> |> Envelope.expand(%{type: "Point", coordinates: {-1, 3}})
%Envelope{ min_x: -1, min_y: -2, max_x: 20, max_y: 11 }
iex> Envelope.expand(Envelope.empty, Envelope.empty) |> Envelope.empty?
true
"""
@spec expand(t(), point() | t() | points()) :: t()
def expand(%Envelope{} = env1, %Envelope{} = env2) do
cond do
Envelope.empty?(env1) ->
env2
Envelope.empty?(env2) ->
env1
true ->
%Envelope{
min_x: min(env1.min_x, env2.min_x),
min_y: min(env1.min_y, env2.min_y),
max_x: max(env1.max_x, env2.max_x),
max_y: max(env1.max_y, env2.max_y)
}
end
end
def expand(%Envelope{} = env, other), do: expand(env, from_geo(other))
@doc ~S"""
Returns a new Envelope that is expanded in positive and negative directions
in each axis by `radius`.
## Examples
iex> Envelope.expand_by(Envelope.from_geo(%Geo.Polygon{coordinates: [[{2, -2}, {20, -2}, {11, 11}, {2, -2}]]}), 3)
%Envelope{ min_x: -1, min_y: -5, max_x: 23, max_y: 14 }
iex> Envelope.expand_by(Envelope.empty, 4) |> Envelope.empty?
true
"""
@spec expand_by(t(), number()) :: t()
def expand_by(%Envelope{} = env, radius) when is_number(radius) and radius >= 0 do
case Envelope.empty?(env) do
true ->
env
false ->
%Envelope{
min_x: env.min_x - radius,
min_y: env.min_y - radius,
max_x: env.max_x + radius,
max_y: env.max_y + radius
}
end
end
@doc ~S"""
Simple distance from the left boundary to the right boundary of the Envelope.
## Examples
iex> Envelope.width(Envelope.from_geo(%Geo.Polygon{coordinates: [[{2, -2}, {20, -2}, {11, 11}, {2, -2}]]}))
18
"""
@spec width(t()) :: number()
def width(%Envelope{} = env) do
env.max_x - env.min_x
end
@doc ~S"""
When an Envelope's coordinates are in degrees of longitude and latitude, calculates the
great circle distance between the center of the east and west extent in meters.
## Examples
iex> Envelope.width_gc(Envelope.from_geo(%Geo.Polygon{coordinates: [[{2, -2}, {20, -2}, {11, 11}, {2, -2}]]})) |> round
1982362
"""
@spec width_gc(t()) :: number()
def width_gc(%Envelope{} = env) do
bottom = GreatCircle.distance({env.min_x, env.min_y}, {env.max_x, env.min_y})
top = GreatCircle.distance({env.min_x, env.max_y}, {env.max_x, env.max_y})
(bottom + top) / 2.0
end
@doc ~S"""
Simple distance from the bottom boundary to the top boundary of the Envelope.
## Examples
iex> Envelope.height(Envelope.from_geo(%Geo.Polygon{coordinates: [[{2, -2}, {20, -2}, {11, 11}, {2, -2}]]}))
13
"""
@spec height(t()) :: number()
def height(%Envelope{} = env) do
env.max_y - env.min_y
end
@doc ~S"""
When an Envelope's coordinates are in degrees of longitude and latitude, calculates the
great circle distance between the center of the north and south extent in meters.
## Examples
iex> Envelope.height_gc(Envelope.from_geo(%Geo.Polygon{coordinates: [[{2, -2}, {20, -2}, {11, 11}, {2, -2}]]})) |> round
1445536
"""
@spec height_gc(t()) :: number()
def height_gc(%Envelope{} = env) do
GreatCircle.distance({env.min_x, env.min_y}, {env.min_x, env.max_y})
end
@doc ~S"""
Calculates the simple area of an Envelope.
## Examples
iex> Envelope.area(Envelope.from_geo(%Geo.Polygon{coordinates: [[{2, -2}, {20, -2}, {11, 11}, {2, -2}]]}))
234
"""
@spec area(t()) :: number()
def area(%Envelope{} = env) do
width(env) * height(env)
end
@doc ~S"""
Estimates the area of an Envelope in square meters when the Envelope's coordinates are in degrees of longitude and latitude.
## Examples
iex> Envelope.area_gc(Envelope.from_geo(%Geo.Polygon{coordinates: [[{2, -2}, {20, -2}, {11, 11}, {2, -2}]]})) |> round
2865575088701
"""
@spec area_gc(t()) :: number()
def area_gc(%Envelope{} = env) do
width_gc(env) * height_gc(env)
end
@doc ~S"""
Returns the center point of an envelope.
## Examples
iex> %Envelope{ min_x: 0, min_y: -1, max_x: 2, max_y: 5 } |> Envelope.center()
{1.0, 2.0}
iex> Envelope.empty() |> Envelope.center()
nil
"""
@spec center(t()) :: {number(), number()} | nil
def center(env) do
if Envelope.empty?(env) do
nil
else
{(env.min_x + env.max_x) / 2.0, (env.min_y + env.max_y) / 2.0}
end
end
@doc ~S"""
Returns whether one envelope fully contains another envelope or point.
## Examples
iex> Envelope.contains?(
...> %Envelope{ min_x: -1, min_y: -5, max_x: 23, max_y: 14 },
...> %Envelope{ min_x: 0, min_y: 3, max_x: 7, max_y: 4 })
true
iex> Envelope.contains?(
...> %Envelope{ min_x: -1, min_y: 5, max_x: 23, max_y: 14 },
...> %Envelope{ min_x: -2, min_y: 5, max_x: 7, max_y: 4 })
false
iex> Envelope.contains?(
...> %Geo.Polygon{ coordinates: [{-1, 3}, {-3, -1}, { 5, -3}, {4, 12}, {-2, 11}, {-1, 3}] },
...> {0, 11})
true
"""
@spec contains?(t() | points(), t() | points()) :: boolean()
def contains?(%Envelope{} = env, {x, y}) do
env.min_x <= x && env.min_y <= y && env.max_x >= x && env.max_y >= y
end
def contains?(%Envelope{} = env, %{coordinates: {x, y}}), do: contains?(env, {x, y})
def contains?(%Envelope{} = env1, %Envelope{} = env2) do
env1.min_x <= env2.min_x && env1.min_y <= env2.min_y && env1.max_x >= env2.max_x &&
env1.max_y >= env2.max_y
end
def contains?(%Envelope{} = env1, other), do: contains?(env1, from_geo(other))
def contains?(a, b), do: contains?(from_geo(a), b)
@doc ~S"""
The inverse of the relationship tested by `contains?/2`.
## Examples
iex> Envelope.within?(
...> %Envelope{ min_x: 0, min_y: 3, max_x: 7, max_y: 4 },
...> %Envelope{ min_x: -1, min_y: -5, max_x: 23, max_y: 14 })
true
iex> Envelope.within?(
...> %Geo.Polygon{ coordinates: [{-1, 3}, {-3, -1}, { 5, -3}, {4, 12}, {-2, 11}, {-1, 3}] },
...> {0, 11})
false
"""
@spec within?(t() | points(), t() | points()) :: boolean()
def within?(a, b), do: contains?(b, a)
@doc ~S"""
Returns whether two envelopes touch or intersect.
## Examples
iex> Envelope.intersects?(
...> %Envelope{ min_x: -1, min_y: -5, max_x: 23, max_y: 14 },
...> %Envelope{ min_x: 0, min_y: 3, max_x: 7, max_y: 4 })
true
iex> Envelope.intersects?(
...> %Envelope{ min_x: -1, min_y: 5, max_x: 23, max_y: 14 },
...> %Envelope{ min_x: 0, min_y: -3, max_x: 7, max_y: 4 })
false
"""
@spec intersects?(t() | points(), t() | points()) :: boolean()
def intersects?(%Envelope{} = env1, %Envelope{} = env2) do
cond do
env1.min_x > env2.max_x -> false
env1.max_x < env2.min_x -> false
env1.min_y > env2.max_y -> false
env1.max_y < env2.min_y -> false
true -> true
end
end
def intersects?(%Envelope{} = env, other), do: intersects?(env, from_geo(other))
def intersects?(other, %Envelope{} = env), do: intersects?(from_geo(other), env)
def intersects?(a, b), do: intersects?(from_geo(a), from_geo(b))
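# Putting a few of the functions above together (illustrative; the envelope values
# follow from the doctests):
#
#     line = %Geo.LineString{coordinates: [{1, 3}, {2, -1}, {0, -1}, {1, 3}]}
#     line |> Envelope.from_geo() |> Envelope.expand_by(1) |> Envelope.contains?({2, 3})
#     #=> true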
end
|
lib/envelope.ex
| 0.938899
| 0.727855
|
envelope.ex
|
starcoder
|
defmodule Cogoli do
@moduledoc """
A highly modular implementation of Conway's Game of Life.
The implementation itself is provided as a callback module, and as such
alternative callback modules can be written to explore variations on the
original Game of Life. A number of variations are in fact included with
the app which implement purely aesthetic game alternatives in the form
of colouration, and others that implement actual competition between
different 'species' which individually implement the Game of Life rules
on their own, but experience conflict and cross breeding when they compete
for the same square!
The display and back end store are also implemented as callbacks, so it
is possible to implement more efficient back end storage, or present the
game as something other than X Windows, if desirable.
"""
@doc """
Start a game.
The single parameter is a keyword list, and at least the
`width` and `height` properties must be provided, like this:
iex(1)> Cogoli.start(
width: 100,
height: 100
)
{:ok, #PID<0.83.0>}
Other properties that may be given are as follows:
rules: [RulesModule: RulesConfig]
rules: RulesModule
The default is `cgolam_rules_normal: []` but other modules, which implement
the cgolam_rules behaviour, may be used to provide a different set of rules
to Conway's Game of Life.
Another rules implementation is cgolam_rules_coloured, which also implements
Conway's Game of Life, but uses different colours for the cells, with new
cells (becoming alive from dead) assuming a colour mix of the surrounding
cells.
field: [FieldModule: FieldConfig]
field: FieldModule
The 'field' module is the back end implementation containing the data
representing the game state.
The default is `cgolam_field_ets: []`, which uses ETS tables, but
`cgolam_field_gb_trees` and `cgolam_field_tuples` also exist.
init: [InitModule: [InitType: InitConfig]]
init: [InitModule: InitType]
The 'init' module is used to initialise the field with contents. The default
is to use RulesModule, which initialises the field with data that makes sense
for the rules. The default InitType is `:default`.
InitType is an atom, by default it is `:default` but can be other values if
supported by the module. InitConfig is a keyword list, and the properties
supported depend entirely on the module, but cgolam_rules_normal and
cgolam_rules_coloured both support `:cluster_size` and `:cluster_density`,
which increase or decrease the size and densities of the clusters of cells
(the default is 100, it is a percentage, so 50 halves the density (sort of)
and 200 doubles it). Also both support `:clusters`, which determines the number
of clusters.
display: [DisplayModule: DisplayConfig]
display: DisplayModule
The 'display' module is used to display the game state. Currently this can
only be `:cgolam_display_wx`, which uses WX Widgets / X Windows. A property
of `:sqsize` may be given in DisplayConfig to change the size of the cells in
pixels.
For example, this, with the width and height added, is the default game
(note that the 'init' config is redundant here):
Cogoli.start(
rules: [cgolam_rules_normal: []],
field: [cgolam_field_ets: []],
width: 100,
height: 100,
display: [cgolam_display_wx: [sqsize: 1]],
init: [cgolam_rules_normal: [default: []]]
)
Here's a more interesting one which shows you more features:
Cogoli.start(
title: "Coloured Conway's Game of Life",
interval: 20,
rules: [cgolam_rules_coloured: []],
field: [cgolam_field_ets: []],
width: 100,
height: 100,
display: [cgolam_display_wx: [sqsize: 5]],
init: [cgolam_rules_coloured: [default: [cluster_size: 200]]]
)
"""
@spec start([{atom, term}]) :: {:ok, pid}
def start(game_cfg) do
:cgolam.start(cfg_iex_to_erl(game_cfg))
end
@spec stop(pid) :: :ok
def stop(pid) do
:cgolam.stop(pid)
end
@spec list() :: [pid]
def list() do
:cgolam.list()
end
defp cfg_iex_to_erl ([{:rules, [{rules_mod, rules_cfg}]} | more_cfg]) do
[{:rules, rules_mod, rules_cfg} | cfg_iex_to_erl(more_cfg)]
end
defp cfg_iex_to_erl ([{:field, [{field_mod, field_cfg}]} | more_cfg]) do
[{:field, field_mod, field_cfg} | cfg_iex_to_erl(more_cfg)]
end
defp cfg_iex_to_erl ([{:display, [{display_mod, display_cfg}]} | more_cfg]) do
[{:display, display_mod, display_cfg} | cfg_iex_to_erl(more_cfg)]
end
defp cfg_iex_to_erl ([{:init, [{init_mod, [{init_type, init_cfg}]}]} | more_cfg]) do
[{:init, init_mod, init_type, init_cfg} | cfg_iex_to_erl(more_cfg)]
end
defp cfg_iex_to_erl ([{:init, [{init_mod, init_type}]} | more_cfg]) do
[{:init, init_mod, init_type} | cfg_iex_to_erl(more_cfg)]
end
defp cfg_iex_to_erl ([{cfg_opt, cfg_val} | more_cfg]) do
[{cfg_opt, cfg_val} | cfg_iex_to_erl(more_cfg)]
end
defp cfg_iex_to_erl ([]) do
[]
end
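# Illustrative mapping performed by the clauses above: the Elixir keyword form
#
#     [rules: [cgolam_rules_coloured: []], width: 100, height: 100]
#
# is flattened into the Erlang-style config
#
#     [{:rules, :cgolam_rules_coloured, []}, {:width, 100}, {:height, 100}]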
end
|
lib/cogoli.ex
| 0.850065
| 0.784154
|
cogoli.ex
|
starcoder
|
defmodule AWS.MTurk do
@moduledoc """
Amazon Mechanical Turk API Reference
"""
@doc """
The `AcceptQualificationRequest` operation approves a Worker's request for a
Qualification.
Only the owner of the Qualification type can grant a Qualification request for
that type.
A successful request for the `AcceptQualificationRequest` operation returns with
no errors and an empty body.
"""
def accept_qualification_request(client, input, options \\ []) do
request(client, "AcceptQualificationRequest", input, options)
end
@doc """
The `ApproveAssignment` operation approves the results of a completed
assignment.
Approving an assignment initiates two payments from the Requester's Amazon.com
account
* The Worker who submitted the results is paid the reward specified
in the HIT.
* Amazon Mechanical Turk fees are debited.
If the Requester's account does not have adequate funds for these payments, the
call to ApproveAssignment returns an exception, and the approval is not
processed. You can include an optional feedback message with the approval, which
the Worker can see in the Status section of the web site.
You can also call this operation for assignments that were previously rejected and
approve them by explicitly overriding the previous rejection. This only works on
rejected assignments that were submitted within the previous 30 days and only if
the assignment's related HIT has not been deleted.
"""
def approve_assignment(client, input, options \\ []) do
request(client, "ApproveAssignment", input, options)
end
@doc """
The `AssociateQualificationWithWorker` operation gives a Worker a Qualification.
`AssociateQualificationWithWorker` does not require that the Worker submit a
Qualification request. It gives the Qualification directly to the Worker.
You can only assign a Qualification of a Qualification type that you created
(using the `CreateQualificationType` operation).
Note: `AssociateQualificationWithWorker` does not affect any pending
Qualification requests for the Qualification by the Worker. If you assign a
Qualification to a Worker, then later grant a Qualification request made by the
Worker, the granting of the request may modify the Qualification score. To
resolve a pending Qualification request without affecting the Qualification the
Worker already has, reject the request with the `RejectQualificationRequest`
operation.
"""
def associate_qualification_with_worker(client, input, options \\ []) do
request(client, "AssociateQualificationWithWorker", input, options)
end
@doc """
The `CreateAdditionalAssignmentsForHIT` operation increases the maximum number
of assignments of an existing HIT.
To extend the maximum number of assignments, specify the number of additional
assignments.
HITs created with fewer than 10 assignments cannot be extended to
have 10 or more assignments. Attempting to add assignments in a way that brings
the total number of assignments for a HIT from fewer than 10 assignments to 10
or more assignments will result in an
`AWS.MechanicalTurk.InvalidMaximumAssignmentsIncrease` exception.
HITs that were created before July 22, 2015 cannot be extended.
Attempting to extend HITs that were created before July 22, 2015 will result in
an `AWS.MechanicalTurk.HITTooOldForExtension` exception.
"""
def create_additional_assignments_for_h_i_t(client, input, options \\ []) do
request(client, "CreateAdditionalAssignmentsForHIT", input, options)
end
@doc """
The `CreateHIT` operation creates a new Human Intelligence Task (HIT).
The new HIT is made available for Workers to find and accept on the Amazon
Mechanical Turk website.
This operation allows you to specify a new HIT by passing in values for the
properties of the HIT, such as its title, reward amount and number of
assignments. When you pass these values to `CreateHIT`, a new HIT is created for
you, with a new `HITTypeID`. The HITTypeID can be used to create additional HITs
in the future without needing to specify common parameters such as the title,
description and reward amount each time.
An alternative way to create HITs is to first generate a HITTypeID using the
`CreateHITType` operation and then call the `CreateHITWithHITType` operation.
This is the recommended best practice for Requesters who are creating large
numbers of HITs.
CreateHIT also supports several ways to provide question data: by providing a
value for the `Question` parameter that fully specifies the contents of the HIT,
or by providing a `HitLayoutId` and associated `HitLayoutParameters`.
If a HIT is created with 10 or more maximum assignments, there is an additional
fee. For more information, see [Amazon Mechanical Turk Pricing](https://requester.mturk.com/pricing).
"""
def create_h_i_t(client, input, options \\ []) do
request(client, "CreateHIT", input, options)
end
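# Rough input sketch (illustrative only; the field names follow the MTurk CreateHIT
# API, so consult the MTurk API reference for required fields and formats, and note
# that `question_xml` is a placeholder for a QuestionForm/ExternalQuestion XML string):
#
#     input = %{
#       "Title" => "Categorize an image",
#       "Description" => "Pick the best category for the image shown",
#       "Reward" => "0.05",
#       "MaxAssignments" => 3,
#       "AssignmentDurationInSeconds" => 600,
#       "LifetimeInSeconds" => 86_400,
#       "Question" => question_xml
#     }
#     AWS.MTurk.create_h_i_t(client, input)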
@doc """
The `CreateHITType` operation creates a new HIT type.
This operation allows you to define a standard set of HIT properties to use when
creating HITs. If you register a HIT type with values that match an existing HIT
type, the HIT type ID of the existing type will be returned.
"""
def create_h_i_t_type(client, input, options \\ []) do
request(client, "CreateHITType", input, options)
end
@doc """
The `CreateHITWithHITType` operation creates a new Human Intelligence Task (HIT)
using an existing HITTypeID generated by the `CreateHITType` operation.
This is an alternative way to create HITs from the `CreateHIT` operation. This
is the recommended best practice for Requesters who are creating large numbers
of HITs.
CreateHITWithHITType also supports several ways to provide question data: by
providing a value for the `Question` parameter that fully specifies the contents
of the HIT, or by providing a `HitLayoutId` and associated
`HitLayoutParameters`.
If a HIT is created with 10 or more maximum assignments, there is an additional
fee. For more information, see [Amazon Mechanical Turk Pricing](https://requester.mturk.com/pricing).
"""
def create_h_i_t_with_h_i_t_type(client, input, options \\ []) do
request(client, "CreateHITWithHITType", input, options)
end
@doc """
The `CreateQualificationType` operation creates a new Qualification type, which
is represented by a `QualificationType` data structure.
"""
def create_qualification_type(client, input, options \\ []) do
request(client, "CreateQualificationType", input, options)
end
@doc """
The `CreateWorkerBlock` operation allows you to prevent a Worker from working on
your HITs.
For example, you can block a Worker who is producing poor quality work. You can
block up to 100,000 Workers.
"""
def create_worker_block(client, input, options \\ []) do
request(client, "CreateWorkerBlock", input, options)
end
@doc """
The `DeleteHIT` operation is used to delete a HIT that is no longer needed.
Only the Requester who created the HIT can delete it.
You can only dispose of HITs that are in the `Reviewable` state, with all of
their submitted assignments already either approved or rejected. If you call the
DeleteHIT operation on a HIT that is not in the `Reviewable` state (for example,
that has not expired, or still has active assignments), or on a HIT that is
Reviewable but without all of its submitted assignments already approved or
rejected, the service will return an error.
HITs are automatically disposed of after 120 days.
After you dispose of a HIT, you can no longer approve the HIT's
rejected assignments.
Disposed HITs are not returned in results for the ListHITs
operation.
Disposing HITs can improve the performance of operations such as
ListReviewableHITs and ListHITs.
"""
def delete_h_i_t(client, input, options \\ []) do
request(client, "DeleteHIT", input, options)
end
@doc """
The `DeleteQualificationType` deletes a Qualification type and deletes any HIT
types that are associated with the Qualification type.
This operation does not revoke Qualifications already assigned to Workers
because the Qualifications might be needed for active HITs. If there are any
pending requests for the Qualification type, Amazon Mechanical Turk rejects
those requests. After you delete a Qualification type, you can no longer use it
to create HITs or HIT types.
DeleteQualificationType must wait for all the HITs that use the deleted
Qualification type to be deleted before completing. It may take up to 48 hours
before DeleteQualificationType completes and the unique name of the
Qualification type is available for reuse with CreateQualificationType.
"""
def delete_qualification_type(client, input, options \\ []) do
request(client, "DeleteQualificationType", input, options)
end
@doc """
The `DeleteWorkerBlock` operation allows you to reinstate a blocked Worker to
work on your HITs.
This operation reverses the effects of the CreateWorkerBlock operation. You need
the Worker ID to use this operation. If the Worker ID is missing or invalid,
this operation fails and returns the message “WorkerId is invalid.” If the
specified Worker is not blocked, this operation returns successfully.
"""
def delete_worker_block(client, input, options \\ []) do
request(client, "DeleteWorkerBlock", input, options)
end
@doc """
The `DisassociateQualificationFromWorker` revokes a previously granted
Qualification from a user.
You can provide a text message explaining why the Qualification was revoked. The
user who had the Qualification can see this message.
"""
def disassociate_qualification_from_worker(client, input, options \\ []) do
request(client, "DisassociateQualificationFromWorker", input, options)
end
@doc """
The `GetAccountBalance` operation retrieves the amount of money in your Amazon
Mechanical Turk account.
"""
def get_account_balance(client, input, options \\ []) do
request(client, "GetAccountBalance", input, options)
end
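# Usage sketch: given an already-configured %AWS.Client{} struct, the generated
# operations are called with an input map (empty here, since GetAccountBalance
# takes no parameters). The response body is the decoded JSON, which should
# include the available balance:
#
#     {:ok, body, _response} = AWS.MTurk.get_account_balance(client, %{})
#     body["AvailableBalance"]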
@doc """
The `GetAssignment` operation retrieves the details of the specified Assignment.
"""
def get_assignment(client, input, options \\ []) do
request(client, "GetAssignment", input, options)
end
@doc """
The `GetFileUploadURL` operation generates and returns a temporary URL.
You use the temporary URL to retrieve a file uploaded by a Worker as an answer
to a FileUploadAnswer question for a HIT. The temporary URL is generated the
instant the GetFileUploadURL operation is called, and is valid for 60 seconds.
You can get a temporary file upload URL any time until the HIT is disposed.
After the HIT is disposed, any uploaded files are deleted, and cannot be
retrieved. Pending Deprecation on December 12, 2017. The Answer Specification
structure will no longer support the `FileUploadAnswer` element to be used for
the QuestionForm data structure. Instead, we recommend that Requesters who want
to create HITs asking Workers to upload files to use Amazon S3.
"""
def get_file_upload_u_r_l(client, input, options \\ []) do
request(client, "GetFileUploadURL", input, options)
end
@doc """
The `GetHIT` operation retrieves the details of the specified HIT.
"""
def get_h_i_t(client, input, options \\ []) do
request(client, "GetHIT", input, options)
end
@doc """
The `GetQualificationScore` operation returns the value of a Worker's
Qualification for a given Qualification type.
To get a Worker's Qualification, you must know the Worker's ID. The Worker's ID
is included in the assignment data returned by the `ListAssignmentsForHIT`
operation.
Only the owner of a Qualification type can query the value of a Worker's
Qualification of that type.
"""
def get_qualification_score(client, input, options \\ []) do
request(client, "GetQualificationScore", input, options)
end
@doc """
The `GetQualificationType` operation retrieves information about a Qualification
type using its ID.
"""
def get_qualification_type(client, input, options \\ []) do
request(client, "GetQualificationType", input, options)
end
@doc """
The `ListAssignmentsForHIT` operation retrieves completed assignments for a HIT.
You can use this operation to retrieve the results for a HIT.
You can get assignments for a HIT at any time, even if the HIT is not yet
Reviewable. If a HIT requested multiple assignments, and has received some
results but has not yet become Reviewable, you can still retrieve the partial
results with this operation.
Use the AssignmentStatus parameter to control which set of assignments for a HIT
are returned. The ListAssignmentsForHIT operation can return submitted
assignments awaiting approval, or it can return assignments that have already
been approved or rejected. You can set AssignmentStatus=Approved,Rejected to get
assignments that have already been approved and rejected together in one result
set.
Only the Requester who created the HIT can retrieve the assignments for that
HIT.
Results are sorted and divided into numbered pages and the operation returns a
single page of results. You can use the parameters of the operation to control
sorting and pagination.
"""
def list_assignments_for_h_i_t(client, input, options \\ []) do
request(client, "ListAssignmentsForHIT", input, options)
end
@doc """
The `ListBonusPayments` operation retrieves the amounts of bonuses you have paid
to Workers for a given HIT or assignment.
"""
def list_bonus_payments(client, input, options \\ []) do
request(client, "ListBonusPayments", input, options)
end
@doc """
The `ListHITs` operation returns all of a Requester's HITs.
The operation returns HITs of any status, except for HITs that have been deleted
with the DeleteHIT operation or that have been auto-deleted.
"""
def list_h_i_ts(client, input, options \\ []) do
request(client, "ListHITs", input, options)
end
@doc """
The `ListHITsForQualificationType` operation returns the HITs that use the given
Qualification type for a Qualification requirement.
The operation returns HITs of any status, except for HITs that have been deleted
with the `DeleteHIT` operation or that have been auto-deleted.
"""
def list_h_i_ts_for_qualification_type(client, input, options \\ []) do
request(client, "ListHITsForQualificationType", input, options)
end
@doc """
The `ListQualificationRequests` operation retrieves requests for Qualifications
of a particular Qualification type.
The owner of the Qualification type calls this operation to poll for pending
requests, and accepts them using the AcceptQualification operation.
"""
def list_qualification_requests(client, input, options \\ []) do
request(client, "ListQualificationRequests", input, options)
end
@doc """
The `ListQualificationTypes` operation returns a list of Qualification types,
filtered by an optional search term.
"""
def list_qualification_types(client, input, options \\ []) do
request(client, "ListQualificationTypes", input, options)
end
@doc """
The `ListReviewPolicyResultsForHIT` operation retrieves the computed results and
the actions taken in the course of executing your Review Policies for a given
HIT.
For information about how to specify Review Policies when you call CreateHIT,
see Review Policies. The ListReviewPolicyResultsForHIT operation can return
results for both Assignment-level and HIT-level review results.
"""
def list_review_policy_results_for_h_i_t(client, input, options \\ []) do
request(client, "ListReviewPolicyResultsForHIT", input, options)
end
@doc """
The `ListReviewableHITs` operation retrieves the HITs with Status equal to
Reviewable or Status equal to Reviewing that belong to the Requester calling the
operation.
"""
def list_reviewable_h_i_ts(client, input, options \\ []) do
request(client, "ListReviewableHITs", input, options)
end
@doc """
The `ListWorkerBlocks` operation retrieves a list of Workers who are blocked
from working on your HITs.
"""
def list_worker_blocks(client, input, options \\ []) do
request(client, "ListWorkerBlocks", input, options)
end
@doc """
The `ListWorkersWithQualificationType` operation returns all of the Workers that
have been associated with a given Qualification type.
"""
def list_workers_with_qualification_type(client, input, options \\ []) do
request(client, "ListWorkersWithQualificationType", input, options)
end
@doc """
The `NotifyWorkers` operation sends an email to one or more Workers that you
specify with the Worker ID.
You can specify up to 100 Worker IDs to send the same message with a single call
to the NotifyWorkers operation. The NotifyWorkers operation will send a
notification email to a Worker only if you have previously approved or rejected
work from the Worker.
"""
def notify_workers(client, input, options \\ []) do
request(client, "NotifyWorkers", input, options)
end
@doc """
The `RejectAssignment` operation rejects the results of a completed assignment.
You can include an optional feedback message with the rejection, which the
Worker can see in the Status section of the web site. When you include a
feedback message with the rejection, it helps the Worker understand why the
assignment was rejected, and can improve the quality of the results the Worker
submits in the future.
Only the Requester who created the HIT can reject an assignment for the HIT.
"""
def reject_assignment(client, input, options \\ []) do
request(client, "RejectAssignment", input, options)
end
@doc """
The `RejectQualificationRequest` operation rejects a user's request for a
Qualification.
You can provide a text message explaining why the request was rejected. The
Worker who made the request can see this message.
"""
def reject_qualification_request(client, input, options \\ []) do
request(client, "RejectQualificationRequest", input, options)
end
@doc """
The `SendBonus` operation issues a payment of money from your account to a
Worker.
This payment happens separately from the reward you pay to the Worker when you
approve the Worker's assignment. The SendBonus operation requires the Worker's
ID and the assignment ID as parameters to initiate payment of the bonus. You
must include a message that explains the reason for the bonus payment, as the
Worker may not be expecting the payment. Amazon Mechanical Turk collects a fee
for bonus payments, similar to the HIT listing fee. This operation fails if your
account does not have enough funds to pay for both the bonus and the fees.
"""
def send_bonus(client, input, options \\ []) do
request(client, "SendBonus", input, options)
end
@doc """
The `SendTestEventNotification` operation causes Amazon Mechanical Turk to send
a notification message as if a HIT event occurred, according to the provided
notification specification.
This allows you to test notifications without setting up notifications for a
real HIT type and trying to trigger them using the website. When you call this
operation, the service attempts to send the test notification immediately.
"""
def send_test_event_notification(client, input, options \\ []) do
request(client, "SendTestEventNotification", input, options)
end
@doc """
The `UpdateExpirationForHIT` operation allows you to update the expiration time of
a HIT.
If you update it to a time in the past, the HIT will be immediately expired.
"""
def update_expiration_for_h_i_t(client, input, options \\ []) do
request(client, "UpdateExpirationForHIT", input, options)
end
@doc """
The `UpdateHITReviewStatus` operation updates the status of a HIT.
If the status is Reviewable, this operation can update the status to Reviewing,
or it can revert a Reviewing HIT back to the Reviewable status.
"""
def update_h_i_t_review_status(client, input, options \\ []) do
request(client, "UpdateHITReviewStatus", input, options)
end
@doc """
The `UpdateHITTypeOfHIT` operation allows you to change the HITType properties
of a HIT.
This operation disassociates the HIT from its old HITType properties and
associates it with the new HITType properties. The HIT takes on the properties
of the new HITType in place of the old ones.
"""
def update_h_i_t_type_of_h_i_t(client, input, options \\ []) do
request(client, "UpdateHITTypeOfHIT", input, options)
end
@doc """
The `UpdateNotificationSettings` operation creates, updates, disables or
re-enables notifications for a HIT type.
If you call the UpdateNotificationSettings operation for a HIT type that already
has a notification specification, the operation replaces the old specification
with a new one. You can call the UpdateNotificationSettings operation to enable
or disable notifications for the HIT type without having to modify the
notification specification itself, by providing an update to the Active status
alone. To change the Active status
of a HIT type's notifications, the HIT type must already have a notification
specification, or one must be provided in the same call to
`UpdateNotificationSettings`.
"""
def update_notification_settings(client, input, options \\ []) do
request(client, "UpdateNotificationSettings", input, options)
end
@doc """
The `UpdateQualificationType` operation modifies the attributes of an existing
Qualification type, which is represented by a QualificationType data structure.
Only the owner of a Qualification type can modify its attributes.
Most attributes of a Qualification type can be changed after the type has been
created. However, the Name and Keywords fields cannot be modified. The
RetryDelayInSeconds parameter can be modified or added to change the delay or to
enable retries, but RetryDelayInSeconds cannot be used to disable retries.
You can use this operation to update the test for a Qualification type. The test
is updated based on the values specified for the Test, TestDurationInSeconds and
AnswerKey parameters. All three parameters specify the updated test. If you are
updating the test for a type, you must specify the Test and
TestDurationInSeconds parameters. The AnswerKey parameter is optional; omitting
it specifies that the updated test does not have an answer key.
If you omit the Test parameter, the test for the Qualification type is
unchanged. There is no way to remove a test from a Qualification type that has
one. If the type already has a test, you cannot update it to be AutoGranted. If
the Qualification type does not have a test and one is provided by an update,
the type will henceforth have a test.
If you want to update the test duration or answer key for an existing test
without changing the questions, you must specify a Test parameter with the
original questions, along with the updated values.
If you provide an updated Test but no AnswerKey, the new test will not have an
answer key. Requests for such Qualifications must be granted manually.
You can also update the AutoGranted and AutoGrantedValue attributes of the
Qualification type.
"""
def update_qualification_type(client, input, options \\ []) do
request(client, "UpdateQualificationType", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "mturk-requester",
region: ""}
host = build_host("mturk-requester", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "MTurkRequesterServiceV20170117.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{endpoint: endpoint}) do
"#{endpoint_prefix}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/mturk.ex
| 0.89963
| 0.687532
|
mturk.ex
|
starcoder
|
defmodule Phoenix.Channels.GenSocketClient do
@moduledoc """
Communication with a Phoenix Channels server.
This module powers a process which can connect to a Phoenix Channels server and
exchange messages with it. Currently, only websocket communication protocol is
supported.
The module is implemented as a behaviour. To use it, you need to implement the
callback module. Then, you can invoke `start_link/5` to start the socket process.
The communication with the server is then controlled from that process.
The connection is not automatically established during the creation. Instead,
the implementation can return `{:connect, state}` to try to establish the
connection. As the result either `handle_connected/2` or `handle_disconnected/2`
callbacks will be invoked.
To join a topic, `join/3` function can be used. Depending on the result, either
`handle_joined/4` or `handle_join_error/4` will be invoked. A client can join
multiple topics on the same socket. It is also possible to leave a topic using
the `leave/3` function.
Once a client has joined a topic, it can use `push/4` to send messages to the
server. If the server directly replies to the message, it will be handled in
the `handle_reply/5` callback.
If a server sends an independent message (i.e. the one which is not a direct
reply), the `handle_message/5` callback will be invoked.
If the server closes the channel, `handle_channel_closed/4` will be invoked.
This will not close the socket connection, and the client can continue to
communicate on other channels, or attempt to rejoin the channel.
## Sending messages over the socket
As mentioned, you can use `join/3`, `push/4`, and `leave/3` to send messages to
the server. All of these functions require the `transport` information as the
first argument. This information is available in most of the callback functions.
Functions will return `{:ok, ref}` if the message was sent successfully,
or `{:error, reason}`, where `ref` is the Phoenix ref used to uniquely identify
a message on a channel.
Error responses are returned in following situations:
- The client is not connected
- Attempt to send a message on a non-joined channel
- Attempt to leave a non-joined channel
- Attempt to join the already joined channel
Keep in mind that there's no guarantee that a message will reach the server.
You need to implement your own communication protocol on top of Phoenix
Channels to obtain such guarantees.
## Process structure and lifecycle
The behaviour will internally start the websocket client in a separate child
process. This means that the communication runs concurrently to any processing
which takes place in the behaviour.
The socket process will crash only if the websocket process crashes, which can
be caused only by some bug in the websocket client library. If you want to
survive this situation, you can simply trap exits in the socket process, by
calling `Process.flag(:trap_exit, true)` in the `init/1` callback. In this case,
a crash of the websocket client process will be treated as a disconnect event.
The socket process never decides to stop on its own. If you want to stop it,
you can simply return `{:stop, reason, state}` from any of the callbacks.
"""
use GenServer
@type transport_opts :: any
@type socket_opts :: [
serializer: module,
transport_opts: transport_opts
]
@type callback_state :: any
@opaque transport :: %{
transport_mod: module,
transport_pid: pid | nil,
serializer: module
}
@type topic :: String.t()
@type event :: String.t()
@type payload :: %{String.t() => any}
@type out_payload :: %{(String.t() | atom) => any}
@type ref :: pos_integer
@type message :: term
@type encoded_message :: binary
@type handler_response ::
{:ok, callback_state}
| {:connect, callback_state}
| {:connect, url :: String.t(), query_params, callback_state}
| {:stop, reason :: any, callback_state}
@type query_params :: [{String.t(), String.t()}]
@doc "Invoked when the process is created."
@callback init(arg :: any) ::
{:connect, url :: String.t(), query_params, callback_state}
| {:noconnect, url :: String.t(), query_params, callback_state}
| :ignore
| {:error, reason :: any}
# -------------------------------------------------------------------
# Behaviour definition
# -------------------------------------------------------------------
@doc "Invoked after the client has successfully connected to the server."
@callback handle_connected(transport, callback_state) :: handler_response
@doc "Invoked after the client has been disconnected from the server."
@callback handle_disconnected(reason :: any, callback_state) :: handler_response
@doc "Invoked after the client has successfully joined a topic."
@callback handle_joined(topic, payload, transport, callback_state) :: handler_response
@doc "Invoked if the server has refused a topic join request."
@callback handle_join_error(topic, payload, transport, callback_state) :: handler_response
@doc "Invoked after the server closes a channel."
@callback handle_channel_closed(topic, payload, transport, callback_state) :: handler_response
@doc "Invoked when a message from the server arrives."
@callback handle_message(topic, event, payload, transport, callback_state) :: handler_response
@doc "Invoked when the server replies to a message sent by the client."
@callback handle_reply(topic, ref, payload, transport, callback_state) :: handler_response
@doc "Invoked to handle an Erlang message."
@callback handle_info(message :: any, transport, callback_state) :: handler_response
@doc "Invoked to handle a synchronous call."
@callback handle_call(message :: any, GenServer.from(), transport, callback_state) ::
{:reply, reply, new_state}
| {:reply, reply, new_state, timeout | :hibernate}
| {:noreply, new_state}
| {:noreply, new_state, timeout | :hibernate}
| {:stop, reason, reply, new_state}
| {:stop, reason, new_state}
when new_state: callback_state, reply: term, reason: term
# -------------------------------------------------------------------
# API functions
# -------------------------------------------------------------------
@doc "Starts the socket process."
@spec start_link(
callback :: module,
transport_mod :: module,
any,
socket_opts,
GenServer.options()
) :: GenServer.on_start()
def start_link(callback, transport_mod, arg, socket_opts \\ [], gen_server_opts \\ []) do
GenServer.start_link(__MODULE__, {callback, transport_mod, arg, socket_opts}, gen_server_opts)
end
@doc "Makes a synchronous call to the server and waits for its reply."
@spec call(GenServer.server(), any, non_neg_integer) :: any
def call(server, request, timeout \\ 5000),
do: GenServer.call(server, {__MODULE__, :call, request}, timeout)
@doc "Joins the topic."
@spec join(transport, topic, out_payload) :: {:ok, ref} | {:error, reason :: any}
def join(transport, topic, payload \\ %{}), do: push(transport, topic, "phx_join", payload)
@doc "Leaves the topic."
@spec leave(transport, topic, out_payload) :: {:ok, ref} | {:error, reason :: any}
def leave(transport, topic, payload \\ %{}), do: push(transport, topic, "phx_leave", payload)
@doc "Pushes a message to the topic."
@spec push(transport, topic, event, out_payload) :: {:ok, ref} | {:error, reason :: any}
def push(%{transport_pid: nil}, _topic, _event, _payload), do: {:error, :disconnected}
def push(transport, topic, event, payload) do
cond do
# first message on a channel must always be a join
event != "phx_join" and not joined?(topic) ->
{:error, :not_joined}
# join must always be the first message on a channel
event == "phx_join" and joined?(topic) ->
{:error, :already_joined}
true ->
{join_ref, ref} = next_ref(event, topic)
case transport.serializer.encode_message([join_ref, ref, topic, event, payload]) do
{:ok, encoded} ->
transport.transport_mod.push(transport.transport_pid, encoded)
{:ok, ref}
{:error, error} ->
{:error, {:encoding_error, error}}
end
end
end
@doc """
Returns true if the socket is joined on the given topic.
This function should be invoked from the `GenSocketClient` process.
"""
@spec joined?(topic) :: boolean
def joined?(topic), do: not is_nil(join_ref(topic))
@doc "Can be invoked to send a response to the client."
@spec reply(GenServer.from(), any) :: :ok
defdelegate reply(from, response), to: GenServer
# -------------------------------------------------------------------
# API for transport (websocket client)
# -------------------------------------------------------------------
@doc "Notifies the socket process that the connection has been established."
@spec notify_connected(GenServer.server()) :: :ok
def notify_connected(socket), do: GenServer.cast(socket, :notify_connected)
@doc "Notifies the socket process about a disconnect."
@spec notify_disconnected(GenServer.server(), any) :: :ok
def notify_disconnected(socket, reason),
do: GenServer.cast(socket, {:notify_disconnected, reason})
@doc "Forwards a received message to the socket process."
@spec notify_message(GenServer.server(), binary) :: :ok
def notify_message(socket, message), do: GenServer.cast(socket, {:notify_message, message})
# -------------------------------------------------------------------
# GenServer callbacks
# -------------------------------------------------------------------
@doc false
def init({callback, transport_mod, arg, socket_opts}) do
case callback.init(arg) do
{action, url, query_params, callback_state} when action in [:connect, :noconnect] ->
{:ok,
maybe_connect(action, %{
url: url,
query_params: Enum.uniq_by(query_params ++ [{"vsn", "2.0.0"}], &elem(&1, 0)),
transport_mod: transport_mod,
transport_opts: Keyword.get(socket_opts, :transport_opts, []),
serializer:
Keyword.get(
socket_opts,
:serializer,
Phoenix.Channels.GenSocketClient.Serializer.Json
),
callback: callback,
callback_state: callback_state,
transport_pid: nil,
transport_mref: nil
})}
other ->
other
end
end
@doc false
def handle_cast(:notify_connected, state) do
invoke_callback(state, :handle_connected, [transport(state)])
end
def handle_cast({:notify_disconnected, reason}, state) do
invoke_callback(reinit(state), :handle_disconnected, [reason])
end
def handle_cast({:notify_message, encoded_message}, state) do
decoded_message = state.serializer.decode_message(encoded_message)
handle_message(decoded_message, state)
end
@doc false
def handle_call({__MODULE__, :call, request}, from, state) do
case state.callback.handle_call(request, from, transport(state), state.callback_state) do
{:reply, reply, callback_state} ->
{:reply, reply, %{state | callback_state: callback_state}}
{:reply, reply, callback_state, timeout} ->
{:reply, reply, %{state | callback_state: callback_state}, timeout}
{:noreply, callback_state} ->
{:noreply, %{state | callback_state: callback_state}}
{:noreply, callback_state, timeout} ->
{:noreply, %{state | callback_state: callback_state}, timeout}
{:stop, reason, callback_state} ->
{:stop, reason, %{state | callback_state: callback_state}}
{:stop, reason, reply, callback_state} ->
{:stop, reason, reply, %{state | callback_state: callback_state}}
end
end
@doc false
def handle_info(
{:DOWN, transport_mref, :process, _, reason},
%{transport_mref: transport_mref} = state
) do
invoke_callback(reinit(state), :handle_disconnected, [{:transport_down, reason}])
end
def handle_info(message, state) do
invoke_callback(state, :handle_info, [message, transport(state)])
end
# -------------------------------------------------------------------
# Handling of Phoenix messages
# -------------------------------------------------------------------
# Dispatches a decoded message ([join_ref, ref, topic, event, payload]) to the proper handler.
# A "phx_reply" whose join_ref matches its own ref (or is nil) is the reply to a join request.
defp handle_message(message, state) do
[join_ref, ref, topic, event, payload] = message
cond do
event == "phx_reply" and join_ref in [ref, nil] ->
handle_join_reply(join_ref, topic, payload, state)
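# the message refers to a previous (stale) channel instance on this topic, so ignore it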
join_ref != join_ref(topic) and event in ["phx_reply", "phx_close", "phx_error"] ->
{:noreply, state}
event == "phx_reply" ->
handle_reply(ref, topic, payload, state)
event in ["phx_close", "phx_error"] ->
handle_channel_closed(topic, payload, state)
true ->
handle_server_message(topic, event, payload, state)
end
end
defp handle_join_reply(join_ref, topic, payload, state) do
case payload["status"] do
"ok" ->
store_join_ref(topic, join_ref)
invoke_callback(state, :handle_joined, [topic, payload["response"], transport(state)])
"error" ->
invoke_callback(state, :handle_join_error, [topic, payload["response"], transport(state)])
end
end
# server replied to a non-join message
defp handle_reply(ref, topic, payload, state),
do: invoke_callback(state, :handle_reply, [topic, ref, payload, transport(state)])
# channel has been closed (phx_close) or crashed (phx_error) on the server
defp handle_channel_closed(topic, payload, state) do
delete_join_ref(topic)
invoke_callback(state, :handle_channel_closed, [topic, payload, transport(state)])
end
defp handle_server_message(topic, event, payload, state),
do: invoke_callback(state, :handle_message, [topic, event, payload, transport(state)])
# -------------------------------------------------------------------
# Internal functions
# -------------------------------------------------------------------
defp maybe_connect(:connect, state), do: connect(state)
defp maybe_connect(:noconnect, state), do: state
defp connect(%{transport_pid: nil} = state) do
if params_in_url?(state.url) do
raise(
ArgumentError,
"query parameters must be passed as a keyword list from the `init/1` callback"
)
end
{:ok, transport_pid} = state.transport_mod.start_link(url(state), state.transport_opts)
transport_mref = Process.monitor(transport_pid)
%{state | transport_pid: transport_pid, transport_mref: transport_mref}
end
defp params_in_url?(url), do: not is_nil(URI.parse(url).query)
defp url(state), do: "#{state.url}?#{URI.encode_query(state.query_params)}"
defp reinit(state) do
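# drop all per-topic join refs and the message ref counter from the process dictionary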
Process.get_keys()
|> Stream.filter(&match?({__MODULE__, _}, &1))
|> Enum.each(&Process.delete/1)
if state.transport_mref != nil, do: Process.demonitor(state.transport_mref, [:flush])
%{state | transport_pid: nil, transport_mref: nil}
end
defp transport(state), do: Map.take(state, [:transport_mod, :transport_pid, :serializer])
defp next_ref(event, topic) do
ref = Process.get({__MODULE__, :ref}, 0) + 1
Process.put({__MODULE__, :ref}, ref)
join_ref = if event == "phx_join", do: ref, else: join_ref(topic)
{join_ref, ref}
end
defp store_join_ref(topic, join_ref),
do: Process.put({__MODULE__, {:join_ref, topic}}, join_ref)
defp join_ref(topic), do: Process.get({__MODULE__, {:join_ref, topic}})
defp delete_join_ref(topic), do: Process.delete({__MODULE__, {:join_ref, topic}})
defp invoke_callback(state, function, args) do
callback_response = apply(state.callback, function, args ++ [state.callback_state])
handle_callback_response(callback_response, state)
end
defp handle_callback_response({:ok, callback_state}, state),
do: {:noreply, %{state | callback_state: callback_state}}
defp handle_callback_response({:connect, callback_state}, state),
do: {:noreply, connect(%{state | callback_state: callback_state})}
defp handle_callback_response({:connect, url, query_params, callback_state}, state) do
state =
state
|> Map.put(:callback_state, callback_state)
|> Map.put(:url, url)
|> Map.put(:query_params, Enum.uniq_by(query_params ++ [{"vsn", "2.0.0"}], &elem(&1, 0)))
{:noreply, connect(state)}
end
defp handle_callback_response({:stop, reason, callback_state}, state),
do: {:stop, reason, %{state | callback_state: callback_state}}
end
lib/gen_socket_client.ex
defmodule Cloak.Ecto.HMAC do
@moduledoc """
A custom `Ecto.Type` for hashing fields using `:crypto.hmac/3`.
## Why
If you store a hash of a field's value, you can then query on it as a proxy
for an encrypted field. This works because HMAC is deterministic and
always results in the same value, while secure encryption does not. Be
warned, however, that hashing will expose which fields have the same value,
because they will contain the same hash.
## Security
HMAC is **more secure** than `Cloak.Ecto.SHA256`, because it uses a
secret to obfuscate the hash. This makes it harder to guess the value of
the field.
## Configuration
Create an `HMAC` field in your project:
defmodule MyApp.Hashed.HMAC do
use Cloak.Ecto.HMAC, otp_app: :my_app
end
Then, configure it with a `:secret` and `:algorithm`, either using
mix configuration:
config :my_app, MyApp.Hashed.HMAC,
algorithm: :sha512,
secret: "secret"
Or using the `init/1` callback to fetch configuration at runtime:
defmodule MyApp.Hashed.HMAC do
use Cloak.Ecto.HMAC, otp_app: :my_app
@impl Cloak.Ecto.HMAC
def init(config) do
config = Keyword.merge(config, [
algorithm: :sha512,
secret: System.get_env("HMAC_SECRET")
])
{:ok, config}
end
end
## Usage
Create the hash field with the type `:binary`. Add it to your schema
definition like this:
schema "table" do
field :field_name, MyApp.Encrypted.Binary
field :field_name_hash, MyApp.Hashed.HMAC
end
Ensure that the hash is updated whenever the target field changes with the
`put_change/3` function:
def changeset(struct, attrs \\\\ %{}) do
struct
|> cast(attrs, [:field_name, :field_name_hash])
|> put_hashed_fields()
end
defp put_hashed_fields(changeset) do
changeset
|> put_change(:field_name_hash, get_field(changeset, :field_name))
end
Query the Repo using the `:field_name_hash` in any place you would typically
query by `:field_name`.
user = Repo.get_by(User, email_hash: "<EMAIL>")
"""
@typedoc "HMAC algorithms supported by Cloak.Field.HMAC"
@type algorithms :: :md5 | :ripemd160 | :sha | :sha224 | :sha256 | :sha384 | :sha512
@doc """
Configures the `HMAC` field using runtime information.
## Example
@impl Cloak.Fields.HMAC
def init(config) do
config = Keyword.merge(config, [
algorithm: :sha512,
secret: System.get_env("HMAC_SECRET")
])
{:ok, config}
end
"""
@callback init(config :: Keyword.t()) :: {:ok, Keyword.t()} | {:error, any}
@doc false
defmacro __using__(opts) do
otp_app = Keyword.fetch!(opts, :otp_app)
quote do
alias Cloak.Ecto.Crypto
@behaviour Cloak.Ecto.HMAC
@behaviour Ecto.Type
@algorithms ~w[
md5
ripemd160
sha
sha224
sha256
sha384
sha512
]a
@impl Cloak.Ecto.HMAC
def init(config) do
{:ok, config}
end
@impl Ecto.Type
def type, do: :binary
@impl Ecto.Type
def cast(nil) do
{:ok, nil}
end
def cast(value) when is_binary(value) do
{:ok, value}
end
def cast(_value) do
:error
end
@impl Ecto.Type
def dump(nil) do
{:ok, nil}
end
def dump(value) when is_binary(value) do
config = build_config()
{:ok, Crypto.hmac(config[:algorithm], config[:secret], value)}
end
def dump(_value) do
:error
end
@impl Ecto.Type
def embed_as(_format) do
:self
end
@impl Ecto.Type
def equal?(term1, term2) do
term1 == term2
end
@impl Ecto.Type
def load(value) do
{:ok, value}
end
defoverridable init: 1, type: 0, cast: 1, dump: 1, load: 1, embed_as: 1, equal?: 2
defp build_config do
{:ok, config} =
unquote(otp_app)
|> Application.get_env(__MODULE__, [])
|> init()
validate_config(config)
end
defp validate_config(config) do
unless is_binary(config[:secret]) do
secret = inspect(config[:secret])
raise Cloak.InvalidConfig, "#{secret} is an invalid secret for #{inspect(__MODULE__)}"
end
unless config[:algorithm] in @algorithms do
algo = inspect(config[:algorithm])
raise Cloak.InvalidConfig,
"#{algo} is an invalid hash algorithm for #{inspect(__MODULE__)}"
end
config
end
end
end
end
lib/cloak_ecto/types/hmac.ex
defmodule Sourceror.Identifier do
@moduledoc false
@unary_ops [:&, :!, :^, :not, :+, :-, :~~~, :@]
binary_ops = [
:<-,
:\\,
:when,
:"::",
:|,
:=,
:||,
:|||,
:or,
:&&,
:&&&,
:and,
:==,
:!=,
:=~,
:===,
:!==,
:<,
:<=,
:>=,
:>,
:|>,
:<<<,
:>>>,
:<~,
:~>,
:<<~,
:~>>,
:<~>,
:<|>,
:in,
:^^^,
:"//",
:++,
:--,
:..,
:<>,
:+,
:-,
:*,
:/,
:.
]
@binary_ops (if Version.match?(System.version(), "~> 1.12") do
binary_ops ++ Enum.map(~w[+++ ---], &String.to_existing_atom/1)
else
binary_ops
end)
@doc """
Checks if the given identifier is a unary op.
## Examples
iex> Sourceror.Identifier.is_unary_op(:+)
true
"""
@spec is_unary_op(Macro.t()) :: Macro.t()
defguard is_unary_op(op) when is_atom(op) and op in @unary_ops
@doc """
Checks if the given identifier is a binary op.
## Examples
iex> Sourceror.Identifier.is_binary_op(:+)
true
"""
@spec is_binary_op(Macro.t()) :: Macro.t()
defguard is_binary_op(op) when is_atom(op) and op in @binary_ops
@doc """
Checks if the given atom is a valid module alias.
## Examples
iex> Sourceror.Identifier.valid_alias?(Foo)
true
iex> Sourceror.Identifier.valid_alias?(:foo)
false
"""
def valid_alias?(atom) when is_atom(atom) do
valid_alias?(to_charlist(atom))
end
def valid_alias?('Elixir' ++ rest), do: valid_alias_piece?(rest)
def valid_alias?(_other), do: false
defp valid_alias_piece?([?., char | rest]) when char >= ?A and char <= ?Z,
do: valid_alias_piece?(trim_leading_while_valid_identifier(rest))
defp valid_alias_piece?([]), do: true
defp valid_alias_piece?(_other), do: false
defp trim_leading_while_valid_identifier([char | rest])
when char >= ?a and char <= ?z
when char >= ?A and char <= ?Z
when char >= ?0 and char <= ?9
when char == ?_ do
trim_leading_while_valid_identifier(rest)
end
defp trim_leading_while_valid_identifier(other) do
other
end
end
lib/sourceror/identifier.ex
defmodule Timex.Macros do
@moduledoc false
@doc """
Wraps a function definition so that each call emits a runtime deprecation warning on `:stderr`.
The message parameter should be used to communicate the action needed to move to supported behaviour.
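## Example
A sketch of intended usage; the module and function names below are hypothetical:

    defmodule MyApp.Util do
      import Timex.Macros

      def new_fun(x), do: x * 2

      defdeprecated old_fun(x), "use new_fun/1 instead" do
        new_fun(x)
      end
    end

Calling `MyApp.Util.old_fun(1)` then prints the deprecation warning on `:stderr` and returns `2`.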
"""
defmacro defdeprecated({name, _env, args} = head, message, do: body) do
caller = Enum.join(Module.split(__CALLER__.module), ".")
{name, len} =
case {name, args} do
{:when, [{name, _, args} | _]} -> {name, Enum.count(args)}
_ -> {name, Enum.count(args)}
end
quote do
def unquote(head) do
IO.write(
:stderr,
"warning: #{unquote(caller)}.#{unquote(name)}/#{unquote(len)} is deprecated, #{
unquote(message)
}\n"
)
unquote(body)
end
end
end
@doc """
A guard macro which asserts that the given value is an integer >= 0
"""
defmacro is_positive_integer(n) do
quote do
is_integer(unquote(n)) and unquote(n) >= 0
end
end
@doc """
A guard macro which asserts that the given value is an integer or float >= 0
"""
defmacro is_positive_number(n) do
quote do
is_number(unquote(n)) and unquote(n) >= 0
end
end
@doc """
A guard macro which asserts that the given value is an integer between the values min and max
"""
defmacro is_integer_in_range(n, min, max) do
quote do
is_integer(unquote(n)) and unquote(n) >= unquote(min) and unquote(n) <= unquote(max)
end
end
@doc """
A guard macro which asserts that the given value is a float between the values min and max,
where max is not included in the range (this accounts for fractions, which can be arbitrarily precise)
"""
defmacro is_float_in_range(n, min, max) do
quote do
is_float(unquote(n)) and unquote(n) >= unquote(min) and unquote(n) < unquote(max)
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of 0-999
"""
defmacro is_millisecond(ms) do
quote do
is_integer_in_range(unquote(ms), 0, 999) or is_float_in_range(unquote(ms), 0, 1000)
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of 0-59
"""
defmacro is_second(s) do
quote do
is_integer_in_range(unquote(s), 0, 59)
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of 0-59
"""
defmacro is_minute(m) do
quote do
is_integer_in_range(unquote(m), 0, 59)
end
end
@doc """
A guard macro which asserts that the given value is a valid hour: 0-23 for `:exclusive`, or 0-24 for `:inclusive`
"""
defmacro is_hour(h, :exclusive) do
quote do
is_integer_in_range(unquote(h), 0, 23)
end
end
defmacro is_hour(h, :inclusive) do
quote do
is_integer_in_range(unquote(h), 0, 24)
end
end
@doc """
A guard macro which asserts that the given values form a valid Erlang timestamp
"""
defmacro is_timestamp(mega, sec, micro) do
quote do
is_integer(unquote(mega)) and
is_integer(unquote(sec)) and
is_integer(unquote(micro))
end
end
@doc """
A guard macro which asserts that the given value is a valid Gregorian year value
"""
defmacro is_year(y) do
quote do
is_positive_integer(unquote(y))
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of 1-12
"""
defmacro is_month(m) do
quote do
is_integer_in_range(unquote(m), 1, 12)
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of 1-7
"""
defmacro is_day_of_week(d, :mon) do
quote do
is_integer_in_range(unquote(d), 1, 7)
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of 1-31
"""
defmacro is_day_of_month(d) do
quote do
is_integer_in_range(unquote(d), 1, 31)
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of 1-366
"""
defmacro is_day_of_year(d) do
quote do
is_integer_in_range(unquote(d), 1, 366)
end
end
@doc """
A guard macro which asserts that the given value is a valid iso day for the given year.
For a leap year this would be in the range of 1-366. For a regular year this would be
in the range of 1-365.
## Examples
iex> import Timex.Macros
...> is_iso_day_of_year(2001, 1)
true
iex> import Timex.Macros
...> is_iso_day_of_year(2001, 0)
false
iex> import Timex.Macros
...> is_iso_day_of_year(2012, 366)
true
iex> import Timex.Macros
...> is_iso_day_of_year(2011, 366)
false
iex> import Timex.Macros
...> is_iso_day_of_year(2012, 367)
false
"""
defmacro is_iso_day_of_year(y, d) do
quote do
is_integer_in_range(unquote(d), 1, 365) or
(unquote(d) == 366 and is_leap_year(unquote(y)))
end
end
@doc """
A guard macro which returns true if the given value is a leap year
## Examples
iex> import Timex.Macros
...> is_leap_year(2001)
false
iex> import Timex.Macros
...> is_leap_year(2000)
true
iex> import Timex.Macros
...> is_leap_year(2004)
true
iex> import Timex.Macros
...> is_leap_year(1900)
false
"""
defmacro is_leap_year(y) do
quote do
(rem(unquote(y), 4) == 0 and rem(unquote(y), 100) != 0) or rem(unquote(y), 400) == 0
end
end
@doc """
A guard macro which asserts that the given value is an integer in the range of 1-53
"""
defmacro is_week_of_year(w) do
quote do
is_integer_in_range(unquote(w), 1, 53)
end
end
@doc """
A guard macro which asserts that the given values are a valid year, month, and day of month
"""
defmacro is_date(y, m, d) do
quote do
is_year(unquote(y)) and is_month(unquote(m)) and is_day_of_month(unquote(d))
end
end
@doc """
A guard macro which asserts that the given values are a valid hour, minute, second, and optional millisecond
"""
defmacro is_time(h, m, s, ms \\ 0) do
quote do
is_hour(unquote(h), :exclusive) and is_minute(unquote(m)) and is_second(unquote(s)) and
is_millisecond(unquote(ms))
end
end
@doc """
A guard macro which asserts that the given values are a valid year, month, day, hour,
minute, second, and optional millisecond
"""
defmacro is_datetime(y, m, d, h, mm, s, ms \\ 0) do
quote do
is_date(unquote(y), unquote(m), unquote(d)) and
is_time(unquote(h), unquote(mm), unquote(s), unquote(ms))
end
end
@doc """
A guard macro which asserts that the given values compose a timestamp which is representable
by a Date or DateTime, relative to year zero
"""
defmacro is_date_timestamp(mega, secs, micro) do
quote do
is_positive_integer(unquote(mega)) and
is_positive_integer(unquote(secs)) and
is_positive_integer(unquote(micro))
end
end
end
lib/timex/macros.ex
defmodule ElixirRigidPhysics.Geometry.Capsule do
@moduledoc """
Capsule geometry module.
[Capsules](https://en.wikipedia.org/wiki/Capsule_(geometry)) are cylinders capped with hemispheres, with two dimensions:
* `axial_length` -- the length of the cylinder (not including hemispheres)
* `cap_radius` -- the radius of the hemispheres
Their center is assumed to be at the origin, but in the absence of a frame that doesn't matter.
They are assumed to be vertically-oriented; their main axis is aligned with the y-axis (up).
"""
require Record
Record.defrecord(:capsule, axial_length: 0.5, cap_radius: 0.25)
@type capsule :: record(:capsule, axial_length: number, cap_radius: number)
alias Graphmath.Vec3
@doc """
Creates a capsule geometry.
`axial_length` is its length on the y-axis (up) without the hemispheres, `cap_radius` is the radius of the hemisphere ends.
## Examples
iex> require ElixirRigidPhysics.Geometry.Capsule, as: Capsule
ElixirRigidPhysics.Geometry.Capsule
iex> Capsule.create(2.0, 3.0)
{:capsule, 2.0, 3.0}
"""
@spec create(number, number) :: capsule
def create(axial_length, cap_radius),
do: capsule(axial_length: axial_length, cap_radius: cap_radius)
@doc """
Gets the interior principle points of a capsule in local space.
"principle points" are the point on the capsule at the center of where the axis starts capping.
## Examples
iex> # test unit capsule
iex> require ElixirRigidPhysics.Geometry.Capsule, as: Capsule
iex> Capsule.get_principle_points( Capsule.capsule(axial_length: 1.0, cap_radius: 0.5))
{ {0.0, -0.5, 0.0}, {0.0, 0.5, 0.0} }
iex> Capsule.get_principle_points( Capsule.capsule(axial_length: 10.0, cap_radius: 0.5))
{ {0.0, -5.0, 0.0}, {0.0, 5.0, 0.0} }
"""
@spec get_principle_points(capsule) :: {{number, number, number}, {number, number, number}}
def get_principle_points(capsule(axial_length: l)) do
{
{0.0, -l / 2.0, 0.0},
{0.0, l / 2.0, 0.0}
}
end
@doc """
Finds a support point (usually for GJK) on a capsule given a search direction.
## Examples
iex> require ElixirRigidPhysics.Geometry.Capsule, as: Capsule
iex> c = Capsule.create(2.0, 1.0)
iex> Capsule.support_point(c, {1.0,1.0,1.0})
{0.0, 1.0, 0.0}
iex> require ElixirRigidPhysics.Geometry.Capsule, as: Capsule
iex> c = Capsule.create(2.0, 1.0)
iex> Capsule.support_point(c, {1.0,-1.0,1.0})
{0.0, -1.0, 0.0}
"""
@spec support_point(capsule, Vec3.vec3()) :: Vec3.vec3()
def support_point(capsule(axial_length: l), {_x, y, _z} = _direction) do
# technique borrowed from ReactPhysics3D
# it weirds me out that the most extreme point is always a capsule end, but maybe that's okay?
hl = l / 2.0
dot_top = hl * y
dot_bottom = -hl * y
if dot_top > dot_bottom do
{0.0, hl, 0.0}
else
{0.0, -hl, 0.0}
end
end
end
lib/geometry/capsule.ex
defmodule Connect do
@x_piece "X"
@o_piece "O"
@players %{
@x_piece => :black,
@o_piece => :white
}
@out_of_bounds_index -1
@doc """
Calculates the winner (if any) of a board
using "O" as the white player
and "X" as the black player
"""
@spec result_for([String.t()]) :: :none | :black | :white
def result_for([piece]), do: @players[piece]
def result_for(board) do
board = Enum.map(board, &String.graphemes/1)
cond do
winner?(transpose(board), @o_piece) ->
@players[@o_piece]
winner?(board, @x_piece) ->
@players[@x_piece]
true ->
:none
end
end
defp winner?(board, piece) do
board
|> Enum.with_index()
|> Enum.any?(&opposite_side_connected?(board, piece, &1))
end
defp opposite_side_connected?(board, piece, {[square | _rest], row_index}) do
piece == square and opposite_side_reached?(board, piece, [{row_index, 0}])
end
defp opposite_side_reached?(board, piece, [square | _rest] = path) do
{row_index, column_index} = square
if column_index == opposite_edge(board) do
true
else
board
|> adjacent_squares(row_index, column_index)
|> Enum.reject(&square_already_in_path?(path, &1))
|> Enum.filter(&square_contains_piece?(board, piece, &1))
|> Enum.any?(&opposite_side_reached?(board, piece, [&1 | path]))
end
end
defp opposite_edge(board) do
board
|> hd()
|> length()
|> Kernel.-(1)
end
defp square_already_in_path?(path, square), do: square in path
defp square_contains_piece?(board, piece, {row, column}) do
board
|> Enum.at(row)
|> Enum.at(column)
|> Kernel.==(piece)
end
defp transpose(board) do
board
|> List.zip()
|> Enum.map(&Tuple.to_list/1)
end
defp adjacent_squares(board, row_index, column_index) do
for y <- adjacent_range(row_index),
x <- adjacent_range(column_index),
y > @out_of_bounds_index and y < length(board),
x > @out_of_bounds_index and x <= opposite_edge(board) do
{y, x}
end
end
defp adjacent_range(index), do: (index - 1)..(index + 1)
end
elixir/connect/lib/connect.ex
defmodule Oasis.Spec.Parameter do
@moduledoc false
alias Oasis.InvalidSpecError
@locations ["query", "head", "path", "cookie"]
def build(path_expr, parameter) do
parameter
|> check_schema_or_content(parameter["schema"], parameter["content"])
|> check_name(path_expr)
end
defp check_schema_or_content(parameter, nil, nil) do
raise InvalidSpecError,
"A parameter MUST contain either a schema property, or a content property, but not found any in #{
inspect(parameter, pretty: true)
}"
end
defp check_schema_or_content(parameter, schema, nil) when schema != nil do
parameter
end
defp check_schema_or_content(parameter, nil, content) when content != nil do
Enum.map(content, fn {content_type, media_type} ->
if media_type["schema"] == nil do
raise InvalidSpecError,
"Not found required schema field in media type object #{inspect(content_type)}: #{
inspect(media_type, pretty: true)
} in parameter #{inspect(parameter, pretty: true)}"
end
end)
parameter
end
defp check_schema_or_content(parameter, _schema, _content) do
raise InvalidSpecError,
"A parameter MUST contain either a schema property, or a content property, but found both in #{
inspect(parameter, pretty: true)
}"
end
defp check_name(%{"in" => "path", "name" => name} = parameter, path_expr)
when is_bitstring(name) do
if String.contains?(path_expr, name) do
parameter
else
raise InvalidSpecError,
"The name field: `#{name}` MUST correspond to a template expression occurring within the path: `#{
path_expr
}`"
end
end
defp check_name(%{"in" => "header", "name" => name} = parameter, _path_expr)
when is_bitstring(name) do
# If `in` is "header" and the name field is "Accept", "Content-Type" or "Authorization",
# the parameter definition SHALL be ignored.
lowercase = name |> String.trim() |> String.downcase()
if lowercase in ["accept", "content-type", "authorization"] do
nil
else
# Since `Plug` converts all names of header parameters in lowercase by default,
# we need to do the same conversion as well when define schema of header parameter.
Map.put(parameter, "name", lowercase)
end
end
defp check_name(%{"in" => in_field} = _parameter, path_expr) when in_field not in @locations do
raise InvalidSpecError,
"The location of the parameter: `#{in_field}` is invalid in the path: `#{path_expr}`, expect to be one of #{
inspect(@locations)
}"
end
defp check_name(%{"name" => name} = parameter, path_expr) when is_bitstring(name) do
if String.trim(name) == "" do
raise InvalidSpecError, "The name of the parameter is missing in the path: `#{path_expr}`"
end
parameter
end
defp check_name(%{"name" => name} = _parameter, path_expr) do
raise InvalidSpecError,
"The name of the parameter expect to be a string, but got `#{inspect(name)}` in the path: `#{
path_expr
}`"
end
defp check_name(_parameter, path_expr) do
raise InvalidSpecError, "The name of the parameter is missing in the path: `#{path_expr}`"
end
end
lib/oasis/spec/parameter.ex
import Kernel, except: [to_string: 1]
defmodule Ecto.DateTime.Utils do
@moduledoc false
@doc "Pads with zero"
def zero_pad(val, count) do
num = Integer.to_string(val)
pad_length = max(count - byte_size(num), 0)
:binary.copy("0", pad_length) <> num
end
@doc "Converts to integer if possible"
def to_i(nil), do: nil
def to_i({int, _}) when is_integer(int), do: int
def to_i(int) when is_integer(int), do: int
def to_i(bin) when is_binary(bin) do
case Integer.parse(bin) do
{int, ""} -> int
_ -> nil
end
end
@doc "A guard to check for dates"
defmacro is_date(year, month, day) do
quote do
is_integer(unquote(year)) and unquote(month) in 1..12 and unquote(day) in 1..31
end
end
@doc "A guard to check for times"
defmacro is_time(hour, min, sec, usec \\ 0) do
quote do
unquote(hour) in 0..23 and
unquote(min) in 0..59 and
unquote(sec) in 0..59 and
unquote(usec) in 0..999_999
end
end
@doc """
Checks if the trailing part of a date/time matches ISO specs.
"""
defmacro is_iso_8601(x) do
quote do: unquote(x) in ["", "Z"]
end
@doc """
Gets microseconds from rest and validates it.
Returns nil if an invalid format is given.
"""
def usec("." <> rest) do
case parse(rest, "") do
{int, rest} when byte_size(int) > 6 and is_iso_8601(rest) ->
String.to_integer(binary_part(int, 0, 6))
{int, rest} when byte_size(int) in 1..6 and is_iso_8601(rest) ->
pad = String.duplicate("0", 6 - byte_size(int))
String.to_integer(int <> pad)
_ ->
nil
end
end
def usec(rest) when is_iso_8601(rest), do: 0
def usec(_), do: nil
@doc """
Compare two datetimes.
Receives two datetimes and compares the `t1`
against `t2` and returns `:lt`, `:eq` or `:gt`.
"""
def compare(%{__struct__: module} = t1, %{__struct__: module} = t2) do
{:ok, t1} = module.dump(t1)
{:ok, t2} = module.dump(t2)
cond do
t1 == t2 -> :eq
t1 > t2 -> :gt
true -> :lt
end
end
defp parse(<<h, t::binary>>, acc) when h in ?0..?9, do: parse(t, <<acc::binary, h>>)
defp parse(rest, acc), do: {acc, rest}
end
defmodule Ecto.Date do
import Ecto.DateTime.Utils
@doc """
Compare two dates.
Receives two dates and compares the `t1`
against `t2` and returns `:lt`, `:eq` or `:gt`.
"""
defdelegate compare(t1, t2), to: Ecto.DateTime.Utils
@moduledoc """
An Ecto type for dates.
"""
@behaviour Ecto.Type
defstruct [:year, :month, :day]
@doc """
The Ecto primitive type.
"""
def type, do: :date
@doc """
Casts the given value to date.
It supports:
* a binary in the "YYYY-MM-DD" format
* a binary in the "YYYY-MM-DD HH:MM:SS" format
(may be separated by T and/or followed by "Z", as in `2014-04-17T14:00:00Z`)
* a binary in the "YYYY-MM-DD HH:MM:SS.USEC" format
(may be separated by T and/or followed by "Z", as in `2014-04-17T14:00:00.030Z`)
* a map with `"year"`, `"month"` and `"day"` keys
with integer or binaries as values
* a map with `:year`, `:month` and `:day` keys
with integer or binaries as values
* a tuple with `{year, month, day}` as integers or binaries
* an `Ecto.Date` struct itself
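For example (results shown as rendered by the `Inspect` implementation later in this file):

    iex> Ecto.Date.cast("2015-12-25")
    {:ok, #Ecto.Date<2015-12-25>}
    iex> Ecto.Date.cast({2015, 12, 25})
    {:ok, #Ecto.Date<2015-12-25>}
    iex> Ecto.Date.cast("2015-25-12")
    :error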
"""
def cast(d), do: d |> do_cast() |> validate_cast()
@doc """
Same as `cast/1` but raises `Ecto.CastError` on invalid dates.
"""
def cast!(value) do
case cast(value) do
{:ok, date} -> date
:error -> raise Ecto.CastError, "cannot cast #{inspect value} to date"
end
end
defp do_cast(<<year::4-bytes, ?-, month::2-bytes, ?-, day::2-bytes>>),
do: from_parts(to_i(year), to_i(month), to_i(day))
defp do_cast(<<year::4-bytes, ?-, month::2-bytes, ?-, day::2-bytes, sep,
_hour::2-bytes, ?:, _min::2-bytes, ?:, _sec::2-bytes, _rest::binary>>) when sep in [?\s, ?T],
do: from_parts(to_i(year), to_i(month), to_i(day))
defp do_cast(%Ecto.Date{} = d),
do: {:ok, d}
defp do_cast(%{"year" => empty, "month" => empty, "day" => empty}) when empty in ["", nil],
do: {:ok, nil}
defp do_cast(%{year: empty, month: empty, day: empty}) when empty in ["", nil],
do: {:ok, nil}
defp do_cast(%{"year" => year, "month" => month, "day" => day}),
do: from_parts(to_i(year), to_i(month), to_i(day))
defp do_cast(%{year: year, month: month, day: day}),
do: from_parts(to_i(year), to_i(month), to_i(day))
defp do_cast({year, month, day}),
do: from_parts(to_i(year), to_i(month), to_i(day))
defp do_cast(_),
do: :error
defp validate_cast(:error), do: :error
defp validate_cast({:ok, nil}), do: {:ok, nil}
defp validate_cast({:ok, %{year: y, month: m, day: d} = date}) do
if :calendar.valid_date(y, m, d), do: {:ok, date}, else: :error
end
defp from_parts(year, month, day) when is_date(year, month, day) do
{:ok, %Ecto.Date{year: year, month: month, day: day}}
end
defp from_parts(_, _, _), do: :error
@doc """
Converts an `Ecto.Date` into a date triplet.
"""
def dump(%Ecto.Date{year: year, month: month, day: day}) do
{:ok, {year, month, day}}
end
def dump(_), do: :error
@doc """
Converts a date triplet into an `Ecto.Date`.
"""
def load({year, month, day}) do
{:ok, %Ecto.Date{year: year, month: month, day: day}}
end
def load(_), do: :error
@doc """
Converts `Ecto.Date` to a readable string representation.
"""
def to_string(%Ecto.Date{year: year, month: month, day: day}) do
zero_pad(year, 4) <> "-" <> zero_pad(month, 2) <> "-" <> zero_pad(day, 2)
end
@doc """
Converts `Ecto.Date` to ISO8601 representation.
"""
def to_iso8601(date) do
to_string(date)
end
@doc """
Returns an `Ecto.Date` in UTC.
"""
def utc do
{{year, month, day}, _time} = :erlang.universaltime
%Ecto.Date{year: year, month: month, day: day}
end
@doc """
Returns an Erlang date tuple from an `Ecto.Date`.
"""
def to_erl(%Ecto.Date{year: year, month: month, day: day}) do
{year, month, day}
end
@doc """
Returns an `Ecto.Date` from an Erlang date tuple.
"""
def from_erl({year, month, day}) do
%Ecto.Date{year: year, month: month, day: day}
end
end
defmodule Ecto.Time do
import Ecto.DateTime.Utils
@doc """
Compare two times.
Receives two times and compares the `t1`
against `t2` and returns `:lt`, `:eq` or `:gt`.
"""
defdelegate compare(t1, t2), to: Ecto.DateTime.Utils
@moduledoc """
An Ecto type for time.
"""
@behaviour Ecto.Type
defstruct [:hour, :min, :sec, usec: 0]
@doc """
The Ecto primitive type.
"""
def type, do: :time
@doc """
Casts the given value to time.
It supports:
* a binary in the "HH:MM:SS" format
(may be followed by "Z", as in `12:00:00Z`)
* a binary in the "HH:MM:SS.USEC" format
(may be followed by "Z", as in `12:00:00.005Z`)
* a map with `"hour"`, `"minute"` keys with `"second"` and `"microsecond"`
as optional keys and values are integers or binaries
* a map with `:hour`, `:minute` keys with `:second` and `:microsecond`
as optional keys and values are integers or binaries
* a tuple with `{hour, min, sec}` as integers or binaries
* a tuple with `{hour, min, sec, usec}` as integers or binaries
* an `Ecto.Time` struct itself
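For example (results shown as rendered by the `Inspect` implementation later in this file):

    iex> Ecto.Time.cast("23:50:07")
    {:ok, #Ecto.Time<23:50:07>}
    iex> Ecto.Time.cast({23, 50, 7, 123000})
    {:ok, #Ecto.Time<23:50:07.123000>}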
"""
def cast(<<hour::2-bytes, ?:, min::2-bytes, ?:, sec::2-bytes, rest::binary>>) do
if usec = usec(rest) do
from_parts(to_i(hour), to_i(min), to_i(sec), usec)
else
:error
end
end
def cast(%Ecto.Time{} = t),
do: {:ok, t}
def cast(%{"hour" => hour, "min" => min} = map),
do: from_parts(to_i(hour), to_i(min), to_i(Map.get(map, "sec", 0)), to_i(Map.get(map, "usec", 0)))
def cast(%{hour: hour, min: min} = map),
do: from_parts(to_i(hour), to_i(min), to_i(Map.get(map, :sec, 0)), to_i(Map.get(map, :usec, 0)))
def cast(%{"hour" => empty, "minute" => empty}) when empty in ["", nil],
do: {:ok, nil}
def cast(%{hour: empty, minute: empty}) when empty in ["", nil],
do: {:ok, nil}
def cast(%{"hour" => hour, "minute" => minute} = map),
do: from_parts(to_i(hour), to_i(minute), to_i(Map.get(map, "second", 0)), to_i(Map.get(map, "microsecond", 0)))
def cast(%{hour: hour, minute: minute} = map),
do: from_parts(to_i(hour), to_i(minute), to_i(Map.get(map, :second, 0)), to_i(Map.get(map, :microsecond, 0)))
def cast({hour, min, sec}),
do: from_parts(to_i(hour), to_i(min), to_i(sec), 0)
def cast({hour, min, sec, usec}),
do: from_parts(to_i(hour), to_i(min), to_i(sec), to_i(usec))
def cast(_),
do: :error
@doc """
Same as `cast/1` but raises `Ecto.CastError` on invalid times.
"""
def cast!(value) do
case cast(value) do
{:ok, time} -> time
:error -> raise Ecto.CastError, "cannot cast #{inspect value} to time"
end
end
defp from_parts(hour, min, sec, usec) when is_time(hour, min, sec, usec),
do: {:ok, %Ecto.Time{hour: hour, min: min, sec: sec, usec: usec}}
defp from_parts(_, _, _, _),
do: :error
@doc """
Converts an `Ecto.Time` into a time tuple (in the form `{hour, min, sec,
usec}`).
"""
def dump(%Ecto.Time{hour: hour, min: min, sec: sec, usec: usec}) do
{:ok, {hour, min, sec, usec}}
end
def dump(_), do: :error
@doc """
Converts a time tuple like the one returned by `dump/1` into an `Ecto.Time`.
"""
def load({hour, min, sec, usec}) do
{:ok, %Ecto.Time{hour: hour, min: min, sec: sec, usec: usec}}
end
def load({_, _, _} = time) do
{:ok, from_erl(time)}
end
def load(_), do: :error
@doc """
Converts `Ecto.Time` to a string representation.
"""
def to_string(%Ecto.Time{hour: hour, min: min, sec: sec, usec: usec}) do
str = zero_pad(hour, 2) <> ":" <> zero_pad(min, 2) <> ":" <> zero_pad(sec, 2)
if is_nil(usec) or usec == 0 do
str
else
str <> "." <> zero_pad(usec, 6)
end
end
@doc """
Converts `Ecto.Time` to its ISO 8601 representation.
"""
def to_iso8601(time) do
to_string(time)
end
@doc """
Returns an `Ecto.Time` in UTC.
`precision` can be `:sec` or `:usec`.
"""
def utc(precision \\ :sec)
def utc(:sec) do
{_, {hour, min, sec}} = :erlang.universaltime
%Ecto.Time{hour: hour, min: min, sec: sec}
end
def utc(:usec) do
now = {_, _, usec} = :os.timestamp
{_date, {hour, min, sec}} = :calendar.now_to_universal_time(now)
%Ecto.Time{hour: hour, min: min, sec: sec, usec: usec}
end
@doc """
Returns an Erlang time tuple from an `Ecto.Time`.
"""
def to_erl(%Ecto.Time{hour: hour, min: min, sec: sec}) do
{hour, min, sec}
end
@doc """
Returns an `Ecto.Time` from an Erlang time tuple.
"""
def from_erl({hour, min, sec}) do
%Ecto.Time{hour: hour, min: min, sec: sec}
end
end
defmodule Ecto.DateTime do
import Ecto.DateTime.Utils
@unix_epoch :calendar.datetime_to_gregorian_seconds {{1970, 1, 1}, {0, 0, 0}}
@doc """
Compare two datetimes.
Receives two datetimes and compares the `t1`
against `t2` and returns `:lt`, `:eq` or `:gt`.
"""
defdelegate compare(t1, t2), to: Ecto.DateTime.Utils
@moduledoc """
An Ecto type that includes a date and a time.
"""
@behaviour Ecto.Type
defstruct [:year, :month, :day, :hour, :min, :sec, usec: 0]
@doc """
The Ecto primitive type.
"""
def type, do: :naive_datetime
@doc """
Casts the given value to datetime.
It supports:
* a binary in the "YYYY-MM-DD HH:MM:SS" format
(may be separated by T and/or followed by "Z", as in `2014-04-17T14:00:00Z`)
* a binary in the "YYYY-MM-DD HH:MM:SS.USEC" format
(may be separated by T and/or followed by "Z", as in `2014-04-17T14:00:00.030Z`)
* a map with `"year"`, `"month"`,`"day"`, `"hour"`, `"minute"` keys
with `"second"` and `"microsecond"` as optional keys and values are integers or binaries
* a map with `:year`, `:month`,`:day`, `:hour`, `:minute` keys
with `:second` and `:microsecond` as optional keys and values are integers or binaries
* a tuple with `{{year, month, day}, {hour, min, sec}}` as integers or binaries
* a tuple with `{{year, month, day}, {hour, min, sec, usec}}` as integers or binaries
* an `Ecto.DateTime` struct itself
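For example (results shown as rendered by the `Inspect` implementation later in this file):

    iex> Ecto.DateTime.cast("2015-12-25T23:50:07Z")
    {:ok, #Ecto.DateTime<2015-12-25 23:50:07>}
    iex> Ecto.DateTime.cast({{2015, 12, 25}, {23, 50, 7}})
    {:ok, #Ecto.DateTime<2015-12-25 23:50:07>}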
"""
def cast(dt), do: dt |> do_cast() |> validate_cast()
@doc """
Same as `cast/1` but raises `Ecto.CastError` on invalid datetimes.
"""
def cast!(value) do
case cast(value) do
{:ok, datetime} -> datetime
:error -> raise Ecto.CastError, "cannot cast #{inspect value} to datetime"
end
end
defp do_cast(<<year::4-bytes, ?-, month::2-bytes, ?-, day::2-bytes, sep,
hour::2-bytes, ?:, min::2-bytes, ?:, sec::2-bytes, rest::binary>>) when sep in [?\s, ?T] do
if usec = usec(rest) do
from_parts(to_i(year), to_i(month), to_i(day),
to_i(hour), to_i(min), to_i(sec), usec)
else
:error
end
end
defp do_cast(%Ecto.DateTime{} = dt) do
{:ok, dt}
end
defp do_cast(%{"year" => year, "month" => month, "day" => day, "hour" => hour, "min" => min} = map) do
from_parts(to_i(year), to_i(month), to_i(day),
to_i(hour), to_i(min), to_i(Map.get(map, "sec", 0)),
to_i(Map.get(map, "usec", 0)))
end
defp do_cast(%{year: year, month: month, day: day, hour: hour, min: min} = map) do
from_parts(to_i(year), to_i(month), to_i(day),
to_i(hour), to_i(min), to_i(Map.get(map, :sec, 0)),
to_i(Map.get(map, :usec, 0)))
end
defp do_cast(%{"year" => empty, "month" => empty, "day" => empty,
"hour" => empty, "minute" => empty}) when empty in ["", nil] do
{:ok, nil}
end
defp do_cast(%{year: empty, month: empty, day: empty,
hour: empty, minute: empty}) when empty in ["", nil] do
{:ok, nil}
end
defp do_cast(%{"year" => year, "month" => month, "day" => day, "hour" => hour, "minute" => min} = map) do
from_parts(to_i(year), to_i(month), to_i(day),
to_i(hour), to_i(min), to_i(Map.get(map, "second", 0)),
to_i(Map.get(map, "microsecond", 0)))
end
defp do_cast(%{year: year, month: month, day: day, hour: hour, minute: min} = map) do
from_parts(to_i(year), to_i(month), to_i(day),
to_i(hour), to_i(min), to_i(Map.get(map, :second, 0)),
to_i(Map.get(map, :microsecond, 0)))
end
defp do_cast({{year, month, day}, {hour, min, sec}}) do
from_parts(to_i(year), to_i(month), to_i(day),
to_i(hour), to_i(min), to_i(sec), 0)
end
defp do_cast({{year, month, day}, {hour, min, sec, usec}}) do
from_parts(to_i(year), to_i(month), to_i(day),
to_i(hour), to_i(min), to_i(sec), to_i(usec))
end
defp do_cast(_) do
:error
end
defp validate_cast(:error), do: :error
defp validate_cast({:ok, nil}), do: {:ok, nil}
defp validate_cast({:ok, %{year: y, month: m, day: d} = datetime}) do
if :calendar.valid_date(y, m, d), do: {:ok, datetime}, else: :error
end
defp from_parts(year, month, day, hour, min, sec, usec)
when is_date(year, month, day) and is_time(hour, min, sec, usec) do
{:ok, %Ecto.DateTime{year: year, month: month, day: day, hour: hour, min: min, sec: sec, usec: usec}}
end
defp from_parts(_, _, _, _, _, _, _), do: :error
@doc """
Converts an `Ecto.DateTime` into a `{date, time}` tuple.
"""
def dump(%Ecto.DateTime{year: year, month: month, day: day, hour: hour, min: min, sec: sec, usec: usec}) do
{:ok, {{year, month, day}, {hour, min, sec, usec}}}
end
def dump(_), do: :error
@doc """
Converts a `{date, time}` tuple into an `Ecto.DateTime`.
"""
def load({{_, _, _}, {_, _, _, _}} = datetime) do
{:ok, erl_load(datetime)}
end
def load({{_, _, _}, {_, _, _}} = datetime) do
{:ok, from_erl(datetime)}
end
def load(_), do: :error
@doc """
Converts `Ecto.DateTime` into an `Ecto.Date`.
"""
def to_date(%Ecto.DateTime{year: year, month: month, day: day}) do
%Ecto.Date{year: year, month: month, day: day}
end
@doc """
Converts `Ecto.DateTime` into an `Ecto.Time`.
"""
def to_time(%Ecto.DateTime{hour: hour, min: min, sec: sec, usec: usec}) do
%Ecto.Time{hour: hour, min: min, sec: sec, usec: usec}
end
@doc """
Converts the given `Ecto.Date` into `Ecto.DateTime` with the time being
00:00:00.
"""
def from_date(%Ecto.Date{year: year, month: month, day: day}) do
%Ecto.DateTime{year: year, month: month, day: day,
hour: 0, min: 0, sec: 0, usec: 0}
end
@doc """
Converts the given `Ecto.Date` and `Ecto.Time` into `Ecto.DateTime`.
"""
def from_date_and_time(%Ecto.Date{year: year, month: month, day: day},
%Ecto.Time{hour: hour, min: min, sec: sec, usec: usec}) do
%Ecto.DateTime{year: year, month: month, day: day,
hour: hour, min: min, sec: sec, usec: usec}
end
@doc """
Converts `Ecto.DateTime` to its string representation.
"""
def to_string(%Ecto.DateTime{year: year, month: month, day: day, hour: hour, min: min, sec: sec, usec: usec}) do
str = zero_pad(year, 4) <> "-" <> zero_pad(month, 2) <> "-" <> zero_pad(day, 2) <> " " <>
zero_pad(hour, 2) <> ":" <> zero_pad(min, 2) <> ":" <> zero_pad(sec, 2)
if is_nil(usec) or usec == 0 do
str
else
str <> "." <> zero_pad(usec, 6)
end
end
@doc """
Converts `Ecto.DateTime` to its ISO 8601 representation
without timezone specification.
"""
def to_iso8601(%Ecto.DateTime{year: year, month: month, day: day,
hour: hour, min: min, sec: sec, usec: usec}) do
str = zero_pad(year, 4) <> "-" <> zero_pad(month, 2) <> "-" <> zero_pad(day, 2) <> "T" <>
zero_pad(hour, 2) <> ":" <> zero_pad(min, 2) <> ":" <> zero_pad(sec, 2)
if is_nil(usec) or usec == 0 do
str
else
str <> "." <> zero_pad(usec, 6)
end
end
@doc """
Returns an `Ecto.DateTime` in UTC.
`precision` can be `:sec` or `:usec`.
"""
def utc(precision \\ :sec) do
autogenerate(precision)
end
@doc """
Returns an Erlang datetime tuple from an `Ecto.DateTime`.
"""
def to_erl(%Ecto.DateTime{year: year, month: month, day: day, hour: hour, min: min, sec: sec}) do
{{year, month, day}, {hour, min, sec}}
end
@doc """
Returns an `Ecto.DateTime` from an Erlang datetime tuple.
"""
def from_erl({{year, month, day}, {hour, min, sec}}) do
%Ecto.DateTime{year: year, month: month, day: day,
hour: hour, min: min, sec: sec}
end
def from_unix!(integer, unit) do
total = System.convert_time_unit(integer, unit, :microseconds)
microsecond = rem(total, 1_000_000)
{{year, month, day}, {hour, minute, second}} =
:calendar.gregorian_seconds_to_datetime(@unix_epoch + div(total, 1_000_000))
%Ecto.DateTime{year: year, month: month, day: day,
hour: hour, min: minute, sec: second, usec: microsecond}
end
# Callback invoked by autogenerate fields.
@doc false
def autogenerate(precision \\ :sec)
def autogenerate(:sec) do
{date, {h, m, s}} = :erlang.universaltime
erl_load({date, {h, m, s, 0}})
end
def autogenerate(:usec) do
timestamp = {_, _, usec} = :os.timestamp
{date, {h, m, s}} = :calendar.now_to_datetime(timestamp)
erl_load({date, {h, m, s, usec}})
end
defp erl_load({{year, month, day}, {hour, min, sec, usec}}) do
%Ecto.DateTime{year: year, month: month, day: day,
hour: hour, min: min, sec: sec, usec: usec}
end
end
defimpl String.Chars, for: [Ecto.DateTime, Ecto.Date, Ecto.Time] do
def to_string(dt) do
@for.to_string(dt)
end
end
defimpl Inspect, for: [Ecto.DateTime, Ecto.Date, Ecto.Time] do
@inspected inspect(@for)
def inspect(dt, _opts) do
"#" <> @inspected <> "<" <> @for.to_string(dt) <> ">"
end
end
defimpl Ecto.DataType, for: Ecto.DateTime do
def dump(%Ecto.DateTime{year: year, month: month, day: day,
hour: hour, min: min, sec: sec, usec: usec}) do
{:ok, {{year, month, day}, {hour, min, sec, usec}}}
end
end
defimpl Ecto.DataType, for: Ecto.Date do
def dump(%Ecto.Date{year: year, month: month, day: day}) do
{:ok, {year, month, day}}
end
end
defimpl Ecto.DataType, for: Ecto.Time do
def dump(%Ecto.Time{hour: hour, min: min, sec: sec, usec: usec}) do
{:ok, {hour, min, sec, usec}}
end
end
if Code.ensure_loaded?(Poison) do
defimpl Poison.Encoder, for: [Ecto.Date, Ecto.Time, Ecto.DateTime] do
def encode(dt, _opts), do: <<?", @for.to_iso8601(dt)::binary, ?">>
end
end
deps/ecto/lib/ecto/date_time.ex
defmodule Nanoid.Secure do
@moduledoc """
Generate a secure URL-friendly unique ID. This module uses a secure, unpredictable random generator.
By default, the ID will have 21 symbols with a collision probability similar to UUID v4.
"""
use Bitwise
alias Nanoid.Configuration
@doc """
Generates a secure NanoID using the default alphabet.
## Example
Generate a NanoID with the default size of 21 characters.
iex> Nanoid.Secure.generate()
"mJUHrGXZBZpNX50x2xkzf"
Generate a secure NanoID with a custom size of 64 characters.
iex> Nanoid.Secure.generate(64)
"wk9fsUrhK9k-MxY0hLazRKpcSlic8XYDFusks7Jb8FwCVnoQaKFSPsmmLHzP7qCX"
"""
@spec generate(non_neg_integer()) :: binary()
def generate(size \\ Configuration.default_size())
def generate(size) when is_integer(size) and size > 0 do
step = calculate_step(Configuration.default_mask(), size, Configuration.default_alphabet_length())
do_generate(size, Configuration.default_alphabet(), Configuration.default_mask(), step)
end
def generate(_size), do: generate(Configuration.default_size())
@doc """
Generates a secure NanoID using a custom size and an individual alphabet.
## Example
Generate a secure NanoID with the default size of 21 characters and an individual alphabet.
iex> Nanoid.Secure.generate(21, "abcdef123")
"d1dcd2dee333cae1bfdea"
Generate a secure NanoID with custom size of 64 characters and an individual alphabet.
iex> Nanoid.Secure.generate(64, "abcdef123")
"aabbaca3c11accca213babed2bcd1213efb3e3fa1ad23ecbf11c2ffc123f3bbe"
"""
@spec generate(non_neg_integer(), binary()) :: binary()
def generate(size, alphabet)
def generate(size, alphabet) when is_integer(size) and size > 0 and is_binary(alphabet) and byte_size(alphabet) > 1 do
alphabet_length = String.length(alphabet)
mask = calculate_mask(alphabet_length)
step = calculate_step(mask, size, alphabet_length)
do_generate(size, alphabet, mask, step)
end
def generate(size, alphabet) when is_list(alphabet),
do: generate(size, to_string(alphabet))
def generate(size, _alphabet) when is_integer(size) and size > 0,
do: generate(size, Configuration.default_alphabet())
def generate(_size, _alphabet),
do: generate(Configuration.default_size(), Configuration.default_alphabet())
# Generate NanoID recursively as long as the given size is reached
@spec do_generate(non_neg_integer(), binary(), non_neg_integer(), non_neg_integer(), binary()) :: binary()
defp do_generate(size, alphabet, mask, step, acc \\ "")
defp do_generate(size, _alphabet, _mask, _step, acc) when is_binary(acc) and byte_size(acc) >= size,
do: String.slice(acc, 0, size)
defp do_generate(size, alphabet, mask, step, acc) when is_binary(acc) and byte_size(acc) < size,
do: do_generate(size, alphabet, mask, step, acc <> generator(step, alphabet, mask))
defp do_generate(size, alphabet, mask, step, _acc),
do: do_generate(size, alphabet, mask, step, "")
@spec generator(non_neg_integer(), binary(), non_neg_integer()) :: binary()
defp generator(size, alphabet, mask)
defp generator(size, alphabet, mask)
when is_integer(size) and size > 0 and is_binary(alphabet) and byte_size(alphabet) > 1 and is_integer(mask) and mask > 0 do
size
|> random_bytes()
|> Enum.map(&(&1 &&& mask))
|> Enum.map(&String.at(alphabet, &1))
|> Enum.reject(&is_nil/1)
|> Enum.join()
end
defp generator(_size, _alphabet, _mask),
do:
generator(
Configuration.default_size(),
Configuration.default_alphabet(),
Configuration.default_mask()
)
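# Smallest bitmask of the form 2^k - 1 that covers every alphabet index (alphabet_length - 1).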
@spec calculate_mask(non_neg_integer()) :: non_neg_integer()
defp calculate_mask(alphabet_length)
defp calculate_mask(alphabet_length) when is_integer(alphabet_length) and alphabet_length > 1,
do: (2 <<< round(Float.floor(:math.log(alphabet_length - 1) / :math.log(2)))) - 1
defp calculate_mask(_alphabet_length),
do: calculate_mask(2)
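# Number of random bytes to request per batch so that, on average, `size` usable characters
# remain after masking; 1.6 is an over-allocation safety factor.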
@spec calculate_step(non_neg_integer(), non_neg_integer(), non_neg_integer()) :: non_neg_integer()
defp calculate_step(mask, size, alphabet_length)
defp calculate_step(mask, size, alphabet_length) when is_integer(alphabet_length) and alphabet_length > 0,
do: round(Float.ceil(1.6 * mask * size / alphabet_length))
defp calculate_step(mask, size, _alphabet_length),
do: calculate_step(mask, size, 1)
@spec random_bytes(non_neg_integer()) :: nonempty_list(non_neg_integer())
defp random_bytes(size)
defp random_bytes(size) when is_integer(size) and size > 0 do
size
|> :crypto.strong_rand_bytes()
|> :binary.bin_to_list()
end
defp random_bytes(_size), do: random_bytes(Configuration.default_size())
end
lib/nanoid/secure.ex
defmodule Snek.Board.Snake do
@moduledoc """
Represents a snake on a board.
You may also refer to it as a "snake on a plane", as the joke
goes in the Battlesnake community. 😎
"""
@moduledoc since: "0.1.0"
alias __MODULE__
alias Snek.Board.Point
@typedoc """
A unique ID to differentiate between snakes on a board
"""
@typedoc since: "0.1.0"
@type id :: any
@typedoc """
A valid direction for a snake to move according to the game rules.
"""
@typedoc since: "0.1.0"
@type snake_move :: :up | :down | :left | :right
@typedoc """
Whether a snake is currently alive, or has been eliminated.
If eliminated, the reason is encoded. If the elimination was caused by an
opponent, the opponent's snake ID is also specified.
"""
@typedoc since: "0.1.0"
@type state :: :alive
| {:eliminated, :starvation}
| {:eliminated, :out_of_bounds}
| {:eliminated, :self_collision}
| {:eliminated, :collision, id}
| {:eliminated, :head_to_head, id}
@typedoc """
A snake on a board.
"""
@typedoc since: "0.1.0"
@type t :: %Snake{
id: any,
state: state,
health: non_neg_integer,
body: list(Point.t)
}
@enforce_keys [:id, :state, :health, :body]
defstruct [:id, :state, :health, :body]
@doc """
Returns the head of a snake.
If the snake has at least one body part, the first body part (the head) is
returned. Otherwise, `nil` is returned.
## Examples
iex> body = [Snek.Board.Point.new(1, 2), Snek.Board.Point.new(1, 1), Snek.Board.Point.new(1, 1)]
iex> snake = %Snake{id: "mysnek", state: :alive, health: 100, body: body}
iex> Snake.head(snake)
{1, 2}
iex> snake = %Snake{id: "mysnek", state: :alive, health: 100, body: []}
iex> Snake.head(snake)
nil
"""
@doc since: "0.1.0"
@spec head(t) :: Point.t | nil
def head(%Snake{body: [head | _]}), do: head
def head(%Snake{}), do: nil
@doc """
Moves the snake one space in a given direction.
Moving consists of adding a body part to the head of the snake in the given
direction, and also removing the tail body part. The snake's body length
remains unchanged.
If the snake is already eliminated or the snake does not have any body parts,
no move will be applied and the snake will remain unchanged.
If the direction given is `nil`, or not a valid direction in which to move,
the snake will be moved in the `:up` direction instead.
Returns the modified snake.
## Examples
iex> body = [Snek.Board.Point.new(1, 1), Snek.Board.Point.new(1, 1), Snek.Board.Point.new(1, 1)]
iex> snake = %Snake{id: "mysnek", state: :alive, health: 100, body: body}
iex> Snake.move(snake, :up)
%Snake{
id: "mysnek",
state: :alive,
health: 100,
body: [Snek.Board.Point.new(1, 0), Snek.Board.Point.new(1, 1), Snek.Board.Point.new(1, 1)]
}
iex> body = [Snek.Board.Point.new(1, 1), Snek.Board.Point.new(1, 1), Snek.Board.Point.new(1, 1)]
iex> snake = %Snake{id: "mysnek", state: {:eliminated, :starvation}, health: 0, body: body}
iex> snake == Snake.move(snake, :up)
true
iex> snake = %Snake{id: "mysnek", state: :alive, health: 100, body: []}
iex> snake == Snake.move(snake, :up)
true
iex> body = [Snek.Board.Point.new(1, 1), Snek.Board.Point.new(1, 1), Snek.Board.Point.new(1, 1)]
iex> snake = %Snake{id: "mysnek", state: :alive, health: 100, body: body}
iex> snake |> Snake.move(:right) |> Snake.move(nil)
%Snake{
id: "mysnek",
state: :alive,
health: 100,
body: [Snek.Board.Point.new(2, 0), Snek.Board.Point.new(2, 1), Snek.Board.Point.new(1, 1)]
}
"""
@doc since: "0.1.0"
@spec move(t, snake_move | nil) :: t
def move(%Snake{state: state} = snake, _direction) when state != :alive do
snake
end
def move(%Snake{body: body} = snake, _direction) when length(body) < 1 do
snake
end
def move(%Snake{body: [head | _rest]} = snake, direction) when direction in [:up, :down, :left, :right] do
slither(snake, Point.step(head, direction))
end
def move(%Snake{body: [head | _rest]} = snake, _direction) do
slither(snake, Point.step(head, :up))
end
defp slither(snake, new_head) do
new_body = [new_head | Enum.drop(snake.body, -1)]
%Snake{snake | body: new_body}
end
@doc """
Returns the point that is one step toward a given direction from this snake's
perspective.
If the snake has no body parts, `nil` is returned instead of a point.
## Examples
iex> body = [Snek.Board.Point.new(2, 1), Snek.Board.Point.new(1, 1), Snek.Board.Point.new(1, 1)]
iex> snake = %Snake{id: "snek0", state: :alive, health: 99, body: body}
iex> Snake.step(snake, :up)
{2, 0}
iex> body = [Snek.Board.Point.new(2, 1), Snek.Board.Point.new(1, 1), Snek.Board.Point.new(1, 1)]
iex> snake = %Snake{id: "snek0", state: :alive, health: 99, body: body}
iex> Snake.step(snake, :right)
{3, 1}
iex> body = [Snek.Board.Point.new(2, 1), Snek.Board.Point.new(1, 1), Snek.Board.Point.new(1, 1)]
iex> snake = %Snake{id: "snek0", state: :alive, health: 99, body: body}
iex> Snake.step(snake, :down)
{2, 2}
iex> body = [Snek.Board.Point.new(2, 1), Snek.Board.Point.new(1, 1), Snek.Board.Point.new(1, 1)]
iex> snake = %Snake{id: "snek0", state: :alive, health: 99, body: body}
iex> Snake.step(snake, :left)
{1, 1}
iex> snake = %Snake{id: "snek0", state: :alive, health: 0, body: []}
iex> Snake.step(snake, :down)
nil
"""
@doc since: "0.2.0"
@spec step(t, Point.direction | snake_move) :: Point.t | nil
def step(%Snake{body: [head | _rest]}, direction) when not is_nil(head) do
Point.step(head, direction)
end
def step(_snake, _direction), do: nil
@doc """
Decrements the snake's health by 1 point.
Returns the modified snake.
## Examples
iex> body = List.duplicate(Snek.Board.Point.new(1, 1), 3)
iex> snake = %Snake{id: "mysnek", state: :alive, health: 100, body: body}
iex> Snake.hurt(snake).health
99
"""
@doc since: "0.1.0"
@spec hurt(t) :: t
def hurt(snake) do
%Snake{snake | health: snake.health - 1}
end
@doc """
Feed a snake and grow its tail.
A snake is fed by restoring its health to a given value, and adding a part to
its tail. The new tail part is added in the same position as the current tail
(the last body part). Tail body parts may overlap until the snake moves.
Returns the modified snake.
## Examples
iex> body = [Snek.Board.Point.new(1, 0), Snek.Board.Point.new(1, 1), Snek.Board.Point.new(1, 2)]
iex> snake = %Snake{id: "mysnek", state: :alive, health: 98, body: body}
iex> Snake.feed(snake, 100)
%Snake{
id: "mysnek",
state: :alive,
health: 100,
body: [
Snek.Board.Point.new(1, 0),
Snek.Board.Point.new(1, 1),
Snek.Board.Point.new(1, 2),
Snek.Board.Point.new(1, 2)
]
}
"""
@doc since: "0.1.0"
@spec feed(t, non_neg_integer) :: t
def feed(snake, new_health) do
%Snake{snake | health: new_health}
|> grow
end
@doc """
Grow a snake's tail.
Adds a part to the snake's tail. The new tail part is added in the same
position as the current tail (the last body part). Tail body parts may
overlap until the snake moves.
This is equivalent to the tail growth in `feed/2` but without affecting the
snake's health.
Returns the modified snake.
## Examples
iex> body = [Snek.Board.Point.new(1, 0), Snek.Board.Point.new(1, 1), Snek.Board.Point.new(1, 2)]
iex> snake = %Snake{id: "mysnek", state: :alive, health: 98, body: body}
iex> Snake.grow(snake)
%Snake{
id: "mysnek",
state: :alive,
health: 98,
body: [
Snek.Board.Point.new(1, 0),
Snek.Board.Point.new(1, 1),
Snek.Board.Point.new(1, 2),
Snek.Board.Point.new(1, 2)
]
}
iex> snake = %Snake{id: "mysnek", state: :alive, health: 100, body: []}
iex> snake == Snake.grow(snake)
true
"""
@doc since: "0.1.0"
@spec grow(t) :: t
def grow(snake) do
if length(snake.body) > 0 do
tail = Enum.at(snake.body, -1)
new_body = snake.body ++ [tail]
%Snake{snake | body: new_body}
else
snake
end
end
@doc """
Returns true if and only if the snake is alive (not eliminated).
This does not check whether the snake's elimination status should be changed,
it is just a helper to check current state.
## Examples
iex> body = [Snek.Board.Point.new(1, 0), Snek.Board.Point.new(1, 1), Snek.Board.Point.new(1, 2)]
iex> Snake.alive?(%Snake{id: "mysnek", state: :alive, health: 98, body: body})
true
iex> body = [Snek.Board.Point.new(1, 0), Snek.Board.Point.new(1, 1), Snek.Board.Point.new(1, 2)]
iex> Snake.alive?(%Snake{id: "mysnek", state: {:eliminated, :starvation}, health: 0, body: body})
false
"""
@doc since: "0.1.0"
@spec alive?(t) :: boolean
def alive?(%Snake{state: :alive}), do: true
def alive?(%Snake{}), do: false
@doc """
Returns true if and only if the snake is eliminated.
This does not check whether the snake's elimination status should be changed,
it is just a helper to check current state.
## Examples
iex> body = [Snek.Board.Point.new(1, 0), Snek.Board.Point.new(1, 1), Snek.Board.Point.new(1, 2)]
iex> Snake.eliminated?(%Snake{id: "mysnek", state: :alive, health: 98, body: body})
false
iex> body = [Snek.Board.Point.new(1, 0), Snek.Board.Point.new(1, 1), Snek.Board.Point.new(1, 2)]
iex> Snake.eliminated?(%Snake{id: "mysnek", state: {:eliminated, :starvation}, health: 0, body: body})
true
"""
@doc since: "0.1.0"
@spec eliminated?(t) :: boolean
def eliminated?(%Snake{state: :alive}), do: false
def eliminated?(%Snake{}), do: true
end
| lib/snek/board/snake.ex | 0.915013 | 0.539772 | snake.ex | starcoder |
defmodule Money.Financial do
@moduledoc """
A set of financial functions, primarily related to discounted cash flows.
Some of the algorithms are from [finance formulas](http://www.financeformulas.net)
"""
alias Cldr.Math
@doc """
Calculates the future value for a present value, an interest rate
and a number of periods.
* `present_value` is a %Money{} representation of the present value
* `interest_rate` is a float representation of an interest rate. For
example, 12% would be represented as `0.12`
* `periods` is an integer number of periods
## Examples
iex> Money.Financial.future_value Money.new(:USD, 10000), 0.08, 1
#Money<:USD, 10800.00>
iex> Money.Financial.future_value Money.new(:USD, 10000), 0.04, 2
#Money<:USD, 10816.0000>
iex> Money.Financial.future_value Money.new(:USD, 10000), 0.02, 4
#Money<:USD, 10824.32160000>
"""
@spec future_value(Money.t(), number, number) :: Money.t()
@one Decimal.new(1)
def future_value(%Money{amount: amount} = money, interest_rate, periods)
when is_number(interest_rate) and is_number(periods) do
fv =
interest_rate
|> Decimal.from_float()
|> Decimal.add(@one)
|> Math.power(periods)
|> Decimal.mult(amount)
%{money | amount: fv}
end
@doc """
Calculates the future value for a list of cash flows and an interest rate.
* `flows` is a list of tuples representing a cash flow. Each flow is
represented as a tuple of the form `{period, %Money{}}`
* `interest_rate` is a float representation of an interest rate. For
example, 12% would be represented as `0.12`
## Example
iex> Money.Financial.future_value([{4, Money.new(:USD, 10000)}, {5, Money.new(:USD, 10000)}, {6, Money.new(:USD, 10000)}], 0.13)
#Money<:USD, 34068.99999999999999999999999>
iex> Money.Financial.future_value [{0, Money.new(:USD, 5000)},{1, Money.new(:USD, 2000)}], 0.12
#Money<:USD, 7600.000000000000000000000000>
"""
@spec future_value(list({number, Money.t()}), number) :: Money.t()
def future_value(flows, interest_rate)
def future_value([{period, %Money{}} | _other_flows] = flows, interest_rate)
when is_integer(period) and is_number(interest_rate) do
{max_period, _} = Enum.max(flows)
present_value(flows, interest_rate)
|> future_value(interest_rate, max_period)
end
@doc """
Calculates the present value for future value, an interest rate
and a number of periods
* `future_value` is a %Money{} representation of the future value
* `interest_rate` is a float representation of an interest rate. For
example, 12% would be represented as `0.12`
* `periods` is an integer number of periods
## Examples
iex> Money.Financial.present_value Money.new(:USD, 100), 0.08, 2
#Money<:USD, 85.73388203017832647462277092>
iex> Money.Financial.present_value Money.new(:USD, 1000), 0.10, 20
#Money<:USD, 148.6436280241436864020760472>
"""
@spec present_value(Money.t(), number, number) :: Money.t()
def present_value(%Money{amount: amount} = money, interest_rate, periods)
when is_number(interest_rate) and is_number(periods) and periods >= 0 do
pv_1 =
interest_rate
|> Decimal.from_float()
|> Decimal.add(@one)
|> Math.power(periods)
%{money | amount: Decimal.div(amount, pv_1)}
end
@doc """
Calculates the present value for a list of cash flows and an interest rate.
* `flows` is a list of tuples representing a cash flow. Each flow is
represented as a tuple of the form `{period, %Money{}}`
* `interest_rate` is a float representation of an interest rate. For
example, 12% would be represented as `0.12`
## Example
iex> Money.Financial.present_value([{4, Money.new(:USD, 10000)}, {5, Money.new(:USD, 10000)}, {6, Money.new(:USD, 10000)}], 0.13)
#Money<:USD, 16363.97191111964880256655144>
iex> Money.Financial.present_value [{0, Money.new(:USD, -1000)},{1, Money.new(:USD, -4000)}], 0.1
#Money<:USD, -4636.363636363636363636363636>
"""
@spec present_value(list({integer, Money.t()}), number) :: Money.t()
def present_value(flows, interest_rate)
def present_value([{period, %Money{}} | _other_flows] = flows, interest_rate)
when is_integer(period) and is_number(interest_rate) do
validate_same_currency!(flows)
do_present_value(flows, interest_rate)
end
defp do_present_value({period, %Money{} = flow}, interest_rate)
when is_integer(period) and is_number(interest_rate) do
present_value(flow, interest_rate, period)
end
defp do_present_value([{period, %Money{}} = flow | []], interest_rate)
when is_integer(period) and is_number(interest_rate) do
do_present_value(flow, interest_rate)
end
defp do_present_value([{period, %Money{}} = flow | other_flows], interest_rate)
when is_integer(period) and is_number(interest_rate) do
do_present_value(flow, interest_rate)
|> Money.add!(do_present_value(other_flows, interest_rate))
end
@doc """
Calculates the net present value of an initial investment, a list of
cash flows and an interest rate.
* `flows` is a list of tuples representing a cash flow. Each flow is
represented as a tuple of the form `{period, %Money{}}`
* `interest_rate` is a float representation of an interest rate. For
example, 12% would be represented as `0.12`
* `investment` is a %Money{} struct representing the initial investment
## Example
iex> flows = [{0, Money.new(:USD, 5000)},{1, Money.new(:USD, 2000)},{2, Money.new(:USD, 500)},{3, Money.new(:USD,10_000)}]
iex> Money.Financial.net_present_value flows, 0.08, Money.new(:USD, 100)
#Money<:USD, 15118.84367220444038002337042>
iex> Money.Financial.net_present_value flows, 0.08
#Money<:USD, 15218.84367220444038002337042>
"""
@spec net_present_value(list({integer, Money.t()}), number) :: Money.t()
def net_present_value([{period, %Money{currency: currency}} | _] = flows, interest_rate)
when is_integer(period) and is_number(interest_rate) do
net_present_value(flows, interest_rate, Money.zero(currency))
end
@spec net_present_value(list({integer, Money.t()}), number, Money.t()) :: Money.t()
def net_present_value([{period, %Money{}} | _] = flows, interest_rate, %Money{} = investment)
when is_integer(period) and is_number(interest_rate) do
validate_same_currency!(investment, flows)
present_value(flows, interest_rate)
|> Money.sub!(investment)
end
@doc """
Calculates the net present value of an initial investment, a future value,
an interest rate and a number of periods.
* `investment` is a %Money{} struct representing the initial investment
* `future_value` is a %Money{} representation of the future value
* `interest_rate` is a float representation of an interest rate. For
example, 12% would be represented as `0.12`
* `periods` is an integer number of periods
## Example
iex> Money.Financial.net_present_value Money.new(:USD, 10000), 0.13, 2
#Money<:USD, 7831.466833737959119743127888>
iex> Money.Financial.net_present_value Money.new(:USD, 10000), 0.13, 2, Money.new(:USD, 100)
#Money<:USD, 7731.466833737959119743127888>
"""
@spec net_present_value(Money.t(), number, number) :: Money.t()
def net_present_value(%Money{currency: currency} = future_value, interest_rate, periods) do
net_present_value(future_value, interest_rate, periods, Money.new(currency, 0))
end
@spec net_present_value(Money.t(), number, number, Money.t()) :: Money.t()
def net_present_value(%Money{} = future_value, interest_rate, periods, %Money{} = investment) do
present_value(future_value, interest_rate, periods)
|> Money.sub!(investment)
end
@doc """
Calculates the internal rate of return for a given list of cash flows.
* `flows` is a list of tuples representing a cash flow. Each flow is
represented as a tuple of the form `{period, %Money{}}`
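## Example
For illustration (not a doctest, since the result is an iteratively refined float): an
initial outflow of 1,000 followed by three inflows of 500 has an internal rate of
return of roughly 0.23.
    flows = [
      {0, Money.new(:USD, -1000)},
      {1, Money.new(:USD, 500)},
      {2, Money.new(:USD, 500)},
      {3, Money.new(:USD, 500)}
    ]
    Money.Financial.internal_rate_of_return(flows)
    #=> approximately 0.234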
"""
@spec internal_rate_of_return(list({integer, Money.t()})) :: number()
def internal_rate_of_return([{_period, %Money{}} | _other_flows] = flows) do
# estimate_m = sum_of_inflows(flows)
# |> Kernel./(abs(Math.to_float(amount)))
# |> :math.pow(2 / (number_of_flows(flows) + 1))
# |> Kernel.-(1)
# estimate_n = :math.pow(1 + estimate_m, )
estimate_n = 0.2
estimate_m = 0.1
do_internal_rate_of_return(flows, estimate_m, estimate_n)
end
@irr_precision 0.000001
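# Secant-method iteration on the NPV curve: refine the rate estimate until the NPVs
# at the two most recent estimates differ by less than @irr_precision.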
defp do_internal_rate_of_return(flows, estimate_m, estimate_n) do
npv_n = net_present_value(flows, estimate_n).amount |> Math.to_float()
npv_m = net_present_value(flows, estimate_m).amount |> Math.to_float()
if abs(npv_n - npv_m) > @irr_precision do
estimate_o = estimate_n - (estimate_n - estimate_m) / (npv_n - npv_m) * npv_n
do_internal_rate_of_return(flows, estimate_n, estimate_o)
else
estimate_n
end
end
@doc """
Calculates the effective interest rate for a given present value,
a future value and a number of periods.
* `present_value` is a %Money{} representation of the present value
* `future_value` is a %Money{} representation of the future value
* `periods` is an integer number of periods
## Examples
iex> Money.Financial.interest_rate Money.new(:USD, 10000), Money.new(:USD, 10816), 2
#Decimal<0.04>
iex> Money.Financial.interest_rate Money.new(:USD, 10000), Money.new(:USD, "10824.3216"), 4
#Decimal<0.02>
"""
@spec interest_rate(Money.t(), Money.t(), number) :: Decimal.t()
def interest_rate(
%Money{currency: pv_currency, amount: pv_amount} = _present_value,
%Money{currency: fv_currency, amount: fv_amount} = _future_value,
periods
)
when pv_currency == fv_currency and is_integer(periods) and periods > 0 do
fv_amount
|> Decimal.div(pv_amount)
|> Math.root(periods)
|> Decimal.sub(@one)
end
@doc """
Calculates the number of periods between a present value and
a future value with a given interest rate.
* `present_value` is a %Money{} representation of the present value
* `future_value` is a %Money{} representation of the future value
* `interest_rate` is a float representation of an interest rate. For
example, 12% would be represented as `0.12`
## Example
iex> Money.Financial.periods Money.new(:USD, 1500), Money.new(:USD, 2000), 0.005
#Decimal<57.68013595323872502502238648>
"""
@spec periods(Money.t(), Money.t(), float) :: Decimal.t()
def periods(
%Money{currency: pv_currency, amount: pv_amount} = _present_value,
%Money{currency: fv_currency, amount: fv_amount} = _future_value,
interest_rate
)
when pv_currency == fv_currency and is_float(interest_rate) and interest_rate > 0 do
Decimal.div(
Math.log(Decimal.div(fv_amount, pv_amount)),
Math.log(Decimal.add(@one, Decimal.from_float(interest_rate)))
)
end
@doc """
Calculates the payment for a given loan or annuity given a
present value, an interest rate and a number of periods.
* `present_value` is a %Money{} representation of the present value
* `interest_rate` is a float representation of an interest rate. For
example, 12% would be represented as `0.12`
* `periods` is an integer number of periods
## Example
iex> Money.Financial.payment Money.new(:USD, 100), 0.12, 20
#Money<:USD, 13.38787800396606622792492299>
"""
@spec payment(Money.t(), float, number) :: Money.t()
def payment(
%Money{amount: pv_amount} = present_value,
interest_rate,
periods
)
when is_float(interest_rate) and interest_rate > 0 and is_number(periods) and periods > 0 do
interest_rate = Decimal.from_float(interest_rate)
p1 = Decimal.mult(pv_amount, interest_rate)
p2 = Decimal.sub(@one, Decimal.add(@one, interest_rate) |> Math.power(-periods))
%{present_value | amount: Decimal.div(p1, p2)}
end
defp validate_same_currency!(%Money{} = flow, flows) do
validate_same_currency!([{0, flow} | flows])
end
defp validate_same_currency!(flows) do
number_of_currencies =
flows
|> Enum.map(fn {_period, %Money{currency: currency}} -> currency end)
|> Enum.uniq()
|> Enum.count()
if number_of_currencies > 1 do
raise ArgumentError,
message:
"More than one currency found in cash flows; " <>
"implicit currency conversion is not supported. Cash flows: " <> inspect(flows)
end
end
end
| lib/money/financial.ex | 0.932215 | 0.779532 | financial.ex | starcoder |
defmodule Trunk.Storage.Filesystem do
@moduledoc """
A `Trunk.Storage` implementation for the local file system.
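As a quick sketch of the flow (the paths below are illustrative), files are stored,
resolved and removed relative to a configured base `path:`:
```
Trunk.Storage.Filesystem.save("avatars/", "photo.jpg", "/tmp/upload.jpg", path: "/opt/uploads")
Trunk.Storage.Filesystem.build_uri("avatars/", "photo.jpg", base_uri: "/uploads/")
#=> "/uploads/avatars/photo.jpg"
Trunk.Storage.Filesystem.delete("avatars/", "photo.jpg", path: "/opt/uploads")
```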
"""
@behaviour Trunk.Storage
@doc ~S"""
Saves the file to the file system.
- `directory` - The relative directory within which to store the file
- `filename` - The name of the file to be saved
- `source_path` - The full path to the file to be stored. This is a path to the uploaded file or a temporary file that has undergone transformation
- `opts` - The options for the storage system
- `path:` (required) The base path within which to save files
- `acl:` (optional) The file mode to store the file (accepts octal `0o644` or string `"0644"`). See `File.chmod/2` for more info.
## Example:
The file will be saved to /opt/uploads/path/to/file.ext
```
Trunk.Storage.Filesystem.save("path/to/", "file.ext", "/tmp/uploaded_file.ext", path: "/opt/uploads")
```
"""
@spec save(String.t(), String.t(), String.t(), keyword) :: :ok | {:error, :file.posix()}
def save(directory, filename, source_path, opts \\ []) do
base_directory = Keyword.fetch!(opts, :path)
save_path = Path.join(base_directory, directory)
file_path = Path.join(save_path, filename)
with :ok <- File.mkdir_p(save_path),
     :ok <- File.cp(source_path, file_path) do
  case parse_acl(Keyword.get(opts, :acl)) do
    nil -> :ok
    acl -> File.chmod(file_path, acl)
  end
end
end
defp parse_acl(nil), do: nil
defp parse_acl(<<mode::binary>>) do
case Integer.parse(mode, 8) do
{number, ""} ->
number
_ ->
nil
nil
end
end
defp parse_acl(mode) when is_number(mode), do: mode
defp parse_acl(mode) when is_atom(mode), do: nil
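@doc ~S"""
Copies a previously stored file out of the file system.
- `directory` - The relative directory within which the file is stored
- `filename` - The name of the file that was previously saved
- `destination_path` - The full path the file should be copied to
- `opts` - The options for the storage system
  - `path:` (required) The base path within which files are saved
"""
@spec retrieve(String.t(), String.t(), String.t(), keyword) :: :ok | {:error, :file.posix()}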
def retrieve(directory, filename, destination_path, opts \\ []) do
base_directory = Keyword.fetch!(opts, :path)
file_path = base_directory |> Path.join(directory)
File.cp(Path.join(file_path, filename), destination_path)
end
@doc ~S"""
Deletes the file from the file system.
- `directory` - The relative directory within which the file is stored
- `filename` - The name of the file to be deleted
- `opts` - The options for the storage system
- `path:` (required) The base path within which to save files
## Example:
The file will be removed from /opt/uploads/path/to/file.ext
```
Trunk.Storage.Filesystem.delete("path/to/", "file.ext", path: "/opt/uploads")
```
"""
@spec delete(String.t(), String.t(), keyword) :: :ok | {:error, :file.posix()}
def delete(directory, filename, opts \\ []) do
base_directory = Keyword.fetch!(opts, :path)
file_path = base_directory |> Path.join(directory)
case File.rm(Path.join(file_path, filename)) do
:ok -> :ok
{:error, :enoent} -> :ok
error -> error
end
end
@doc ~S"""
Generates a URL for the storage directory and file
- `directory` - The relative directory where the file is saved
- `filename` - The name of the file that was previously saved
- `opts` - The options for the storage system
- `base_uri:` (optional) The base URI to join the path to
## Example:
```
Trunk.Storage.Filesystem.build_url("path/to", "file.ext")
#=> "path/to/file.ext"
Trunk.Storage.Filesystem.build_url("path/to", "file.ext", base_uri: "http://example.com")
#=> "http://example.com/path/to/file.ext"
Trunk.Storage.Filesystem.build_url("path/to", "file.ext", base_uri: "http://example.com/uploads/")
#=> "http://example.com/uploads/path/to/file.ext"
Trunk.Storage.Filesystem.build_url("path/to", "file.ext", base_uri: "/uploads/")
#=> "/uploads/path/to/file.ext"
```
"""
def build_uri(directory, filename, opts \\ []) do
base_uri = Keyword.get(opts, :base_uri) || ""
base_uri |> Path.join(directory) |> Path.join(filename)
end
end
| lib/trunk/storage/filesystem.ex | 0.790369 | 0.765418 | filesystem.ex | starcoder |
defmodule X509.Certificate.Validity do
@moduledoc """
Convenience functions for creating `:Validity` records for use in
certificates. The `:Validity` record represents the X.509 Validity
type, defining the validity of a certificate in terms of `notBefore`
and `notAfter` timestamps.
"""
import X509.ASN1
@typedoc "X.509 Time type (UTCTime or GeneralizedTime)"
@type time :: {:utcTime | :generalTime, charlist()}
@typedoc "`:Validity` record, as used in Erlang's `:public_key` module"
@type t :: X509.ASN1.record(:validity)
@default_backdate_seconds 5 * 60
@seconds_per_day 24 * 60 * 60
@doc """
Creates a new `:Validity` record with the given start and end timestamps
in DateTime format.
## Examples:
iex> {:ok, not_before, 0} = DateTime.from_iso8601("2018-01-01T00:00:00Z")
iex> {:ok, not_after, 0} = DateTime.from_iso8601("2018-12-31T23:59:59Z")
iex> X509.Certificate.Validity.new(not_before, not_after)
{:Validity, {:utcTime, '180101000000Z'}, {:utcTime, '181231235959Z'}}
iex> {:ok, not_before, 0} = DateTime.from_iso8601("2051-01-01T00:00:00Z")
iex> {:ok, not_after, 0} = DateTime.from_iso8601("2051-12-31T23:59:59Z")
iex> X509.Certificate.Validity.new(not_before, not_after)
{:Validity, {:generalTime, '20510101000000Z'},
{:generalTime, '20511231235959Z'}}
"""
@spec new(DateTime.t(), DateTime.t()) :: t()
def new(%DateTime{} = not_before, %DateTime{} = not_after) do
validity(
notBefore: X509.DateTime.new(not_before),
notAfter: X509.DateTime.new(not_after)
)
end
@doc """
Creates a new `:Validity` record with a `notAfter` value a given number of
days in the future. The `notBefore` value can be backdated (by default
#{@default_backdate_seconds} seconds) to prevent newly issued certificates
from being rejected by peers due to poorly synchronized clocks.
For CA certificates, consider using `new/2` instead, with a `not_before`
value that does not reveal the exact time when the keypair was generated.
This minimizes information leakage about the state of the RNG.
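## Examples:
The result depends on the current time, so the calls below are only indicative:
    # Valid from roughly five minutes ago until one year from now
    X509.Certificate.Validity.days_from_now(365)
    # Backdate by a full hour instead of the default five minutes
    X509.Certificate.Validity.days_from_now(365, 3600)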
"""
@spec days_from_now(integer(), non_neg_integer()) :: t()
def days_from_now(days, backdate_seconds \\ @default_backdate_seconds) do
validity(
notBefore: X509.DateTime.new(-backdate_seconds),
notAfter: X509.DateTime.new(days * @seconds_per_day)
)
end
end
| lib/x509/certificate/validity.ex | 0.917326 | 0.480662 | validity.ex | starcoder |
defmodule Cand.Socket do
@moduledoc """
TCP socket handler for socketcand endpoint.
This module provides functions for configuration, read/write CAN frames.
`Cand.Socket` is implemented as a `__using__` macro so that you can put it in any module,
you can initialize your Socket manually (see `test/socket_tests`) or by overwriting `configuration/1`,
`cyclic_frames/1` and `subscriptions/1` to autoset the configuration, cyclic_frames and subscription items.
It also helps you to handle new CAN frames and subscription events by overwriting `handle_frame/2` callback.
The following example shows a module that takes its configuration from the environment (see `test/terraform_test.exs`):
```elixir
defmodule MySocket do
use Cand.Socket
# Use the `init` function to configure your Socket.
def init({parent_pid, 103} = _user_init_state, socket_pid) do
%{parent_pid: parent_pid, socket_pid: socket_pid}
end
def configuration(_user_init_state), do: Application.get_env(:my_socket, :configuration, [])
def cyclic_frames(_user_init_state), do: Application.get_env(:my_socket, :cyclic_frames, [])
def subscriptions(_user_init_state), do: Application.get_env(:my_socket, :subscriptions, [])
def handle_frame(new_frame, state) do
send(state.parent_pid, {:handle_frame, new_frame})
state
end
end
```
Because it is a small GenServer, it accepts the same [options](https://hexdocs.pm/elixir/GenServer.html#module-how-to-supervise) for supervision
to configure the child spec and passes them along to `GenServer`:
```elixir
defmodule MyModule do
use Cand.Socket, restart: :transient, shutdown: 10_000
end
```
"""
use GenServer
require Logger
defmodule State do
@moduledoc """
* last_cmds: A record of the last configuration commands, resent in case of an
unscheduled reconnection.
* port: socketcand daemon port, default => 29536.
* host: network interface IP, default => {127, 0, 0, 1}.
* socket: the TCP socket returned by `:gen_tcp.connect/4`.
* socket_opts: the options the socket was opened with (e.g. `[active: true]`).
* controlling_process: parent process.
* reconnect: set once connected; when `true`, an unexpected disconnection
notifies the controlling process.
"""
defstruct last_cmds: [],
port: 29536,
host: {127, 0, 0, 1},
socket: nil,
socket_opts: [],
controlling_process: nil,
reconnect: false
end
@type config_options ::
{:host, tuple()}
| {:port, integer()}
| {:interface, binary()}
| {:mode, atom()}
@doc """
Optional callback that gets the Socket connection and CAN bus parameters.
"""
@callback configuration(term()) :: config_options
@callback cyclic_frames(term()) :: list()
@callback subscriptions(term()) :: list()
@callback handle_frame({integer(), binary(), binary()}, term()) :: term()
@callback handle_disconnect(term()) :: term()
@callback handle_error(term(), term()) :: term()
defmacro __using__(opts) do
quote location: :keep, bind_quoted: [opts: opts] do
use GenServer, Keyword.drop(opts, [:configuration])
@behaviour Cand.Socket
def start_link(user_initial_params \\ []) do
GenServer.start_link(__MODULE__, user_initial_params, unquote(opts))
end
@impl true
def init(user_initial_params) do
send(self(), :init)
{:ok, user_initial_params}
end
@impl true
def handle_info(:init, user_initial_params) do
# Socket Terraform
{:ok, cs_pid} = Cand.Socket.start_link()
configuration = apply(__MODULE__, :configuration, [user_initial_params])
cyclic_frames = apply(__MODULE__, :cyclic_frames, [user_initial_params])
subscriptions = apply(__MODULE__, :subscriptions, [user_initial_params])
# configuration is a keyword list (see config_options)
set_socket_connection(cs_pid, configuration)
set_socket_bus(cs_pid, configuration)
# cyclic_frames and subscriptions are lists of argument lists passed to Cand.Protocol
set_cyclic_frames(cs_pid, cyclic_frames)
set_subscriptions(cs_pid, subscriptions)
# User initialization.
user_state = apply(__MODULE__, :init, [user_initial_params, cs_pid])
{:noreply, user_state}
end
def handle_info({:frame, { _can_id, _timestamp, _frame} = new_frame}, state) do
state = apply(__MODULE__, :handle_frame, [new_frame, state])
{:noreply, state}
end
def handle_info(:disconnect, state) do
state = apply(__MODULE__, :handle_disconnect, [state])
{:noreply, state}
end
def handle_info({:error, error_data}, state) do
state = apply(__MODULE__, :handle_error, [error_data, state])
{:noreply, state}
end
@impl true
def handle_frame(new_frame_data, state) do
require Logger
Logger.warn(
"No handle_frame/3 clause in #{__MODULE__} provided for #{inspect(new_frame_data)}"
)
state
end
@impl true
def handle_disconnect(state) do
require Logger
Logger.warn("No handle_disconnect/1 clause in #{__MODULE__} provided")
state
end
@impl true
def handle_error(error, state) do
require Logger
Logger.warn(
"No handle_error/2 clause in #{__MODULE__} provided for #{inspect(error)}"
)
state
end
@impl true
def configuration(_user_init_state), do: []
@impl true
def cyclic_frames(_user_init_state), do: []
@impl true
def subscriptions(_user_init_state), do: []
defp set_socket_connection(_cs_pid, nil), do: :ok
defp set_socket_connection(cs_pid, configuration) do
with host <- Keyword.get(configuration, :host, {127, 0, 0, 1}),
true <- is_tuple(host),
{:ok, ip_host} <- ip_to_tuple(host),
port <- Keyword.get(configuration, :port, 29536),
true <- is_integer(port) do
Cand.Socket.connect(cs_pid, ip_host, port, [active: true])
else
_ ->
require Logger
Logger.warn(
"Invalid Socket Connection params: #{inspect(configuration)} provided by #{
__MODULE__
}"
)
end
end
defp set_socket_bus(_cs_pid, nil), do: :ok
defp set_socket_bus(cs_pid, configuration) do
with interface <- Keyword.get(configuration, :interface, nil),
true <- is_binary(interface),
:ok <- Cand.Protocol.open(cs_pid, interface),
mode <- Keyword.get(configuration, :mode, :raw_mode),
true <- mode in [:bcm_mode, :raw_mode, :control_mode, :iso_tp_mode],
:ok <- apply(Cand.Protocol, mode, [cs_pid]) do
:ok
else
_ ->
require Logger
Logger.warn(
"Invalid Socket Bus params: #{inspect(configuration)} provided by #{__MODULE__}"
)
end
end
defp set_cyclic_frames(socket, cyclic_frames) do
Enum.each(cyclic_frames, fn cyclic_frame_data ->
apply(Cand.Protocol, :add_cyclic_frame, [socket] ++ cyclic_frame_data)
end)
end
defp set_subscriptions(socket, subscriptions) do
Enum.each(subscriptions, fn subscription_data ->
apply(Cand.Protocol, :subscribe, [socket] ++ subscription_data)
end)
end
defguardp is_ipv4_octet(v) when v >= 0 and v <= 255
defp ip_to_tuple({a, b, c, d} = ipa)
when is_ipv4_octet(a) and is_ipv4_octet(b) and is_ipv4_octet(c) and is_ipv4_octet(d),
do: {:ok, ipa}
defp ip_to_tuple(ipa) when is_binary(ipa) do
ipa_charlist = to_charlist(ipa)
case :inet.parse_address(ipa_charlist) do
{:ok, addr} -> {:ok, addr}
{:error, :einval} -> {:error, "Invalid IP address: #{ipa}"}
end
end
defp ip_to_tuple(ipa), do: {:error, "Invalid IP address: #{inspect(ipa)}"}
defoverridable start_link: 0,
start_link: 1,
configuration: 1,
cyclic_frames: 1,
subscriptions: 1,
handle_frame: 2,
handle_error: 2,
handle_disconnect: 1
end
end
def init(state), do: {:ok, state}
def start_link do
GenServer.start_link(__MODULE__, %State{controlling_process: self()})
end
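@doc """
Connects to a socketcand endpoint.
The values below assume a local socketcand daemon on the default port; adjust as needed:
    {:ok, socket} = Cand.Socket.start_link()
    Cand.Socket.connect(socket, {127, 0, 0, 1}, 29536, active: true)
"""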
def connect(pid, host, port, opts \\ [active: false]) do
GenServer.call(pid, {:connect, host, port, opts})
end
def disconnect(pid) do
GenServer.call(pid, :disconnect)
end
def send(pid, cmd, timeout \\ :infinity) do
GenServer.call(pid, {:send, cmd, timeout})
end
def receive(pid, timeout \\ :infinity) do
GenServer.call(pid, {:receive, timeout})
end
def handle_call({:connect, host, port, [active: false] = opts}, _from_, state) do
with {:ok, socket} <- :gen_tcp.connect(host, port, opts),
{:ok, message} <- :gen_tcp.recv(socket, 0),
response <- parse_message(message) do
{:reply, response, %{state | socket: socket, host: host, port: port, socket_opts: opts, reconnect: true}}
else
error_reason ->
{:reply, error_reason, state}
end
end
def handle_call({:connect, host, port, opts}, _from_, state) do
with {:ok, socket} <- :gen_tcp.connect(host, port, opts) do
{:reply, :ok, %{state | socket: socket, host: host, port: port, socket_opts: opts}}
else
error_reason ->
{:reply, error_reason, state}
end
end
def handle_call(_call, _from, %{socket: nil} = state) do
Logger.warn("(#{__MODULE__}) There is no available socket. #{inspect(state)}")
{:reply, {:error, :einval}, %{state | socket: nil}}
end
# wait for response
def handle_call({:send, cmd, timeout}, _from, %{socket_opts: [active: false]} = state) do
Logger.debug("(#{__MODULE__}) Sending: #{cmd}. #{inspect(state)}")
with :ok <- :gen_tcp.send(state.socket, cmd),
new_cmds <- add_new_cmd(cmd, state.last_cmds),
{:ok, message} <- receive_reponse(cmd, state.socket, timeout),
response <- parse_messages(message) do
{:reply, response, %{state | last_cmds: new_cmds}}
else
error_reason ->
{:reply, error_reason, %{state | socket: nil}}
end
end
def handle_call({:send, cmd, _timeout}, _from, %{last_cmds: cmds} = state) do
Logger.debug("(#{__MODULE__}) Sending: #{cmd}. #{inspect(state)}")
with :ok <- :gen_tcp.send(state.socket, cmd),
new_cmds <- add_new_cmd(cmd, cmds) do
{:reply, :ok, %{state | last_cmds: new_cmds}}
else
error_reason ->
{:reply, error_reason, %{state | socket: nil}}
end
end
def handle_call({:receive, timeout}, _from, %{socket_opts: [active: false]} = state) do
Logger.debug("(#{__MODULE__}) Reading. #{inspect(state)}")
with {:ok, message} <- :gen_tcp.recv(state.socket, 0, timeout),
response <- parse_messages(message) do
{:reply, response, state}
else
error_reason ->
{:reply, error_reason, %{state | socket: nil}}
end
end
def handle_call({:receive, _timeout}, _from, state) do
Logger.warn("(#{__MODULE__}) The socket is configured as passive. #{inspect(state)}")
{:reply, {:error, :einval}, state}
end
def handle_call(:disconnect, _from, %{socket: socket} = state) do
with :ok <- :gen_tcp.close(socket) do
{:reply, :ok, %{state | reconnect: false}}
else
error_reason ->
{:reply, error_reason, %{state | socket: nil}}
end
end
# Active Mode
def handle_info({:tcp, _port, '< hi >'}, state) do
Logger.info("(#{__MODULE__}) Connected. #{inspect(state)}")
{:noreply, %{state | reconnect: true}}
end
def handle_info({:tcp, _port, '< ok >'}, state) do
Logger.debug("(#{__MODULE__}) OK. #{inspect(state)}")
{:noreply, state}
end
def handle_info({:tcp, _port, '< echo >'}, state) do
Logger.debug("(#{__MODULE__}) Echo received. #{inspect(state)}")
{:noreply, state}
end
def handle_info({:tcp, _port, message}, %{controlling_process: p_pid} = state) do
message
|> parse_messages
|> Enum.map(fn message ->
dispatch(message, p_pid)
end)
{:noreply, state}
end
def handle_info({:tcp_closed, _port}, %{reconnect: false} = state) do
Logger.info("(#{__MODULE__}) Expected disconnection. #{inspect(state)}")
{:noreply, state}
end
def handle_info({:tcp_closed, _port}, state) do
Logger.warn("(#{__MODULE__}) Unexpected disconnection. Reconnect...")
Kernel.send(state.controlling_process, :disconnect)
{:noreply, state}
end
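# Only configuration commands are recorded for replay after a reconnection;
# one-off send/sendpdu frames are not.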
defp add_new_cmd("< send " <> _payload, last_cmds), do: last_cmds
defp add_new_cmd("< sendpdu " <> _payload, last_cmds), do: last_cmds
defp add_new_cmd(cmd, last_cmds), do: Enum.uniq(last_cmds ++ [cmd])
defp receive_reponse("< send " <> _payload, _socket, _timeout), do: {:ok, '< ok >'}
defp receive_reponse("< sendpdu " <> _payload, _socket, _timeout), do: {:ok, '< ok >'}
defp receive_reponse(_cmd, socket, timeout), do: :gen_tcp.recv(socket, 0, timeout)
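# socketcand may deliver several "< ... >" messages in a single TCP packet;
# split them apart and parse each one individually.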
defp parse_messages(messages) do
messages
|> List.to_string()
|> String.split("><")
|> Enum.map(fn frame ->
frame
|> String.trim("<")
|> String.trim(">")
|> String.trim()
|> parse_message
end)
end
defp parse_message(message) when is_list(message) do
message
|> List.to_string()
|> String.trim("<")
|> String.trim(">")
|> String.trim()
|> parse_message()
end
defp parse_message("frame " <> payload) do
with [can_id_str, timestamp, can_frame] <- String.split(payload, " ", parts: 3),
can_frame_bin <- str_to_bin_frame(can_frame),
can_id_int <- String.to_integer(can_id_str, 16) do
{:frame, {can_id_int, timestamp, can_frame_bin}}
else
error_reason ->
{:error, error_reason}
end
end
defp parse_message("ok"), do: :ok
defp parse_message("hi"), do: :hi
defp parse_message("echo"), do: :ok
defp parse_message("error " <> message), do: {:error, message}
defp parse_message(message), do: {:error, message}
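# Converts a hex payload such as "11 22 33" into the binary <<0x11, 0x22, 0x33>>.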
defp str_to_bin_frame(can_frame) do
can_frame = String.replace(can_frame, " ", "")
for <<byte::binary-2 <- can_frame>>,reduce: <<>> do
acc -> acc <> <<String.to_integer(byte, 16)>>
end
end
defp dispatch({:frame, _frame_data} = message, p_pid), do: Kernel.send(p_pid, message)
defp dispatch({:error, _error_msg} = message, p_pid), do: Kernel.send(p_pid, message)
defp dispatch(message, _p_pid), do: Logger.debug("(#{__MODULE__}) #{inspect(message)}")
end
| lib/cand/socket.ex | 0.815894 | 0.664275 | socket.ex | starcoder |
defmodule Geometry.PointZM do
@moduledoc """
A point struct, representing a 3D point with a measurement.
"""
import Geometry.Guards
alias Geometry.{GeoJson, Hex, PointZM, WKB, WKT}
defstruct [:coordinate]
@blank " "
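# WKB encodings of an empty point: four IEEE 754 NaN doubles, keyed by endianness and mode.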
@empty %{
{:ndr, :hex} => "000000000000F87F000000000000F87F000000000000F87F000000000000F87F",
{:xdr, :hex} => "7FF80000000000007FF80000000000007FF80000000000007FF8000000000000",
{:ndr, :binary} =>
Hex.to_binary("000000000000F87F000000000000F87F000000000000F87F000000000000F87F"),
{:xdr, :binary} =>
Hex.to_binary("7FF80000000000007FF80000000000007FF80000000000007FF8000000000000")
}
@type t :: %PointZM{coordinate: Geometry.coordinate() | nil}
@doc """
Creates an empty `PointZM`.
## Examples
iex> PointZM.new()
%PointZM{coordinate: nil}
"""
@spec new :: t()
def new, do: %PointZM{}
@doc """
Creates a `PointZM` from the given `coordinate`.
## Examples
iex> PointZM.new([1.5, -2.1, 3, 4])
%PointZM{coordinate: [1.5, -2.1, 3, 4]}
"""
@spec new(Geometry.coordinate()) :: t()
def new([x, y, z, m] = coordinate) when is_coordinate(x, y, z, m) do
%PointZM{coordinate: coordinate}
end
@doc """
Creates a `PointZM` from the given `x`, `y`, `z`, and `m`.
## Examples
iex> PointZM.new(-1.1, 2.2, 3, 4)
%PointZM{coordinate: [-1.1, 2.2, 3, 4]}
"""
@spec new(number(), number(), number(), number()) :: t()
def new(x, y, z, m) when is_coordinate(x, y, z, m) do
%PointZM{coordinate: [x, y, z, m]}
end
@doc """
Returns `true` if the given `PointZM` is empty.
## Examples
iex> PointZM.empty?(PointZM.new())
true
iex> PointZM.empty?(PointZM.new(1, 2, 3, 4))
false
"""
@spec empty?(t()) :: boolean
def empty?(%PointZM{coordinate: coordinate}), do: is_nil(coordinate)
@doc """
Creates a `PointZM` from the given coordinate.
## Examples
iex> PointZM.from_coordinates([[-1, 1, 1, 1]])
%PointZM{coordinate: [-1, 1, 1, 1]}
"""
@spec from_coordinates(Geometry.coordinate() | [nil, ...]) :: t()
def from_coordinates([[x, y, z, m] = coordinate]) when is_coordinate(x, y, z, m) do
%PointZM{coordinate: coordinate}
end
def from_coordinates([x, y, z, m] = coordinate) when is_coordinate(x, y, z, m) do
%PointZM{coordinate: coordinate}
end
def from_coordinates([nil, nil, nil, nil]) do
%PointZM{}
end
@doc """
Returns the WKT representation for a `PointZM`. With option `:srid` an EWKT
representation with the SRID is returned.
## Examples
iex> PointZM.to_wkt(PointZM.new())
"Point ZM EMPTY"
iex> PointZM.to_wkt(PointZM.new(1.1, 2.2, 3.3, 4.4))
"Point ZM (1.1 2.2 3.3 4.4)"
iex> PointZM.to_wkt(PointZM.new(1.1, 2.2, 3.3, 4.4), srid: 4711)
"SRID=4711;Point ZM (1.1 2.2 3.3 4.4)"
"""
@spec to_wkt(t(), opts) :: Geometry.wkt()
when opts: [srid: Geometry.srid()]
def to_wkt(%PointZM{coordinate: coordinate}, opts \\ []) do
WKT.to_ewkt(<<"Point ZM ", to_wkt_point(coordinate)::binary()>>, opts)
end
@doc """
Returns an `:ok` tuple with the `PointZM` from the given WKT string. Otherwise
returns an `:error` tuple.
If the geometry contains an SRID the id is added to the tuple.
## Examples
iex> PointZM.from_wkt("Point ZM (-5.1 7.8 9.9 12)")
{:ok, %PointZM{coordinate: [-5.1, 7.8, 9.9, 12]}}
iex> PointZM.from_wkt("SRID=7219;Point ZM (-5.1 7.8 9.9 12)")
{:ok, {%PointZM{coordinate: [-5.1, 7.8, 9.9, 12]}, 7219}}
iex> PointZM.from_wkt("Point ZM EMPTY")
{:ok, %PointZM{}}
"""
@spec from_wkt(Geometry.wkt()) ::
{:ok, t()} | {t(), Geometry.srid()} | Geometry.wkt_error()
def from_wkt(wkt), do: WKT.to_geometry(wkt, PointZM)
@doc """
The same as `from_wkt/1`, but raises a `Geometry.Error` exception if it fails.
"""
@spec from_wkt!(Geometry.wkt()) :: t() | {t(), Geometry.srid()}
def from_wkt!(wkt) do
case WKT.to_geometry(wkt, PointZM) do
{:ok, geometry} -> geometry
error -> raise Geometry.Error, error
end
end
@doc """
Returns the GeoJSON term of a `PointZM`.
## Examples
iex> PointZM.to_geo_json(PointZM.new(1, 2, 3, 4))
%{"type" => "Point", "coordinates" => [1, 2, 3, 4]}
"""
@spec to_geo_json(t()) :: Geometry.geo_json_term()
def to_geo_json(%PointZM{coordinate: coordinate}) when not is_nil(coordinate) do
%{
"type" => "Point",
"coordinates" => coordinate
}
end
@doc """
Returns an `:ok` tuple with the `PointZM` from the given GeoJSON term.
Otherwise returns an `:error` tuple.
## Examples
iex> ~s({"type": "Point", "coordinates": [1.1, 2.2, 3.3, 4.4]})
iex> |> Jason.decode!()
iex> |> PointZM.from_geo_json()
{:ok, %PointZM{coordinate: [1.1, 2.2, 3.3, 4.4]}}
"""
@spec from_geo_json(Geometry.geo_json_term()) :: {:ok, t()} | Geometry.geo_json_error()
def from_geo_json(json), do: GeoJson.to_point(json, PointZM)
@doc """
The same as `from_geo_json/1`, but raises a `Geometry.Error` exception if it
fails.
"""
@spec from_geo_json!(Geometry.geo_json_term()) :: t()
def from_geo_json!(json) do
case GeoJson.to_point(json, PointZM) do
{:ok, geometry} -> geometry
error -> raise Geometry.Error, error
end
end
@doc """
Returns the WKB representation for a `PointZM`.
With option `:srid` an EWKB representation with the SRID is returned.
The option `:endian` indicates whether `:xdr` big endian or `:ndr` little
endian is returned. The default is `:xdr`.
The `:mode` determines whether a hex-string or binary is returned. The default
is `:binary`.
## Examples
iex> PointZM.to_wkb(PointZM.new(), mode: :hex)
"00C00000017FF80000000000007FF80000000000007FF80000000000007FF8000000000000"
iex> PointZM.to_wkb(PointZM.new(), endian: :ndr, mode: :hex)
"01010000C0000000000000F87F000000000000F87F000000000000F87F000000000000F87F"
iex> PointZM.to_wkb(PointZM.new(1.1, 2.2, 3.3, 4.4), endian: :xdr, mode: :hex)
"00C00000013FF199999999999A400199999999999A400A666666666666401199999999999A"
iex> PointZM.to_wkb(PointZM.new(1.1, 2.2, 3.3, 4.4), endian: :ndr, mode: :hex)
"01010000C09A9999999999F13F9A999999999901406666666666660A409A99999999991140"
iex> PointZM.to_wkb(PointZM.new(1.1, 2.2, 3.3, 4.4), srid: 4711, endian: :xdr, mode: :hex)
"00E0000001000012673FF199999999999A400199999999999A400A666666666666401199999999999A"
"""
@spec to_wkb(t(), opts) :: Geometry.wkb()
when opts: [endian: Geometry.endian(), srid: Geometry.srid(), mode: Geometry.mode()]
def to_wkb(%PointZM{coordinate: coordinate}, opts \\ []) do
endian = Keyword.get(opts, :endian, Geometry.default_endian())
srid = Keyword.get(opts, :srid)
mode = Keyword.get(opts, :mode, Geometry.default_mode())
to_wkb(coordinate, srid, endian, mode)
end
@doc """
Returns an `:ok` tuple with the `PointZM` from the given WKB string. Otherwise
returns an `:error` tuple.
If the geometry contains an SRID the id is added to the tuple.
The optional second argument determines if a `:hex`-string or a `:binary`
input is expected. The default is `:binary`.
## Examples
iex> PointZM.from_wkb(
...> "00C00000017FF80000000000007FF80000000000007FF80000000000007FF8000000000000",
...> :hex
...> )
{:ok, %PointZM{coordinate: nil}}
iex> PointZM.from_wkb(
...> "00C00000013FF199999999999A400199999999999A400A666666666666401199999999999A",
...> :hex
...> )
{:ok, %PointZM{coordinate: [1.1, 2.2, 3.3, 4.4]}}
iex> PointZM.from_wkb(
...> "01010000C09A9999999999F13F9A999999999901406666666666660A409A99999999991140",
...> :hex
...> )
{:ok, %PointZM{coordinate: [1.1, 2.2, 3.3, 4.4]}}
iex> PointZM.from_wkb(
...> "00E0000001000012673FF199999999999A400199999999999A400A666666666666401199999999999A",
...> :hex
...> )
{:ok, {%PointZM{coordinate: [1.1, 2.2, 3.3, 4.4]}, 4711}}
"""
@spec from_wkb(Geometry.wkb(), Geometry.mode()) ::
{:ok, t() | {t(), Geometry.srid()}}
| Geometry.wkb_error()
def from_wkb(wkb, mode \\ :binary), do: WKB.to_geometry(wkb, mode, PointZM)
@doc """
The same as `from_wkb/2`, but raises a `Geometry.Error` exception if it fails.
"""
@spec from_wkb!(Geometry.wkb(), Geometry.mode()) :: t() | {t(), Geometry.srid()}
def from_wkb!(wkb, mode \\ :binary) do
case WKB.to_geometry(wkb, mode, PointZM) do
{:ok, geometry} -> geometry
error -> raise Geometry.Error, error
end
end
@doc false
@compile {:inline, to_wkt_coordinate: 1}
@spec to_wkt_coordinate(Geometry.coordinate()) :: String.t()
def to_wkt_coordinate([x, y, z, m]) do
<<
to_wkt_number(x)::binary(),
@blank,
to_wkt_number(y)::binary(),
@blank,
to_wkt_number(z)::binary(),
@blank,
to_wkt_number(m)::binary()
>>
end
@compile {:inline, to_wkt_point: 1}
defp to_wkt_point(nil), do: "EMPTY"
defp to_wkt_point(coordinate), do: <<"(", to_wkt_coordinate(coordinate)::binary(), ")">>
@compile {:inline, to_wkt_number: 1}
defp to_wkt_number(num) when is_integer(num), do: Integer.to_string(num)
defp to_wkt_number(num) when is_float(num), do: Float.to_string(num)
@doc false
@compile {:inline, to_wkb: 4}
@spec to_wkb(
Geometry.coordinate() | nil,
Geometry.srid() | nil,
Geometry.endian(),
Geometry.mode()
) ::
binary()
def to_wkb(coordinate, srid, endian, mode) do
<<
WKB.byte_order(endian, mode)::binary(),
wkb_code(endian, not is_nil(srid), mode)::binary,
WKB.srid(srid, endian, mode)::binary(),
to_wkb_coordinate(coordinate, endian, mode)::binary
>>
end
@doc false
@compile {:inline, to_wkb_coordinate: 3}
@spec to_wkb_coordinate(coordinate, endian, mode) :: wkb
when coordinate: Geometry.coordinate() | nil,
endian: Geometry.endian(),
mode: Geometry.mode(),
wkb: Geometry.wkb()
def to_wkb_coordinate(nil, endian, mode), do: Map.fetch!(@empty, {endian, mode})
def to_wkb_coordinate([x, y, z, m], endian, mode) do
<<
to_wkb_number(x, endian, mode)::binary(),
to_wkb_number(y, endian, mode)::binary(),
to_wkb_number(z, endian, mode)::binary(),
to_wkb_number(m, endian, mode)::binary()
>>
end
@compile {:inline, to_wkb_number: 3}
defp to_wkb_number(num, endian, :hex), do: Hex.to_float_string(num, endian)
defp to_wkb_number(num, :xdr, :binary), do: <<num::big-float-size(64)>>
defp to_wkb_number(num, :ndr, :binary), do: <<num::little-float-size(64)>>
@compile {:inline, wkb_code: 3}
defp wkb_code(endian, srid?, :hex) do
case {endian, srid?} do
{:xdr, false} -> "C0000001"
{:ndr, false} -> "010000C0"
{:xdr, true} -> "E0000001"
{:ndr, true} -> "010000E0"
end
end
defp wkb_code(endian, srid?, :binary) do
case {endian, srid?} do
{:xdr, false} -> <<0xC0000001::big-integer-size(32)>>
{:ndr, false} -> <<0xC0000001::little-integer-size(32)>>
{:xdr, true} -> <<0xE0000001::big-integer-size(32)>>
{:ndr, true} -> <<0xE0000001::little-integer-size(32)>>
end
end
end
| lib/geometry/point_zm.ex | 0.959602 | 0.731766 | point_zm.ex | starcoder |
defmodule Mix.Tasks.Bonny.Gen.Manifest do
@moduledoc """
Generates the Kubernetes YAML manifest for this operator
mix bonny.gen.manifest expects a docker image name if deploying to a cluster. You may optionally provide a namespace.
## Examples
The `image` switch is required when deploying to a cluster.
Options:
* --image (docker image to deploy)
* --namespace (of service account and deployment; defaults to "default")
* --out (path to save manifest; defaults to "manifest.yaml")
*Deploying to kubernetes:*
```shell
docker build -t $(YOUR_IMAGE_URL) .
docker push $(YOUR_IMAGE_URL)
mix bonny.gen.manifest --image $(YOUR_IMAGE_URL):latest --namespace default
kubectl apply -f manifest.yaml -n default
```
To skip the `deployment` for running an operator outside of the cluster (like in development) simply omit the `--image` flag:
```shell
mix bonny.gen.manifest
```
"""
use Mix.Task
alias Bonny.Operator
@default_opts [namespace: "default"]
@switches [out: :string, namespace: :string, image: :string]
@aliases [o: :out, n: :namespace, i: :image]
@shortdoc "Generate Kubernetes YAML manifest for this operator"
def run(args) do
Mix.Task.run("loadpaths", args)
{opts, _, _} =
Mix.Bonny.parse_args(args, @default_opts, switches: @switches, aliases: @aliases)
manifest =
opts
|> resource_manifests
|> Enum.map(fn m -> ["---\n", Poison.encode!(m, pretty: true), "\n"] end)
|> List.flatten()
out = opts[:out] || "manifest.yaml"
Mix.Bonny.render(manifest, out)
end
defp resource_manifests(opts) when is_list(opts), do: opts |> Enum.into(%{}) |> resource_manifests
defp resource_manifests(%{image: image, namespace: namespace}) do
deployment = Operator.deployment(image, namespace)
manifests = resource_manifests(%{namespace: namespace})
[deployment | manifests]
end
defp resource_manifests(%{namespace: namespace}) do
Operator.crds() ++
[
Operator.cluster_role(),
Operator.service_account(namespace),
Operator.cluster_role_binding(namespace)
]
end
end
| lib/mix/tasks/bonny.gen.manifest.ex | 0.898328 | 0.703728 | bonny.gen.manifest.ex | starcoder |
defmodule PlayfabEx.Client.SharedGroupData do
use Interface
@doc """
Adds users to the set of those able to update both the shared data, as well as the set of users in the group. Only users in the group can add new members. Shared Groups are designed for sharing data between a very small number of players, please see our guide:
[online docs](https://api.playfab.com/documentation/client/method/AddSharedGroupMembers)
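For example (the field names follow the PlayFab REST schema and are shown here for illustration):
    PlayfabEx.Client.SharedGroupData.add_shared_group_members(%{
      "SharedGroupId" => "my-shared-group",
      "PlayFabIds" => ["7DDB1234C5E6F7A8"]
    })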
"""
@spec add_shared_group_members(map()) :: {:ok, map} | {:error, String.t}
definterface add_shared_group_members(params), to: (Application.get_env(:playfab_ex, PlayfabEx.Client.SharedGroupData) || PlayfabEx.Client.Default.SharedGroupData)
@doc """
Requests the creation of a shared group object, containing key/value pairs which may be updated by all members of the group. Upon creation, the current user will be the only member of the group. Shared Groups are designed for sharing data between a very small number of players, please see our guide:
[online docs](https://api.playfab.com/documentation/client/method/CreateSharedGroup)
"""
@spec create_shared_group(map()) :: {:ok, map} | {:error, String.t}
definterface create_shared_group(params), to: (Application.get_env(:playfab_ex, PlayfabEx.Client.SharedGroupData) || PlayfabEx.Client.Default.SharedGroupData)
@doc """
Retrieves data stored in a shared group object, as well as the list of members in the group. Non-members of the group may use this to retrieve group data, including membership, but they will not receive data for keys marked as private. Shared Groups are designed for sharing data between a very small number of players, please see our guide:
[online docs](https://api.playfab.com/documentation/client/method/GetSharedGroupData)
"""
@spec get_shared_group_data(map()) :: {:ok, map} | {:error, String.t}
definterface get_shared_group_data(params), to: (Application.get_env(:playfab_ex, PlayfabEx.Client.SharedGroupData) || PlayfabEx.Client.Default.SharedGroupData)
@doc """
[online docs](https://api.playfab.com/documentation/client/method/RemoveSharedGroupMembers)
"""
@spec remove_shared_group_members(map()) :: {:ok, map} | {:error, String.t}
definterface remove_shared_group_members(params), to: (Application.get_env(:playfab_ex, PlayfabEx.Client.SharedGroupData) || PlayfabEx.Client.Default.SharedGroupData)
@doc """
Adds, updates, and removes data keys for a shared group object. If the permission is set to Public, all fields updated or added in this call will be readable by users not in the group. By default, data permissions are set to Private. Regardless of the permission setting, only members of the group can update the data. Shared Groups are designed for sharing data between a very small number of players, please see our guide:
[online docs](https://api.playfab.com/documentation/client/method/UpdateSharedGroupData)
"""
@spec update_shared_group_data(map()) :: {:ok, map} | {:error, String.t}
definterface update_shared_group_data(params), to: (Application.get_env(:playfab_ex, PlayfabEx.Client.SharedGroupData) || PlayfabEx.Client.Default.SharedGroupData)
end
| lib/client/shared_group_data.ex | 0.602179 | 0.407776 | shared_group_data.ex | starcoder |
defmodule Puid do
@moduledoc """
Define modules for the efficient generation of cryptographically strong probably unique
identifiers (<strong>puid</strong>s, aka random strings) of specified entropy from various
character sets
## Examples
The simplest usage of `Puid` requires no options. The library adds a `generate/0` function for
generating **puid**s:
iex> defmodule(Id, do: use(Puid))
iex> Id.generate()
"p3CYi24M8tJNmroTLogO3b"
By default, `Puid` modules generate **puid**s with at least 128 bits of entropy, making the
**puid**s suitable replacements for **uuid**s.
### Character Set
The default character set for `Puid` modules is the Base64 URL and file system safe character set
specified in [RFC 3548](https://tools.ietf.org/html/rfc3548#section-4). Any of the pre-defined
character sets from `Puid.CharSet` can easily be specified using the `charset` option:
iex> defmodule(HexId, do: use(Puid, charset: :hex))
iex> HexId.generate()
"a60dec6d0b71355aa9579bb46c001700"
### Custom Characters
Any sequence of unique, printable characters can be used to generate **puid**s.
iex> defmodule(DingoSkyId, do: use(Puid, chars: "dingosky"))
iex> DingoSkyId.generate()
"yoisknoydoknkoikgoknynkinoknkygdiikoosksyni"
iex> defmodule(UnicodeId, do: use(Puid, chars: "ŮήιƈŏδεĊħąŕαсτəř"))
iex> UnicodeId.generate()
"αήήδħƈĊŕąąιŏήąŕħƈδəəήιττδδŕąĊδŕι"
### Specific Entropy
#### Bits
The `bits` option can be used to specify desired entropy bits.
iex> defmodule Password, do: use Puid, bits: 96, charset: :printable_ascii
iex> Password.generate()
"0&pu=w+T#~o)N=E"
Since the total entropy bits of a **puid** must be a multiple of the entropy bits per character
used, the actual **puid** `bits` will be equal to or greater than specified. In the example above,
the entropy bits of a `Password`-generated **puid** are actually 98.32.
#### Total and Risk
The amount of entropy can be intuitively specified through the `total` and `risk` options. For
example, to generate a `total` of 10 million **puid**s with a 1 in a quadrillion `risk` of repeat
using `:safe32` characters:
iex> defmodule(Safe32Id, do: use(Puid, total: 1.0e7, risk: 1.0e15, charset: :safe32))
iex> Safe32Id.generate()
"hjM7md2R9j8D7PNTjBPB"
The actual `Safe32Id` **puid** entropy bits is 100.
### Custom Randomness
`Puid` generates **puid**s using bytes from the function specified with the `rand_bytes`
option. If `rand_bytes` is not specified, `Puid` defaults to `:crypto.strong_rand_bytes/1`.
iex> defmodule(MyRandBytesId, do: use(Puid, bits: 96, charset: :safe32, rand_bytes: &MyRand.bytes/1))
iex> MyRandBytesId.generate()
"G2jrmPr3mQPBt2gGB3T4"
The `MyRand.bytes/1` function must be of the form `(non_neg_integer) -> binary()`
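For illustration, a conforming (but non-cryptographic) entropy source could look like
this; `:rand.bytes/1` requires OTP 24 or later:
    defmodule MyRand do
      def bytes(count) when is_integer(count) and count >= 0, do: :rand.bytes(count)
    end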
### Module Functions
`Puid` adds the following 2 functions to each created module:
| Function | Description |
| -------- | ----------- |
| generate/0 | function for generating a **puid** |
| info/0 | `Puid.Info` struct of module information |
The `Puid.Info` struct has the following fields:
| Field | Description |
| ----- | ----------- |
| chars | source character set |
| charset | pre-defined `Puid.Charset` or :custom |
| entropy_bits | entropy bits for generated **puid**s |
| entropy_bits_per_char | entropy bits per character for generated **puid**s |
| ere | **puid** entropy string representation efficiency |
| length | **puid** string length |
| rand_bytes | entropy source function |
iex> defmodule(AlphanumId, do: use(Puid, total: 10e06, risk: 1.0e15, charset: :alphanum))
iex> AlphanumId.info()
%Puid.Info{
chars: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789",
charset: :alphanum,
entropy_bits: 107.18,
entropy_bits_per_char: 5.95,
ere: 0.74,
length: 18,
rand_bytes: &:crypto.strong_rand_bytes/1
}
"""
use Bitwise, only: bsl
alias Puid.CharSet
alias Puid.Info
import Puid.Entropy
@doc false
defmacro __using__(opts) do
quote do
import Puid
import CharSet
puid_default = %Info{}
bits = unquote(opts)[:bits]
total = unquote(opts)[:total]
risk = unquote(opts)[:risk]
if !is_nil(total) and is_nil(risk),
do: raise(Puid.Error, "Must specify risk when specifying total")
if is_nil(total) and !is_nil(risk),
do: raise(Puid.Error, "Must specify total when specifying risk")
puid_bits =
cond do
is_nil(bits) and is_nil(total) and is_nil(risk) ->
puid_default.entropy_bits
is_number(bits) and bits < 1 ->
raise Puid.Error, "Invalid bits. Must be greater than 1"
is_number(bits) ->
bits
!is_nil(bits) ->
raise Puid.Error, "Invalid bits. Must be numeric"
true ->
bits(total, risk)
end
charset = unquote(opts)[:charset]
chars = unquote(opts)[:chars]
{puid_charset, puid_chars} =
cond do
is_nil(charset) and is_nil(chars) ->
{puid_default.charset, puid_default.chars}
!is_nil(charset) and !is_nil(chars) ->
raise Puid.Error, "Only one of charset or chars option allowed"
!is_nil(charset) and is_atom(charset) ->
case CharSet.chars(charset) do
:undefined ->
raise Puid.Error, "Invalid charset: #{charset}"
chars ->
{charset, chars}
end
!is_nil(charset) ->
raise Puid.Error, "Invalid charset: #{charset}"
!is_nil(chars) and is_binary(chars) ->
if CharSet.unique?(chars) do
if String.printable?(chars) do
if String.length(chars) > 1 do
{:custom, chars}
else
raise Puid.Error, "Invalid chars: must be more than 1 char"
end
else
raise Puid.Error, "Invalid chars: not printable"
end
else
raise Puid.Error, "Invalid chars: not unique"
end
true ->
raise Puid.Error, "Invalid chars"
end
ebpc = puid_chars |> String.length() |> :math.log2()
puid_len = (puid_bits / ebpc) |> :math.ceil() |> round()
chars_count = puid_chars |> String.length()
total_bytes = puid_chars |> String.graphemes() |> Enum.reduce(0, &(byte_size(&1) + &2))
ere = (ebpc * chars_count / 8 / total_bytes) |> Float.round(2)
@puid_charset puid_charset
@puid_chars puid_chars
@puid_chars_count chars_count
@puid_entropy_bits_per_char ebpc
@puid_len puid_len
@puid_ere ere
rand_bytes = unquote(opts[:rand_bytes])
if !is_nil(rand_bytes) do
if !is_function(rand_bytes), do: raise(Puid.Error, "rand_bytes not a function")
if :erlang.fun_info(rand_bytes)[:arity] !== 1,
do: raise(Puid.Error, "rand_bytes not arity 1")
end
@puid_rand_bytes rand_bytes || (&:crypto.strong_rand_bytes/1)
n_encode_bytes =
case @puid_charset do
charset when charset in [:hex, :hex_upper] ->
(@puid_len / 2) |> round()
charset when charset in [:base32, :base32_hex, :base32_hex_upper] ->
(@puid_len * 5 / 8)
|> :math.floor()
|> round()
:safe64 ->
(@puid_len * 6 / 8)
|> :math.floor()
|> round()
_ ->
nil
end
@puid_n_encode_bytes n_encode_bytes
if @puid_charset === :custom do
pow2 = &bsl(1, &1)
@puid_chars_count
|> :math.log2()
|> round()
|> pow2.()
|> Kernel.==(@puid_chars_count)
|> if do
n_bits =
@puid_chars_count
|> :math.log2()
|> :math.ceil()
|> round()
@puid_n_rand_bytes (n_bits * (@puid_len / 8))
|> :math.ceil()
|> round()
else
@puid_n_rand_bytes 0
end
end
@before_compile unquote(__MODULE__)
end
end
@doc false
defmacro __before_compile__(_env) do
quote do
@doc """
Generate __puid__
"""
case @puid_charset do
:custom ->
if 0 < @puid_n_rand_bytes do
def generate do
custom_chars(
@puid_len,
@puid_entropy_bits_per_char |> :math.ceil() |> round(),
0,
@puid_rand_bytes.(@puid_n_rand_bytes),
@puid_chars,
""
)
end
else
def generate,
do:
custom_chars(
@puid_len,
@puid_entropy_bits_per_char |> :math.ceil() |> round(),
0,
@puid_chars_count |> CryptoRand.uniform_bytes(@puid_len, @puid_rand_bytes),
@puid_chars,
""
)
end
:alpha ->
def generate,
do:
alpha_chars(
@puid_len,
6,
0,
CryptoRand.uniform_bytes(52, @puid_len),
<<>>
)
:alpha_lower ->
def generate,
do:
ul_alpha_chars(
@puid_len,
5,
0,
CryptoRand.uniform_bytes(@puid_chars_count, @puid_len, @puid_rand_bytes),
?a,
""
)
:alpha_upper ->
def generate,
do:
ul_alpha_chars(
@puid_len,
5,
0,
CryptoRand.uniform_bytes(@puid_chars_count, @puid_len, @puid_rand_bytes),
?A,
""
)
:alphanum ->
def generate,
do:
alphanum_chars(
@puid_len,
6,
0,
CryptoRand.uniform_bytes(62, @puid_len),
<<>>
)
:alphanum_lower ->
def generate,
do:
ul_alphanum_chars(
@puid_len,
6,
0,
CryptoRand.uniform_bytes(@puid_chars_count, @puid_len, @puid_rand_bytes),
?a,
""
)
:alphanum_upper ->
def generate,
do:
ul_alphanum_chars(
@puid_len,
6,
0,
CryptoRand.uniform_bytes(@puid_chars_count, @puid_len, @puid_rand_bytes),
?A,
""
)
:decimal ->
def generate,
do:
decimal_chars(
@puid_len,
4,
0,
CryptoRand.uniform_bytes(@puid_chars_count, @puid_len, @puid_rand_bytes),
""
)
:hex ->
def generate,
do: @puid_n_encode_bytes |> @puid_rand_bytes.() |> Base.encode16(case: :lower)
:hex_upper ->
def generate,
do: @puid_n_encode_bytes |> @puid_rand_bytes.() |> Base.encode16(case: :upper)
:base32 ->
def generate,
do:
@puid_n_encode_bytes
|> @puid_rand_bytes.()
|> Base.encode32(padding: false)
:base32_hex ->
def generate,
do:
@puid_n_encode_bytes
|> @puid_rand_bytes.()
|> Base.hex_encode32(padding: false, case: :lower)
:base32_hex_upper ->
def generate,
do:
@puid_n_encode_bytes
|> @puid_rand_bytes.()
|> Base.hex_encode32(padding: false, case: :upper)
:safe32 ->
def generate,
do:
safe32_chars(
@puid_len,
5,
0,
CryptoRand.uniform_bytes(32, @puid_len, @puid_rand_bytes),
<<>>
)
:safe64 ->
def generate,
do:
@puid_n_encode_bytes
|> @puid_rand_bytes.()
|> Base.url_encode64(padding: false)
:printable_ascii ->
def generate,
do:
printable_ascii_chars(
@puid_len,
7,
0,
CryptoRand.uniform_bytes(94, @puid_len, @puid_rand_bytes),
<<>>
)
end
if @puid_charset == :custom do
@puid_chars
|> String.graphemes()
|> Enum.find(&(&1 |> byte_size() > 1))
|> is_nil()
|> if do
defp char_at(ndx), do: @puid_chars |> :binary.part(ndx, 1)
else
defp char_at(ndx), do: @puid_chars |> String.at(ndx)
end
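# Walk `uniform_bytes` `bits` bits at a time: each value indexes into the
# custom alphabet and the resulting character is prepended until `n`
# characters have been produced.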
defp custom_chars(0, _, _, _, _, string), do: string
defp custom_chars(n, bits, uniform_offset, uniform_bytes, alphabet, string) do
<<_::size(uniform_offset), ndx::size(bits), _::bits>> = uniform_bytes
char = char_at(ndx)
custom_chars(
n - 1,
bits,
uniform_offset + bits,
uniform_bytes,
alphabet,
<<char::binary, string::binary>>
)
end
end
if @puid_charset == :alpha_lower or @puid_charset == :alpha_upper do
defp ul_alpha_chars(0, _, _, _, _, string), do: string
defp ul_alpha_chars(n, bits, uniform_offset, uniform_bytes, char_offset, string) do
<<_::size(uniform_offset), value::size(bits), _::bits>> = uniform_bytes
char = char_offset + value
ul_alpha_chars(
n - 1,
bits,
uniform_offset + bits,
uniform_bytes,
char_offset,
<<char::size(8), string::binary>>
)
end
end
if @puid_charset == :alpha do
defp alpha_chars(0, _, _, _, string), do: string
defp alpha_chars(n, bits, uniform_offset, uniform_bytes, string) do
<<_::size(uniform_offset), value::size(bits), _::bits>> = uniform_bytes
char =
cond do
value < 26 ->
?A + value
true ->
?a + value - 26
end
alpha_chars(
n - 1,
bits,
uniform_offset + bits,
uniform_bytes,
<<char::size(8), string::binary>>
)
end
end
if @puid_charset == :alphanum do
defp alphanum_chars(0, _, _, _, string), do: string
defp alphanum_chars(n, bits, uniform_offset, uniform_bytes, string) do
<<_::size(uniform_offset), value::size(bits), _::bits>> = uniform_bytes
char =
cond do
value < 10 ->
?0 + value
value < 36 ->
?A + value - 10
true ->
?a + value - 36
end
alphanum_chars(
n - 1,
bits,
uniform_offset + bits,
uniform_bytes,
<<char::size(8), string::binary>>
)
end
end
if @puid_charset == :alphanum_lower or @puid_charset == :alphanum_upper do
defp ul_alphanum_chars(0, _, _, _, _, string), do: string
defp ul_alphanum_chars(n, bits, uniform_offset, uniform_bytes, char_offset, string) do
<<_::size(uniform_offset), value::size(bits), _::bits>> = uniform_bytes
char =
cond do
value < 26 ->
char_offset + value
true ->
value - 26 + ?0
end
ul_alphanum_chars(
n - 1,
bits,
uniform_offset + bits,
uniform_bytes,
char_offset,
<<char::size(8), string::binary>>
)
end
end
if @puid_charset == :decimal do
defp decimal_chars(0, _, _, _, string), do: string
defp decimal_chars(n, bits, uniform_offset, uniform_bytes, string) do
<<_::size(uniform_offset), value::size(bits), _::bits>> = uniform_bytes
char = ?0 + value
decimal_chars(
n - 1,
bits,
uniform_offset + bits,
uniform_bytes,
<<char::size(8), string::binary>>
)
end
end
# 2 3 4 6 7 8 9 b d f g h j m n p q r t B D F G H J L M N P Q R T
# 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31
# ----- ------- - - ------- -- ----- -------- -- -- -- -------- -- -------- -------- --
# <3 <7,-3 7 8 <12,-9 12 <15,-13 <18,-15 18 19 20 <24,-21 24 <28,-25 <31,-28 31
if @puid_charset == :safe32 do
defp safe32_chars(0, _, _, _, string), do: string
defp safe32_chars(n, bits, uniform_offset, uniform_bytes, string) do
<<_::size(uniform_offset), value::size(bits), _::bits>> = uniform_bytes
char =
cond do
value < 3 ->
?2 + value
value < 7 ->
?6 + value - 3
value == 7 ->
?b
value == 8 ->
?d
value < 12 ->
?f + value - 9
value == 12 ->
?j
value < 15 ->
?m + value - 13
value < 18 ->
?p + value - 15
value == 18 ->
?t
value == 19 ->
?B
value == 20 ->
?D
value < 24 ->
?F + value - 21
value == 24 ->
?J
value < 28 ->
?L + value - 25
value < 31 ->
?P + value - 28
value == 31 ->
?T
end
safe32_chars(
n - 1,
bits,
uniform_offset + bits,
uniform_bytes,
<<char::size(8), string::binary>>
)
end
end
if @puid_charset == :printable_ascii do
defp printable_ascii_chars(0, _, _, _, string), do: string
defp printable_ascii_chars(n, bits, uniform_offset, uniform_bytes, string) do
<<_::size(uniform_offset), value::size(bits), _::bits>> = uniform_bytes
char = ?! + value
printable_ascii_chars(
n - 1,
bits,
uniform_offset + bits,
uniform_bytes,
<<char::size(8), string::binary>>
)
end
end
@doc """
Returns a `Puid.Info` struct of module information.
"""
def info,
do: %Info{
chars: @puid_chars,
charset: @puid_charset,
entropy_bits_per_char: Float.round(@puid_entropy_bits_per_char, 2),
entropy_bits: Float.round(@puid_len * @puid_entropy_bits_per_char, 2),
ere: @puid_ere,
length: @puid_len,
rand_bytes: @puid_rand_bytes
}
end
end
end
# file: lib/puid/puid.ex
defmodule Geometry.GeometryCollectionM do
@moduledoc """
A collection set of 2D geometries with a measurement.
`GeometryCollectionM` implements the protocols `Enumerable` and `Collectable`.
## Examples
iex> Enum.map(
...> GeometryCollectionM.new([
...> PointM.new(11, 12, 14),
...> LineStringM.new([
...> PointM.new(21, 22, 24),
...> PointM.new(31, 32, 34)
...> ])
...> ]),
...> fn
...> %PointM{} -> :point
...> %LineStringM{} -> :line_string
...> end
...> ) |> Enum.sort()
[:line_string, :point]
iex> Enum.into([PointM.new(1, 2, 4)], GeometryCollectionM.new())
%GeometryCollectionM{
geometries: MapSet.new([%PointM{coordinate: [1, 2, 4]}])
}
"""
alias Geometry.{
GeoJson,
GeometryCollectionM,
WKB,
WKT
}
defstruct geometries: MapSet.new()
@type t :: %GeometryCollectionM{geometries: MapSet.t(Geometry.t())}
@doc """
Creates an empty `GeometryCollectionM`.
## Examples
iex> GeometryCollectionM.new()
%GeometryCollectionM{geometries: MapSet.new()}
"""
@spec new :: t()
def new, do: %GeometryCollectionM{}
@doc """
Creates a `GeometryCollectionM` with the given geometries.
## Examples
iex> GeometryCollectionM.new([
...> PointM.new(1, 2, 4),
...> LineStringM.new([PointM.new(1, 1, 1), PointM.new(2, 2, 2)])
...> ])
%GeometryCollectionM{geometries: MapSet.new([
%PointM{coordinate: [1, 2, 4]},
%LineStringM{points: [[1, 1, 1], [2, 2, 2]]}
])}
"""
@spec new([Geometry.t()]) :: t()
def new(geometries), do: %GeometryCollectionM{geometries: MapSet.new(geometries)}
@doc """
Returns `true` if the given `GeometryCollectionM` is empty.
## Examples
iex> GeometryCollectionM.empty?(GeometryCollectionM.new())
true
iex> GeometryCollectionM.empty?(GeometryCollectionM.new([PointM.new(1, 2, 4)]))
false
"""
@spec empty?(t()) :: boolean
def empty?(%GeometryCollectionM{geometries: geometries}), do: Enum.empty?(geometries)
@doc """
Returns the WKT representation for a `GeometryCollectionM`. With option
`:srid` an EWKT representation with the SRID is returned.
## Examples
iex> GeometryCollectionM.to_wkt(GeometryCollectionM.new())
"GeometryCollection M EMPTY"
iex> GeometryCollectionM.to_wkt(
...> GeometryCollectionM.new([
...> PointM.new(1.1, 1.2, 1.4),
...> PointM.new(2.1, 2.2, 2.4)
...> ])
...> )
"GeometryCollection M (Point M (1.1 1.2 1.4), Point M (2.1 2.2 2.4))"
iex> GeometryCollectionM.to_wkt(
...> GeometryCollectionM.new([PointM.new(1.1, 2.2, 4.4)]),
...> srid: 4711)
"SRID=4711;GeometryCollection M (Point M (1.1 2.2 4.4))"
"""
@spec to_wkt(t(), opts) :: Geometry.wkt()
when opts: [srid: Geometry.srid()]
def to_wkt(%GeometryCollectionM{geometries: geometries}, opts \\ []) do
WKT.to_ewkt(
<<
"GeometryCollection M ",
geometries |> MapSet.to_list() |> to_wkt_geometries()::binary()
>>,
opts
)
end
@doc """
Returns an `:ok` tuple with the `GeometryCollectionM` from the given WKT
string. Otherwise returns an `:error` tuple.
If the geometry contains a SRID the id is added to the tuple.
## Examples
iex> GeometryCollectionM.from_wkt(
...> "GeometryCollection M (Point M (1.1 2.2 4.4))")
{
:ok,
%GeometryCollectionM{
geometries: MapSet.new([%PointM{coordinate: [1.1, 2.2, 4.4]}])
}
}
iex> GeometryCollectionM.from_wkt(
...> "SRID=123;GeometryCollection M (Point M (1.1 2.2 4.4))")
{:ok, {
%GeometryCollectionM{
geometries: MapSet.new([%PointM{coordinate: [1.1, 2.2, 4.4]}])
},
123
}}
iex> GeometryCollectionM.from_wkt("GeometryCollection M EMPTY")
{:ok, %GeometryCollectionM{}}
"""
@spec from_wkt(Geometry.wkt()) ::
{:ok, t() | {t(), Geometry.srid()}} | Geometry.wkt_error()
def from_wkt(wkt), do: WKT.to_geometry(wkt, GeometryCollectionM)
@doc """
The same as `from_wkt/1`, but raises a `Geometry.Error` exception if it fails.
"""
@spec from_wkt!(Geometry.wkt()) :: t() | {t(), Geometry.srid()}
def from_wkt!(wkt) do
case WKT.to_geometry(wkt, GeometryCollectionM) do
{:ok, geometry} -> geometry
error -> raise Geometry.Error, error
end
end
@doc """
Returns the GeoJSON term of a `GeometryCollectionM`.
## Examples
iex> GeometryCollectionM.to_geo_json(
...> GeometryCollectionM.new([PointM.new(1.1, 2.2, 4.4)]))
%{
"type" => "GeometryCollection",
"geometries" => [
%{
"type" => "Point",
"coordinates" => [1.1, 2.2, 4.4]
}
]
}
"""
@spec to_geo_json(t()) :: Geometry.geo_json_term()
def to_geo_json(%GeometryCollectionM{geometries: geometries}) do
%{
"type" => "GeometryCollection",
"geometries" =>
Enum.map(geometries, fn geometry ->
Geometry.to_geo_json(geometry)
end)
}
end
@doc """
Returns an `:ok` tuple with the `GeometryCollectionM` from the given GeoJSON
term. Otherwise returns an `:error` tuple.
## Examples
iex> ~s({
...> "type": "GeometryCollection",
...> "geometries": [
...> {"type": "Point", "coordinates": [1.1, 2.2, 4.4]}
...> ]
...> })
iex> |> Jason.decode!()
iex> |> GeometryCollectionM.from_geo_json()
{
:ok,
%GeometryCollectionM{
geometries: MapSet.new([%PointM{coordinate: [1.1, 2.2, 4.4]}])
}
}
"""
@spec from_geo_json(Geometry.geo_json_term()) :: {:ok, t()} | Geometry.geo_json_error()
def from_geo_json(json) do
GeoJson.to_geometry_collection(json, GeometryCollectionM, type: :m)
end
@doc """
The same as `from_geo_json/1`, but raises a `Geometry.Error` exception if it fails.
"""
@spec from_geo_json!(Geometry.geo_json_term()) :: t()
def from_geo_json!(json) do
case GeoJson.to_geometry_collection(json, GeometryCollectionM, type: :m) do
{:ok, geometry} -> geometry
error -> raise Geometry.Error, error
end
end
@doc """
Returns the WKB representation for a `GeometryCollectionM`.
With option `:srid` an EWKB representation with the SRID is returned.
The option `:endian` indicates whether an `:xdr` (big endian) or `:ndr` (little
endian) representation is returned. The default is `:ndr`.
The `:mode` determines whether a hex-string or binary is returned. The default
is `:binary`.
An example of a simpler geometry can be found in the description for the
`Geometry.PointM.to_wkb/1` function.
"""
@spec to_wkb(t(), opts) :: Geometry.wkb()
when opts: [endian: Geometry.endian(), srid: Geometry.srid(), mode: Geometry.mode()]
def to_wkb(%GeometryCollectionM{geometries: geometries}, opts \\ []) do
endian = Keyword.get(opts, :endian, Geometry.default_endian())
mode = Keyword.get(opts, :mode, Geometry.default_mode())
srid = Keyword.get(opts, :srid)
<<
WKB.byte_order(endian, mode)::binary(),
wkb_code(endian, not is_nil(srid), mode)::binary(),
WKB.srid(srid, endian, mode)::binary(),
to_wkb_geometries(geometries, endian, mode)::binary()
>>
end
@doc """
Returns an `:ok` tuple with the `GeometryCollectionM` from the given WKB
string. Otherwise returns an `:error` tuple.
If the geometry contains a SRID the id is added to the tuple.
An example of a simpler geometry can be found in the description for the
`Geometry.PointM.from_wkb/2` function.
"""
@spec from_wkb(Geometry.wkb(), Geometry.mode()) ::
{:ok, t() | {t(), Geometry.srid()}} | Geometry.wkb_error()
def from_wkb(wkb, mode \\ :binary), do: WKB.to_geometry(wkb, mode, GeometryCollectionM)
@doc """
The same as `from_wkb/2`, but raises a `Geometry.Error` exception if it fails.
"""
@spec from_wkb!(Geometry.wkb(), Geometry.mode()) :: t() | {t(), Geometry.srid()}
def from_wkb!(wkb, mode \\ :binary) do
case WKB.to_geometry(wkb, mode, GeometryCollectionM) do
{:ok, geometry} -> geometry
error -> raise Geometry.Error, error
end
end
@doc """
Returns the number of elements in `GeometryCollectionM`.
## Examples
iex> GeometryCollectionM.size(
...> GeometryCollectionM.new([
...> PointM.new(11, 12, 14),
...> LineStringM.new([
...> PointM.new(21, 22, 24),
...> PointM.new(31, 32, 34)
...> ])
...> ])
...> )
2
"""
@spec size(t()) :: non_neg_integer()
def size(%GeometryCollectionM{geometries: geometries}), do: MapSet.size(geometries)
@doc """
Checks if `GeometryCollectionM` contains `geometry`.
## Examples
iex> GeometryCollectionM.member?(
...> GeometryCollectionM.new([
...> PointM.new(11, 12, 14),
...> LineStringM.new([
...> PointM.new(21, 22, 24),
...> PointM.new(31, 32, 34)
...> ])
...> ]),
...> PointM.new(11, 12, 14)
...> )
true
iex> GeometryCollectionM.member?(
...> GeometryCollectionM.new([
...> PointM.new(11, 12, 14),
...> LineStringM.new([
...> PointM.new(21, 22, 24),
...> PointM.new(31, 32, 34)
...> ])
...> ]),
...> PointM.new(1, 2, 4)
...> )
false
"""
@spec member?(t(), Geometry.t()) :: boolean()
def member?(%GeometryCollectionM{geometries: geometries}, geometry),
do: MapSet.member?(geometries, geometry)
@doc """
Converts `GeometryCollectionM` to a list.
## Examples
iex> GeometryCollectionM.to_list(
...> GeometryCollectionM.new([
...> PointM.new(11, 12, 14)
...> ])
...> )
[%PointM{coordinate: [11, 12, 14]}]
"""
@spec to_list(t()) :: [Geometry.t()]
def to_list(%GeometryCollectionM{geometries: geometries}), do: MapSet.to_list(geometries)
@compile {:inline, to_wkt_geometries: 1}
defp to_wkt_geometries([]), do: "EMPTY"
defp to_wkt_geometries([geometry | geometries]) do
<<"(",
Enum.reduce(geometries, Geometry.to_wkt(geometry), fn %module{} = geometry, acc ->
<<acc::binary(), ", ", module.to_wkt(geometry)::binary()>>
end)::binary(), ")">>
end
@compile {:inline, to_wkb_geometries: 3}
defp to_wkb_geometries(geometries, endian, mode) do
Enum.reduce(geometries, WKB.length(geometries, endian, mode), fn %module{} = geometry, acc ->
<<acc::binary(), module.to_wkb(geometry, endian: endian, mode: mode)::binary()>>
end)
end
@compile {:inline, wkb_code: 3}
defp wkb_code(endian, srid?, :hex) do
case {endian, srid?} do
{:xdr, false} -> "40000007"
{:ndr, false} -> "07000040"
{:xdr, true} -> "60000007"
{:ndr, true} -> "07000060"
end
end
defp wkb_code(endian, srid?, :binary) do
case {endian, srid?} do
{:xdr, false} -> <<0x40000007::big-integer-size(32)>>
{:ndr, false} -> <<0x40000007::little-integer-size(32)>>
{:xdr, true} -> <<0x60000007::big-integer-size(32)>>
{:ndr, true} -> <<0x60000007::little-integer-size(32)>>
end
end
defimpl Enumerable do
# credo:disable-for-next-line Credo.Check.Readability.Specs
def count(geometry_collection) do
{:ok, GeometryCollectionM.size(geometry_collection)}
end
# credo:disable-for-next-line Credo.Check.Readability.Specs
def member?(geometry_collection, val) do
{:ok, GeometryCollectionM.member?(geometry_collection, val)}
end
# credo:disable-for-next-line Credo.Check.Readability.Specs
def slice(geometry_collection) do
size = GeometryCollectionM.size(geometry_collection)
{:ok, size,
&Enumerable.List.slice(GeometryCollectionM.to_list(geometry_collection), &1, &2, size)}
end
# credo:disable-for-next-line Credo.Check.Readability.Specs
def reduce(geometry_collection, acc, fun) do
Enumerable.List.reduce(GeometryCollectionM.to_list(geometry_collection), acc, fun)
end
end
defimpl Collectable do
# credo:disable-for-next-line Credo.Check.Readability.Specs
def into(%GeometryCollectionM{geometries: geometries}) do
fun = fn
list, {:cont, x} ->
[{x, []} | list]
list, :done ->
%GeometryCollectionM{
geometries: %{geometries | map: Map.merge(geometries.map, Map.new(list))}
}
_list, :halt ->
:ok
end
{[], fun}
end
end
end
# file: lib/geometry/geometry_collection_m.ex
defmodule TinkoffInvest do
@moduledoc """
Convenient functions for frequent use-cases
"""
@type mode() :: :sandbox | :production
@default_endpoint "https://api-invest.tinkoff.ru/openapi"
alias TinkoffInvest.Portfolio
alias TinkoffInvest.User
alias TinkoffInvest.Orders
alias TinkoffInvest.Model.Api.Response
@doc delegate_to: {User, :accounts, 0}
defdelegate accounts, to: User
@doc delegate_to: {Orders, :active_orders, 0}
defdelegate active_orders, to: Orders
@doc delegate_to: {Orders, :create_limit_order, 4}
defdelegate create_limit_order(figi, lots, operation, price), to: Orders
@doc delegate_to: {Orders, :create_market_order, 3}
defdelegate create_market_order(figi, lots, operation), to: Orders
@doc delegate_to: {Orders, :cancel_order, 1}
defdelegate cancel_order(order_id), to: Orders
@doc delegate_to: {Portfolio, :full, 0}
defdelegate portfolio, to: Portfolio, as: :full
@doc """
Change broker account id.
Useful when you want to switch between accounts dynamically.
iex> TinkoffInvest.change_account_id("SB1234")
:ok
"""
@spec change_account_id(String.t()) :: :ok
def change_account_id(id) do
Application.put_env(:tinkoff_invest, :broker_account_id, id)
end
@doc """
Change token dynamically without restarting app.
Useful when using multiple broker accounts on different clients.
iex> TinkoffInvest.change_token("mytoken")
:ok
"""
@spec change_token(String.t()) :: :ok
def change_token(token) do
Application.put_env(:tinkoff_invest, :token, token)
end
@doc """
Change mode dynamically.
iex> TinkoffInvest.set_mode(:sandbox)
:ok
"""
@spec set_mode(mode()) :: :ok
def set_mode(mode) when mode in [:sandbox, :production] do
Application.put_env(:tinkoff_invest, :mode, mode)
end
@doc """
Returns current mode
iex> TinkoffInvest.mode()
:sandbox
"""
@spec mode :: mode()
def mode do
Application.fetch_env!(:tinkoff_invest, :mode)
end
@doc """
Returns boolean indicating whether logs are enabled or not
iex> TinkoffInvest.logs_enabled?()
false
"""
@spec logs_enabled?() :: boolean()
def logs_enabled? do
Application.get_env(:tinkoff_invest, :logs_enabled, false)
end
@doc """
Enables or disables logging API response via `Logger.debug/1`
`false` by default
iex> TinkoffInvest.toggle_logs(false)
:ok
"""
@spec toggle_logs(boolean()) :: :ok
def toggle_logs(val) when val in [true, false] do
Application.put_env(:tinkoff_invest, :logs_enabled, val)
end
@doc """
Returns API endpoint for current mode
iex> TinkoffInvest.endpoint()
"https://api-invest.tinkoff.ru/openapi/sandbox"
"""
@spec endpoint :: String.t()
def endpoint do
Application.get_env(:tinkoff_invest, :endpoint, @default_endpoint) <> endpoint_prefix()
end
def default_endpoint, do: @default_endpoint
@doc """
Takes payload from response and returns it, useful for piping
iex> TinkoffInvest.payload(%TinkoffInvest.Model.Api.Response{tracking_id: "123", status_code: 200, status: "Ok", payload: []})
[]
"""
@spec payload(Response.t()) :: term()
def payload(%Response{payload: payload}), do: payload
defp endpoint_prefix do
case TinkoffInvest.mode() do
:sandbox -> "/sandbox"
:production -> ""
end
end
end
# file: lib/tinkoff_invest.ex
defmodule Protobuf.DSL.Typespecs do
@moduledoc false
alias Protobuf.{FieldProps, MessageProps}
@spec quoted_enum_typespec(MessageProps.t()) :: Macro.t()
def quoted_enum_typespec(%MessageProps{field_props: field_props}) do
atom_specs =
field_props
|> Enum.sort_by(fn {fnum, _prop} -> fnum end)
|> Enum.map(fn {_fnum, %FieldProps{name_atom: name}} -> name end)
|> union_specs()
quote do
integer() | unquote(atom_specs)
end
end
@spec quoted_message_typespec(MessageProps.t()) :: Macro.t()
def quoted_message_typespec(%MessageProps{syntax: syntax} = message_props) do
regular_fields =
for {_fnum, %FieldProps{oneof: nil} = prop} <- message_props.field_props,
do: {prop.name_atom, field_prop_to_spec(syntax, prop)}
oneof_fields =
for {field_name, fnum} <- message_props.oneof do
possible_fields =
for {_fnum, %FieldProps{oneof: ^fnum} = prop} <- message_props.field_props, do: prop
{field_name, oneof_spec(syntax, possible_fields)}
end
extension_fields =
case message_props.extension_range do
[_ | _] -> [{:__pb_extensions__, quote(do: map())}]
_other -> []
end
unknown_fields = [
{:__unknown_fields__,
quote(
do: [
{field_number :: integer(), Protobuf.Wire.Types.wire_type(), value :: term()}
]
)}
]
field_specs = regular_fields ++ oneof_fields ++ extension_fields ++ unknown_fields
quote do: %__MODULE__{unquote_splicing(field_specs)}
end
defp oneof_spec(syntax, possible_oneof_fields) do
possible_oneof_fields
|> Enum.map(fn prop -> {prop.name_atom, field_prop_to_spec(syntax, prop)} end)
|> Kernel.++([nil])
|> union_specs()
end
defp field_prop_to_spec(_syntax, %FieldProps{map?: true, type: map_mod} = prop) do
if not Code.ensure_loaded?(map_mod) do
raise "module #{inspect(map_mod)} was not loaded, but was expected to be since it's used as a map entry"
end
map_props = map_mod.__message_props__()
key_spec = scalar_type_to_spec(map_props.field_props[map_props.field_tags.key].type)
value_prop = map_props.field_props[map_props.field_tags.value]
value_spec = type_to_spec(value_prop.type, value_prop)
value_spec = if prop.embedded?, do: quote(do: unquote(value_spec) | nil), else: value_spec
quote do: %{optional(unquote(key_spec)) => unquote(value_spec)}
end
defp field_prop_to_spec(syntax, %FieldProps{type: type} = prop) do
spec = type_to_spec(type, prop)
cond do
prop.repeated? ->
quote do: [unquote(spec)]
prop.embedded? or (prop.optional? and is_nil(prop.oneof) and syntax != :proto3) ->
quote do: unquote(spec) | nil
true ->
spec
end
end
defp type_to_spec({:enum, enum_mod}, _prop), do: quote(do: unquote(enum_mod).t())
defp type_to_spec(mod, %FieldProps{embedded?: true}), do: quote(do: unquote(mod).t())
defp type_to_spec(:group, _prop), do: quote(do: term())
defp type_to_spec(type, _prop), do: scalar_type_to_spec(type)
defp scalar_type_to_spec(:string), do: quote(do: String.t())
defp scalar_type_to_spec(:bytes), do: quote(do: binary())
defp scalar_type_to_spec(:bool), do: quote(do: boolean())
defp scalar_type_to_spec(type)
when type in [:int32, :int64, :sint32, :sint64, :sfixed32, :sfixed64],
do: quote(do: integer())
defp scalar_type_to_spec(type)
when type in [:uint32, :uint64, :fixed32, :fixed64],
do: quote(do: non_neg_integer())
defp scalar_type_to_spec(type) when type in [:float, :double],
do: quote(do: float() | :infinity | :negative_infinity | :nan)
# We do this because the :| operator is left-associative, so if we just map and build "acc |
# spec" then we end up with "((foo | bar) | baz) | bong". By building it from right to left, it
# works just fine.
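# For example, given specs for :a, :b and :c this yields the AST for
# `:a | :b | :c`, grouped as `:a | (:b | :c)`.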
defp union_specs(specs) do
Enum.reduce(Enum.reverse(specs), fn spec, acc ->
quote do: unquote(spec) | unquote(acc)
end)
end
end
# file: lib/protobuf/dsl/typespecs.ex
defmodule ExJsonSchema.Schema do
defmodule UnsupportedSchemaVersionError do
defexception message: "unsupported schema version, only draft 4 is supported"
end
defmodule InvalidSchemaError do
defexception message: "invalid schema"
end
defmodule UndefinedRemoteSchemaResolverError do
defexception message:
"trying to resolve a remote schema but no remote schema resolver function is defined"
end
defmodule InvalidReferenceError do
defexception message: "invalid reference"
end
alias ExJsonSchema.Schema.Draft4
alias ExJsonSchema.Schema.Root
alias ExJsonSchema.Validator
@type ref_path :: [:root | String.t()]
@type resolved :: ExJsonSchema.data() | %{String.t() => (Root.t() -> {Root.t(), resolved}) | ref_path}
@type invalid_reference_error :: {:error, :invalid_reference}
@current_draft_schema_url "http://json-schema.org/schema"
@draft4_schema_url "http://json-schema.org/draft-04/schema"
@spec resolve(Root.t() | ExJsonSchema.object(), custom_format_validator: {module(), atom()}) ::
Root.t() | no_return
def resolve(schema, options \\ [])
def resolve(root = %Root{}, options) do
root = %Root{root | custom_format_validator: Keyword.get(options, :custom_format_validator)}
resolve_root(root)
end
def resolve(schema = %{}, options), do: resolve(%Root{schema: schema}, options)
@spec get_fragment(Root.t(), ref_path | ExJsonSchema.json_path()) ::
{:ok, resolved} | invalid_reference_error | no_return
def get_fragment(root = %Root{}, path) when is_binary(path) do
case resolve_ref(root, path) do
{:ok, {_root, ref}} -> get_fragment(root, ref)
error -> error
end
end
def get_fragment(root = %Root{}, [:root | path] = ref) do
do_get_fragment(root.schema, path, ref)
end
def get_fragment(root = %Root{}, [url | path] = ref) when is_binary(url) do
do_get_fragment(root.refs[url], path, ref)
end
@spec get_fragment!(Root.t(), ref_path | ExJsonSchema.json_path()) :: resolved | no_return
def get_fragment!(schema, ref) do
case get_fragment(schema, ref) do
{:ok, schema} -> schema
{:error, :invalid_reference} -> raise_invalid_reference_error(ref)
end
end
defp resolve_root(root) do
assert_supported_schema_version(
Map.get(root.schema, "$schema", @current_draft_schema_url <> "#")
)
assert_valid_schema(root.schema)
{root, schema} = resolve_with_root(root, root.schema)
%{root | schema: schema}
end
defp assert_supported_schema_version(version) do
unless supported_schema_version?(version), do: raise(UnsupportedSchemaVersionError)
end
defp assert_valid_schema(schema) do
unless meta?(schema) do
case Validator.validate(resolve(Draft4.schema()), schema, error_formatter: false) do
{:error, errors} ->
raise InvalidSchemaError,
message: "schema did not pass validation against its meta-schema: #{inspect(errors)}"
_ ->
nil
end
end
end
defp supported_schema_version?(version) do
case version do
@current_draft_schema_url <> _ -> true
@draft4_schema_url <> _ -> true
_ -> false
end
end
defp resolve_with_root(root, schema, scope \\ "")
defp resolve_with_root(root, schema = %{"id" => id}, scope) when is_binary(id) do
scope =
case URI.parse(scope) do
%URI{host: nil} -> id
uri -> uri |> URI.merge(id) |> to_string()
end
do_resolve(root, schema, scope)
end
defp resolve_with_root(root, schema = %{}, scope), do: do_resolve(root, schema, scope)
defp resolve_with_root(root, non_schema, _scope), do: {root, non_schema}
defp do_resolve(root, schema, scope) do
{root, schema} =
Enum.reduce(schema, {root, %{}}, fn property, {root, schema} ->
{root, {k, v}} = resolve_property(root, property, scope)
{root, Map.put(schema, k, v)}
end)
{root, schema |> sanitize_properties_attribute |> sanitize_additional_items_attribute}
end
defp resolve_property(root, {key, value}, scope) when is_map(value) do
{root, resolved} = resolve_with_root(root, value, scope)
{root, {key, resolved}}
end
defp resolve_property(root, {key, values}, scope) when is_list(values) do
{root, values} =
Enum.reduce(values, {root, []}, fn value, {root, values} ->
{root, resolved} = resolve_with_root(root, value, scope)
{root, [resolved | values]}
end)
{root, {key, Enum.reverse(values)}}
end
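# A "$ref" is resolved against the current scope (when present), then looked
# up and cached so that the stored value is a resolved ref path rather than
# the original reference string.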
defp resolve_property(root, {"$ref", ref}, scope) do
scoped_ref =
case URI.parse(ref) do
# TODO: this special case is only needed until there is proper support for URL references
# that point to a local schema (via scope changes)
%URI{host: nil, path: nil} = uri ->
to_string(uri)
ref_uri ->
case URI.parse(scope) do
%URI{host: nil} -> ref
scope_uri -> URI.merge(scope_uri, ref_uri) |> to_string()
end
end
{root, path} = resolve_ref!(root, scoped_ref)
{root, {"$ref", path}}
end
defp resolve_property(root, tuple, _), do: {root, tuple}
defp resolve_ref(root, "#") do
{:ok, {root, [root.location]}}
end
defp resolve_ref(root, ref) do
[url | anchor] = String.split(ref, "#")
ref_path = validate_ref_path(anchor, ref)
{root, path} = root_and_path_for_url(root, ref_path, url)
case get_fragment(root, path) do
{:ok, _schema} -> {:ok, {root, path}}
error -> error
end
end
defp resolve_ref!(root, ref) do
case resolve_ref(root, ref) do
{:ok, result} -> result
{:error, :invalid_reference} -> raise_invalid_reference_error(ref)
end
end
defp validate_ref_path([], _), do: nil
defp validate_ref_path([""], _), do: nil
defp validate_ref_path([fragment = "/" <> _], _), do: fragment
defp validate_ref_path(_, ref), do: raise_invalid_reference_error(ref)
defp root_and_path_for_url(root, fragment, "") do
{root, [root.location | relative_path(fragment)]}
end
defp root_and_path_for_url(root, fragment, url) do
root = resolve_and_cache_remote_schema(root, url)
{root, [url | relative_path(fragment)]}
end
defp relative_path(nil), do: []
defp relative_path(fragment), do: relative_ref_path(fragment)
defp relative_ref_path(ref) do
["" | keys] = unescaped_ref_segments(ref)
Enum.map(keys, fn key ->
case key =~ ~r/^\d+$/ do
true ->
String.to_integer(key)
false ->
key
end
end)
end
defp resolve_and_cache_remote_schema(root, url) do
if root.refs[url], do: root, else: fetch_and_resolve_remote_schema(root, url)
end
defp fetch_and_resolve_remote_schema(root, url)
when url == @current_draft_schema_url or url == @draft4_schema_url do
resolve_remote_schema(root, url, Draft4.schema())
end
defp fetch_and_resolve_remote_schema(root, url) do
resolve_remote_schema(root, url, fetch_remote_schema(url))
end
defp resolve_remote_schema(root, url, remote_schema) do
root = root_with_ref(root, url, remote_schema)
resolved_root = resolve_root(%{root | schema: remote_schema, location: url})
root = %{root | refs: resolved_root.refs}
root_with_ref(root, url, resolved_root.schema)
end
defp root_with_ref(root, url, ref) do
%{root | refs: Map.put(root.refs, url, ref)}
end
defp fetch_remote_schema(url) do
case remote_schema_resolver() do
fun when is_function(fun) -> fun.(url)
{mod, fun_name} -> apply(mod, fun_name, [url])
end
end
defp remote_schema_resolver do
Application.get_env(:ex_json_schema, :remote_schema_resolver) ||
fn _url -> raise UndefinedRemoteSchemaResolverError end
end
defp sanitize_properties_attribute(schema) do
if needs_properties_attribute?(schema), do: Map.put(schema, "properties", %{}), else: schema
end
defp needs_properties_attribute?(schema) do
Enum.any?(~w(patternProperties additionalProperties), &Map.has_key?(schema, &1)) and
not Map.has_key?(schema, "properties")
end
defp sanitize_additional_items_attribute(schema) do
if needs_additional_items_attribute?(schema),
do: Map.put(schema, "additionalItems", true),
else: schema
end
defp needs_additional_items_attribute?(schema) do
Map.has_key?(schema, "items") and is_list(schema["items"]) and
not Map.has_key?(schema, "additionalItems")
end
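# Per JSON Pointer escaping (RFC 6901), "~0" encodes "~" and "~1" encodes "/";
# segments may additionally be URI-encoded.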
defp unescaped_ref_segments(ref) do
ref
|> String.split("/")
|> Enum.map(fn segment ->
segment
|> String.replace("~0", "~")
|> String.replace("~1", "/")
|> URI.decode()
end)
end
defp meta?(schema) do
String.starts_with?(Map.get(schema, "id", ""), @draft4_schema_url)
end
defp do_get_fragment(nil, _, _ref), do: {:error, :invalid_reference}
defp do_get_fragment(schema, [], _), do: {:ok, schema}
defp do_get_fragment(schema, [key | path], ref) when is_binary(key),
do: do_get_fragment(Map.get(schema, key), path, ref)
defp do_get_fragment(schema, [idx | path], ref) when is_integer(idx) do
try do
do_get_fragment(:lists.nth(idx + 1, schema), path, ref)
catch
:error, :function_clause -> {:error, :invalid_reference}
end
end
defp ref_to_string([:root | path]), do: ["#" | path] |> Enum.join("/")
defp ref_to_string([url | path]), do: [url <> "#" | path] |> Enum.join("/")
@spec raise_invalid_reference_error(any) :: no_return
def raise_invalid_reference_error(ref) when is_binary(ref),
do: raise(InvalidReferenceError, message: "invalid reference #{ref}")
def raise_invalid_reference_error(ref),
do: ref |> ref_to_string |> raise_invalid_reference_error
end
# file: lib/ex_json_schema/schema.ex
defmodule Logger.Backends.GelfAsync do
@moduledoc """
GELF Logger Backend Async
A logger backend that will generate Graylog Extended Log Format messages. The
current version only supports UDP messages. This module provides an asynchronous
way to send the messages, avoiding a bottleneck.
## Configuration
In the config.exs, add gelf_logger as a backend like this:
```
config :logger,
backends: [:console, {Logger.Backends.GelfAsync, :gelf_logger}]
```
In addition, you'll need to pass in some configuration items to the backend
itself:
```
config :logger, :gelf_logger,
host: "127.0.0.1",
port: 12201,
format: "$message",
application: "myapp",
compression: :gzip, # Defaults to :gzip, also accepts :zlib or :raw
metadata: [:request_id, :function, :module, :file, :line],
hostname: "hostname-override",
json_encoder: Poison,
tags: [
list: "of",
extra: "tags"
]
```
In addition, if you want to use your custom metadata formatter as a "callback",
you'll need to add the configuration entry below:
```
format: {Module, :function}
```
Please bear in mind that your formatting function MUST return a tuple in the
following format: `{level, message, timestamp, metadata}`
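For example, a minimal formatter (module and function names here are purely
illustrative) could tag the message and pass everything else through unchanged:
```
defmodule MyApp.GelfFormatter do
  def format(level, message, timestamp, metadata) do
    {level, ["[myapp] ", message], timestamp, metadata}
  end
end
```
configured as `format: {MyApp.GelfFormatter, :format}`.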
In addition to the backend configuration, you might want to check the
[Logger configuration](https://hexdocs.pm/logger/Logger.html) for other
options that might be important for your particular environment. In
particular, modifying the `:utc_log` setting might be necessary
depending on your server configuration.
This backend supports `metadata: :all`.
### Note on the JSON encoder:
Currently, the logger defaults to Poison but it can be switched out for any
module that has an encode!/1 function.
## Usage
Just use Logger as normal.
## Improvements
- [x] Tests
- [ ] TCP Support
- [x] Options for compression (none, zlib)
- [x] Send timestamp instead of relying on the Graylog server to set it
- [x] Find a better way of pulling the hostname
And probably many more. This is only out here because it might be useful to
someone in its current state. Pull requests are always welcome.
## Notes
Credit where credit is due, this would not exist without
[protofy/erl_graylog_sender](https://github.com/protofy/erl_graylog_sender).
"""
@behaviour :gen_event
def init({_module, name}) do
GelfLogger.Balancer.configure(name, [])
log_level = GelfLogger.Config.get_loglevel(name, [])
{:ok, %{name: name, level: log_level}}
end
def handle_call({:configure, options}, state) do
GelfLogger.Balancer.configure(state.name, options)
log_level = GelfLogger.Config.get_loglevel(state.name, options)
{:ok, :ok, %{state | level: log_level}}
end
def handle_event({_level, gl, _event}, state) when node(gl) != node() do
{:ok, state}
end
def handle_event({level, _gl, {Logger, msg, ts, md}}, %{level: min_level} = state) do
if is_nil(min_level) or Logger.compare_levels(level, min_level) != :lt do
GelfLogger.Balancer.cast(level, msg, ts, md)
end
{:ok, state}
end
def handle_event(:flush, state) do
{:ok, state}
end
defdelegate handle_info(message, state), to: Logger.Backends.Gelf
defdelegate code_change(old_vsn, state, extra), to: Logger.Backends.Gelf
defdelegate terminate(reason, state), to: Logger.Backends.Gelf
end
# file: lib/logger/backends/gelf_async.ex
defmodule AWS.DocDB do
@moduledoc """
Amazon DocumentDB API documentation
"""
@doc """
Adds metadata tags to an Amazon DocumentDB resource. You can use these tags
with cost allocation reporting to track costs that are associated with
Amazon DocumentDB resources. or in a `Condition` statement in an AWS
Identity and Access Management (IAM) policy for Amazon DocumentDB.
"""
def add_tags_to_resource(client, input, options \\ []) do
request(client, "AddTagsToResource", input, options)
end
@doc """
Applies a pending maintenance action to a resource (for example, to a DB
instance).
"""
def apply_pending_maintenance_action(client, input, options \\ []) do
request(client, "ApplyPendingMaintenanceAction", input, options)
end
@doc """
Copies the specified cluster parameter group.
"""
def copy_d_b_cluster_parameter_group(client, input, options \\ []) do
request(client, "CopyDBClusterParameterGroup", input, options)
end
@doc """
Copies a snapshot of a cluster.
To copy a cluster snapshot from a shared manual cluster snapshot,
`SourceDBClusterSnapshotIdentifier` must be the Amazon Resource Name (ARN)
of the shared cluster snapshot.
To cancel the copy operation after it is in progress, delete the target
cluster snapshot identified by `TargetDBClusterSnapshotIdentifier` while
that DB cluster snapshot is in the *copying* status.
"""
def copy_d_b_cluster_snapshot(client, input, options \\ []) do
request(client, "CopyDBClusterSnapshot", input, options)
end
@doc """
Creates a new Amazon DocumentDB cluster.
"""
def create_d_b_cluster(client, input, options \\ []) do
request(client, "CreateDBCluster", input, options)
end
@doc """
Creates a new cluster parameter group.
Parameters in a cluster parameter group apply to all of the instances in a
DB cluster.
A cluster parameter group is initially created with the default parameters
for the database engine used by instances in the cluster. To provide custom
values for any of the parameters, you must modify the group after you
create it. After you create a DB cluster parameter group, you must
associate it with your cluster. For the new DB cluster parameter group and
associated settings to take effect, you must then reboot the instances in
the cluster without failover.
<important> After you create a cluster parameter group, you should wait at
least 5 minutes before creating your first cluster that uses that cluster
parameter group as the default parameter group. This allows Amazon
DocumentDB to fully complete the create action before the cluster parameter
group is used as the default for a new cluster. This step is especially
important for parameters that are critical when creating the default
database for a cluster, such as the character set for the default database
defined by the `character_set_database` parameter.
</important>
"""
def create_d_b_cluster_parameter_group(client, input, options \\ []) do
request(client, "CreateDBClusterParameterGroup", input, options)
end
@doc """
Creates a snapshot of a cluster.
"""
def create_d_b_cluster_snapshot(client, input, options \\ []) do
request(client, "CreateDBClusterSnapshot", input, options)
end
@doc """
Creates a new instance.
"""
def create_d_b_instance(client, input, options \\ []) do
request(client, "CreateDBInstance", input, options)
end
@doc """
Creates a new subnet group. Subnet groups must contain at least one subnet
in at least two Availability Zones in the AWS Region.
"""
def create_d_b_subnet_group(client, input, options \\ []) do
request(client, "CreateDBSubnetGroup", input, options)
end
@doc """
Deletes a previously provisioned cluster. When you delete a cluster, all
automated backups for that cluster are deleted and can't be recovered.
Manual DB cluster snapshots of the specified cluster are not deleted.
"""
def delete_d_b_cluster(client, input, options \\ []) do
request(client, "DeleteDBCluster", input, options)
end
@doc """
Deletes a specified cluster parameter group. The cluster parameter group to
be deleted can't be associated with any clusters.
"""
def delete_d_b_cluster_parameter_group(client, input, options \\ []) do
request(client, "DeleteDBClusterParameterGroup", input, options)
end
@doc """
Deletes a cluster snapshot. If the snapshot is being copied, the copy
operation is terminated.
<note> The cluster snapshot must be in the `available` state to be deleted.
</note>
"""
def delete_d_b_cluster_snapshot(client, input, options \\ []) do
request(client, "DeleteDBClusterSnapshot", input, options)
end
@doc """
Deletes a previously provisioned instance.
"""
def delete_d_b_instance(client, input, options \\ []) do
request(client, "DeleteDBInstance", input, options)
end
@doc """
Deletes a subnet group.
<note> The specified database subnet group must not be associated with any
DB instances.
</note>
"""
def delete_d_b_subnet_group(client, input, options \\ []) do
request(client, "DeleteDBSubnetGroup", input, options)
end
@doc """
Returns a list of certificate authority (CA) certificates provided by
Amazon DocumentDB for this AWS account.
"""
def describe_certificates(client, input, options \\ []) do
request(client, "DescribeCertificates", input, options)
end
@doc """
Returns a list of `DBClusterParameterGroup` descriptions. If a
`DBClusterParameterGroupName` parameter is specified, the list contains
only the description of the specified cluster parameter group.
"""
def describe_d_b_cluster_parameter_groups(client, input, options \\ []) do
request(client, "DescribeDBClusterParameterGroups", input, options)
end
@doc """
Returns the detailed parameter list for a particular cluster parameter
group.
"""
def describe_d_b_cluster_parameters(client, input, options \\ []) do
request(client, "DescribeDBClusterParameters", input, options)
end
@doc """
Returns a list of cluster snapshot attribute names and values for a manual
DB cluster snapshot.
When you share snapshots with other AWS accounts,
`DescribeDBClusterSnapshotAttributes` returns the `restore` attribute and a
list of IDs for the AWS accounts that are authorized to copy or restore the
manual cluster snapshot. If `all` is included in the list of values for the
`restore` attribute, then the manual cluster snapshot is public and can be
copied or restored by all AWS accounts.
"""
def describe_d_b_cluster_snapshot_attributes(client, input, options \\ []) do
request(client, "DescribeDBClusterSnapshotAttributes", input, options)
end
@doc """
Returns information about cluster snapshots. This API operation supports
pagination.
"""
def describe_d_b_cluster_snapshots(client, input, options \\ []) do
request(client, "DescribeDBClusterSnapshots", input, options)
end
@doc """
Returns information about provisioned Amazon DocumentDB clusters. This API
operation supports pagination. For certain management features such as
cluster and instance lifecycle management, Amazon DocumentDB leverages
operational technology that is shared with Amazon RDS and Amazon Neptune.
Use the `filterName=engine,Values=docdb` filter parameter to return only
Amazon DocumentDB clusters.
"""
def describe_d_b_clusters(client, input, options \\ []) do
request(client, "DescribeDBClusters", input, options)
end
@doc """
Returns a list of the available engines.
"""
def describe_d_b_engine_versions(client, input, options \\ []) do
request(client, "DescribeDBEngineVersions", input, options)
end
@doc """
Returns information about provisioned Amazon DocumentDB instances. This API
supports pagination.
"""
def describe_d_b_instances(client, input, options \\ []) do
request(client, "DescribeDBInstances", input, options)
end
@doc """
Returns a list of `DBSubnetGroup` descriptions. If a `DBSubnetGroupName` is
specified, the list will contain only the descriptions of the specified
`DBSubnetGroup`.
"""
def describe_d_b_subnet_groups(client, input, options \\ []) do
request(client, "DescribeDBSubnetGroups", input, options)
end
@doc """
Returns the default engine and system parameter information for the cluster
database engine.
"""
def describe_engine_default_cluster_parameters(client, input, options \\ []) do
request(client, "DescribeEngineDefaultClusterParameters", input, options)
end
@doc """
Displays a list of categories for all event source types, or, if specified,
for a specified source type.
"""
def describe_event_categories(client, input, options \\ []) do
request(client, "DescribeEventCategories", input, options)
end
@doc """
Returns events related to instances, security groups, snapshots, and DB
parameter groups for the past 14 days. You can obtain events specific to a
particular DB instance, security group, snapshot, or parameter group by
providing the name as a parameter. By default, the events of the past hour
are returned.
"""
def describe_events(client, input, options \\ []) do
request(client, "DescribeEvents", input, options)
end
@doc """
Returns a list of orderable instance options for the specified engine.
"""
def describe_orderable_d_b_instance_options(client, input, options \\ []) do
request(client, "DescribeOrderableDBInstanceOptions", input, options)
end
@doc """
Returns a list of resources (for example, instances) that have at least one
pending maintenance action.
"""
def describe_pending_maintenance_actions(client, input, options \\ []) do
request(client, "DescribePendingMaintenanceActions", input, options)
end
@doc """
Forces a failover for a cluster.
A failover for a cluster promotes one of the Amazon DocumentDB replicas
(read-only instances) in the cluster to be the primary instance (the
cluster writer).
If the primary instance fails, Amazon DocumentDB automatically fails over
to an Amazon DocumentDB replica, if one exists. You can force a failover
when you want to simulate a failure of a primary instance for testing.
"""
def failover_d_b_cluster(client, input, options \\ []) do
request(client, "FailoverDBCluster", input, options)
end
@doc """
Lists all tags on an Amazon DocumentDB resource.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Modifies a setting for an Amazon DocumentDB cluster. You can change one or
more database configuration parameters by specifying these parameters and
the new values in the request.
"""
def modify_d_b_cluster(client, input, options \\ []) do
request(client, "ModifyDBCluster", input, options)
end
@doc """
Modifies the parameters of a cluster parameter group. To modify more than
one parameter, submit a list of the following: `ParameterName`,
`ParameterValue`, and `ApplyMethod`. A maximum of 20 parameters can be
modified in a single request.
<note> Changes to dynamic parameters are applied immediately. Changes to
static parameters require a reboot or maintenance window before the change
can take effect.
</note> <important> After you create a cluster parameter group, you should
wait at least 5 minutes before creating your first cluster that uses that
cluster parameter group as the default parameter group. This allows Amazon
DocumentDB to fully complete the create action before the parameter group
is used as the default for a new cluster. This step is especially important
for parameters that are critical when creating the default database for a
cluster, such as the character set for the default database defined by the
`character_set_database` parameter.
</important>
"""
def modify_d_b_cluster_parameter_group(client, input, options \\ []) do
request(client, "ModifyDBClusterParameterGroup", input, options)
end
@doc """
Adds an attribute and values to, or removes an attribute and values from, a
manual DB cluster snapshot.
To share a manual cluster snapshot with other AWS accounts, specify
`restore` as the `AttributeName`, and use the `ValuesToAdd` parameter to
add a list of IDs of the AWS accounts that are authorized to restore the
manual cluster snapshot. Use the value `all` to make the manual cluster
snapshot public, which means that it can be copied or restored by all AWS
accounts. Do not add the `all` value for any manual DB cluster snapshots
that contain private information that you don't want available to all AWS
accounts. If a manual cluster snapshot is encrypted, it can be shared, but
only by specifying a list of authorized AWS account IDs for the
`ValuesToAdd` parameter. You can't use `all` as a value for that parameter
in this case.
"""
def modify_d_b_cluster_snapshot_attribute(client, input, options \\ []) do
request(client, "ModifyDBClusterSnapshotAttribute", input, options)
end
@doc """
Modifies settings for an instance. You can change one or more database
configuration parameters by specifying these parameters and the new values
in the request.
"""
def modify_d_b_instance(client, input, options \\ []) do
request(client, "ModifyDBInstance", input, options)
end
@doc """
Modifies an existing subnet group. Subnet groups must contain at least one
subnet in at least two Availability Zones in the AWS Region.
"""
def modify_d_b_subnet_group(client, input, options \\ []) do
request(client, "ModifyDBSubnetGroup", input, options)
end
@doc """
You might need to reboot your instance, usually for maintenance reasons.
For example, if you make certain changes, or if you change the cluster
parameter group that is associated with the instance, you must reboot the
instance for the changes to take effect.
Rebooting an instance restarts the database engine service. Rebooting an
instance results in a momentary outage, during which the instance status is
set to *rebooting*.
"""
def reboot_d_b_instance(client, input, options \\ []) do
request(client, "RebootDBInstance", input, options)
end
@doc """
Removes metadata tags from an Amazon DocumentDB resource.
"""
def remove_tags_from_resource(client, input, options \\ []) do
request(client, "RemoveTagsFromResource", input, options)
end
@doc """
Modifies the parameters of a cluster parameter group to the default value.
To reset specific parameters, submit a list of the following:
`ParameterName` and `ApplyMethod`. To reset the entire cluster parameter
group, specify the `DBClusterParameterGroupName` and `ResetAllParameters`
parameters.
When you reset the entire group, dynamic parameters are updated immediately
and static parameters are set to `pending-reboot` to take effect on the
next DB instance reboot.
"""
def reset_d_b_cluster_parameter_group(client, input, options \\ []) do
request(client, "ResetDBClusterParameterGroup", input, options)
end
@doc """
Creates a new cluster from a snapshot or cluster snapshot.
If a snapshot is specified, the target cluster is created from the source
DB snapshot with a default configuration and default security group.
If a cluster snapshot is specified, the target cluster is created from the
source cluster restore point with the same configuration as the original
source DB cluster, except that the new cluster is created with the default
security group.
"""
def restore_d_b_cluster_from_snapshot(client, input, options \\ []) do
request(client, "RestoreDBClusterFromSnapshot", input, options)
end
@doc """
Restores a cluster to an arbitrary point in time. Users can restore to any
point in time before `LatestRestorableTime` for up to
`BackupRetentionPeriod` days. The target cluster is created from the source
cluster with the same configuration as the original cluster, except that
the new cluster is created with the default security group.
"""
def restore_d_b_cluster_to_point_in_time(client, input, options \\ []) do
request(client, "RestoreDBClusterToPointInTime", input, options)
end
@doc """
Restarts the stopped cluster that is specified by `DBClusterIdentifier`.
For more information, see [Stopping and Starting an Amazon DocumentDB
Cluster](https://docs.aws.amazon.com/documentdb/latest/developerguide/db-cluster-stop-start.html).
"""
def start_d_b_cluster(client, input, options \\ []) do
request(client, "StartDBCluster", input, options)
end
@doc """
Stops the running cluster that is specified by `DBClusterIdentifier`. The
cluster must be in the *available* state. For more information, see
[Stopping and Starting an Amazon DocumentDB
Cluster](https://docs.aws.amazon.com/documentdb/latest/developerguide/db-cluster-stop-start.html).
"""
def stop_d_b_cluster(client, input, options \\ []) do
request(client, "StopDBCluster", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, Poison.Parser.t() | nil, Poison.Response.t()}
| {:error, Poison.Parser.t()}
| {:error, HTTPoison.Error.t()}
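# Amazon DocumentDB shares its control-plane API with Amazon RDS, so requests
# are signed for the "rds" service and use the 2014-10-31 API version.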
defp request(client, action, input, options) do
client = %{client | service: "rds"}
host = build_host("rds", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-www-form-urlencoded"}
]
input = Map.merge(input, %{"Action" => action, "Version" => "2014-10-31"})
payload = AWS.Util.encode_query(input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
{:ok, nil, response}
{:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
{:ok, AWS.Util.decode_xml(body), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = AWS.Util.decode_xml(body)
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
# file: lib/aws/doc_db.ex
defmodule AlphaVantage.TechnicalIndicators do
@moduledoc """
A set of functions for fetching technical indicators from [Alpha Vantage](www.alphavantage.co/documentation/#technical-indicators).
"""
alias AlphaVantage.Gateway
@doc """
Returns the simple moving average (SMA) values for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#sma for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec sma(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def sma(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "SMA",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
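# Usage sketch (hypothetical symbol; assumes `AlphaVantage.query/1` is configured
# with a valid API key; the success shape follows `Gateway.response/0`):
#
#   AlphaVantage.TechnicalIndicators.sma("MSFT", "daily", 200, "close")
#
#   # Optional parameters are merged into the query as a keyword list:
#   AlphaVantage.TechnicalIndicators.sma("MSFT", "weekly", 60, "open", datatype: "csv")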
@doc """
Returns the exponential moving average (EMA) values for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#ema for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec ema(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def ema(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "EMA",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the weighted moving average (WMA) values for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#wma for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec wma(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def wma(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "WMA",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the double exponential moving average (DEMA) values for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#dema for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec dema(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def dema(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "DEMA",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the triple exponential moving average (TEMA) values for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#tema for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec tema(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def tema(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "TEMA",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the triangular moving average (TRIMA) values for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#trima for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec trima(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def trima(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "TRIMA",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the Kaufman adaptive moving average (KAMA) values for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#kama for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec kama(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def kama(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "KAMA",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the MESA adaptive moving average (MAMA) values for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#mama for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec mama(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def mama(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "MAMA",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the volume weighted average price (VWAP) for intraday time series given a symbol and interval.
Please reference https://www.alphavantage.co/documentation/#vwap for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec vwap(String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def vwap(symbol, interval, opts \\ []) do
params = [
function: "VWAP",
symbol: symbol,
interval: interval
]
AlphaVantage.query(Keyword.merge(params, opts))
end
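# VWAP is intraday-only, so only the minute intervals listed above apply.
# Sketch (hypothetical symbol; assumes a configured API key):
#
#   AlphaVantage.TechnicalIndicators.vwap("MSFT", "15min")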
@doc """
Returns the triple exponential moving average (T3) values for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#t3 for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec t3(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def t3(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "T3",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the moving average convergence / divergence (MACD) values for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#macd for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:fastperiod`
Positive integers are accepted. By default, `fastperiod: 12`.
- `:slowperiod`
Positive integers are accepted. By default, `slowperiod: 26`.
- `:signalperiod`
Positive integers are accepted. By default, `signalperiod: 9`.
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec macd(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def macd(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "MACD",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
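# Sketch with the optional fast/slow/signal periods overridden (hypothetical
# values; assumes a configured API key):
#
#   AlphaVantage.TechnicalIndicators.macd("MSFT", "daily", 60, "close",
#     fastperiod: 10, slowperiod: 21, signalperiod: 5)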
@doc """
Returns the moving average convergence / divergence values with controllable moving average type (MACDEXT) for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#macdext for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:fastperiod`
Positive integers are accepted. By default, `fastperiod: 12`.
- `:slowperiod`
Positive integers are accepted. By default, `slowperiod: 26`.
- `:signalperiod`
Positive integers are accepted. By default, `signalperiod: 9`.
- `:fastmatype`
Moving average type for the faster moving average.
By default, `fastmatype: 0`. Integers 0 - 8 are accepted with the following mappings. 0 = Simple Moving Average (SMA), 1 = Exponential Moving Average (EMA), 2 = Weighted Moving Average (WMA), 3 = Double Exponential Moving Average (DEMA), 4 = Triple Exponential Moving Average (TEMA), 5 = Triangular Moving Average (TRIMA), 6 = T3 Moving Average, 7 = Kaufman Adaptive Moving Average (KAMA), 8 = MESA Adaptive Moving Average (MAMA).
- `:slowmatype`
Moving average type for the slower moving average.
By default, `slowmatype: 0`. Integers 0 - 8 are accepted with the following mappings. 0 = Simple Moving Average (SMA), 1 = Exponential Moving Average (EMA), 2 = Weighted Moving Average (WMA), 3 = Double Exponential Moving Average (DEMA), 4 = Triple Exponential Moving Average (TEMA), 5 = Triangular Moving Average (TRIMA), 6 = T3 Moving Average, 7 = Kaufman Adaptive Moving Average (KAMA), 8 = MESA Adaptive Moving Average (MAMA).
- `:signalmatype`
Moving average type for the signal moving average.
By default, `signalmatype: 0`. Integers 0 - 8 are accepted with the following mappings. 0 = Simple Moving Average (SMA), 1 = Exponential Moving Average (EMA), 2 = Weighted Moving Average (WMA), 3 = Double Exponential Moving Average (DEMA), 4 = Triple Exponential Moving Average (TEMA), 5 = Triangular Moving Average (TRIMA), 6 = T3 Moving Average, 7 = Kaufman Adaptive Moving Average (KAMA), 8 = MESA Adaptive Moving Average (MAMA).
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec macdext(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def macdext(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "MACDEXT",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
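# Sketch selecting EMA (1) for the fast and slow averages and SMA (0) for the
# signal line, per the matype mapping above (hypothetical values; assumes a
# configured API key):
#
#   AlphaVantage.TechnicalIndicators.macdext("MSFT", "daily", 60, "close",
#     fastmatype: 1, slowmatype: 1, signalmatype: 0)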
@doc """
Returns the stochastic oscillator (STOCH) values for a given symbol and interval.
Please reference https://www.alphavantage.co/documentation/#stoch for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
_Optional_ (accepted as a keyword list)
- `:fastkperiod`
The time period of the fastk moving average. Positive integers are accepted. By default, `fastkperiod: 5`.
- `:slowkperiod`
The time period of the slowk moving average. Positive integers are accepted. By default, `slowkperiod: 3`.
- `:slowdperiod`
The time period of the slowd moving average. Positive integers are accepted. By default, `slowdperiod: 3`.
- `:slowkmatype`
Moving average type for the slowk moving average.
By default, `slowkmatype: 0`. Integers 0 - 8 are accepted with the following mappings. 0 = Simple Moving Average (SMA), 1 = Exponential Moving Average (EMA), 2 = Weighted Moving Average (WMA), 3 = Double Exponential Moving Average (DEMA), 4 = Triple Exponential Moving Average (TEMA), 5 = Triangular Moving Average (TRIMA), 6 = T3 Moving Average, 7 = Kaufman Adaptive Moving Average (KAMA), 8 = MESA Adaptive Moving Average (MAMA).
- `:slowdmatype`
Moving average type for the slowd moving average.
By default, `slowdmatype: 0`. Integers 0 - 8 are accepted with the following mappings. 0 = Simple Moving Average (SMA), 1 = Exponential Moving Average (EMA), 2 = Weighted Moving Average (WMA), 3 = Double Exponential Moving Average (DEMA), 4 = Triple Exponential Moving Average (TEMA), 5 = Triangular Moving Average (TRIMA), 6 = T3 Moving Average, 7 = Kaufman Adaptive Moving Average (KAMA), 8 = MESA Adaptive Moving Average (MAMA).
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec stoch(String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def stoch(symbol, interval, opts \\ []) do
params = [
function: "STOCH",
symbol: symbol,
interval: interval
]
AlphaVantage.query(Keyword.merge(params, opts))
end
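# Sketch overriding the slowk/slowd settings (hypothetical values; assumes a
# configured API key):
#
#   AlphaVantage.TechnicalIndicators.stoch("MSFT", "daily",
#     fastkperiod: 14, slowkperiod: 3, slowdperiod: 3, slowkmatype: 1)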
@doc """
Returns the stochastic fast (STOCHF) values for a given symbol and interval.
Please reference https://www.alphavantage.co/documentation/#stochf for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
_Optional_ (accepted as a keyword list)
- `:fastkperiod`
The time period of the fastk moving average. Positive integers are accepted. By default, `fastkperiod: 5`.
- `:fastdperiod`
The time period of the fastd moving average. Positive integers are accepted. By default, `fastdperiod: 3`.
- `:fastdmatype`
Moving average type for the fastd moving average.
By default, `fastdmatype: 0`. Integers 0 - 8 are accepted with the following mappings. 0 = Simple Moving Average (SMA), 1 = Exponential Moving Average (EMA), 2 = Weighted Moving Average (WMA), 3 = Double Exponential Moving Average (DEMA), 4 = Triple Exponential Moving Average (TEMA), 5 = Triangular Moving Average (TRIMA), 6 = T3 Moving Average, 7 = Kaufman Adaptive Moving Average (KAMA), 8 = MESA Adaptive Moving Average (MAMA).
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec stochf(String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def stochf(symbol, interval, opts \\ []) do
params = [
function: "STOCHF",
symbol: symbol,
interval: interval
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the relative strength index (RSI) values for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#rsi for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec rsi(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def rsi(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "RSI",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
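# Sketch requesting CSV output via the `:datatype` option (hypothetical symbol;
# assumes a configured API key; the exact success shape depends on `Gateway.response/0`):
#
#   AlphaVantage.TechnicalIndicators.rsi("MSFT", "daily", 14, "close", datatype: "csv")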
@doc """
Returns the stochastic relative strength index (STOCHRSI) values for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#stochrsi for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:fastkperiod`
The time period of the fastk moving average. Positive integers are accepted. By default, `fastkperiod: 5`.
- `:fastdperiod`
The time period of the fastd moving average. Positive integers are accepted. By default, `fastdperiod: 3`.
- `:fastdmatype`
Moving average type for the fastd moving average.
By default, `fastdmatype: 0`. Integers 0 - 8 are accepted with the following mappings. 0 = Simple Moving Average (SMA), 1 = Exponential Moving Average (EMA), 2 = Weighted Moving Average (WMA), 3 = Double Exponential Moving Average (DEMA), 4 = Triple Exponential Moving Average (TEMA), 5 = Triangular Moving Average (TRIMA), 6 = T3 Moving Average, 7 = Kaufman Adaptive Moving Average (KAMA), 8 = MESA Adaptive Moving Average (MAMA).
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec stochrsi(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def stochrsi(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "STOCHRSI",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the Williams' %R (WILLR) values for a given symbol, interval, and time period.
Please reference https://www.alphavantage.co/documentation/#willr for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec willr(String.t(), String.t(), integer, Keyword.t()) ::
Gateway.response()
def willr(symbol, interval, time_period, opts \\ []) do
params = [
function: "WILLR",
symbol: symbol,
interval: interval,
time_period: time_period
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the average directional movement index (ADX) values for a given symbol, interval, and time period.
Please reference https://www.alphavantage.co/documentation/#adx for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec adx(String.t(), String.t(), integer, Keyword.t()) ::
Gateway.response()
def adx(symbol, interval, time_period, opts \\ []) do
params = [
function: "ADX",
symbol: symbol,
interval: interval,
time_period: time_period
]
AlphaVantage.query(Keyword.merge(params, opts))
end
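# Sketch of the symbol/interval/time_period signature shared by ADX and the
# other indicators that omit `:series_type` (hypothetical values; assumes a
# configured API key):
#
#   AlphaVantage.TechnicalIndicators.adx("MSFT", "daily", 14)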
@doc """
Returns the average directional movement index rating (ADXR) values for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#adxr for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec adxr(String.t(), String.t(), integer, Keyword.t()) ::
Gateway.response()
def adxr(symbol, interval, time_period, opts \\ []) do
params = [
function: "ADXR",
symbol: symbol,
interval: interval,
time_period: time_period
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the absolute price oscillator (APO) values for a given symbol, interval, and series type.
Please reference https://www.alphavantage.co/documentation/#apo for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:fastperiod`
Positive integers are accepted. By default, `fastperiod: 12`.
- `:slowperiod`
Positive integers are accepted. By default, `slowperiod: 26`.
- `:matype`
Moving average type.
By default, `matype: 0`. Integers 0 - 8 are accepted with the following mappings. 0 = Simple Moving Average (SMA), 1 = Exponential Moving Average (EMA), 2 = Weighted Moving Average (WMA), 3 = Double Exponential Moving Average (DEMA), 4 = Triple Exponential Moving Average (TEMA), 5 = Triangular Moving Average (TRIMA), 6 = T3 Moving Average, 7 = Kaufman Adaptive Moving Average (KAMA), 8 = MESA Adaptive Moving Average (MAMA).
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec apo(String.t(), String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def apo(symbol, interval, series_type, opts \\ []) do
params = [
function: "APO",
symbol: symbol,
interval: interval,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the percentage price oscillator (PPO) values for a given symbol, interval, and series type.
Please reference https://www.alphavantage.co/documentation/#ppo for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:fastperiod`
Positive integers are accepted. By default, `fastperiod: 12`.
- `:slowperiod`
Positive integers are accepted. By default, `slowperiod: 26`.
- `:matype`
Moving average type.
By default, `matype: 0`. Integers 0 - 8 are accepted with the following mappings. 0 = Simple Moving Average (SMA), 1 = Exponential Moving Average (EMA), 2 = Weighted Moving Average (WMA), 3 = Double Exponential Moving Average (DEMA), 4 = Triple Exponential Moving Average (TEMA), 5 = Triangular Moving Average (TRIMA), 6 = T3 Moving Average, 7 = Kaufman Adaptive Moving Average (KAMA), 8 = MESA Adaptive Moving Average (MAMA).
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec ppo(String.t(), String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def ppo(symbol, interval, series_type, opts \\ []) do
params = [
function: "PPO",
symbol: symbol,
interval: interval,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the momentum (MOM) values for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#mom for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec mom(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def mom(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "MOM",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the balance of power (BOP) values for a given symbol and interval.
Please reference https://www.alphavantage.co/documentation/#bop for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec bop(String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def bop(symbol, interval, opts \\ []) do
params = [
function: "BOP",
symbol: symbol,
interval: interval
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the commodity channel index (CCI) values for a given symbol, interval, and time period.
Please reference https://www.alphavantage.co/documentation/#cci for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec cci(String.t(), String.t(), integer, Keyword.t()) ::
Gateway.response()
def cci(symbol, interval, time_period, opts \\ []) do
params = [
function: "CCI",
symbol: symbol,
interval: interval,
time_period: time_period
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the Chande momentum oscillator (CMO) values for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#cmo for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec cmo(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def cmo(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "CMO",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the rate of change (ROC) values for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#roc for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec roc(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def roc(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "ROC",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the rate of change ratio (ROCR) values for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#rocr for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec rocr(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def rocr(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "ROCR",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the Aroon (AROON) values for a given symbol, interval, and time period.
Please reference https://www.alphavantage.co/documentation/#aroon for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec aroon(String.t(), String.t(), integer, Keyword.t()) ::
Gateway.response()
def aroon(symbol, interval, time_period, opts \\ []) do
params = [
function: "AROON",
symbol: symbol,
interval: interval,
time_period: time_period
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the Aroon oscillator (AROONOSC) values for a given symbol, interval, and time period.
Please reference https://www.alphavantage.co/documentation/#aroonosc for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec aroonosc(String.t(), String.t(), integer, Keyword.t()) ::
Gateway.response()
def aroonosc(symbol, interval, time_period, opts \\ []) do
params = [
function: "AROONOSC",
symbol: symbol,
interval: interval,
time_period: time_period
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the money flow index (MFI) values for a given symbol, interval, and time period.
Please reference https://www.alphavantage.co/documentation/#mfi for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec mfi(String.t(), String.t(), integer, Keyword.t()) ::
Gateway.response()
def mfi(symbol, interval, time_period, opts \\ []) do
params = [
function: "MFI",
symbol: symbol,
interval: interval,
time_period: time_period
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the 1-day rate of change of a triple smooth exponential moving average (TRIX) for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#trix for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec trix(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def trix(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "TRIX",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the ultimate oscillator (ULTOSC) values for a given symbol and interval.
Please reference https://www.alphavantage.co/documentation/#ultosc for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
_Optional_ (accepted as a keyword list)
- `:timeperiod1`
The first time period for the indicator.
Positive integers are accepted. By default, `timeperiod1: 7`.
- `:timeperiod2`
The second time period for the indicator.
Positive integers are accepted. By default, `timeperiod2: 14`.
- `:timeperiod3`
The third time period for the indicator.
Positive integers are accepted. By default, `timeperiod3: 28`.
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec ultosc(String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def ultosc(symbol, interval, opts \\ []) do
params = [
function: "ULTOSC",
symbol: symbol,
interval: interval
]
AlphaVantage.query(Keyword.merge(params, opts))
end
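# Sketch overriding all three time periods (hypothetical values; assumes a
# configured API key):
#
#   AlphaVantage.TechnicalIndicators.ultosc("MSFT", "daily",
#     timeperiod1: 7, timeperiod2: 14, timeperiod3: 28)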
@doc """
Returns the directional movement index (DX) values for a given symbol, interval, and time period.
Please reference https://www.alphavantage.co/documentation/#dx for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec dx(String.t(), String.t(), integer, Keyword.t()) ::
Gateway.response()
def dx(symbol, interval, time_period, opts \\ []) do
params = [
function: "DX",
symbol: symbol,
interval: interval,
time_period: time_period
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the minus directional indicator (MINUS_DI) values for a given symbol, interval, and time period.
Please reference https://www.alphavantage.co/documentation/#minusdi for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec minus_di(String.t(), String.t(), integer, Keyword.t()) ::
Gateway.response()
def minus_di(symbol, interval, time_period, opts \\ []) do
params = [
function: "MINUS_DI",
symbol: symbol,
interval: interval,
time_period: time_period
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the plus directional indicator (PLUS_DI) values for a given symbol, interval, and time period.
Please reference https://www.alphavantage.co/documentation/#plusdi for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec plus_di(String.t(), String.t(), integer, Keyword.t()) ::
Gateway.response()
def plus_di(symbol, interval, time_period, opts \\ []) do
params = [
function: "PLUS_DI",
symbol: symbol,
interval: interval,
time_period: time_period
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the minus directional movement (MINUS_DM) values for a given symbol, interval, and time period.
Please reference https://www.alphavantage.co/documentation/#minusdm for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec minus_dm(String.t(), String.t(), integer, Keyword.t()) ::
Gateway.response()
def minus_dm(symbol, interval, time_period, opts \\ []) do
params = [
function: "MINUS_DM",
symbol: symbol,
interval: interval,
time_period: time_period
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the plus directional movement (PLUS_DM) values for a given symbol, interval, and time period.
Please reference https://www.alphavantage.co/documentation/#plusdm for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec plus_dm(String.t(), String.t(), integer, Keyword.t()) ::
Gateway.response()
def plus_dm(symbol, interval, time_period, opts \\ []) do
params = [
function: "PLUS_DM",
symbol: symbol,
interval: interval,
time_period: time_period
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the Bollinger bands (BBANDS) values for a given symbol, interval, time period, and series type.
Please reference https://www.alphavantage.co/documentation/#bbands for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:nbdevup`
The standard deviation multiplier of the upper band.
Positive integers are accepted. By default, `nbdevup: 2`.
- `:nbdevdn`
The standard deviation multiplier of the lower band.
Positive integers are accepted. By default, `nbdevdn: 2`.
- `:matype`
Moving average type of the time series.
By default, `matype: 0`. Integers 0 - 8 are accepted with the following mappings. 0 = Simple Moving Average (SMA), 1 = Exponential Moving Average (EMA), 2 = Weighted Moving Average (WMA), 3 = Double Exponential Moving Average (DEMA), 4 = Triple Exponential Moving Average (TEMA), 5 = Triangular Moving Average (TRIMA), 6 = T3 Moving Average, 7 = Kaufman Adaptive Moving Average (KAMA), 8 = MESA Adaptive Moving Average (MAMA).
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec bbands(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def bbands(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "BBANDS",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
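# Sketch widening the bands to 3 standard deviations over an EMA (`matype: 1`)
# (hypothetical values; assumes a configured API key):
#
#   AlphaVantage.TechnicalIndicators.bbands("MSFT", "daily", 20, "close",
#     nbdevup: 3, nbdevdn: 3, matype: 1)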
@doc """
Returns the midpoint (MIDPOINT) values for a given symbol, interval, time period, and series type.
MIDPOINT = (highest value + lowest value)/2.
Please reference https://www.alphavantage.co/documentation/#midpoint for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec midpoint(String.t(), String.t(), integer, String.t(), Keyword.t()) ::
Gateway.response()
def midpoint(symbol, interval, time_period, series_type, opts \\ []) do
params = [
function: "MIDPOINT",
symbol: symbol,
interval: interval,
time_period: time_period,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the midprice (MIDPRICE) values for a given symbol, interval, and time period.
MIDPRICE = (highest high + lowest low)/2.
Please reference https://www.alphavantage.co/documentation/#midprice for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
Positive integers are accepted: (e.g., `60`, `200`)
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec midprice(String.t(), String.t(), integer, Keyword.t()) ::
Gateway.response()
def midprice(symbol, interval, time_period, opts \\ []) do
params = [
function: "MIDPRICE",
symbol: symbol,
interval: interval,
time_period: time_period
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the parabolic SAR (SAR) values for a given symbol and interval.
Please reference https://www.alphavantage.co/documentation/#sar for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
_Optional_ (accepted as a keyword list)
- `:acceleration`
The acceleration factor.
Positive floats are accepted. By default, `acceleration: 0.01`.
- `:maximum`
The acceleration factor maximum value.
Positive floats are accepted. By default, `maximum: 0.20`.
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec sar(String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def sar(symbol, interval, opts \\ []) do
params = [
function: "SAR",
symbol: symbol,
interval: interval
]
AlphaVantage.query(Keyword.merge(params, opts))
end
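# Sketch tuning the acceleration factor (hypothetical values; assumes a
# configured API key):
#
#   AlphaVantage.TechnicalIndicators.sar("MSFT", "daily", acceleration: 0.02, maximum: 0.2)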
@doc """
Returns the true range (TRANGE) values for a given symbol and interval.
Please reference https://www.alphavantage.co/documentation/#trange for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec trange(String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def trange(symbol, interval, opts \\ []) do
params = [
function: "TRANGE",
symbol: symbol,
interval: interval
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the average true range (ATR) values for a given symbol, interval, and time period.
Please reference https://www.alphavantage.co/documentation/#atr for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
    Positive integers are accepted (e.g., `60`, `200`).
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec atr(String.t(), String.t(), integer, Keyword.t()) ::
Gateway.response()
def atr(symbol, interval, time_period, opts \\ []) do
params = [
function: "ATR",
symbol: symbol,
interval: interval,
time_period: time_period
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the normalized average true range (NATR) values for a given symbol, interval, and time period.
Please reference https://www.alphavantage.co/documentation/#natr for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:time_period`
Number of data points used to calculate each moving average value.
    Positive integers are accepted (e.g., `60`, `200`).
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec natr(String.t(), String.t(), integer, Keyword.t()) ::
Gateway.response()
def natr(symbol, interval, time_period, opts \\ []) do
params = [
function: "NATR",
symbol: symbol,
interval: interval,
time_period: time_period
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the Chaikin A/D line (AD) values for a given symbol and interval.
Please reference https://www.alphavantage.co/documentation/#ad for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec ad(String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def ad(symbol, interval, opts \\ []) do
params = [
function: "AD",
symbol: symbol,
interval: interval
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the Chaikin A/D line oscillator (ADOSC) values for a given symbol and interval.
Please reference https://www.alphavantage.co/documentation/#adosc for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
_Optional_ (accepted as a keyword list)
- `:fastperiod`
The time period of the fast EMA.
Positive integers are accepted. By default, `fastperiod: 3`.
- `:slowperiod`
The time period of the slow EMA.
Positive integers are accepted. By default, `slowperiod: 10`.
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec adosc(String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def adosc(symbol, interval, opts \\ []) do
params = [
function: "ADOSC",
symbol: symbol,
interval: interval
]
AlphaVantage.query(Keyword.merge(params, opts))
end
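  # Usage sketch (illustrative): `adosc("MSFT", "daily", fastperiod: 3, slowperiod: 10)`
  # forwards the EMA periods to the API unchanged via `Keyword.merge/2`.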
@doc """
Returns the on balance volume (OBV) values for a given symbol and interval.
Please reference https://www.alphavantage.co/documentation/#obv for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec obv(String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def obv(symbol, interval, opts \\ []) do
params = [
function: "OBV",
symbol: symbol,
interval: interval
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the Hilbert transform, instantaneous trendline (HT_TRENDLINE) values for a given symbol, interval, and series type.
Please reference https://www.alphavantage.co/documentation/#httrendline for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec ht_trendline(String.t(), String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def ht_trendline(symbol, interval, series_type, opts \\ []) do
params = [
function: "HT_TRENDLINE",
symbol: symbol,
interval: interval,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the Hilbert transform, sine wave (HT_SINE) values for a given symbol, interval, and series type.
Please reference https://www.alphavantage.co/documentation/#htsine for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec ht_sine(String.t(), String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def ht_sine(symbol, interval, series_type, opts \\ []) do
params = [
function: "HT_SINE",
symbol: symbol,
interval: interval,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the Hilbert transform, trend vs cycle mode (HT_TRENDMODE) values for a given symbol, interval, and series type.
Please reference https://www.alphavantage.co/documentation/#httrendmode for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec ht_trendmode(String.t(), String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def ht_trendmode(symbol, interval, series_type, opts \\ []) do
params = [
function: "HT_TRENDMODE",
symbol: symbol,
interval: interval,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the Hilbert transform, dominant cycle period (HT_DCPERIOD) values for a given symbol, interval, and series type.
  Please reference https://www.alphavantage.co/documentation/#htdcperiod for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec ht_dcperiod(String.t(), String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def ht_dcperiod(symbol, interval, series_type, opts \\ []) do
params = [
function: "HT_DCPERIOD",
symbol: symbol,
interval: interval,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the Hilbert transform, dominant cycle phase (HT_DCPHASE) values for a given symbol, interval, and series type.
  Please reference https://www.alphavantage.co/documentation/#htdcphase for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec ht_dcphase(String.t(), String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def ht_dcphase(symbol, interval, series_type, opts \\ []) do
params = [
function: "HT_DCPHASE",
symbol: symbol,
interval: interval,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
@doc """
Returns the Hilbert transform, phasor components (HT_PHASOR) values for a given symbol, interval, and series type.
  Please reference https://www.alphavantage.co/documentation/#htphasor for more detail.
## Parameters
**Required**
- `:symbol`
The name of the security of your choice, provided as a string.
For example: `"MSFT"`
- `:interval`
Time interval between two consecutive data points in the time series.
The following values are supported and accepted as strings: `"1min"`, `"5min"`, `"15min"`, `"30min"`, `"60min"`, `"daily"`, `"weekly"`, `"monthly"`
- `:series_type`
The desired price type in the time series.
Four types are supported and accepted as strings: `"close"`, `"open"`, `"high"`, `"low"`
_Optional_ (accepted as a keyword list)
- `:datatype`
- `"map"` returns a map (default);
- `"json"` returns JSON format;
- `"csv"` returns a CSV (comma separated value) file string.
"""
@spec ht_phasor(String.t(), String.t(), String.t(), Keyword.t()) ::
Gateway.response()
def ht_phasor(symbol, interval, series_type, opts \\ []) do
params = [
function: "HT_PHASOR",
symbol: symbol,
interval: interval,
series_type: series_type
]
AlphaVantage.query(Keyword.merge(params, opts))
end
end
|
lib/alpha_vantage/technical_indicators.ex
| 0.948834
| 0.940463
|
technical_indicators.ex
|
starcoder
|
defmodule Console.GraphQl.Observability do
use Console.GraphQl.Schema.Base
alias Console.GraphQl.Resolvers.Observability
alias Console.Middleware.{Authenticated, Rbac}
enum :autoscaling_target do
value :statefulset
value :deployment
end
input_object :label_input do
field :name, :string
field :value, :string
end
object :dashboard do
field :id, non_null(:string), resolve: fn %{metadata: %{name: n}}, _, _ -> {:ok, n} end
field :spec, non_null(:dashboard_spec)
end
object :dashboard_spec do
field :name, :string
field :description, :string
field :timeslices, list_of(:string)
field :labels, list_of(:dashboard_label)
field :graphs, list_of(:dashboard_graph)
end
object :dashboard_label do
field :name, non_null(:string)
field :values, list_of(:string)
end
object :dashboard_graph do
field :name, non_null(:string)
field :queries, list_of(:dashboard_metric)
field :format, :string
end
object :dashboard_metric do
field :legend, :string
field :query, :string
field :results, list_of(:metric_result)
end
object :metric_result do
field :timestamp, :integer, resolve: fn %{timestamp: ts}, _, _ -> {:ok, ceil(ts)} end
field :value, :string
end
object :metric_response do
field :metric, :map
field :values, list_of(:metric_result), resolve: fn %{values: vals}, _, _ ->
{:ok, Enum.map(vals, fn [ts, val] -> %{timestamp: ts, value: val} end)}
end
end
object :log_stream do
field :stream, :map
field :values, list_of(:metric_result)
end
object :observability_queries do
field :dashboards, list_of(:dashboard) do
middleware Authenticated
arg :repo, non_null(:string)
middleware Rbac, perm: :read, arg: :repo
safe_resolve &Observability.resolve_dashboards/2
end
field :dashboard, :dashboard do
middleware Authenticated
arg :repo, non_null(:string)
arg :name, non_null(:string)
arg :step, :string
arg :offset, :integer
arg :labels, list_of(:label_input)
middleware Rbac, perm: :read, arg: :repo
safe_resolve &Observability.resolve_dashboard/2
end
field :metric, list_of(:metric_response) do
middleware Authenticated
arg :query, non_null(:string)
arg :offset, :integer
arg :step, :string
safe_resolve &Observability.resolve_metric/2
end
field :logs, list_of(:log_stream) do
middleware Authenticated
arg :query, non_null(:string)
arg :start, :long
arg :end, :long
arg :limit, non_null(:integer)
safe_resolve &Observability.resolve_logs/2
end
field :scaling_recommendation, :vertical_pod_autoscaler do
middleware Authenticated
arg :kind, non_null(:autoscaling_target)
arg :namespace, non_null(:string)
arg :name, non_null(:string)
safe_resolve &Observability.resolve_scaling_recommendation/2
end
end
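  # Example query (hedged sketch) against the schema above; the field and argument names
  # come from the definitions in this module, while the selected sub-fields and the repo
  # value are illustrative:
  #
  #   query {
  #     dashboards(repo: "example-repo") {
  #       id
  #       spec { name description }
  #     }
  #   }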
end
|
lib/console/graphql/observability.ex
| 0.70028
| 0.417984
|
observability.ex
|
starcoder
|
defmodule Grizzly.ZWave.Commands.SupervisionReport do
@moduledoc """
This command is used to advertise the status of one or more command process(es).
Params:
* `:more_status_updates` - used to advertise if more Supervision Reports follow for the actual Session ID (required)
* `:session_id` - carries the same value as the Session ID field of the Supervision Get Command which
initiated this session (required)
* `:status` - the current status of the command process, one of :no_support, :working, :fail or :success (required)
* `:duration` - the time in seconds needed to complete the current operation (required)
"""
@behaviour Grizzly.ZWave.Command
alias Grizzly.ZWave.{Command, DecodeError}
alias Grizzly.ZWave.CommandClasses.Supervision
@type more_status_updates :: :last_report | :more_reports
@type status :: :no_support | :working | :fail | :success
  @type param ::
          {:more_status_updates, more_status_updates}
          | {:session_id, non_neg_integer()}
          | {:status, status}
          | {:duration, :unknown | non_neg_integer()}
@impl true
def new(params) do
command = %Command{
name: :supervision_report,
command_byte: 0x02,
command_class: Supervision,
params: params,
impl: __MODULE__
}
{:ok, command}
end
@impl true
def encode_params(command) do
more_status_updates_bit =
Command.param!(command, :more_status_updates) |> encode_more_status_updates()
status_byte = Command.param!(command, :status) |> encode_status()
duration_byte = Command.param!(command, :duration) |> encode_duration()
session_id = Command.param!(command, :session_id)
<<more_status_updates_bit::size(1), 0x00::size(1), session_id::size(6), status_byte,
duration_byte>>
end
@impl true
def decode_params(
<<more_status_updates_byte::size(1), _::size(1), session_id::size(6), status_byte,
duration_byte>>
) do
with {:ok, more_status_updates} <- decode_more_status_updates(more_status_updates_byte),
{:ok, status} <- decode_status(status_byte),
{:ok, duration} <- decode_duration(duration_byte) do
{:ok,
[
more_status_updates: more_status_updates,
session_id: session_id,
status: status,
duration: duration
]}
else
{:error, %DecodeError{} = error} ->
error
end
end
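  # Round-trip sketch (illustrative parameter values) using the functions defined in this
  # module:
  #
  #   {:ok, cmd} =
  #     Grizzly.ZWave.Commands.SupervisionReport.new(
  #       more_status_updates: :last_report,
  #       session_id: 1,
  #       status: :success,
  #       duration: 0
  #     )
  #
  #   bin = Grizzly.ZWave.Commands.SupervisionReport.encode_params(cmd)
  #   {:ok, _params} = Grizzly.ZWave.Commands.SupervisionReport.decode_params(bin)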
defp encode_more_status_updates(:last_report), do: 0x00
defp encode_more_status_updates(:more_reports), do: 0x01
defp decode_more_status_updates(0x00), do: {:ok, :last_report}
defp decode_more_status_updates(0x01), do: {:ok, :more_reports}
defp decode_status(0x00), do: {:ok, :no_support}
defp decode_status(0x01), do: {:ok, :working}
defp decode_status(0x02), do: {:ok, :fail}
defp decode_status(0xFF), do: {:ok, :success}
defp decode_status(byte),
do: {:error, %DecodeError{value: byte, param: :status, command: :supervision_report}}
defp encode_status(:no_support), do: 0x00
defp encode_status(:working), do: 0x01
defp encode_status(:fail), do: 0x02
defp encode_status(:success), do: 0xFF
defp encode_duration(secs) when secs in 0..127, do: secs
defp encode_duration(secs) when secs in 128..(126 * 60), do: round(secs / 60) + 0x7F
defp encode_duration(:unknown), do: 0xFE
defp decode_duration(byte) when byte in 0x00..0x7F, do: {:ok, byte}
defp decode_duration(byte) when byte in 0x80..0xFD, do: {:ok, (byte - 0x7F) * 60}
  defp decode_duration(0xFE), do: {:ok, :unknown}
defp decode_duration(byte),
do: {:error, %DecodeError{value: byte, param: :duration, command: :supervision_report}}
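  # Worked example of the duration mapping above: 180 seconds falls in 128..7560, so it
  # encodes to round(180 / 60) + 0x7F = 0x82, and 0x82 decodes back to (0x82 - 0x7F) * 60 = 180.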
end
|
lib/grizzly/zwave/commands/supervision_report.ex
| 0.858437
| 0.499329
|
supervision_report.ex
|
starcoder
|
defmodule CodeCorps.StripeConnectSubscription do
@moduledoc """
Represents a `Subscription` object created using the Stripe API
## Fields
* `application_fee_percent` - Percentage of fee taken by Code Corps
* `cancelled_at` - Timestamp of cancellation, provided the subscription has been cancelled
  * `created` - A timestamp indicating when the subscription was created by Stripe
* `current_period_end` - End date of the period the subscription has been last invoiced for
* `current_period_start` - Start date of the period the subscription was last invoiced
* `customer_id_from_stripe` - Stripe's `customer_id`
  * `ended_at` - End date, if the subscription has been ended (by cancelling or switching plans)
* `id_from_stripe` - Stripe's `id`
* `plan_id_from_stripe` - Stripe's plan `id`
  * `quantity` - Quantity of the plan to subscribe to. For example, we have a $0.01 plan, which we subscribe to in multiple quantities.
* `start` - Date the most recent update to this subscription started
* `status` - trialing, active, past_due, canceled, or unpaid
## Note on `status`
  Subscriptions start at `trialing` and then move on to `active` when the trial period is over.
When `active`, if payment fails, it will go into `past_due`.
Once enough retry attempts failures occur,
  it goes to either `canceled` or `unpaid`, depending on settings.
"""
use CodeCorps.Web, :model
@type t :: %__MODULE__{}
schema "stripe_connect_subscriptions" do
field :application_fee_percent, :decimal
field :cancelled_at, :integer
field :created, :integer
field :current_period_end, :integer
field :current_period_start, :integer
field :customer_id_from_stripe, :string
field :ended_at, :integer
field :id_from_stripe, :string, null: false
field :plan_id_from_stripe, :string, null: false
field :quantity, :integer
field :start, :integer
field :status, :string
belongs_to :stripe_connect_plan, CodeCorps.StripeConnectPlan
belongs_to :user, CodeCorps.User
has_one :project, through: [:stripe_connect_plan, :project]
timestamps()
end
@permitted_params [
:application_fee_percent, :cancelled_at, :created, :current_period_end,
:current_period_start, :customer_id_from_stripe, :ended_at,
:id_from_stripe, :plan_id_from_stripe, :quantity, :start, :status,
:stripe_connect_plan_id, :user_id
]
@required_params [
:application_fee_percent, :id_from_stripe, :plan_id_from_stripe,
:quantity, :stripe_connect_plan_id, :user_id
]
def create_changeset(struct, params \\ %{}) do
struct
|> cast(params, @permitted_params)
|> validate_required(@required_params)
|> unique_constraint(:id_from_stripe)
|> assoc_constraint(:stripe_connect_plan)
|> assoc_constraint(:user)
end
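  # Usage sketch (illustrative values, not real Stripe data):
  #
  #   %CodeCorps.StripeConnectSubscription{}
  #   |> CodeCorps.StripeConnectSubscription.create_changeset(%{
  #     application_fee_percent: 5,
  #     id_from_stripe: "sub_123",
  #     plan_id_from_stripe: "plan_123",
  #     quantity: 1000,
  #     stripe_connect_plan_id: 1,
  #     user_id: 1
  #   })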
@update_params [:cancelled_at, :current_period_end, :current_period_start, :ended_at, :quantity, :start, :status]
def webhook_update_changeset(struct, params \\ %{}) do
struct
|> cast(params, @update_params)
end
end
|
web/models/stripe_connect_subscription.ex
| 0.901894
| 0.445831
|
stripe_connect_subscription.ex
|
starcoder
|
defmodule JSON.Parser.Bitstring.Number do
@doc """
parses a valid JSON numerical value, returns its elixir numerical representation
## Examples
iex> JSON.Parser.Bitstring.Number.parse ""
{:error, :unexpected_end_of_buffer}
iex> JSON.Parser.Bitstring.Number.parse "face0ff"
{:error, {:unexpected_token, "face0ff"} }
iex> JSON.Parser.Bitstring.Number.parse "-hello"
{:error, {:unexpected_token, "hello"} }
iex> JSON.Parser.Bitstring.Number.parse "129245"
{:ok, 129245, "" }
iex> JSON.Parser.Bitstring.Number.parse "7.something"
{:ok, 7, ".something" }
iex> JSON.Parser.Bitstring.Number.parse "7.4566something"
{:ok, 7.4566, "something" }
iex> JSON.Parser.Bitstring.Number.parse "-88.22suffix"
{:ok, -88.22, "suffix" }
iex> JSON.Parser.Bitstring.Number.parse "-12e4and then some"
{:ok, -1.2e+5, "and then some" }
iex> JSON.Parser.Bitstring.Number.parse "7842490016E-12-and more"
{:ok, 7.842490016e-3, "-and more" }
"""
def parse(<< ?- , rest :: binary >>) do
case parse(rest) do
{ :ok, number, json } -> { :ok, -1 * number, json }
{ :error, error_info } -> { :error, error_info }
end
end
def parse(binary) do
case binary do
<< number :: utf8 , _ :: binary >> when number in ?0..?9 ->
to_integer(binary) |> add_fractional |> apply_exponent
<< >> -> { :error, :unexpected_end_of_buffer }
_ -> { :error, { :unexpected_token, binary } }
end
end
defp add_fractional({ :error, error_info }), do: { :error, error_info }
defp add_fractional({ :ok, acc, bin }) do
case bin do
<< ?., after_dot :: binary >> ->
case after_dot do
<< c :: utf8, _ :: binary >> when c in ?0..?9 ->
{ fractional, rest } = parse_fractional(after_dot, 0, 10.0)
{ :ok, acc + fractional, rest }
_ ->
{ :ok, acc, bin }
end
_ ->
{ :ok, acc, bin }
end
end
defp parse_fractional(<< number :: utf8, rest :: binary >>, acc, power) when number in ?0..?9 do
parse_fractional(rest, acc + (number - ?0) / power, power * 10)
end
defp parse_fractional(json, acc , _) when is_binary(json), do: { acc, json }
defp apply_exponent({ :error, error_info }), do: { :error, error_info }
defp apply_exponent({ :ok, acc, << exponent :: utf8, rest :: binary >> }) when exponent in 'eE' do
case to_integer(rest) do
{ :ok, power, rest } -> { :ok, acc * :math.pow(10, power), rest }
{ :error, error_info } -> { :error, error_info }
end
end
defp apply_exponent({ :ok, acc, json }), do: { :ok, acc, json }
defp to_integer(<< >>), do: { :error, :unexpected_end_of_buffer }
defp to_integer(binary) do
case Integer.parse(binary) do
      :error -> { :error, { :unexpected_token, binary } }
{ result, rest } -> {:ok, result, rest}
end
end
end
|
elixir/codes-from-books/little-elixir/cap4/metex/deps/json/lib/json/parser/bitstring/number.ex
| 0.562657
| 0.435601
|
number.ex
|
starcoder
|
defmodule Faker.Beer.En do
import Faker, only: [sampler: 2]
@moduledoc """
Functions for generating Beer related data in English
"""
@doc """
Returns a Beer brand string
## Examples
iex> Faker.Beer.En.brand()
"Paulaner"
iex> Faker.Beer.En.brand()
"Pabst Blue Ribbon"
iex> Faker.Beer.En.brand()
"Kirin Inchiban"
iex> Faker.Beer.En.brand()
"Birra Moretti"
"""
@spec brand() :: String.t()
sampler(:brand, [
"Corona Extra",
"Heineken",
"Budweiser",
"Becks",
"BudLight",
"Pabst Blue Ribbon",
"Dos Equis",
"Blue Moon",
"Stella Artois",
"Miller Draft",
"Coors lite",
"Amstel",
"Guinness",
"Kirin Inchiban",
"Tsingtao",
"Sierra Nevada",
"Rolling Rock",
"Red Stripe",
"Paulaner",
"Patagonia",
"Delirium Tremens",
"Delirium Noctorum'",
"<NAME>",
"Sapporo Premium",
"Carlsberg",
"Pacifico",
"Quimes",
"Murphys",
"<NAME>",
"Harp",
"Fosters",
"Hoegaarden",
"Leffe",
"Lowenbrau"
])
@doc """
Returns a Beer name string
## Examples
iex> Faker.Beer.En.name()
"Duvel"
iex> Faker.Beer.En.name()
"Founders Kentucky Breakfast"
iex> Faker.Beer.En.name()
"Yeti Imperial Stout"
iex> Faker.Beer.En.name()
"Stone Imperial Russian Stout"
"""
@spec name() :: String.t()
sampler(:name, [
"Pliny The Elder",
"Founders Kentucky Breakfast",
"Trappistes Rochefort 10",
"HopSlam Ale",
"Stone Imperial Russian Stout",
"St. Bernardus Abt 12",
"Founders Breakfast Stout",
"<NAME>",
"<NAME>",
"<NAME>",
"Duvel",
"Dreadnaught IPA",
"<NAME>",
"La Fin Du Monde",
"Bourbon County Stout",
"Two Hearted Ale",
"Old Rasputin Russian Imperial Stout",
"Ruination IPA",
"90 Minute IPA",
"<NAME>",
"Double Bastard Ale",
"Hop Rod Rye",
"Trappistes Rochefort 8",
"<NAME>",
"Stone IPA",
"Arrogant Bastard Ale",
"<NAME>",
"Chocolate St",
"Oak Aged Yeti Imperial Stout",
"Ten FIDY",
"Storm King Stout",
"<NAME>",
"Alpha King Pale Ale",
"Westmalle Trappist Tripel",
"<NAME>’s Imperial IPA",
"Yeti Imperial Stout",
"Hennepin",
"<NAME>’s Oatmeal Stout",
"<NAME>",
"Trois Pistoles",
"Oaked Arrogant Bastard Ale",
"Sublimely Self-Righteous Ale",
"Maudite",
"Bell’s Expedition",
"Sierra Nevada Celebration Ale",
"Maharaj",
"Sierra Nevada Bigfoot Barleywine Style Ale",
"Orval Trappist Ale",
"Racer 5 India Pale Ale, Bear Republic Bre",
"Hercules Double IPA"
])
@doc """
Returns a hop name string
## Examples
iex> Faker.Beer.En.hop()
"Eroica"
iex> Faker.Beer.En.hop()
"Bullion"
iex> Faker.Beer.En.hop()
"Mt. Rainier"
iex> Faker.Beer.En.hop()
"Citra"
"""
@spec hop() :: String.t()
sampler(:hop, [
"Ahtanum",
"Amarillo",
"Bitter Gold",
"Bravo",
"Brewer’s Gold",
"Bullion",
"Cascade",
"Cashmere",
"Centennial",
"Chelan",
"Chinook",
"Citra",
"Cluster",
"Columbia",
"Columbus",
"Comet",
"Crystal",
"Equinox",
"Eroica",
"Fuggle",
"Galena",
"Glacier",
"Golding",
"Hallertau",
"Horizon",
"Liberty",
"Magnum",
"Millennium",
"Mosaic",
"Mt. Hood",
"Mt. Rainier",
"Newport",
"Northern Brewer",
"Nugget",
"Olympic",
"Palisade",
"Perle",
"Saaz",
"Santiam",
"Simcoe",
"Sorachi Ace",
"Sterling",
"Summit",
"Tahoma",
"Tettnang",
"TriplePearl",
"Ultra",
"Vanguard",
"Warrior",
"Willamette",
"Yakima Gol"
])
@doc """
Returns a yeast name string
## Examples
iex> Faker.Beer.En.yeast()
"2206 - Bavarian Lager"
iex> Faker.Beer.En.yeast()
"3763 - Roeselare Ale Blend"
iex> Faker.Beer.En.yeast()
"3711 - French Saison"
iex> Faker.Beer.En.yeast()
"3944 - Belgian Witbier"
"""
@spec yeast() :: String.t()
sampler(:yeast, [
"1007 - German Ale",
"1010 - American Wheat",
"1028 - London Ale",
"1056 - American Ale",
"1084 - Irish Ale",
"1098 - British Ale",
"1099 - Whitbread Ale",
"1187 - Ringwood Ale",
"1272 - American Ale II",
"1275 - Thames Valley Ale",
"1318 - London Ale III",
"1332 - Northwest Ale",
"1335 - British Ale II",
"1450 - Dennys Favorite 50",
"1469 - West Yorkshire Ale",
"1728 - Scottish Ale",
"1968 - London ESB Ale",
"2565 - Kölsch",
"1214 - Belgian Abbey",
"1388 - Belgian Strong Ale",
"1762 - Belgian Abbey II",
"3056 - Bavarian Wheat Blend",
"3068 - <NAME>",
"3278 - Belgian Lambic Blend",
"3333 - German Wheat",
"3463 - Forbidden Fruit",
"3522 - Belgian Ardennes",
"3638 - Bavarian Wheat",
"3711 - French Saison",
"3724 - Belgian Saison",
"3763 - Roeselare Ale Blend",
"3787 - Trappist High Gravity",
"3942 - Belgian Wheat",
"3944 - Belgian Witbier",
"2000 - Budvar Lager",
"2001 - Urquell Lager",
"2007 - Pilsen Lager",
"2035 - American Lager",
"2042 - Danish Lager",
"2112 - California Lager",
"2124 - Bohemian Lager",
"2206 - Bavarian Lager",
"2278 - Czech Pils",
"2308 - Munich Lager",
"2633 - Octoberfest Lager Blend",
"5112 - Brettanomyces bruxellensis",
"5335 - Lactobacillus",
"5526 - Brettanomyces lambicus",
"5733 - Pediococcus"
])
@doc """
Returns a malt name string
## Examples
iex> Faker.Beer.En.malt()
"Carapils"
iex> Faker.Beer.En.malt()
"Pale"
iex> Faker.Beer.En.malt()
"Rye malt"
iex> Faker.Beer.En.malt()
"Munich"
"""
@spec malt() :: String.t()
sampler(:malt, [
"Black malt",
"Caramel",
"Carapils",
"Chocolate",
"Munich",
"Caramel",
"Carapils",
"Chocolate malt",
"Munich",
"Pale",
"Roasted barley",
"Rye malt",
"Special roast",
"Victory",
"Vienna",
"Wheat mal"
])
@doc """
Returns a style name string
## Examples
iex> Faker.Beer.En.style()
"Stout"
iex> Faker.Beer.En.style()
"European Amber Lager"
iex> Faker.Beer.En.style()
"Strong Ale"
iex> Faker.Beer.En.style()
"German Wheat And Rye Beer"
"""
@spec style() :: String.t()
sampler(:style, [
"Light Lager",
"Pilsner",
"European Amber Lager",
"Dark Lager",
"Bock",
"Light Hybrid Beer",
"Amber Hybrid Beer",
"English Pale Ale",
"Scottish And Irish Ale",
"Merican Ale",
"English Brown Ale",
"Porter",
"Stout",
"India Pale Ale",
"German Wheat And Rye Beer",
"Belgian And French Ale",
"Sour Ale",
"Belgian Strong Ale",
"Strong Ale",
"Fruit Beer",
"Vegetable Beer",
"Smoke-flavored",
"Wood-aged Beer"
])
end
|
lib/faker/beer/en.ex
| 0.732592
| 0.419648
|
en.ex
|
starcoder
|
defmodule Membrane.AAC.Parser do
@moduledoc """
Parser for Advanced Audio Codec.
Supports both plain and ADTS-encapsulated output (configured by `out_encapsulation`).
Input with encapsulation `:none` is supported, but correct AAC caps need to be supplied with the stream.
  Adds a sample-rate-based timestamp to the metadata if absent.
"""
use Membrane.Filter
alias __MODULE__.Helper
alias Membrane.{Buffer, AAC}
def_input_pad :input, demand_unit: :buffers, caps: :any
def_output_pad :output, caps: AAC
def_options samples_per_frame: [
spec: AAC.samples_per_frame_t(),
default: 1024,
description: "Count of audio samples in each AAC frame"
],
out_encapsulation: [
spec: AAC.encapsulation_t(),
default: :ADTS,
description: """
Determines whether output AAC frames should be prefixed with ADTS headers
"""
],
in_encapsulation: [
spec: AAC.encapsulation_t(),
default: :ADTS
]
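  # Configuration sketch: `def_options` generates a struct with the fields above, so a
  # parser converting plain AAC into ADTS can be configured (hedged example) as
  # `%Membrane.AAC.Parser{in_encapsulation: :none, out_encapsulation: :ADTS}`; how the
  # element is wired into a pipeline depends on the Membrane Core version in use.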
@type timestamp_t :: Ratio.t() | Membrane.Time.t()
@impl true
def handle_init(options) do
state = options |> Map.from_struct() |> Map.merge(%{leftover: <<>>, timestamp: 0})
{:ok, state}
end
@impl true
def handle_caps(:input, %AAC{encapsulation: encapsulation} = caps, _ctx, state)
when state.in_encapsulation == encapsulation do
{{:ok, caps: {:output, %{caps | encapsulation: state.out_encapsulation}}}, state}
end
@impl true
def handle_caps(:input, %Membrane.AAC.RemoteStream{} = caps, _ctx, state) do
caps = Helper.parse_audio_specific_config!(caps.audio_specific_config)
{{:ok, caps: {:output, %{caps | encapsulation: state.out_encapsulation}}}, state}
end
@impl true
def handle_caps(:input, %AAC{encapsulation: encapsulation}, _ctx, state)
when encapsulation != state.in_encapsulation,
do:
raise(
"%AAC{encapsulation: #{inspect(state.in_encapsulation)}} caps are required when declaring in_encapsulation as #{inspect(state.in_encapsulation)}"
)
@impl true
def handle_caps(:input, _caps, _ctx, state) do
{:ok, state}
end
@impl true
def handle_process(:input, buffer, ctx, state) when state.in_encapsulation == :ADTS do
%{caps: caps} = ctx.pads.output
timestamp = Map.get(buffer.metadata, :timestamp, state.timestamp)
parse_opts = Map.take(state, [:samples_per_frame, :out_encapsulation, :in_encapsulation])
case Helper.parse_adts(state.leftover <> buffer.payload, caps, timestamp, parse_opts) do
{:ok, {output, leftover, timestamp}} ->
actions = Enum.map(output, fn {action, value} -> {action, {:output, value}} end)
{{:ok, actions ++ [redemand: :output]},
%{state | leftover: leftover, timestamp: timestamp}}
{:error, reason} ->
{{:error, reason}, state}
end
end
@impl true
def handle_process(:input, buffer, ctx, state) when state.in_encapsulation == :none do
timestamp = Helper.next_timestamp(state.timestamp, ctx.pads.output.caps)
buffer = %{buffer | pts: timestamp}
buffer =
case state.out_encapsulation do
:ADTS ->
%Buffer{buffer | payload: Helper.payload_to_adts(buffer.payload, ctx.pads.output.caps)}
_other ->
buffer
end
{{:ok, buffer: {:output, buffer}}, %{state | timestamp: timestamp}}
end
@impl true
def handle_demand(:output, size, :buffers, _ctx, state) do
{{:ok, demand: {:input, size}}, state}
end
end
|
lib/membrane/aac/parser.ex
| 0.885835
| 0.620176
|
parser.ex
|
starcoder
|
defmodule TinyColor do
@moduledoc """
TinyColor is an elixir port of the javascript tinycolor2 library used to manipulate color values
and convert them to different color spaces. Alpha is supported for all color spaces.
Currently it supports RGB, HSV, HSL, and the OKLab color space. All supported color spaces can
be used in any method; however, most operations are implemented by converting the provided color
into a specific color space, and so there will be some additional accuracy loss.
## Parsing CSS colors
`TinyColor.parser` defines a parser that will interpret CSS Level 3 compliant color codes into
the appropriate TinyColor struct.
## Rendering colors in html and json
TinyColor implements the protocol for Jason and Phoenix so that colors can be directly rendered in
both HTML and json responses as css compatible color codes.
"""
@white %TinyColor.RGB{red: 255, green: 255, blue: 255}
@black %TinyColor.RGB{red: 0, green: 0, blue: 0}
@typedoc """
A representation of a color in hue, saturation, lightness and alpha.
"""
  @type hsl_color :: %TinyColor.HSL{
          hue: float(),
          saturation: float(),
          lightness: float(),
          alpha: float()
        }
@typedoc """
A representation of a color in hue, saturation, value and alpha.
"""
  @type hsv_color :: %TinyColor.HSV{
          hue: float(),
          saturation: float(),
          value: float(),
          alpha: float()
        }
@typedoc """
A representation of a color in red, green, blue and alpha.
"""
  @type rgb_color :: %TinyColor.RGB{
          red: float(),
          green: float(),
          blue: float(),
          alpha: float()
        }
@typedoc """
A representation of a color in the oklab color space with optional alpha
"""
  @type oklab_color :: %TinyColor.OKLab{
          l: float(),
          a: float(),
          b: float(),
          alpha: float()
        }
@typedoc """
A representation of a color in any supported system.
"""
  @type color :: hsl_color | hsv_color | rgb_color | oklab_color
@doc ~S"""
Parses the given values into an HSL struct
## Examples
iex> TinyColor.hsl(128, 0.41, 0.13)
%TinyColor.HSL{hue: 128.0, saturation: 41.0, lightness: 13.0, alpha: 1.0}
iex> TinyColor.hsl(450, 0.41, 0.13)
%TinyColor.HSL{alpha: 1.0, hue: 90.0, lightness: 13.0, saturation: 41.0}
iex> TinyColor.hsl(128, {0.265, :percent}, {0.54, :percent})
%TinyColor.HSL{hue: 128.0, saturation: 26.5, lightness: 54.0, alpha: 1.0}
iex> TinyColor.hsl(450, {0.265, :percent}, {0.54, :percent})
%TinyColor.HSL{alpha: 1.0, hue: 90.0, lightness: 54.0, saturation: 26.5}
iex> TinyColor.hsl({0.54, :percent}, {0.265, :percent}, {0.54, :percent})
%TinyColor.HSL{hue: 194.4, saturation: 26.5, lightness: 54.0, alpha: 1.0}
iex> TinyColor.hsl({1.4, :percent}, {0.265, :percent}, {0.54, :percent})
%TinyColor.HSL{alpha: 1.0, hue: 143.99999999999994, lightness: 54.0, saturation: 26.5}
iex> TinyColor.hsl(128, 0.41, 0.13, 0.5)
%TinyColor.HSL{hue: 128.0, saturation: 41.0, lightness: 13.0, alpha: 0.5}
iex> TinyColor.hsl(450, 0.41, 0.13, 0.5)
%TinyColor.HSL{alpha: 0.5, hue: 90.0, lightness: 13.0, saturation: 41.0}
iex> TinyColor.hsl(128, {0.265, :percent}, {0.54, :percent}, 0.5)
%TinyColor.HSL{hue: 128.0, saturation: 26.5, lightness: 54.0, alpha: 0.5}
iex> TinyColor.hsl(450, {0.265, :percent}, {0.54, :percent}, 0.5)
%TinyColor.HSL{alpha: 0.5, hue: 90.0, lightness: 54.0, saturation: 26.5}
iex> TinyColor.hsl({0.54, :percent}, {0.265, :percent}, {0.54, :percent}, 0.5)
%TinyColor.HSL{hue: 194.4, saturation: 26.5, lightness: 54.0, alpha: 0.5}
iex> TinyColor.hsl({1.4, :percent}, {0.265, :percent}, {0.54, :percent}, 0.5)
%TinyColor.HSL{alpha: 0.5, hue: 143.99999999999994, lightness: 54.0, saturation: 26.5}
"""
def hsl(hue, saturation, lightness, alpha \\ 1.0) do
TinyColor.HSL.new(hue, saturation, lightness, alpha)
end
@doc ~S"""
  Parses the given values into an HSV struct
## Examples
iex> TinyColor.hsv(128, 0.41, 0.13)
%TinyColor.HSV{hue: 128.0, saturation: 41.0, value: 13.0, alpha: 1.0}
iex> TinyColor.hsv(450, 0.41, 0.13)
%TinyColor.HSV{alpha: 1.0, hue: 90.0, saturation: 41.0, value: 13.0}
iex> TinyColor.hsv(128, {0.265, :percent}, {0.54, :percent})
%TinyColor.HSV{hue: 128.0, saturation: 26.5, value: 54.0, alpha: 1.0}
iex> TinyColor.hsv(450, {0.265, :percent}, {0.54, :percent})
%TinyColor.HSV{alpha: 1.0, hue: 90.0, saturation: 26.5, value: 54.0}
iex> TinyColor.hsv({0.54, :percent}, {0.265, :percent}, {0.54, :percent})
%TinyColor.HSV{hue: 194.4, saturation: 26.5, value: 54.0, alpha: 1.0}
iex> TinyColor.hsv({1.4, :percent}, {0.265, :percent}, {0.54, :percent})
%TinyColor.HSV{alpha: 1.0, hue: 143.99999999999994, saturation: 26.5, value: 54.0}
iex> TinyColor.hsv(128, 0.41, 0.13, 0.5)
%TinyColor.HSV{hue: 128.0, saturation: 41.0, value: 13.0, alpha: 0.5}
iex> TinyColor.hsv(450, 0.41, 0.13, 0.5)
%TinyColor.HSV{alpha: 0.5, hue: 90.0, saturation: 41.0, value: 13.0}
iex> TinyColor.hsv(128, {0.265, :percent}, {0.54, :percent}, 0.5)
%TinyColor.HSV{hue: 128.0, saturation: 26.5, value: 54.0, alpha: 0.5}
iex> TinyColor.hsv(450, {0.265, :percent}, {0.54, :percent}, 0.5)
%TinyColor.HSV{alpha: 0.5, hue: 90.0, saturation: 26.5, value: 54.0}
iex> TinyColor.hsv({0.54, :percent}, {0.265, :percent}, {0.54, :percent}, 0.5)
%TinyColor.HSV{hue: 194.4, saturation: 26.5, value: 54.0, alpha: 0.5}
iex> TinyColor.hsv({1.4, :percent}, {0.265, :percent}, {0.54, :percent}, 0.5)
%TinyColor.HSV{alpha: 0.5, hue: 143.99999999999994, saturation: 26.5, value: 54.0}
"""
def hsv(hue, saturation, value, alpha \\ 1.0) do
TinyColor.HSV.new(hue, saturation, value, alpha)
end
@doc ~S"""
Parses the given values into an RGB struct
## Examples
iex> TinyColor.rgb(128, 129, 130)
%TinyColor.RGB{red: 128.0, green: 129.0, blue: 130.0, alpha: 1.0}
iex> TinyColor.rgb(333, 129, 130)
%TinyColor.RGB{alpha: 1.0, blue: 130.0, green: 129.0, red: 255.0}
iex> TinyColor.rgb(128, 129, 130, 0.5)
%TinyColor.RGB{red: 128.0, green: 129.0, blue: 130.0, alpha: 0.5}
iex> TinyColor.rgb(128, 129, 130, -0.5)
%TinyColor.RGB{alpha: 0.0, blue: 130.0, green: 129.0, red: 128.0}
iex> TinyColor.rgb({0.125, :percent}, {0.265, :percent}, {0.525, :percent})
%TinyColor.RGB{red: 31.875, green: 67.575, blue: 133.875, alpha: 1.0}
iex> TinyColor.rgb({1.4, :percent}, {0.265, :percent}, {0.54, :percent})
%TinyColor.RGB{alpha: 1.0, blue: 137.70000000000002, green: 67.575, red: 255.0}
iex> TinyColor.rgb({0.125, :percent}, {0.265, :percent}, {0.525, :percent}, 0.5)
%TinyColor.RGB{red: 31.875, green: 67.575, blue: 133.875, alpha: 0.5}
iex> TinyColor.rgb({0.54, :percent}, {0.265, :percent}, {0.54, :percent}, -0.5)
%TinyColor.RGB{alpha: 0.0, blue: 137.70000000000002, green: 67.575, red: 137.70000000000002}
"""
def rgb(red, green, blue, alpha \\ 1.0) do
TinyColor.RGB.new(red, green, blue, alpha)
end
def oklab(l, a, b, alpha \\ 1.0) do
TinyColor.OKLab.new(l, a, b, alpha)
end
def light?(color) do
not dark?(color)
end
def dark?(color) do
brightness(color) < 128
end
def brightness(color) do
%{red: r, blue: b, green: g} = TinyColor.Conversions.to_rgb(color)
(r * 299 + g * 587 + b * 114) / 1000
end
@spec luminance(color()) :: float
def luminance(color) do
{rs_rgb, gs_rgb, bs_rgb, _} =
color
|> TinyColor.Conversions.to_rgb()
|> TinyColor.RGB.percentages()
r =
if rs_rgb <= 0.03928 do
rs_rgb / 12.92
else
:math.pow((rs_rgb + 0.055) / 1.055, 2.4)
end
g =
if gs_rgb <= 0.03928 do
gs_rgb / 12.92
else
:math.pow((gs_rgb + 0.055) / 1.055, 2.4)
end
b =
if bs_rgb <= 0.03928 do
bs_rgb / 12.92
else
:math.pow((bs_rgb + 0.055) / 1.055, 2.4)
end
0.2126 * r + 0.7152 * g + 0.0722 * b
end
def lighten(color, amount \\ 10) do
color = TinyColor.Conversions.to_hsl(color)
    TinyColor.hsl(color.hue, color.saturation, color.lightness + amount, color.alpha)
end
def brighten(color, amount \\ 10) do
%{red: r, green: g, blue: b, alpha: a} = TinyColor.Conversions.to_rgb(color)
TinyColor.rgb(
r - round(255 * -(amount / 100)),
g - round(255 * -(amount / 100)),
b - round(255 * -(amount / 100)),
a
)
end
def darken(color, amount \\ 10) do
color = TinyColor.Conversions.to_hsl(color)
    TinyColor.hsl(color.hue, color.saturation, color.lightness - amount, color.alpha)
end
def tint(color, amount \\ 10) do
mix(color, TinyColor.Named.get("white"), amount)
end
def shade(color, amount \\ 10) do
mix(color, TinyColor.Named.get("black"), amount)
end
def desaturate(color, amount \\ 10) do
color = TinyColor.Conversions.to_hsl(color)
    TinyColor.hsl(color.hue, color.saturation - amount, color.lightness, color.alpha)
end
def saturate(color, amount \\ 10) do
color = TinyColor.Conversions.to_hsl(color)
    TinyColor.hsl(color.hue, color.saturation + amount, color.lightness, color.alpha)
end
def grayscale(color), do: desaturate(color, 100)
def spin(color, amount) do
color = TinyColor.Conversions.to_hsl(color)
    TinyColor.hsl(color.hue + amount, color.saturation, color.lightness, color.alpha)
end
def mix(self, color, amount \\ 50) do
%{red: r1, green: g1, blue: b1, alpha: a1} = TinyColor.Conversions.to_rgb(self)
%{red: r2, green: g2, blue: b2, alpha: a2} = TinyColor.Conversions.to_rgb(color)
percentage = amount / 100
TinyColor.rgb(
(r2 - r1) * percentage + r1,
(g2 - g1) * percentage + g1,
(b2 - b1) * percentage + b1,
(a2 - a1) * percentage + a1
)
end
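  # Worked example of the interpolation above: mixing pure red into pure blue at the
  # default 50% gives (0 - 255) * 0.5 + 255 = 127.5 for the red channel and likewise
  # 127.5 for blue, so `TinyColor.mix(TinyColor.rgb(255, 0, 0), TinyColor.rgb(0, 0, 255))`
  # lands on a mid purple (the exact struct output depends on `TinyColor.RGB.new/4`).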
@spec contrast(color(), color()) :: float()
def contrast(color1, color2) do
c1_luminance = luminance(color1)
c2_luminance = luminance(color2)
(max(c1_luminance, c2_luminance) + 0.05) / (min(c1_luminance, c2_luminance) + 0.05)
end
@type font_size_option :: {:size, :small | :large}
@type contrast_level_option :: {:level, :AA | :AAA}
@spec readable?(color(), color(), [font_size_option() | contrast_level_option()]) ::
boolean()
def readable?(color1, color2, opts \\ []) do
level = Keyword.get(opts, :level, :AA)
size = Keyword.get(opts, :size, :small)
case {level, size, contrast(color1, color2)} do
{:AAA, :small, level} -> level >= 7
{:AAA, :large, level} -> level >= 4.5
{:AA, :small, level} -> level >= 4.5
{:AA, :large, level} -> level >= 3
_ -> false
end
end
@spec most_readable(
color(),
list(color()),
[{atom, [any]}]
) :: color()
def most_readable(base, choices, opts \\ []) do
best_choice = Enum.max_by(choices, fn choice -> contrast(base, choice) end)
include_fallbacks = Keyword.get(opts, :include_fallback_colors, false)
if readable?(base, best_choice, opts) or not include_fallbacks do
best_choice
else
most_readable(base, [@white, @black])
end
end
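  # Usage sketch (illustrative colors): checking and choosing text colors for a dark
  # background with the functions defined above.
  #
  #   bg = TinyColor.rgb(20, 20, 20)
  #   TinyColor.readable?(bg, TinyColor.rgb(255, 255, 255), level: :AA, size: :small)
  #   TinyColor.most_readable(bg, [TinyColor.rgb(200, 200, 200), TinyColor.rgb(40, 40, 40)],
  #     include_fallback_colors: true)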
@doc """
Checks for equality of two colors.
"""
@spec equal?(any, any) :: boolean
def equal?(nil, nil), do: true
def equal?(nil, _), do: false
def equal?(_, nil), do: false
def equal?(%TinyColor.RGB{} = a, %TinyColor.RGB{} = b), do: a == b
def equal?(%TinyColor.HSL{} = a, %TinyColor.HSL{} = b), do: a == b
def equal?(%TinyColor.HSV{} = a, %TinyColor.HSV{} = b), do: a == b
def equal?(color1, color2) do
a = color1 |> TinyColor.Conversions.to_rgb() |> to_string()
b = color2 |> TinyColor.Conversions.to_rgb() |> to_string()
a == b
end
end
|
lib/tiny_color.ex
| 0.936219
| 0.665404
|
tiny_color.ex
|
starcoder
|
defmodule Operate.Adapter.Bob do
@moduledoc """
Adapter module for loading tapes and Ops from [BOB](https://bob.planaria.network).
## Examples
iex> Operate.Adapter.Bob.fetch_tx(txid, api_key: "mykey")
{:ok, %Operate.BPU.Transaction{}}
"""
alias Operate.BPU
use Operate.Adapter
use Tesla, only: [:get], docs: false
plug Tesla.Middleware.BaseUrl, "https://bob.planaria.network/q/1GgmC7Cg782YtQ6R9QkM58voyWeQJmJJzG/"
plug Tesla.Middleware.JSON
def fetch_tx(txid, options \\ []) do
api_key = Keyword.get(options, :api_key)
path = encode_query(%{
"v" => "3",
"q" => %{
"find" => %{
"tx.h" => txid,
"out.tape" => %{
"$elemMatch" => %{
"i" => 0,
"cell.op" => 106
}
}
},
"limit" => 1
}
})
case get(path, headers: [key: api_key]) do
{:ok, res} ->
tx = to_bpu(res.body) |> List.first
{:ok, tx}
error -> error
end
end
@doc """
Fetches a list of transactions by the given query map, and returns the result
in an `:ok` / `:error` tuple pair.
  The `query` parameter should be a valid Bitquery. The `project` attribute
  cannot be used, and unless otherwise specified, `limit` defaults to `10`.
## Options
The accepted options are:
* `:api_key` - Planaria API key
## Examples
Operate.Adapter.Bob.fetch_tx_by(%{
"find" => %{
"out.tape.cell" => %{
"$elemMatch" => %{
"i" => 0,
"s" => "1PuQa7K62MiKCtssSLKy1kh56WWU7MtUR5"
}
}
}
})
"""
def fetch_tx_by(query, options \\ []) when is_map(query) do
api_key = Keyword.get(options, :api_key)
query = query
|> Map.delete("project")
|> Map.put_new("limit", 10)
path = encode_query(%{
"v" => "3",
"q" => query
})
case get(path, headers: [key: api_key]) do
{:ok, res} ->
tx = to_bpu(res.body)
{:ok, tx}
error -> error
end
end
@doc """
Converts the map from the Planaria HTTP response to a `t:Operate.BPU.Transaction.t/0`.
"""
@spec to_bpu(map) :: BPU.Transaction.t | [BPU.Transaction.t, ...]
def to_bpu(%{"u" => u, "c" => c}),
do: u ++ c |> Enum.map(&to_bpu/1)
def to_bpu(tx) do
txid = get_in(tx, ["tx", "h"])
outputs = Enum.map(tx["out"], fn o ->
case get_in(o, ["e", "a"]) do
"false" -> put_in(o, ["e", "a"], nil)
_ -> o
end
end)
tx
|> Map.put(:txid, txid)
|> Map.put("out", outputs)
|> BPU.Transaction.from_map
end
# Private: Encodes map into Fat URI path
defp encode_query(query) do
query
|> Jason.encode!
|> Base.encode64
end
end
|
lib/operate/adapter/bob.ex
| 0.738198
| 0.498657
|
bob.ex
|
starcoder
|
defmodule BackupHandler do
@moduledoc """
A module for keeping and periodically synchronizing a log of all orders not yet finished in the distributed system.
"""
use GenServer, restart: :permanent
require Logger
@backupRate Application.compile_env(:elevator, :backupRate)
# Public functions
# --------------------------------------------
@doc "Starts the Backup Handler in a supervision tree, see `Supervisor`."
def start_link([]), do: GenServer.start_link(__MODULE__, [], name: __MODULE__)
@doc "Initializes the Backup Handler by starting a periodic call to `multiTriggerLogPush/0`."
def init(log) do
# Sends itself a nudge immediately, to attempt synchronization with other BackupHandlers. After a crash, this amounts to getting the backup from the other nodes.
send(__MODULE__, :routineSync)
{:ok, log}
end
# API
# --------------------------------------------
@doc "Requests the `BackupHandler` to do a backup of an order."
def backupOrder(order), do: GenServer.multi_call(__MODULE__, {:backupOrder, order})
@doc "Signals to the `BackupHandler` that a floor has been cleared, which triggers a deletion of all orders in the `BackupHandler`s handled by the signalling node."
def floorFinished(floor), do: GenServer.multi_call(__MODULE__, {:floorFinished, floor, node()})
@doc "Triggers all `BackupHandler`s to push their list of orders to the other n-1 `BackupHandler`s, effectively synchronizing the `BackupHandler`s."
def multiTriggerLogPush, do: GenServer.abcast(__MODULE__, :triggerLogPush)
@doc "Requests a queue of all orders from the `BackupHandler`s, filtering out metadata and returning a list of orders chronologically sorted."
def requestBackup() do
{replies, _bad_nodes} = GenServer.multi_call(__MODULE__, :requestBackup)
replies
|> Enum.map(fn {_nodeID, log} -> log end)
|> mergeLogs()
|> Enum.filter(fn {_timestamp, entryType, _entry} -> entryType == :order end)
|> Enum.map(fn {_timestamp, :order, entry} -> entry end)
|> Enum.filter(fn {_floor, _orderType, handledBy} -> handledBy == node() end)
end
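  # API sketch (illustrative values): an order is the `{floor, order_type, handled_by}`
  # tuple validated in the `:backupOrder` handler below.
  #
  #   BackupHandler.backupOrder({2, :up, node()})
  #   BackupHandler.floorFinished(2)
  #   BackupHandler.requestBackup()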
# Calls/Casts
# --------------------------------------------
# Handles the BackupHandler being asked to back up a new order.
# If the structure of the order and the types of the fields are correct, add the order to the log with a timestamp. Returns :ok or :error.
def handle_call({:backupOrder, order}, _from, log) do
with {floor, orderType, handledBy} <- order do
if is_integer(floor) && orderType in [:up, :down, :cab] &&
is_atom(handledBy) do
{:reply, :ok, [{:os.system_time(:milli_seconds), :order, order} | log]}
else
{:reply, :error, log}
end
else
_ -> {:reply, :error, log}
end
end
# Handles the BackupHandler being informed that the orders on a floor handled by a given node is finished.
# Deletes all orders on the given floor by given node, and inserts a "floor finished" token to avoid erroneously re-adding them during synchronization procedures.
def handle_call({:floorFinished, floor, elevatorNode}, _from, log) do
# Anonymous function which returns true if the given log entry is an order on the given floor.
orderOnFloor? = fn logEntry, floor ->
with {_timestamp, :order, {orderFloor, _orderType, handledBy}} <- logEntry do
handledBy == elevatorNode && orderFloor == floor
else
{_, :floorFinished, _} ->
false
end
end
filtered_log =
log
|> Enum.reject(fn logEntry -> orderOnFloor?.(logEntry, floor) end)
|> Enum.reject(fn {_timestamp, entryType, entry} ->
entryType == :floorFinished && entry == {floor, elevatorNode}
end)
{:reply, :ok,
[{:os.system_time(:milli_seconds), :floorFinished, {floor, elevatorNode}} | filtered_log]}
end
# Handles someone pushing their log to the BackupHandler, by merging the two logs.
def handle_cast({:pushLog, remoteLog}, localLog),
do: {:noreply, mergeLogs([remoteLog, localLog])}
# Handles someone triggering a log push from the BackupHandler, pushing its log to all other reachable BackupHandlers.
def handle_cast(:triggerLogPush, log) do
GenServer.abcast(Node.list(), __MODULE__, {:pushLog, log})
{:noreply, log}
end
def handle_call(:requestBackup, _from, log), do: {:reply, log, log}
# Gets a nudge to start a routine synchronization procedure with the other backupHandlers
def handle_info(:routineSync, log) do
    # Trigger log pushing in all backup handlers, which constitutes a full synchronization
multiTriggerLogPush()
# To attempt syncing with other servers after a certain time.
Process.send_after(__MODULE__, :routineSync, @backupRate)
{:noreply, log}
end
# Private functions
# --------------------------------------------
# Merges logs, using the floorFinished tokens to avoid erroneously keeping outdated log entries.
defp mergeLogs(logList) do
    # Concatenate logs so that newer entries come first, then older entries.
concattedLog =
logList
|> Enum.concat()
|> Enum.uniq()
|> Enum.sort(fn logEntry1, logEntry2 -> logEntry1 >= logEntry2 end)
floorFinishedList =
concattedLog
|> Enum.filter(fn {_timestamp, type, _} -> type == :floorFinished end)
|> Enum.map(fn {timestamp, :floorFinished, {floor, elevatorNode}} ->
{timestamp, floor, elevatorNode}
end)
# Tells whether a given order ocurred before a clearing of the floor or not.
beforeFloorfinished? = fn logEntry, clearanceList ->
      with {orderTimestamp, :order, {orderFloor, _orderType, handledBy}} <- logEntry,
{clearanceTimestamp, _, _} <-
Enum.find(clearanceList, fn {_timestamp, floor, elevatorNode} ->
{floor, elevatorNode} == {orderFloor, handledBy}
end) do
orderTimestamp < clearanceTimestamp
else
_ -> false
end
end
# Removes all orders that were in fact cleared from the merged log and returns it.
concattedLog
|> Enum.reject(fn logEntry -> beforeFloorfinished?.(logEntry, floorFinishedList) end)
|> Enum.uniq()
end
end
|
elevator/lib/backupHandler.ex
| 0.790126
| 0.432603
|
backupHandler.ex
|
starcoder
|
defmodule :gl do
# Private Types
@typep clamp :: float()
@typep enum :: non_neg_integer()
@typep matrix :: (matrix12() | matrix16())
@typep matrix12 :: {float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float()}
@typep matrix16 :: {float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float()}
@typep mem :: (binary() | tuple())
@typep offset :: non_neg_integer()
# Functions
@spec accum(op, value) :: :ok when op: enum(), value: float()
def accum(op, value), do: ...
@spec activeShaderProgram(pipeline, program) :: :ok when pipeline: integer(), program: integer()
def activeShaderProgram(pipeline, program), do: ...
@spec activeTexture(texture) :: :ok when texture: enum()
def activeTexture(texture), do: ...
@spec alphaFunc(func, ref) :: :ok when func: enum(), ref: clamp()
def alphaFunc(func, ref), do: ...
@spec areTexturesResident(textures) :: {(0 | 1), residences :: [(0 | 1)]} when textures: [integer()]
def areTexturesResident(textures), do: ...
@spec arrayElement(i) :: :ok when i: integer()
def arrayElement(i), do: ...
@spec attachObjectARB(containerObj, obj) :: :ok when containerObj: integer(), obj: integer()
def attachObjectARB(containerObj, obj), do: ...
@spec attachShader(program, shader) :: :ok when program: integer(), shader: integer()
def attachShader(program, shader), do: ...
@spec begin(mode) :: :ok when mode: enum()
def begin(mode), do: ...
@spec beginConditionalRender(id, mode) :: :ok when id: integer(), mode: enum()
def beginConditionalRender(id, mode), do: ...
@spec beginQuery(target, id) :: :ok when target: enum(), id: integer()
def beginQuery(target, id), do: ...
@spec beginQueryIndexed(target, index, id) :: :ok when target: enum(), index: integer(), id: integer()
def beginQueryIndexed(target, index, id), do: ...
@spec beginTransformFeedback(primitiveMode) :: :ok when primitiveMode: enum()
def beginTransformFeedback(primitiveMode), do: ...
@spec bindAttribLocation(program, index, name) :: :ok when program: integer(), index: integer(), name: charlist()
def bindAttribLocation(program, index, name), do: ...
@spec bindAttribLocationARB(programObj, index, name) :: :ok when programObj: integer(), index: integer(), name: charlist()
def bindAttribLocationARB(programObj, index, name), do: ...
@spec bindBuffer(target, buffer) :: :ok when target: enum(), buffer: integer()
def bindBuffer(target, buffer), do: ...
@spec bindBufferBase(target, index, buffer) :: :ok when target: enum(), index: integer(), buffer: integer()
def bindBufferBase(target, index, buffer), do: ...
@spec bindBufferRange(target, index, buffer, offset, size) :: :ok when target: enum(), index: integer(), buffer: integer(), offset: integer(), size: integer()
def bindBufferRange(target, index, buffer, offset, size), do: ...
@spec bindFragDataLocation(program, color, name) :: :ok when program: integer(), color: integer(), name: charlist()
def bindFragDataLocation(program, color, name), do: ...
@spec bindFragDataLocationIndexed(program, colorNumber, index, name) :: :ok when program: integer(), colorNumber: integer(), index: integer(), name: charlist()
def bindFragDataLocationIndexed(program, colorNumber, index, name), do: ...
@spec bindFramebuffer(target, framebuffer) :: :ok when target: enum(), framebuffer: integer()
def bindFramebuffer(target, framebuffer), do: ...
@spec bindImageTexture(unit, texture, level, layered, layer, access, format) :: :ok when unit: integer(), texture: integer(), level: integer(), layered: (0 | 1), layer: integer(), access: enum(), format: enum()
def bindImageTexture(unit, texture, level, layered, layer, access, format), do: ...
@spec bindProgramARB(target, program) :: :ok when target: enum(), program: integer()
def bindProgramARB(target, program), do: ...
@spec bindProgramPipeline(pipeline) :: :ok when pipeline: integer()
def bindProgramPipeline(pipeline), do: ...
@spec bindRenderbuffer(target, renderbuffer) :: :ok when target: enum(), renderbuffer: integer()
def bindRenderbuffer(target, renderbuffer), do: ...
@spec bindSampler(unit, sampler) :: :ok when unit: integer(), sampler: integer()
def bindSampler(unit, sampler), do: ...
@spec bindTexture(target, texture) :: :ok when target: enum(), texture: integer()
def bindTexture(target, texture), do: ...
@spec bindTransformFeedback(target, id) :: :ok when target: enum(), id: integer()
def bindTransformFeedback(target, id), do: ...
@spec bindVertexArray(array) :: :ok when array: integer()
def bindVertexArray(array), do: ...
@spec bitmap(width, height, xorig, yorig, xmove, ymove, bitmap) :: :ok when width: integer(), height: integer(), xorig: float(), yorig: float(), xmove: float(), ymove: float(), bitmap: (offset() | mem())
def bitmap(width, height, xorig, yorig, xmove, ymove, bitmap) when is_integer(bitmap), do: ...
def bitmap(width, height, xorig, yorig, xmove, ymove, bitmap), do: ...
@spec blendColor(red, green, blue, alpha) :: :ok when red: clamp(), green: clamp(), blue: clamp(), alpha: clamp()
def blendColor(red, green, blue, alpha), do: ...
@spec blendEquation(mode) :: :ok when mode: enum()
def blendEquation(mode), do: ...
@spec blendEquationSeparate(modeRGB, modeAlpha) :: :ok when modeRGB: enum(), modeAlpha: enum()
def blendEquationSeparate(modeRGB, modeAlpha), do: ...
@spec blendEquationSeparatei(buf, modeRGB, modeAlpha) :: :ok when buf: integer(), modeRGB: enum(), modeAlpha: enum()
def blendEquationSeparatei(buf, modeRGB, modeAlpha), do: ...
@spec blendEquationi(buf, mode) :: :ok when buf: integer(), mode: enum()
def blendEquationi(buf, mode), do: ...
@spec blendFunc(sfactor, dfactor) :: :ok when sfactor: enum(), dfactor: enum()
def blendFunc(sfactor, dfactor), do: ...
@spec blendFuncSeparate(sfactorRGB, dfactorRGB, sfactorAlpha, dfactorAlpha) :: :ok when sfactorRGB: enum(), dfactorRGB: enum(), sfactorAlpha: enum(), dfactorAlpha: enum()
def blendFuncSeparate(sfactorRGB, dfactorRGB, sfactorAlpha, dfactorAlpha), do: ...
@spec blendFuncSeparatei(buf, srcRGB, dstRGB, srcAlpha, dstAlpha) :: :ok when buf: integer(), srcRGB: enum(), dstRGB: enum(), srcAlpha: enum(), dstAlpha: enum()
def blendFuncSeparatei(buf, srcRGB, dstRGB, srcAlpha, dstAlpha), do: ...
@spec blendFunci(buf, src, dst) :: :ok when buf: integer(), src: enum(), dst: enum()
def blendFunci(buf, src, dst), do: ...
@spec blitFramebuffer(srcX0, srcY0, srcX1, srcY1, dstX0, dstY0, dstX1, dstY1, mask, filter) :: :ok when srcX0: integer(), srcY0: integer(), srcX1: integer(), srcY1: integer(), dstX0: integer(), dstY0: integer(), dstX1: integer(), dstY1: integer(), mask: integer(), filter: enum()
def blitFramebuffer(srcX0, srcY0, srcX1, srcY1, dstX0, dstY0, dstX1, dstY1, mask, filter), do: ...
@spec bufferData(target, size, data, usage) :: :ok when target: enum(), size: integer(), data: (offset() | mem()), usage: enum()
def bufferData(target, size, data, usage) when is_integer(data), do: ...
def bufferData(target, size, data, usage), do: ...
@spec bufferSubData(target, offset, size, data) :: :ok when target: enum(), offset: integer(), size: integer(), data: (offset() | mem())
def bufferSubData(target, offset, size, data) when is_integer(data), do: ...
def bufferSubData(target, offset, size, data), do: ...
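# Example (editor sketch, not part of the generated binding): uploading vertex
# data through a buffer object. `gl_array_buffer` and `gl_static_draw` stand in
# for the numeric GL_ARRAY_BUFFER and GL_STATIC_DRAW constants from the OpenGL
# headers; the data itself is an ordinary Elixir binary.
#
#     vertices = <<-0.5::float-size(32)-native, -0.5::float-size(32)-native,
#                   0.5::float-size(32)-native, -0.5::float-size(32)-native,
#                   0.0::float-size(32)-native,  0.5::float-size(32)-native>>
#     [buf] = :gl.genBuffers(1)
#     :gl.bindBuffer(gl_array_buffer, buf)
#     :gl.bufferData(gl_array_buffer, byte_size(vertices), vertices, gl_static_draw)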
def call(op, args), do: ...
@spec callList(list) :: :ok when list: integer()
def callList(list), do: ...
@spec callLists(lists) :: :ok when lists: [integer()]
def callLists(lists), do: ...
def cast(op, args), do: ...
@spec checkFramebufferStatus(target) :: enum() when target: enum()
def checkFramebufferStatus(target), do: ...
@spec clampColor(target, clamp) :: :ok when target: enum(), clamp: enum()
def clampColor(target, clamp), do: ...
@spec clear(mask) :: :ok when mask: integer()
def clear(mask), do: ...
@spec clearAccum(red, green, blue, alpha) :: :ok when red: float(), green: float(), blue: float(), alpha: float()
def clearAccum(red, green, blue, alpha), do: ...
@spec clearBufferfi(buffer, drawbuffer, depth, stencil) :: :ok when buffer: enum(), drawbuffer: integer(), depth: float(), stencil: integer()
def clearBufferfi(buffer, drawbuffer, depth, stencil), do: ...
@spec clearBufferfv(buffer, drawbuffer, value) :: :ok when buffer: enum(), drawbuffer: integer(), value: tuple()
def clearBufferfv(buffer, drawbuffer, value), do: ...
@spec clearBufferiv(buffer, drawbuffer, value) :: :ok when buffer: enum(), drawbuffer: integer(), value: tuple()
def clearBufferiv(buffer, drawbuffer, value), do: ...
@spec clearBufferuiv(buffer, drawbuffer, value) :: :ok when buffer: enum(), drawbuffer: integer(), value: tuple()
def clearBufferuiv(buffer, drawbuffer, value), do: ...
@spec clearColor(red, green, blue, alpha) :: :ok when red: clamp(), green: clamp(), blue: clamp(), alpha: clamp()
def clearColor(red, green, blue, alpha), do: ...
@spec clearDepth(depth) :: :ok when depth: clamp()
def clearDepth(depth), do: ...
@spec clearDepthf(d) :: :ok when d: clamp()
def clearDepthf(d), do: ...
@spec clearIndex(c) :: :ok when c: float()
def clearIndex(c), do: ...
@spec clearStencil(s) :: :ok when s: integer()
def clearStencil(s), do: ...
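# Example (editor sketch): clearing the screen at the start of a frame.
# `gl_color_buffer_bit` and `gl_depth_buffer_bit` stand in for the GL_*_BIT
# mask constants from the OpenGL headers; the mask argument is their bitwise OR.
#
#     :gl.clearColor(0.1, 0.1, 0.1, 1.0)
#     :gl.clearDepth(1.0)
#     :gl.clear(Bitwise.bor(gl_color_buffer_bit, gl_depth_buffer_bit))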
@spec clientActiveTexture(texture) :: :ok when texture: enum()
def clientActiveTexture(texture), do: ...
@spec clientWaitSync(sync, flags, timeout) :: enum() when sync: integer(), flags: integer(), timeout: integer()
def clientWaitSync(sync, flags, timeout), do: ...
@spec clipPlane(plane, equation) :: :ok when plane: enum(), equation: {float(), float(), float(), float()}
def clipPlane(plane, {e1, e2, e3, e4}), do: ...
@spec color3b(red, green, blue) :: :ok when red: integer(), green: integer(), blue: integer()
def color3b(red, green, blue), do: ...
@spec color3bv(v) :: :ok when v: {red :: integer(), green :: integer(), blue :: integer()}
def color3bv({red, green, blue}), do: ...
@spec color3d(red, green, blue) :: :ok when red: float(), green: float(), blue: float()
def color3d(red, green, blue), do: ...
@spec color3dv(v) :: :ok when v: {red :: float(), green :: float(), blue :: float()}
def color3dv({red, green, blue}), do: ...
@spec color3f(red, green, blue) :: :ok when red: float(), green: float(), blue: float()
def color3f(red, green, blue), do: ...
@spec color3fv(v) :: :ok when v: {red :: float(), green :: float(), blue :: float()}
def color3fv({red, green, blue}), do: ...
@spec color3i(red, green, blue) :: :ok when red: integer(), green: integer(), blue: integer()
def color3i(red, green, blue), do: ...
@spec color3iv(v) :: :ok when v: {red :: integer(), green :: integer(), blue :: integer()}
def color3iv({red, green, blue}), do: ...
@spec color3s(red, green, blue) :: :ok when red: integer(), green: integer(), blue: integer()
def color3s(red, green, blue), do: ...
@spec color3sv(v) :: :ok when v: {red :: integer(), green :: integer(), blue :: integer()}
def color3sv({red, green, blue}), do: ...
@spec color3ub(red, green, blue) :: :ok when red: integer(), green: integer(), blue: integer()
def color3ub(red, green, blue), do: ...
@spec color3ubv(v) :: :ok when v: {red :: integer(), green :: integer(), blue :: integer()}
def color3ubv({red, green, blue}), do: ...
@spec color3ui(red, green, blue) :: :ok when red: integer(), green: integer(), blue: integer()
def color3ui(red, green, blue), do: ...
@spec color3uiv(v) :: :ok when v: {red :: integer(), green :: integer(), blue :: integer()}
def color3uiv({red, green, blue}), do: ...
@spec color3us(red, green, blue) :: :ok when red: integer(), green: integer(), blue: integer()
def color3us(red, green, blue), do: ...
@spec color3usv(v) :: :ok when v: {red :: integer(), green :: integer(), blue :: integer()}
def color3usv({red, green, blue}), do: ...
@spec color4b(red, green, blue, alpha) :: :ok when red: integer(), green: integer(), blue: integer(), alpha: integer()
def color4b(red, green, blue, alpha), do: ...
@spec color4bv(v) :: :ok when v: {red :: integer(), green :: integer(), blue :: integer(), alpha :: integer()}
def color4bv({red, green, blue, alpha}), do: ...
@spec color4d(red, green, blue, alpha) :: :ok when red: float(), green: float(), blue: float(), alpha: float()
def color4d(red, green, blue, alpha), do: ...
@spec color4dv(v) :: :ok when v: {red :: float(), green :: float(), blue :: float(), alpha :: float()}
def color4dv({red, green, blue, alpha}), do: ...
@spec color4f(red, green, blue, alpha) :: :ok when red: float(), green: float(), blue: float(), alpha: float()
def color4f(red, green, blue, alpha), do: ...
@spec color4fv(v) :: :ok when v: {red :: float(), green :: float(), blue :: float(), alpha :: float()}
def color4fv({red, green, blue, alpha}), do: ...
@spec color4i(red, green, blue, alpha) :: :ok when red: integer(), green: integer(), blue: integer(), alpha: integer()
def color4i(red, green, blue, alpha), do: ...
@spec color4iv(v) :: :ok when v: {red :: integer(), green :: integer(), blue :: integer(), alpha :: integer()}
def color4iv({red, green, blue, alpha}), do: ...
@spec color4s(red, green, blue, alpha) :: :ok when red: integer(), green: integer(), blue: integer(), alpha: integer()
def color4s(red, green, blue, alpha), do: ...
@spec color4sv(v) :: :ok when v: {red :: integer(), green :: integer(), blue :: integer(), alpha :: integer()}
def color4sv({red, green, blue, alpha}), do: ...
@spec color4ub(red, green, blue, alpha) :: :ok when red: integer(), green: integer(), blue: integer(), alpha: integer()
def color4ub(red, green, blue, alpha), do: ...
@spec color4ubv(v) :: :ok when v: {red :: integer(), green :: integer(), blue :: integer(), alpha :: integer()}
def color4ubv({red, green, blue, alpha}), do: ...
@spec color4ui(red, green, blue, alpha) :: :ok when red: integer(), green: integer(), blue: integer(), alpha: integer()
def color4ui(red, green, blue, alpha), do: ...
@spec color4uiv(v) :: :ok when v: {red :: integer(), green :: integer(), blue :: integer(), alpha :: integer()}
def color4uiv({red, green, blue, alpha}), do: ...
@spec color4us(red, green, blue, alpha) :: :ok when red: integer(), green: integer(), blue: integer(), alpha: integer()
def color4us(red, green, blue, alpha), do: ...
@spec color4usv(v) :: :ok when v: {red :: integer(), green :: integer(), blue :: integer(), alpha :: integer()}
def color4usv({red, green, blue, alpha}), do: ...
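# (Editor note) Every color* variant above has a "v" form that takes a single
# tuple instead of separate components, so these two calls are equivalent:
#
#     :gl.color3f(1.0, 0.0, 0.0)
#     :gl.color3fv({1.0, 0.0, 0.0})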
@spec colorMask(red, green, blue, alpha) :: :ok when red: (0 | 1), green: (0 | 1), blue: (0 | 1), alpha: (0 | 1)
def colorMask(red, green, blue, alpha), do: ...
@spec colorMaski(index, r, g, b, a) :: :ok when index: integer(), r: (0 | 1), g: (0 | 1), b: (0 | 1), a: (0 | 1)
def colorMaski(index, r, g, b, a), do: ...
@spec colorMaterial(face, mode) :: :ok when face: enum(), mode: enum()
def colorMaterial(face, mode), do: ...
@spec colorPointer(size, type, stride, ptr) :: :ok when size: integer(), type: enum(), stride: integer(), ptr: (offset() | mem())
def colorPointer(size, type, stride, ptr) when is_integer(ptr), do: ...
def colorPointer(size, type, stride, ptr), do: ...
@spec colorSubTable(target, start, count, format, type, data) :: :ok when target: enum(), start: integer(), count: integer(), format: enum(), type: enum(), data: (offset() | mem())
def colorSubTable(target, start, count, format, type, data) when is_integer(data), do: ...
def colorSubTable(target, start, count, format, type, data), do: ...
@spec colorTable(target, internalformat, width, format, type, table) :: :ok when target: enum(), internalformat: enum(), width: integer(), format: enum(), type: enum(), table: (offset() | mem())
def colorTable(target, internalformat, width, format, type, table) when is_integer(table), do: ...
def colorTable(target, internalformat, width, format, type, table), do: ...
@spec colorTableParameterfv(target, pname, params) :: :ok when target: enum(), pname: enum(), params: {float(), float(), float(), float()}
def colorTableParameterfv(target, pname, {p1, p2, p3, p4}), do: ...
@spec colorTableParameteriv(target, pname, params) :: :ok when target: enum(), pname: enum(), params: {integer(), integer(), integer(), integer()}
def colorTableParameteriv(target, pname, {p1, p2, p3, p4}), do: ...
@spec compileShader(shader) :: :ok when shader: integer()
def compileShader(shader), do: ...
@spec compileShaderARB(shaderObj) :: :ok when shaderObj: integer()
def compileShaderARB(shaderObj), do: ...
@spec compileShaderIncludeARB(shader, path) :: :ok when shader: integer(), path: iolist()
def compileShaderIncludeARB(shader, path), do: ...
@spec compressedTexImage1D(target, level, internalformat, width, border, imageSize, data) :: :ok when target: enum(), level: integer(), internalformat: enum(), width: integer(), border: integer(), imageSize: integer(), data: (offset() | mem())
def compressedTexImage1D(target, level, internalformat, width, border, imageSize, data) when is_integer(data), do: ...
def compressedTexImage1D(target, level, internalformat, width, border, imageSize, data), do: ...
@spec compressedTexImage2D(target, level, internalformat, width, height, border, imageSize, data) :: :ok when target: enum(), level: integer(), internalformat: enum(), width: integer(), height: integer(), border: integer(), imageSize: integer(), data: (offset() | mem())
def compressedTexImage2D(target, level, internalformat, width, height, border, imageSize, data) when is_integer(data), do: ...
def compressedTexImage2D(target, level, internalformat, width, height, border, imageSize, data), do: ...
@spec compressedTexImage3D(target, level, internalformat, width, height, depth, border, imageSize, data) :: :ok when target: enum(), level: integer(), internalformat: enum(), width: integer(), height: integer(), depth: integer(), border: integer(), imageSize: integer(), data: (offset() | mem())
def compressedTexImage3D(target, level, internalformat, width, height, depth, border, imageSize, data) when is_integer(data), do: ...
def compressedTexImage3D(target, level, internalformat, width, height, depth, border, imageSize, data), do: ...
@spec compressedTexSubImage1D(target, level, xoffset, width, format, imageSize, data) :: :ok when target: enum(), level: integer(), xoffset: integer(), width: integer(), format: enum(), imageSize: integer(), data: (offset() | mem())
def compressedTexSubImage1D(target, level, xoffset, width, format, imageSize, data) when is_integer(data), do: ...
def compressedTexSubImage1D(target, level, xoffset, width, format, imageSize, data), do: ...
@spec compressedTexSubImage2D(target, level, xoffset, yoffset, width, height, format, imageSize, data) :: :ok when target: enum(), level: integer(), xoffset: integer(), yoffset: integer(), width: integer(), height: integer(), format: enum(), imageSize: integer(), data: (offset() | mem())
def compressedTexSubImage2D(target, level, xoffset, yoffset, width, height, format, imageSize, data) when is_integer(data), do: ...
def compressedTexSubImage2D(target, level, xoffset, yoffset, width, height, format, imageSize, data), do: ...
@spec compressedTexSubImage3D(target, level, xoffset, yoffset, zoffset, width, height, depth, format, imageSize, data) :: :ok when target: enum(), level: integer(), xoffset: integer(), yoffset: integer(), zoffset: integer(), width: integer(), height: integer(), depth: integer(), format: enum(), imageSize: integer(), data: (offset() | mem())
def compressedTexSubImage3D(target, level, xoffset, yoffset, zoffset, width, height, depth, format, imageSize, data) when is_integer(data), do: ...
def compressedTexSubImage3D(target, level, xoffset, yoffset, zoffset, width, height, depth, format, imageSize, data), do: ...
@spec convolutionFilter1D(target, internalformat, width, format, type, image) :: :ok when target: enum(), internalformat: enum(), width: integer(), format: enum(), type: enum(), image: (offset() | mem())
def convolutionFilter1D(target, internalformat, width, format, type, image) when is_integer(image), do: ...
def convolutionFilter1D(target, internalformat, width, format, type, image), do: ...
@spec convolutionFilter2D(target, internalformat, width, height, format, type, image) :: :ok when target: enum(), internalformat: enum(), width: integer(), height: integer(), format: enum(), type: enum(), image: (offset() | mem())
def convolutionFilter2D(target, internalformat, width, height, format, type, image) when is_integer(image), do: ...
def convolutionFilter2D(target, internalformat, width, height, format, type, image), do: ...
@spec convolutionParameterf(target, pname, params) :: :ok when target: enum(), pname: enum(), params: tuple()
def convolutionParameterf(target, pname, params), do: ...
@spec convolutionParameterfv(target :: enum(), pname :: enum(), params) :: :ok when params: {params :: tuple()}
def convolutionParameterfv(target, pname, {params}), do: ...
@spec convolutionParameteri(target, pname, params) :: :ok when target: enum(), pname: enum(), params: tuple()
def convolutionParameteri(target, pname, params), do: ...
@spec convolutionParameteriv(target :: enum(), pname :: enum(), params) :: :ok when params: {params :: tuple()}
def convolutionParameteriv(target, pname, {params}), do: ...
@spec copyBufferSubData(readTarget, writeTarget, readOffset, writeOffset, size) :: :ok when readTarget: enum(), writeTarget: enum(), readOffset: integer(), writeOffset: integer(), size: integer()
def copyBufferSubData(readTarget, writeTarget, readOffset, writeOffset, size), do: ...
@spec copyColorSubTable(target, start, x, y, width) :: :ok when target: enum(), start: integer(), x: integer(), y: integer(), width: integer()
def copyColorSubTable(target, start, x, y, width), do: ...
@spec copyColorTable(target, internalformat, x, y, width) :: :ok when target: enum(), internalformat: enum(), x: integer(), y: integer(), width: integer()
def copyColorTable(target, internalformat, x, y, width), do: ...
@spec copyConvolutionFilter1D(target, internalformat, x, y, width) :: :ok when target: enum(), internalformat: enum(), x: integer(), y: integer(), width: integer()
def copyConvolutionFilter1D(target, internalformat, x, y, width), do: ...
@spec copyConvolutionFilter2D(target, internalformat, x, y, width, height) :: :ok when target: enum(), internalformat: enum(), x: integer(), y: integer(), width: integer(), height: integer()
def copyConvolutionFilter2D(target, internalformat, x, y, width, height), do: ...
@spec copyPixels(x, y, width, height, type) :: :ok when x: integer(), y: integer(), width: integer(), height: integer(), type: enum()
def copyPixels(x, y, width, height, type), do: ...
@spec copyTexImage1D(target, level, internalformat, x, y, width, border) :: :ok when target: enum(), level: integer(), internalformat: enum(), x: integer(), y: integer(), width: integer(), border: integer()
def copyTexImage1D(target, level, internalformat, x, y, width, border), do: ...
@spec copyTexImage2D(target, level, internalformat, x, y, width, height, border) :: :ok when target: enum(), level: integer(), internalformat: enum(), x: integer(), y: integer(), width: integer(), height: integer(), border: integer()
def copyTexImage2D(target, level, internalformat, x, y, width, height, border), do: ...
@spec copyTexSubImage1D(target, level, xoffset, x, y, width) :: :ok when target: enum(), level: integer(), xoffset: integer(), x: integer(), y: integer(), width: integer()
def copyTexSubImage1D(target, level, xoffset, x, y, width), do: ...
@spec copyTexSubImage2D(target, level, xoffset, yoffset, x, y, width, height) :: :ok when target: enum(), level: integer(), xoffset: integer(), yoffset: integer(), x: integer(), y: integer(), width: integer(), height: integer()
def copyTexSubImage2D(target, level, xoffset, yoffset, x, y, width, height), do: ...
@spec copyTexSubImage3D(target, level, xoffset, yoffset, zoffset, x, y, width, height) :: :ok when target: enum(), level: integer(), xoffset: integer(), yoffset: integer(), zoffset: integer(), x: integer(), y: integer(), width: integer(), height: integer()
def copyTexSubImage3D(target, level, xoffset, yoffset, zoffset, x, y, width, height), do: ...
@spec createProgram() :: integer()
def createProgram(), do: ...
@spec createProgramObjectARB() :: integer()
def createProgramObjectARB(), do: ...
@spec createShader(type) :: integer() when type: enum()
def createShader(type), do: ...
@spec createShaderObjectARB(shaderType) :: integer() when shaderType: enum()
def createShaderObjectARB(shaderType), do: ...
@spec createShaderProgramv(type, strings) :: integer() when type: enum(), strings: iolist()
def createShaderProgramv(type, strings), do: ...
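# Example (editor sketch): building a shader program. `shaderSource/2`,
# `linkProgram/1` and `useProgram/1` are assumed to appear later in this module
# (alphabetical order); `vertex_src`/`fragment_src` are assumed GLSL sources and
# `gl_vertex_shader`/`gl_fragment_shader` stand in for the GL shader-type enums.
#
#     vs = :gl.createShader(gl_vertex_shader)
#     :gl.shaderSource(vs, [vertex_src])
#     :gl.compileShader(vs)
#     fs = :gl.createShader(gl_fragment_shader)
#     :gl.shaderSource(fs, [fragment_src])
#     :gl.compileShader(fs)
#     program = :gl.createProgram()
#     :gl.attachShader(program, vs)
#     :gl.attachShader(program, fs)
#     :gl.linkProgram(program)
#     :gl.useProgram(program)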
@spec cullFace(mode) :: :ok when mode: enum()
def cullFace(mode), do: ...
@spec currentPaletteMatrixARB(index) :: :ok when index: integer()
def currentPaletteMatrixARB(index), do: ...
@spec debugMessageControlARB(source, type, severity, ids, enabled) :: :ok when source: enum(), type: enum(), severity: enum(), ids: [integer()], enabled: (0 | 1)
def debugMessageControlARB(source, type, severity, ids, enabled), do: ...
@spec debugMessageInsertARB(source, type, id, severity, buf) :: :ok when source: enum(), type: enum(), id: integer(), severity: enum(), buf: charlist()
def debugMessageInsertARB(source, type, id, severity, buf), do: ...
@spec deleteBuffers(buffers) :: :ok when buffers: [integer()]
def deleteBuffers(buffers), do: ...
@spec deleteFramebuffers(framebuffers) :: :ok when framebuffers: [integer()]
def deleteFramebuffers(framebuffers), do: ...
@spec deleteLists(list, range) :: :ok when list: integer(), range: integer()
def deleteLists(list, range), do: ...
@spec deleteNamedStringARB(name) :: :ok when name: charlist()
def deleteNamedStringARB(name), do: ...
@spec deleteObjectARB(obj) :: :ok when obj: integer()
def deleteObjectARB(obj), do: ...
@spec deleteProgram(program) :: :ok when program: integer()
def deleteProgram(program), do: ...
@spec deleteProgramPipelines(pipelines) :: :ok when pipelines: [integer()]
def deleteProgramPipelines(pipelines), do: ...
@spec deleteProgramsARB(programs) :: :ok when programs: [integer()]
def deleteProgramsARB(programs), do: ...
@spec deleteQueries(ids) :: :ok when ids: [integer()]
def deleteQueries(ids), do: ...
@spec deleteRenderbuffers(renderbuffers) :: :ok when renderbuffers: [integer()]
def deleteRenderbuffers(renderbuffers), do: ...
@spec deleteSamplers(samplers) :: :ok when samplers: [integer()]
def deleteSamplers(samplers), do: ...
@spec deleteShader(shader) :: :ok when shader: integer()
def deleteShader(shader), do: ...
@spec deleteSync(sync) :: :ok when sync: integer()
def deleteSync(sync), do: ...
@spec deleteTextures(textures) :: :ok when textures: [integer()]
def deleteTextures(textures), do: ...
@spec deleteTransformFeedbacks(ids) :: :ok when ids: [integer()]
def deleteTransformFeedbacks(ids), do: ...
@spec deleteVertexArrays(arrays) :: :ok when arrays: [integer()]
def deleteVertexArrays(arrays), do: ...
@spec depthBoundsEXT(zmin, zmax) :: :ok when zmin: clamp(), zmax: clamp()
def depthBoundsEXT(zmin, zmax), do: ...
@spec depthFunc(func) :: :ok when func: enum()
def depthFunc(func), do: ...
@spec depthMask(flag) :: :ok when flag: (0 | 1)
def depthMask(flag), do: ...
@spec depthRange(near_val, far_val) :: :ok when near_val: clamp(), far_val: clamp()
def depthRange(near_val, far_val), do: ...
@spec depthRangeArrayv(first, v) :: :ok when first: integer(), v: [{clamp(), clamp()}]
def depthRangeArrayv(first, v), do: ...
@spec depthRangeIndexed(index, n, f) :: :ok when index: integer(), n: clamp(), f: clamp()
def depthRangeIndexed(index, n, f), do: ...
@spec depthRangef(n, f) :: :ok when n: clamp(), f: clamp()
def depthRangef(n, f), do: ...
@spec detachObjectARB(containerObj, attachedObj) :: :ok when containerObj: integer(), attachedObj: integer()
def detachObjectARB(containerObj, attachedObj), do: ...
@spec detachShader(program, shader) :: :ok when program: integer(), shader: integer()
def detachShader(program, shader), do: ...
@spec disable(cap) :: :ok when cap: enum()
def disable(cap), do: ...
@spec disableClientState(cap) :: :ok when cap: enum()
def disableClientState(cap), do: ...
@spec disableVertexAttribArray(index) :: :ok when index: integer()
def disableVertexAttribArray(index), do: ...
@spec disablei(target, index) :: :ok when target: enum(), index: integer()
def disablei(target, index), do: ...
@spec drawArrays(mode, first, count) :: :ok when mode: enum(), first: integer(), count: integer()
def drawArrays(mode, first, count), do: ...
@spec drawArraysIndirect(mode, indirect) :: :ok when mode: enum(), indirect: (offset() | mem())
def drawArraysIndirect(mode, indirect) when is_integer(indirect), do: ...
def drawArraysIndirect(mode, indirect), do: ...
@spec drawArraysInstanced(mode, first, count, primcount) :: :ok when mode: enum(), first: integer(), count: integer(), primcount: integer()
def drawArraysInstanced(mode, first, count, primcount), do: ...
@spec drawArraysInstancedBaseInstance(mode, first, count, primcount, baseinstance) :: :ok when mode: enum(), first: integer(), count: integer(), primcount: integer(), baseinstance: integer()
def drawArraysInstancedBaseInstance(mode, first, count, primcount, baseinstance), do: ...
@spec drawBuffer(mode) :: :ok when mode: enum()
def drawBuffer(mode), do: ...
@spec drawBuffers(bufs) :: :ok when bufs: [enum()]
def drawBuffers(bufs), do: ...
@spec drawElements(mode, count, type, indices) :: :ok when mode: enum(), count: integer(), type: enum(), indices: (offset() | mem())
def drawElements(mode, count, type, indices) when is_integer(indices), do: ...
def drawElements(mode, count, type, indices), do: ...
@spec drawElementsBaseVertex(mode, count, type, indices, basevertex) :: :ok when mode: enum(), count: integer(), type: enum(), indices: (offset() | mem()), basevertex: integer()
def drawElementsBaseVertex(mode, count, type, indices, basevertex) when is_integer(indices), do: ...
def drawElementsBaseVertex(mode, count, type, indices, basevertex), do: ...
@spec drawElementsIndirect(mode, type, indirect) :: :ok when mode: enum(), type: enum(), indirect: (offset() | mem())
def drawElementsIndirect(mode, type, indirect) when is_integer(indirect), do: ...
def drawElementsIndirect(mode, type, indirect), do: ...
@spec drawElementsInstanced(mode, count, type, indices, primcount) :: :ok when mode: enum(), count: integer(), type: enum(), indices: (offset() | mem()), primcount: integer()
def drawElementsInstanced(mode, count, type, indices, primcount) when is_integer(indices), do: ...
def drawElementsInstanced(mode, count, type, indices, primcount), do: ...
@spec drawElementsInstancedBaseInstance(mode, count, type, indices, primcount, baseinstance) :: :ok when mode: enum(), count: integer(), type: enum(), indices: (offset() | mem()), primcount: integer(), baseinstance: integer()
def drawElementsInstancedBaseInstance(mode, count, type, indices, primcount, baseinstance) when is_integer(indices), do: ...
def drawElementsInstancedBaseInstance(mode, count, type, indices, primcount, baseinstance), do: ...
@spec drawElementsInstancedBaseVertex(mode, count, type, indices, primcount, basevertex) :: :ok when mode: enum(), count: integer(), type: enum(), indices: (offset() | mem()), primcount: integer(), basevertex: integer()
def drawElementsInstancedBaseVertex(mode, count, type, indices, primcount, basevertex) when is_integer(indices), do: ...
def drawElementsInstancedBaseVertex(mode, count, type, indices, primcount, basevertex), do: ...
@spec drawElementsInstancedBaseVertexBaseInstance(mode, count, type, indices, primcount, basevertex, baseinstance) :: :ok when mode: enum(), count: integer(), type: enum(), indices: (offset() | mem()), primcount: integer(), basevertex: integer(), baseinstance: integer()
def drawElementsInstancedBaseVertexBaseInstance(mode, count, type, indices, primcount, basevertex, baseinstance) when is_integer(indices), do: ...
def drawElementsInstancedBaseVertexBaseInstance(mode, count, type, indices, primcount, basevertex, baseinstance), do: ...
@spec drawPixels(width, height, format, type, pixels) :: :ok when width: integer(), height: integer(), format: enum(), type: enum(), pixels: (offset() | mem())
def drawPixels(width, height, format, type, pixels) when is_integer(pixels), do: ...
def drawPixels(width, height, format, type, pixels), do: ...
@spec drawRangeElements(mode, start, erlangVariableEnd, count, type, indices) :: :ok when mode: enum(), start: integer(), erlangVariableEnd: integer(), count: integer(), type: enum(), indices: (offset() | mem())
def drawRangeElements(mode, start, erlangVariableEnd, count, type, indices) when is_integer(indices), do: ...
def drawRangeElements(mode, start, erlangVariableEnd, count, type, indices), do: ...
@spec drawRangeElementsBaseVertex(mode, start, erlangVariableEnd, count, type, indices, basevertex) :: :ok when mode: enum(), start: integer(), erlangVariableEnd: integer(), count: integer(), type: enum(), indices: (offset() | mem()), basevertex: integer()
def drawRangeElementsBaseVertex(mode, start, erlangVariableEnd, count, type, indices, basevertex) when is_integer(indices), do: ...
def drawRangeElementsBaseVertex(mode, start, erlangVariableEnd, count, type, indices, basevertex), do: ...
@spec drawTransformFeedback(mode, id) :: :ok when mode: enum(), id: integer()
def drawTransformFeedback(mode, id), do: ...
@spec drawTransformFeedbackInstanced(mode, id, primcount) :: :ok when mode: enum(), id: integer(), primcount: integer()
def drawTransformFeedbackInstanced(mode, id, primcount), do: ...
@spec drawTransformFeedbackStream(mode, id, stream) :: :ok when mode: enum(), id: integer(), stream: integer()
def drawTransformFeedbackStream(mode, id, stream), do: ...
@spec drawTransformFeedbackStreamInstanced(mode, id, stream, primcount) :: :ok when mode: enum(), id: integer(), stream: integer(), primcount: integer()
def drawTransformFeedbackStreamInstanced(mode, id, stream, primcount), do: ...
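# Example (editor sketch): issuing draw calls. `gl_triangles` and
# `gl_unsigned_int` stand in for the corresponding GL constants. With an element
# buffer bound, `indices` may be an integer byte offset instead of a binary,
# which selects the `is_integer/1` clause above.
#
#     :gl.drawArrays(gl_triangles, 0, 3)
#     :gl.drawElements(gl_triangles, 6, gl_unsigned_int, 0)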
@spec edgeFlag(flag) :: :ok when flag: (0 | 1)
def edgeFlag(flag), do: ...
@spec edgeFlagPointer(stride, ptr) :: :ok when stride: integer(), ptr: (offset() | mem())
def edgeFlagPointer(stride, ptr) when is_integer(ptr), do: ...
def edgeFlagPointer(stride, ptr), do: ...
@spec edgeFlagv(flag) :: :ok when flag: {flag :: (0 | 1)}
def edgeFlagv({flag}), do: ...
@spec enable(cap) :: :ok when cap: enum()
def enable(cap), do: ...
@spec enableClientState(cap) :: :ok when cap: enum()
def enableClientState(cap), do: ...
@spec enableVertexAttribArray(index) :: :ok when index: integer()
def enableVertexAttribArray(index), do: ...
@spec enablei(target, index) :: :ok when target: enum(), index: integer()
def enablei(target, index), do: ...
@spec unquote(:end)() :: :ok
def unquote(:end)(), do: ...
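# (Editor note) glEnd maps to a function literally named `end`, which is a
# reserved word in Elixir; that is why it is defined through `unquote(:end)`
# above. It can be called safely with apply/3 (vertex3f/3 appears later in this
# module; `gl_triangles` stands in for the GL constant):
#
#     :gl.begin(gl_triangles)
#     :gl.vertex3f(0.0, 0.5, 0.0)
#     apply(:gl, :end, [])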
@spec endConditionalRender() :: :ok
def endConditionalRender(), do: ...
@spec endList() :: :ok
def endList(), do: ...
@spec endQuery(target) :: :ok when target: enum()
def endQuery(target), do: ...
@spec endQueryIndexed(target, index) :: :ok when target: enum(), index: integer()
def endQueryIndexed(target, index), do: ...
@spec endTransformFeedback() :: :ok
def endTransformFeedback(), do: ...
@spec evalCoord1d(u) :: :ok when u: float()
def evalCoord1d(u), do: ...
@spec evalCoord1dv(u) :: :ok when u: {u :: float()}
def evalCoord1dv({u}), do: ...
@spec evalCoord1f(u) :: :ok when u: float()
def evalCoord1f(u), do: ...
@spec evalCoord1fv(u) :: :ok when u: {u :: float()}
def evalCoord1fv({u}), do: ...
@spec evalCoord2d(u, v) :: :ok when u: float(), v: float()
def evalCoord2d(u, v), do: ...
@spec evalCoord2dv(u) :: :ok when u: {u :: float(), v :: float()}
def evalCoord2dv({u, v}), do: ...
@spec evalCoord2f(u, v) :: :ok when u: float(), v: float()
def evalCoord2f(u, v), do: ...
@spec evalCoord2fv(u) :: :ok when u: {u :: float(), v :: float()}
def evalCoord2fv({u, v}), do: ...
@spec evalMesh1(mode, i1, i2) :: :ok when mode: enum(), i1: integer(), i2: integer()
def evalMesh1(mode, i1, i2), do: ...
@spec evalMesh2(mode, i1, i2, j1, j2) :: :ok when mode: enum(), i1: integer(), i2: integer(), j1: integer(), j2: integer()
def evalMesh2(mode, i1, i2, j1, j2), do: ...
@spec evalPoint1(i) :: :ok when i: integer()
def evalPoint1(i), do: ...
@spec evalPoint2(i, j) :: :ok when i: integer(), j: integer()
def evalPoint2(i, j), do: ...
@spec feedbackBuffer(size, type, buffer) :: :ok when size: integer(), type: enum(), buffer: mem()
def feedbackBuffer(size, type, buffer), do: ...
@spec fenceSync(condition, flags) :: integer() when condition: enum(), flags: integer()
def fenceSync(condition, flags), do: ...
@spec finish() :: :ok
def finish(), do: ...
@spec flush() :: :ok
def flush(), do: ...
@spec flushMappedBufferRange(target, offset, length) :: :ok when target: enum(), offset: integer(), length: integer()
def flushMappedBufferRange(target, offset, length), do: ...
@spec fogCoordPointer(type, stride, pointer) :: :ok when type: enum(), stride: integer(), pointer: (offset() | mem())
def fogCoordPointer(type, stride, pointer) when is_integer(pointer), do: ...
def fogCoordPointer(type, stride, pointer), do: ...
@spec fogCoordd(coord) :: :ok when coord: float()
def fogCoordd(coord), do: ...
@spec fogCoorddv(coord) :: :ok when coord: {coord :: float()}
def fogCoorddv({coord}), do: ...
@spec fogCoordf(coord) :: :ok when coord: float()
def fogCoordf(coord), do: ...
@spec fogCoordfv(coord) :: :ok when coord: {coord :: float()}
def fogCoordfv({coord}), do: ...
@spec fogf(pname, param) :: :ok when pname: enum(), param: float()
def fogf(pname, param), do: ...
@spec fogfv(pname, params) :: :ok when pname: enum(), params: tuple()
def fogfv(pname, params), do: ...
@spec fogi(pname, param) :: :ok when pname: enum(), param: integer()
def fogi(pname, param), do: ...
@spec fogiv(pname, params) :: :ok when pname: enum(), params: tuple()
def fogiv(pname, params), do: ...
@spec framebufferRenderbuffer(target, attachment, renderbuffertarget, renderbuffer) :: :ok when target: enum(), attachment: enum(), renderbuffertarget: enum(), renderbuffer: integer()
def framebufferRenderbuffer(target, attachment, renderbuffertarget, renderbuffer), do: ...
@spec framebufferTexture(target, attachment, texture, level) :: :ok when target: enum(), attachment: enum(), texture: integer(), level: integer()
def framebufferTexture(target, attachment, texture, level), do: ...
@spec framebufferTexture1D(target, attachment, textarget, texture, level) :: :ok when target: enum(), attachment: enum(), textarget: enum(), texture: integer(), level: integer()
def framebufferTexture1D(target, attachment, textarget, texture, level), do: ...
@spec framebufferTexture2D(target, attachment, textarget, texture, level) :: :ok when target: enum(), attachment: enum(), textarget: enum(), texture: integer(), level: integer()
def framebufferTexture2D(target, attachment, textarget, texture, level), do: ...
@spec framebufferTexture3D(target, attachment, textarget, texture, level, zoffset) :: :ok when target: enum(), attachment: enum(), textarget: enum(), texture: integer(), level: integer(), zoffset: integer()
def framebufferTexture3D(target, attachment, textarget, texture, level, zoffset), do: ...
@spec framebufferTextureFaceARB(target, attachment, texture, level, face) :: :ok when target: enum(), attachment: enum(), texture: integer(), level: integer(), face: enum()
def framebufferTextureFaceARB(target, attachment, texture, level, face), do: ...
@spec framebufferTextureLayer(target, attachment, texture, level, layer) :: :ok when target: enum(), attachment: enum(), texture: integer(), level: integer(), layer: integer()
def framebufferTextureLayer(target, attachment, texture, level, layer), do: ...
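# Example (editor sketch): attaching a renderbuffer to a framebuffer object.
# `gl_framebuffer`, `gl_renderbuffer`, `gl_rgba8`, `gl_color_attachment0` and
# `gl_framebuffer_complete` stand in for GL constants, and `renderbufferStorage/4`
# is assumed to appear later in this module.
#
#     [fbo] = :gl.genFramebuffers(1)
#     [rbo] = :gl.genRenderbuffers(1)
#     :gl.bindRenderbuffer(gl_renderbuffer, rbo)
#     :gl.renderbufferStorage(gl_renderbuffer, gl_rgba8, 640, 480)
#     :gl.bindFramebuffer(gl_framebuffer, fbo)
#     :gl.framebufferRenderbuffer(gl_framebuffer, gl_color_attachment0, gl_renderbuffer, rbo)
#     status = :gl.checkFramebufferStatus(gl_framebuffer)
#     status == gl_framebuffer_complete or raise "incomplete framebuffer"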
@spec frontFace(mode) :: :ok when mode: enum()
def frontFace(mode), do: ...
@spec frustum(left, right, bottom, top, near_val, far_val) :: :ok when left: float(), right: float(), bottom: float(), top: float(), near_val: float(), far_val: float()
def frustum(left, right, bottom, top, near_val, far_val), do: ...
@spec genBuffers(n) :: [integer()] when n: integer()
def genBuffers(n), do: ...
@spec genFramebuffers(n) :: [integer()] when n: integer()
def genFramebuffers(n), do: ...
@spec genLists(range) :: integer() when range: integer()
def genLists(range), do: ...
@spec genProgramPipelines(n) :: [integer()] when n: integer()
def genProgramPipelines(n), do: ...
@spec genProgramsARB(n) :: [integer()] when n: integer()
def genProgramsARB(n), do: ...
@spec genQueries(n) :: [integer()] when n: integer()
def genQueries(n), do: ...
@spec genRenderbuffers(n) :: [integer()] when n: integer()
def genRenderbuffers(n), do: ...
@spec genSamplers(count) :: [integer()] when count: integer()
def genSamplers(count), do: ...
@spec genTextures(n) :: [integer()] when n: integer()
def genTextures(n), do: ...
@spec genTransformFeedbacks(n) :: [integer()] when n: integer()
def genTransformFeedbacks(n), do: ...
@spec genVertexArrays(n) :: [integer()] when n: integer()
def genVertexArrays(n), do: ...
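# (Editor note) The gen*/1 functions above return a list of `n` freshly
# allocated object names, so a single object is usually taken with a match:
#
#     [vao] = :gl.genVertexArrays(1)
#     :gl.bindVertexArray(vao)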
@spec generateMipmap(target) :: :ok when target: enum()
def generateMipmap(target), do: ...
@spec getActiveAttrib(program, index, bufSize) :: {size :: integer(), type :: enum(), name :: charlist()} when program: integer(), index: integer(), bufSize: integer()
def getActiveAttrib(program, index, bufSize), do: ...
@spec getActiveAttribARB(programObj, index, maxLength) :: {size :: integer(), type :: enum(), name :: charlist()} when programObj: integer(), index: integer(), maxLength: integer()
def getActiveAttribARB(programObj, index, maxLength), do: ...
@spec getActiveSubroutineName(program, shadertype, index, bufsize) :: charlist() when program: integer(), shadertype: enum(), index: integer(), bufsize: integer()
def getActiveSubroutineName(program, shadertype, index, bufsize), do: ...
@spec getActiveSubroutineUniformName(program, shadertype, index, bufsize) :: charlist() when program: integer(), shadertype: enum(), index: integer(), bufsize: integer()
def getActiveSubroutineUniformName(program, shadertype, index, bufsize), do: ...
@spec getActiveUniform(program, index, bufSize) :: {size :: integer(), type :: enum(), name :: charlist()} when program: integer(), index: integer(), bufSize: integer()
def getActiveUniform(program, index, bufSize), do: ...
@spec getActiveUniformARB(programObj, index, maxLength) :: {size :: integer(), type :: enum(), name :: charlist()} when programObj: integer(), index: integer(), maxLength: integer()
def getActiveUniformARB(programObj, index, maxLength), do: ...
@spec getActiveUniformBlockName(program, uniformBlockIndex, bufSize) :: charlist() when program: integer(), uniformBlockIndex: integer(), bufSize: integer()
def getActiveUniformBlockName(program, uniformBlockIndex, bufSize), do: ...
@spec getActiveUniformBlockiv(program, uniformBlockIndex, pname, params) :: :ok when program: integer(), uniformBlockIndex: integer(), pname: enum(), params: mem()
def getActiveUniformBlockiv(program, uniformBlockIndex, pname, params), do: ...
@spec getActiveUniformName(program, uniformIndex, bufSize) :: charlist() when program: integer(), uniformIndex: integer(), bufSize: integer()
def getActiveUniformName(program, uniformIndex, bufSize), do: ...
@spec getActiveUniformsiv(program, uniformIndices, pname) :: [integer()] when program: integer(), uniformIndices: [integer()], pname: enum()
def getActiveUniformsiv(program, uniformIndices, pname), do: ...
@spec getAttachedObjectsARB(containerObj, maxCount) :: [integer()] when containerObj: integer(), maxCount: integer()
def getAttachedObjectsARB(containerObj, maxCount), do: ...
@spec getAttachedShaders(program, maxCount) :: [integer()] when program: integer(), maxCount: integer()
def getAttachedShaders(program, maxCount), do: ...
@spec getAttribLocation(program, name) :: integer() when program: integer(), name: charlist()
def getAttribLocation(program, name), do: ...
@spec getAttribLocationARB(programObj, name) :: integer() when programObj: integer(), name: charlist()
def getAttribLocationARB(programObj, name), do: ...
@spec getBooleani_v(target, index) :: [(0 | 1)] when target: enum(), index: integer()
def getBooleani_v(target, index), do: ...
@spec getBooleanv(pname) :: [(0 | 1)] when pname: enum()
def getBooleanv(pname), do: ...
@spec getBufferParameteri64v(target, pname) :: [integer()] when target: enum(), pname: enum()
def getBufferParameteri64v(target, pname), do: ...
@spec getBufferParameteriv(target, pname) :: integer() when target: enum(), pname: enum()
def getBufferParameteriv(target, pname), do: ...
@spec getBufferParameterivARB(target, pname) :: [integer()] when target: enum(), pname: enum()
def getBufferParameterivARB(target, pname), do: ...
@spec getBufferSubData(target, offset, size, data) :: :ok when target: enum(), offset: integer(), size: integer(), data: mem()
def getBufferSubData(target, offset, size, data), do: ...
@spec getClipPlane(plane) :: {float(), float(), float(), float()} when plane: enum()
def getClipPlane(plane), do: ...
@spec getColorTable(target, format, type, table) :: :ok when target: enum(), format: enum(), type: enum(), table: mem()
def getColorTable(target, format, type, table), do: ...
@spec getColorTableParameterfv(target, pname) :: {float(), float(), float(), float()} when target: enum(), pname: enum()
def getColorTableParameterfv(target, pname), do: ...
@spec getColorTableParameteriv(target, pname) :: {integer(), integer(), integer(), integer()} when target: enum(), pname: enum()
def getColorTableParameteriv(target, pname), do: ...
@spec getCompressedTexImage(target, lod, img) :: :ok when target: enum(), lod: integer(), img: mem()
def getCompressedTexImage(target, lod, img), do: ...
@spec getConvolutionFilter(target, format, type, image) :: :ok when target: enum(), format: enum(), type: enum(), image: mem()
def getConvolutionFilter(target, format, type, image), do: ...
@spec getConvolutionParameterfv(target, pname) :: {float(), float(), float(), float()} when target: enum(), pname: enum()
def getConvolutionParameterfv(target, pname), do: ...
@spec getConvolutionParameteriv(target, pname) :: {integer(), integer(), integer(), integer()} when target: enum(), pname: enum()
def getConvolutionParameteriv(target, pname), do: ...
@spec getDebugMessageLogARB(count, bufsize) :: {integer(), sources :: [enum()], types :: [enum()], ids :: [integer()], severities :: [enum()], messageLog :: [charlist()]} when count: integer(), bufsize: integer()
def getDebugMessageLogARB(count, bufsize), do: ...
@spec getDoublei_v(target, index) :: [float()] when target: enum(), index: integer()
def getDoublei_v(target, index), do: ...
@spec getDoublev(pname) :: [float()] when pname: enum()
def getDoublev(pname), do: ...
@spec getError() :: enum()
def getError(), do: ...
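# Example (editor sketch): polling for errors after a batch of GL calls.
# 0 is GL_NO_ERROR; any other value is one of the GL error enums.
#
#     case :gl.getError() do
#       0 -> :ok
#       err -> {:gl_error, err}
#     end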
@spec getFloati_v(target, index) :: [float()] when target: enum(), index: integer()
def getFloati_v(target, index), do: ...
@spec getFloatv(pname) :: [float()] when pname: enum()
def getFloatv(pname), do: ...
@spec getFragDataIndex(program, name) :: integer() when program: integer(), name: charlist()
def getFragDataIndex(program, name), do: ...
@spec getFragDataLocation(program, name) :: integer() when program: integer(), name: charlist()
def getFragDataLocation(program, name), do: ...
@spec getFramebufferAttachmentParameteriv(target, attachment, pname) :: integer() when target: enum(), attachment: enum(), pname: enum()
def getFramebufferAttachmentParameteriv(target, attachment, pname), do: ...
@spec getGraphicsResetStatusARB() :: enum()
def getGraphicsResetStatusARB(), do: ...
@spec getHandleARB(pname) :: integer() when pname: enum()
def getHandleARB(pname), do: ...
@spec getHistogram(target, reset, format, type, values) :: :ok when target: enum(), reset: (0 | 1), format: enum(), type: enum(), values: mem()
def getHistogram(target, reset, format, type, values), do: ...
@spec getHistogramParameterfv(target, pname) :: {float()} when target: enum(), pname: enum()
def getHistogramParameterfv(target, pname), do: ...
@spec getHistogramParameteriv(target, pname) :: {integer()} when target: enum(), pname: enum()
def getHistogramParameteriv(target, pname), do: ...
@spec getInfoLogARB(obj, maxLength) :: charlist() when obj: integer(), maxLength: integer()
def getInfoLogARB(obj, maxLength), do: ...
@spec getInteger64i_v(target, index) :: [integer()] when target: enum(), index: integer()
def getInteger64i_v(target, index), do: ...
@spec getInteger64v(pname) :: [integer()] when pname: enum()
def getInteger64v(pname), do: ...
@spec getIntegeri_v(target, index) :: [integer()] when target: enum(), index: integer()
def getIntegeri_v(target, index), do: ...
@spec getIntegerv(pname) :: [integer()] when pname: enum()
def getIntegerv(pname), do: ...
@spec getInternalformativ(target, internalformat, pname, bufSize) :: [integer()] when target: enum(), internalformat: enum(), pname: enum(), bufSize: integer()
def getInternalformativ(target, internalformat, pname, bufSize), do: ...
@spec getLightfv(light, pname) :: {float(), float(), float(), float()} when light: enum(), pname: enum()
def getLightfv(light, pname), do: ...
@spec getLightiv(light, pname) :: {integer(), integer(), integer(), integer()} when light: enum(), pname: enum()
def getLightiv(light, pname), do: ...
@spec getMapdv(target, query, v) :: :ok when target: enum(), query: enum(), v: mem()
def getMapdv(target, query, v), do: ...
@spec getMapfv(target, query, v) :: :ok when target: enum(), query: enum(), v: mem()
def getMapfv(target, query, v), do: ...
@spec getMapiv(target, query, v) :: :ok when target: enum(), query: enum(), v: mem()
def getMapiv(target, query, v), do: ...
@spec getMaterialfv(face, pname) :: {float(), float(), float(), float()} when face: enum(), pname: enum()
def getMaterialfv(face, pname), do: ...
@spec getMaterialiv(face, pname) :: {integer(), integer(), integer(), integer()} when face: enum(), pname: enum()
def getMaterialiv(face, pname), do: ...
@spec getMinmax(target, reset, format, types, values) :: :ok when target: enum(), reset: (0 | 1), format: enum(), types: enum(), values: mem()
def getMinmax(target, reset, format, types, values), do: ...
@spec getMinmaxParameterfv(target, pname) :: {float()} when target: enum(), pname: enum()
def getMinmaxParameterfv(target, pname), do: ...
@spec getMinmaxParameteriv(target, pname) :: {integer()} when target: enum(), pname: enum()
def getMinmaxParameteriv(target, pname), do: ...
@spec getMultisamplefv(pname, index) :: {float(), float()} when pname: enum(), index: integer()
def getMultisamplefv(pname, index), do: ...
@spec getNamedStringARB(name, bufSize) :: charlist() when name: charlist(), bufSize: integer()
def getNamedStringARB(name, bufSize), do: ...
@spec getNamedStringivARB(name, pname) :: integer() when name: charlist(), pname: enum()
def getNamedStringivARB(name, pname), do: ...
@spec getObjectParameterfvARB(obj, pname) :: float() when obj: integer(), pname: enum()
def getObjectParameterfvARB(obj, pname), do: ...
@spec getObjectParameterivARB(obj, pname) :: integer() when obj: integer(), pname: enum()
def getObjectParameterivARB(obj, pname), do: ...
@spec getPixelMapfv(map, values) :: :ok when map: enum(), values: mem()
def getPixelMapfv(map, values), do: ...
@spec getPixelMapuiv(map, values) :: :ok when map: enum(), values: mem()
def getPixelMapuiv(map, values), do: ...
@spec getPixelMapusv(map, values) :: :ok when map: enum(), values: mem()
def getPixelMapusv(map, values), do: ...
@spec getPolygonStipple() :: binary()
def getPolygonStipple(), do: ...
@spec getProgramBinary(program, bufSize) :: {binaryFormat :: enum(), binary :: binary()} when program: integer(), bufSize: integer()
def getProgramBinary(program, bufSize), do: ...
@spec getProgramEnvParameterdvARB(target, index) :: {float(), float(), float(), float()} when target: enum(), index: integer()
def getProgramEnvParameterdvARB(target, index), do: ...
@spec getProgramEnvParameterfvARB(target, index) :: {float(), float(), float(), float()} when target: enum(), index: integer()
def getProgramEnvParameterfvARB(target, index), do: ...
@spec getProgramInfoLog(program, bufSize) :: charlist() when program: integer(), bufSize: integer()
def getProgramInfoLog(program, bufSize), do: ...
@spec getProgramLocalParameterdvARB(target, index) :: {float(), float(), float(), float()} when target: enum(), index: integer()
def getProgramLocalParameterdvARB(target, index), do: ...
@spec getProgramLocalParameterfvARB(target, index) :: {float(), float(), float(), float()} when target: enum(), index: integer()
def getProgramLocalParameterfvARB(target, index), do: ...
@spec getProgramPipelineInfoLog(pipeline, bufSize) :: charlist() when pipeline: integer(), bufSize: integer()
def getProgramPipelineInfoLog(pipeline, bufSize), do: ...
@spec getProgramPipelineiv(pipeline, pname) :: integer() when pipeline: integer(), pname: enum()
def getProgramPipelineiv(pipeline, pname), do: ...
@spec getProgramStageiv(program, shadertype, pname) :: integer() when program: integer(), shadertype: enum(), pname: enum()
def getProgramStageiv(program, shadertype, pname), do: ...
@spec getProgramStringARB(target, pname, string) :: :ok when target: enum(), pname: enum(), string: mem()
def getProgramStringARB(target, pname, string), do: ...
@spec getProgramiv(program, pname) :: integer() when program: integer(), pname: enum()
def getProgramiv(program, pname), do: ...
@spec getQueryIndexediv(target, index, pname) :: integer() when target: enum(), index: integer(), pname: enum()
def getQueryIndexediv(target, index, pname), do: ...
@spec getQueryObjecti64v(id, pname) :: integer() when id: integer(), pname: enum()
def getQueryObjecti64v(id, pname), do: ...
@spec getQueryObjectiv(id, pname) :: integer() when id: integer(), pname: enum()
def getQueryObjectiv(id, pname), do: ...
@spec getQueryObjectui64v(id, pname) :: integer() when id: integer(), pname: enum()
def getQueryObjectui64v(id, pname), do: ...
@spec getQueryObjectuiv(id, pname) :: integer() when id: integer(), pname: enum()
def getQueryObjectuiv(id, pname), do: ...
@spec getQueryiv(target, pname) :: integer() when target: enum(), pname: enum()
def getQueryiv(target, pname), do: ...
@spec getRenderbufferParameteriv(target, pname) :: integer() when target: enum(), pname: enum()
def getRenderbufferParameteriv(target, pname), do: ...
@spec getSamplerParameterIiv(sampler, pname) :: [integer()] when sampler: integer(), pname: enum()
def getSamplerParameterIiv(sampler, pname), do: ...
@spec getSamplerParameterIuiv(sampler, pname) :: [integer()] when sampler: integer(), pname: enum()
def getSamplerParameterIuiv(sampler, pname), do: ...
@spec getSamplerParameterfv(sampler, pname) :: [float()] when sampler: integer(), pname: enum()
def getSamplerParameterfv(sampler, pname), do: ...
@spec getSamplerParameteriv(sampler, pname) :: [integer()] when sampler: integer(), pname: enum()
def getSamplerParameteriv(sampler, pname), do: ...
@spec getShaderInfoLog(shader, bufSize) :: charlist() when shader: integer(), bufSize: integer()
def getShaderInfoLog(shader, bufSize), do: ...
@spec getShaderPrecisionFormat(shadertype, precisiontype) :: {range :: {integer(), integer()}, precision :: integer()} when shadertype: enum(), precisiontype: enum()
def getShaderPrecisionFormat(shadertype, precisiontype), do: ...
@spec getShaderSource(shader, bufSize) :: charlist() when shader: integer(), bufSize: integer()
def getShaderSource(shader, bufSize), do: ...
@spec getShaderSourceARB(obj, maxLength) :: charlist() when obj: integer(), maxLength: integer()
def getShaderSourceARB(obj, maxLength), do: ...
@spec getShaderiv(shader, pname) :: integer() when shader: integer(), pname: enum()
def getShaderiv(shader, pname), do: ...
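# Example (editor sketch): checking whether a shader compiled and fetching the
# log if it did not. `gl_compile_status` and `gl_info_log_length` stand in for
# the corresponding GL constants; 0 means GL_FALSE.
#
#     if :gl.getShaderiv(shader, gl_compile_status) == 0 do
#       len = :gl.getShaderiv(shader, gl_info_log_length)
#       IO.puts(:gl.getShaderInfoLog(shader, len))
#     end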
@spec getString(name) :: charlist() when name: enum()
def getString(name), do: ...
@spec getStringi(name, index) :: charlist() when name: enum(), index: integer()
def getStringi(name, index), do: ...
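# Example (editor sketch): querying implementation strings. `gl_version` stands
# in for GL_VERSION; the result is a charlist, so convert it for ordinary
# string handling:
#
#     :gl.getString(gl_version) |> List.to_string()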
@spec getSubroutineIndex(program, shadertype, name) :: integer() when program: integer(), shadertype: enum(), name: charlist()
def getSubroutineIndex(program, shadertype, name), do: ...
@spec getSubroutineUniformLocation(program, shadertype, name) :: integer() when program: integer(), shadertype: enum(), name: charlist()
def getSubroutineUniformLocation(program, shadertype, name), do: ...
@spec getSynciv(sync, pname, bufSize) :: [integer()] when sync: integer(), pname: enum(), bufSize: integer()
def getSynciv(sync, pname, bufSize), do: ...
@spec getTexEnvfv(target, pname) :: {float(), float(), float(), float()} when target: enum(), pname: enum()
def getTexEnvfv(target, pname), do: ...
@spec getTexEnviv(target, pname) :: {integer(), integer(), integer(), integer()} when target: enum(), pname: enum()
def getTexEnviv(target, pname), do: ...
@spec getTexGendv(coord, pname) :: {float(), float(), float(), float()} when coord: enum(), pname: enum()
def getTexGendv(coord, pname), do: ...
@spec getTexGenfv(coord, pname) :: {float(), float(), float(), float()} when coord: enum(), pname: enum()
def getTexGenfv(coord, pname), do: ...
@spec getTexGeniv(coord, pname) :: {integer(), integer(), integer(), integer()} when coord: enum(), pname: enum()
def getTexGeniv(coord, pname), do: ...
@spec getTexImage(target, level, format, type, pixels) :: :ok when target: enum(), level: integer(), format: enum(), type: enum(), pixels: mem()
def getTexImage(target, level, format, type, pixels), do: ...
@spec getTexLevelParameterfv(target, level, pname) :: {float()} when target: enum(), level: integer(), pname: enum()
def getTexLevelParameterfv(target, level, pname), do: ...
@spec getTexLevelParameteriv(target, level, pname) :: {integer()} when target: enum(), level: integer(), pname: enum()
def getTexLevelParameteriv(target, level, pname), do: ...
@spec getTexParameterIiv(target, pname) :: {integer(), integer(), integer(), integer()} when target: enum(), pname: enum()
def getTexParameterIiv(target, pname), do: ...
@spec getTexParameterIuiv(target, pname) :: {integer(), integer(), integer(), integer()} when target: enum(), pname: enum()
def getTexParameterIuiv(target, pname), do: ...
@spec getTexParameterfv(target, pname) :: {float(), float(), float(), float()} when target: enum(), pname: enum()
def getTexParameterfv(target, pname), do: ...
@spec getTexParameteriv(target, pname) :: {integer(), integer(), integer(), integer()} when target: enum(), pname: enum()
def getTexParameteriv(target, pname), do: ...
@spec getTransformFeedbackVarying(program, index, bufSize) :: {size :: integer(), type :: enum(), name :: charlist()} when program: integer(), index: integer(), bufSize: integer()
def getTransformFeedbackVarying(program, index, bufSize), do: ...
@spec getUniformBlockIndex(program, uniformBlockName) :: integer() when program: integer(), uniformBlockName: charlist()
def getUniformBlockIndex(program, uniformBlockName), do: ...
@spec getUniformIndices(program, uniformNames) :: [integer()] when program: integer(), uniformNames: iolist()
def getUniformIndices(program, uniformNames), do: ...
@spec getUniformLocation(program, name) :: integer() when program: integer(), name: charlist()
def getUniformLocation(program, name), do: ...
@spec getUniformLocationARB(programObj, name) :: integer() when programObj: integer(), name: charlist()
def getUniformLocationARB(programObj, name), do: ...
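# Example (editor sketch): looking up a uniform location by name. The name is a
# charlist; the uniform* setters that consume the location appear later in this
# module.
#
#     loc = :gl.getUniformLocation(program, ~c"u_color")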
@spec getUniformSubroutineuiv(shadertype, location) :: {integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer()} when shadertype: enum(), location: integer()
def getUniformSubroutineuiv(shadertype, location), do: ...
@spec getUniformdv(program, location) :: matrix() when program: integer(), location: integer()
def getUniformdv(program, location), do: ...
@spec getUniformfv(program, location) :: matrix() when program: integer(), location: integer()
def getUniformfv(program, location), do: ...
@spec getUniformfvARB(programObj, location) :: matrix() when programObj: integer(), location: integer()
def getUniformfvARB(programObj, location), do: ...
@spec getUniformiv(program, location) :: {integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer()} when program: integer(), location: integer()
def getUniformiv(program, location), do: ...
@spec getUniformivARB(programObj, location) :: {integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer()} when programObj: integer(), location: integer()
def getUniformivARB(programObj, location), do: ...
@spec getUniformuiv(program, location) :: {integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer(), integer()} when program: integer(), location: integer()
def getUniformuiv(program, location), do: ...
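# A minimal usage sketch for the getUniform* readback calls: look up a uniform
# by name, then read its current value back from the program. `prog` and the
# uniform name "u_color" are illustrative, not part of the API; the name is a
# charlist as required by the spec, and the result is a tuple of floats.
#
#     loc = getUniformLocation(prog, ~c"u_color")
#     color = getUniformfv(prog, loc)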
@spec getVertexAttribIiv(index, pname) :: {integer(), integer(), integer(), integer()} when index: integer(), pname: enum()
def getVertexAttribIiv(index, pname), do: ...
@spec getVertexAttribIuiv(index, pname) :: {integer(), integer(), integer(), integer()} when index: integer(), pname: enum()
def getVertexAttribIuiv(index, pname), do: ...
@spec getVertexAttribLdv(index, pname) :: {float(), float(), float(), float()} when index: integer(), pname: enum()
def getVertexAttribLdv(index, pname), do: ...
@spec getVertexAttribdv(index, pname) :: {float(), float(), float(), float()} when index: integer(), pname: enum()
def getVertexAttribdv(index, pname), do: ...
@spec getVertexAttribfv(index, pname) :: {float(), float(), float(), float()} when index: integer(), pname: enum()
def getVertexAttribfv(index, pname), do: ...
@spec getVertexAttribiv(index, pname) :: {integer(), integer(), integer(), integer()} when index: integer(), pname: enum()
def getVertexAttribiv(index, pname), do: ...
@spec hint(target, mode) :: :ok when target: enum(), mode: enum()
def hint(target, mode), do: ...
@spec histogram(target, width, internalformat, sink) :: :ok when target: enum(), width: integer(), internalformat: enum(), sink: (0 | 1)
def histogram(target, width, internalformat, sink), do: ...
@spec indexMask(mask) :: :ok when mask: integer()
def indexMask(mask), do: ...
@spec indexPointer(type, stride, ptr) :: :ok when type: enum(), stride: integer(), ptr: (offset() | mem())
def indexPointer(type, stride, ptr) when is_integer(ptr), do: ...
def indexPointer(type, stride, ptr), do: ...
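# The two indexPointer/3 clauses mirror the (offset() | mem()) spec: an integer
# is treated as a byte offset into the currently bound buffer object, anything
# else as a client-side mem() block. The same split recurs in the other
# *Pointer and pixel-transfer calls below. Sketch, using the standard OpenGL
# constant GL_FLOAT = 0x1406:
#
#     indexPointer(0x1406, 0, 0)   # tightly packed floats at offset 0 of the bound buffer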
@spec indexd(c) :: :ok when c: float()
def indexd(c), do: ...
@spec indexdv(c) :: :ok when c: {c :: float()}
def indexdv({c}), do: ...
@spec indexf(c) :: :ok when c: float()
def indexf(c), do: ...
@spec indexfv(c) :: :ok when c: {c :: float()}
def indexfv({c}), do: ...
@spec indexi(c) :: :ok when c: integer()
def indexi(c), do: ...
@spec indexiv(c) :: :ok when c: {c :: integer()}
def indexiv({c}), do: ...
@spec indexs(c) :: :ok when c: integer()
def indexs(c), do: ...
@spec indexsv(c) :: :ok when c: {c :: integer()}
def indexsv({c}), do: ...
@spec indexub(c) :: :ok when c: integer()
def indexub(c), do: ...
@spec indexubv(c) :: :ok when c: {c :: integer()}
def indexubv({c}), do: ...
@spec initNames() :: :ok
def initNames(), do: ...
@spec interleavedArrays(format, stride, pointer) :: :ok when format: enum(), stride: integer(), pointer: (offset() | mem())
def interleavedArrays(format, stride, pointer) when is_integer(pointer), do: ...
def interleavedArrays(format, stride, pointer), do: ...
@spec isBuffer(buffer) :: (0 | 1) when buffer: integer()
def isBuffer(buffer), do: ...
@spec isEnabled(cap) :: (0 | 1) when cap: enum()
def isEnabled(cap), do: ...
@spec isEnabledi(target, index) :: (0 | 1) when target: enum(), index: integer()
def isEnabledi(target, index), do: ...
@spec isFramebuffer(framebuffer) :: (0 | 1) when framebuffer: integer()
def isFramebuffer(framebuffer), do: ...
@spec isList(list) :: (0 | 1) when list: integer()
def isList(list), do: ...
@spec isNamedStringARB(name) :: (0 | 1) when name: charlist()
def isNamedStringARB(name), do: ...
@spec isProgram(program) :: (0 | 1) when program: integer()
def isProgram(program), do: ...
@spec isProgramPipeline(pipeline) :: (0 | 1) when pipeline: integer()
def isProgramPipeline(pipeline), do: ...
@spec isQuery(id) :: (0 | 1) when id: integer()
def isQuery(id), do: ...
@spec isRenderbuffer(renderbuffer) :: (0 | 1) when renderbuffer: integer()
def isRenderbuffer(renderbuffer), do: ...
@spec isSampler(sampler) :: (0 | 1) when sampler: integer()
def isSampler(sampler), do: ...
@spec isShader(shader) :: (0 | 1) when shader: integer()
def isShader(shader), do: ...
@spec isSync(sync) :: (0 | 1) when sync: integer()
def isSync(sync), do: ...
@spec isTexture(texture) :: (0 | 1) when texture: integer()
def isTexture(texture), do: ...
@spec isTransformFeedback(id) :: (0 | 1) when id: integer()
def isTransformFeedback(id), do: ...
@spec isVertexArray(array) :: (0 | 1) when array: integer()
def isVertexArray(array), do: ...
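# The is* predicates return 0 | 1 rather than true/false, so convert explicitly
# when an Elixir boolean is needed. Sketch (`tex` is an illustrative texture id):
#
#     texture? = isTexture(tex) == 1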
@spec lightModelf(pname, param) :: :ok when pname: enum(), param: float()
def lightModelf(pname, param), do: ...
@spec lightModelfv(pname, params) :: :ok when pname: enum(), params: tuple()
def lightModelfv(pname, params), do: ...
@spec lightModeli(pname, param) :: :ok when pname: enum(), param: integer()
def lightModeli(pname, param), do: ...
@spec lightModeliv(pname, params) :: :ok when pname: enum(), params: tuple()
def lightModeliv(pname, params), do: ...
@spec lightf(light, pname, param) :: :ok when light: enum(), pname: enum(), param: float()
def lightf(light, pname, param), do: ...
@spec lightfv(light, pname, params) :: :ok when light: enum(), pname: enum(), params: tuple()
def lightfv(light, pname, params), do: ...
@spec lighti(light, pname, param) :: :ok when light: enum(), pname: enum(), param: integer()
def lighti(light, pname, param), do: ...
@spec lightiv(light, pname, params) :: :ok when light: enum(), pname: enum(), params: tuple()
def lightiv(light, pname, params), do: ...
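# The fixed-function light* setters take their vector parameters as tuples,
# matching the params: tuple() specs above. Sketch, using the standard OpenGL
# constants GL_LIGHT0 = 0x4000 and GL_POSITION = 0x1203 (a directional light):
#
#     lightfv(0x4000, 0x1203, {0.0, 1.0, 1.0, 0.0})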
@spec lineStipple(factor, pattern) :: :ok when factor: integer(), pattern: integer()
def lineStipple(factor, pattern), do: ...
@spec lineWidth(width) :: :ok when width: float()
def lineWidth(width), do: ...
@spec linkProgram(program) :: :ok when program: integer()
def linkProgram(program), do: ...
@spec linkProgramARB(programObj) :: :ok when programObj: integer()
def linkProgramARB(programObj), do: ...
@spec listBase(base) :: :ok when base: integer()
def listBase(base), do: ...
@spec loadIdentity() :: :ok
def loadIdentity(), do: ...
@spec loadMatrixd(m) :: :ok when m: matrix()
def loadMatrixd({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12, m13, m14, m15, m16}), do: ...
def loadMatrixd({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12}), do: ...
@spec loadMatrixf(m) :: :ok when m: matrix()
def loadMatrixf({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12, m13, m14, m15, m16}), do: ...
def loadMatrixf({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12}), do: ...
@spec loadName(name) :: :ok when name: integer()
def loadName(name), do: ...
@spec loadTransposeMatrixd(m) :: :ok when m: matrix()
def loadTransposeMatrixd({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12, m13, m14, m15, m16}), do: ...
def loadTransposeMatrixd({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12}), do: ...
@spec loadTransposeMatrixdARB(m) :: :ok when m: matrix()
def loadTransposeMatrixdARB({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12, m13, m14, m15, m16}), do: ...
def loadTransposeMatrixdARB({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12}), do: ...
@spec loadTransposeMatrixf(m) :: :ok when m: matrix()
def loadTransposeMatrixf({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12, m13, m14, m15, m16}), do: ...
def loadTransposeMatrixf({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12}), do: ...
@spec loadTransposeMatrixfARB(m) :: :ok when m: matrix()
def loadTransposeMatrixfARB({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12, m13, m14, m15, m16}), do: ...
def loadTransposeMatrixfARB({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12}), do: ...
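# A matrix() argument is a flat tuple of 16 floats (a full 4x4 matrix) or 12
# floats (presumably an affine form whose remaining elements are filled in by
# the binding), matching the two clauses of each loadMatrix*/multMatrix* call.
# Sketch: load the identity matrix.
#
#     identity = {1.0, 0.0, 0.0, 0.0,
#                 0.0, 1.0, 0.0, 0.0,
#                 0.0, 0.0, 1.0, 0.0,
#                 0.0, 0.0, 0.0, 1.0}
#     loadMatrixd(identity)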
@spec logicOp(opcode) :: :ok when opcode: enum()
def logicOp(opcode), do: ...
@spec map1d(target, u1, u2, stride, order, points) :: :ok when target: enum(), u1: float(), u2: float(), stride: integer(), order: integer(), points: binary()
def map1d(target, u1, u2, stride, order, points), do: ...
@spec map1f(target, u1, u2, stride, order, points) :: :ok when target: enum(), u1: float(), u2: float(), stride: integer(), order: integer(), points: binary()
def map1f(target, u1, u2, stride, order, points), do: ...
@spec map2d(target, u1, u2, ustride, uorder, v1, v2, vstride, vorder, points) :: :ok when target: enum(), u1: float(), u2: float(), ustride: integer(), uorder: integer(), v1: float(), v2: float(), vstride: integer(), vorder: integer(), points: binary()
def map2d(target, u1, u2, ustride, uorder, v1, v2, vstride, vorder, points), do: ...
@spec map2f(target, u1, u2, ustride, uorder, v1, v2, vstride, vorder, points) :: :ok when target: enum(), u1: float(), u2: float(), ustride: integer(), uorder: integer(), v1: float(), v2: float(), vstride: integer(), vorder: integer(), points: binary()
def map2f(target, u1, u2, ustride, uorder, v1, v2, vstride, vorder, points), do: ...
@spec mapGrid1d(un, u1, u2) :: :ok when un: integer(), u1: float(), u2: float()
def mapGrid1d(un, u1, u2), do: ...
@spec mapGrid1f(un, u1, u2) :: :ok when un: integer(), u1: float(), u2: float()
def mapGrid1f(un, u1, u2), do: ...
@spec mapGrid2d(un, u1, u2, vn, v1, v2) :: :ok when un: integer(), u1: float(), u2: float(), vn: integer(), v1: float(), v2: float()
def mapGrid2d(un, u1, u2, vn, v1, v2), do: ...
@spec mapGrid2f(un, u1, u2, vn, v1, v2) :: :ok when un: integer(), u1: float(), u2: float(), vn: integer(), v1: float(), v2: float()
def mapGrid2f(un, u1, u2, vn, v1, v2), do: ...
@spec materialf(face, pname, param) :: :ok when face: enum(), pname: enum(), param: float()
def materialf(face, pname, param), do: ...
@spec materialfv(face, pname, params) :: :ok when face: enum(), pname: enum(), params: tuple()
def materialfv(face, pname, params), do: ...
@spec materiali(face, pname, param) :: :ok when face: enum(), pname: enum(), param: integer()
def materiali(face, pname, param), do: ...
@spec materialiv(face, pname, params) :: :ok when face: enum(), pname: enum(), params: tuple()
def materialiv(face, pname, params), do: ...
@spec matrixIndexubvARB(indices) :: :ok when indices: [integer()]
def matrixIndexubvARB(indices), do: ...
@spec matrixIndexuivARB(indices) :: :ok when indices: [integer()]
def matrixIndexuivARB(indices), do: ...
@spec matrixIndexusvARB(indices) :: :ok when indices: [integer()]
def matrixIndexusvARB(indices), do: ...
@spec matrixMode(mode) :: :ok when mode: enum()
def matrixMode(mode), do: ...
@spec memoryBarrier(barriers) :: :ok when barriers: integer()
def memoryBarrier(barriers), do: ...
@spec minSampleShading(value) :: :ok when value: clamp()
def minSampleShading(value), do: ...
@spec minmax(target, internalformat, sink) :: :ok when target: enum(), internalformat: enum(), sink: (0 | 1)
def minmax(target, internalformat, sink), do: ...

@spec multMatrixd(m) :: :ok when m: matrix()
def multMatrixd({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12, m13, m14, m15, m16}), do: ...
def multMatrixd({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12}), do: ...
@spec multMatrixf(m) :: :ok when m: matrix()
def multMatrixf({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12, m13, m14, m15, m16}), do: ...
def multMatrixf({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12}), do: ...
@spec multTransposeMatrixd(m) :: :ok when m: matrix()
def multTransposeMatrixd({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12, m13, m14, m15, m16}), do: ...
def multTransposeMatrixd({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12}), do: ...
@spec multTransposeMatrixdARB(m) :: :ok when m: matrix()
def multTransposeMatrixdARB({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12, m13, m14, m15, m16}), do: ...
def multTransposeMatrixdARB({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12}), do: ...
@spec multTransposeMatrixf(m) :: :ok when m: matrix()
def multTransposeMatrixf({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12, m13, m14, m15, m16}), do: ...
def multTransposeMatrixf({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12}), do: ...
@spec multTransposeMatrixfARB(m) :: :ok when m: matrix()
def multTransposeMatrixfARB({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12, m13, m14, m15, m16}), do: ...
def multTransposeMatrixfARB({m1, m2, m3, m4, m5, m6, m7, m8, m9, m10, m11, m12}), do: ...
@spec multiDrawArrays(mode, first, count) :: :ok when mode: enum(), first: ([integer()] | mem()), count: ([integer()] | mem())
def multiDrawArrays(mode, first, count) when is_list(first) and is_list(count), do: ...
def multiDrawArrays(mode, first, count), do: ...
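# multiDrawArrays/3 accepts plain lists for `first` and `count` (first clause)
# or mem() blocks (second clause). Sketch: issue two draws in one call, using
# the standard OpenGL constant GL_TRIANGLE_STRIP = 0x0005; the vertex data is
# assumed to be set up already.
#
#     multiDrawArrays(0x0005, [0, 4], [4, 4])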
@spec multiTexCoord1d(target, s) :: :ok when target: enum(), s: float()
def multiTexCoord1d(target, s), do: ...
@spec multiTexCoord1dv(target :: enum(), v) :: :ok when v: {s :: float()}
def multiTexCoord1dv(target, {s}), do: ...
@spec multiTexCoord1f(target, s) :: :ok when target: enum(), s: float()
def multiTexCoord1f(target, s), do: ...
@spec multiTexCoord1fv(target :: enum(), v) :: :ok when v: {s :: float()}
def multiTexCoord1fv(target, {s}), do: ...
@spec multiTexCoord1i(target, s) :: :ok when target: enum(), s: integer()
def multiTexCoord1i(target, s), do: ...
@spec multiTexCoord1iv(target :: enum(), v) :: :ok when v: {s :: integer()}
def multiTexCoord1iv(target, {s}), do: ...
@spec multiTexCoord1s(target, s) :: :ok when target: enum(), s: integer()
def multiTexCoord1s(target, s), do: ...
@spec multiTexCoord1sv(target :: enum(), v) :: :ok when v: {s :: integer()}
def multiTexCoord1sv(target, {s}), do: ...
@spec multiTexCoord2d(target, s, t) :: :ok when target: enum(), s: float(), t: float()
def multiTexCoord2d(target, s, t), do: ...
@spec multiTexCoord2dv(target :: enum(), v) :: :ok when v: {s :: float(), t :: float()}
def multiTexCoord2dv(target, {s, t}), do: ...
@spec multiTexCoord2f(target, s, t) :: :ok when target: enum(), s: float(), t: float()
def multiTexCoord2f(target, s, t), do: ...
@spec multiTexCoord2fv(target :: enum(), v) :: :ok when v: {s :: float(), t :: float()}
def multiTexCoord2fv(target, {s, t}), do: ...
@spec multiTexCoord2i(target, s, t) :: :ok when target: enum(), s: integer(), t: integer()
def multiTexCoord2i(target, s, t), do: ...
@spec multiTexCoord2iv(target :: enum(), v) :: :ok when v: {s :: integer(), t :: integer()}
def multiTexCoord2iv(target, {s, t}), do: ...
@spec multiTexCoord2s(target, s, t) :: :ok when target: enum(), s: integer(), t: integer()
def multiTexCoord2s(target, s, t), do: ...
@spec multiTexCoord2sv(target :: enum(), v) :: :ok when v: {s :: integer(), t :: integer()}
def multiTexCoord2sv(target, {s, t}), do: ...
@spec multiTexCoord3d(target, s, t, r) :: :ok when target: enum(), s: float(), t: float(), r: float()
def multiTexCoord3d(target, s, t, r), do: ...
@spec multiTexCoord3dv(target :: enum(), v) :: :ok when v: {s :: float(), t :: float(), r :: float()}
def multiTexCoord3dv(target, {s, t, r}), do: ...
@spec multiTexCoord3f(target, s, t, r) :: :ok when target: enum(), s: float(), t: float(), r: float()
def multiTexCoord3f(target, s, t, r), do: ...
@spec multiTexCoord3fv(target :: enum(), v) :: :ok when v: {s :: float(), t :: float(), r :: float()}
def multiTexCoord3fv(target, {s, t, r}), do: ...
@spec multiTexCoord3i(target, s, t, r) :: :ok when target: enum(), s: integer(), t: integer(), r: integer()
def multiTexCoord3i(target, s, t, r), do: ...
@spec multiTexCoord3iv(target :: enum(), v) :: :ok when v: {s :: integer(), t :: integer(), r :: integer()}
def multiTexCoord3iv(target, {s, t, r}), do: ...
@spec multiTexCoord3s(target, s, t, r) :: :ok when target: enum(), s: integer(), t: integer(), r: integer()
def multiTexCoord3s(target, s, t, r), do: ...
@spec multiTexCoord3sv(target :: enum(), v) :: :ok when v: {s :: integer(), t :: integer(), r :: integer()}
def multiTexCoord3sv(target, {s, t, r}), do: ...
@spec multiTexCoord4d(target, s, t, r, q) :: :ok when target: enum(), s: float(), t: float(), r: float(), q: float()
def multiTexCoord4d(target, s, t, r, q), do: ...
@spec multiTexCoord4dv(target :: enum(), v) :: :ok when v: {s :: float(), t :: float(), r :: float(), q :: float()}
def multiTexCoord4dv(target, {s, t, r, q}), do: ...
@spec multiTexCoord4f(target, s, t, r, q) :: :ok when target: enum(), s: float(), t: float(), r: float(), q: float()
def multiTexCoord4f(target, s, t, r, q), do: ...
@spec multiTexCoord4fv(target :: enum(), v) :: :ok when v: {s :: float(), t :: float(), r :: float(), q :: float()}
def multiTexCoord4fv(target, {s, t, r, q}), do: ...
@spec multiTexCoord4i(target, s, t, r, q) :: :ok when target: enum(), s: integer(), t: integer(), r: integer(), q: integer()
def multiTexCoord4i(target, s, t, r, q), do: ...
@spec multiTexCoord4iv(target :: enum(), v) :: :ok when v: {s :: integer(), t :: integer(), r :: integer(), q :: integer()}
def multiTexCoord4iv(target, {s, t, r, q}), do: ...
@spec multiTexCoord4s(target, s, t, r, q) :: :ok when target: enum(), s: integer(), t: integer(), r: integer(), q: integer()
def multiTexCoord4s(target, s, t, r, q), do: ...
@spec multiTexCoord4sv(target :: enum(), v) :: :ok when v: {s :: integer(), t :: integer(), r :: integer(), q :: integer()}
def multiTexCoord4sv(target, {s, t, r, q}), do: ...
@spec namedStringARB(type, name, string) :: :ok when type: enum(), name: charlist(), string: charlist()
def namedStringARB(type, name, string), do: ...
@spec newList(list, mode) :: :ok when list: integer(), mode: enum()
def newList(list, mode), do: ...
@spec normal3b(nx, ny, nz) :: :ok when nx: integer(), ny: integer(), nz: integer()
def normal3b(nx, ny, nz), do: ...
@spec normal3bv(v) :: :ok when v: {nx :: integer(), ny :: integer(), nz :: integer()}
def normal3bv({nx, ny, nz}), do: ...
@spec normal3d(nx, ny, nz) :: :ok when nx: float(), ny: float(), nz: float()
def normal3d(nx, ny, nz), do: ...
@spec normal3dv(v) :: :ok when v: {nx :: float(), ny :: float(), nz :: float()}
def normal3dv({nx, ny, nz}), do: ...
@spec normal3f(nx, ny, nz) :: :ok when nx: float(), ny: float(), nz: float()
def normal3f(nx, ny, nz), do: ...
@spec normal3fv(v) :: :ok when v: {nx :: float(), ny :: float(), nz :: float()}
def normal3fv({nx, ny, nz}), do: ...
@spec normal3i(nx, ny, nz) :: :ok when nx: integer(), ny: integer(), nz: integer()
def normal3i(nx, ny, nz), do: ...
@spec normal3iv(v) :: :ok when v: {nx :: integer(), ny :: integer(), nz :: integer()}
def normal3iv({nx, ny, nz}), do: ...
@spec normal3s(nx, ny, nz) :: :ok when nx: integer(), ny: integer(), nz: integer()
def normal3s(nx, ny, nz), do: ...
@spec normal3sv(v) :: :ok when v: {nx :: integer(), ny :: integer(), nz :: integer()}
def normal3sv({nx, ny, nz}), do: ...
@spec normalPointer(type, stride, ptr) :: :ok when type: enum(), stride: integer(), ptr: (offset() | mem())
def normalPointer(type, stride, ptr) when is_integer(ptr), do: ...
def normalPointer(type, stride, ptr), do: ...
@spec ortho(left, right, bottom, top, near_val, far_val) :: :ok when left: float(), right: float(), bottom: float(), top: float(), near_val: float(), far_val: float()
def ortho(left, right, bottom, top, near_val, far_val), do: ...
@spec passThrough(token) :: :ok when token: float()
def passThrough(token), do: ...
@spec patchParameterfv(pname, values) :: :ok when pname: enum(), values: [float()]
def patchParameterfv(pname, values), do: ...
@spec patchParameteri(pname, value) :: :ok when pname: enum(), value: integer()
def patchParameteri(pname, value), do: ...
@spec pauseTransformFeedback() :: :ok
def pauseTransformFeedback(), do: ...
@spec pixelMapfv(map, mapsize, values) :: :ok when map: enum(), mapsize: integer(), values: binary()
def pixelMapfv(map, mapsize, values), do: ...
@spec pixelMapuiv(map, mapsize, values) :: :ok when map: enum(), mapsize: integer(), values: binary()
def pixelMapuiv(map, mapsize, values), do: ...
@spec pixelMapusv(map, mapsize, values) :: :ok when map: enum(), mapsize: integer(), values: binary()
def pixelMapusv(map, mapsize, values), do: ...
@spec pixelStoref(pname, param) :: :ok when pname: enum(), param: float()
def pixelStoref(pname, param), do: ...
@spec pixelStorei(pname, param) :: :ok when pname: enum(), param: integer()
def pixelStorei(pname, param), do: ...
@spec pixelTransferf(pname, param) :: :ok when pname: enum(), param: float()
def pixelTransferf(pname, param), do: ...
@spec pixelTransferi(pname, param) :: :ok when pname: enum(), param: integer()
def pixelTransferi(pname, param), do: ...
@spec pixelZoom(xfactor, yfactor) :: :ok when xfactor: float(), yfactor: float()
def pixelZoom(xfactor, yfactor), do: ...
@spec pointParameterf(pname, param) :: :ok when pname: enum(), param: float()
def pointParameterf(pname, param), do: ...
@spec pointParameterfv(pname, params) :: :ok when pname: enum(), params: tuple()
def pointParameterfv(pname, params), do: ...
@spec pointParameteri(pname, param) :: :ok when pname: enum(), param: integer()
def pointParameteri(pname, param), do: ...
@spec pointParameteriv(pname, params) :: :ok when pname: enum(), params: tuple()
def pointParameteriv(pname, params), do: ...
@spec pointSize(size) :: :ok when size: float()
def pointSize(size), do: ...
@spec polygonMode(face, mode) :: :ok when face: enum(), mode: enum()
def polygonMode(face, mode), do: ...
@spec polygonOffset(factor, units) :: :ok when factor: float(), units: float()
def polygonOffset(factor, units), do: ...
@spec polygonStipple(mask) :: :ok when mask: binary()
def polygonStipple(mask), do: ...
@spec popAttrib() :: :ok
def popAttrib(), do: ...
@spec popClientAttrib() :: :ok
def popClientAttrib(), do: ...
@spec popMatrix() :: :ok
def popMatrix(), do: ...
@spec popName() :: :ok
def popName(), do: ...
@spec primitiveRestartIndex(index) :: :ok when index: integer()
def primitiveRestartIndex(index), do: ...
@spec prioritizeTextures(textures, priorities) :: :ok when textures: [integer()], priorities: [clamp()]
def prioritizeTextures(textures, priorities), do: ...
@spec programBinary(program, binaryFormat, binary) :: :ok when program: integer(), binaryFormat: enum(), binary: binary()
def programBinary(program, binaryFormat, binary), do: ...
@spec programEnvParameter4dARB(target, index, x, y, z, w) :: :ok when target: enum(), index: integer(), x: float(), y: float(), z: float(), w: float()
def programEnvParameter4dARB(target, index, x, y, z, w), do: ...
@spec programEnvParameter4dvARB(target, index, params) :: :ok when target: enum(), index: integer(), params: {float(), float(), float(), float()}
def programEnvParameter4dvARB(target, index, {p1, p2, p3, p4}), do: ...
@spec programEnvParameter4fARB(target, index, x, y, z, w) :: :ok when target: enum(), index: integer(), x: float(), y: float(), z: float(), w: float()
def programEnvParameter4fARB(target, index, x, y, z, w), do: ...
@spec programEnvParameter4fvARB(target, index, params) :: :ok when target: enum(), index: integer(), params: {float(), float(), float(), float()}
def programEnvParameter4fvARB(target, index, {p1, p2, p3, p4}), do: ...
@spec programLocalParameter4dARB(target, index, x, y, z, w) :: :ok when target: enum(), index: integer(), x: float(), y: float(), z: float(), w: float()
def programLocalParameter4dARB(target, index, x, y, z, w), do: ...
@spec programLocalParameter4dvARB(target, index, params) :: :ok when target: enum(), index: integer(), params: {float(), float(), float(), float()}
def programLocalParameter4dvARB(target, index, {p1, p2, p3, p4}), do: ...
@spec programLocalParameter4fARB(target, index, x, y, z, w) :: :ok when target: enum(), index: integer(), x: float(), y: float(), z: float(), w: float()
def programLocalParameter4fARB(target, index, x, y, z, w), do: ...
@spec programLocalParameter4fvARB(target, index, params) :: :ok when target: enum(), index: integer(), params: {float(), float(), float(), float()}
def programLocalParameter4fvARB(target, index, {p1, p2, p3, p4}), do: ...
@spec programParameteri(program, pname, value) :: :ok when program: integer(), pname: enum(), value: integer()
def programParameteri(program, pname, value), do: ...
@spec programStringARB(target, format, string) :: :ok when target: enum(), format: enum(), string: charlist()
def programStringARB(target, format, string), do: ...
@spec programUniform1d(program, location, v0) :: :ok when program: integer(), location: integer(), v0: float()
def programUniform1d(program, location, v0), do: ...
@spec programUniform1dv(program, location, value) :: :ok when program: integer(), location: integer(), value: [float()]
def programUniform1dv(program, location, value), do: ...
@spec programUniform1f(program, location, v0) :: :ok when program: integer(), location: integer(), v0: float()
def programUniform1f(program, location, v0), do: ...
@spec programUniform1fv(program, location, value) :: :ok when program: integer(), location: integer(), value: [float()]
def programUniform1fv(program, location, value), do: ...
@spec programUniform1i(program, location, v0) :: :ok when program: integer(), location: integer(), v0: integer()
def programUniform1i(program, location, v0), do: ...
@spec programUniform1iv(program, location, value) :: :ok when program: integer(), location: integer(), value: [integer()]
def programUniform1iv(program, location, value), do: ...
@spec programUniform1ui(program, location, v0) :: :ok when program: integer(), location: integer(), v0: integer()
def programUniform1ui(program, location, v0), do: ...
@spec programUniform1uiv(program, location, value) :: :ok when program: integer(), location: integer(), value: [integer()]
def programUniform1uiv(program, location, value), do: ...
@spec programUniform2d(program, location, v0, v1) :: :ok when program: integer(), location: integer(), v0: float(), v1: float()
def programUniform2d(program, location, v0, v1), do: ...
@spec programUniform2dv(program, location, value) :: :ok when program: integer(), location: integer(), value: [{float(), float()}]
def programUniform2dv(program, location, value), do: ...
@spec programUniform2f(program, location, v0, v1) :: :ok when program: integer(), location: integer(), v0: float(), v1: float()
def programUniform2f(program, location, v0, v1), do: ...
@spec programUniform2fv(program, location, value) :: :ok when program: integer(), location: integer(), value: [{float(), float()}]
def programUniform2fv(program, location, value), do: ...
@spec programUniform2i(program, location, v0, v1) :: :ok when program: integer(), location: integer(), v0: integer(), v1: integer()
def programUniform2i(program, location, v0, v1), do: ...
@spec programUniform2iv(program, location, value) :: :ok when program: integer(), location: integer(), value: [{integer(), integer()}]
def programUniform2iv(program, location, value), do: ...
@spec programUniform2ui(program, location, v0, v1) :: :ok when program: integer(), location: integer(), v0: integer(), v1: integer()
def programUniform2ui(program, location, v0, v1), do: ...
@spec programUniform2uiv(program, location, value) :: :ok when program: integer(), location: integer(), value: [{integer(), integer()}]
def programUniform2uiv(program, location, value), do: ...
@spec programUniform3d(program, location, v0, v1, v2) :: :ok when program: integer(), location: integer(), v0: float(), v1: float(), v2: float()
def programUniform3d(program, location, v0, v1, v2), do: ...
@spec programUniform3dv(program, location, value) :: :ok when program: integer(), location: integer(), value: [{float(), float(), float()}]
def programUniform3dv(program, location, value), do: ...
@spec programUniform3f(program, location, v0, v1, v2) :: :ok when program: integer(), location: integer(), v0: float(), v1: float(), v2: float()
def programUniform3f(program, location, v0, v1, v2), do: ...
@spec programUniform3fv(program, location, value) :: :ok when program: integer(), location: integer(), value: [{float(), float(), float()}]
def programUniform3fv(program, location, value), do: ...
@spec programUniform3i(program, location, v0, v1, v2) :: :ok when program: integer(), location: integer(), v0: integer(), v1: integer(), v2: integer()
def programUniform3i(program, location, v0, v1, v2), do: ...
@spec programUniform3iv(program, location, value) :: :ok when program: integer(), location: integer(), value: [{integer(), integer(), integer()}]
def programUniform3iv(program, location, value), do: ...
@spec programUniform3ui(program, location, v0, v1, v2) :: :ok when program: integer(), location: integer(), v0: integer(), v1: integer(), v2: integer()
def programUniform3ui(program, location, v0, v1, v2), do: ...
@spec programUniform3uiv(program, location, value) :: :ok when program: integer(), location: integer(), value: [{integer(), integer(), integer()}]
def programUniform3uiv(program, location, value), do: ...
@spec programUniform4d(program, location, v0, v1, v2, v3) :: :ok when program: integer(), location: integer(), v0: float(), v1: float(), v2: float(), v3: float()
def programUniform4d(program, location, v0, v1, v2, v3), do: ...
@spec programUniform4dv(program, location, value) :: :ok when program: integer(), location: integer(), value: [{float(), float(), float(), float()}]
def programUniform4dv(program, location, value), do: ...
@spec programUniform4f(program, location, v0, v1, v2, v3) :: :ok when program: integer(), location: integer(), v0: float(), v1: float(), v2: float(), v3: float()
def programUniform4f(program, location, v0, v1, v2, v3), do: ...
@spec programUniform4fv(program, location, value) :: :ok when program: integer(), location: integer(), value: [{float(), float(), float(), float()}]
def programUniform4fv(program, location, value), do: ...
@spec programUniform4i(program, location, v0, v1, v2, v3) :: :ok when program: integer(), location: integer(), v0: integer(), v1: integer(), v2: integer(), v3: integer()
def programUniform4i(program, location, v0, v1, v2, v3), do: ...
@spec programUniform4iv(program, location, value) :: :ok when program: integer(), location: integer(), value: [{integer(), integer(), integer(), integer()}]
def programUniform4iv(program, location, value), do: ...
@spec programUniform4ui(program, location, v0, v1, v2, v3) :: :ok when program: integer(), location: integer(), v0: integer(), v1: integer(), v2: integer(), v3: integer()
def programUniform4ui(program, location, v0, v1, v2, v3), do: ...
@spec programUniform4uiv(program, location, value) :: :ok when program: integer(), location: integer(), value: [{integer(), integer(), integer(), integer()}]
def programUniform4uiv(program, location, value), do: ...
@spec programUniformMatrix2dv(program, location, transpose, value) :: :ok when program: integer(), location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float()}]
def programUniformMatrix2dv(program, location, transpose, value), do: ...
@spec programUniformMatrix2fv(program, location, transpose, value) :: :ok when program: integer(), location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float()}]
def programUniformMatrix2fv(program, location, transpose, value), do: ...
@spec programUniformMatrix2x3dv(program, location, transpose, value) :: :ok when program: integer(), location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float()}]
def programUniformMatrix2x3dv(program, location, transpose, value), do: ...
@spec programUniformMatrix2x3fv(program, location, transpose, value) :: :ok when program: integer(), location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float()}]
def programUniformMatrix2x3fv(program, location, transpose, value), do: ...
@spec programUniformMatrix2x4dv(program, location, transpose, value) :: :ok when program: integer(), location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float()}]
def programUniformMatrix2x4dv(program, location, transpose, value), do: ...
@spec programUniformMatrix2x4fv(program, location, transpose, value) :: :ok when program: integer(), location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float()}]
def programUniformMatrix2x4fv(program, location, transpose, value), do: ...
@spec programUniformMatrix3dv(program, location, transpose, value) :: :ok when program: integer(), location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float(), float()}]
def programUniformMatrix3dv(program, location, transpose, value), do: ...
@spec programUniformMatrix3fv(program, location, transpose, value) :: :ok when program: integer(), location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float(), float()}]
def programUniformMatrix3fv(program, location, transpose, value), do: ...
@spec programUniformMatrix3x2dv(program, location, transpose, value) :: :ok when program: integer(), location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float()}]
def programUniformMatrix3x2dv(program, location, transpose, value), do: ...
@spec programUniformMatrix3x2fv(program, location, transpose, value) :: :ok when program: integer(), location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float()}]
def programUniformMatrix3x2fv(program, location, transpose, value), do: ...
@spec programUniformMatrix3x4dv(program, location, transpose, value) :: :ok when program: integer(), location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float()}]
def programUniformMatrix3x4dv(program, location, transpose, value), do: ...
@spec programUniformMatrix3x4fv(program, location, transpose, value) :: :ok when program: integer(), location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float()}]
def programUniformMatrix3x4fv(program, location, transpose, value), do: ...
@spec programUniformMatrix4dv(program, location, transpose, value) :: :ok when program: integer(), location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float()}]
def programUniformMatrix4dv(program, location, transpose, value), do: ...
@spec programUniformMatrix4fv(program, location, transpose, value) :: :ok when program: integer(), location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float()}]
def programUniformMatrix4fv(program, location, transpose, value), do: ...
@spec programUniformMatrix4x2dv(program, location, transpose, value) :: :ok when program: integer(), location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float()}]
def programUniformMatrix4x2dv(program, location, transpose, value), do: ...
@spec programUniformMatrix4x2fv(program, location, transpose, value) :: :ok when program: integer(), location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float()}]
def programUniformMatrix4x2fv(program, location, transpose, value), do: ...
@spec programUniformMatrix4x3dv(program, location, transpose, value) :: :ok when program: integer(), location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float()}]
def programUniformMatrix4x3dv(program, location, transpose, value), do: ...
@spec programUniformMatrix4x3fv(program, location, transpose, value) :: :ok when program: integer(), location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float()}]
def programUniformMatrix4x3fv(program, location, transpose, value), do: ...
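# The programUniform* family sets uniforms on a given program object without
# binding it first. Vector variants take a list of tuples; matrix variants take
# a transpose flag (0 | 1) plus a list of flat element tuples. Sketch (`prog`,
# `loc` and `loc_mvp` are illustrative handles):
#
#     programUniform3fv(prog, loc, [{1.0, 0.5, 0.25}])
#     programUniformMatrix4fv(prog, loc_mvp, 0,
#       [{1.0, 0.0, 0.0, 0.0,
#         0.0, 1.0, 0.0, 0.0,
#         0.0, 0.0, 1.0, 0.0,
#         0.0, 0.0, 0.0, 1.0}])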
@spec provokingVertex(mode) :: :ok when mode: enum()
def provokingVertex(mode), do: ...
@spec pushAttrib(mask) :: :ok when mask: integer()
def pushAttrib(mask), do: ...
@spec pushClientAttrib(mask) :: :ok when mask: integer()
def pushClientAttrib(mask), do: ...
@spec pushMatrix() :: :ok
def pushMatrix(), do: ...
@spec pushName(name) :: :ok when name: integer()
def pushName(name), do: ...
@spec queryCounter(id, target) :: :ok when id: integer(), target: enum()
def queryCounter(id, target), do: ...
@spec rasterPos2d(x, y) :: :ok when x: float(), y: float()
def rasterPos2d(x, y), do: ...
@spec rasterPos2dv(v) :: :ok when v: {x :: float(), y :: float()}
def rasterPos2dv({x, y}), do: ...
@spec rasterPos2f(x, y) :: :ok when x: float(), y: float()
def rasterPos2f(x, y), do: ...
@spec rasterPos2fv(v) :: :ok when v: {x :: float(), y :: float()}
def rasterPos2fv({x, y}), do: ...
@spec rasterPos2i(x, y) :: :ok when x: integer(), y: integer()
def rasterPos2i(x, y), do: ...
@spec rasterPos2iv(v) :: :ok when v: {x :: integer(), y :: integer()}
def rasterPos2iv({x, y}), do: ...
@spec rasterPos2s(x, y) :: :ok when x: integer(), y: integer()
def rasterPos2s(x, y), do: ...
@spec rasterPos2sv(v) :: :ok when v: {x :: integer(), y :: integer()}
def rasterPos2sv({x, y}), do: ...
@spec rasterPos3d(x, y, z) :: :ok when x: float(), y: float(), z: float()
def rasterPos3d(x, y, z), do: ...
@spec rasterPos3dv(v) :: :ok when v: {x :: float(), y :: float(), z :: float()}
def rasterPos3dv({x, y, z}), do: ...
@spec rasterPos3f(x, y, z) :: :ok when x: float(), y: float(), z: float()
def rasterPos3f(x, y, z), do: ...
@spec rasterPos3fv(v) :: :ok when v: {x :: float(), y :: float(), z :: float()}
def rasterPos3fv({x, y, z}), do: ...
@spec rasterPos3i(x, y, z) :: :ok when x: integer(), y: integer(), z: integer()
def rasterPos3i(x, y, z), do: ...
@spec rasterPos3iv(v) :: :ok when v: {x :: integer(), y :: integer(), z :: integer()}
def rasterPos3iv({x, y, z}), do: ...
@spec rasterPos3s(x, y, z) :: :ok when x: integer(), y: integer(), z: integer()
def rasterPos3s(x, y, z), do: ...
@spec rasterPos3sv(v) :: :ok when v: {x :: integer(), y :: integer(), z :: integer()}
def rasterPos3sv({x, y, z}), do: ...
@spec rasterPos4d(x, y, z, w) :: :ok when x: float(), y: float(), z: float(), w: float()
def rasterPos4d(x, y, z, w), do: ...
@spec rasterPos4dv(v) :: :ok when v: {x :: float(), y :: float(), z :: float(), w :: float()}
def rasterPos4dv({x, y, z, w}), do: ...
@spec rasterPos4f(x, y, z, w) :: :ok when x: float(), y: float(), z: float(), w: float()
def rasterPos4f(x, y, z, w), do: ...
@spec rasterPos4fv(v) :: :ok when v: {x :: float(), y :: float(), z :: float(), w :: float()}
def rasterPos4fv({x, y, z, w}), do: ...
@spec rasterPos4i(x, y, z, w) :: :ok when x: integer(), y: integer(), z: integer(), w: integer()
def rasterPos4i(x, y, z, w), do: ...
@spec rasterPos4iv(v) :: :ok when v: {x :: integer(), y :: integer(), z :: integer(), w :: integer()}
def rasterPos4iv({x, y, z, w}), do: ...
@spec rasterPos4s(x, y, z, w) :: :ok when x: integer(), y: integer(), z: integer(), w: integer()
def rasterPos4s(x, y, z, w), do: ...
@spec rasterPos4sv(v) :: :ok when v: {x :: integer(), y :: integer(), z :: integer(), w :: integer()}
def rasterPos4sv({x, y, z, w}), do: ...
@spec readBuffer(mode) :: :ok when mode: enum()
def readBuffer(mode), do: ...
@spec readPixels(x, y, width, height, format, type, pixels) :: :ok when x: integer(), y: integer(), width: integer(), height: integer(), format: enum(), type: enum(), pixels: mem()
def readPixels(x, y, width, height, format, type, pixels), do: ...
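# readPixels/7 writes into a pre-allocated mem() block. A minimal sketch,
# assuming mem() values come from :wx.create_memory/1 and are read back with
# :wx.get_memory_bin/1 as in Erlang's wx distribution, and using the standard
# OpenGL constants GL_RGBA = 0x1908 and GL_UNSIGNED_BYTE = 0x1401 (`width` and
# `height` are illustrative):
#
#     mem = :wx.create_memory(width * height * 4)
#     readPixels(0, 0, width, height, 0x1908, 0x1401, mem)
#     pixels = :wx.get_memory_bin(mem)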
@spec rectd(x1, y1, x2, y2) :: :ok when x1: float(), y1: float(), x2: float(), y2: float()
def rectd(x1, y1, x2, y2), do: ...
@spec rectdv(v1, v2) :: :ok when v1: {float(), float()}, v2: {float(), float()}
def rectdv({x1, y1}, {x2, y2}), do: ...
@spec rectf(x1, y1, x2, y2) :: :ok when x1: float(), y1: float(), x2: float(), y2: float()
def rectf(x1, y1, x2, y2), do: ...
@spec rectfv(v1, v2) :: :ok when v1: {float(), float()}, v2: {float(), float()}
def rectfv({x1, y1}, {x2, y2}), do: ...
@spec recti(x1, y1, x2, y2) :: :ok when x1: integer(), y1: integer(), x2: integer(), y2: integer()
def recti(x1, y1, x2, y2), do: ...
@spec rectiv(v1, v2) :: :ok when v1: {integer(), integer()}, v2: {integer(), integer()}
def rectiv({x1, y1}, {x2, y2}), do: ...
@spec rects(x1, y1, x2, y2) :: :ok when x1: integer(), y1: integer(), x2: integer(), y2: integer()
def rects(x1, y1, x2, y2), do: ...
@spec rectsv(v1, v2) :: :ok when v1: {integer(), integer()}, v2: {integer(), integer()}
def rectsv({x1, y1}, {x2, y2}), do: ...
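# The rect*v variants take the two opposite corners of the rectangle as
# separate 2-tuples, matching the v1/v2 specs above, e.g.:
#
#     rectdv({0.0, 0.0}, {1.0, 1.0})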
@spec releaseShaderCompiler() :: :ok
def releaseShaderCompiler(), do: ...
@spec renderMode(mode) :: integer() when mode: enum()
def renderMode(mode), do: ...
@spec renderbufferStorage(target, internalformat, width, height) :: :ok when target: enum(), internalformat: enum(), width: integer(), height: integer()
def renderbufferStorage(target, internalformat, width, height), do: ...
@spec renderbufferStorageMultisample(target, samples, internalformat, width, height) :: :ok when target: enum(), samples: integer(), internalformat: enum(), width: integer(), height: integer()
def renderbufferStorageMultisample(target, samples, internalformat, width, height), do: ...
@spec resetHistogram(target) :: :ok when target: enum()
def resetHistogram(target), do: ...
@spec resetMinmax(target) :: :ok when target: enum()
def resetMinmax(target), do: ...
@spec resumeTransformFeedback() :: :ok
def resumeTransformFeedback(), do: ...
@spec rotated(angle, x, y, z) :: :ok when angle: float(), x: float(), y: float(), z: float()
def rotated(angle, x, y, z), do: ...
@spec rotatef(angle, x, y, z) :: :ok when angle: float(), x: float(), y: float(), z: float()
def rotatef(angle, x, y, z), do: ...
@spec sampleCoverage(value, invert) :: :ok when value: clamp(), invert: (0 | 1)
def sampleCoverage(value, invert), do: ...
@spec sampleMaski(index, mask) :: :ok when index: integer(), mask: integer()
def sampleMaski(index, mask), do: ...
@spec samplerParameterIiv(sampler, pname, param) :: :ok when sampler: integer(), pname: enum(), param: [integer()]
def samplerParameterIiv(sampler, pname, param), do: ...
@spec samplerParameterIuiv(sampler, pname, param) :: :ok when sampler: integer(), pname: enum(), param: [integer()]
def samplerParameterIuiv(sampler, pname, param), do: ...
@spec samplerParameterf(sampler, pname, param) :: :ok when sampler: integer(), pname: enum(), param: float()
def samplerParameterf(sampler, pname, param), do: ...
@spec samplerParameterfv(sampler, pname, param) :: :ok when sampler: integer(), pname: enum(), param: [float()]
def samplerParameterfv(sampler, pname, param), do: ...
@spec samplerParameteri(sampler, pname, param) :: :ok when sampler: integer(), pname: enum(), param: integer()
def samplerParameteri(sampler, pname, param), do: ...
@spec samplerParameteriv(sampler, pname, param) :: :ok when sampler: integer(), pname: enum(), param: [integer()]
def samplerParameteriv(sampler, pname, param), do: ...
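# samplerParameteri/3 sets a single integer parameter on a sampler object.
# Sketch, using the standard OpenGL constants GL_TEXTURE_MIN_FILTER = 0x2801
# and GL_LINEAR = 0x2601 (`sampler` is an illustrative handle):
#
#     samplerParameteri(sampler, 0x2801, 0x2601)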
@spec scaled(x, y, z) :: :ok when x: float(), y: float(), z: float()
def scaled(x, y, z), do: ...
@spec scalef(x, y, z) :: :ok when x: float(), y: float(), z: float()
def scalef(x, y, z), do: ...
@spec scissor(x, y, width, height) :: :ok when x: integer(), y: integer(), width: integer(), height: integer()
def scissor(x, y, width, height), do: ...
@spec scissorArrayv(first, v) :: :ok when first: integer(), v: [{integer(), integer(), integer(), integer()}]
def scissorArrayv(first, v), do: ...
@spec scissorIndexed(index, left, bottom, width, height) :: :ok when index: integer(), left: integer(), bottom: integer(), width: integer(), height: integer()
def scissorIndexed(index, left, bottom, width, height), do: ...
@spec scissorIndexedv(index, v) :: :ok when index: integer(), v: {integer(), integer(), integer(), integer()}
def scissorIndexedv(index, {v1, v2, v3, v4}), do: ...
@spec secondaryColor3b(red, green, blue) :: :ok when red: integer(), green: integer(), blue: integer()
def secondaryColor3b(red, green, blue), do: ...
@spec secondaryColor3bv(v) :: :ok when v: {red :: integer(), green :: integer(), blue :: integer()}
def secondaryColor3bv({red, green, blue}), do: ...
@spec secondaryColor3d(red, green, blue) :: :ok when red: float(), green: float(), blue: float()
def secondaryColor3d(red, green, blue), do: ...
@spec secondaryColor3dv(v) :: :ok when v: {red :: float(), green :: float(), blue :: float()}
def secondaryColor3dv({red, green, blue}), do: ...
@spec secondaryColor3f(red, green, blue) :: :ok when red: float(), green: float(), blue: float()
def secondaryColor3f(red, green, blue), do: ...
@spec secondaryColor3fv(v) :: :ok when v: {red :: float(), green :: float(), blue :: float()}
def secondaryColor3fv({red, green, blue}), do: ...
@spec secondaryColor3i(red, green, blue) :: :ok when red: integer(), green: integer(), blue: integer()
def secondaryColor3i(red, green, blue), do: ...
@spec secondaryColor3iv(v) :: :ok when v: {red :: integer(), green :: integer(), blue :: integer()}
def secondaryColor3iv({red, green, blue}), do: ...
@spec secondaryColor3s(red, green, blue) :: :ok when red: integer(), green: integer(), blue: integer()
def secondaryColor3s(red, green, blue), do: ...
@spec secondaryColor3sv(v) :: :ok when v: {red :: integer(), green :: integer(), blue :: integer()}
def secondaryColor3sv({red, green, blue}), do: ...
@spec secondaryColor3ub(red, green, blue) :: :ok when red: integer(), green: integer(), blue: integer()
def secondaryColor3ub(red, green, blue), do: ...
@spec secondaryColor3ubv(v) :: :ok when v: {red :: integer(), green :: integer(), blue :: integer()}
def secondaryColor3ubv({red, green, blue}), do: ...
@spec secondaryColor3ui(red, green, blue) :: :ok when red: integer(), green: integer(), blue: integer()
def secondaryColor3ui(red, green, blue), do: ...
@spec secondaryColor3uiv(v) :: :ok when v: {red :: integer(), green :: integer(), blue :: integer()}
def secondaryColor3uiv({red, green, blue}), do: ...
@spec secondaryColor3us(red, green, blue) :: :ok when red: integer(), green: integer(), blue: integer()
def secondaryColor3us(red, green, blue), do: ...
@spec secondaryColor3usv(v) :: :ok when v: {red :: integer(), green :: integer(), blue :: integer()}
def secondaryColor3usv({red, green, blue}), do: ...
@spec secondaryColorPointer(size, type, stride, pointer) :: :ok when size: integer(), type: enum(), stride: integer(), pointer: (offset() | mem())
def secondaryColorPointer(size, type, stride, pointer) when is_integer(pointer), do: ...
def secondaryColorPointer(size, type, stride, pointer), do: ...
@spec selectBuffer(size, buffer) :: :ok when size: integer(), buffer: mem()
def selectBuffer(size, buffer), do: ...
def send_bin(bin) when is_binary(bin), do: ...
def send_bin(tuple) when is_tuple(tuple), do: ...
@spec separableFilter2D(target, internalformat, width, height, format, type, row, column) :: :ok when target: enum(), internalformat: enum(), width: integer(), height: integer(), format: enum(), type: enum(), row: (offset() | mem()), column: (offset() | mem())
def separableFilter2D(target, internalformat, width, height, format, type, row, column) when is_integer(row) and is_integer(column), do: ...
def separableFilter2D(target, internalformat, width, height, format, type, row, column), do: ...
@spec shadeModel(mode) :: :ok when mode: enum()
def shadeModel(mode), do: ...
@spec shaderBinary(shaders, binaryformat, binary) :: :ok when shaders: [integer()], binaryformat: enum(), binary: binary()
def shaderBinary(shaders, binaryformat, binary), do: ...
@spec shaderSource(shader, string) :: :ok when shader: integer(), string: iolist()
def shaderSource(shader, string), do: ...
@spec shaderSourceARB(shaderObj, string) :: :ok when shaderObj: integer(), string: iolist()
def shaderSourceARB(shaderObj, string), do: ...
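# shaderSource/2 takes the source as an iolist, so the version directive and
# the body can be passed as separate chunks without concatenation. Sketch
# (`shader` and `body` are illustrative):
#
#     shaderSource(shader, ["#version 150\n", body])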
@spec stencilClearTagEXT(stencilTagBits, stencilClearTag) :: :ok when stencilTagBits: integer(), stencilClearTag: integer()
def stencilClearTagEXT(stencilTagBits, stencilClearTag), do: ...
@spec stencilFunc(func, ref, mask) :: :ok when func: enum(), ref: integer(), mask: integer()
def stencilFunc(func, ref, mask), do: ...
@spec stencilFuncSeparate(face, func, ref, mask) :: :ok when face: enum(), func: enum(), ref: integer(), mask: integer()
def stencilFuncSeparate(face, func, ref, mask), do: ...
@spec stencilMask(mask) :: :ok when mask: integer()
def stencilMask(mask), do: ...
@spec stencilMaskSeparate(face, mask) :: :ok when face: enum(), mask: integer()
def stencilMaskSeparate(face, mask), do: ...
@spec stencilOp(fail, zfail, zpass) :: :ok when fail: enum(), zfail: enum(), zpass: enum()
def stencilOp(fail, zfail, zpass), do: ...
@spec stencilOpSeparate(face, sfail, dpfail, dppass) :: :ok when face: enum(), sfail: enum(), dpfail: enum(), dppass: enum()
def stencilOpSeparate(face, sfail, dpfail, dppass), do: ...
@spec texBuffer(target, internalformat, buffer) :: :ok when target: enum(), internalformat: enum(), buffer: integer()
def texBuffer(target, internalformat, buffer), do: ...
@spec texCoord1d(s) :: :ok when s: float()
def texCoord1d(s), do: ...
@spec texCoord1dv(v) :: :ok when v: {s :: float()}
def texCoord1dv({s}), do: ...
@spec texCoord1f(s) :: :ok when s: float()
def texCoord1f(s), do: ...
@spec texCoord1fv(v) :: :ok when v: {s :: float()}
def texCoord1fv({s}), do: ...
@spec texCoord1i(s) :: :ok when s: integer()
def texCoord1i(s), do: ...
@spec texCoord1iv(v) :: :ok when v: {s :: integer()}
def texCoord1iv({s}), do: ...
@spec texCoord1s(s) :: :ok when s: integer()
def texCoord1s(s), do: ...
@spec texCoord1sv(v) :: :ok when v: {s :: integer()}
def texCoord1sv({s}), do: ...
@spec texCoord2d(s, t) :: :ok when s: float(), t: float()
def texCoord2d(s, t), do: ...
@spec texCoord2dv(v) :: :ok when v: {s :: float(), t :: float()}
def texCoord2dv({s, t}), do: ...
@spec texCoord2f(s, t) :: :ok when s: float(), t: float()
def texCoord2f(s, t), do: ...
@spec texCoord2fv(v) :: :ok when v: {s :: float(), t :: float()}
def texCoord2fv({s, t}), do: ...
@spec texCoord2i(s, t) :: :ok when s: integer(), t: integer()
def texCoord2i(s, t), do: ...
@spec texCoord2iv(v) :: :ok when v: {s :: integer(), t :: integer()}
def texCoord2iv({s, t}), do: ...
@spec texCoord2s(s, t) :: :ok when s: integer(), t: integer()
def texCoord2s(s, t), do: ...
@spec texCoord2sv(v) :: :ok when v: {s :: integer(), t :: integer()}
def texCoord2sv({s, t}), do: ...
@spec texCoord3d(s, t, r) :: :ok when s: float(), t: float(), r: float()
def texCoord3d(s, t, r), do: ...
@spec texCoord3dv(v) :: :ok when v: {s :: float(), t :: float(), r :: float()}
def texCoord3dv({s, t, r}), do: ...
@spec texCoord3f(s, t, r) :: :ok when s: float(), t: float(), r: float()
def texCoord3f(s, t, r), do: ...
@spec texCoord3fv(v) :: :ok when v: {s :: float(), t :: float(), r :: float()}
def texCoord3fv({s, t, r}), do: ...
@spec texCoord3i(s, t, r) :: :ok when s: integer(), t: integer(), r: integer()
def texCoord3i(s, t, r), do: ...
@spec texCoord3iv(v) :: :ok when v: {s :: integer(), t :: integer(), r :: integer()}
def texCoord3iv({s, t, r}), do: ...
@spec texCoord3s(s, t, r) :: :ok when s: integer(), t: integer(), r: integer()
def texCoord3s(s, t, r), do: ...
@spec texCoord3sv(v) :: :ok when v: {s :: integer(), t :: integer(), r :: integer()}
def texCoord3sv({s, t, r}), do: ...
@spec texCoord4d(s, t, r, q) :: :ok when s: float(), t: float(), r: float(), q: float()
def texCoord4d(s, t, r, q), do: ...
@spec texCoord4dv(v) :: :ok when v: {s :: float(), t :: float(), r :: float(), q :: float()}
def texCoord4dv({s, t, r, q}), do: ...
@spec texCoord4f(s, t, r, q) :: :ok when s: float(), t: float(), r: float(), q: float()
def texCoord4f(s, t, r, q), do: ...
@spec texCoord4fv(v) :: :ok when v: {s :: float(), t :: float(), r :: float(), q :: float()}
def texCoord4fv({s, t, r, q}), do: ...
@spec texCoord4i(s, t, r, q) :: :ok when s: integer(), t: integer(), r: integer(), q: integer()
def texCoord4i(s, t, r, q), do: ...
@spec texCoord4iv(v) :: :ok when v: {s :: integer(), t :: integer(), r :: integer(), q :: integer()}
def texCoord4iv({s, t, r, q}), do: ...
@spec texCoord4s(s, t, r, q) :: :ok when s: integer(), t: integer(), r: integer(), q: integer()
def texCoord4s(s, t, r, q), do: ...
@spec texCoord4sv(v) :: :ok when v: {s :: integer(), t :: integer(), r :: integer(), q :: integer()}
def texCoord4sv({s, t, r, q}), do: ...
@spec texCoordPointer(size, type, stride, ptr) :: :ok when size: integer(), type: enum(), stride: integer(), ptr: (offset() | mem())
def texCoordPointer(size, type, stride, ptr) when is_integer(ptr), do: ...
def texCoordPointer(size, type, stride, ptr), do: ...
@spec texEnvf(target, pname, param) :: :ok when target: enum(), pname: enum(), param: float()
def texEnvf(target, pname, param), do: ...
@spec texEnvfv(target, pname, params) :: :ok when target: enum(), pname: enum(), params: tuple()
def texEnvfv(target, pname, params), do: ...
@spec texEnvi(target, pname, param) :: :ok when target: enum(), pname: enum(), param: integer()
def texEnvi(target, pname, param), do: ...
@spec texEnviv(target, pname, params) :: :ok when target: enum(), pname: enum(), params: tuple()
def texEnviv(target, pname, params), do: ...
@spec texGend(coord, pname, param) :: :ok when coord: enum(), pname: enum(), param: float()
def texGend(coord, pname, param), do: ...
@spec texGendv(coord, pname, params) :: :ok when coord: enum(), pname: enum(), params: tuple()
def texGendv(coord, pname, params), do: ...
@spec texGenf(coord, pname, param) :: :ok when coord: enum(), pname: enum(), param: float()
def texGenf(coord, pname, param), do: ...
@spec texGenfv(coord, pname, params) :: :ok when coord: enum(), pname: enum(), params: tuple()
def texGenfv(coord, pname, params), do: ...
@spec texGeni(coord, pname, param) :: :ok when coord: enum(), pname: enum(), param: integer()
def texGeni(coord, pname, param), do: ...
@spec texGeniv(coord, pname, params) :: :ok when coord: enum(), pname: enum(), params: tuple()
def texGeniv(coord, pname, params), do: ...
@spec texImage1D(target, level, internalFormat, width, border, format, type, pixels) :: :ok when target: enum(), level: integer(), internalFormat: integer(), width: integer(), border: integer(), format: enum(), type: enum(), pixels: (offset() | mem())
def texImage1D(target, level, internalFormat, width, border, format, type, pixels) when is_integer(pixels), do: ...
def texImage1D(target, level, internalFormat, width, border, format, type, pixels), do: ...
@spec texImage2D(target, level, internalFormat, width, height, border, format, type, pixels) :: :ok when target: enum(), level: integer(), internalFormat: integer(), width: integer(), height: integer(), border: integer(), format: enum(), type: enum(), pixels: (offset() | mem())
def texImage2D(target, level, internalFormat, width, height, border, format, type, pixels) when is_integer(pixels), do: ...
def texImage2D(target, level, internalFormat, width, height, border, format, type, pixels), do: ...
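# texImage2D/9 follows the same (offset() | mem()) split as the other
# pixel-upload calls: an integer is treated as a byte offset into the bound
# pixel-unpack buffer, anything else as client memory. A plain binary appears
# to be accepted as mem(), as suggested by send_bin/1 above. Sketch uploading a
# 2x2 RGBA texture, using the standard OpenGL constants GL_TEXTURE_2D = 0x0DE1,
# GL_RGBA = 0x1908 and GL_UNSIGNED_BYTE = 0x1401:
#
#     pixels = <<255, 0, 0, 255,  0, 255, 0, 255,
#                0, 0, 255, 255,  255, 255, 255, 255>>
#     texImage2D(0x0DE1, 0, 0x1908, 2, 2, 0, 0x1908, 0x1401, pixels)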
@spec texImage2DMultisample(target, samples, internalformat, width, height, fixedsamplelocations) :: :ok when target: enum(), samples: integer(), internalformat: integer(), width: integer(), height: integer(), fixedsamplelocations: (0 | 1)
def texImage2DMultisample(target, samples, internalformat, width, height, fixedsamplelocations), do: ...
@spec texImage3D(target, level, internalFormat, width, height, depth, border, format, type, pixels) :: :ok when target: enum(), level: integer(), internalFormat: integer(), width: integer(), height: integer(), depth: integer(), border: integer(), format: enum(), type: enum(), pixels: (offset() | mem())
def texImage3D(target, level, internalFormat, width, height, depth, border, format, type, pixels) when is_integer(pixels), do: ...
def texImage3D(target, level, internalFormat, width, height, depth, border, format, type, pixels), do: ...
@spec texImage3DMultisample(target, samples, internalformat, width, height, depth, fixedsamplelocations) :: :ok when target: enum(), samples: integer(), internalformat: integer(), width: integer(), height: integer(), depth: integer(), fixedsamplelocations: (0 | 1)
def texImage3DMultisample(target, samples, internalformat, width, height, depth, fixedsamplelocations), do: ...
@spec texParameterIiv(target, pname, params) :: :ok when target: enum(), pname: enum(), params: tuple()
def texParameterIiv(target, pname, params), do: ...
@spec texParameterIuiv(target, pname, params) :: :ok when target: enum(), pname: enum(), params: tuple()
def texParameterIuiv(target, pname, params), do: ...
@spec texParameterf(target, pname, param) :: :ok when target: enum(), pname: enum(), param: float()
def texParameterf(target, pname, param), do: ...
@spec texParameterfv(target, pname, params) :: :ok when target: enum(), pname: enum(), params: tuple()
def texParameterfv(target, pname, params), do: ...
@spec texParameteri(target, pname, param) :: :ok when target: enum(), pname: enum(), param: integer()
def texParameteri(target, pname, param), do: ...
@spec texParameteriv(target, pname, params) :: :ok when target: enum(), pname: enum(), params: tuple()
def texParameteriv(target, pname, params), do: ...
@spec texStorage1D(target, levels, internalformat, width) :: :ok when target: enum(), levels: integer(), internalformat: enum(), width: integer()
def texStorage1D(target, levels, internalformat, width), do: ...
@spec texStorage2D(target, levels, internalformat, width, height) :: :ok when target: enum(), levels: integer(), internalformat: enum(), width: integer(), height: integer()
def texStorage2D(target, levels, internalformat, width, height), do: ...
@spec texStorage3D(target, levels, internalformat, width, height, depth) :: :ok when target: enum(), levels: integer(), internalformat: enum(), width: integer(), height: integer(), depth: integer()
def texStorage3D(target, levels, internalformat, width, height, depth), do: ...
@spec texSubImage1D(target, level, xoffset, width, format, type, pixels) :: :ok when target: enum(), level: integer(), xoffset: integer(), width: integer(), format: enum(), type: enum(), pixels: (offset() | mem())
def texSubImage1D(target, level, xoffset, width, format, type, pixels) when is_integer(pixels), do: ...
def texSubImage1D(target, level, xoffset, width, format, type, pixels), do: ...
@spec texSubImage2D(target, level, xoffset, yoffset, width, height, format, type, pixels) :: :ok when target: enum(), level: integer(), xoffset: integer(), yoffset: integer(), width: integer(), height: integer(), format: enum(), type: enum(), pixels: (offset() | mem())
def texSubImage2D(target, level, xoffset, yoffset, width, height, format, type, pixels) when is_integer(pixels), do: ...
def texSubImage2D(target, level, xoffset, yoffset, width, height, format, type, pixels), do: ...
@spec texSubImage3D(target, level, xoffset, yoffset, zoffset, width, height, depth, format, type, pixels) :: :ok when target: enum(), level: integer(), xoffset: integer(), yoffset: integer(), zoffset: integer(), width: integer(), height: integer(), depth: integer(), format: enum(), type: enum(), pixels: (offset() | mem())
def texSubImage3D(target, level, xoffset, yoffset, zoffset, width, height, depth, format, type, pixels) when is_integer(pixels), do: ...
def texSubImage3D(target, level, xoffset, yoffset, zoffset, width, height, depth, format, type, pixels), do: ...
@spec transformFeedbackVaryings(program, varyings, bufferMode) :: :ok when program: integer(), varyings: iolist(), bufferMode: enum()
def transformFeedbackVaryings(program, varyings, bufferMode), do: ...
@spec translated(x, y, z) :: :ok when x: float(), y: float(), z: float()
def translated(x, y, z), do: ...
@spec translatef(x, y, z) :: :ok when x: float(), y: float(), z: float()
def translatef(x, y, z), do: ...
@spec uniform1d(location, x) :: :ok when location: integer(), x: float()
def uniform1d(location, x), do: ...
@spec uniform1dv(location, value) :: :ok when location: integer(), value: [float()]
def uniform1dv(location, value), do: ...
@spec uniform1f(location, v0) :: :ok when location: integer(), v0: float()
def uniform1f(location, v0), do: ...
@spec uniform1fv(location, value) :: :ok when location: integer(), value: [float()]
def uniform1fv(location, value), do: ...
@spec uniform1i(location, v0) :: :ok when location: integer(), v0: integer()
def uniform1i(location, v0), do: ...
@spec uniform1iv(location, value) :: :ok when location: integer(), value: [integer()]
def uniform1iv(location, value), do: ...
@spec uniform1ui(location, v0) :: :ok when location: integer(), v0: integer()
def uniform1ui(location, v0), do: ...
@spec uniform1uiv(location, value) :: :ok when location: integer(), value: [integer()]
def uniform1uiv(location, value), do: ...
@spec uniform2d(location, x, y) :: :ok when location: integer(), x: float(), y: float()
def uniform2d(location, x, y), do: ...
@spec uniform2dv(location, value) :: :ok when location: integer(), value: [{float(), float()}]
def uniform2dv(location, value), do: ...
@spec uniform2f(location, v0, v1) :: :ok when location: integer(), v0: float(), v1: float()
def uniform2f(location, v0, v1), do: ...
@spec uniform2fv(location, value) :: :ok when location: integer(), value: [{float(), float()}]
def uniform2fv(location, value), do: ...
@spec uniform2i(location, v0, v1) :: :ok when location: integer(), v0: integer(), v1: integer()
def uniform2i(location, v0, v1), do: ...
@spec uniform2iv(location, value) :: :ok when location: integer(), value: [{integer(), integer()}]
def uniform2iv(location, value), do: ...
@spec uniform2ui(location, v0, v1) :: :ok when location: integer(), v0: integer(), v1: integer()
def uniform2ui(location, v0, v1), do: ...
@spec uniform2uiv(location, value) :: :ok when location: integer(), value: [{integer(), integer()}]
def uniform2uiv(location, value), do: ...
@spec uniform3d(location, x, y, z) :: :ok when location: integer(), x: float(), y: float(), z: float()
def uniform3d(location, x, y, z), do: ...
@spec uniform3dv(location, value) :: :ok when location: integer(), value: [{float(), float(), float()}]
def uniform3dv(location, value), do: ...
@spec uniform3f(location, v0, v1, v2) :: :ok when location: integer(), v0: float(), v1: float(), v2: float()
def uniform3f(location, v0, v1, v2), do: ...
@spec uniform3fv(location, value) :: :ok when location: integer(), value: [{float(), float(), float()}]
def uniform3fv(location, value), do: ...
@spec uniform3i(location, v0, v1, v2) :: :ok when location: integer(), v0: integer(), v1: integer(), v2: integer()
def uniform3i(location, v0, v1, v2), do: ...
@spec uniform3iv(location, value) :: :ok when location: integer(), value: [{integer(), integer(), integer()}]
def uniform3iv(location, value), do: ...
@spec uniform3ui(location, v0, v1, v2) :: :ok when location: integer(), v0: integer(), v1: integer(), v2: integer()
def uniform3ui(location, v0, v1, v2), do: ...
@spec uniform3uiv(location, value) :: :ok when location: integer(), value: [{integer(), integer(), integer()}]
def uniform3uiv(location, value), do: ...
@spec uniform4d(location, x, y, z, w) :: :ok when location: integer(), x: float(), y: float(), z: float(), w: float()
def uniform4d(location, x, y, z, w), do: ...
@spec uniform4dv(location, value) :: :ok when location: integer(), value: [{float(), float(), float(), float()}]
def uniform4dv(location, value), do: ...
@spec uniform4f(location, v0, v1, v2, v3) :: :ok when location: integer(), v0: float(), v1: float(), v2: float(), v3: float()
def uniform4f(location, v0, v1, v2, v3), do: ...
@spec uniform4fv(location, value) :: :ok when location: integer(), value: [{float(), float(), float(), float()}]
def uniform4fv(location, value), do: ...
@spec uniform4i(location, v0, v1, v2, v3) :: :ok when location: integer(), v0: integer(), v1: integer(), v2: integer(), v3: integer()
def uniform4i(location, v0, v1, v2, v3), do: ...
@spec uniform4iv(location, value) :: :ok when location: integer(), value: [{integer(), integer(), integer(), integer()}]
def uniform4iv(location, value), do: ...
@spec uniform4ui(location, v0, v1, v2, v3) :: :ok when location: integer(), v0: integer(), v1: integer(), v2: integer(), v3: integer()
def uniform4ui(location, v0, v1, v2, v3), do: ...
@spec uniform4uiv(location, value) :: :ok when location: integer(), value: [{integer(), integer(), integer(), integer()}]
def uniform4uiv(location, value), do: ...
@spec uniformBlockBinding(program, uniformBlockIndex, uniformBlockBinding) :: :ok when program: integer(), uniformBlockIndex: integer(), uniformBlockBinding: integer()
def uniformBlockBinding(program, uniformBlockIndex, uniformBlockBinding), do: ...
@spec uniformMatrix2dv(location, transpose, value) :: :ok when location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float()}]
def uniformMatrix2dv(location, transpose, value), do: ...
@spec uniformMatrix2fv(location, transpose, value) :: :ok when location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float()}]
def uniformMatrix2fv(location, transpose, value), do: ...
@spec uniformMatrix2x3dv(location, transpose, value) :: :ok when location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float()}]
def uniformMatrix2x3dv(location, transpose, value), do: ...
@spec uniformMatrix2x3fv(location, transpose, value) :: :ok when location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float()}]
def uniformMatrix2x3fv(location, transpose, value), do: ...
@spec uniformMatrix2x4dv(location, transpose, value) :: :ok when location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float()}]
def uniformMatrix2x4dv(location, transpose, value), do: ...
@spec uniformMatrix2x4fv(location, transpose, value) :: :ok when location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float()}]
def uniformMatrix2x4fv(location, transpose, value), do: ...
@spec uniformMatrix3dv(location, transpose, value) :: :ok when location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float(), float()}]
def uniformMatrix3dv(location, transpose, value), do: ...
@spec uniformMatrix3fv(location, transpose, value) :: :ok when location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float(), float()}]
def uniformMatrix3fv(location, transpose, value), do: ...
@spec uniformMatrix3x2dv(location, transpose, value) :: :ok when location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float()}]
def uniformMatrix3x2dv(location, transpose, value), do: ...
@spec uniformMatrix3x2fv(location, transpose, value) :: :ok when location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float()}]
def uniformMatrix3x2fv(location, transpose, value), do: ...
@spec uniformMatrix3x4dv(location, transpose, value) :: :ok when location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float()}]
def uniformMatrix3x4dv(location, transpose, value), do: ...
@spec uniformMatrix3x4fv(location, transpose, value) :: :ok when location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float()}]
def uniformMatrix3x4fv(location, transpose, value), do: ...
@spec uniformMatrix4dv(location, transpose, value) :: :ok when location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float()}]
def uniformMatrix4dv(location, transpose, value), do: ...
@spec uniformMatrix4fv(location, transpose, value) :: :ok when location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float()}]
def uniformMatrix4fv(location, transpose, value), do: ...
@spec uniformMatrix4x2dv(location, transpose, value) :: :ok when location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float()}]
def uniformMatrix4x2dv(location, transpose, value), do: ...
@spec uniformMatrix4x2fv(location, transpose, value) :: :ok when location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float()}]
def uniformMatrix4x2fv(location, transpose, value), do: ...
@spec uniformMatrix4x3dv(location, transpose, value) :: :ok when location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float()}]
def uniformMatrix4x3dv(location, transpose, value), do: ...
@spec uniformMatrix4x3fv(location, transpose, value) :: :ok when location: integer(), transpose: (0 | 1), value: [{float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float(), float()}]
def uniformMatrix4x3fv(location, transpose, value), do: ...
@spec uniformSubroutinesuiv(shadertype, indices) :: :ok when shadertype: enum(), indices: [integer()]
def uniformSubroutinesuiv(shadertype, indices), do: ...
@spec useProgram(program) :: :ok when program: integer()
def useProgram(program), do: ...
@spec useProgramObjectARB(programObj) :: :ok when programObj: integer()
def useProgramObjectARB(programObj), do: ...
@spec useProgramStages(pipeline, stages, program) :: :ok when pipeline: integer(), stages: integer(), program: integer()
def useProgramStages(pipeline, stages, program), do: ...
@spec validateProgram(program) :: :ok when program: integer()
def validateProgram(program), do: ...
@spec validateProgramARB(programObj) :: :ok when programObj: integer()
def validateProgramARB(programObj), do: ...
@spec validateProgramPipeline(pipeline) :: :ok when pipeline: integer()
def validateProgramPipeline(pipeline), do: ...
@spec vertex2d(x, y) :: :ok when x: float(), y: float()
def vertex2d(x, y), do: ...
@spec vertex2dv(v) :: :ok when v: {x :: float(), y :: float()}
def vertex2dv({x, y}), do: ...
@spec vertex2f(x, y) :: :ok when x: float(), y: float()
def vertex2f(x, y), do: ...
@spec vertex2fv(v) :: :ok when v: {x :: float(), y :: float()}
def vertex2fv({x, y}), do: ...
@spec vertex2i(x, y) :: :ok when x: integer(), y: integer()
def vertex2i(x, y), do: ...
@spec vertex2iv(v) :: :ok when v: {x :: integer(), y :: integer()}
def vertex2iv({x, y}), do: ...
@spec vertex2s(x, y) :: :ok when x: integer(), y: integer()
def vertex2s(x, y), do: ...
@spec vertex2sv(v) :: :ok when v: {x :: integer(), y :: integer()}
def vertex2sv({x, y}), do: ...
@spec vertex3d(x, y, z) :: :ok when x: float(), y: float(), z: float()
def vertex3d(x, y, z), do: ...
@spec vertex3dv(v) :: :ok when v: {x :: float(), y :: float(), z :: float()}
def vertex3dv({x, y, z}), do: ...
@spec vertex3f(x, y, z) :: :ok when x: float(), y: float(), z: float()
def vertex3f(x, y, z), do: ...
@spec vertex3fv(v) :: :ok when v: {x :: float(), y :: float(), z :: float()}
def vertex3fv({x, y, z}), do: ...
@spec vertex3i(x, y, z) :: :ok when x: integer(), y: integer(), z: integer()
def vertex3i(x, y, z), do: ...
@spec vertex3iv(v) :: :ok when v: {x :: integer(), y :: integer(), z :: integer()}
def vertex3iv({x, y, z}), do: ...
@spec vertex3s(x, y, z) :: :ok when x: integer(), y: integer(), z: integer()
def vertex3s(x, y, z), do: ...
@spec vertex3sv(v) :: :ok when v: {x :: integer(), y :: integer(), z :: integer()}
def vertex3sv({x, y, z}), do: ...
@spec vertex4d(x, y, z, w) :: :ok when x: float(), y: float(), z: float(), w: float()
def vertex4d(x, y, z, w), do: ...
@spec vertex4dv(v) :: :ok when v: {x :: float(), y :: float(), z :: float(), w :: float()}
def vertex4dv({x, y, z, w}), do: ...
@spec vertex4f(x, y, z, w) :: :ok when x: float(), y: float(), z: float(), w: float()
def vertex4f(x, y, z, w), do: ...
@spec vertex4fv(v) :: :ok when v: {x :: float(), y :: float(), z :: float(), w :: float()}
def vertex4fv({x, y, z, w}), do: ...
@spec vertex4i(x, y, z, w) :: :ok when x: integer(), y: integer(), z: integer(), w: integer()
def vertex4i(x, y, z, w), do: ...
@spec vertex4iv(v) :: :ok when v: {x :: integer(), y :: integer(), z :: integer(), w :: integer()}
def vertex4iv({x, y, z, w}), do: ...
@spec vertex4s(x, y, z, w) :: :ok when x: integer(), y: integer(), z: integer(), w: integer()
def vertex4s(x, y, z, w), do: ...
@spec vertex4sv(v) :: :ok when v: {x :: integer(), y :: integer(), z :: integer(), w :: integer()}
def vertex4sv({x, y, z, w}), do: ...
@spec vertexAttrib1d(index, x) :: :ok when index: integer(), x: float()
def vertexAttrib1d(index, x), do: ...
@spec vertexAttrib1dv(index :: integer(), v) :: :ok when v: {x :: float()}
def vertexAttrib1dv(index, {x}), do: ...
@spec vertexAttrib1f(index, x) :: :ok when index: integer(), x: float()
def vertexAttrib1f(index, x), do: ...
@spec vertexAttrib1fv(index :: integer(), v) :: :ok when v: {x :: float()}
def vertexAttrib1fv(index, {x}), do: ...
@spec vertexAttrib1s(index, x) :: :ok when index: integer(), x: integer()
def vertexAttrib1s(index, x), do: ...
@spec vertexAttrib1sv(index :: integer(), v) :: :ok when v: {x :: integer()}
def vertexAttrib1sv(index, {x}), do: ...
@spec vertexAttrib2d(index, x, y) :: :ok when index: integer(), x: float(), y: float()
def vertexAttrib2d(index, x, y), do: ...
@spec vertexAttrib2dv(index :: integer(), v) :: :ok when v: {x :: float(), y :: float()}
def vertexAttrib2dv(index, {x, y}), do: ...
@spec vertexAttrib2f(index, x, y) :: :ok when index: integer(), x: float(), y: float()
def vertexAttrib2f(index, x, y), do: ...
@spec vertexAttrib2fv(index :: integer(), v) :: :ok when v: {x :: float(), y :: float()}
def vertexAttrib2fv(index, {x, y}), do: ...
@spec vertexAttrib2s(index, x, y) :: :ok when index: integer(), x: integer(), y: integer()
def vertexAttrib2s(index, x, y), do: ...
@spec vertexAttrib2sv(index :: integer(), v) :: :ok when v: {x :: integer(), y :: integer()}
def vertexAttrib2sv(index, {x, y}), do: ...
@spec vertexAttrib3d(index, x, y, z) :: :ok when index: integer(), x: float(), y: float(), z: float()
def vertexAttrib3d(index, x, y, z), do: ...
@spec vertexAttrib3dv(index :: integer(), v) :: :ok when v: {x :: float(), y :: float(), z :: float()}
def vertexAttrib3dv(index, {x, y, z}), do: ...
@spec vertexAttrib3f(index, x, y, z) :: :ok when index: integer(), x: float(), y: float(), z: float()
def vertexAttrib3f(index, x, y, z), do: ...
@spec vertexAttrib3fv(index :: integer(), v) :: :ok when v: {x :: float(), y :: float(), z :: float()}
def vertexAttrib3fv(index, {x, y, z}), do: ...
@spec vertexAttrib3s(index, x, y, z) :: :ok when index: integer(), x: integer(), y: integer(), z: integer()
def vertexAttrib3s(index, x, y, z), do: ...
@spec vertexAttrib3sv(index :: integer(), v) :: :ok when v: {x :: integer(), y :: integer(), z :: integer()}
def vertexAttrib3sv(index, {x, y, z}), do: ...
@spec vertexAttrib4Nbv(index, v) :: :ok when index: integer(), v: {integer(), integer(), integer(), integer()}
def vertexAttrib4Nbv(index, {v1, v2, v3, v4}), do: ...
@spec vertexAttrib4Niv(index, v) :: :ok when index: integer(), v: {integer(), integer(), integer(), integer()}
def vertexAttrib4Niv(index, {v1, v2, v3, v4}), do: ...
@spec vertexAttrib4Nsv(index, v) :: :ok when index: integer(), v: {integer(), integer(), integer(), integer()}
def vertexAttrib4Nsv(index, {v1, v2, v3, v4}), do: ...
@spec vertexAttrib4Nub(index, x, y, z, w) :: :ok when index: integer(), x: integer(), y: integer(), z: integer(), w: integer()
def vertexAttrib4Nub(index, x, y, z, w), do: ...
@spec vertexAttrib4Nubv(index :: integer(), v) :: :ok when v: {x :: integer(), y :: integer(), z :: integer(), w :: integer()}
def vertexAttrib4Nubv(index, {x, y, z, w}), do: ...
@spec vertexAttrib4Nuiv(index, v) :: :ok when index: integer(), v: {integer(), integer(), integer(), integer()}
def vertexAttrib4Nuiv(index, {v1, v2, v3, v4}), do: ...
@spec vertexAttrib4Nusv(index, v) :: :ok when index: integer(), v: {integer(), integer(), integer(), integer()}
def vertexAttrib4Nusv(index, {v1, v2, v3, v4}), do: ...
@spec vertexAttrib4bv(index, v) :: :ok when index: integer(), v: {integer(), integer(), integer(), integer()}
def vertexAttrib4bv(index, {v1, v2, v3, v4}), do: ...
@spec vertexAttrib4d(index, x, y, z, w) :: :ok when index: integer(), x: float(), y: float(), z: float(), w: float()
def vertexAttrib4d(index, x, y, z, w), do: ...
@spec vertexAttrib4dv(index :: integer(), v) :: :ok when v: {x :: float(), y :: float(), z :: float(), w :: float()}
def vertexAttrib4dv(index, {x, y, z, w}), do: ...
@spec vertexAttrib4f(index, x, y, z, w) :: :ok when index: integer(), x: float(), y: float(), z: float(), w: float()
def vertexAttrib4f(index, x, y, z, w), do: ...
@spec vertexAttrib4fv(index :: integer(), v) :: :ok when v: {x :: float(), y :: float(), z :: float(), w :: float()}
def vertexAttrib4fv(index, {x, y, z, w}), do: ...
@spec vertexAttrib4iv(index, v) :: :ok when index: integer(), v: {integer(), integer(), integer(), integer()}
def vertexAttrib4iv(index, {v1, v2, v3, v4}), do: ...
@spec vertexAttrib4s(index, x, y, z, w) :: :ok when index: integer(), x: integer(), y: integer(), z: integer(), w: integer()
def vertexAttrib4s(index, x, y, z, w), do: ...
@spec vertexAttrib4sv(index :: integer(), v) :: :ok when v: {x :: integer(), y :: integer(), z :: integer(), w :: integer()}
def vertexAttrib4sv(index, {x, y, z, w}), do: ...
@spec vertexAttrib4ubv(index, v) :: :ok when index: integer(), v: {integer(), integer(), integer(), integer()}
def vertexAttrib4ubv(index, {v1, v2, v3, v4}), do: ...
@spec vertexAttrib4uiv(index, v) :: :ok when index: integer(), v: {integer(), integer(), integer(), integer()}
def vertexAttrib4uiv(index, {v1, v2, v3, v4}), do: ...
@spec vertexAttrib4usv(index, v) :: :ok when index: integer(), v: {integer(), integer(), integer(), integer()}
def vertexAttrib4usv(index, {v1, v2, v3, v4}), do: ...
@spec vertexAttribDivisor(index, divisor) :: :ok when index: integer(), divisor: integer()
def vertexAttribDivisor(index, divisor), do: ...
@spec vertexAttribI1i(index, x) :: :ok when index: integer(), x: integer()
def vertexAttribI1i(index, x), do: ...
@spec vertexAttribI1iv(index :: integer(), v) :: :ok when v: {x :: integer()}
def vertexAttribI1iv(index, {x}), do: ...
@spec vertexAttribI1ui(index, x) :: :ok when index: integer(), x: integer()
def vertexAttribI1ui(index, x), do: ...
@spec vertexAttribI1uiv(index :: integer(), v) :: :ok when v: {x :: integer()}
def vertexAttribI1uiv(index, {x}), do: ...
@spec vertexAttribI2i(index, x, y) :: :ok when index: integer(), x: integer(), y: integer()
def vertexAttribI2i(index, x, y), do: ...
@spec vertexAttribI2iv(index :: integer(), v) :: :ok when v: {x :: integer(), y :: integer()}
def vertexAttribI2iv(index, {x, y}), do: ...
@spec vertexAttribI2ui(index, x, y) :: :ok when index: integer(), x: integer(), y: integer()
def vertexAttribI2ui(index, x, y), do: ...
@spec vertexAttribI2uiv(index :: integer(), v) :: :ok when v: {x :: integer(), y :: integer()}
def vertexAttribI2uiv(index, {x, y}), do: ...
@spec vertexAttribI3i(index, x, y, z) :: :ok when index: integer(), x: integer(), y: integer(), z: integer()
def vertexAttribI3i(index, x, y, z), do: ...
@spec vertexAttribI3iv(index :: integer(), v) :: :ok when v: {x :: integer(), y :: integer(), z :: integer()}
def vertexAttribI3iv(index, {x, y, z}), do: ...
@spec vertexAttribI3ui(index, x, y, z) :: :ok when index: integer(), x: integer(), y: integer(), z: integer()
def vertexAttribI3ui(index, x, y, z), do: ...
@spec vertexAttribI3uiv(index :: integer(), v) :: :ok when v: {x :: integer(), y :: integer(), z :: integer()}
def vertexAttribI3uiv(index, {x, y, z}), do: ...
@spec vertexAttribI4bv(index, v) :: :ok when index: integer(), v: {integer(), integer(), integer(), integer()}
def vertexAttribI4bv(index, {v1, v2, v3, v4}), do: ...
@spec vertexAttribI4i(index, x, y, z, w) :: :ok when index: integer(), x: integer(), y: integer(), z: integer(), w: integer()
def vertexAttribI4i(index, x, y, z, w), do: ...
@spec vertexAttribI4iv(index :: integer(), v) :: :ok when v: {x :: integer(), y :: integer(), z :: integer(), w :: integer()}
def vertexAttribI4iv(index, {x, y, z, w}), do: ...
@spec vertexAttribI4sv(index, v) :: :ok when index: integer(), v: {integer(), integer(), integer(), integer()}
def vertexAttribI4sv(index, {v1, v2, v3, v4}), do: ...
@spec vertexAttribI4ubv(index, v) :: :ok when index: integer(), v: {integer(), integer(), integer(), integer()}
def vertexAttribI4ubv(index, {v1, v2, v3, v4}), do: ...
@spec vertexAttribI4ui(index, x, y, z, w) :: :ok when index: integer(), x: integer(), y: integer(), z: integer(), w: integer()
def vertexAttribI4ui(index, x, y, z, w), do: ...
@spec vertexAttribI4uiv(index :: integer(), v) :: :ok when v: {x :: integer(), y :: integer(), z :: integer(), w :: integer()}
def vertexAttribI4uiv(index, {x, y, z, w}), do: ...
@spec vertexAttribI4usv(index, v) :: :ok when index: integer(), v: {integer(), integer(), integer(), integer()}
def vertexAttribI4usv(index, {v1, v2, v3, v4}), do: ...
@spec vertexAttribIPointer(index, size, type, stride, pointer) :: :ok when index: integer(), size: integer(), type: enum(), stride: integer(), pointer: (offset() | mem())
def vertexAttribIPointer(index, size, type, stride, pointer) when is_integer(pointer), do: ...
def vertexAttribIPointer(index, size, type, stride, pointer), do: ...
@spec vertexAttribL1d(index, x) :: :ok when index: integer(), x: float()
def vertexAttribL1d(index, x), do: ...
@spec vertexAttribL1dv(index :: integer(), v) :: :ok when v: {x :: float()}
def vertexAttribL1dv(index, {x}), do: ...
@spec vertexAttribL2d(index, x, y) :: :ok when index: integer(), x: float(), y: float()
def vertexAttribL2d(index, x, y), do: ...
@spec vertexAttribL2dv(index :: integer(), v) :: :ok when v: {x :: float(), y :: float()}
def vertexAttribL2dv(index, {x, y}), do: ...
@spec vertexAttribL3d(index, x, y, z) :: :ok when index: integer(), x: float(), y: float(), z: float()
def vertexAttribL3d(index, x, y, z), do: ...
@spec vertexAttribL3dv(index :: integer(), v) :: :ok when v: {x :: float(), y :: float(), z :: float()}
def vertexAttribL3dv(index, {x, y, z}), do: ...
@spec vertexAttribL4d(index, x, y, z, w) :: :ok when index: integer(), x: float(), y: float(), z: float(), w: float()
def vertexAttribL4d(index, x, y, z, w), do: ...
@spec vertexAttribL4dv(index :: integer(), v) :: :ok when v: {x :: float(), y :: float(), z :: float(), w :: float()}
def vertexAttribL4dv(index, {x, y, z, w}), do: ...
@spec vertexAttribLPointer(index, size, type, stride, pointer) :: :ok when index: integer(), size: integer(), type: enum(), stride: integer(), pointer: (offset() | mem())
def vertexAttribLPointer(index, size, type, stride, pointer) when is_integer(pointer), do: ...
def vertexAttribLPointer(index, size, type, stride, pointer), do: ...
@spec vertexAttribPointer(index, size, type, normalized, stride, pointer) :: :ok when index: integer(), size: integer(), type: enum(), normalized: (0 | 1), stride: integer(), pointer: (offset() | mem())
def vertexAttribPointer(index, size, type, normalized, stride, pointer) when is_integer(pointer), do: ...
def vertexAttribPointer(index, size, type, normalized, stride, pointer), do: ...
@spec vertexBlendARB(count) :: :ok when count: integer()
def vertexBlendARB(count), do: ...
@spec vertexPointer(size, type, stride, ptr) :: :ok when size: integer(), type: enum(), stride: integer(), ptr: (offset() | mem())
def vertexPointer(size, type, stride, ptr) when is_integer(ptr), do: ...
def vertexPointer(size, type, stride, ptr), do: ...
@spec viewport(x, y, width, height) :: :ok when x: integer(), y: integer(), width: integer(), height: integer()
def viewport(x, y, width, height), do: ...
@spec viewportArrayv(first, v) :: :ok when first: integer(), v: [{float(), float(), float(), float()}]
def viewportArrayv(first, v), do: ...
@spec viewportIndexedf(index, x, y, w, h) :: :ok when index: integer(), x: float(), y: float(), w: float(), h: float()
def viewportIndexedf(index, x, y, w, h), do: ...
@spec viewportIndexedfv(index, v) :: :ok when index: integer(), v: {float(), float(), float(), float()}
def viewportIndexedfv(index, {v1, v2, v3, v4}), do: ...
@spec waitSync(sync, flags, timeout) :: :ok when sync: integer(), flags: integer(), timeout: integer()
def waitSync(sync, flags, timeout), do: ...
@spec weightbvARB(weights) :: :ok when weights: [integer()]
def weightbvARB(weights), do: ...
@spec weightdvARB(weights) :: :ok when weights: [float()]
def weightdvARB(weights), do: ...
@spec weightfvARB(weights) :: :ok when weights: [float()]
def weightfvARB(weights), do: ...
@spec weightivARB(weights) :: :ok when weights: [integer()]
def weightivARB(weights), do: ...
@spec weightsvARB(weights) :: :ok when weights: [integer()]
def weightsvARB(weights), do: ...
@spec weightubvARB(weights) :: :ok when weights: [integer()]
def weightubvARB(weights), do: ...
@spec weightuivARB(weights) :: :ok when weights: [integer()]
def weightuivARB(weights), do: ...
@spec weightusvARB(weights) :: :ok when weights: [integer()]
def weightusvARB(weights), do: ...
@spec windowPos2d(x, y) :: :ok when x: float(), y: float()
def windowPos2d(x, y), do: ...
@spec windowPos2dv(v) :: :ok when v: {x :: float(), y :: float()}
def windowPos2dv({x, y}), do: ...
@spec windowPos2f(x, y) :: :ok when x: float(), y: float()
def windowPos2f(x, y), do: ...
@spec windowPos2fv(v) :: :ok when v: {x :: float(), y :: float()}
def windowPos2fv({x, y}), do: ...
@spec windowPos2i(x, y) :: :ok when x: integer(), y: integer()
def windowPos2i(x, y), do: ...
@spec windowPos2iv(v) :: :ok when v: {x :: integer(), y :: integer()}
def windowPos2iv({x, y}), do: ...
@spec windowPos2s(x, y) :: :ok when x: integer(), y: integer()
def windowPos2s(x, y), do: ...
@spec windowPos2sv(v) :: :ok when v: {x :: integer(), y :: integer()}
def windowPos2sv({x, y}), do: ...
@spec windowPos3d(x, y, z) :: :ok when x: float(), y: float(), z: float()
def windowPos3d(x, y, z), do: ...
@spec windowPos3dv(v) :: :ok when v: {x :: float(), y :: float(), z :: float()}
def windowPos3dv({x, y, z}), do: ...
@spec windowPos3f(x, y, z) :: :ok when x: float(), y: float(), z: float()
def windowPos3f(x, y, z), do: ...
@spec windowPos3fv(v) :: :ok when v: {x :: float(), y :: float(), z :: float()}
def windowPos3fv({x, y, z}), do: ...
@spec windowPos3i(x, y, z) :: :ok when x: integer(), y: integer(), z: integer()
def windowPos3i(x, y, z), do: ...
@spec windowPos3iv(v) :: :ok when v: {x :: integer(), y :: integer(), z :: integer()}
def windowPos3iv({x, y, z}), do: ...
@spec windowPos3s(x, y, z) :: :ok when x: integer(), y: integer(), z: integer()
def windowPos3s(x, y, z), do: ...
@spec windowPos3sv(v) :: :ok when v: {x :: integer(), y :: integer(), z :: integer()}
def windowPos3sv({x, y, z}), do: ...
end
|
testData/org/elixir_lang/beam/decompiler/gl.ex
| 0.905533
| 0.675987
|
gl.ex
|
starcoder
|
defmodule CCSP.Chapter5.GeneticAlgorithm do
alias __MODULE__, as: T
alias CCSP.Chapter5.Chromosome
@moduledoc """
Corresponds to CCSP in Python, Chapter 5, titled "Genetic Algorithms"
"""
@type c :: Chromosome.t()
@type t :: %T{
population: list(c),
threshold: float | integer,
max_generations: integer,
mutation_chance: float,
crossover_chance: float,
selection_type: atom,
fitness_key: (c -> number)
}
defstruct [
:population,
:threshold,
:max_generations,
:mutation_chance,
:crossover_chance,
:selection_type,
:fitness_key
]
@spec new(list(c), float | integer, integer, float, float, atom) :: t
def new(
population,
threshold,
max_generations \\ 100,
mutation_chance \\ 0.01,
crossover_chance \\ 0.7,
selection_type \\ :tournament
) do
%T{
population: population,
threshold: threshold,
max_generations: max_generations,
mutation_chance: mutation_chance,
crossover_chance: crossover_chance,
selection_type: selection_type,
fitness_key: &Chromosome.fitness/1
}
end
@spec run(t) :: c
def run(ga) do
best = Enum.max_by(ga.population, ga.fitness_key)
{_ga, best} =
Enum.reduce_while(0..ga.max_generations, {ga, best}, fn generation, {ga, best} ->
if Chromosome.fitness(best) >= ga.threshold do
{:halt, {ga, best}}
else
avg =
Enum.reduce(ga.population, 0, fn v, acc -> Chromosome.fitness(v) + acc end) /
length(ga.population)
IO.puts("Generation #{generation} Best #{Chromosome.fitness(best)} Avg #{avg}")
ga =
ga
|> reproduce_and_replace()
|> mutate()
highest = Enum.max_by(ga.population, ga.fitness_key)
if Chromosome.fitness(highest) > Chromosome.fitness(best) do
{:cont, {ga, highest}}
else
{:cont, {ga, best}}
end
end
end)
best
end
@spec reproduce_and_replace(t) :: t
def reproduce_and_replace(ga) do
new_population =
Enum.reduce_while(ga.population, [], fn _x, acc ->
if length(acc) < length(ga.population) do
{p1, p2} =
cond do
:roulette == ga.selection_type ->
fitnesses = Enum.map(ga.population, &Chromosome.fitness(&1))
pick_roulette(ga, fitnesses)
:tournament == ga.selection_type ->
pick_tournament(ga, length(ga.population))
true ->
raise "Unhandled selection type: #{inspect(ga.selection_type)}"
end
new_population =
if :rand.uniform() < ga.crossover_chance do
{c1, c2} = Chromosome.crossover(p1, p2)
acc ++ [c1, c2]
else
acc ++ [p1, p2]
end
{:cont, new_population}
else
{:halt, acc}
end
end)
new_population =
if length(new_population) > length(ga.population) do
tl(new_population)
else
new_population
end
%T{ga | :population => new_population}
end
@spec mutate(t) :: t
def mutate(ga) do
population =
Enum.map(ga.population, fn element ->
if :rand.uniform() < ga.mutation_chance do
Chromosome.mutate(element)
else
element
end
end)
%T{ga | :population => population}
end
@spec pick_roulette(t, list(float)) :: {c, c}
def pick_roulette(ga, wheel) do
choices(ga.population, wheel, 2)
|> List.to_tuple()
end
@spec pick_tournament(t, non_neg_integer) :: {c, c}
def pick_tournament(ga, num_participants) do
participants = choices(ga.population, nil, num_participants)
Enum.sort_by(participants, ga.fitness_key, &>=/2)
|> Enum.take(2)
|> List.to_tuple()
end
@doc """
Given the list of elements and (non-cumulative) weights, returns k random elements. Duplicates are possible.
choices functions are loosely adapted from cpython https://github.com/python/cpython/blob/master/Lib/random.py#L397
"""
@spec choices(list(any), list(float) | nil, non_neg_integer) :: list(any)
def choices(population, weights, k) when k > 0 do
n = length(population)
k = k - 1
if weights == nil do
Enum.reduce(0..k, [], fn _i, acc ->
element = Enum.at(population, trunc(:rand.uniform() * n))
[element | acc]
end)
else
cum_weights = Enum.scan(weights, 0, &(&1 + &2))
choices_weighted(population, cum_weights, k, n)
end
end
@spec choices_weighted(list(any), list(float), non_neg_integer, non_neg_integer) :: list(any)
def choices_weighted(population, cum_weights, k, n) when k > 0 and n > 0 do
total = hd(Enum.reverse(cum_weights))
sorted_weight = Enum.sort(cum_weights)
hi = n
Enum.reduce(0..k, [], fn _i, acc ->
index = bisect_right(sorted_weight, :rand.uniform() * total, 0, hi)
element = Enum.at(population, index)
[element | acc]
end)
end
@doc """
See https://github.com/python/cpython/blob/master/Lib/bisect.py#L15
"""
@spec bisect_right(list(number), float, non_neg_integer, non_neg_integer) :: non_neg_integer
def bisect_right(a, x, lo, hi) when lo < hi and lo >= 0 do
# truncates to int
mid = div(lo + hi, 2)
{lo, hi} =
if x < Enum.at(a, mid) do
{lo, mid}
else
{mid + 1, hi}
end
bisect_right(a, x, lo, hi)
end
def bisect_right(_a, _x, lo, hi) when lo >= hi and lo >= 0 do
lo
end
end
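# Usage sketch (illustrative only): the Chromosome module is expected to provide
# fitness/1, crossover/2 and mutate/1; the MyChromosome name and its
# random_instance/0 constructor below are assumptions made for this example,
# not part of this codebase.
#
#     alias CCSP.Chapter5.GeneticAlgorithm
#
#     population = for _ <- 1..20, do: MyChromosome.random_instance()
#
#     best =
#       population
#       |> GeneticAlgorithm.new(13.0, 100, 0.1, 0.7, :tournament)
#       |> GeneticAlgorithm.run()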
|
lib/ccsp/chapter5/genetic_algorithm.ex
| 0.849472
| 0.557002
|
genetic_algorithm.ex
|
starcoder
|
defmodule Membrane.RTP.Packet do
@moduledoc """
Defines a struct describing an RTP packet and a way to parse and serialize it.
Based on [RFC3550](https://tools.ietf.org/html/rfc3550#page-13)
"""
alias Membrane.RTP.{Header, Utils}
@type t :: %__MODULE__{
header: Header.t(),
payload: binary()
}
@enforce_keys [:header, :payload]
defstruct @enforce_keys
@spec identify(binary()) :: :rtp | :rtcp
def identify(<<_first_byte, _marker::1, payload_type::7, _rest::binary>>)
when payload_type in 64..95,
do: :rtcp
def identify(_packet), do: :rtp
@spec serialize(t, align_to: pos_integer()) :: binary
def serialize(%__MODULE__{} = packet, [align_to: align_to] \\ [align_to: 1]) do
%__MODULE__{header: header, payload: payload} = packet
%Header{version: 2} = header
has_padding = 0
has_extension = if header.extension, do: 1, else: 0
marker = if header.marker, do: 1, else: 0
csrcs = Enum.map_join(header.csrcs, &<<&1::32>>)
serialized =
<<header.version::2, has_padding::1, has_extension::1, length(header.csrcs)::4, marker::1,
header.payload_type::7, header.sequence_number::16, header.timestamp::32, header.ssrc::32,
csrcs::binary, serialize_header_extension(header.extension)::binary, payload::binary>>
case Utils.align(serialized, align_to) do
{serialized, 0} ->
serialized
{serialized, _padding} ->
<<pre::2, _has_padding::1, post::bitstring>> = serialized
<<pre::2, 1::1, post::bitstring>>
end
end
defp serialize_header_extension(nil) do
<<>>
end
defp serialize_header_extension(%Header.Extension{data: data} = extension)
when byte_size(data) |> rem(4) == 0 do
<<extension.profile_specific::bitstring, byte_size(data) |> div(4)::16, data::binary>>
end
@spec parse(binary(), boolean()) ::
{:ok, %{packet: t(), has_padding?: boolean(), total_header_size: non_neg_integer()}}
| {:error, :wrong_version | :malformed_packet}
def parse(packet, encrypted?)
def parse(<<version::2, _::bitstring>>, _encrypted?) when version != 2,
do: {:error, :wrong_version}
def parse(
<<version::2, has_padding::1, has_extension::1, csrcs_cnt::4, marker::1, payload_type::7,
sequence_number::16, timestamp::32, ssrc::32, csrcs::binary-size(csrcs_cnt)-unit(32),
rest::binary>> = original_packet,
encrypted?
) do
with {:ok, {extension, payload}} <-
parse_header_extension(rest, has_extension == 1),
{:ok, {payload, padding}} =
Utils.strip_padding(payload, not encrypted? and has_padding == 1) do
header = %Header{
version: version,
marker: marker == 1,
ssrc: ssrc,
sequence_number: sequence_number,
payload_type: payload_type,
timestamp: timestamp,
csrcs: for(<<csrc::32 <- csrcs>>, do: csrc),
extension: extension
}
{:ok,
%{
packet: %__MODULE__{
header: header,
payload: if(encrypted?, do: original_packet, else: payload)
},
has_padding?: has_padding == 1,
total_header_size: byte_size(original_packet) - byte_size(payload) - padding
}}
else
:error -> {:error, :malformed_packet}
end
end
def parse(_binary, _parse_payload?), do: {:error, :malformed_packet}
defp parse_header_extension(binary, header_present?)
defp parse_header_extension(binary, false), do: {:ok, {nil, binary}}
defp parse_header_extension(
<<profile_specific::binary-size(2), data_len::16, data::binary-size(data_len)-unit(32),
rest::binary>>,
true
) do
extension = %Header.Extension{profile_specific: profile_specific, data: data}
{:ok, {extension, rest}}
end
defp parse_header_extension(_binary, true), do: :error
end
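# Round-trip sketch (illustrative only): the keys accepted by
# Membrane.RTP.Header are assumed from their use in serialize/2 and parse/2
# above; the field values are arbitrary.
#
#     header = %Membrane.RTP.Header{
#       version: 2, marker: false, extension: nil, csrcs: [],
#       payload_type: 96, sequence_number: 1, timestamp: 0, ssrc: 12_345
#     }
#
#     packet = %Membrane.RTP.Packet{header: header, payload: <<1, 2, 3>>}
#     binary = Membrane.RTP.Packet.serialize(packet)
#     {:ok, %{packet: _parsed, has_padding?: false}} =
#       Membrane.RTP.Packet.parse(binary, false)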
|
lib/membrane/rtp/packet.ex
| 0.746231
| 0.454048
|
packet.ex
|
starcoder
|
defmodule Reactivity.DSL.Behaviour do
@moduledoc """
The DSL for distributed reactive programming,
specifically, operations applicable to Behaviours.
"""
alias Reactivity.DSL.SignalObs, as: Sobs
alias Reactivity.DSL.Signal, as: Signal
alias ReactiveMiddleware.Registry
alias Observables.Obs
require Logger
@doc """
Checks if the given argument is a Behaviour.
"""
def is_behaviour?({:behaviour, _sobs, _gs}), do: true
def is_behaviour?(_o), do: false
@doc """
Creates a Behaviour from a plain RX Observable `obs`.
Attaches the given Guarantee `g` to it if provided.
Otherwise attaches the globally defined Guarantee,
which is FIFO (the absence of any Guarantee) by default.
"""
def from_plain_obs(obs) do
g = Registry.get_guarantee()
from_plain_obs(obs, g)
end
def from_plain_obs(obs, g) do
sobs =
obs
|> Sobs.from_plain_obs()
b = {:behaviour, sobs, []}
case g do
nil -> b
_ -> b |> Signal.add_guarantee(g)
end
end
@doc """
Creates a Behaviour from a Signal Observable, tags it with the given guarantees.
The assumption here is that the contexts of the Observable have already been attached.
This primitive can be used for Guarantees with non-obvious contexts (i.e. other than simple counters)
that the developer might come up with.
Attaches the given Guarantee to it if provided without changing the context.
Otherwise attaches the globally defined Guarantee,
which is FIFO (the absence of any Guarantee) by default.
"""
def from_signal_obs(sobs) do
g = Registry.get_guarantee()
gs =
case g do
nil -> []
_ -> [g]
end
from_signal_obs(sobs, gs)
end
def from_signal_obs(sobs, gs) do
{:behaviour, sobs, gs}
end
@doc """
Returns the current value of the Behaviour `b`.
"""
def evaluate({:behaviour, sobs, _gs}=_b) do
case Obs.last(sobs) do
nil -> nil
{v, _c} -> v
end
end
@doc """
Transforms a Behaviour `b` into an Event Stream.
"""
def changes({:behaviour, sobs, gs}=_b) do
{:event_stream, sobs, gs}
end
@doc """
Switches from an initial Behaviour to newly supplied Behaviours.
Takes an initial Behaviour `b` and a higher-order Event Stream `eh` carrying Behaviours.
Returns a Behaviour that is at first equal to the initial Behaviour.
Each time the Event Stream emits a new Behaviour,
the returned Behaviour switches to this new Behaviour.
Requires that all Behaviours have the same set of consistency guarantees.
"""
def switch({:behaviour, b_sobs, gs}=_b, {:event_stream, es_sobs, _}=_eh) do
switch_obs =
es_sobs
|> Obs.map(fn {{:behaviour, obs, _}, _gs} -> obs end)
robs = Obs.switch_repeat(b_sobs, switch_obs)
{:behaviour, robs, gs}
end
@doc """
Switches from one Behaviour to another on an event occurrence.
Takes two Behaviours, `b1` and `b2`, and an Event Stream `es`.
Returns a Behaviour that is equal to the first Behaviour until an event occurs,
at which point the resulting Behaviour switches to the second Behaviour.
The value of the event is irrelevant.
Requires that both Behaviours have the same set of consistency guarantees.
"""
def until({:behaviour, b_sobs1, gs1}=_b1, {:behaviour, b_sobs2, _gs2}=_b2, {:event_stream, es_sobs, _gse}=_es) do
robs = Obs.until_repeat(b_sobs1, b_sobs2, es_sobs)
{:behaviour, robs, gs1}
end
end
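# Usage sketch (illustrative only): Obs.from_enum/1 is a hypothetical
# constructor standing in for whatever Observables.Obs offers to build a plain
# Observable; it is not defined in this file.
#
#     alias Reactivity.DSL.Behaviour
#
#     b =
#       [1, 2, 3]
#       |> Obs.from_enum()
#       |> Behaviour.from_plain_obs()
#
#     Behaviour.is_behaviour?(b) #=> true
#     Behaviour.evaluate(b)      #=> the last value emitted so far, or nil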
|
lib/reactivity/dsl/behaviour.ex
| 0.859502
| 0.624794
|
behaviour.ex
|
starcoder
|
defmodule Mars.Robot do
@moduledoc """
This module has a Mars Rover Robot that takes a list of actions (movements or
turns), and returns the new state of the robot
"""
alias Mars.Robot
@type action :: atom
@type position :: {number, number}
@type orientation :: atom
defstruct orientation: :north, position: {1, 1}
@doc """
Takes a robot (with a position and a orientation) and a list of
actions and returns a new robot with a new position and/or orientation
"""
@spec do_actions(%Robot{}, [action]) :: %Robot{}
def do_actions(robot = %Robot{}, []) do
robot
end
def do_actions(robot = %Robot{}, [first_action | rest]) do
do_action(robot, first_action)
|> do_actions(rest)
end
@doc """
Takes a robot and a do_action and returns a new robot with a new position
and/or orientation
## Examples
iex> Mars.Robot.do_action(%Robot{orientation: :north, position: {2, 4}}, :forward)
%Robot{orientation: :north, position: {2, 5}}
"""
@spec do_action(%Robot{}, action) :: %Robot{}
# Move forward
def do_action(robot = %Robot{orientation: :north, position: {x, y}}, :forward), do: %{robot | position: move_square({x, y}, :up)}
def do_action(robot = %Robot{orientation: :east, position: {x, y}}, :forward), do: %{robot | position: move_square({x, y}, :right)}
def do_action(robot = %Robot{orientation: :south, position: {x, y}}, :forward), do: %{robot | position: move_square({x, y}, :down)}
def do_action(robot = %Robot{orientation: :west, position: {x, y}}, :forward), do: %{robot | position: move_square({x, y}, :left)}
# Moving backward
def do_action(robot = %Robot{orientation: :north, position: {x, y}}, :backward), do: %{robot | position: move_square({x, y}, :down)}
def do_action(robot = %Robot{orientation: :east, position: {x, y}}, :backward), do: %{robot | position: move_square({x, y}, :left)}
def do_action(robot = %Robot{orientation: :south, position: {x, y}}, :backward), do: %{robot | position: move_square({x, y}, :up)}
def do_action(robot = %Robot{orientation: :west, position: {x, y}}, :backward), do: %{robot | position: move_square({x, y}, :right)}
# Turning right
def do_action(robot = %Robot{orientation: :north}, :turn_right), do: %{robot | orientation: :east}
def do_action(robot = %Robot{orientation: :east}, :turn_right), do: %{robot | orientation: :south}
def do_action(robot = %Robot{orientation: :south}, :turn_right), do: %{robot | orientation: :west}
def do_action(robot = %Robot{orientation: :west}, :turn_right), do: %{robot | orientation: :north}
# Turning left
def do_action(robot = %Robot{orientation: :north}, :turn_left), do: %{robot | orientation: :west}
def do_action(robot = %Robot{orientation: :east}, :turn_left), do: %{robot | orientation: :north}
def do_action(robot = %Robot{orientation: :south}, :turn_left), do: %{robot | orientation: :east}
def do_action(robot = %Robot{orientation: :west}, :turn_left), do: %{robot | orientation: :south}
defp move_square({x, y}, :up), do: {x, y + 1}
defp move_square({x, y}, :down), do: {x, y - 1}
defp move_square({x, y}, :right), do: {x + 1, y}
defp move_square({x, y}, :left), do: {x - 1, y}
end
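# Worked example (follows directly from the clauses above): starting from the
# default state, {1, 1} facing :north, moving forward, turning right and moving
# forward again ends at {2, 2} facing :east.
#
#     iex> alias Mars.Robot
#     iex> Robot.do_actions(%Robot{}, [:forward, :turn_right, :forward])
#     %Robot{orientation: :east, position: {2, 2}}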
|
lib/mars/robot.ex
| 0.929328
| 0.864196
|
robot.ex
|
starcoder
|
defmodule SemanticLive do
@moduledoc """
Provides LiveView Semantic-UI components that function without the use of Javascript.
"""
import Phoenix.LiveView
@doc """
Renders a Dropdown LiveView. The form should be a `Phoenix.HTML.Form`. Expects `options`
to be a list of `{name, value}` tuples, and `opts` to be a keyword-list that can be used
to modify dropdown behaviour.
If no form is provided, the dropdown will send a message to its parent LiveView in the form
`{:option_selected, tag, {name, value}}`, where `tag` is used to differentiate between multiple
dropdowns.
## Options
* `:class` - a binary containing space-delimited class names to be added to the
dropdown div generated by the LiveView. Allows the style of the dropdown to be changed.
* `:text` - initial text to be displayed in the dropdown when no option is selected. Defaults
to 'Select'
"""
def dropdown(form, name, options, socket, opts) when is_list(options) do
live_render(socket, SemanticLive.Dropdown, session: Map.merge(%{
form: form,
name: name,
options: options
}, Map.new(opts)))
end
def dropdown(form, name, options, socket) when is_list(options),
do: dropdown(form, name, options, socket, [])
def dropdown(options, tag, socket, opts) when is_list(options) do
live_render(socket, SemanticLive.Dropdown, session: Map.merge(%{
tag: tag,
options: options
}, Map.new(opts)))
end
def dropdown(options, tag, socket) when is_list(options),
do: dropdown(options, tag, socket, [])
@doc """
Renders a Search LiveView. The form should be a `Phoenix.HTML.Form`. Expects `fun` to be a function
that accepts a single binary query argument and returns a list of results in the form `{name, value}`.
"""
def search(form, name, fun, socket, opts \\ []) do
live_render(socket, SemanticLive.Search, session: Map.merge(%{
form: form,
name: name,
search: fun
}, Map.new(opts)))
end
@doc """
Renders a Flash LiveView. If a flash message of the given level is assigned to the conn, the Flash
view renders it and then hides it after 3 seconds by default.
## Options
* `:show_for` - an integer indicating the number of milliseconds for which the flash message should be
visible.
"""
def flash(conn, level, opts \\ []) do
live_render(conn, SemanticLive.Flash, session: Map.merge(%{
conn: conn,
level: level
}, Map.new(opts)))
end
defmacro __using__(_) do
quote do
import SemanticLive
end
end
end
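# Template sketch (illustrative only): rendering a form-backed dropdown from a
# parent LiveView template. @form, @socket, the field name and the option list
# are assumptions made for the example.
#
#     <%= SemanticLive.dropdown(@form, :colour, [{"Red", "red"}, {"Blue", "blue"}],
#           @socket, class: "fluid selection", text: "Pick a colour") %>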
|
lib/semantic_live.ex
| 0.828454
| 0.512205
|
semantic_live.ex
|
starcoder
|
defmodule Erl2ex do
@moduledoc """
Erl2ex is an Erlang to Elixir transpiler, converting well-formed Erlang
source to Elixir source with equivalent functionality.
The goal is to produce correct, functioning Elixir code, but not necessarily
perfectly idiomatic. This tool may be used as a starting point when porting
code from Erlang to Elixir, but manual cleanup will likely be desired.
This module provides the main entry points into Erl2ex.
"""
alias Erl2ex.Results
alias Erl2ex.Sink
alias Erl2ex.Source
alias Erl2ex.Pipeline.Analyze
alias Erl2ex.Pipeline.Codegen
alias Erl2ex.Pipeline.Convert
alias Erl2ex.Pipeline.InlineIncludes
alias Erl2ex.Pipeline.Parse
@typedoc """
Options that may be provided to a conversion run.
Recognized options are:
* `:include_dir` Add a directory to the include path.
* `:lib_dir` Specifies the directory for a named application.
* `:define_prefix` Prefix added to the environment variable or config key
names that are read to initialize macro definitions. Default: "DEFINE_".
* `:defines_from_config` An application whose config should be used to
initialize macro definitions. If not specified or set to nil, system
environment variables will be used.
* `:emit_file_headers` Add a header comment to each file. Default is true.
* `:verbosity` Set the output verbosity level. (Default is 0, which
outputs only error messages. 1 outputs basic status information, and
2 outputs debug information.)
"""
@type options :: [
include_dir: Path.t,
lib_dir: {atom, Path.t} | %{atom => Path.t},
define_prefix: String.t,
defines_from_config: atom,
emit_file_headers: boolean,
verbosity: integer
]
@typedoc """
A file identifier, which may be a filesystem path or a symbolic id.
"""
@type file_id :: Path.t | atom
@doc """
Converts the source for an Erlang module, represented as a string.
If the conversion is successful, returns a tuple of {:ok, result}.
If an error occurs, returns a tuple of {:error, error_details}.
"""
@spec convert_str(String.t, options) ::
{:ok, String.t} | {:error, %CompileError{}}
def convert_str(source_str, opts \\ []) do
internal_convert_str(source_str, opts,
fn(results, sink) ->
case Results.get_error(results) do
nil -> Sink.get_string(sink, nil)
err -> {:error, err}
end
end)
end
@doc """
Converts the source for an Erlang module, represented as a string, and
returns the Elixir source as a string.
Raises a CompileError if an error occurs.
"""
@spec convert_str!(String.t, options) :: String.t
def convert_str!(source_str, opts \\ []) do
internal_convert_str(source_str, opts,
fn(results, sink) ->
Results.throw_error(results)
{:ok, str} = Sink.get_string(sink, nil)
str
end)
end
defp internal_convert_str(source_str, opts, result_handler) do
opts = Keyword.merge(opts, source_data: source_str)
source = Source.start_link(opts)
sink = Sink.start_link(allow_get: true)
results_collector = Results.Collector.start_link()
try do
convert(source, sink, results_collector, nil, nil, opts)
results = Results.Collector.get(results_collector)
result_handler.(results, sink)
after
Source.stop(source)
Sink.stop(sink)
Results.Collector.stop(results_collector)
end
end
@doc """
Converts a single Erlang source file, and writes the generated Elixir code
to a new file.
You must provide the relative or absolute path to the Erlang source. You may
optionally provide a path to the Elixir destination. If the destination is
not specified, the result will be written in the same directory as the source.
Returns a results object.
"""
@spec convert_file(Path.t, Path.t | nil, options) :: Results.t
def convert_file(source_path, dest_path \\ nil, opts \\ []) do
dest_path =
if dest_path == nil do
"#{Path.rootname(source_path)}.ex"
else
dest_path
end
cur_dir = File.cwd!
include_dirs = Keyword.get_values(opts, :include_dir)
source = Source.start_link(source_dir: cur_dir, include_dirs: include_dirs)
sink = Sink.start_link(dest_dir: cur_dir)
results_collector = Results.Collector.start_link()
try do
convert(source, sink, results_collector, source_path, dest_path, opts)
if Keyword.get(opts, :verbosity, 0) > 0 do
IO.puts(:stderr, "Converted #{source_path} -> #{dest_path}")
end
Results.Collector.get(results_collector)
after
Source.stop(source)
Sink.stop(sink)
Results.Collector.stop(results_collector)
end
end
@doc """
Searches a directory for Erlang source files, and writes corresponding
Elixir files for each module.
By default, the Elixir files will be written in the same directories as the
Erlang source files. You may optionally provide a different base directory
for the destination files.
Returns a results object.
"""
@spec convert_dir(Path.t, Path.t | nil, options) :: Results.t
def convert_dir(source_dir, dest_dir \\ nil, opts \\ []) do
dest_dir = if dest_dir == nil, do: source_dir, else: dest_dir
source = opts
|> Keyword.put(:source_dir, source_dir)
|> Source.start_link
sink = Sink.start_link(dest_dir: dest_dir)
results_collector = Results.Collector.start_link()
try do
"#{source_dir}/**/*.erl"
|> Path.wildcard
|> Enum.each(fn source_full_path ->
source_rel_path = Path.relative_to(source_full_path, source_dir)
dest_rel_path = "#{Path.rootname(source_rel_path)}.ex"
dest_full_path = Path.join(dest_dir, dest_rel_path)
convert(source, sink, results_collector, source_rel_path, dest_rel_path, opts)
if Keyword.get(opts, :verbosity, 0) > 0 do
IO.puts(:stderr, "Converted #{source_full_path} -> #{dest_full_path}")
end
end)
Results.Collector.get(results_collector)
after
Source.stop(source)
Sink.stop(sink)
Results.Collector.stop(results_collector)
end
end
@doc """
Given a source and a sink, and the source path for one Erlang source file,
converts to Elixir and writes the result to the sink at the given destination
path. Records the outcome with the given results collector. Returns :ok.
"""
@spec convert(Source.t, Sink.t, Results.Collector.t, Erl2ex.file_id, Erl2ex.file_id, options) :: :ok
def convert(source, sink, results_collector, source_path, dest_path, opts \\ []) do
{source_str, actual_source_path} = Source.read_source(source, source_path)
opts =
if actual_source_path == nil do
opts
else
[{:cur_file_path, actual_source_path} | opts]
end
try do
str = source_str
|> Parse.string(opts)
|> InlineIncludes.process(source, actual_source_path)
|> Analyze.forms(opts)
|> Convert.module(opts)
|> Codegen.to_str(opts)
:ok = Sink.write(sink, dest_path, str)
:ok = Results.Collector.put_success(results_collector, source_path, dest_path)
rescue
error in CompileError ->
:ok = Results.Collector.put_error(results_collector, source_path, error)
end
:ok
end
end
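# Usage sketch (illustrative only): converting a small Erlang module supplied
# as a string; the Erlang source below is an arbitrary example.
#
#     erl_source = """
#     -module(hello).
#     -export([greet/0]).
#     greet() -> ok.
#     """
#
#     {:ok, elixir_source} = Erl2ex.convert_str(erl_source)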
|
lib/erl2ex.ex
| 0.679923
| 0.47591
|
erl2ex.ex
|
starcoder
|
defmodule Benchmarks.GoogleMessage3.Message24346 do
@moduledoc false
use Protobuf, syntax: :proto2
end
defmodule Benchmarks.GoogleMessage3.Message24401 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field24679, 1, optional: true, type: Benchmarks.GoogleMessage3.Message24400
end
defmodule Benchmarks.GoogleMessage3.Message24402 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field24680, 1, optional: true, type: Benchmarks.GoogleMessage3.Message24400
end
defmodule Benchmarks.GoogleMessage3.Message24379 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field24603, 1, optional: true, type: :string
field :field24604, 2, optional: true, type: :string
field :field24605, 3, optional: true, type: :string
field :field24606, 4, required: true, type: Benchmarks.GoogleMessage3.Message24380
field :field24607, 5, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field24608, 6, optional: true, type: :string
field :field24609, 7, optional: true, type: Benchmarks.GoogleMessage3.Message24381
field :field24610, 8, repeated: true, type: :string
field :field24611, 17, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field24612, 9, repeated: true, type: :string
field :field24613, 10, repeated: true, type: :string
field :field24614, 11, repeated: true, type: :string
field :field24615, 14, optional: true, type: :string
field :field24616, 12, optional: true, type: :string
field :field24617, 16, optional: true, type: :string
field :field24618, 13, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field24619, 15, repeated: true, type: :string
field :field24620, 18, repeated: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message27358 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field27415, 1, optional: true, type: :int32
field :field27416, 2, optional: true, type: :int32
end
defmodule Benchmarks.GoogleMessage3.Message34381 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field34398, 1, optional: true, type: :string
field :field34399, 2, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field34400, 3, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field34401, 4, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field34402, 5, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field34403, 6, optional: true, type: :bool
field :field34404, 7, optional: true, type: :bool
field :field34405, 8, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field34406, 9, optional: true, type: :bool
field :field34407, 10, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
end
defmodule Benchmarks.GoogleMessage3.Message34619 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field34641, 1, optional: true, type: :double
field :field34642, 2, optional: true, type: :double
field :field34643, 3, optional: true, type: :double
field :field34644, 4, optional: true, type: :double
field :field34645, 11, optional: true, type: :double
field :field34646, 5, optional: true, type: :double
field :field34647, 100, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
end
defmodule Benchmarks.GoogleMessage3.Message730 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field897, 19, optional: true, type: :string
field :field898, 27, repeated: true, type: :string
field :field899, 28, repeated: true, type: :string
field :field900, 21, repeated: true, type: :string
field :field901, 30, optional: true, type: :string
field :field902, 20, repeated: true, type: :uint32
field :field903, 32, repeated: true, type: :uint32
field :field904, 16, repeated: true, type: :string
field :field905, 6, repeated: true, type: Benchmarks.GoogleMessage3.Message697
field :field906, 7, repeated: true, type: Benchmarks.GoogleMessage3.Message704
field :field907, 18, repeated: true, type: :string
field :field908, 8, repeated: true, type: Benchmarks.GoogleMessage3.Message703
field :field909, 9, repeated: true, type: :string
field :field910, 10, optional: true, type: Benchmarks.GoogleMessage3.Message716
field :field911, 11, optional: true, type: Benchmarks.GoogleMessage3.Message718
field :field912, 14, optional: true, type: :bool
field :field913, 4, repeated: true, type: Benchmarks.GoogleMessage3.Message715
field :field914, 17, repeated: true, type: :string
field :field915, 23, repeated: true, type: :string
field :field916, 24, repeated: true, type: Benchmarks.GoogleMessage3.Message719
field :field917, 26, repeated: true, type: Benchmarks.GoogleMessage3.Message728
field :field918, 35, repeated: true, type: Benchmarks.GoogleMessage3.Message702
field :field919, 36, optional: true, type: :string
field :field920, 37, repeated: true, type: :string
field :field921, 38, optional: true, type: :int64
field :field922, 39, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field923, 1, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field924, 2, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field925, 3, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field926, 5, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field927, 13, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field928, 22, repeated: true, type: :string
field :field929, 31, optional: true, type: :bytes
extensions [{25, 26}, {29, 30}, {34, 35}, {15, 16}]
end
defmodule Benchmarks.GoogleMessage3.Message33958.Message33959 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field33982, 3, required: true, type: :string
field :field33983, 4, optional: true, type: :string
field :field33984, 5, optional: true, type: :string
field :field33985, 8, optional: true, type: :fixed64
field :field33986, 10, optional: true, type: :bool
field :field33987, 6, optional: true, type: Benchmarks.GoogleMessage3.Message0
end
defmodule Benchmarks.GoogleMessage3.Message33958 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field33977, 1, optional: true, type: :string
field :field33978, 9, optional: true, type: :string
field :message33959, 2, repeated: true, type: :group
field :field33980, 7, optional: true, type: Benchmarks.GoogleMessage3.Enum33960, enum: true
end
defmodule Benchmarks.GoogleMessage3.Message6637 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field6670, 2, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field6671, 1, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field6672, 3, optional: true, type: :int32
field :field6673, 4, repeated: true, type: :string
field :field6674, 5, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
end
defmodule Benchmarks.GoogleMessage3.Message6643 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field6683, 3, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field6684, 4, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field6685, 5, optional: true, type: :double
field :field6686, 6, optional: true, type: :double
field :field6687, 1, optional: true, type: :int32
field :field6688, 2, optional: true, type: :int32
field :field6689, 9, optional: true, type: :double
field :field6690, 10, optional: true, type: :bytes
field :field6691, 11, optional: true, type: :int32
field :field6692, 12, optional: true, type: :bool
field :field6693, 13, optional: true, type: :bool
field :field6694, 15, optional: true, type: Benchmarks.GoogleMessage3.Message6578
field :field6695, 16, optional: true, type: Benchmarks.GoogleMessage3.UnusedEnum, enum: true
field :field6696, 17, optional: true, type: :int64
field :field6697, 22, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field6698, 19, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field6699, 20, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field6700, 21, optional: true, type: :int32
end
defmodule Benchmarks.GoogleMessage3.Message6126 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field6152, 1, required: true, type: :string
field :field6153, 9, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field6154, 14, optional: true, type: :int32
field :field6155, 10, optional: true, type: :bytes
field :field6156, 12, optional: true, type: Benchmarks.GoogleMessage3.Message6024
field :field6157, 4, optional: true, type: :int32
field :field6158, 5, optional: true, type: :string
field :field6159, 6, optional: true, type: :int32
field :field6160, 2, repeated: true, type: :int32
field :field6161, 3, repeated: true, type: :int32
field :field6162, 7, repeated: true, type: Benchmarks.GoogleMessage3.Message6052
field :field6163, 11, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field6164, 15, optional: true, type: Benchmarks.GoogleMessage3.Enum6065, enum: true
field :field6165, 8, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field6166, 13, optional: true, type: :bool
field :field6167, 16, optional: true, type: :bool
field :field6168, 18, optional: true, type: :bool
field :field6169, 17, repeated: true, type: Benchmarks.GoogleMessage3.Message6054
field :field6170, 19, optional: true, type: :int32
end
defmodule Benchmarks.GoogleMessage3.Message13083.Message13084 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field13107, 3, required: true, type: :float
field :field13108, 4, required: true, type: :int32
field :field13109, 5, optional: true, type: :float
field :field13110, 6, repeated: true, type: Benchmarks.GoogleMessage3.Enum13092, enum: true
end
defmodule Benchmarks.GoogleMessage3.Message13083.Message13085 do
@moduledoc false
use Protobuf, syntax: :proto2
end
defmodule Benchmarks.GoogleMessage3.Message13083.Message13086 do
@moduledoc false
use Protobuf, syntax: :proto2
end
defmodule Benchmarks.GoogleMessage3.Message13083.Message13087 do
@moduledoc false
use Protobuf, syntax: :proto2
end
defmodule Benchmarks.GoogleMessage3.Message13083 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field13096, 1, optional: true, type: :float
field :message13084, 2, repeated: true, type: :group
field :field13098, 44, optional: true, type: :float
field :field13099, 45, optional: true, type: :float
field :field13100, 46, optional: true, type: :uint64
field :field13101, 47, optional: true, type: :float
field :message13085, 16, optional: true, type: :group
field :message13086, 23, repeated: true, type: :group
field :message13087, 29, repeated: true, type: :group
field :field13105, 43, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
end
defmodule Benchmarks.GoogleMessage3.Message13088.Message13089 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field13139, 2, required: true, type: :string
field :field13140, 3, optional: true, type: :float
end
defmodule Benchmarks.GoogleMessage3.Message13088 do
@moduledoc false
use Protobuf, syntax: :proto2
field :message13089, 1, repeated: true, type: :group
field :field13136, 4, optional: true, type: :int64
field :field13137, 5, optional: true, type: :bool
end
defmodule Benchmarks.GoogleMessage3.Message10391 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field10411, 1, optional: true, type: Benchmarks.GoogleMessage3.Enum10392, enum: true
field :field10412, 2, optional: true, type: Benchmarks.GoogleMessage3.UnusedEnum, enum: true
field :field10413, 3, optional: true, type: :int64
field :field10414, 4, optional: true, type: :string
field :field10415, 5, optional: true, type: :string
field :field10416, 6, optional: true, type: :bytes
field :field10417, 8, optional: true, type: :bool
field :field10418, 9, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field10419, 10, optional: true, type: :bool
end
defmodule Benchmarks.GoogleMessage3.Message11873 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field11876, 1, optional: true, type: :string
field :field11877, 4, optional: true, type: :string
field :field11878, 5, optional: true, type: Benchmarks.GoogleMessage3.Message10573
field :field11879, 6, optional: true, type: Benchmarks.GoogleMessage3.Message10582
field :field11880, 7, optional: true, type: Benchmarks.GoogleMessage3.Message10824
field :field11881, 12, optional: true, type: Benchmarks.GoogleMessage3.Message10773
field :field11882, 8, optional: true, type: Benchmarks.GoogleMessage3.Message11866
field :field11883, 13, optional: true, type: Benchmarks.GoogleMessage3.Message10818
field :field11884, 16, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field11885, 11, optional: true, type: Benchmarks.GoogleMessage3.Message10155
field :field11886, 14, optional: true, type: Benchmarks.GoogleMessage3.Message10469
field :field11887, 15, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
extensions [{9, 10}, {10, 11}]
end
defmodule Benchmarks.GoogleMessage3.Message35506 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field35524, 1, optional: true, type: :int32
field :field35525, 2, optional: true, type: :string
field :field35526, 3, optional: true, type: Benchmarks.GoogleMessage3.Enum35507, enum: true
field :field35527, 4, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
end
defmodule Benchmarks.GoogleMessage3.Message13151 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field13158, 1, repeated: true, type: Benchmarks.GoogleMessage3.Message13145
end
defmodule Benchmarks.GoogleMessage3.Message18253.Message18254 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field18362, 2, required: true, type: :fixed64
field :field18363, 3, required: true, type: :double
end
defmodule Benchmarks.GoogleMessage3.Message18253 do
@moduledoc false
use Protobuf, syntax: :proto2
field :message18254, 1, repeated: true, type: :group
end
defmodule Benchmarks.GoogleMessage3.Message16685 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field16694, 2, repeated: true, type: Benchmarks.GoogleMessage3.Message16686
end
defmodule Benchmarks.GoogleMessage3.Message16816.Message16817 do
@moduledoc false
use Protobuf, syntax: :proto2
end
defmodule Benchmarks.GoogleMessage3.Message16816.Message16818 do
@moduledoc false
use Protobuf, syntax: :proto2
end
defmodule Benchmarks.GoogleMessage3.Message16816 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field16826, 1, optional: true, type: :float
field :field16827, 2, optional: true, type: Benchmarks.GoogleMessage3.Enum16819, enum: true
field :field16828, 3, optional: true, type: :float
field :message16817, 4, repeated: true, type: :group
field :field16830, 7, optional: true, type: :bool
field :field16831, 8, optional: true, type: :bool
field :message16818, 12, repeated: true, type: :group
field :field16833, 10, optional: true, type: :string
field :field16834, 13, optional: true, type: :bool
field :field16835, 14, optional: true, type: :bool
end
defmodule Benchmarks.GoogleMessage3.Message13168 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field13212, 1, required: true, type: :int32
field :field13213, 7, optional: true, type: :fixed64
field :field13214, 8, optional: true, type: :bool
field :field13215, 10, optional: true, type: :fixed64
field :field13216, 11, optional: true, type: :bool
field :field13217, 9, optional: true, type: Benchmarks.GoogleMessage3.Message12796
field :field13218, 2, required: true, type: :double
field :field13219, 3, required: true, type: :bool
field :field13220, 4, optional: true, type: :int32
field :field13221, 5, required: true, type: :bool
field :field13222, 6, optional: true, type: :int32
end
defmodule Benchmarks.GoogleMessage3.Message13167 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field13199, 1, required: true, type: :int32
field :field13200, 2, optional: true, type: :int32
field :field13201, 3, optional: true, type: :int32
field :field13202, 8, optional: true, type: :bool
field :field13203, 12, optional: true, type: :fixed64
field :field13204, 13, optional: true, type: :bool
field :field13205, 11, optional: true, type: Benchmarks.GoogleMessage3.Message12796
field :field13206, 9, optional: true, type: :fixed64
field :field13207, 10, optional: true, type: :bool
field :field13208, 4, repeated: true, type: :int32
field :field13209, 5, optional: true, type: :int32
field :field13210, 6, optional: true, type: :int32
field :field13211, 7, optional: true, type: :int32
end
defmodule Benchmarks.GoogleMessage3.Message1374 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field1375, 1, required: true, type: :string
field :field1376, 2, optional: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message18943 do
@moduledoc false
use Protobuf, syntax: :proto2
end
defmodule Benchmarks.GoogleMessage3.Message18944 do
@moduledoc false
use Protobuf, syntax: :proto2
end
defmodule Benchmarks.GoogleMessage3.Message18856 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field18857, 1, optional: true, type: :string
field :field18858, 2, optional: true, type: :string
field :field18859, 31, optional: true, type: :bool
field :field18860, 26, optional: true, type: :string
field :field18861, 3, optional: true, type: :string
field :field18862, 4, optional: true, type: :string
field :field18863, 5, optional: true, type: :string
field :field18864, 17, optional: true, type: :string
field :field18865, 6, optional: true, type: :string
field :field18866, 7, optional: true, type: :string
field :field18867, 8, optional: true, type: :string
field :field18868, 9, optional: true, type: :string
field :field18869, 10, optional: true, type: :string
field :field18870, 11, optional: true, type: :string
field :field18871, 21, optional: true, type: :string
field :field18872, 18, optional: true, type: :string
field :field18873, 19, optional: true, type: :string
field :field18874, 20, optional: true, type: :string
field :field18875, 22, optional: true, type: :string
field :field18876, 23, optional: true, type: :string
field :field18877, 24, optional: true, type: :string
field :field18878, 25, optional: true, type: :string
field :field18879, 12, optional: true, type: :string
field :field18880, 13, optional: true, type: :string
field :field18881, 29, optional: true, type: :string
field :field18882, 30, optional: true, type: :string
field :field18883, 15, optional: true, type: :string
field :field18884, 16, optional: true, type: :string
field :field18885, 14, repeated: true, type: :string
field :field18886, 27, optional: true, type: :string
field :field18887, 28, optional: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message3850 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field3924, 2, optional: true, type: Benchmarks.GoogleMessage3.Enum3851, enum: true
field :field3925, 12, optional: true, type: :bool
field :field3926, 4, optional: true, type: :int32
field :field3927, 10, optional: true, type: :bool
field :field3928, 13, optional: true, type: :bool
field :field3929, 14, optional: true, type: :bool
end
defmodule Benchmarks.GoogleMessage3.Message6721 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field6744, 1, optional: true, type: Benchmarks.GoogleMessage3.Message6722
field :field6745, 2, optional: true, type: :bool
field :field6746, 3, optional: true, type: :bool
field :field6747, 4, optional: true, type: :bool
end
defmodule Benchmarks.GoogleMessage3.Message6742 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field6758, 1, optional: true, type: :bool
end
defmodule Benchmarks.GoogleMessage3.Message6726 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field6752, 1, optional: true, type: :int64
field :field6753, 2, repeated: true, type: Benchmarks.GoogleMessage3.Message6727
end
defmodule Benchmarks.GoogleMessage3.Message6733 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field6754, 1, optional: true, type: :int64
field :field6755, 2, optional: true, type: :int64
field :field6756, 3, optional: true, type: :bool
end
defmodule Benchmarks.GoogleMessage3.Message6723 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field6748, 1, optional: true, type: :int64
field :field6749, 2, repeated: true, type: Benchmarks.GoogleMessage3.Message6724
end
defmodule Benchmarks.GoogleMessage3.Message6725 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field6750, 1, optional: true, type: :int32
field :field6751, 2, optional: true, type: :int32
end
defmodule Benchmarks.GoogleMessage3.Message6734 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field6757, 1, repeated: true, type: Benchmarks.GoogleMessage3.Message6735
end
defmodule Benchmarks.GoogleMessage3.Message8184 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field8228, 1, optional: true, type: Benchmarks.GoogleMessage3.Message7966
field :field8229, 2, optional: true, type: :bool
field :field8230, 3, repeated: true, type: Benchmarks.GoogleMessage3.Message8183
end
defmodule Benchmarks.GoogleMessage3.Message8477 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field8486, 1, optional: true, type: Benchmarks.GoogleMessage3.Message7966
field :field8487, 2, optional: true, type: :int64
field :field8488, 3, optional: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message8454 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field8465, 1, optional: true, type: Benchmarks.GoogleMessage3.Message8449
field :field8466, 3, optional: true, type: :int64
field :field8467, 4, optional: true, type: :int32
field :field8468, 5, optional: true, type: :bool
end
defmodule Benchmarks.GoogleMessage3.Message8476 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field8483, 1, optional: true, type: :string
field :field8484, 2, optional: true, type: :string
field :field8485, 3, optional: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message8455 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field8470, 1, optional: true, type: Benchmarks.GoogleMessage3.Message8449
field :field8471, 2, repeated: true, type: Benchmarks.GoogleMessage3.Message8456
field :field8472, 5, optional: true, type: Benchmarks.GoogleMessage3.Message8457
field :field8473, 6, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
end
defmodule Benchmarks.GoogleMessage3.Message8475 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field8481, 1, optional: true, type: :string
field :field8482, 2, optional: true, type: :int64
end
defmodule Benchmarks.GoogleMessage3.Message12559 do
@moduledoc false
use Protobuf, syntax: :proto2
end
defmodule Benchmarks.GoogleMessage3.Message12817 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field12826, 1, optional: true, type: :int32
field :field12827, 2, optional: true, type: :int32
field :field12828, 3, optional: true, type: :int32
end
defmodule Benchmarks.GoogleMessage3.Message16480 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field16490, 1, optional: true, type: Benchmarks.GoogleMessage3.Message13358
field :field16491, 2, optional: true, type: Benchmarks.GoogleMessage3.Enum16042, enum: true
field :field16492, 3, optional: true, type: Benchmarks.GoogleMessage3.Message13912
field :field16493, 4, optional: true, type: :string
field :field16494, 5, optional: true, type: :string
field :field16495, 6, optional: true, type: :string
field :field16496, 7, optional: true, type: :string
field :field16497, 8, optional: true, type: Benchmarks.GoogleMessage3.Message13358
field :field16498, 9, optional: true, type: :fixed32
end
defmodule Benchmarks.GoogleMessage3.Message24317 do
@moduledoc false
use Protobuf, syntax: :proto2
field :field24446, 1, optional: true, type: :string
field :field24447, 2, optional: true, type: Benchmarks.GoogleMessage3.Message24312
field :field24448, 3, repeated: true, type: Benchmarks.GoogleMessage3.Message24315
field :field24449, 4, repeated: true, type: Benchmarks.GoogleMessage3.Message24313
field :field24450, 5, repeated: true, type: Benchmarks.GoogleMessage3.Message24316
field :field24451, 6, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field24452, 7, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field24453, 8, repeated: true, type: :string
field :field24454, 9, repeated: true, type: :string
field :field24455, 10, repeated: true, type: :string
field :field24456, 28, repeated: true, type: :string
field :field24457, 11, optional: true, type: :string
field :field24458, 12, optional: true, type: :string
field :field24459, 13, optional: true, type: :string
field :field24460, 14, optional: true, type: :string
field :field24461, 15, repeated: true, type: :string
field :field24462, 16, optional: true, type: :string
field :field24463, 17, repeated: true, type: :string
field :field24464, 18, repeated: true, type: :string
field :field24465, 19, repeated: true, type: :string
field :field24466, 20, repeated: true, type: :string
field :field24467, 21, repeated: true, type: :string
field :field24468, 22, repeated: true, type: :string
field :field24469, 23, repeated: true, type: :string
field :field24470, 24, repeated: true, type: :string
field :field24471, 25, optional: true, type: :string
field :field24472, 26, optional: true, type: :string
field :field24473, 27, repeated: true, type: :string
field :field24474, 40, optional: true, type: :bool
end
defmodule Benchmarks.GoogleMessage3.PbExtension do
@moduledoc false
use Protobuf, syntax: :proto2
extend Benchmarks.GoogleMessage3.Message8301, :"Message8454.field8469", 66,
optional: true,
type: Benchmarks.GoogleMessage3.Message8454
extend Benchmarks.GoogleMessage3.Message0, :"Message33958.field33981", 10_747_482,
optional: true,
type: Benchmarks.GoogleMessage3.Message33958
extend Benchmarks.GoogleMessage3.Message8302, :"Message8455.field8474", 66,
optional: true,
type: Benchmarks.GoogleMessage3.Message8455
end
|
bench/lib/datasets/google_message3/benchmark_message3_4.pb.ex
| 0.61682
| 0.423875
|
benchmark_message3_4.pb.ex
|
starcoder
|
defmodule Cashtrail.Banking do
@moduledoc """
The Banking context manages bank accounts and institutions.
"""
import Ecto.Query, warn: false
alias Cashtrail.Repo
alias Cashtrail.{Banking, Entities, Paginator}
import Cashtrail.Entities.Tenants, only: [to_prefix: 1]
import Cashtrail.QueryBuilder, only: [build_filter: 3, build_search: 3]
import Cashtrail.Statuses, only: [filter_by_status: 3]
@type institution :: Banking.Institution.t()
@type account :: Banking.Account.t()
@doc """
Returns a `%Cashtrail.Paginator.Page{}` struct with a list of institutions in the
`:entries` field.
If no institutions are found, an empty list is returned in the `:entries` field.
## Arguments
* entity - The `%Cashtrail.Entities.Entity{}` that the institution references.
* options - A `keyword` list of the following options:
* `:search` - search institutions by :country, or by the contact `:name`, or
`:legal_name`.
* See `Cashtrail.Paginator.paginate/2` to know about the pagination options.
See `Cashtrail.Banking.Institution` to have more detailed info about
each field to be filtered or searched.
## Examples
iex> list_institutions(entity)
%Cashtrail.Paginator.Page{entries: [%Cashtrail.Banking.Institution{}, ...], ...}
iex> list_institutions(entity, page: 2)
%Cashtrail.Paginator.Page{entries: [%Cashtrail.Banking.Institution{}, ...], page: 2}
iex> list_institutions(entity, search: "My Bank")
%Cashtrail.Paginator.Page{entries: [%Cashtrail.Banking.Institution{name: "My Bank"}, ...]}
iex> list_institutions(entity, search: "My Legal Bank"})
%Cashtrail.Paginator.Page{entries: [%Cashtrail.Banking.Institution{legal_name: "My Legal Bank"}, ...]}
"""
@spec list_institutions(Entities.Entity.t(), keyword()) :: Paginator.Page.t()
def list_institutions(%Entities.Entity{} = entity, options \\ []) do
Banking.Institution
|> build_search(Keyword.get(options, :search), [:country, contact: [:name, :legal_name]])
|> Ecto.Queryable.to_query()
|> preload([], contact: :category)
|> Map.put(:prefix, to_prefix(entity))
|> Paginator.paginate(options)
end
@doc """
Gets a single institution.
Raises `Ecto.NoResultsError` if the Institution does not exist.
See `Cashtrail.Banking.Institution` to have more detailed info about
the returned struct.
## Arguments
* entity - The `%Cashtrail.Entities.Entity{}` that the institution references.
* id - A `string` that is the unique id of the institution to be found.
## Examples
iex> get_institution!(entity, 123)
%Institution{}
iex> get_institution!(entity, 456)
** (Ecto.NoResultsError)
"""
@spec get_institution!(Entities.Entity.t(), Ecto.UUID.t()) :: institution()
def get_institution!(%Entities.Entity{} = entity, id) do
Repo.get!(Banking.Institution, id, prefix: to_prefix(entity))
|> Repo.preload(contact: :category)
end
@doc """
Creates an institution.
## Arguments
* entity - The `%Cashtrail.Entities.Entity{}` that the institution references.
* params - A `map` with the params of the institution to be created:
* `:contact` or `:contact_id` (required):
* `:contact_id` - a `string` that is the uuid of the contact.
* `:contact` - a `map` with data about the contact to be created and referenced
by the institution. See `Cashtrail.Contacts.Contact` or `Cashtrail.Contacts.create_contact/2`
to have more information about accepted fields.
* `:country` - A `string` with the country where the institution is located.
* `:bank_code` - A `string` with the code of the institution in the country
that the institution is located.
* `:swift` - A `string` with the SWIFT code that identifies a particular
bank worldwide.
* `:logo_url` - A `string` with the URL of the institution's logo.
See `Cashtrail.Banking.Institution` to have more detailed info about
the fields.
## Examples
iex> create_institution(%{field: value})
{:ok, %Institution{}}
iex> create_institution(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
@spec create_institution(Entities.Entity.t(), map() | nil) ::
{:ok, institution()} | {:error, Ecto.Changeset.t(institution())}
def create_institution(%Entities.Entity{} = entity, attrs \\ %{}) do
%Banking.Institution{}
|> Banking.Institution.changeset(attrs)
|> Repo.insert(prefix: to_prefix(entity))
|> load_contact()
end
defp load_contact(
{:ok, %Banking.Institution{contact: %Ecto.Association.NotLoaded{}} = institution}
) do
{:ok, Repo.preload(institution, :contact)}
end
defp load_contact(result), do: result
@doc """
Updates an institution.
## Arguments
* institution - The `%Cashtrail.Banking.Institution{}` to be updated.
* params - A `map` with the field of the institution to be updated. See
`create_institution/2` to know about the params that can be given.
## Examples
iex> update_institution(institution, %{field: new_value})
{:ok, %Institution{}}
iex> update_institution(institution, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
@spec update_institution(institution(), map()) ::
{:ok, institution()} | {:error, Ecto.Changeset.t(institution())}
def update_institution(%Banking.Institution{} = institution, attrs) do
institution
|> Banking.Institution.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes an institution.
## Arguments
* institution - The `%Cashtrail.Banking.Institution{}` to be deleted.
## Examples
iex> delete_institution(institution)
{:ok, %Institution{}}
iex> delete_institution(institution)
{:error, %Ecto.Changeset{}}
"""
@spec delete_institution(institution()) ::
{:ok, institution()} | {:error, Ecto.Changeset.t(institution())}
def delete_institution(%Banking.Institution{} = institution) do
Repo.delete(institution)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking institution changes.
## Arguments
* institution - The `%Cashtrail.Banking.Institution{}` to be tracked.
## Examples
iex> change_institution(institution)
%Ecto.Changeset{data: %Institution{}}
"""
@spec change_institution(institution(), map()) ::
Ecto.Changeset.t(institution())
def change_institution(%Banking.Institution{} = institution, attrs \\ %{}) do
Banking.Institution.changeset(institution, attrs)
end
@doc """
Returns a `%Cashtrail.Paginator.Page{}` struct with a list of accounts in the `:entries` field.
If no accounts are found, an empty list is returned in the `:entries` field.
## Arguments
* entity - The `%Cashtrail.Entities.Entity{}` that the account references.
* options - A `keyword` list of the following options:
* `:filter` - filters by following attributes:
* `:type` or `"type"`
* `:status` or `"status"`
* `:currency` or `"currency"`
* `:institution_id` or `"institution_id"`
* `:search` - search accounts by `:description`.
* See `Cashtrail.Paginator.paginate/2` to know about the pagination options.
See `Cashtrail.Banking.Account` to have more detailed info about each field to be filtered or
searched.
## Examples
iex> list_accounts(entity)
%Cashtrail.Paginator.Page{entries: [%Cashtrail.Banking.Account{}, ...], ...}
iex> list_accounts(entity, page: 2)
%Cashtrail.Paginator.Page{entries: [%Cashtrail.Banking.Account{}, ...], page: 2}
iex> list_accounts(entity, filter: %{type: "cash"})
%Cashtrail.Paginator.Page{entries: [%Cashtrail.Banking.Account{type: :cash}, ...]}
iex> list_accounts(entity, filter: %{search: "my"})
%Cashtrail.Paginator.Page{entries: [%Cashtrail.Banking.Account{description: "my cash"}, ...]}
"""
@spec list_accounts(Entities.Entity.t(), keyword) :: Paginator.Page.t(account())
def list_accounts(%Entities.Entity{} = entity, options \\ []) do
Banking.Account
|> build_filter(Keyword.get(options, :filter), [:type, :currency, :institution_id])
|> build_search(Keyword.get(options, :search), [:description])
|> filter_by_status(Keyword.get(options, :filter), :status)
|> Ecto.Queryable.to_query()
|> Map.put(:prefix, to_prefix(entity))
|> Paginator.paginate(options)
end
@doc """
Gets a single account.
Raises `Ecto.NoResultsError` if the Account does not exist.
See `Cashtrail.Banking.Account` to have more detailed info about the struct returned.
## Examples
iex> get_account!(entity, 123)
%Cashtrail.Banking.Account{}
iex> get_account!(entity, 456)
** (Ecto.NoResultsError)
"""
@spec get_account!(Entities.Entity.t(), Ecto.UUID.t() | String.t()) :: account()
def get_account!(%Entities.Entity{} = entity, id) do
Repo.get!(Banking.Account, id, prefix: to_prefix(entity))
end
@doc """
Creates an account.
## Arguments
* entity - The `%Cashtrail.Entities.Entity{}` that the account references.
* params - A `map` with the params of the account to be created:
* `:description` (required)
* `:type` - A `string` or `atom` that is the type of account. It can receive `:cash`,
`:checking`, `:saving`, `:digital`, `:credit`, `:investment` or `:other`. Defaults to
`:cash`.
* `:status` - A `string` or `atom` that is the status of the account.
* `:initial_balance_amount` - A `number` with the initial balance value of the account.
* `:initial_balance_date` - A `date` with the initial balance date of the account. This cannot
be changed.
* `:avatar_url` - A `string` with the avatar url of the account.
* `:restricted_transaction_types` - A `list` of `string` or `atoms` with transaction types that
are allowed. Can receive `:income`, `:expense`, `:tax`, `:transfer`, `:exchange` or `:refund`.
* `:identifier` - A `map` with the data that identifies the account in the real world. The fields
are `:bank_code`, `:branch`, `:number`, `:swift` and `:iban`.
* `:currency` - The iso code of the currency that will be used by the account. This cannot be
changed.
* `:institution_id` - The id of the institution of the account.
* `:predicted_account_id` - The id of the account that will be predicted.
See `Cashtrail.Banking.Account` to have more detailed info about the fields.
## Examples
iex> create_account(entity, %{field: value})
{:ok, %Cashtrail.Banking.Account{}}
iex> create_account(entity, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
@spec create_account(Entities.Entity.t(), map()) ::
{:ok, account} | {:error, Ecto.Changeset.t(account())}
def create_account(%Entities.Entity{} = entity, attrs) do
%Banking.Account{}
|> Banking.Account.changeset(attrs)
|> Repo.insert(prefix: to_prefix(entity))
end
@doc """
Updates an account.
## Arguments
* account - The `%Cashtrail.Banking.Account{}` to be updated.
* params - A `map` with the fields of the account to be updated. See
`create_account/2` to know about the params that can be given.
## Examples
iex> update_account(account, %{field: new_value})
{:ok, %Account{}}
iex> update_account(account, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
@spec update_account(account, map) :: {:ok, account} | {:error, Ecto.Changeset.t(account())}
def update_account(%Banking.Account{} = account, attrs) do
account
|> Banking.Account.update_changeset(attrs)
|> Repo.update()
end
@doc """
Archives an account.
## Examples
iex> archive_account(account)
{:ok, %Account{}}
iex> archive_account(account)
{:error, %Ecto.Changeset{}}
"""
@spec archive_account(account) :: {:ok, account} | {:error, Ecto.Changeset.t(account())}
def archive_account(%Banking.Account{} = account) do
account
|> Banking.Account.archive_changeset()
|> Repo.update()
end
@doc """
Unarchives an account.
## Examples
iex> unarchive_account(account)
{:ok, %Account{}}
iex> unarchive_account(account)
{:error, %Ecto.Changeset{}}
"""
@spec unarchive_account(account) :: {:ok, account} | {:error, Ecto.Changeset.t(account())}
def unarchive_account(%Banking.Account{} = account) do
account
|> Banking.Account.unarchive_changeset()
|> Repo.update()
end
@doc """
Deletes an account.
## Arguments
* account - The `%Cashtrail.Banking.Account{}` to be deleted.
## Examples
iex> delete_account(account)
{:ok, %Account{}}
iex> delete_account(account)
{:error, %Ecto.Changeset{}}
"""
@spec delete_account(account) :: {:ok, account} | {:error, Ecto.Changeset.t(account())}
def delete_account(%Banking.Account{} = account) do
Repo.delete(account)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking account changes.
## Arguments
* account - The `%Cashtrail.Banking.Account{}` to be tracked.
## Examples
iex> change_account(account)
%Ecto.Changeset{data: %Account{}}
"""
@spec change_account(account, map() | nil) :: Ecto.Changeset.t(account())
def change_account(%Banking.Account{} = account, attrs \\ %{}) do
case Ecto.get_meta(account, :state) do
:built -> Banking.Account.changeset(account, attrs)
_ -> Banking.Account.update_changeset(account, attrs)
end
end
end
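# Usage sketch (illustrative only; the entity value and all params below are made
# up and are not the full set of accepted fields):
#
#     entity = %Cashtrail.Entities.Entity{}  # normally loaded through the Entities context
#     {:ok, institution} =
#       Cashtrail.Banking.create_institution(entity, %{country: "US", contact: %{name: "My Bank"}})
#     {:ok, account} =
#       Cashtrail.Banking.create_account(entity, %{description: "my cash", type: :cash})
#     Cashtrail.Banking.list_accounts(entity, filter: %{type: "cash"})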
|
apps/cashtrail/lib/cashtrail/banking.ex
| 0.831725
| 0.467818
|
banking.ex
|
starcoder
|
defmodule Mole.Content.Condition do
use Private
import Kernel, except: [to_string: 1]
@moduledoc "Helper functions for conditions."
require Integer
# no learning, standard feedback
defp normal, do: 7
@eds [:abcd, :duckling, :none, :both]
@feedback [:motivational, :standard, :none]
# the cartesian product of the above
@conditions for ed <- @eds, fb <- @feedback, do: {ed, fb}
# number of the above, just cached as a module attribute
@no_conditions length(@conditions)
@typedoc "A type of education. Always an atom"
@type ed :: :abcd | :duckling | :none | :both
@typedoc "Feedback? Always an atom"
@type feedback :: :motivational | :standard | :none
@doc "Return a random condition integer"
@spec random() :: integer()
def random, do: Enum.random(0..(@no_conditions - 1))
@doc "Return a tuple with the type of education and feedback."
@spec to_tuple(nil | integer()) :: {ed(), feedback()}
def to_tuple(nil), do: to_tuple(normal())
def to_tuple(index), do: Enum.at(@conditions, index)
@doc "Determine if a condition has feedback or not"
@spec feedback(nil | integer()) :: feedback()
def feedback(nil), do: :standard
def feedback(condition) when condition in [0, 3, 6, 9], do: :motivational
def feedback(condition) when condition in [1, 4, 7, 10], do: :standard
def feedback(condition) when condition in [2, 5, 8, 11], do: :none
@spec learning(nil | integer()) :: ed()
def learning(nil), do: :none
def learning(condition) when condition in 0..2, do: :abcd
def learning(condition) when condition in 3..5, do: :duckling
def learning(condition) when condition in 6..8, do: :none
def learning(condition) when condition in 9..11, do: :both
@doc "Give a user friendly string for the doctors."
@spec to_string(nil | integer() | tuple()) :: String.t()
def to_string(condition) when is_integer(condition),
do: condition |> to_tuple() |> to_string()
def to_string({ed, fb}), do: "learning: #{ed}, feedback: #{fb}"
def to_string(nil), do: "N/A"
@doc "Return the learning image for the condition at the page number"
@spec image_for(integer(), String.t()) :: String.t()
def image_for(condition, page) when is_integer(condition) do
{ed, _feedback} = to_tuple(condition)
learning = "#{ed}_#{page}.png"
case Enum.filter(images_available(), fn image -> image =~ learning end) do
[match] -> match
_ -> nil
end
end
@spec images_available() :: [String.t()]
defp images_available do
["#{:code.priv_dir(:mole)}", "static", "images", "*.png"]
|> Path.join()
|> Path.wildcard()
|> Enum.map(fn abs_name ->
"/#{abs_name |> Path.split() |> Enum.take(-2) |> Path.join()}"
end)
end
end
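# Usage sketch: a condition integer indexes into the 12-element cartesian product
# of @eds x @feedback, so index 7 (the "normal" condition above) means no learning
# with standard feedback.
#
#     Mole.Content.Condition.to_tuple(7)
#     #=> {:none, :standard}
#     Mole.Content.Condition.to_string(7)
#     #=> "learning: none, feedback: standard"
#     Mole.Content.Condition.feedback(7)
#     #=> :standard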
|
lib/mole/content/condition.ex
| 0.830663
| 0.594051
|
condition.ex
|
starcoder
|
defmodule Geo.Utils do
@moduledoc false
use Bitwise
@doc """
Turns a hex string or an integer of base 16 into its floating point
representation.
## Examples
iex> Geo.Utils.hex_to_float("40000000")
2.0
iex> Geo.Utils.hex_to_float(0x40000000)
2.0
iex> Geo.Utils.hex_to_float("3ff0000000000000")
1.0
iex> Geo.Utils.hex_to_float(0x3ff0000000000000)
1.0
"""
def hex_to_float(hex) when is_integer(hex) do
hex_to_float(Integer.to_string(hex, 16))
end
def hex_to_float(hex) when is_binary(hex) do
case bit_size(hex) do
x when x <= 64 ->
<<value::float-32>> = <<String.to_integer(hex, 16)::integer-32>>
value
128 ->
<<value::float-64>> = <<String.to_integer(hex, 16)::integer-64>>
value
end
end
@doc """
Turns a float into a hex value. The size can either be 32 or 64.
"""
def float_to_hex(float, 64) do
<<value::integer-64>> = <<float::float-64>>
value
end
def float_to_hex(float, 32) do
<<value::integer-32>> = <<float::float-32>>
value
end
@doc """
Reverses the byte order of the given hex string.
## Examples
iex> Geo.Utils.reverse_byte_order("00000004")
"40000000"
iex> Geo.Utils.reverse_byte_order("E6100000")
"000010E6"
"""
def reverse_byte_order("") do
""
end
def reverse_byte_order(hex) do
do_reverse_byte_order(hex, "")
end
defp do_reverse_byte_order("", acc) do
acc
end
defp do_reverse_byte_order(<<a, b, rest::binary>>, acc) do
do_reverse_byte_order(rest, <<a, b, acc::binary>>)
end
defp do_reverse_byte_order(<<a, rest::binary>>, acc) do
do_reverse_byte_order(rest, <<a, acc::binary>>)
end
@doc """
Pads the hex string with zeros on the left until it is `size` characters long.
"""
def pad_left(hex, size) when byte_size(hex) >= size do
hex
end
def pad_left(hex, size) do
String.duplicate("0", size - byte_size(hex)) <> hex
end
def binary_to_endian(<<48, 49>>) do
:ndr
end
def binary_to_endian(<<48, 48>>) do
:xdr
end
def hex_to_type(0x01) do
%Geo.Point{}
end
def hex_to_type(0x40_00_00_01) do
%Geo.PointM{}
end
def hex_to_type(0x80_00_00_01) do
%Geo.PointZ{}
end
def hex_to_type(0xC0_00_00_01) do
%Geo.PointZM{}
end
def hex_to_type(0x02) do
%Geo.LineString{}
end
def hex_to_type(0x80_00_00_02) do
%Geo.LineStringZ{}
end
def hex_to_type(0x03) do
%Geo.Polygon{}
end
def hex_to_type(0x80_00_00_03) do
%Geo.PolygonZ{}
end
def hex_to_type(0x04) do
%Geo.MultiPoint{}
end
def hex_to_type(0x80_00_00_04) do
%Geo.MultiPointZ{}
end
def hex_to_type(0x05) do
%Geo.MultiLineString{}
end
def hex_to_type(0x80_00_00_05) do
%Geo.MultiLineStringZ{}
end
def hex_to_type(0x06) do
%Geo.MultiPolygon{}
end
def hex_to_type(0x80_00_00_06) do
%Geo.MultiPolygonZ{}
end
def hex_to_type(0x07) do
%Geo.GeometryCollection{}
end
def type_to_hex(geom, true) do
value = 0x20000000
value + do_type_to_hex(geom)
end
def type_to_hex(geom, false) do
value = 0x00000000
value + do_type_to_hex(geom)
end
def do_type_to_hex(%Geo.Point{}) do
0x01
end
def do_type_to_hex(%Geo.PointM{}) do
0x40_00_00_01
end
def do_type_to_hex(%Geo.PointZ{}) do
0x80_00_00_01
end
def do_type_to_hex(%Geo.PointZM{}) do
0xC0_00_00_01
end
def do_type_to_hex(%Geo.LineString{}) do
0x02
end
def do_type_to_hex(%Geo.LineStringZ{}) do
0x80_00_00_02
end
def do_type_to_hex(%Geo.Polygon{}) do
0x03
end
def do_type_to_hex(%Geo.PolygonZ{}) do
0x80_00_00_03
end
def do_type_to_hex(%Geo.MultiPoint{}) do
0x04
end
def do_type_to_hex(%Geo.MultiPointZ{}) do
0x80_00_00_04
end
def do_type_to_hex(%Geo.MultiLineString{}) do
0x05
end
def do_type_to_hex(%Geo.MultiLineStringZ{}) do
0x80_00_00_05
end
def do_type_to_hex(%Geo.MultiPolygon{}) do
0x06
end
def do_type_to_hex(%Geo.MultiPolygonZ{}) do
0x80_00_00_06
end
def do_type_to_hex(%Geo.GeometryCollection{}) do
0x07
end
end
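# Usage sketch (values taken from the doctests above; the type-code example mirrors
# type_to_hex/2 with the SRID flag set):
#
#     Geo.Utils.hex_to_float("40000000")
#     #=> 2.0
#     Geo.Utils.float_to_hex(2.0, 32) |> Integer.to_string(16)
#     #=> "40000000"
#     Geo.Utils.type_to_hex(%Geo.Point{}, true)
#     #=> 0x20000001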
|
lib/geo/utils.ex
| 0.844136
| 0.622015
|
utils.ex
|
starcoder
|
if Code.ensure_loaded?(Plug) do
defmodule BtrzAuth.Plug.VerifyApiKey do
@moduledoc """
Looks for and validates a token found in the `x-api-key` header or query string, asking the accounts service to verify it and saving the resource in `conn.private[:account]`.
This, like all other Guardian plugs, requires a Guardian pipeline to be set up.
It requires an error handler as `error_handler`.
These can be set either:
1. Upstream on the connection with `plug Guardian.Pipeline`
2. Upstream on the connection with `Guardian.Pipeline.{put_module, put_error_handler, put_key}`
3. Inline with an option of `:module`, `:error_handler`, `:key`
If no x-api-key is found, the error handler will be called with
`auth_error(conn, {:unauthenticated, :api_key_not_found})`.
Once an x-api-key has been found, it will be verified with the accounts service;
if no matching account is found, the error handler will be called with
`auth_error(conn, {:unauthenticated, :account_not_found})`.
Options:
* `search_in` - atom. Where to look for the x-api-key (`:header`, `:query` or `:all`). Defaults to `:all`.
* `allow_blank` - boolean. If set to true, will try to load a resource once the x-api-key is found, but will not fail if no resource is found. Defaults to `false`.
### Example
```elixir
# default search and verify in all (header and query string)
plug BtrzAuth.Plug.VerifyApiKey
# search only in header
plug BtrzAuth.Plug.VerifyApiKey, search_in: :header
```
"""
import Plug.Conn
alias Guardian.Plug.Pipeline
alias BtrzAuth.Services.Accounts
require Logger
@spec init(Keyword.t()) :: Keyword.t()
def init(opts), do: opts
@spec call(Plug.Conn.t(), Keyword.t()) :: Plug.Conn.t()
def call(conn, opts) do
Logger.debug("accessing VerifyApiKey plug..")
token_config = Application.get_env(:btrz_ex_auth_api, :token)
allow_blank = Keyword.get(opts, :allow_blank, false)
search_in = Keyword.get(opts, :search_in, :all)
case get_api_key(conn, search_in) do
nil ->
respond({{:error, :api_key_not_found}, allow_blank, conn, opts})
api_key ->
if Mix.env() === :test do
# only for test
Logger.debug("using VerifyApiKey in test mode")
account = Map.get(conn.private, :account, %{})
merged_account =
Keyword.get(token_config, :test_resource, %{})
|> underscore_data()
|> Map.merge(account)
conn = put_private(conn, :account, merged_account)
respond({{:ok, :api_key}, allow_blank, conn, opts})
else
case Accounts.get_account_auth_info(api_key) do
{:ok, result} when result == %{} ->
Logger.error("account not found for the provided api_key: #{api_key}")
respond({{:error, :account_not_found}, allow_blank, conn, opts})
{:ok, result} ->
Logger.info("account found for the provided api_key: #{api_key}")
Logger.debug("passing VerifyApiKey plug..")
conn = put_private(conn, :account, underscore_data(result))
respond({{:ok, :api_key}, allow_blank, conn, opts})
{:error, error} ->
Logger.error(
"account not found for the provided api_key: #{api_key}, reason: #{
inspect(error)
}"
)
respond({{:error, :account_not_found}, allow_blank, conn, opts})
end
end
end
end
defp underscore_data(data) do
Enum.reduce(data, %{}, fn {key, val}, acc -> Map.put(acc, Macro.underscore(key), val) end)
end
defp get_api_key(conn, :header), do: get_api_key_from_header(conn)
defp get_api_key(conn, :query), do: get_api_key_from_query(conn)
defp get_api_key(conn, _), do: get_api_key_from_header(conn) || get_api_key_from_query(conn)
defp get_api_key_from_header(conn) do
case get_req_header(conn, "x-api-key") do
[] -> nil
api_keys -> hd(api_keys)
end
end
defp get_api_key_from_query(conn) do
conn = fetch_query_params(conn)
conn.query_params["x-api-key"]
end
defp respond({{:ok, _}, _allow_blank, conn, _opts}), do: conn
defp respond({{:error, :account_not_found}, _allow_blank = true, conn, _opts}), do: conn
defp respond({{:error, :account_not_found}, _allow_blank = false, conn, opts}),
do: respond_error(conn, :account_not_found, opts)
defp respond({{:error, :api_key_not_found}, _allow_blank, conn, opts}),
do: respond_error(conn, :api_key_not_found, opts)
defp respond_error(conn, reason, opts) do
conn
|> Pipeline.fetch_error_handler!(opts)
|> apply(:auth_error, [conn, {:unauthenticated, reason}])
|> halt()
end
end
end
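# Usage sketch (the pipeline module, otp app and error handler names below are
# assumptions, not part of this library): the plug is normally mounted inside a
# Guardian pipeline.
#
#     defmodule MyAppWeb.ApiAuthPipeline do
#       use Guardian.Plug.Pipeline,
#         otp_app: :my_app,
#         module: MyApp.Guardian,
#         error_handler: MyAppWeb.AuthErrorHandler
#
#       # look for the x-api-key only in the request header
#       plug BtrzAuth.Plug.VerifyApiKey, search_in: :header
#     end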
|
lib/plug/verify_api_key.ex
| 0.781956
| 0.815122
|
verify_api_key.ex
|
starcoder
|
defmodule EliVndb.Client do
@moduledoc """
VNDB API Client Module.
[VNDB API Refernce](https://vndb.org/d11)
## `EliVndb.Client` types
### Global
In order to start a global client, use `EliVndb.Client.start_link/1` or `EliVndb.Client.start_link/3` without options or with `:global` set to true.
Since the client is registered globally, once it is started all other API functions become available.
As VNDB allows login only once, you need to re-create the client in order to re-login.
You can use `EliVndb.Client.stop/0` to terminate the currently running global client.
### Local
In order to start a local client, use `EliVndb.Client.start_link/1` or `EliVndb.Client.start_link/3` with `:global` set to false.
To use a local client, you'll need to provide its pid in all API calls.
**NOTE:** VNDB allows only up to 10 clients from the same IP, so the global client is the preferable way to work with the VNDB API.
## Available commands
### dbstats
Just retrieves statistics from VNDB.
### get
Each get command requires to specify flags & filters.
Following default values are used by EliVndb:
* `flags = ["basic"]`
* `filters = (id >= 1)`
On success it returns `{:results, %{...}}`
### set
Each set command requires you to provide the ID of the modified object.
On success it returns `{:ok, %{...}}`
**NOTE:** For set commands the successful response currently contains an empty payload, so you can safely ignore it.
## Result
Each function returns a map with string keys.
"""
require Logger
@behaviour GenServer
use GenServer
@name __MODULE__
@host 'api.vndb.org'
#@port 19_534
@ssl_port 19_535
@default_opts [:binary, active: false, reuseaddr: true]
@initial_state %{socket: nil, queue: :queue.new()}
@login_args %{protocol: 1, client: "eli", clientver: "0.1"}
# All VNDB messages end with this byte
@msg_end <<4>>
##Constants
@doc "Returns name of global client."
def global_name, do: @name
##Client API
@typedoc "Client initialization options"
@type start_options :: [global: boolean()]
@typedoc "Get command options"
@type get_options :: [type: iodata(), flags: list(iodata()), filters: iodata(), options: Map.t()]
@typedoc "Set command options"
@type set_options :: [type: iodata(), id: integer, fields: Map.t()]
@spec start_link(start_options()) :: GenServer.on_start()
@doc """
Starts VNDB API Client without authorization.
Note that some VNDB APIs may require you to provide login/password.
Options:
* `:global` - whether to register client globally.
"""
def start_link(opts \\ []) do
start_link(nil, nil, opts)
end
@spec start_link(binary | nil, binary | nil, start_options()) :: GenServer.on_start()
@doc """
Starts VNDB API Client with provided credentials.
Parameters:
* `user` - Username to use for login. To omit provide `nil`
* `password` - Password to use for login. To omit provide `nil`
Options:
* `:global` - whether to register client globally.
"""
def start_link(user, password, opts \\ []) do
initial_state = Map.merge(@initial_state, %{user: user, password: password})
case Keyword.get(opts, :global, true) do
true -> GenServer.start_link(__MODULE__, initial_state, name: @name)
false -> GenServer.start_link(__MODULE__, initial_state)
end
end
@spec stop(GenServer.server()) :: :ok
@doc """
Stops particular client
"""
def stop(pid) do
GenServer.stop(pid)
end
@spec stop() :: :ok
@doc """
Stops global client.
Does nothing if client hasn't been started.
"""
def stop() do
case GenServer.whereis(@name) do
nil -> :ok
_ -> stop(@name)
end
end
@spec dbstats(GenServer.server()) :: term()
@doc """
Retrieves VNDB stats using particular client.
Arguments:
* `pid` - Client identifier. Global is used as default.
[Reference](https://vndb.org/d11#4)
On success returns: `{:dbstats, map()}`
"""
def dbstats(pid \\ @name) do
GenServer.call(pid, :dbstats)
end
@spec get(get_options(), GenServer.server()) :: term()
@doc """
Performs GET command.
[Reference](https://vndb.org/d11#5)
Arguments:
* `options` - Keyword list of command options. See below.
* `pid` - Client identifier. Global is used as default.
Options:
* `:type` - Command type. See VNDB API for possible values.
* `:flags` - Command flags as array of strings. Possible values depends on `:type`.
* `:filters` - Command filters as string. Possible values depends on `:type`.
* `:options` - Command options as map. VNDB API allows following keys: `page: integer`, `results: integer`, `sort: string`, `reverse: boolean`
Following default values are used by EliVndb:
* `flags = ["basic"]`
* `filters = id >= 1`
On success returns: `{:results, map()}`
"""
def get(options, pid \\ @name) do
GenServer.call(pid, {:get, options})
end
@spec get_vn(get_options(), GenServer.server()) :: term()
@doc """
Performs GET command with vn type.
The same as `EliVndb.Client.get/2`
"""
def get_vn(options, pid \\ @name) do
get(Keyword.put(options, :type, "vn"), pid)
end
@spec get_release(get_options(), GenServer.server()) :: term()
@doc """
Performs GET command with release type.
The same as `EliVndb.Client.get/2`
"""
def get_release(options, pid \\ @name) do
get(Keyword.put(options, :type, "release"), pid)
end
@spec get_producer(get_options(), GenServer.server()) :: term()
@doc """
Performs GET command with producer type.
The same as `EliVndb.Client.get/2`
"""
def get_producer(options, pid \\ @name) do
get(Keyword.put(options, :type, "producer"), pid)
end
@spec get_character(get_options(), GenServer.server()) :: term()
@doc """
Performs GET command with character type.
The same as `EliVndb.Client.get/2`
"""
def get_character(options, pid \\ @name) do
get(Keyword.put(options, :type, "character"), pid)
end
@spec get_staff(get_options(), GenServer.server()) :: term()
@doc """
Performs GET command with staff type.
The same as `EliVndb.Client.get/2`
"""
def get_staff(options, pid \\ @name) do
get(Keyword.put(options, :type, "staff"), pid)
end
@spec get_user(get_options(), GenServer.server()) :: term()
@doc """
Performs GET command with user type.
The same as `EliVndb.Client.get/2`
"""
def get_user(options, pid \\ @name) do
get(Keyword.put(options, :type, "user"), pid)
end
@spec get_votelist(get_options(), GenServer.server()) :: term()
@doc """
Performs GET command with votelist.
The same as `EliVndb.Client.get/2`
"""
def get_votelist(options, pid \\ @name) do
get(Keyword.put(options, :type, "votelist"), pid)
end
@spec get_vnlist(get_options(), GenServer.server()) :: term()
@doc """
Performs GET command with vnlist type.
The same as `EliVndb.Client.get/2`
"""
def get_vnlist(options, pid \\ @name) do
get(Keyword.put(options, :type, "vnlist"), pid)
end
@spec get_wishlist(get_options(), GenServer.server()) :: term()
@doc """
Performs GET command with wishlist type.
The same as `EliVndb.Client.get/2`
"""
def get_wishlist(options, pid \\ @name) do
get(Keyword.put(options, :type, "wishlist"), pid)
end
@spec set(set_options(), GenServer.server()) :: term()
@doc """
Performs SET command.
[Reference](https://vndb.org/d11#6)
Arguments:
* `options` - Keyword list of command options. See below.
* `pid` - Client identifier. Global is used as default.
Options:
* `:type` - Command type. See VNDB API for possible values.
* `:id` - Identifier of object on which to perform SET.
* `:fields` - Map of object's field to its new value.
"""
def set(options, pid \\ @name) do
GenServer.call(pid, {:set, options})
end
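# Example (the id and fields below are illustrative; see the VNDB reference for
# the keys accepted by each type):
#
#     EliVndb.Client.set(type: "votelist", id: 17, fields: %{vote: 85})
#     # or, equivalently:
#     EliVndb.Client.set_votelist(id: 17, fields: %{vote: 85})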
@spec set_votelist(set_options(), GenServer.server()) :: term()
@doc """
Performs SET command with votelist type.
The same as `EliVndb.Client.set/2`
"""
def set_votelist(options, pid \\ @name) do
set(Keyword.put(options, :type, "votelist"), pid)
end
@spec set_vnlist(set_options(), GenServer.server()) :: term()
@doc """
Performs SET command with vnlist type.
The same as `EliVndb.Client.set/2`
"""
def set_vnlist(options, pid \\ @name) do
set(Keyword.put(options, :type, "vnlist"), pid)
end
@spec set_wishlist(set_options(), GenServer.server()) :: term()
@doc """
Performs SET command with wishlist type.
The same as `EliVndb.Client.set/2`
"""
def set_wishlist(options, pid \\ @name) do
set(Keyword.put(options, :type, "wishlist"), pid)
end
## Server callbacks
def init(state) do
Logger.info 'Connect to VNDB'
case :ssl.connect(@host, @ssl_port, @default_opts, 5000) do
{:ok, socket} ->
vndb_log_in(socket, state.user, state.password)
:ssl.setopts(socket, active: true)
{:ok, %{state | socket: socket}}
error ->
Logger.warn fn -> 'Failed to connect with VNDB. Error=#{inspect(error)}' end
:timer.sleep(:timer.seconds(30))
init(state)
end
end
def handle_call(:dbstats, from, %{queue: queue} = state) do
:ok = :ssl.send(state.socket, vndb_msg("dbstats"))
{:noreply, Map.put(state, :queue, :queue.in(from, queue))}
end
def handle_call({:get, args}, from, %{queue: queue} = state) do
msg = vndb_msg("get #{args[:type]} #{get_flags(args)} #{get_filters(args)} #{get_options(args)}")
Logger.info fn -> 'Send get vn=#{msg}' end
:ok = :ssl.send(state.socket, msg)
{:noreply, Map.put(state, :queue, :queue.in(from, queue))}
end
def handle_call({:set, args}, from, %{queue: queue} = state) do
msg = vndb_msg("set #{args[:type]} #{args[:id]} #{set_fields(args)}")
Logger.info fn -> 'Send set vn=#{msg}' end
:ok = :ssl.send(state.socket, msg)
{:noreply, Map.put(state, :queue, :queue.in(from, queue))}
end
def handle_info({:ssl, _socket, msg}, %{queue: queue} = state) do
{{:value, client}, new_queue} = :queue.out(queue)
Logger.info fn -> "VNDB message=#{msg}" end
result = vndb_msg_parse(msg)
GenServer.reply(client, result)
{:noreply, Map.put(state, :queue, new_queue)}
end
def handle_info({:ssl_closed, _socket}, %{queue: queue} = state) do
# Reconnect and clean queue
Logger.warn fn -> "Connection toward VNDB is closed. Try to re-connect" end
# Reply to user in case VNDB disconnects in the middle of request
# as it is quite possible.
case :queue.out(queue) do
{{:value, client}, _} -> GenServer.reply(client, {:error, "VNDB disconnected"})
_ -> nil
end
{:ok, state} = init(state)
{:noreply, Map.put(state, :queue, :queue.new())}
end
def handle_info({:ssl_error, _socket, reason}, state) do
Logger.error fn -> "SSL Error happend. Reason: #{reason}" end
{:noreply, state}
end
def handle_info(msg, state) do
Logger.warn fn -> "Received unhandled message=#{inspect(msg)}" end
{:noreply, state}
end
## Utils
defp vndb_log_in(socket, user, password) do
login_args = login_args(user, password)
msg = vndb_msg("login", login_args)
Logger.info fn -> 'Send login=#{msg}' end
:ok = :ssl.send(socket, msg)
{:ok, data} = :ssl.recv(socket, 0)
Logger.info fn -> 'Login response=#{data}' end
end
@spec vndb_msg_parse(String.t()) :: tuple()
defp vndb_msg_parse(msg) do
case String.split(String.trim_trailing(msg, @msg_end), " ", parts: 2) do
[name] -> {String.to_atom(name), %{}} # For consistency sake let's return empty map.
[name, value] -> {String.to_atom(name), Poison.decode!(value)}
end
end
@spec vndb_msg(String.t()) :: String.t()
defp vndb_msg(command) do command <> @msg_end end
@spec vndb_msg(String.t(), map()) :: String.t()
defp vndb_msg(command, args) do "#{command} #{Poison.encode!(args)}" <> @msg_end end
# Login utils
@spec login_args(nil, nil) :: map()
defp login_args(nil, nil) do @login_args end
@spec login_args(String.t(), String.t()) :: map()
defp login_args(username, password) do
Map.merge(@login_args, %{username: username, password: password})
end
# Get utils
@spec get_flags(keyword()) :: binary()
defp get_flags(args) do
case Keyword.get(args, :flags) do
nil -> "basic"
flags -> Enum.join(flags, ",")
end
end
@spec get_filters(keyword()) :: binary()
defp get_filters(args) do
case Keyword.get(args, :filters) do
nil -> "(id >= 1)"
filters -> filters
end
end
@spec get_options(keyword()) :: iodata()
defp get_options(args) do
case Keyword.get(args, :options) do
nil -> ""
opts when opts == %{} -> ""
opts -> Poison.encode!(opts)
end
end
# Set utils
@spec set_fields(keyword()) :: iodata()
defp set_fields(args) do
case Keyword.get(args, :fields) do
nil -> ""
opts when opts == %{} -> ""
opts -> Poison.encode!(opts)
end
end
end
# Source file: lib/vndb/client.ex
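A minimal usage sketch for the `EliVndb.Client` set helpers above. It assumes the client GenServer has already been started and registered under its default name; the VN id and the wishlist/vnlist fields are illustrative only.
# Add VN 17 to the wishlist with priority 0 ("high"); set_wishlist/1 fills in type: "wishlist".
EliVndb.Client.set_wishlist(id: 17, fields: %{priority: 0})

# Mark the same VN as finished on the vnlist; set_vnlist/1 fills in type: "vnlist".
EliVndb.Client.set_vnlist(id: 17, fields: %{status: 2, notes: "finished"})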
defmodule Kaur.Environment do
@moduledoc """
Utilities for working with configuration allowing environment variables.
* `{:system, something}`: will load the environment variable stored in `something`
* `value`: will return the value
"""
alias Kaur.Result
@doc ~S"""
Reads the value or environment variable for the `key` in `application`'s environment.
### Examples
If we imagine a config file like:
# config/config.exs
config :my_app, :my_key, {:system, "MY_KEY"}
config :my_app, :my_key2, "MY_VALUE"
iex> Kaur.Environment.read(:my_app, :my_key)
{:ok, "VALUE STORED IN MY_KEY"}
iex> Kaur.Environment.read(:my_app, :my_key2)
{:ok, "MY_VALUE"}
iex> Kaur.Environment.read(:my_app, :something_else)
{:error, :no_value}
"""
@spec read(atom(), atom()) :: Result.t(any, any)
def read(application, key) do
application
|> Application.get_env(key)
|> Result.from_value()
|> Result.and_then(&load_environment_variable/1)
end
@doc ~S"""
Reads the value or environment variable for the `key` in `application`'s environment.
### Examples
If we imagine a config file like:
# config/config.exs
config :my_app, :my_key, secret: {:system, "MY_KEY"}
config :my_app, :my_key2, secret: "MY_VALUE"
iex> Kaur.Environment.read(:my_app, :my_key, [:secret])
{:ok, "VALUE STORED IN MY_KEY"}
iex> Kaur.Environment.read(:my_app, :my_key2, [:secret])
{:ok, "MY_VALUE"}
iex> Kaur.Environment.read(:my_app, :something_else)
{:error, :no_value}
"""
@spec read(atom(), atom(), [atom()]) :: Result.t(any, any)
def read(application, key, sub_keys) do
application
|> Application.get_env(key)
|> Result.from_value()
|> Result.and_then(&deep_get(&1, sub_keys))
|> Result.and_then(&load_environment_variable/1)
end
defp load_environment_variable({:system, environment_variable}) do
environment_variable
|> System.get_env()
|> Result.from_value()
end
defp load_environment_variable(value) do
Result.from_value(value)
end
defp deep_get(values, sub_keys) do
values
|> get_in(sub_keys)
|> Result.from_value()
end
end
# Source file: lib/kaur/environment.ex
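A minimal runtime sketch for `Kaur.Environment` above; the application name, keys, and environment variable are hypothetical and assume the config shown in the comments is compiled in.
# config/config.exs (hypothetical):
#   config :my_app, :database_url, {:system, "DATABASE_URL"}
#   config :my_app, :pool_size, 10

System.put_env("DATABASE_URL", "ecto://localhost/my_db")

{:ok, "ecto://localhost/my_db"} = Kaur.Environment.read(:my_app, :database_url)
{:ok, 10} = Kaur.Environment.read(:my_app, :pool_size)
{:error, :no_value} = Kaur.Environment.read(:my_app, :missing_key)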
defmodule Explot do
@moduledoc """
The main module of this package. It provides an easy way to use Python's Matplotlib.
It allows sending arbitrary commands to be interpreted by Python.
It also provides functions that make the most common Matplotlib functionality easy to use.
More functions and parameters may be added in the future, but this module will not wrap
all the functionality of Matplotlib (which is huge).
@doc """
Returns a plotter which can receive plotting commands
"""
def new do
python_script = """
import sys
import matplotlib.pyplot as plt
for line in sys.stdin:
eval(line)
"""
cmd = "python3 -c \"#{python_script}\""
port_to_python = Port.open({:spawn, cmd}, [:binary])
{:ok, agent} = Agent.start fn -> Map.new([port: port_to_python]) end
agent
end
@doc """
Sets the label on the X axis of the plot. This must be set up before showing the plot.
"""
def xlabel(agent, label) do
plot_command(agent, "xlabel('#{label}')")
end
@doc """
Sets the label on the Y axis of the plot. This must be set up before showing the plot.
"""
def ylabel(agent, label) do
plot_command(agent, "ylabel('#{label}')")
end
@doc """
Sets the title of the plot. This must be set up before showing the plot.
"""
def title(agent, label) do
plot_command(agent, "title('#{label}')")
end
@doc """
Adds a list of data with a name to the plot.
"""
def add_list(agent, list, list_name) do
plot_command(agent, "plot(#{to_python_array(list)}, label='#{list_name}')")
end
@doc """
Adds a list of labels to the X axis of the plot.
The difference with the xlabel function is that xlabel names the whole axis while this function
names different points along the axis.
For instance xlabel may be "Date" while x_axis_labels are 2016-03-12, 2016-06-15, 2016-09-15, etc.
"""
def x_axis_labels(agent, array_of_labels) do
{labels_available, array_of_indexes} = limit_indexes(array_of_labels)
labels_to_print = to_python_array(labels_available)
plot_command(agent, "xticks(#{to_python_array(array_of_indexes)}, #{labels_to_print})") #, rotation=60)")
end
@doc """
Shows the plot and kills the agent.
"""
def show(agent) do
plot_command(agent, "grid(True)")
plot_command(agent, "legend()")
plot_command(agent, "show()")
Port.close(port(agent))
Agent.stop(agent, :normal)
end
@doc """
Allows sending commands to the plotter. Provides flexibility for advanced users
"""
def plot_command(agent, command) do
send_command(agent, "plt.#{command}")
end
@doc """
Allows sending arbitrary commands to the python process. Use with care.
"""
def send_command(agent, command) do
true = Port.command(port(agent), "#{command}\n")
end
defp port(agent) do
Agent.get(agent, &Map.get(&1, :port))
end
defp to_python_array([h | t]) when is_number(h) do
comma_separated = [h | t] |> Enum.join(", ")
"[#{comma_separated}]"
end
defp to_python_array([h | t]) when is_binary(h) do
comma_separated = [h | t] |> Enum.map(fn(x) -> "'#{x}'" end) |> Enum.join(", ")
"[#{comma_separated}]"
end
defp to_python_array([h | t]) when is_map(h) do
comma_separated = [h | t] |> Enum.map(fn(x) -> "'#{Date.to_iso8601(x)}'" end) |> Enum.join(", ")
"[#{comma_separated}]"
end
# Limits the amount of indexes shown in the graph so data is readable
defp limit_indexes(array) do
divisor = Enum.max([round(Float.floor(length(array) / 10)), 1])
data = Enum.take_every(array, divisor)
indexes = Enum.take_every(Enum.to_list(0..(length(array) - 1)), divisor)
{data, indexes}
end
end
# Source file: lib/explot.ex
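A minimal usage sketch for `Explot` above; it assumes `python3` and Matplotlib are installed on the host, and the plotted data is made up.
plot = Explot.new()
Explot.title(plot, "Squares")
Explot.xlabel(plot, "n")
Explot.ylabel(plot, "n squared")
Explot.add_list(plot, [1, 4, 9, 16, 25], "n^2")
Explot.x_axis_labels(plot, ["one", "two", "three", "four", "five"])
# Opens the Matplotlib window, then closes the port and stops the agent.
Explot.show(plot)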
defmodule LiveViewStudio.Flights do
def search_by_number(number) do
list_flights()
|> Enum.filter(&(&1.number == number))
end
def search_by_airport(airport) do
list_flights()
|> Enum.filter(&(&1.origin == airport || &1.destination == airport))
end
def list_flights do
[
%{
number: "450",
origin: "DEN",
destination: "ORD",
departure_time: Timex.shift(Timex.now(), days: 1),
arrival_time: Timex.shift(Timex.now(), days: 1, hours: 2)
},
%{
number: "450",
origin: "DEN",
destination: "ORD",
departure_time: Timex.shift(Timex.now(), days: 2),
arrival_time: Timex.shift(Timex.now(), days: 2, hours: 2)
},
%{
number: "450",
origin: "DEN",
destination: "ORD",
departure_time: Timex.shift(Timex.now(), days: 3),
arrival_time: Timex.shift(Timex.now(), days: 3, hours: 2)
},
%{
number: "860",
origin: "DFW",
destination: "ORD",
departure_time: Timex.shift(Timex.now(), days: 1),
arrival_time: Timex.shift(Timex.now(), days: 1, hours: 3)
},
%{
number: "860",
origin: "DFW",
destination: "ORD",
departure_time: Timex.shift(Timex.now(), days: 2),
arrival_time: Timex.shift(Timex.now(), days: 2, hours: 3)
},
%{
number: "860",
origin: "DFW",
destination: "ORD",
departure_time: Timex.shift(Timex.now(), days: 3),
arrival_time: Timex.shift(Timex.now(), days: 3, hours: 3)
},
%{
number: "740",
origin: "DAB",
destination: "DEN",
departure_time: Timex.shift(Timex.now(), days: 1),
arrival_time: Timex.shift(Timex.now(), days: 1, hours: 4)
},
%{
number: "740",
origin: "DAB",
destination: "DEN",
departure_time: Timex.shift(Timex.now(), days: 2),
arrival_time: Timex.shift(Timex.now(), days: 2, hours: 4)
},
%{
number: "740",
origin: "DAB",
destination: "DEN",
departure_time: Timex.shift(Timex.now(), days: 3),
arrival_time: Timex.shift(Timex.now(), days: 3, hours: 4)
}
]
end
end
# Source file: live_view_studio/lib/live_view_studio/flights.ex
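A quick usage sketch for the `LiveViewStudio.Flights` fixtures above:
LiveViewStudio.Flights.search_by_number("450") |> length()
# => 3

LiveViewStudio.Flights.search_by_airport("DFW") |> Enum.map(& &1.number)
# => ["860", "860", "860"]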
defmodule ComplexNum do
@moduledoc """
Complex Numbers.
## Cartesian vs Polar
There are two kinds of representations for Complex Numbers:
- Cartesian, of the form `a + bi`. (Storing the `real` and `imaginary` parts of the complex number)
- Polar, of the form `r * e^(i*phi)`. (storing the `magnitude` and the `angle` of the complex number)
Polar form is very useful to perform fast multiplications, division and integer powers with.
Also, it obviously allows for O(1) precise computation of the `magnitude` and the `angle`.
Cartesian form, on the other hand, allows precise addition and subtraction besides multiplication and division.
Also, it obviously allows for O(1) precise computation of the `real` and `imaginary` parts.
Conversions between these two representations are possible, but _lossy_:
they involve trigonometry and square roots, which means that precision is lost.
(ComplexNum converts the internal data types to floats and back to perform these operations.)
## Internal data types
ComplexNum uses the [Numbers](https://github.com/Qqwy/elixir_number/) library,
which means that the `real`/`imaginary` (resp. `magnitude`/`angle`) can be of any
data type that implements Numbers' `Numeric` behaviour. This means that
Integers, Floats, arbitrary precision decimals defined by the Decimals package,
rationals defined by the Ratio package, etc. can be used.
ComplexNum itself also follows the Numeric behaviour, which means that it can be used inside any container that uses Numbers.
(Including inside ComplexNum itself, but [who would do such a thing?](https://en.wikipedia.org/wiki/Quaternion#Quaternions_as_pairs_of_complex_numbers))
"""
alias ComplexNum.{Cartesian, Polar}
defstruct [:real, :imaginary, mode: Cartesian]
def new(real, imaginary \\ 0, make_polar \\ :cartesian)
def new(real, imaginary, :cartesian), do: Cartesian.new(real, imaginary)
def new(real, imaginary, :polar), do: Polar.new(real, imaginary)
def coerce(ca = %ComplexNum{mode: Cartesian}, num), do: {ca, new(num)}
def coerce(ca = %ComplexNum{mode: Polar}, num), do: {ca, new(num, 0, :polar)}
def coerce(num, cb = %ComplexNum{mode: Cartesian}), do: {new(num), cb}
def coerce(num, cb = %ComplexNum{mode: Polar}), do: {new(num, 0, :polar), cb}
@doc """
Adds two `ComplexNum`s together.
If both are Cartesian, this is a precise operation.
If one or both are Polar, this is a lossy operation, as they are first converted to Cartesian.
"""
def add(ca, cb)
@doc """
Subtracts one `ComplexNum` from another.
If both are Cartesian, this is a precise operation.
If one or both are Polar, this is a lossy operation, as they are first converted to Cartesian.
"""
def sub(ca, cb)
@doc """
Multiplies `ca` by `cb`. This is a precise operation for numbers in both Cartesian and Polar forms.
"""
def mult(ca, cb)
@doc """
Divides `ca` by `cb`. This is a precise operation for numbers in both Cartesian and Polar forms.
"""
def div(ca, cb)
operations_that_convert_polar_to_cartesian = [add: true, sub: true, mult: false, div: false]
for {operation, even_if_both_are_polar?} <- operations_that_convert_polar_to_cartesian do
# Polar + Polar
if even_if_both_are_polar? do
def unquote(operation)(ca = %ComplexNum{mode: Polar}, cb = %ComplexNum{mode: Polar}) do
Cartesian.unquote(operation)(Polar.to_cartesian(ca), Polar.to_cartesian(cb))
end
else
def unquote(operation)(ca = %ComplexNum{mode: Polar}, cb = %ComplexNum{mode: Polar}) do
Polar.unquote(operation)(ca, cb)
end
end
# Polar + Cartesian
def unquote(operation)(ca = %ComplexNum{mode: Polar}, cb = %ComplexNum{mode: Cartesian}) do
Cartesian.unquote(operation)(Polar.to_cartesian(ca), cb)
end
# Cartesian + Polar
def unquote(operation)(ca = %ComplexNum{mode: Cartesian}, cb = %ComplexNum{mode: Polar}) do
Cartesian.unquote(operation)(ca, Polar.to_cartesian(cb))
end
# Cartesian + Cartesian
def unquote(operation)(ca = %ComplexNum{mode: Cartesian}, cb = %ComplexNum{mode: Cartesian}) do
Cartesian.unquote(operation)(ca, cb)
end
# PolarOrCartesian + AnyNumeric
def unquote(operation)(ca = %ComplexNum{mode: mode}, b) do
mode.unquote(operation)(ca, mode.new(b))
end
# AnyNumeric + PolarOrCartesian
def unquote(operation)(a, cb = %ComplexNum{mode: mode}) do
mode.unquote(operation)(mode.new(a), cb)
end
end
@doc """
The absolute value of a Complex Number `ca` has the magnitude of `ca` as its real part,
and `0` as its imaginary part.
This is a precise operation for numbers in Polar form, but a lossy operation for numbers in Cartesian form.
"""
def abs(complex)
@doc """
The negation of a Complex Number: Both the real and imaginary parts are negated.
This is a precise operation for numbers in Cartesian form, but a lossy operation for numbers in Polar form.
"""
def minus(complex)
@doc """
Returns the magnitude of the Complex Number.
This is a precise operation for numbers in Polar form, but a lossy operation for numbers in Cartesian form.
If you only need to e.g. sort on magnitudes, consider `magnitude_squared/1` instead, which is also precise for numbers in Cartesian form.
"""
def magnitude(complex)
@doc """
The squared magnitude of the Complex Number.
This is a precise operation for both Cartesian and Polar form.
"""
def magnitude_squared(complex)
@doc """
Returns the `angle` of the complex number.
This is a precise operation for numbers in Polar form, but a lossy operation for numbers in Cartesian form.
"""
def angle(complex)
unary_operations = [abs: false, minus: true, magnitude: false, angle: false, magnitude_squared: false]
for {operation, convert_polar_to_cartesian?} <- unary_operations do
if convert_polar_to_cartesian? do
def unquote(operation)(ca = %ComplexNum{mode: Polar}) do
Cartesian.unquote(operation)(Polar.to_cartesian(ca))
end
else
def unquote(operation)(ca = %ComplexNum{mode: Polar}) do
Polar.unquote(operation)(ca)
end
end
def unquote(operation)(ca = %ComplexNum{mode: Cartesian}) do
Cartesian.unquote(operation)(ca)
end
end
@doc """
Power function: computes `base^exponent`,
where `base` is a Complex Number,
and `exponent` _has_ to be an integer.
This means that it is impossible to calculate roots by using this function.
`pow` is fast (constant time) for numbers in Polar form.
For numbers in Cartesian form, the Exponentiation by Squaring algorithm is used, which performs `log n` multiplications.
"""
def pow(base = %ComplexNum{mode: Polar}, exponent) when is_integer(exponent) do
Polar.pow(base, exponent)
end
def pow(base = %ComplexNum{mode: Cartesian}, exponent) when is_integer(exponent) do
Cartesian.pow(base, exponent)
end
@doc """
Converts a Complex Number to Cartesian Form.
This is a lossy operation (unless the number already is in Cartesian form).
"""
def to_cartesian(ca = %ComplexNum{mode: Cartesian}), do: ca
def to_cartesian(pa = %ComplexNum{mode: Polar}), do: Polar.to_cartesian(pa)
@doc """
Converts a Complex Number to Polar Form.
This is a lossy operation (unless the number already is in Polar form).
"""
def to_polar(pa = %ComplexNum{mode: Polar}), do: pa
def to_polar(ca = %ComplexNum{mode: Cartesian}), do: Cartesian.to_polar(ca)
end
defimpl Inspect, for: ComplexNum do
def inspect(polar = %ComplexNum{mode: ComplexNum.Polar, real: 1}, _opts) do
"#ComplexNum (Polar) <e^(𝑖#{inspect(polar.imaginary)})>"
end
def inspect(polar = %ComplexNum{mode: ComplexNum.Polar, imaginary: 0}, _opts) do
"#ComplexNum (Polar) <#{inspect(polar.real)}>"
end
def inspect(polar = %ComplexNum{mode: ComplexNum.Polar}, _opts) do
"#ComplexNum (Polar) <#{inspect(polar.real)} · e^(𝑖#{inspect(polar.imaginary)})>"
end
def inspect(ca = %ComplexNum{mode: ComplexNum.Cartesian, imaginary: 0}, _opts) do
"#ComplexNum (Cartesian) <#{inspect(ca.real)}>"
end
def inspect(ca = %ComplexNum{mode: ComplexNum.Cartesian, real: 0}, _opts) do
"#ComplexNum (Cartesian) <#{inspect(ca.imaginary)}·𝑖>"
end
def inspect(ca = %ComplexNum{mode: ComplexNum.Cartesian}, _opts) do
"#ComplexNum (Cartesian) <#{inspect(ca.real)} + #{inspect(ca.imaginary)}·𝑖>"
end
end
# Source file: lib/complex_num.ex
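A minimal usage sketch for `ComplexNum` above, assuming the `Numbers` dependency mentioned in the moduledoc is available; the values are arbitrary.
a = ComplexNum.new(3, 4)                        # 3 + 4i, Cartesian form
b = ComplexNum.new(2, :math.pi() / 2, :polar)   # magnitude 2, angle pi/2, Polar form

ComplexNum.magnitude_squared(a)                 # => 25, precise in Cartesian form
product = ComplexNum.mult(a, b)                 # precise for both forms
ComplexNum.to_polar(product)                    # lossy conversion to Polar form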
defmodule AWS.WorkMail do
@moduledoc """
Amazon WorkMail is a secure, managed business email and calendaring service
with support for existing desktop and mobile email clients. You can access
your email, contacts, and calendars using Microsoft Outlook, your browser,
or other native iOS and Android email applications. You can integrate
WorkMail with your existing corporate directory and control both the keys
that encrypt your data and the location in which your data is stored.
The WorkMail API is designed for the following scenarios:
* Listing and describing organizations
* Managing users
* Managing groups
* Managing resources
All WorkMail API operations are Amazon-authenticated and
certificate-signed. They not only require the use of the AWS SDK, but also
allow for the exclusive use of AWS Identity and Access Management users and
roles to help facilitate access, trust, and permission policies. By
creating a role and allowing an IAM user to access the WorkMail site, the
IAM user gains full administrative visibility into the entire WorkMail
organization (or as set in the IAM policy). This includes, but is not
limited to, the ability to create, update, and delete users, groups, and
resources. This allows developers to perform the scenarios listed above, as
well as give users the ability to grant access on a selective basis using
the IAM model.
"""
@doc """
Adds a member (user or group) to the resource's set of delegates.
"""
def associate_delegate_to_resource(client, input, options \\ []) do
request(client, "AssociateDelegateToResource", input, options)
end
@doc """
Adds a member (user or group) to the group's set.
"""
def associate_member_to_group(client, input, options \\ []) do
request(client, "AssociateMemberToGroup", input, options)
end
@doc """
Cancels a mailbox export job.
Note: if the mailbox export job is near completion, it might not be
possible to cancel it.
"""
def cancel_mailbox_export_job(client, input, options \\ []) do
request(client, "CancelMailboxExportJob", input, options)
end
@doc """
Adds an alias to the set of a given member (user or group) of Amazon
WorkMail.
"""
def create_alias(client, input, options \\ []) do
request(client, "CreateAlias", input, options)
end
@doc """
Creates a group that can be used in Amazon WorkMail by calling the
`RegisterToWorkMail` operation.
"""
def create_group(client, input, options \\ []) do
request(client, "CreateGroup", input, options)
end
@doc """
Creates a new Amazon WorkMail organization. Optionally, you can choose to
associate an existing AWS Directory Service directory with your
organization. If an AWS Directory Service directory ID is specified, the
organization alias must match the directory alias. If you choose not to
associate an existing directory with your organization, then we create a
new Amazon WorkMail directory for you. For more information, see [Adding an
organization](https://docs.aws.amazon.com/workmail/latest/adminguide/add_new_organization.html)
in the *Amazon WorkMail Administrator Guide*.
You can associate multiple email domains with an organization, then set
your default email domain from the Amazon WorkMail console. You can also
associate a domain that is managed in an Amazon Route 53 public hosted
zone. For more information, see [Adding a
domain](https://docs.aws.amazon.com/workmail/latest/adminguide/add_domain.html)
and [Choosing the default
domain](https://docs.aws.amazon.com/workmail/latest/adminguide/default_domain.html)
in the *Amazon WorkMail Administrator Guide*.
Optionally, you can use a customer managed master key from AWS Key
Management Service (AWS KMS) to encrypt email for your organization. If you
don't associate an AWS KMS key, Amazon WorkMail creates a default AWS
managed master key for you.
"""
def create_organization(client, input, options \\ []) do
request(client, "CreateOrganization", input, options)
end
@doc """
Creates a new Amazon WorkMail resource.
"""
def create_resource(client, input, options \\ []) do
request(client, "CreateResource", input, options)
end
@doc """
Creates a user who can be used in Amazon WorkMail by calling the
`RegisterToWorkMail` operation.
"""
def create_user(client, input, options \\ []) do
request(client, "CreateUser", input, options)
end
@doc """
Deletes an access control rule for the specified WorkMail organization.
"""
def delete_access_control_rule(client, input, options \\ []) do
request(client, "DeleteAccessControlRule", input, options)
end
@doc """
Remove one or more specified aliases from a set of aliases for a given
user.
"""
def delete_alias(client, input, options \\ []) do
request(client, "DeleteAlias", input, options)
end
@doc """
Deletes a group from Amazon WorkMail.
"""
def delete_group(client, input, options \\ []) do
request(client, "DeleteGroup", input, options)
end
@doc """
Deletes permissions granted to a member (user or group).
"""
def delete_mailbox_permissions(client, input, options \\ []) do
request(client, "DeleteMailboxPermissions", input, options)
end
@doc """
Deletes an Amazon WorkMail organization and all underlying AWS resources
managed by Amazon WorkMail as part of the organization. You can choose
whether to delete the associated directory. For more information, see
[Removing an
organization](https://docs.aws.amazon.com/workmail/latest/adminguide/remove_organization.html)
in the *Amazon WorkMail Administrator Guide*.
"""
def delete_organization(client, input, options \\ []) do
request(client, "DeleteOrganization", input, options)
end
@doc """
Deletes the specified resource.
"""
def delete_resource(client, input, options \\ []) do
request(client, "DeleteResource", input, options)
end
@doc """
Deletes the specified retention policy from the specified organization.
"""
def delete_retention_policy(client, input, options \\ []) do
request(client, "DeleteRetentionPolicy", input, options)
end
@doc """
Deletes a user from Amazon WorkMail and all subsequent systems. Before you
can delete a user, the user state must be `DISABLED`. Use the
`DescribeUser` action to confirm the user state.
Deleting a user is permanent and cannot be undone. WorkMail archives user
mailboxes for 30 days before they are permanently removed.
"""
def delete_user(client, input, options \\ []) do
request(client, "DeleteUser", input, options)
end
@doc """
Mark a user, group, or resource as no longer used in Amazon WorkMail. This
action disassociates the mailbox and schedules it for clean-up. WorkMail
keeps mailboxes for 30 days before they are permanently removed. The
functionality in the console is *Disable*.
"""
def deregister_from_work_mail(client, input, options \\ []) do
request(client, "DeregisterFromWorkMail", input, options)
end
@doc """
Returns the data available for the group.
"""
def describe_group(client, input, options \\ []) do
request(client, "DescribeGroup", input, options)
end
@doc """
Describes the current status of a mailbox export job.
"""
def describe_mailbox_export_job(client, input, options \\ []) do
request(client, "DescribeMailboxExportJob", input, options)
end
@doc """
Provides more information regarding a given organization based on its
identifier.
"""
def describe_organization(client, input, options \\ []) do
request(client, "DescribeOrganization", input, options)
end
@doc """
Returns the data available for the resource.
"""
def describe_resource(client, input, options \\ []) do
request(client, "DescribeResource", input, options)
end
@doc """
Provides information regarding the user.
"""
def describe_user(client, input, options \\ []) do
request(client, "DescribeUser", input, options)
end
@doc """
Removes a member from the resource's set of delegates.
"""
def disassociate_delegate_from_resource(client, input, options \\ []) do
request(client, "DisassociateDelegateFromResource", input, options)
end
@doc """
Removes a member from a group.
"""
def disassociate_member_from_group(client, input, options \\ []) do
request(client, "DisassociateMemberFromGroup", input, options)
end
@doc """
Gets the effects of an organization's access control rules as they apply to
a specified IPv4 address, access protocol action, or user ID.
"""
def get_access_control_effect(client, input, options \\ []) do
request(client, "GetAccessControlEffect", input, options)
end
@doc """
Gets the default retention policy details for the specified organization.
"""
def get_default_retention_policy(client, input, options \\ []) do
request(client, "GetDefaultRetentionPolicy", input, options)
end
@doc """
Requests a user's mailbox details for a specified organization and user.
"""
def get_mailbox_details(client, input, options \\ []) do
request(client, "GetMailboxDetails", input, options)
end
@doc """
Lists the access control rules for the specified organization.
"""
def list_access_control_rules(client, input, options \\ []) do
request(client, "ListAccessControlRules", input, options)
end
@doc """
Creates a paginated call to list the aliases associated with a given
entity.
"""
def list_aliases(client, input, options \\ []) do
request(client, "ListAliases", input, options)
end
@doc """
Returns an overview of the members of a group. Users and groups can be
members of a group.
"""
def list_group_members(client, input, options \\ []) do
request(client, "ListGroupMembers", input, options)
end
@doc """
Returns summaries of the organization's groups.
"""
def list_groups(client, input, options \\ []) do
request(client, "ListGroups", input, options)
end
@doc """
Lists the mailbox export jobs started for the specified organization within
the last seven days.
"""
def list_mailbox_export_jobs(client, input, options \\ []) do
request(client, "ListMailboxExportJobs", input, options)
end
@doc """
Lists the mailbox permissions associated with a user, group, or resource
mailbox.
"""
def list_mailbox_permissions(client, input, options \\ []) do
request(client, "ListMailboxPermissions", input, options)
end
@doc """
Returns summaries of the customer's organizations.
"""
def list_organizations(client, input, options \\ []) do
request(client, "ListOrganizations", input, options)
end
@doc """
Lists the delegates associated with a resource. Users and groups can be
resource delegates and answer requests on behalf of the resource.
"""
def list_resource_delegates(client, input, options \\ []) do
request(client, "ListResourceDelegates", input, options)
end
@doc """
Returns summaries of the organization's resources.
"""
def list_resources(client, input, options \\ []) do
request(client, "ListResources", input, options)
end
@doc """
Lists the tags applied to an Amazon WorkMail organization resource.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Returns summaries of the organization's users.
"""
def list_users(client, input, options \\ []) do
request(client, "ListUsers", input, options)
end
@doc """
Adds a new access control rule for the specified organization. The rule
allows or denies access to the organization for the specified IPv4
addresses, access protocol actions, and user IDs. Adding a new rule with
the same name as an existing rule replaces the older rule.
"""
def put_access_control_rule(client, input, options \\ []) do
request(client, "PutAccessControlRule", input, options)
end
@doc """
Sets permissions for a user, group, or resource. This replaces any
pre-existing permissions.
"""
def put_mailbox_permissions(client, input, options \\ []) do
request(client, "PutMailboxPermissions", input, options)
end
@doc """
Puts a retention policy to the specified organization.
"""
def put_retention_policy(client, input, options \\ []) do
request(client, "PutRetentionPolicy", input, options)
end
@doc """
Registers an existing and disabled user, group, or resource for Amazon
WorkMail use by associating a mailbox and calendaring capabilities. It
performs no change if the user, group, or resource is enabled and fails if
the user, group, or resource is deleted. This operation results in the
accumulation of costs. For more information, see
[Pricing](https://aws.amazon.com/workmail/pricing). The equivalent console
functionality for this operation is *Enable*.
Users can either be created by calling the `CreateUser` API operation or
they can be synchronized from your directory. For more information, see
`DeregisterFromWorkMail`.
"""
def register_to_work_mail(client, input, options \\ []) do
request(client, "RegisterToWorkMail", input, options)
end
@doc """
Allows the administrator to reset the password for a user.
"""
def reset_password(client, input, options \\ []) do
request(client, "ResetPassword", input, options)
end
@doc """
Starts a mailbox export job to export MIME-format email messages and
calendar items from the specified mailbox to the specified Amazon Simple
Storage Service (Amazon S3) bucket. For more information, see [Exporting
mailbox
content](https://docs.aws.amazon.com/workmail/latest/adminguide/mail-export.html)
in the *Amazon WorkMail Administrator Guide*.
"""
def start_mailbox_export_job(client, input, options \\ []) do
request(client, "StartMailboxExportJob", input, options)
end
@doc """
Applies the specified tags to the specified Amazon WorkMail organization
resource.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Untags the specified tags from the specified Amazon WorkMail organization
resource.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Updates a user's current mailbox quota for a specified organization and
user.
"""
def update_mailbox_quota(client, input, options \\ []) do
request(client, "UpdateMailboxQuota", input, options)
end
@doc """
Updates the primary email for a user, group, or resource. The current email
is moved into the list of aliases (or swapped between an existing alias and
the current primary email), and the email provided in the input is promoted
as the primary.
"""
def update_primary_email_address(client, input, options \\ []) do
request(client, "UpdatePrimaryEmailAddress", input, options)
end
@doc """
Updates data for the resource. To have the latest information, it must be
preceded by a `DescribeResource` call. The dataset in the request should be
the one expected when performing another `DescribeResource` call.
"""
def update_resource(client, input, options \\ []) do
request(client, "UpdateResource", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "workmail"}
host = build_host("workmail", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "WorkMailService.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
# Source file: lib/aws/generated/work_mail.ex
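A hedged usage sketch for `AWS.WorkMail` above. It assumes `client` is an `AWS.Client` struct configured elsewhere (credentials, region, endpoint); the organization id is made up, and the request shapes follow the AWS WorkMail JSON API.
# `client` is assumed to be built elsewhere from your AWS configuration.
{:ok, organizations, _http_response} = AWS.WorkMail.list_organizations(client, %{})

{:ok, _users, _http_response} =
  AWS.WorkMail.list_users(client, %{"OrganizationId" => "m-0123456789abcdef"})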
defmodule HAP.AccessoryServer do
@moduledoc """
Represents a top-level HAP instance configuration
"""
defstruct display_module: nil,
data_path: nil,
name: nil,
model: nil,
identifier: nil,
pairing_code: nil,
setup_id: nil,
accessory_type: nil,
accessories: []
@typedoc """
Represents an accessory server consisting of a number of accessories. Contains the following fields:
* `name`: The name to assign to this device, for example 'HAP Bridge'
* `model`: The model name to assign to this device, for example 'HAP Co. Super Bridge III'
* `identifier`: A unique identifier string in the form "AA:BB:CC:DD:EE:FF"
* `pairing_code`: A pairing code of the form 123-45-678 to be used for pairing.
If not specified one will be defined dynamically.
* `setup_id`: A 4 character string used as part of the accessory discovery process.
If not specified one will be defined dynamically.
* `display_module`: An optional implementation of `HAP.Display` used to present pairing
and other information to the user. If not specified then a basic console-based
display is used.
* `data_path`: The path to where HAP will store its internal data. Will be created if
it does not exist. If not specified, the default value depends on the value of `Mix.target/0`.
If it is `:host` then a value of `hap_data` is used. If it is anything else (as it will be
when compiling for a Nerves target, for example), the value of `/root/hap_data` is used. This
allows HAP to work out-of-the-box in conventional and Nerves environments.
* `accessory_type`: A HAP specified value indicating the primary function of this
device. See `t:HAP.AccessoryServer.accessory_type/0` for details
* `accessories`: A list of `HAP.Accessory` structs to include in this accessory server
"""
@type t :: %__MODULE__{
name: name(),
model: model(),
identifier: accessory_identifier(),
pairing_code: pairing_code(),
setup_id: setup_id(),
display_module: module(),
data_path: String.t(),
accessory_type: accessory_type(),
accessories: [HAP.Accessory.t()]
}
@typedoc """
The name to advertise for this accessory server, for example 'HAP Bridge'
"""
@type name :: String.t()
@typedoc """
The model of this accessory server, for example 'HAP Co. Super Bridge III'
"""
@type model :: String.t()
@typedoc """
A unique identifier string in the form "AA:BB:CC:DD:EE:FF"
"""
@type accessory_identifier :: String.t()
@typedoc """
A pairing code of the form 123-45-678
"""
@type pairing_code :: String.t()
@typedoc """
A pairing URL suitable for display in a QR code
"""
@type pairing_url :: String.t()
@typedoc """
A 4 character string used as part of the accessory discovery process
"""
@type setup_id :: String.t()
@typedoc """
A HAP specified value indicating the primary function of this device as found
in Section 13 of Apple's [HomeKit Accessory Protocol Specification](https://developer.apple.com/homekit/).
Valid values include:
1. Other
2. Bridge
3. Fan
4. Garage
5. Lightbulb
6. Door Lock
7. Outlet
8. Switch
9. Thermostat
10. Sensor
11. Security System
12. Door
13. Window
14. Window Covering
15. Programmable Switch
16. Range Extender
17. IP Camera
18. Video Door Bell
19. Air Purifier
20. Heater
21. Air Conditioner
22. Humidifier
23. Dehumidifier
28. Sprinkler
29. Faucet
30. Shower System
32. Remote
"""
@type accessory_type :: integer()
@default_data_path if(Mix.target() == :host, do: "hap_data", else: "/root/hap_data")
@doc """
Generates the pairing url to be used to pair with this accessory server. This
URL can be encoded in a QR code to enable pairing directly from an iOS device
"""
@spec pairing_url(t()) :: String.t()
def pairing_url(%__MODULE__{} = accessory) do
padding = 0
version = 0
reserved = 0
accessory_type = accessory.accessory_type
hap_type = 2
pairing_code_int = accessory.pairing_code |> String.replace("-", "") |> String.to_integer()
payload =
<<padding::2, version::3, reserved::4, accessory_type::8, hap_type::4, pairing_code_int::27>>
|> :binary.decode_unsigned()
|> Base36.encode()
"X-HM://00#{payload}#{accessory.setup_id}"
end
@doc false
def compile(%__MODULE__{} = accessory_server) do
accessory_server
|> Map.update!(:display_module, &(&1 || HAP.ConsoleDisplay))
|> Map.update!(:data_path, &(&1 || @default_data_path))
|> Map.update!(:name, &(&1 || "Generic HAP Device"))
|> Map.update!(:model, &(&1 || "Generic HAP Model"))
|> Map.update!(:pairing_code, &(&1 || random_pairing_code()))
|> Map.update!(:setup_id, &(&1 || random_setup_id()))
|> Map.update!(:accessory_type, &(&1 || 1))
|> Map.update!(:accessories, fn accessories ->
accessories |> Enum.map(&HAP.Accessory.compile/1)
end)
end
@doc false
def config_hash(%__MODULE__{} = accessory_server) do
accessory_server
|> accessories_tree(false)
|> Jason.encode!()
|> HAP.Crypto.SHA512.hash()
end
@doc false
def accessories_tree(%__MODULE__{accessories: accessories}, include_values \\ true) do
%{
accessories:
accessories
|> Enum.with_index(1)
|> Enum.map(fn {%HAP.Accessory{services: services}, aid} ->
%{
aid: aid,
services:
services
|> Enum.with_index()
|> Enum.map(fn {%HAP.Service{type: type, characteristics: characteristics}, service_index} ->
%{
iid: HAP.IID.to_iid(service_index),
type: type,
characteristics:
characteristics
|> Enum.with_index()
|> Enum.map(fn {characteristic, characteristic_index} ->
result = %{
iid: HAP.IID.to_iid(service_index, characteristic_index),
type: HAP.Characteristic.get_type(characteristic),
perms: HAP.Characteristic.get_perms(characteristic),
format: HAP.Characteristic.get_format(characteristic)
}
if "pr" in HAP.Characteristic.get_perms(characteristic) && include_values do
result |> Map.put(:value, HAP.Characteristic.get_value!(characteristic))
else
result
end
end)
}
end)
}
end)
}
end
@doc false
def get_characteristics(%__MODULE__{} = accessory_server, characteristics, opts) do
characteristics
|> Enum.map(fn %{aid: aid, iid: iid} ->
with {:ok, accessory} <- get_accessory(accessory_server, aid),
{:ok, service} <- HAP.Accessory.get_service(accessory, iid),
{:ok, characteristic} <- HAP.Service.get_characteristic(service, iid),
{:ok, value} <- HAP.Characteristic.get_value(characteristic) do
opts
|> Enum.reduce(%{aid: aid, iid: iid, value: value, status: 0}, fn opt, acc ->
case opt do
:meta -> Map.merge(acc, HAP.Characteristic.get_meta(characteristic))
:perms -> Map.put(acc, :perms, HAP.Characteristic.get_perms(characteristic))
:type -> Map.put(acc, :type, HAP.Characteristic.get_type(characteristic))
_ -> acc
end
end)
else
{:error, reason} -> %{aid: aid, iid: iid, status: reason}
end
end)
end
@doc false
def put_characteristics(%__MODULE__{} = accessory_server, characteristics, sender) do
characteristics
|> Enum.map(fn
%{"aid" => aid, "iid" => iid} = map ->
with {:ok, accessory} <- get_accessory(accessory_server, aid),
{:ok, service} <- HAP.Accessory.get_service(accessory, iid),
{:ok, characteristic} <- HAP.Service.get_characteristic(service, iid) do
case map do
%{"value" => value} ->
case HAP.Characteristic.put_value(characteristic, value) do
:ok ->
if map["r"] do
%{aid: aid, iid: iid, status: 0, value: value}
else
%{aid: aid, iid: iid, status: 0}
end
{:error, reason} ->
%{aid: aid, iid: iid, status: reason}
end
%{"ev" => true} ->
with {:ok, token} <- HAP.EventManager.register(sender, aid, iid),
:ok <- HAP.Characteristic.set_change_token(characteristic, token) do
%{aid: aid, iid: iid, status: 0}
else
{:error, reason} -> %{aid: aid, iid: iid, status: reason}
end
%{"ev" => false} ->
case HAP.EventManager.unregister(sender, aid, iid) do
:ok -> %{aid: aid, iid: iid, status: 0}
{:error, reason} -> %{aid: aid, iid: iid, status: reason}
end
end
else
{:error, reason} -> %{aid: aid, iid: iid, status: reason}
end
end)
end
@doc false
def value_changed(%__MODULE__{} = accessory_server, characteristic) do
msg = %{characteristics: accessory_server |> get_characteristics([characteristic], [])}
HAP.EventManager.get_listeners(characteristic.aid, characteristic.iid)
|> Enum.each(fn pid ->
HAP.HAPSessionHandler.push(pid, msg)
end)
end
defp get_accessory(%__MODULE__{accessories: accessories}, aid) do
case Enum.at(accessories, aid - 1) do
nil -> {:error, -70_409}
accessory -> {:ok, accessory}
end
end
defp random_pairing_code do
"#{random_digits(3)}-#{random_digits(2)}-#{random_digits(3)}"
end
defp random_setup_id do
Stream.repeatedly(fn -> <<Enum.random(?A..?Z)>> end) |> Enum.take(4) |> Enum.join()
end
defp random_digits(number) do
Stream.repeatedly(&random_digit/0) |> Enum.take(number) |> Enum.join()
end
defp random_digit do
Enum.random(0..9)
end
end
# Source file: lib/hap/accessory_server.ex
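A minimal sketch of generating a pairing URL with `HAP.AccessoryServer` above; the identifier, pairing code, and setup id are made up.
server = %HAP.AccessoryServer{
  name: "Demo Bridge",
  model: "Demo Model",
  identifier: "AA:BB:CC:DD:EE:FF",
  pairing_code: "123-45-678",
  setup_id: "ABCD",
  accessory_type: 2,
  accessories: []
}

HAP.AccessoryServer.pairing_url(server)
# => "X-HM://00" <> base36_payload <> "ABCD"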
defmodule Spla2API do
@moduledoc """
Wrapper for Spla2 API.
@see https://spla2.yuu26.com/
"""
alias __MODULE__.Struct.{Coop, Stages, Stage}
defstruct [:result]
@rules [:regular, :gachi, :league]
@type rules :: :regular | :gachi | :league
@options [:now, :next, :next_all, :schedule]
@type options :: :now | :next | :next_all | :schedule
@base_uri "https://spla2.yuu26.com/"
@spec schedule() :: {:ok, Stages.t()} | {:error, any()}
def schedule, do: request("schedule", &Stages.to_struct/1)
@doc "Returns the Turf War (regular battle) stage information for the specified time slot."
@spec regular(options) :: {:error, any()} | {:ok, [Stage.t()]}
def regular(option) when option in @options, do: particular_rule(:regular, option)
@doc "Returns the Ranked Battle (gachi match) stage information for the specified time slot."
@spec gachi(options) :: {:error, any()} | {:ok, [Stage.t()]}
def gachi(option) when option in @options, do: particular_rule(:gachi, option)
@doc "Returns the League Battle stage information for the specified time slot."
@spec league(options) :: {:error, any()} | {:ok, [Stage.t()]}
def league(option) when option in @options, do: particular_rule(:league, option)
@doc """
Returns the stage information for the specified time slot.
|option|description|
|--|--|
|now|current stage information|
|next|next stage information|
|next_all|stage information from the next slot up to 24 hours ahead (includes next)|
|schedule|stage information from now up to 24 hours ahead (now + next_all)|
"""
@spec particular_rule(atom, atom) :: {:ok, [Stage.t()]} | {:error, any()}
def particular_rule(rule, option) when rule in @rules and option in @options,
do:
request("#{rule}/#{option}", fn res ->
res |> Enum.map(&Stage.to_struct/1)
end)
@spec coop :: {:ok, [Coop.t()]} | {:error, any()}
def coop, do: coop(all: false)
@spec coop(all: true | false) :: {:ok, [Coop.t()]} | {:error, any()}
def coop(all: false),
do: request("coop/schedule", fn list -> Enum.map(list, &Coop.to_struct/1) end)
def coop(all: true), do: request("coop", fn list -> Enum.map(list, &Coop.to_struct/1) end)
defp request(url, parser) when is_function(parser, 1) do
with {:ok, 200, _headers, ref} <- :hackney.request(@base_uri <> url),
{:ok, body} <- :hackney.body(ref),
{:ok, map} <- Jason.decode(body, keys: :atoms!) do
result = map.result |> parser.()
:hackney.close(ref)
{:ok, result}
else
{:ok, status_code, _, ref} ->
:hackney.close(ref)
{:error, {:status_code, status_code}}
{:error, reason} ->
{:error, reason}
end
end
end
# Source file: apps/spla2_api/lib/spla2_api.ex
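A minimal usage sketch for `Spla2API` above; these calls hit the live spla2.yuu26.com API, so the returned stages depend on the current schedule.
{:ok, current} = Spla2API.regular(:now)     # current Turf War stages
{:ok, upcoming} = Spla2API.gachi(:next)     # next Ranked Battle stages
{:ok, schedule} = Spla2API.schedule()       # full schedule as a Stages struct
{:ok, shifts} = Spla2API.coop(all: true)    # Salmon Run schedule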
defmodule Opencensus.Honeycomb.Event do
@moduledoc """
Event structure.
Honeycomb events bind a timestamp to data as described in `t:t/0` below. The `data` corresponds
to Opencensus span attributes, with limitations dictated by the intersection of OpenCensus' and
Honeycomb's data models.
## Supported Value Types
### Honeycomb
Honeycomb supports events (spans, when strung together in a trace) with dozens or hundreds of
attributes. Keys [MUST] be strings, like in OpenCensus. Values [MUST] be JSON encodable, but
[MAY] include objects (Elixir maps) and arrays -- neither of which are supported by OpenCensus.
Honeycomb makes no distinction between measurements and metadata, unlike `:telemetry`.
Honeycomb's keys for trace handling can be configured on a per-dataset basis, but default to:
* `duration_ms`
* `name`
* `service_name`
* `trace.parent_id`
* `trace.span_id`
* `trace.trace_id`
### OpenCensus
To be compatible with the OpenCensus protobuf protocol, [attribute values][AttributeValue]
[MUST] be one of:
* `TruncatableString`
* `int64`
* `bool_value`
* `double_value`
### Opencensus.Honeycomb
The data models being quite similar, the `Jason.Encoder` implementation for
`t:Opencensus.Honeycomb.Event.t/0`:
* Flattens map values as described below
* Converts atom keys and values to strings
* **Drops any other values not compatible with the OpenCensus protobuf definition**
* **Over-writes any keys that clash with the [default trace handling keys](#module-honeycomb)**
[MUST]: https://tools.ietf.org/html/rfc2119#section-1
[MAY]: https://tools.ietf.org/html/rfc2119#section-5
[AttributeValue]: https://github.com/census-instrumentation/opencensus-proto/blob/e2601ef/src/opencensus/proto/trace/v1/trace.proto#L331
[honeycombtrace]: #module-honeycomb
### Flattening
Map flattening uses periods (`.`) to delimit keys from nested maps, much like can be configured
on a dataset basis at the Honeycomb end. These span attributes before flattening:
```elixir
%{
http: %{
host: "localhost",
method: "POST",
path: "/api"
}
}
```
... becomes this event after flattening:
```elixir
%{
"http.host" => "localhost",
"http.method" => "POST",
"http.path" => "/api",
}
```
"""
alias Jason.Encode
alias Jason.Encoder
alias Opencensus.Honeycomb.Cleaner
require Record
defimpl Encoder, for: __MODULE__ do
@spec encode(%{data: map(), time: any()}, Encode.opts()) ::
binary()
| maybe_improper_list(
binary() | maybe_improper_list(any(), binary() | []) | byte(),
binary() | []
)
def encode(%{time: time, data: data, samplerate: samplerate}, opts) do
data = data |> Cleaner.clean()
%{time: time, data: data, samplerate: samplerate} |> Encode.map(opts)
end
end
@enforce_keys [:time, :data, :samplerate]
defstruct [:time, :data, :samplerate]
@typedoc """
Span attributes after flattening.
See [attribute limitations](#module-opencensus-honeycomb) for important detail on span attribute
names and values.
[attrlimits]: #module-opencensus-honeycomb
"""
@type event_data :: map()
@typedoc """
Honeycomb event suitable for POSTing to their batch API.
* `time`: the event timestamp; [MUST] be a string in ISO 8601 format, e.g. `"2019-05-17T09:55:12.622658Z"`
* `data`: `t:event_data/0` after flattening.
* `samplerate`: Sample rate expressed as 1/N, so 4 means 1/4 events made it to HC.
[MUST]: https://tools.ietf.org/html/rfc2119#section-1
"""
@type t :: %__MODULE__{
time: String.t(),
data: event_data(),
samplerate: pos_integer()
}
@doc """
The current UTC time in ISO 8601 format, e.g. `"2019-05-17T09:55:12.622658Z"`
Useful when creating events manually.
"""
@spec now() :: String.t()
def now do
DateTime.utc_now() |> DateTime.to_iso8601()
end
# Record for :opencensus.span
Record.defrecordp(
:span,
Record.extract(:span, from_lib: "opencensus/include/opencensus.hrl")
)
@doc """
Convert one OpenCensus span to an event suitable for POSTing to the
[Honeycomb Events API][HCevents].
See [attribute limitations](#module-opencensus-honeycomb) for important detail on span attribute
names and values.
[HCevents]: https://docs.honeycomb.io/api/events/
[HCstruct]: https://github.com/honeycombio/opencensus-exporter/blob/master/honeycomb/honeycomb.go#L42
"""
@spec from_oc_span(:opencensus.span(), String.t(), String.t()) :: t()
def from_oc_span(record, service_name, samplerate_key) do
start_time = record |> span(:start_time) |> wts_us_since_epoch()
end_time = record |> span(:end_time) |> wts_us_since_epoch()
duration_ms = (end_time - start_time) / 1_000
# All of the attributes that pass is_value_safe?/1 below:
data =
record
|> span(:attributes)
# Overridden with:
|> Map.merge(%{
# Honeycomb expectations:
"trace.trace_id": record |> span(:trace_id) |> hexify_trace_id(),
"trace.span_id": record |> span(:span_id) |> hexify_span_id(),
"trace.parent_id": record |> span(:parent_span_id) |> hexify_span_id(),
duration_ms: duration_ms |> Float.round(3),
# timestamp: end_time |> Float.round(3),
service_name: service_name,
name: record |> span(:name),
# Our extensions with matching style:
"trace.span_kind": record |> span(:kind)
})
|> Map.to_list()
|> Enum.filter(&is_value_safe?/1)
|> Enum.into(%{})
{samplerate, data} = pop_samplerate(data, samplerate_key)
time =
record
|> span(:start_time)
|> wts_us_since_epoch()
|> DateTime.from_unix!(:microsecond)
|> DateTime.to_iso8601()
%__MODULE__{time: time, data: data, samplerate: samplerate}
end
@spec pop_samplerate(event_data(), nil | String.t()) :: {pos_integer(), event_data()}
defp pop_samplerate(data, samplerate_key)
defp pop_samplerate(data, nil), do: {1, data}
defp pop_samplerate(data, samplerate_key), do: Map.pop(data, samplerate_key, 1)
defp wts_us_since_epoch({monotonic_time, time_offset}) do
div(monotonic_time + time_offset, 1_000)
end
defp is_value_safe?({_key, :undefined}), do: false
defp is_value_safe?({_key, :SPAN_KIND_UNSPECIFIED}), do: false
defp is_value_safe?({_key, nil}), do: false
defp is_value_safe?({_key, _}), do: true
defp hexify_trace_id(:undefined), do: nil
defp hexify_trace_id(n), do: :io_lib.format("~32.16.0b", [n]) |> to_string()
defp hexify_span_id(:undefined), do: nil
defp hexify_span_id(n), do: :io_lib.format("~16.16.0b", [n]) |> to_string()
end
# Source file: lib/opencensus/honeycomb/event.ex
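A minimal sketch of building and encoding an event with the module above; the attribute names and values are made up, and encoding assumes Jason is available.
alias Opencensus.Honeycomb.Event

event = %Event{
  time: Event.now(),
  data: %{"name" => "checkout", "http.method" => "POST", "service_name" => "shop"},
  samplerate: 1
}

Jason.encode!(event)
# => a JSON object with "time", "samplerate", and the cleaned "data" map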
defmodule Linguist.MemorizedVocabulary do
alias Linguist.Compiler
alias Linguist.{LocaleError, NoTranslationError}
defmodule TranslationDecodeError do
defexception [:message]
end
@pluralization_key Application.get_env(:linguist, :pluralization_key, :count)
if Application.get_env(:linguist, :vocabulary_backend, :ets) == :persistent_term do
if not Code.ensure_loaded?(:persistent_term) do
raise("You've set up linguist to use :persistent_term backend, but it is available only if OTP >= 21.2")
end
def create_backend() do
end
def add_to_backend(key, value) do
:persistent_term.put({__MODULE__, key}, value)
end
def remove_from_backend(key) do
:persistent_term.erase({__MODULE__, key})
end
def get_from_backend(key) do
:persistent_term.get({__MODULE__, key}, nil)
end
else
def create_backend() do
if :ets.info(__MODULE__) == :undefined do
:ets.new(__MODULE__, [:named_table, :set, :protected])
end
end
def add_to_backend(key, value) do
:ets.insert(__MODULE__, {key, value})
end
def remove_from_backend(key) do
:ets.delete(__MODULE__, key)
end
def get_from_backend(key) do
case :ets.lookup(__MODULE__, key) |> List.first() do
{_, value} -> value
nil -> nil
end
end
end
@moduledoc """
Defines lookup functions for given translation locales, with binding interpolation.
Locales are defined with the `locale/2` function, accepting a locale name and
a String path to evaluate for the translations list.
For example, given the following translations:
locale "en", [
flash: [
notice: [
hello: "hello %{first} %{last}",
]
],
users: [
title: "Users",
]
]
locale "fr", Path.join([__DIR__, "fr.exs"])
this module will respond to these functions :
t("en", "flash.notice.hello", bindings \\ []), do: # ...
t("en", "users.title", bindings \\ []), do: # ...
t("fr", "flash.notice.hello", bindings \\ []), do: # ...
"""
def t(locale, path, bindings \\ [])
def t(nil, _, _), do: raise(LocaleError, nil)
def t(locale, path, binding) when is_atom(locale) do
t(to_string(locale), path, binding)
end
def t(locale, path, bindings) do
cldr = get_from_backend("memorized_vocabulary.cldr") || Application.get_env(:linguist, :cldr, Linguist.Cldr)
cardinal = "#{cldr}.Number.Cardinal" |> String.to_atom()
norm_locale = normalize_locale(locale)
if Keyword.has_key?(bindings, @pluralization_key) do
plural_atom =
bindings
|> Keyword.get(@pluralization_key)
|> cardinal.plural_rule(norm_locale)
case plural_atom do
value when is_atom(value) ->
do_t(norm_locale, "#{path}.#{plural_atom}", bindings)
other ->
other
end
else
do_t(norm_locale, path, bindings)
end
end
def t!(locale, path, bindings \\ []) do
case t(locale, path, bindings) do
{:ok, translation} ->
translation
{:error, :no_translation} ->
raise %NoTranslationError{message: "#{locale}: #{path}"}
{:error, {err, msg}} ->
raise struct(err) |> Map.put(:message, msg)
end
end
# sobelow_skip ["DOS.StringToAtom"]
defp do_t(locale, translation_key, bindings) do
result = get_from_backend("#{locale}.#{translation_key}")
case result do
nil ->
{:error, :no_translation}
string ->
translation =
Compiler.interpol_rgx()
|> Regex.split(string, on: [:head, :tail])
|> Enum.reduce("", fn
<<"%{" <> rest>>, acc ->
key = String.to_atom(String.trim_trailing(rest, "}"))
acc <> to_string(Keyword.fetch!(bindings, key))
segment, acc ->
acc <> segment
end)
{:ok, translation}
end
end
def locales do
get_from_backend("memorized_vocabulary.locales") || []
end
def add_locale(name) do
current_locales = locales()
new_locales = [name | current_locales] |> Enum.uniq()
add_to_backend("memorized_vocabulary.locales", new_locales)
end
def update_translations(locale_name, loaded_source) do
create_backend()
loaded_source
|> Enum.map(fn {key, translation_string} ->
add_to_backend("#{locale_name}.#{key}", translation_string)
end)
end
@doc """
Sets Cldr backend for handling locales
* value - Module initialized with `use Cldr` macros
Examples
cldr MyProject.Cldr
"""
def cldr(value) do
add_to_backend("memorized_vocabulary.cldr", value)
end
@doc """
Embeds locales from provided source
* name - The String name of the locale, ie "en", "fr"
* source - The String file path to load YAML from that returns a structured list of translations
Examples
locale "es", Path.join([__DIR__, "es.yml"])
"""
def locale(name, source) do
name = normalize_locale(name)
loaded_source = Linguist.MemorizedVocabulary._load_yaml_file(source)
update_translations(name, loaded_source)
add_locale(name)
end
@doc """
Function used internally to load a yaml file. Please use
the `locale` macro with a path to a yaml file - this function
will not work as expected if called directly.
"""
def _load_yaml_file(source) do
case YamlElixir.read_all_from_file(source) do
{:ok, [file_data]} ->
%{paths: paths} =
file_data
|> Enum.reduce(
%{paths: %{}, current_prefix: ""},
&Linguist.MemorizedVocabulary._yaml_reducer/2
)
paths
{:error, _reason} ->
raise %TranslationDecodeError{message: "Decode failed for file #{source}"}
end
end
@doc """
Recursive function used internally for loading yaml files.
Not intended for external use
"""
def _yaml_reducer({key, value}, acc) when is_binary(value) do
key_name =
if acc.current_prefix == "" do
key
else
"#{acc.current_prefix}.#{key}"
end
%{
paths: Map.put(acc.paths, key_name, value),
current_prefix: acc.current_prefix
}
end
def _yaml_reducer({key, value}, acc) do
next_prefix =
if acc.current_prefix == "" do
key
else
"#{acc.current_prefix}.#{key}"
end
reduced =
Enum.reduce(
value,
%{
paths: acc.paths,
current_prefix: next_prefix
},
&Linguist.MemorizedVocabulary._yaml_reducer/2
)
%{
paths: Map.merge(acc.paths, reduced.paths),
current_prefix: acc.current_prefix
}
end
# @privatedoc
# Takes a locale as an argument, checks if the string contains a `-`, if so
# splits the string on the `-` downcases the first part and upcases the second part.
# With a locale that contains no `-` the string is downcased, and if the locale contains more
# than one `-`, a LocaleError is raised.
def normalize_locale(locale) when is_atom(locale), do: normalize_locale(to_string(locale))
def normalize_locale(locale) do
if String.match?(locale, ~r/-/) do
case String.split(locale, "-") do
[lang, country] ->
Enum.join([String.downcase(lang), String.upcase(country)], "-")
_ ->
raise(LocaleError, locale)
end
else
String.downcase(locale)
end
end
end
# Source file: lib/linguist/memorized_vocabulary.ex
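A minimal usage sketch for `Linguist.MemorizedVocabulary` above; the YAML path and its contents are hypothetical.
# en.yml (hypothetical):
#   flash:
#     notice:
#       hello: "hello %{first} %{last}"

Linguist.MemorizedVocabulary.locale("en", Path.join([__DIR__, "en.yml"]))

Linguist.MemorizedVocabulary.t("en", "flash.notice.hello", first: "John", last: "Doe")
# => {:ok, "hello John Doe"}

Linguist.MemorizedVocabulary.t!("en", "flash.notice.hello", first: "Jane", last: "Doe")
# => "hello Jane Doe"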
defmodule ExUnited.Spawn do
@moduledoc """
This module is used by `ExUnited` to spawn nodes for testing purposes.
`ExUnited.Spawn` uses the Elixir `Port` module for spawning and as it
implements the GenServer behaviour it is able to store state containing
information about the spawn nodes.
You will probably _**not**_ talk to this module directly. Though you can of course
try out things in the console.
## Example
iex(1)> ExUnited.Spawn.start_link()
{:ok, #PID<0.198.0>}
iex(2)> Node.start(:"captain@127.0.0.1")
{:ok, #PID<0.200.0>}
iex(captain@127.0.0.1)3> ExUnited.Spawn.summon(:"bruce@127.0.0.1", env: [PORT: 5000], verbose: true)
iex(bruce@127.0.0.1)> Interactive Elixir (1.10.1) - press Ctrl+C to exit (type h() ENTER for help)
iex(bruce@127.0.0.1)1>
{:"bruce@127.0.0.1", #Port<0.8>,
"iex --name bruce@127.0.0.1 --erl '-connect_all false' -S mix run -e 'Node.connect(:\"captain@127.0.0.1\")'",
[{'PORT', '5000'}]}
iex(captain@127.0.0.1)4> Node.list()
[:"bruce@127.0.0.1"]
iex(captain@127.0.0.1)5> ExUnited.Spawn.legion()
%ExUnited.Spawn.State{
color_index: 1,
nodes: %{bruce: %{color: "38", node: :"bruce@127.0.0.1", port: #Port<0.8>}}
}
iex(captain@127.0.0.1)6> ExUnited.Spawn.kill_all()
:ok
"""
alias ExUnited.Spawn.State
use GenServer
@nodename :captain
@nodehost '127.0.0.1'
@spawn __MODULE__
@color ~w(
38
214
199
112
177
220
36
)
@doc """
Starts the spawn server. The GenServer spawns nodes and stores their
references in its state.
"""
@spec start_link() :: {:ok, pid}
def start_link do
GenServer.start_link(@spawn, %State{}, name: @spawn)
end
@doc false
@spec init(State.t()) :: {:ok, State.t()}
def init(state) do
Process.flag(:trap_exit, true)
{:ok, state}
end
@spec terminate(atom, State.t()) :: :ok | :noop
def terminate(_reason, state) do
# coveralls-ignore-start
{:reply, status, _state} =
handle_call(:kill_all, {self(), make_ref()}, state)
status
# coveralls-ignore-stop
end
@doc """
Returns a `%ExUnited.Spawn.State{}` containing all its spawned nodes.
iex(captain@127.0.0.1)8> ExUnited.Spawn.legion()
%ExUnited.Spawn.State{
color_index: 4,
nodes: %{
bruce: %{color: nil, node: :"bruce@127.0.0.1", port: #Port<0.8>},
clark: %{color: "214", node: :"clark@127.0.0.1", port: #Port<0.12>},
peter: %{color: "38", node: :"peter@127.0.0.1", port: #Port<0.10>},
steven: %{color: "112", node: :"steven@127.0.0.1", port: #Port<0.16>},
tony: %{color: "199", node: :"tony@127.0.0.1", port: #Port<0.14>}
}
}
See `ExUnited.Spawn.State` for more information.
"""
@spec legion() :: State.t()
def legion do
@spawn
|> GenServer.whereis()
|> :sys.get_state()
end
@doc """
Spawns a new node using the specified node name.
These options are supported:
* `:env` - should be a keyword list containing the environment variables
which will be used for the spawned node
* `:connect` - if `true` a "fully connected" node will be spawned (see
the `erl -connect_all` flag for more information). Defaults to `false`
* `:verbose` - if `true` the STDOUT of the spawned node will be printed.
Defaults to `false`
It returns a tuple conforming to the following structure:
{node, port, command, env}
where:
* `node` - the full nodename of the spawned node (the `Node.self()` value)
* `port` - the corresponding `Port` reference of the spawned node
* `command` - the command used for spawning the node
* `env` - the list of environment variables used when spawning the node
## Example
{node, port, command, env} = ExUnited.Spawn.summon(:"peter@127.0.0.1", [
MIX_EXS: "/tmp/peter-mix.exs",
verbose: true
])
If the name already seems to be registered then a `:noop` will be returned
without spawning the node.
"""
@spec summon(atom, keyword) ::
{node, port, binary, [{charlist, charlist}]} | :noop
def summon(name, opts \\ []) do
timeout = Application.get_env(:ex_united, :summon_timeout, 5000)
GenServer.call(@spawn, {:summon, name, opts}, timeout)
end
@doc """
Kills and unregisters a spawned node identified by its node name or port.
"""
@spec kill(atom | port) :: :ok | :noop
def kill(name_or_port) do
GenServer.call(@spawn, {:kill, name_or_port})
end
@doc """
Kills and unregisters all spawned nodes. If any of the nodes could not be
killed, the return value is `:noop`; otherwise it is `:ok`.
"""
@spec kill_all() :: :ok | :noop
def kill_all do
Node.stop()
GenServer.call(@spawn, :kill_all)
end
@spec handle_call(
{:summon, atom, keyword},
{pid, reference},
State.t()
) ::
{:reply, {node, port, binary, [{charlist, charlist}]} | :noop,
State.t()}
def handle_call(
{:summon, name, opts},
_from,
%{nodes: nodes, color_index: index} = state
) do
if find(name, state) do
{:reply, :noop, state}
else
Node.start(:"#{@nodename}@#{@nodehost}")
node = :"#{name}@#{@nodehost}"
connect =
unless Keyword.get(opts, :connect) do
" --erl '-connect_all false'"
end
command =
~s[iex --name #{node}#{connect} -S mix run -e 'Node.connect(#{
inspect(Node.self())
})']
env =
opts
|> Keyword.get(:env, [])
|> to_erlang_env()
{color, index} =
if Keyword.get(opts, :verbose) do
color = Enum.at(@color, index)
index = if index == length(@color) - 1, do: 0, else: index + 1
{color, index}
else
{nil, index}
end
port = Port.open({:spawn, command}, [:binary, env: env])
nodes = Map.put(nodes, name, %{node: node, port: port, color: color})
await_node(node, Keyword.get(opts, :connect))
{:reply, {node, port, command, env},
%{state | nodes: nodes, color_index: index}}
end
end
@spec handle_call({:kill, atom | port}, {pid, reference}, State.t()) ::
{:reply, :ok | :noop, State.t()}
def handle_call({:kill, name_or_port}, _from, %{nodes: nodes} = state) do
case find(name_or_port, state) do
{name, %{port: port}} ->
state = %{state | nodes: Map.delete(nodes, name)}
case Port.info(port) do
nil ->
{:reply, :noop, state}
info ->
Port.close(port)
os_pid = Keyword.get(info, :os_pid)
System.cmd("kill", ["-9", "#{os_pid}"])
{:reply, :ok, state}
end
nil ->
{:reply, :noop, state}
end
end
@spec handle_call(:kill_all, {pid, reference}, State.t()) ::
{:reply, :ok | :noop, State.t()}
def handle_call(:kill_all, from, %{nodes: nodes} = state) do
nodes
|> Enum.reduce({:reply, :ok, state}, fn {name, _port},
{:reply, result, state} ->
{reply, new_result, state} = handle_call({:kill, name}, from, state)
{reply, hd(Enum.sort([result, new_result])), state}
end)
end
@spec handle_info({port, {:data, binary}}, State.t()) :: {:noreply, State.t()}
def handle_info({port, {:data, line}}, state) do
case find(port, state) do
{_name, %{node: node, color: color}} ->
if color do
{prompt, line} = derive_prompt(node, line)
IO.puts("\e[38;5;#{color}m#{prompt}#{IO.ANSI.reset()} #{line}")
end
nil ->
:noop
end
{:noreply, state}
end
def handle_info(_message, state), do: {:noreply, state}
@spec to_erlang_env(keyword) :: [{charlist, charlist}]
defp to_erlang_env(env) do
Enum.map(env, fn {key, value} ->
{to_charlist(key), to_charlist(value)}
end)
end
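# Illustrative sketch (input values are hypothetical): a keyword list of environment
# variables is converted into the charlist tuples expected by `Port.open/2`.
#
#   to_erlang_env(PORT: 5000, MIX_ENV: "test")
#   #=> [{'PORT', '5000'}, {'MIX_ENV', 'test'}]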
@spec await_node(node, boolean) :: :ok
defp await_node(node, connect) do
if Enum.member?(Node.list(), node) do
if connect do
[last | others] = Node.list() |> Enum.reverse()
Enum.each(others, fn node ->
:rpc.call(last, Node, :connect, [node])
end)
end
else
Process.sleep(100)
await_node(node, connect)
end
:ok
end
@spec find(atom | port, State.t()) :: {atom, State.spawned_node()} | nil
defp find(name_or_port, %{nodes: nodes}) do
Enum.find(nodes, fn {name, %{port: port}} ->
Enum.member?([name, port], name_or_port)
end)
end
@spec derive_prompt(node, binary) :: {binary, binary}
defp derive_prompt(node, line) do
regex = ~r/^\s*(?<prompt>iex.*?\)\d+>)?(?<line>.*?)\s*$/
case Regex.named_captures(regex, line) do
# coveralls-ignore-start
nil -> {"iex(#{node})>", line}
# coveralls-ignore-stop
%{"prompt" => "", "line" => line} -> {"iex(#{node})>", line}
%{"prompt" => prompt, "line" => line} -> {prompt, line}
end
end
end
lib/ex_united/spawn.ex
defmodule Shippex do
@moduledoc """
Module documentation for `Shippex`.
"""
alias Shippex.{Address, Carrier, Config, Rate, Service, Shipment, Transaction}
@type response() :: %{code: String.t(), message: String.t()}
@doc """
Fetches rates for a given `shipment`. Possible options:
* `carriers` - Fetches rates for *all* services for the given carriers
* `services` - Fetches rates only for the given services
These may be used in combination. To fetch rates for *all* UPS services, as
well as USPS Priority, for example:
Shippex.fetch_rates(shipment, carriers: :ups, services: [:usps_priority])
If no options are provided, Shippex will fetch rates for every service from
every available carrier.
"""
@spec fetch_rates(Shipment.t(), Keyword.t()) :: [{atom, Rate.t()}]
def fetch_rates(%Shipment{} = shipment, opts \\ []) do
# Convert the atom to a list if necessary.
carriers = Keyword.get(opts, :carriers)
services = Keyword.get(opts, :services)
carriers =
if is_nil(carriers) and is_nil(services) do
Shippex.carriers()
else
cond do
is_nil(carriers) ->
[]
is_atom(carriers) ->
[carriers]
is_list(carriers) ->
carriers
true ->
raise """
#{inspect(carriers)} is an invalid carrier or list of carriers.
Try using an atom. For example:
Shippex.fetch_rates(shipment, carriers: :usps)
"""
end
end
services =
case services do
nil ->
[]
service when is_atom(service) ->
[service]
services when is_list(services) ->
services
services ->
raise """
#{inspect(services)} is an invalid service or list of services.
Try using an atom. For example:
Shippex.fetch_rates(shipment, services: :usps_priority)
"""
end
|> Enum.reject(&(Service.get(&1).carrier in carriers))
carrier_tasks =
Enum.map(carriers, fn carrier ->
Task.async(fn ->
Carrier.module(carrier).fetch_rates(shipment)
end)
end)
service_tasks =
Enum.map(services, fn service ->
Task.async(fn ->
fetch_rate(shipment, service)
end)
end)
rates =
(carrier_tasks ++ service_tasks)
|> Task.yield_many(5000)
|> Enum.map(fn {task, rates} ->
rates || Task.shutdown(task, :brutal_kill)
end)
|> Enum.filter(fn
{:ok, _} -> true
_ -> false
end)
|> Enum.map(fn {:ok, rates} -> rates end)
|> List.flatten()
|> Enum.reject(fn
{atom, _} -> atom not in [:ok, :error]
_ -> true
end)
oks = Enum.filter(rates, &(elem(&1, 0) == :ok))
errors = Enum.filter(rates, &(elem(&1, 0) == :error))
Enum.sort(oks, fn r1, r2 ->
{:ok, r1} = r1
{:ok, r2} = r2
r1.price < r2.price
end) ++ errors
end
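# Illustrative sketch (field values are hypothetical): the returned list contains the
# successful rates sorted by ascending price, with any error tuples appended last.
#
#   [
#     {:ok, %Shippex.Rate{price: 799}},
#     {:ok, %Shippex.Rate{price: 1050}},
#     {:error, %{code: "...", message: "..."}}
#   ]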
@doc """
Fetches the rate for `shipment` for a specific `Service`. The `service` module
contains the `Carrier` and selected delivery speed. You can also pass in the
ID of the service.
Shippex.fetch_rate(shipment, service)
"""
@spec fetch_rate(Shipment.t(), atom() | Service.t()) :: {atom, Rate.t()}
def fetch_rate(%Shipment{} = shipment, service) when is_atom(service) do
service = Service.get(service)
fetch_rate(shipment, service)
end
def fetch_rate(%Shipment{} = shipment, %Service{carrier: carrier} = service) do
case Carrier.module(carrier).fetch_rate(shipment, service) do
[rate] -> rate
{_, _} = rate -> rate
end
end
@doc """
Fetches the label for `shipment` for a specific `Service`. The `service`
module contains the `Carrier` and selected delivery speed.
Shippex.create_transaction(shipment, service)
"""
@spec create_transaction(Shipment.t(), Service.t()) ::
{:ok, Transaction.t()} | {:error, response}
def create_transaction(%Shipment{} = shipment, %Service{carrier: carrier} = service) do
Carrier.module(carrier).create_transaction(shipment, service)
end
@doc """
Cancels the transaction associated with `label`, if possible. The result is
returned in a tuple.
You may pass in either the transaction, or if the full transaction struct
isn't available, you may pass in the carrier, shipment, and tracking number
instead.
case Shippex.cancel_shipment(transaction) do
{:ok, result} ->
IO.inspect(result) #=> %{code: "1", message: "Voided successfully."}
{:error, %{code: code, message: message}} ->
IO.inspect(code)
IO.inspect(message)
end
"""
@spec cancel_transaction(Transaction.t()) :: {atom, response}
def cancel_transaction(%Transaction{} = transaction) do
Carrier.module(transaction.carrier).cancel_transaction(transaction)
end
@spec cancel_transaction(Carrier.t(), Shipment.t(), String.t()) :: {atom, response}
def cancel_transaction(carrier, %Shipment{} = shipment, tracking_number) do
Carrier.module(carrier).cancel_transaction(shipment, tracking_number)
end
@doc """
Returns `true` if the carrier services the given country. An
ISO-3166-compliant country code is required.
iex> Shippex.services_country?(:usps, "US")
true
iex> Shippex.services_country?(:usps, "KP")
false
"""
@spec services_country?(Carrier.t(), ISO.country_code()) :: boolean()
def services_country?(carrier, country) do
Carrier.module(carrier).services_country?(country)
end
@doc """
Returns the status for the given tracking numbers.
"""
@spec track_packages(Carrier.t(), [String.t()]) :: {atom(), response()}
def track_packages(carrier, tracking_numbers) do
Carrier.module(carrier).track_packages(tracking_numbers)
end
@doc """
Performs address validation. If the address is completely invalid,
`{:error, result}` is returned. For addresses that may have typos,
`{:ok, candidates}` is returned. You can iterate through the list of
candidates to present to the end user. Addresses that pass validation
perfectly will still be in a `list` where `length(candidates) == 1`.
Note that the `candidates` returned will automatically pass through
`Shippex.Address.address()` for casting. Also, if `:usps` is used as the
validation provider, the number of candidates will always be 1.
address = Shippex.Address.address(%{
name: "<NAME>",
phone: "123-123-1234",
address: "9999 Hobby Lane",
address_line_2: nil,
city: "Austin",
state: "TX",
postal_code: "78703"
})
case Shippex.validate_address(address) do
{:error, %{code: code, message: message}} ->
# Present the error.
{:ok, candidates} when length(candidates) == 1 ->
# Use the address
{:ok, candidates} when length(candidates) > 1 ->
# Present candidates to user for selection
end
"""
@spec validate_address(Address.t(), Keyword.t()) :: {atom(), response() | [Address.t()]}
defdelegate validate_address(address, opts \\ []), to: Address, as: :validate
@doc false
defdelegate carriers(), to: Config
@doc false
defdelegate currency_code(), to: Config
@doc false
defdelegate env(), to: Config
end
lib/shippex.ex
defmodule Crux.Structs.Util do
@moduledoc """
Collection of util functions.
"""
alias Crux.Structs
if Version.compare(System.version(), "1.7.0") != :lt do
@moduledoc since: "0.1.0"
end
@doc ~s"""
Converts a string, likely a Discord snowflake, to an integer
## Examples
```elixir
# A string
iex> "218348062828003328" |> Crux.Structs.Util.id_to_int()
218348062828003328
# Already a number
iex> 218348062828003328 |> Crux.Structs.Util.id_to_int()
218348062828003328
# Fallback
iex> nil |> Crux.Structs.Util.id_to_int()
nil
```
"""
@spec id_to_int(id :: String.t() | integer() | nil) :: integer() | nil | no_return()
if Version.compare(System.version(), "1.7.0") != :lt do
@doc since: "0.1.0"
else
@since "0.1.0"
end
def id_to_int(str) when is_bitstring(str), do: String.to_integer(str)
def id_to_int(already) when is_integer(already), do: already
def id_to_int(nil), do: nil
@doc ~S"""
Converts a list of raw api data to structs keyed under the passed key.
## Examples
```elixir
iex> [
...> %{"username" => "space", "discriminator" => "0001", "id" => "218348062828003328", "avatar" => "46a356e237350bf8b8dfde15667dfc4"},
...> %{"username" => "Drahcirius", "discriminator" => "1336", "id" => "130175406673231873", "avatar" => "c896aebec82c90f590b08cfebcdc4e3b"}
...> ]
...> |> Crux.Structs.Util.raw_data_to_map(Crux.Structs.User)
%{
130175406673231873 => %Crux.Structs.User{
username: "Drahcirius",
discriminator: "1336",
id: 130175406673231873,
avatar: "c896aebec82c90f590b08cfebcdc4e3b"
},
218348062828003328 => %Crux.Structs.User{
username: "space",
discriminator: "0001",
id: 218348062828003328,
avatar: "46a356e237350bf8b8dfde15667dfc4"
}
}
iex> [
...> %{"username" => "space", "discriminator" => "0001", "id" => "218348062828003328", "avatar" => "46a356e237350bf8b8dfde15667dfc4"},
...> %{"username" => "Drahcirius", "discriminator" => "1336", "id" => "130175406673231873", "avatar" => "c896aebec82c90f590b08cfebcdc4e3b"}
...> ]
...> |> Crux.Structs.Util.raw_data_to_map(Crux.Structs.User, :username)
%{
"Drahcirius" => %Crux.Structs.User{
username: "Drahcirius",
discriminator: "1336",
id: 130175406673231873,
avatar: "c896aebec82c90f590b08cfebcdc4e3b"
},
"space" => %Crux.Structs.User{
username: "space",
discriminator: "0001",
id: 218348062828003328,
avatar: "46a356e237350bf8b8dfde15667dfc4"
}
}
```
"""
@spec raw_data_to_map(data :: list, target :: module(), key :: atom()) :: map()
if Version.compare(System.version(), "1.7.0") != :lt do
@doc since: "0.1.0"
else
@since "0.1.0"
end
def raw_data_to_map(data, target, key \\ :id) do
data
|> Structs.create(target)
|> Map.new(fn struct -> {Map.fetch!(struct, key), struct} end)
end
@doc ~S"""
Returns a function converting a passed map to an id, using the specified key as key.
## Examples
```elixir
# Id is already a number
iex> Crux.Structs.Util.map_to_id(:foo).(%{foo: 123})
123
# Id is a string
iex> Crux.Structs.Util.map_to_id(:foo).(%{foo: "123"})
123
# No id exists
iex> Crux.Structs.Util.map_to_id(:foo).(%{"foo" => "123"})
nil
# Example using `Enum.map/2`
iex> [
...> %{"username" => "space", "discriminator" => "0001", "id" => "218348062828003328", "avatar" => "46a356e237350bf8b8dfde15667dfc4"},
...> %{"username" => "Drahcirius", "discriminator" => "1336", "id" => "130175406673231873", "avatar" => "c896aebec82c90f590b08cfebcdc4e3b"}
...> ]
...> |> Enum.map(Crux.Structs.Util.map_to_id("id"))
[218348062828003328, 130175406673231873]
```
"""
@spec map_to_id(key :: term()) :: (map() -> Crux.Rest.snowflake() | nil)
def map_to_id(key \\ :id) do
fn
%{^key => value} -> id_to_int(value)
_ -> nil
end
end
@doc ~S"""
Converts a string to an atom.
Returns an already converted atom as is instead of raising
## Examples
```elixir
# A string
iex> "id" |> Crux.Structs.Util.string_to_atom()
:id
# Already an atom
iex> :id |> Crux.Structs.Util.string_to_atom()
:id
```
"""
@spec string_to_atom(input :: String.t() | atom()) :: atom()
if Version.compare(System.version(), "1.7.0") != :lt do
@doc since: "0.1.0"
else
@since "0.1.0"
end
def string_to_atom(string) when is_bitstring(string), do: String.to_atom(string)
def string_to_atom(atom) when is_atom(atom), do: atom
@doc ~S"""
Atomifies all keys in a passed list or map to avoid the mess of mixed string and atom keys the gateway sends.
## Examples
```elixir
# A map
iex> %{"username" => "space", "discriminator" => "0001", "id" => "218348062828003328", "avatar" => "46a356e237350bf8b8dfde15667dfc4"}
...> |> Crux.Structs.Util.atomify()
%{username: "space", discriminator: "0001", id: "218348062828003328", avatar: "46a356e237350bf8b8dfde15667dfc4"}
# A list
iex> [
...> %{"username" => "space", "discriminator" => "0001", "id" => "218348062828003328", "avatar" => "46a356e237350bf8b8dfde15667dfc4"},
...> %{"username" => "Drahcirius", "discriminator" => "1336", "id" => "130175406673231873", "avatar" => "c896aebec82c90f590b08cfebcdc4e3b"}
...> ]
...> |> Crux.Structs.Util.atomify()
[
%{username: "space", discriminator: "0001", id: "218348062828003328", avatar: "46a356e237350bf8b8dfde15667dfc4"},
%{username: "Drahcirius", discriminator: "1336", id: "130175406673231873", avatar: "c896aebec82c90f590b08cfebcdc4e3b"}
]
# A nested map
iex> %{"foo" => "bar", "bar" => %{"baz" => "foo"}}
...> |> Crux.Structs.Util.atomify()
%{foo: "bar", bar: %{baz: "foo"}}
# A nested list
iex> [[%{"foo" => "bar"}], %{"bar" => "foo"}]
...> |> Crux.Structs.Util.atomify()
[[%{foo: "bar"}], %{bar: "foo"}]
# A struct
iex> %Crux.Structs.Overwrite{id: 448394877194076161, type: "role", allow: 0, deny: 0}
...> |> Crux.Structs.Util.atomify()
%{id: 448394877194076161, type: "role", allow: 0, deny: 0}
```
"""
@spec atomify(input :: map() | list()) :: map() | list()
if Version.compare(System.version(), "1.7.0") != :lt do
@doc since: "0.1.0"
else
@since "0.1.0"
end
def atomify(input)
def atomify(%{__struct__: _struct} = struct), do: struct |> Map.from_struct() |> atomify()
def atomify(%{} = map), do: Map.new(map, &atomify_kv/1)
def atomify(list) when is_list(list), do: Enum.map(list, &atomify/1)
def atomify(other), do: other
defp atomify_kv({k, v}), do: {string_to_atom(k), atomify(v)}
# TODO: Remove this as soon as 1.7.0 is required
if Version.compare(System.version(), "1.7.0") != :lt do
defmacro since(version) when is_binary(version) do
quote do
@doc since: unquote(version)
end
end
defmacro modulesince(version) when is_binary(version) do
quote do
@moduledoc since: unquote(version)
end
end
defmacro typesince(version) when is_binary(version) do
quote do
@typedoc since: unquote(version)
end
end
else
defmacro since(version) when is_binary(version) do
quote do
@since unquote(version)
end
end
defmacro modulesince(version) when is_binary(version), do: nil
defmacro typesince(version) when is_binary(version), do: nil
end
end
lib/structs/util.ex
defmodule RoboticaCommon.Date do
@moduledoc """
Provides date/time functions for Robotica.
"""
@spec get_timezone :: String.t()
def get_timezone, do: Application.get_env(:robotica_common, :timezone)
@doc """
Converts a UTC date time to a local Date for today.
Note: 13:00 UTC is midnight in Australia/Melbourne timezone at this date.
iex> import RoboticaCommon.Date
iex> today(~U[2019-11-09 12:00:00Z])
~D[2019-11-09]
iex> import RoboticaCommon.Date
iex> today(~U[2019-11-09 13:00:00Z])
~D[2019-11-10]
"""
@spec today(DateTime.t()) :: Date.t()
def today(date_time) do
{:ok, local_date_time} = DateTime.shift_zone(date_time, get_timezone())
Date.add(local_date_time, 0)
end
@doc """
Converts a UTC date time to a local Date for tomorrow.
Note: 13:00 UTC is midnight in Australia/Melbourne timezone at this date.
iex> import RoboticaCommon.Date
iex> tomorrow(~U[2019-11-09 12:00:00Z])
~D[2019-11-10]
iex> import RoboticaCommon.Date
iex> tomorrow(~U[2019-11-09 13:00:00Z])
~D[2019-11-11]
"""
@spec tomorrow(DateTime.t()) :: Date.t()
def tomorrow(date_time) do
{:ok, local_date_time} = DateTime.shift_zone(date_time, get_timezone())
Date.add(local_date_time, 1)
end
@doc """
Find the local date next Monday after the specified UTC date time.
If it is Monday, return the next Monday.
iex> import RoboticaCommon.Date
iex> next_monday(~U[2019-11-10 12:00:00Z])
~D[2019-11-11]
iex> import RoboticaCommon.Date
iex> next_monday(~U[2019-11-10 13:00:00Z])
~D[2019-11-18]
"""
@spec next_monday(DateTime.t()) :: Date.t()
def next_monday(date_time) do
# M 1 --> +7
# T 2 --> +6
# W 3 --> +5
# T 4 --> +4
# F 5 --> +3
# S 6 --> +2
# S 7 --> +1
{:ok, date_time} = DateTime.shift_zone(date_time, get_timezone())
day_of_week = Date.day_of_week(date_time)
add_days = 7 - day_of_week + 1
Date.add(date_time, add_days)
end
@doc """
Find the UTC date time at midnight for the specified local date.
iex> import RoboticaCommon.Date
iex> midnight_utc(~D[2019-11-10])
~U[2019-11-09 13:00:00+00:00]
"""
@spec midnight_utc(Date.t()) :: DateTime.t()
def midnight_utc(date) do
{:ok, naive_date_time} = NaiveDateTime.new(date, ~T[00:00:00])
{:ok, local_date_time} = DateTime.from_naive(naive_date_time, get_timezone())
{:ok, utc_date_time} = DateTime.shift_zone(local_date_time, "Etc/UTC")
utc_date_time
end
end
robotica_common/lib/date.ex
defmodule Comeonin do
@moduledoc """
Defines a behaviour for higher-level password hashing functions.
"""
@type opts :: keyword
@type password :: binary
@type user_struct :: map | nil
@doc """
Hashes a password and returns the password hash in a map.
"""
@callback add_hash(password, opts) :: map
@doc """
Checks the password by comparing its hash with the password hash found
in a user struct, or map.
The first argument to `check_pass/3` should be a user struct, a regular
map, or nil.
"""
@callback check_pass(user_struct, password, opts) :: {:ok, map} | {:error, String.t()}
@doc """
Runs the password hash function, but always returns false.
This function is intended to make it more difficult for any potential
attacker to find valid usernames by using timing attacks. This function
is only useful if it is used as part of a policy of hiding usernames.
"""
@callback no_user_verify(opts) :: false
defmacro __using__(_) do
quote do
@behaviour Comeonin
@behaviour Comeonin.PasswordHash
@doc """
Hashes a password, using `hash_pwd_salt/2`, and returns the password hash in a map.
This is a convenience function that is especially useful when used with
Ecto changesets.
## Options
In addition to the `:hash_key` option shown below, this function also takes
options that are then passed on to the `hash_pwd_salt/2` function in this
module.
See the documentation for `hash_pwd_salt/2` for further details.
* `:hash_key` - the password hash identifier
* the default is `:password_hash`
## Example with Ecto
The `put_pass_hash` function below is an example of how you can use
`add_hash` to add the password hash to the Ecto changeset.
defp put_pass_hash(%Ecto.Changeset{valid?: true, changes:
%{password: password}} = changeset) do
change(changeset, add_hash(password))
end
defp put_pass_hash(changeset), do: changeset
This function will return a changeset with `%{password_hash: password_hash}`
added to the `changes` map.
"""
@impl Comeonin
def add_hash(password, opts \\ []) do
hash_key = opts[:hash_key] || :password_hash
%{hash_key => hash_pwd_salt(password, opts)}
end
@doc """
Checks the password, using `verify_pass/2`, by comparing the hash with
the password hash found in a user struct, or map.
This is a convenience function that takes a user struct, or map, as input
and seamlessly handles the cases where no user is found.
## Options
* `:hash_key` - the password hash identifier
* this does not need to be set if the key is `:password_hash` or `:encrypted_password`
* `:hide_user` - run the `no_user_verify/1` function if no user is found
* the default is true
## Example
The following is an example of using this function to verify a user's
password:
def verify_user(%{"password" => password} = params) do
params
|> Accounts.get_by()
|> check_pass(password)
end
The `Accounts.get_by` function in this example takes the user parameters
(for example, email and password) as input and returns a user struct or nil.
"""
@impl Comeonin
def check_pass(user, password, opts \\ [])
def check_pass(nil, _password, opts) do
unless opts[:hide_user] == false, do: no_user_verify(opts)
{:error, "invalid user-identifier"}
end
def check_pass(user, password, opts) when is_binary(password) do
case get_hash(user, opts[:hash_key]) do
{:ok, hash} ->
if verify_pass(password, hash), do: {:ok, user}, else: {:error, "invalid password"}
_ ->
{:error, "no password hash found in the user struct"}
end
end
def check_pass(_, _, _) do
{:error, "password is not a string"}
end
defp get_hash(%{password_hash: hash}, nil), do: {:ok, hash}
defp get_hash(%{encrypted_password: hash}, nil), do: {:ok, hash}
defp get_hash(_, nil), do: nil
defp get_hash(user, hash_key) do
if hash = Map.get(user, hash_key), do: {:ok, hash}
end
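# Illustrative examples (hash values are hypothetical), based on the clauses above:
#
#   get_hash(%{password_hash: "$2b$..."}, nil)      #=> {:ok, "$2b$..."}
#   get_hash(%{encrypted_password: "$2b$..."}, nil) #=> {:ok, "$2b$..."}
#   get_hash(%{name: "jane"}, nil)                  #=> nil
#   get_hash(%{pw_hash: "$2b$..."}, :pw_hash)       #=> {:ok, "$2b$..."}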
@doc """
Runs the password hash function, but always returns false.
This function is intended to make it more difficult for any potential
attacker to find valid usernames by using timing attacks. This function
is only useful if it is used as part of a policy of hiding usernames.
## Options
This function should be called with the same options as those used by
`hash_pwd_salt/2`.
## Hiding usernames
In addition to keeping passwords secret, hiding the precise username
can help make online attacks more difficult. An attacker would then
have to guess a username / password combination, rather than just
a password, to gain access.
This does not mean that the username should be kept completely secret.
Adding a short numerical suffix to a user's name, for example, would be
sufficient to increase the attacker's work considerably.
If you are implementing a policy of hiding usernames, it is important
to make sure that the username is not revealed by any other part of
your application.
"""
@impl Comeonin
def no_user_verify(opts \\ []) do
hash_pwd_salt("", opts)
false
end
defoverridable Comeonin
end
end
end
deps/comeonin/lib/comeonin.ex
defmodule Crux.Structs.Member do
@moduledoc """
Represents a Discord [Guild Member Object](https://discord.com/developers/docs/resources/guild#guild-member-object).
Differences compared to the Discord API Object:
- `:user` is just the user id
"""
@moduledoc since: "0.1.0"
@behaviour Crux.Structs
alias Crux.Structs.{Member, Snowflake, User, Util}
require Util
defstruct [
:user,
:nick,
:roles,
:joined_at,
:premium_since,
:deaf,
:mute,
:pending,
# Additional
:guild_id
]
@typedoc since: "0.2.1"
@type t :: %__MODULE__{
user: Snowflake.t(),
nick: String.t() | nil,
roles: MapSet.t(Snowflake.t()),
joined_at: String.t(),
premium_since: String.t() | nil,
deaf: boolean() | nil,
mute: boolean() | nil,
pending: boolean() | nil,
guild_id: Snowflake.t() | nil
}
@typedoc """
All available types that can be resolved into a user id.
"""
@typedoc since: "0.2.1"
@type id_resolvable() :: User.id_resolvable()
@doc """
Resolves the id of a `t:Crux.Structs.Member.t/0`.
> Automatically invoked by `Crux.Structs.resolve_id/2`.
```elixir
iex> %Crux.Structs.Member{user: 218348062828003328}
...> |> Crux.Structs.Member.resolve_id()
218348062828003328
```
For more examples see `Crux.Structs.User.resolve_id/1`.
"""
@doc since: "0.2.1"
@spec resolve_id(id_resolvable()) :: Snowflake.t() | nil
defdelegate resolve_id(resolvable), to: User
@doc """
Creates a `t:Crux.Structs.Member.t/0` struct from raw data.
> Automatically invoked by `Crux.Structs.create/2`.
"""
@doc since: "0.1.0"
@spec create(data :: map()) :: t()
def create(data) do
member =
data
|> Util.atomify()
|> Map.update!(:user, Util.map_to_id())
|> Map.update!(:roles, &MapSet.new(&1, fn role_id -> Snowflake.to_snowflake(role_id) end))
|> Map.update(:guild_id, nil, &Snowflake.to_snowflake/1)
struct(__MODULE__, member)
end
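# Illustrative sketch (payload values are hypothetical, and it is assumed that
# `Snowflake.to_snowflake/1` converts string ids to integers): `create/1` reduces the
# raw "user" object to its id and converts the role ids into a MapSet of snowflakes.
#
#   create(%{"user" => %{"id" => "218348062828003328"}, "roles" => ["222442681798361018"]})
#   #=> %Crux.Structs.Member{user: 218348062828003328, roles: #MapSet<[222442681798361018]>, ...}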
@doc ~S"""
Converts a `t:Crux.Structs.Member.t/0` into its discord mention format.
## Examples
```elixir
# Without nickname
iex> %Crux.Structs.Member{user: 218348062828003328, nick: nil}
...> |> Crux.Structs.Member.to_mention()
"<@218348062828003328>"
# With nickname
iex> %Crux.Structs.Member{user: 218348062828003328, nick: "weltraum"}
...> |> Crux.Structs.Member.to_mention()
"<@!218348062828003328>"
```
"""
@doc since: "0.1.1"
@spec to_mention(user :: Crux.Structs.Member.t()) :: String.t()
def to_mention(%__MODULE__{user: id, nick: nil}), do: "<@#{id}>"
def to_mention(%__MODULE__{user: id}), do: "<@!#{id}>"
defimpl String.Chars, for: Crux.Structs.Member do
@spec to_string(Member.t()) :: String.t()
def to_string(%Member{} = data), do: Member.to_mention(data)
end
end
lib/structs/member.ex
defmodule Cog.Commands.Sort do
use Cog.Command.GenCommand.Base,
bundle: Cog.Util.Misc.embedded_bundle
alias Cog.Command.Service.MemoryClient
@description "Sort inputs by field"
@long_description """
Fields are used to pick which values to sort by. If two keys have the same
value, the values of the next key are compared, and so on. If no fields are
provided, items are intelligently sorted based on their contents.
"""
@examples """
seed '[{"a": 1}, {"a": 3}, {"a": 2}]' | sort
> [{"a": 1}, {"a": 2}, {"a": 3}]
seed '[{"a": 1}, {"a": 3}, {"a": 2}]' | sort --desc
> [{"a": 3}, {"a": 2}, {"a": 1}]
seed '[{"a": 3, "b": 4}, {"a": 1, "b": 4}, {"a": 2, "b": 6}]' | sort b a
> [{"a": 1, "b": 4}, {"a: 3, "b": 4}, {"a": 2, "b": 6}]
"""
rule "when command is #{Cog.Util.Misc.embedded_bundle}:sort allow"
option "desc", short: "d", type: "bool", required: false
option "asc", short: "a", type: "bool", required: false
def handle_message(req, state) do
root = req.services_root
token = req.service_token
key = req.invocation_id
step = req.invocation_step
value = req.cog_env
opts = req.options
args = req.args
MemoryClient.accum(root, token, key, value)
case step do
step when step in ["first", nil] ->
{:reply, req.reply_to, nil, state}
"last" ->
accumulated_value = MemoryClient.fetch(root, token, key)
sorted_value = sort_by(accumulated_value, opts, args)
MemoryClient.delete(root, token, key)
{:reply, req.reply_to, sorted_value, state}
end
end
defp sort_by(items, %{"desc" => true}, args),
do: Enum.sort_by(items, &pluck_fields(&1, args), &>=/2)
defp sort_by(items, _opts, args),
do: Enum.sort_by(items, &pluck_fields(&1, args))
defp pluck_fields(item, []),
do: item
defp pluck_fields(item, fields) do
values = Enum.map(fields, &Map.get(item, &1))
case Enum.reject(values, &is_nil/1) do
[] ->
item
_ ->
values
end
end
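# Illustrative sketch (not part of the original command): sorting by the "a" field.
#
#   sort_by([%{"a" => 3}, %{"a" => 1}, %{"a" => 2}], %{}, ["a"])
#   #=> [%{"a" => 1}, %{"a" => 2}, %{"a" => 3}]
#
#   sort_by([%{"a" => 3}, %{"a" => 1}, %{"a" => 2}], %{"desc" => true}, ["a"])
#   #=> [%{"a" => 3}, %{"a" => 2}, %{"a" => 1}]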
end
lib/cog/commands/sort.ex
defmodule Game.Phase.Resolution do
@moduledoc """
Players' dice face off against each other,
and previously selected God Favors are triggered before
and/or after the standoff.
"""
@behaviour Game.Phase
alias Game.{
Player,
Phase,
Turn,
Action,
Favor
}
@impl Game.Phase
@spec action(Game.t(), any()) :: Game.t()
def action(game, :start_phase) do
%{turns: turns} = Phase.current(game)
game
|> IndexMap.update_all(:players, &Player.update(&1, %{turns: turns}))
|> Action.Token.collect_tokens()
|> Turn.opponent(&Action.Token.collect_tokens/1)
end
def action(game, :continue), do: Turn.next(game)
def action(game, :start_turn) do
game
|> Turn.get_player()
|> case do
%{turns: 7} -> action(game, {:pre_resolution, :opponent})
%{turns: 6} -> action(game, {:pre_resolution, :player})
%{turns: 5} -> action(game, {:resolution, :resolve})
%{turns: 4} -> action(game, {:resolution, :attack})
%{turns: 3} -> action(game, {:resolution, :steal})
%{turns: 2} -> action(game, {:post_resolution, :opponent})
%{turns: 1} -> action(game, {:post_resolution, :player})
_other -> game
end
end
def action(game, :end_turn) do
game
|> IndexMap.update_all(:players, &Player.update(&1, %{invoked_favor: 0}))
|> Turn.update_player(&Player.increase(&1, :turns, -1))
end
def action(game, {:resolution, :resolve}) do
game
|> Turn.update_player(&Player.resolve(&1, Turn.get_opponent(game)))
|> Turn.next()
end
def action(game, {:resolution, :attack}) do
game
|> Action.Attack.attack_health()
end
def action(game, {:resolution, :steal}) do
game
|> Action.Token.steal_tokens()
end
def action(game, {trigger, affects}) do
game = Favor.invoke(game, trigger, affects)
player = Turn.get_player(game)
if player.invoked_favor > 0 do
game
else
Turn.next(game)
end
end
def action(game, :end_phase) do
players = IndexMap.filter(game.players, fn player -> player.health > 0 end)
players
|> Enum.count()
|> case do
0 -> Map.put(game, :winner, Turn.determine_next(game))
1 -> Map.put(game, :winner, players |> Enum.at(0) |> elem(0))
2 -> game
end
|> IndexMap.update_all(:players, &Player.update(&1, %{favor_tier: %{favor: 0, tier: 0}}))
end
def action(game, _other) do
# unknown action
game
end
end
src/server/lib/game/phase/resolution.ex
defmodule CatalogApi.Category do
@moduledoc """
Defines the CatalogApi.Category struct and functions which are responsible
for parsing categories from CatalogApi responses.
"""
alias CatalogApi.Category
@derive Jason.Encoder
defstruct category_id: nil,
children: [],
depth: 1,
item_count: 0,
name: nil,
parent_category_id: 0
@type t :: %CatalogApi.Category{}
@valid_fields ~w(category_id children depth item_count name parent_category_id)
@doc """
Casts a map representing a Category which results from parsing JSON
representing a category into a `%CatalogApi.Category{}` struct.
If the category has any categories enumerated in its "children" key, then it
casts those children recursively as well.
"""
@spec cast(map()) :: t()
def cast(category_json) when is_map(category_json) do
category_json
|> filter_unknown_properties
|> Enum.map(fn {k, v} -> {String.to_atom(k), v} end)
|> Enum.into(%{})
|> to_struct
|> cast_child_categories
end
@doc """
Accepts a raw JSON response from the `CatalogApi.catalog_breakdown/2` function
and casts all categories as `%CatalogApi.Category{}` structs.
If the given json is not recognized or is an invalid format then it returns
an error tuple of the format: `{:error, :unparseable_catalog_api_categories}`
"""
@spec extract_categories_from_json(any()) ::
        {:ok, list(t())} | {:error, :unparseable_catalog_api_categories}
def extract_categories_from_json(%{
"catalog_breakdown_response" => %{
"catalog_breakdown_result" => %{"categories" => %{"Category" => categories}}
}
})
when is_list(categories) do
{:ok, Enum.map(categories, &cast/1)}
end
def extract_categories_from_json(_), do: {:error, :unparseable_catalog_api_categories}
defp filter_unknown_properties(map) do
Enum.filter(map, fn {k, _v} -> k in @valid_fields end)
end
defp to_struct(map), do: struct(Category, map)
defp cast_child_categories(
%Category{children: %{"Category" => raw_child_categories}} = category
) do
child_categories = raw_child_categories |> Enum.map(&cast/1)
%{category | children: child_categories}
end
defp cast_child_categories(%Category{children: %{}} = category) do
%{category | children: []}
end
defp cast_child_categories(%Category{} = category), do: category
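# Illustrative sketch (values are hypothetical): unknown keys are dropped, known keys
# are atomized, and nested "children" categories are cast recursively.
#
#   cast(%{
#     "category_id" => 123,
#     "name" => "Electronics",
#     "item_count" => 10,
#     "children" => %{"Category" => [%{"category_id" => 456, "name" => "Phones"}]},
#     "unknown_key" => "ignored"
#   })
#   #=> %CatalogApi.Category{category_id: 123, name: "Electronics", item_count: 10,
#   #     children: [%CatalogApi.Category{category_id: 456, name: "Phones", ...}], ...}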
end
lib/catalog_api/category.ex
defmodule Interceptor do
@moduledoc """
See <NAME>'s LispCast Blog article:
[A Model of Interceptors](https://lispcast.com/a-model-of-interceptors/)
and the [interceptor](https://github.com/exoscale/interceptor)
library as inspirations.
The idea of interceptors is similar to that of middleware, they define
the transform-on-the-way-in (`:enter` function) and transform-on-the-way-out
(`:leave` function) as two separate operations. They basically reify the
two uses of middleware into a distinct object.
## Interceptor Pattern
`execute/2` takes a context (map) and chains it to the interceptors
that can modify it and ultimately returns the modified context.
An interceptor contains the `:enter` function and optionally the `:leave`
and/or `:error` functions. The `:enter` and `:leave` functions take
the context as input and return it as output, while `:error` takes
the context and the error that triggered it and potentially returns
a new context.
When executed, it will pass the context to the `:enter` handlers in
order, then to all the `:leave` handlers in reverse.
Something like this:
enter A -> enter B -> enter C -> leave C -> leave B -> leave A
"""
use Boundary, deps: [], exports: []
alias Interceptor.{Coercible, Hook, Queue, Stage}
@type t :: Coercible.t()
@type err :: Exception.t() | (reason :: term)
@type stage :: :enter | :leave | {:error, err}
@type ctx :: %{
:__queue__ => Queue.t() | nil,
:__stage__ => stage | nil,
optional(any) => any
}
@spec execute(ctx) :: {:ok, ctx} | {:error, err}
def execute(ctx) do
ctx.__queue__
|> Enum.reduce(ctx, &Hook.invoke_with_hooks(&2, Coercible.coerce(&1)))
|> case do
%{__stage__: {:error, reason}} -> {:error, reason}
ctx -> {:ok, ctx}
end
end
@spec execute(ctx, [t]) :: {:ok, ctx} | {:error, err}
def execute(ctx, interceptors) do
ctx
|> start()
|> enqueue(interceptors)
|> execute()
end
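# Illustrative sketch (assumes `Hook.invoke_with_hooks/2` runs the stages as described
# in the moduledoc): building a context, adding a single `:enter` stage, and executing it.
#
#   {:ok, ctx} =
#     %{count: 0}
#     |> Interceptor.start()
#     |> Interceptor.stage(fn ctx -> Map.update!(ctx, :count, &(&1 + 1)) end)
#     |> Interceptor.execute()
#
#   ctx.count
#   #=> 1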
@spec stage(ctx, Stage.stage_fun() | nil, Stage.stage_fun() | nil, Stage.stage_fun() | nil) ::
ctx
def stage(ctx, enter, leave \\ nil, error \\ nil) do
enqueue(ctx, [Stage.new(enter, leave, error)])
end
@spec start(map) :: ctx
def start(ctx) do
Map.merge(
%{
__queue__: Queue.new(),
__stage__: :enter
},
ctx
)
end
@spec error(ctx, err) :: ctx
def error(ctx, err) do
Map.put(ctx, :__stage__, {:error, err})
end
@spec terminate(ctx) :: ctx
def terminate(ctx) do
Map.update(
ctx,
:__queue__,
Queue.new(),
&Queue.terminate(&1)
)
end
@spec halt(ctx) :: ctx
def halt(ctx) do
Map.update(
ctx,
:__queue__,
Queue.new(),
&Queue.halt(&1)
)
end
@spec enqueue(ctx, [t]) :: ctx
def enqueue(ctx, interceptors) do
Map.update(
ctx,
:__queue__,
Queue.new(interceptors),
&Queue.enqueue(&1, interceptors)
)
end
@spec transform(Stage.stage_fun(), (ctx, any -> ctx)) :: Stage.stage_fun()
def transform(f, g) do
fn ctx ->
g.(ctx, f.(ctx))
end
end
@spec take_in(Stage.stage_fun(), path :: [term, ...]) :: Stage.stage_fun()
def take_in(f, path) do
fn ctx ->
f.(get_in(ctx, path))
end
end
@spec return_at(Stage.stage_fun(), path :: [term, ...]) :: Stage.stage_fun()
def return_at(f, path) do
transform(f, &put_in(&1, path, &2))
end
@spec whenever(Stage.stage_fun(), (ctx -> boolean)) :: Stage.stage_fun()
def whenever(f, pred) do
fn ctx ->
if pred.(ctx) do
f.(ctx)
else
ctx
end
end
end
@spec lens(Stage.stage_fun(), path :: [term, ...]) :: Stage.stage_fun()
def lens(f, path) do
f
|> take_in(path)
|> return_at(path)
end
@spec discard(Stage.stage_fun()) :: Stage.stage_fun()
def discard(f) do
transform(f, fn ctx, _ -> ctx end)
end
end
lib/interceptor.ex
defmodule Stompex.Validator do
@moduledoc """
The Validator module can be used for ensuring that
frames being sent to the STOMP server are valid.
In practice, the main reason for a frame being invalid is
the version of the protocol in use, as
you should not really be building frames directly
yourself.
"""
use Stompex.Constants
@valid_commands_10 ~W(CONNECTED MESSAGE RECEIPT ERROR CONNECT SEND SUBSCRIBE UNSUBSCRIBE BEGIN COMMIT ABORT ACK DISCONNECT)
@valid_commands_11 ~W(STOMP NACK)
@doc """
Checks whether or not the supplied command is
a valid STOMP command.
"""
@spec valid_command?(String.t, float) :: boolean
def valid_command?(cmd, version \\ 1.2)
def valid_command?(cmd, 1.0) do
cmd in @valid_commands_10
end
def valid_command?(cmd, _version) do
valid_command?(cmd, 1.0) || cmd in @valid_commands_11
end
@doc """
Given a header key and value, this function will
return a map containing the same key value pair,
but with the value converted to a different format
if needed based on the key.
This applies to all known special headers, such as
`content-length` which is actually an integer.
"""
@spec format_header(String.t, String.t) :: map
def format_header("content-length", value) when is_binary(value) do
%{ "content-length" => String.to_integer(value) }
end
def format_header("version", value) do
%{ "value" => String.to_float(value) }
end
def format_header(key, value) do
%{ key => value }
end
@doc """
Returns the header key required for ack'ing a
received frame. STOMP 1.2 simplified this by
changing the `message-id` header to simply `id`.
This function, given the current version in use,
will return the appropriate header to use.
Note this also applies for NACK frames.
"""
@spec ack_header(float) :: String.t
def ack_header(1.2), do: "id"
def ack_header(_version), do: "message-id"
@doc """
Converts one or more versions into floats. Frames
received from the STOMP server will be in a String
format. This function should be used to convert back
into something usable.
"""
@spec normalise_version([]) :: float
@spec normalise_version(nil) :: float
@spec normalise_version(String.t) :: float
@spec normalise_version([String.t]) :: [float]
def normalise_version([]), do: @default_version
def normalise_version(versions) when is_nil(versions) or versions == "", do: @default_version
def normalise_version(versions) when is_binary(versions), do: String.to_float(versions)
def normalise_version(versions) when is_list(versions) do
versions
|> Enum.map(fn(v) -> String.to_float(v) end)
|> Enum.max
end
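# Illustrative examples (not part of the original module), based on the clauses above:
#
#   normalise_version("1.1")          #=> 1.1
#   normalise_version(["1.0", "1.2"]) #=> 1.2
#   normalise_version(nil)            #=> @default_version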
end
lib/stompex/validator.ex
defmodule PhoenixComponents.View do
@moduledoc """
This module provides a way to easily generate helper functions to render
components.
The module can be included by other Phoenix.View modules to import components easily.
## Example
When working on a project with several components you can use this module in your `web/web.ex` definition.
defmodule MyApp.Web do
#...
def view do
quote do
use Phoenix.View, root: "web/templates"
use PhoenixComponents.View
# ...
end
end
end
After you include the module you can use the following helpers
defmodule MyApp.UserView do
use MyApp.Web, :view
import_component [:button, :jumbotron]
end
After you import a component into the view module you can use the component as follows
<div>
<%= button type: :primary do %>
Submit
<% end %>
</div>
Alternatively, you can also render a component without importing it by using the helper function `component`.
<div>
<%= component :button, type: :primary do %>
Submit
<% end %>
</div>
"""
import Phoenix.View, only: [render: 3]
import Phoenix.HTML, only: [html_escape: 1]
import PhoenixComponents.Helpers, only: [to_pascal_case: 1]
@doc """
Helper to render a component by name.
## Example
<%= component :button %>
"""
def component(name) do
do_component(name, "", [])
end
@doc """
Helper to render a component by name and specifying the content in a block.
## Example
<%= component :button do %>
Submit
<% end %>
"""
def component(name, [do: block]) do
do_component(name, block, [])
end
@doc """
Helper to render a component by name and a list of attributes.
Note that attributes are available in the template as the map @attrs.
## Example
<%= component :button, color: "red", size: "small", label: "Submit" %>
"""
def component(name, attrs) when is_list(attrs) do
do_component(name, "", attrs)
end
@doc """
Helper to render a component by name and a list of attributes.
Note that attributes are available in the template as the map @attrs.
## Example
<%= component :button, color: "red", size: "small" do %>
Submit
<% end %>
"""
def component(name, attrs, [do: block]) when is_list(attrs) do
do_component(name, block, attrs)
end
defp do_component(name, content, attrs) do
safe_content = html_escape(content)
app_module = Application.fetch_env!(:phoenix_components, :app_name)
name
|> to_pascal_case
|> prefix_module(Components)
|> prefix_module(app_module)
|> render("template.html", attrs: Enum.into(attrs, %{}), content: safe_content)
end
defp prefix_module(atom, base_module) do
Module.concat(base_module, atom)
end
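# Illustrative sketch (assumes `to_pascal_case(:button)` returns `Button` and the
# configured :app_name is `MyApp`): `component(:button, ...)` ends up rendering
# "template.html" from the `MyApp.Components.Button` view module.
#
#   component(:button, type: :primary)
#   # roughly equivalent to:
#   # Phoenix.View.render(MyApp.Components.Button, "template.html",
#   #   attrs: %{type: :primary}, content: html_escape(""))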
@doc """
Macro to generate helpers for components inside views.
## Example
import_components [:button, :jumbotron]
Then you can use the component directly
<%= button type: "submit" %>
"""
defmacro import_components(components) do
for name <- components do
quote do
def unquote(name)(), do: component(unquote(name))
def unquote(name)(attrs), do: component(unquote(name), attrs)
def unquote(name)(attrs, block), do: component(unquote(name), attrs, block)
end
end
end
defmacro __using__(_) do
quote do
import PhoenixComponents.View
end
end
end
lib/phoenix_components/view.ex
defmodule Plausible.Stats.Clickhouse do
use Plausible.Repo
alias Plausible.Stats.Query
alias Plausible.Clickhouse
@no_ref "Direct / None"
def compare_pageviews_and_visitors(site, query, {pageviews, visitors}) do
query = Query.shift_back(query)
{old_pageviews, old_visitors} = pageviews_and_visitors(site, query)
cond do
old_pageviews == 0 and pageviews > 0 ->
{100, 100}
old_pageviews == 0 and pageviews == 0 ->
{0, 0}
true ->
{
round((pageviews - old_pageviews) / old_pageviews * 100),
round((visitors - old_visitors) / old_visitors * 100)
}
end
end
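# Illustrative arithmetic (hypothetical counts): with 200 pageviews and 100 visitors in
# the previous period, and 300 pageviews and 120 visitors in the current one:
#
#   compare_pageviews_and_visitors(site, query, {300, 120})
#   #=> {50, 20}   # round((300 - 200) / 200 * 100) and round((120 - 100) / 100 * 100)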
def calculate_plot(site, %Query{step_type: "month"} = query) do
steps =
Enum.map((query.steps - 1)..0, fn shift ->
Timex.now(site.timezone)
|> Timex.beginning_of_month()
|> Timex.shift(months: -shift)
|> DateTime.to_date()
end)
groups =
Clickhouse.all(
from e in base_query(site, query),
select:
{fragment("toStartOfMonth(toTimeZone(?, ?)) as month", e.timestamp, ^site.timezone),
fragment("uniq(?) as visitors", e.user_id)},
group_by: fragment("month"),
order_by: fragment("month")
)
|> Enum.map(fn row -> {row["month"], row["visitors"]} end)
|> Enum.into(%{})
present_index =
Enum.find_index(steps, fn step ->
step == Timex.now(site.timezone) |> Timex.to_date() |> Timex.beginning_of_month()
end)
plot = Enum.map(steps, fn step -> groups[step] || 0 end)
labels = Enum.map(steps, fn step -> Timex.format!(step, "{ISOdate}") end)
{plot, labels, present_index}
end
def calculate_plot(site, %Query{step_type: "date"} = query) do
steps = Enum.into(query.date_range, [])
groups =
Clickhouse.all(
from e in base_query(site, query),
select:
{fragment("toDate(toTimeZone(?, ?)) as day", e.timestamp, ^site.timezone),
fragment("uniq(?) as visitors", e.user_id)},
group_by: fragment("day"),
order_by: fragment("day")
)
|> Enum.map(fn row -> {row["day"], row["visitors"]} end)
|> Enum.into(%{})
present_index =
Enum.find_index(steps, fn step -> step == Timex.now(site.timezone) |> Timex.to_date() end)
steps_to_show = if present_index, do: present_index + 1, else: Enum.count(steps)
plot = Enum.map(steps, fn step -> groups[step] || 0 end) |> Enum.take(steps_to_show)
labels = Enum.map(steps, fn step -> Timex.format!(step, "{ISOdate}") end)
{plot, labels, present_index}
end
def calculate_plot(site, %Query{step_type: "hour"} = query) do
steps = 0..23
groups =
Clickhouse.all(
from e in base_query(site, query),
select:
{fragment("toHour(toTimeZone(?, ?)) as hour", e.timestamp, ^site.timezone),
fragment("uniq(?) as visitors", e.user_id)},
group_by: fragment("hour"),
order_by: fragment("hour")
)
|> Enum.map(fn row -> {row["hour"], row["visitors"]} end)
|> Enum.into(%{})
now = Timex.now(site.timezone)
is_today = Timex.to_date(now) == query.date_range.first
present_index = is_today && Enum.find_index(steps, fn step -> step == now.hour end)
steps_to_show = if present_index, do: present_index + 1, else: Enum.count(steps)
labels =
Enum.map(steps, fn step ->
Timex.to_datetime(query.date_range.first)
|> Timex.shift(hours: step)
|> NaiveDateTime.to_iso8601()
end)
plot = Enum.map(steps, fn step -> groups[step] || 0 end) |> Enum.take(steps_to_show)
{plot, labels, present_index}
end
def calculate_plot(site, %Query{period: "realtime"} = query) do
query = %Query{query | period: "30m"}
groups =
Clickhouse.all(
from e in base_query(site, query),
select: {
fragment("dateDiff('minute', now(), ?) as relativeMinute", e.timestamp),
fragment("count(*) as pageviews")
},
group_by: fragment("relativeMinute"),
order_by: fragment("relativeMinute")
)
|> Enum.map(fn row -> {row["relativeMinute"], row["pageviews"]} end)
|> Enum.into(%{})
labels = Enum.into(-30..-1, [])
plot = Enum.map(labels, fn label -> groups[label] || 0 end)
{plot, labels, nil}
end
def bounce_rate(site, query) do
[res] =
Clickhouse.all(
from s in base_session_query(site, query),
select: {fragment("round(sum(is_bounce * sign) / sum(sign) * 100) as bounce_rate")}
)
res["bounce_rate"] || 0
end
def visit_duration(site, query) do
[res] =
Clickhouse.all(
from s in base_session_query(site, query),
select: {fragment("round(avg(duration * sign)) as visit_duration")}
)
res["visit_duration"] || 0
end
def total_pageviews(site, %Query{period: "realtime"} = query) do
query = %Query{query | period: "30m"}
[res] =
Clickhouse.all(
from e in base_session_query(site, query),
select: fragment("sum(sign * pageviews) as pageviews")
)
res["pageviews"]
end
def total_events(site, query) do
[res] =
Clickhouse.all(
from e in base_query(site, query),
select: fragment("count(*) as events")
)
res["events"]
end
def pageviews_and_visitors(site, query) do
[res] =
Clickhouse.all(
from e in base_query_w_sessions(site, query),
select:
{fragment("count(*) as pageviews"),
fragment("uniq(user_id) as visitors")}
)
{res["pageviews"], res["visitors"]}
end
def unique_visitors(site, query) do
[res] =
Clickhouse.all(
from e in base_query(site, query),
select: fragment("uniq(user_id) as visitors")
)
res["visitors"]
end
def top_referrers_for_goal(site, query, limit \\ 5) do
converted_sessions =
from(
from e in base_query(site, query),
select: %{session_id: e.session_id}
)
Plausible.Clickhouse.all(
from s in Plausible.ClickhouseSession,
join: cs in subquery(converted_sessions),
on: s.session_id == cs.session_id,
select:
{fragment("? as name", s.referrer_source), fragment("any(?) as url", s.referrer),
fragment("uniq(user_id) as count")},
where: s.referrer_source != "",
group_by: s.referrer_source,
order_by: [desc: fragment("count")],
limit: ^limit
)
|> Enum.map(fn ref ->
Map.update(ref, "url", nil, fn url -> url && URI.parse("http://" <> url).host end)
end)
end
def top_referrers(site, query, limit \\ 5, show_noref \\ false, include \\ []) do
referrers =
from(s in base_session_query(site, query),
group_by: s.referrer_source,
order_by: [desc: fragment("count")],
limit: ^limit
)
referrers = if show_noref do
referrers
else
from(s in referrers, where: s.referrer_source != "")
end
referrers = if query.filters["page"] do
page = query.filters["page"]
from(s in referrers, where: s.entry_page == ^page)
else
referrers
end
referrers =
if "bounce_rate" in include do
from(
s in referrers,
select:
{fragment("if(empty(?), ?, ?) as name", s.referrer_source, @no_ref, s.referrer_source), fragment("any(?) as url", s.referrer),
fragment("uniq(user_id) as count"),
fragment("round(sum(is_bounce * sign) / sum(sign) * 100) as bounce_rate"),
fragment("round(avg(duration * sign)) as visit_duration")}
)
else
from(
s in referrers,
select:
{fragment("if(empty(?), ?, ?) as name", s.referrer_source, @no_ref, s.referrer_source), fragment("any(?) as url", s.referrer),
fragment("uniq(user_id) as count")}
)
end
Clickhouse.all(referrers)
|> Enum.map(fn ref ->
Map.update(ref, "url", nil, fn url -> url && URI.parse("http://" <> url).host end)
end)
end
def conversions_from_referrer(site, query, referrer) do
converted_sessions =
from(
from e in base_query(site, query),
select: %{session_id: e.session_id}
)
[res] =
Plausible.Clickhouse.all(
from s in Plausible.ClickhouseSession,
join: cs in subquery(converted_sessions),
on: s.session_id == cs.session_id,
where: s.referrer_source == ^referrer,
select: fragment("uniq(user_id) as visitors")
)
res["visitors"]
end
def referrer_drilldown(site, query, referrer, include, limit) do
referrer = if referrer == @no_ref, do: "", else: referrer
q =
from(
s in base_session_query(site, query),
group_by: s.referrer,
where: s.referrer_source == ^referrer,
order_by: [desc: fragment("count")],
limit: ^limit
)
q =
if "bounce_rate" in include do
from(
s in q,
select:
{fragment("if(empty(?), ?, ?) as name", s.referrer, @no_ref, s.referrer),
fragment("uniq(user_id) as count"),
fragment("round(sum(is_bounce * sign) / sum(sign) * 100) as bounce_rate"),
fragment("round(avg(duration * sign)) as visit_duration")}
)
else
from(s in q,
select: {fragment("if(empty(?), ?, ?) as name", s.referrer, @no_ref, s.referrer), fragment("uniq(user_id) as count")}
)
end
referring_urls =
Clickhouse.all(q)
|> Enum.map(fn ref ->
url = if ref["name"] !== "", do: URI.parse("http://" <> ref["name"]).host
Map.put(ref, "url", url)
end)
if referrer == "Twitter" do
urls = Enum.map(referring_urls, & &1["name"])
tweets =
Repo.all(
from t in Plausible.Twitter.Tweet,
where: t.link in ^urls
)
|> Enum.group_by(& &1.link)
Enum.map(referring_urls, fn url ->
Map.put(url, "tweets", tweets[url["name"]])
end)
else
referring_urls
end
end
def referrer_drilldown_for_goal(site, query, referrer) do
converted_sessions =
from(
from e in base_query(site, query),
select: %{session_id: e.session_id}
)
Plausible.Clickhouse.all(
from s in Plausible.ClickhouseSession,
join: cs in subquery(converted_sessions),
on: s.session_id == cs.session_id,
select: {fragment("? as name", s.referrer), fragment("uniq(user_id) as count")},
where: s.referrer_source == ^referrer,
group_by: s.referrer,
order_by: [desc: fragment("count")],
limit: 100
)
end
def entry_pages(site, query, limit, include) do
q = from(
s in base_session_query(site, query),
group_by: s.entry_page,
order_by: [desc: fragment("count")],
limit: ^limit,
select:
{fragment("? as name", s.entry_page), fragment("uniq(?) as count", s.user_id)}
)
q = if query.filters["page"] do
page = query.filters["page"]
from(s in q, where: s.entry_page == ^page)
else
q
end
pages = Clickhouse.all(q)
if "bounce_rate" in include do
bounce_rates = bounce_rates_by_page_url(site, query)
Enum.map(pages, fn url -> Map.put(url, "bounce_rate", bounce_rates[url["name"]]) end)
else
pages
end
end
def top_pages(site, %Query{period: "realtime"} = query, limit, _include) do
Clickhouse.all(
from s in base_session_query(site, query),
select: {fragment("? as name", s.exit_page), fragment("uniq(?) as count", s.user_id)},
group_by: s.exit_page,
order_by: [desc: fragment("count")],
limit: ^limit
)
end
def top_pages(site, query, limit, include) do
q =
from(
e in base_query(site, query),
group_by: e.pathname,
order_by: [desc: fragment("count")],
limit: ^limit,
select:
{fragment("? as name", e.pathname), fragment("uniq(?) as count", e.user_id),
fragment("count(*) as pageviews")}
)
pages = Clickhouse.all(q)
if "bounce_rate" in include do
bounce_rates = bounce_rates_by_page_url(site, query)
Enum.map(pages, fn url -> Map.put(url, "bounce_rate", bounce_rates[url["name"]]) end)
else
pages
end
end
defp bounce_rates_by_page_url(site, query) do
Clickhouse.all(
from s in base_session_query(site, query),
select:
{s.entry_page, fragment("count(*) as total"),
fragment("round(sum(is_bounce * sign) / sum(sign) * 100) as bounce_rate")},
group_by: s.entry_page,
order_by: [desc: fragment("total")],
limit: 100
)
|> Enum.map(fn row -> {row["entry_page"], row["bounce_rate"]} end)
|> Enum.into(%{})
end
defp add_percentages(stat_list) do
total = Enum.reduce(stat_list, 0, fn %{"count" => count}, total -> total + count end)
Enum.map(stat_list, fn stat ->
Map.put(stat, "percentage", round(stat["count"] / total * 100))
end)
end
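# Illustrative arithmetic (hypothetical counts): each entry gains a "percentage"
# key relative to the summed counts.
#
#   add_percentages([%{"count" => 30}, %{"count" => 10}])
#   #=> [%{"count" => 30, "percentage" => 75}, %{"count" => 10, "percentage" => 25}]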
def top_screen_sizes(site, query) do
Clickhouse.all(
from e in base_query(site, query),
select: {fragment("? as name", e.screen_size), fragment("uniq(user_id) as count")},
group_by: e.screen_size,
where: e.screen_size != "",
order_by: [desc: fragment("count")]
)
|> add_percentages
end
def countries(site, query) do
Clickhouse.all(
from e in base_query(site, query),
select: {fragment("? as name", e.country_code), fragment("uniq(user_id) as count")},
group_by: e.country_code,
where: e.country_code != "\0\0",
order_by: [desc: fragment("count")]
)
|> Enum.map(fn stat ->
two_letter_code = stat["name"]
stat
|> Map.put("name", Plausible.Stats.CountryName.to_alpha3(two_letter_code))
|> Map.put("full_country_name", Plausible.Stats.CountryName.from_iso3166(two_letter_code))
end)
|> add_percentages
end
def browsers(site, query, limit \\ 5) do
Clickhouse.all(
from e in base_query(site, query),
select: {fragment("? as name", e.browser), fragment("uniq(user_id) as count")},
group_by: e.browser,
where: e.browser != "",
order_by: [desc: fragment("count")]
)
|> add_percentages
|> Enum.take(limit)
end
def operating_systems(site, query, limit \\ 5) do
Clickhouse.all(
from e in base_query(site, query),
select: {fragment("? as name", e.operating_system), fragment("uniq(user_id) as count")},
group_by: e.operating_system,
where: e.operating_system != "",
order_by: [desc: fragment("count")]
)
|> add_percentages
|> Enum.take(limit)
end
def current_visitors(site, query) do
[res] =
Clickhouse.all(
from s in base_query(site, query),
select: fragment("uniq(user_id) as visitors")
)
res["visitors"]
end
def has_pageviews?([]), do: false
def has_pageviews?(domains) when is_list(domains) do
res =
Clickhouse.all(
from e in "events",
select: e.timestamp,
where: fragment("? IN tuple(?)", e.domain, ^domains),
limit: 1
)
!Enum.empty?(res)
end
def has_pageviews?(site) do
res =
Clickhouse.all(
from e in "events",
select: e.timestamp,
where: e.domain == ^site.domain,
limit: 1
)
!Enum.empty?(res)
end
def goal_conversions(site, %Query{filters: %{"goal" => goal}} = query) when is_binary(goal) do
Clickhouse.all(
from e in base_query(site, query),
select: {e.name, fragment("uniq(user_id) as count"), fragment("count(*) as total_count")},
group_by: e.name,
order_by: [desc: fragment("count")]
)
end
def goal_conversions(site, query) do
goals = Repo.all(from g in Plausible.Goal, where: g.domain == ^site.domain)
query = if query.period == "realtime", do: %Query{query | period: "30m"}, else: query
(fetch_pageview_goals(goals, site, query) ++
fetch_event_goals(goals, site, query))
|> sort_conversions()
end
defp fetch_event_goals(goals, site, query) do
events =
Enum.map(goals, fn goal -> goal.event_name end)
|> Enum.filter(& &1)
if Enum.count(events) > 0 do
{first_datetime, last_datetime} = utc_boundaries(query, site.timezone)
q =
from(
e in "events",
where: e.domain == ^site.domain,
where: e.timestamp >= ^first_datetime and e.timestamp < ^last_datetime,
where: fragment("? IN tuple(?)", e.name, ^events),
select: {e.name, fragment("uniq(user_id) as count"), fragment("count(*) as total_count")},
group_by: e.name
)
q =
if query.filters["source"] do
filtered_sessions =
from(s in base_session_query(site, query), select: %{session_id: s.session_id})
from(
e in q,
join: cs in subquery(filtered_sessions),
on: e.session_id == cs.session_id
)
else
q
end
q =
if query.filters["page"] do
page = query.filters["page"]
from(e in q, where: e.pathname == ^page)
else
q
end
Clickhouse.all(q)
else
[]
end
end
defp fetch_pageview_goals(goals, site, query) do
pages =
Enum.map(goals, fn goal -> goal.page_path end)
|> Enum.filter(& &1)
if Enum.count(pages) > 0 do
{first_datetime, last_datetime} = utc_boundaries(query, site.timezone)
q =
from(
e in "events",
where: e.domain == ^site.domain,
where: e.timestamp >= ^first_datetime and e.timestamp < ^last_datetime,
where: fragment("? IN tuple(?)", e.pathname, ^pages),
group_by: e.pathname,
select:
{fragment("concat('Visit ', ?) as name", e.pathname),
fragment("uniq(user_id) as count"),
fragment("count(*) as total_count") }
)
q =
if query.filters["source"] do
filtered_sessions =
from(s in base_session_query(site, query), select: %{session_id: s.session_id})
from(
e in q,
join: cs in subquery(filtered_sessions),
on: e.session_id == cs.session_id
)
else
q
end
q =
if query.filters["page"] do
page = query.filters["page"]
from(e in q, where: e.pathname == ^page)
else
q
end
Clickhouse.all(q)
else
[]
end
end
defp sort_conversions(conversions) do
Enum.sort_by(conversions, fn conversion -> -conversion["count"] end)
end
defp base_query_w_sessions(site, query) do
{first_datetime, last_datetime} = utc_boundaries(query, site.timezone)
sessions_q = from(s in "sessions",
where: s.domain == ^site.domain,
where: s.timestamp >= ^first_datetime and s.start < ^last_datetime,
select: %{session_id: s.session_id}
)
sessions_q =
if query.filters["source"] do
source = query.filters["source"]
source = if source == @no_ref, do: "", else: source
from(s in sessions_q, where: s.referrer_source == ^source)
else
sessions_q
end
sessions_q = if query.filters["referrer"] do
ref = query.filters["referrer"]
from(s in sessions_q, where: s.referrer == ^ref)
else
sessions_q
end
q =
from(e in "events",
where: e.domain == ^site.domain,
where: e.timestamp >= ^first_datetime and e.timestamp < ^last_datetime
)
q = if query.filters["source"] || query.filters["referrer"] do
from(
e in q,
join: sq in subquery(sessions_q),
on: e.session_id == sq.session_id
)
else
q
end
if query.filters["page"] do
page = query.filters["page"]
from(e in q, where: e.pathname == ^page)
else
q
end
end
defp base_session_query(site, query) do
{first_datetime, last_datetime} = utc_boundaries(query, site.timezone)
q =
from(s in "sessions",
where: s.domain == ^site.domain,
where: s.timestamp >= ^first_datetime and s.start < ^last_datetime
)
q =
if query.filters["source"] do
source = query.filters["source"]
source = if source == @no_ref, do: "", else: source
from(s in q, where: s.referrer_source == ^source)
else
q
end
q =
if query.filters["page"] do
page = query.filters["page"]
from(s in q, where: s.entry_page == ^page)
else
q
end
if query.filters["referrer"] do
ref = query.filters["referrer"]
from(e in q, where: e.referrer == ^ref)
else
q
end
end
defp base_query(site, query) do
{first_datetime, last_datetime} = utc_boundaries(query, site.timezone)
{goal_event, path} = event_name_for_goal(query)
q =
from(e in "events",
where: e.domain == ^site.domain,
where: e.timestamp >= ^first_datetime and e.timestamp < ^last_datetime
)
q =
if query.filters["source"] do
source = query.filters["source"]
source = if source == @no_ref, do: "", else: source
from(e in q, where: e.referrer_source == ^source)
else
q
end
q =
if query.filters["referrer"] do
ref = query.filters["referrer"]
from(e in q, where: e.referrer == ^ref)
else
q
end
q =
if query.filters["page"] do
page = query.filters["page"]
from(e in q, where: e.pathname == ^page)
else
q
end
q =
if path do
from(e in q, where: e.pathname == ^path)
else
q
end
if goal_event do
from(e in q, where: e.name == ^goal_event)
else
from(e in q, where: e.name == "pageview")
end
end
defp utc_boundaries(%Query{period: "30m"}, _timezone) do
last_datetime = NaiveDateTime.utc_now()
first_datetime = last_datetime |> Timex.shift(minutes: -30)
{first_datetime, last_datetime}
end
defp utc_boundaries(%Query{period: "realtime"}, _timezone) do
last_datetime = NaiveDateTime.utc_now()
first_datetime = last_datetime |> Timex.shift(minutes: -5)
{first_datetime, last_datetime}
end
defp utc_boundaries(%Query{date_range: date_range}, timezone) do
{:ok, first} = NaiveDateTime.new(date_range.first, ~T[00:00:00])
first_datetime =
Timex.to_datetime(first, timezone)
|> Timex.Timezone.convert("UTC")
{:ok, last} = NaiveDateTime.new(date_range.last |> Timex.shift(days: 1), ~T[00:00:00])
last_datetime =
Timex.to_datetime(last, timezone)
|> Timex.Timezone.convert("UTC")
{first_datetime, last_datetime}
end
defp event_name_for_goal(query) do
case query.filters["goal"] do
"Visit " <> page ->
{"pageview", page}
goal when is_binary(goal) ->
{goal, nil}
_ ->
{nil, nil}
end
end
end
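# Illustrative sketch, not part of the original file: the goal filter string is
# parsed into an event name and an optional pathname, mirroring
# event_name_for_goal/1 above. A "Visit /path" goal becomes a pageview filtered
# on that path; any other goal string is treated as a custom event name.
# GoalFilterSketch is a hypothetical module name.
defmodule GoalFilterSketch do
  def event_and_path(%{"goal" => "Visit " <> page}), do: {"pageview", page}
  def event_and_path(%{"goal" => goal}) when is_binary(goal), do: {goal, nil}
  def event_and_path(_filters), do: {nil, nil}
end

# GoalFilterSketch.event_and_path(%{"goal" => "Visit /pricing"}) #=> {"pageview", "/pricing"}
# GoalFilterSketch.event_and_path(%{"goal" => "Signup"})         #=> {"Signup", nil}
# GoalFilterSketch.event_and_path(%{})                           #=> {nil, nil}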
|
lib/plausible/stats/clickhouse.ex
| 0.616243
| 0.411879
|
clickhouse.ex
|
starcoder
|
defmodule OMG.ChildChain.Fees.JSONSingleSpecParser do
@moduledoc """
Parsing module for a single fee spec
"""
require Logger
# the fee spec for a specific type/token is missing keys
@type parsing_error() ::
:invalid_fee_spec
# the fee amount is invalid (must be > 0)
| :invalid_fee
# the subunit to unit is invalid (must be > 0)
| :invalid_subunit_to_unit
# the pegged amount is invalid (must be > 0)
| :invalid_pegged_amount
# the pegged currency is invalid (must be a string)
| :invalid_pegged_currency
# the pegged subunit to unit is invalid (must be > 0)
| :invalid_pegged_subunit_to_unit
# the updated at date is invalid (wrong date format)
| :invalid_timestamp
# the token address is invalid (must be a valid Ethereum address)
| :bad_address_encoding
# pegged fields must either be all nil or all not nil
| :invalid_pegged_fields
# at the moment only "fixed" fee type is supported
| :unsupported_fee_type
@doc """
Parses and validates a single fee spec
"""
@spec parse({binary(), map()}) :: {:ok, map()} | {:error, parsing_error()}
def parse(
{token,
%{
"amount" => fee,
"subunit_to_unit" => subunit_to_unit,
"pegged_amount" => pegged_amount,
"pegged_currency" => pegged_currency,
"pegged_subunit_to_unit" => pegged_subunit_to_unit,
"updated_at" => updated_at,
"type" => fee_type
}}
) do
# defensive code against user input
with {:ok, fee} <- validate_positive_amount(fee, :invalid_fee),
{:ok, addr} <- decode_address(token),
{:ok, subunit_to_unit} <- validate_positive_amount(subunit_to_unit, :invalid_subunit_to_unit),
{:ok, pegged_amount} <- validate_optional_positive_amount(pegged_amount, :invalid_pegged_amount),
{:ok, pegged_currency} <- validate_pegged_currency(pegged_currency),
{:ok, pegged_subunit_to_unit} <-
validate_optional_positive_amount(pegged_subunit_to_unit, :invalid_pegged_subunit_to_unit),
:ok <- validate_pegged_fields(pegged_currency, pegged_amount, pegged_subunit_to_unit),
{:ok, updated_at} <- validate_updated_at(updated_at),
{:ok, fee_type} <- validate_fee_type(fee_type) do
{:ok,
%{
token: addr,
amount: fee,
subunit_to_unit: subunit_to_unit,
pegged_amount: pegged_amount,
pegged_currency: pegged_currency,
pegged_subunit_to_unit: pegged_subunit_to_unit,
type: fee_type,
updated_at: updated_at
}}
end
end
def parse(_), do: {:error, :invalid_fee_spec}
defp validate_positive_amount(amount, _error) when is_integer(amount) and amount > 0, do: {:ok, amount}
defp validate_positive_amount(_amount, error), do: {:error, error}
defp validate_optional_positive_amount(nil, _error), do: {:ok, nil}
defp validate_optional_positive_amount(amount, _error) when is_integer(amount) and amount > 0, do: {:ok, amount}
defp validate_optional_positive_amount(_amount, error), do: {:error, error}
defp validate_pegged_currency(nil), do: {:ok, nil}
defp validate_pegged_currency(pegged_currency) when is_binary(pegged_currency), do: {:ok, pegged_currency}
defp validate_pegged_currency(_pegged_currency), do: {:error, :invalid_pegged_currency}
defp validate_pegged_fields(nil, nil, nil), do: :ok
defp validate_pegged_fields(currency, amount, subunit_to_unit)
when not is_nil(currency) and not is_nil(amount) and not is_nil(subunit_to_unit) do
:ok
end
defp validate_pegged_fields(_, _, _), do: {:error, :invalid_pegged_fields}
defp validate_updated_at(updated_at) do
case DateTime.from_iso8601(updated_at) do
{:ok, %DateTime{} = date_time, _} -> {:ok, date_time}
_ -> {:error, :invalid_timestamp}
end
end
defp decode_address("0x" <> data), do: decode_address(data)
defp decode_address(data) do
case Base.decode16(data, case: :mixed) do
{:ok, address} when byte_size(address) == 20 ->
{:ok, address}
_ ->
{:error, :bad_address_encoding}
end
end
defp validate_fee_type("fixed"), do: {:ok, :fixed}
defp validate_fee_type(_), do: {:error, :unsupported_fee_type}
end
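# Illustrative usage sketch, not part of the original file. FeeSpecParserExample
# is a hypothetical module and the token address, amounts and timestamp below
# are placeholder values; it assumes the parser module above is compiled.
defmodule FeeSpecParserExample do
  alias OMG.ChildChain.Fees.JSONSingleSpecParser, as: Parser

  # Builds a minimal valid fee spec tuple and runs it through the parser.
  # Expected result: {:ok, %{token: <<0::160>>, amount: 100, type: :fixed, ...}}
  def run do
    spec =
      {"0x0000000000000000000000000000000000000000",
       %{
         "amount" => 100,
         "subunit_to_unit" => 1_000_000_000_000_000_000,
         "pegged_amount" => nil,
         "pegged_currency" => nil,
         "pegged_subunit_to_unit" => nil,
         "updated_at" => "2019-01-01T10:10:00+00:00",
         "type" => "fixed"
       }}

    Parser.parse(spec)
  end
end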
|
apps/omg_child_chain/lib/omg_child_chain/fees/json_single_spec_parser.ex
| 0.832203
| 0.417123
|
json_single_spec_parser.ex
|
starcoder
|
defmodule Wallaby.Phantom do
@moduledoc """
Wallaby driver for PhantomJS.
## Usage
Start a Wallaby Session using this driver with the following command:
```
{:ok, session} = Wallaby.start_session()
```
## Notes
This driver requires PhantomJS be installed in your path. You can install PhantomJS through NPM or your package manager of choice:
```
$ npm install -g phantomjs-prebuilt
```
If you need to specify a specific PhantomJS you can pass the path in the configuration:
```
config :wallaby, phantomjs: "node_modules/.bin/phantomjs"
```
You can also pass arguments to PhantomJS through the `phantomjs_args` config setting, e.g.:
```
config :wallaby, phantomjs_args: "--webdriver-logfile=phantomjs.log"
```
"""
use Supervisor
alias Wallaby.Phantom.Driver
alias Wallaby.DependencyError
@behaviour Wallaby.Driver
@pool_name Wallaby.ServerPool
@doc false
def start_link(opts \\ []) do
Supervisor.start_link(__MODULE__, :ok, opts)
end
def validate do
cond do
configured_phantom_js()
|> Path.expand
|> System.find_executable ->
:ok
System.find_executable("phantomjs") ->
:ok
true ->
exception = DependencyError.exception """
Wallaby can't find phantomjs. Make sure you have phantomjs installed
and included in your path, or that your `config :wallaby, :phantomjs`
setting points to a valid phantomjs executable.
"""
{:error, exception}
end
end
def init(:ok) do
children = [
:poolboy.child_spec(@pool_name, poolboy_config(), []),
worker(Wallaby.Driver.LogStore, [[]]),
]
supervise(children, strategy: :one_for_one)
end
@doc false
def capabilities(opts) do
default_capabilities()
|> Map.merge(user_agent_capability(opts[:user_agent]))
|> Map.merge(custom_headers_capability(opts[:custom_headers]))
end
@doc false
def default_capabilities do
%{
javascriptEnabled: true,
loadImages: false,
version: "",
rotatable: false,
takesScreenshot: true,
cssSelectorsEnabled: true,
browserName: "phantomjs",
nativeEvents: false,
platform: "ANY",
}
end
@doc false
def start_session(opts) do
server = :poolboy.checkout(@pool_name, true, :infinity)
Wallaby.Phantom.Driver.create(server, opts)
end
@doc false
def end_session(%Wallaby.Session{server: server} = session) do
Driver.execute_script(session, "localStorage.clear()", [],
check_logs: false)
Driver.delete(session)
:poolboy.checkin(Wallaby.ServerPool, server)
end
def blank_page?(session) do
case current_url(session) do
{:ok, url} -> url == "about:blank"
_ -> false
end
end
@doc false
defdelegate accept_alert(session, open_dialog_fn), to: Driver
@doc false
defdelegate accept_confirm(session, open_dialog_fn), to: Driver
@doc false
defdelegate accept_prompt(session, input_value, open_dialog_fn), to: Driver
@doc false
defdelegate cookies(session), to: Driver
@doc false
defdelegate current_path(session), to: Driver
@doc false
defdelegate current_url(session), to: Driver
@doc false
defdelegate dismiss_confirm(session, open_dialog_fn), to: Driver
@doc false
defdelegate dismiss_prompt(session, open_dialog_fn), to: Driver
@doc false
defdelegate get_window_size(session), to: Driver
@doc false
defdelegate page_title(session), to: Driver
@doc false
defdelegate page_source(session), to: Driver
@doc false
defdelegate set_cookie(session, key, value), to: Driver
@doc false
defdelegate set_window_size(session, width, height), to: Driver
@doc false
defdelegate visit(session, url), to: Driver
@doc false
defdelegate attribute(element, name), to: Driver
@doc false
defdelegate click(element), to: Driver
@doc false
defdelegate clear(element), to: Driver
@doc false
defdelegate displayed(element), to: Driver
@doc false
defdelegate selected(element), to: Driver
@doc false
defdelegate set_value(element, value), to: Driver
@doc false
defdelegate text(element), to: Driver
@doc false
defdelegate execute_script(session_or_element, script, args), to: Driver
@doc false
defdelegate find_elements(session_or_element, compiled_query), to: Driver
@doc false
defdelegate send_keys(session_or_element, keys), to: Driver
@doc false
defdelegate take_screenshot(session_or_element), to: Driver
defdelegate log(session_or_element), to: Driver
defdelegate parse_log(log), to: Wallaby.Phantom.Logger
@doc false
def user_agent do
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/538.1 (KHTML, like Gecko) PhantomJS/2.1.1 Safari/538.1"
end
@doc false
def user_agent_capability(nil), do: %{}
def user_agent_capability(ua) do
%{"phantomjs.page.settings.userAgent" => ua}
end
@doc false
def custom_headers_capability(nil), do: %{}
def custom_headers_capability(ch) do
Enum.reduce(ch, %{}, fn ({k, v}, acc) ->
Map.merge(acc, %{"phantomjs.page.customHeaders.#{k}" => v})
end)
end
@doc false
def pool_size do
Application.get_env(:wallaby, :pool_size) || default_pool_size()
end
defp poolboy_config do
[name: {:local, @pool_name},
worker_module: Wallaby.Phantom.Server,
size: pool_size(),
max_overflow: 0]
end
defp default_pool_size do
:erlang.system_info(:schedulers_online)
end
defp configured_phantom_js do
Application.get_env(:wallaby, :phantomjs, "phantomjs")
end
end
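# Illustrative sketch, not part of the original file: how capabilities/1 above
# merges the defaults with user-agent and custom-header options. The user agent
# string and header values are placeholders.
#
#   Wallaby.Phantom.capabilities(
#     user_agent: "MyCrawler/1.0",
#     custom_headers: %{"X-Request-Id" => "abc123"}
#   )
#   #=> %{
#   #     browserName: "phantomjs",
#   #     javascriptEnabled: true,
#   #     ...
#   #     "phantomjs.page.settings.userAgent" => "MyCrawler/1.0",
#   #     "phantomjs.page.customHeaders.X-Request-Id" => "abc123"
#   #   }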
|
lib/wallaby/phantom.ex
| 0.765637
| 0.609989
|
phantom.ex
|
starcoder
|
defmodule SpandexPhoenix.Telemetry do
@moduledoc """
Defines the `:telemetry` handlers to attach tracing to Phoenix Telemetry.
See `install/1` documentation for usage.
"""
alias Spandex.SpanContext
@doc """
Installs `:telemetry` event handlers for Phoenix Telemetry events.
`Plug.Telemetry` must be included in your endpoint or router for traces to
function properly. The Phoenix 1.5 installer includes the plug in your endpoint
automatically; if you upgraded an existing app to Phoenix 1.5, you must add the
plug yourself.
### Options
* `:tracer` (`Atom`)
The tracing module to be used for traces in your Endpoint.
Default: `Application.get_env(:spandex_phoenix, :tracer)`
* `:endpoint_telemetry_prefix` (`Atom`)
The telemetry prefix passed to `Plug.Telemetry` in the endpoint you want to trace.
Default: `[:phoenix, :endpoint]`
* `:filter_traces` (`fun((Plug.Conn.t()) -> boolean)`)
A function that takes a conn and returns true if a trace should be created
for that conn, and false if it should be ignored.
Default: `&SpandexPhoenix.trace_all_requests/1`
* `:span_name` (`String.t()`)
The name for the span this module creates.
Default: `"request"`
* `:span_opts` (`Spandex.Tracer.opts()`)
A list of span options to pass during the creation or continuation of
the top level span.
Default: `[type: :web]`
* `:customize_metadata` (`fun((Plug.Conn.t()) -> Keyword.t())`)
A function that takes a conn and returns a keyword list of metadata.
Default: `&SpandexPhoenix.default_metadata/1`
"""
def install(opts \\ []) do
unless function_exported?(:telemetry, :attach_many, 4) do
raise "Cannot install telemetry events without `:telemetry` dependency." <>
"Did you mean to use the Phoenix Instrumenters integration instead?"
end
{filter_traces, opts} = Keyword.pop(opts, :filter_traces, &SpandexPhoenix.trace_all_requests/1)
{customize_metadata, opts} = Keyword.pop(opts, :customize_metadata, &SpandexPhoenix.default_metadata/1)
{endpoint_prefix, opts} = Keyword.pop(opts, :endpoint_telemetry_prefix, [:phoenix, :endpoint])
{span_name, opts} = Keyword.pop(opts, :span_name, "request")
{span_opts, opts} = Keyword.pop(opts, :span_opts, type: :web)
{tracer, opts} =
Keyword.pop_lazy(opts, :tracer, fn ->
Application.get_env(:spandex_phoenix, :tracer)
end)
unless tracer do
raise ArgumentError, "`:tracer` option must be provided or configured in `:spandex_phoenix`"
end
unless Enum.empty?(opts) do
raise ArgumentError, "Unknown options: #{inspect(Keyword.keys(opts))}"
end
opts = %{
customize_metadata: customize_metadata,
filter_traces: filter_traces,
span_name: span_name,
span_opts: span_opts,
tracer: tracer
}
endpoint_events = [
endpoint_prefix ++ [:start],
endpoint_prefix ++ [:stop]
]
:telemetry.attach_many("spandex-endpoint-telemetry", endpoint_events, &__MODULE__.handle_endpoint_event/4, opts)
router_events = [
[:phoenix, :router_dispatch, :start],
[:phoenix, :router_dispatch, :stop],
[:phoenix, :router_dispatch, :exception]
]
:telemetry.attach_many("spandex-router-telemetry", router_events, &__MODULE__.handle_router_event/4, opts)
end
@doc false
def handle_endpoint_event(event, _, %{conn: conn}, %{tracer: tracer} = config) do
if trace?(conn, config) do
case List.last(event) do
:start -> start_trace(tracer, conn, config)
:stop -> finish_trace(tracer, conn, config)
end
end
end
defp trace?(conn, %{filter_traces: filter_traces}), do: filter_traces.(conn)
defp start_trace(tracer, conn, %{span_name: name, span_opts: opts}) do
case tracer.distributed_context(conn) do
{:ok, %SpanContext{} = span} ->
tracer.continue_trace(name, span, opts)
{:error, _} ->
tracer.start_trace(name, opts)
end
end
defp finish_trace(tracer, conn, %{customize_metadata: customize_metadata}) do
conn
|> customize_metadata.()
|> tracer.update_top_span()
tracer.finish_trace()
end
@doc false
def handle_router_event([:phoenix, :router_dispatch, :start], _, meta, %{tracer: tracer}) do
if phx_controller?(meta) do
tracer.start_span("phx.router_dispatch", resource: "#{meta.plug}.#{meta.plug_opts}")
end
end
def handle_router_event([:phoenix, :router_dispatch, :stop], _, meta, %{tracer: tracer}) do
if phx_controller?(meta) do
tracer.finish_span()
end
end
def handle_router_event([:phoenix, :router_dispatch, :exception], _, meta, %{tracer: tracer}) do
# :phoenix :router_dispatch :exception has far fewer keys in its metadata
# (just `kind`, `error/reason`, and `stacktrace`)
# so we can't use `phx_controller?` or `filter_traces` to detect if we are tracing
if tracer.current_trace_id() do
# phx 1.5.4 has a breaking change that switches `:error` to `:reason`:
# https://github.com/phoenixframework/phoenix/compare/v1.5.3...v1.5.4#diff-c474801b0de930e7c3fd5808258094655afe8b008149ff17bee4caaf0d85a154R368
reason = meta[:reason] || meta[:error]
exception =
case meta[:kind] do
:error -> Exception.normalize(:error, reason, meta.stacktrace)
kind -> Exception.normalize(:error, {kind, reason}, meta.stacktrace)
end
SpandexPhoenix.mark_span_as_error(tracer, exception, meta.stacktrace)
tracer.finish_span()
end
end
# It's possible the router handed this request to a non-controller plug;
# we only handle controller actions though, which is what the `is_atom` clauses are testing for
defp phx_controller?(meta) do
is_atom(meta[:plug]) and is_atom(meta[:plug_opts])
end
end
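# Illustrative wiring sketch, not part of the original file: install/1 is
# typically called once at application start, before the endpoint begins
# serving requests. MyApp, MyApp.Tracer and MyAppWeb.Endpoint are hypothetical
# names, as is the health-check filter.
defmodule MyApp.Application do
  use Application

  @impl true
  def start(_type, _args) do
    SpandexPhoenix.Telemetry.install(
      tracer: MyApp.Tracer,
      span_name: "web.request",
      filter_traces: fn conn -> conn.request_path != "/health" end
    )

    children = [MyAppWeb.Endpoint]
    Supervisor.start_link(children, strategy: :one_for_one, name: MyApp.Supervisor)
  end
end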
|
lib/spandex_phoenix/telemetry.ex
| 0.846467
| 0.500305
|
telemetry.ex
|
starcoder
|
defmodule PromEx.Plugins.Beam do
@moduledoc """
Telemetry metrics for the BEAM.
This plugin captures metrics regarding the Erlang Virtual Machine (i.e. the BEAM). Specifically, it captures metrics
regarding the CPU topology, system limits, VM feature support, scheduler information, memory utilization, distribution
traffic, and other internal metrics.
This plugin supports the following options:
- `poll_rate`: This option is OPTIONAL and is the rate at which poll metrics are refreshed (default is 5 seconds).
This plugin exposes the following metric groups:
- `:beam_memory_polling_metrics`
- `:beam_internal_polling_metrics`
- `:beam_cpu_topology_manual_metrics`
- `:beam_system_limits_manual_metrics`
- `:beam_system_info_manual_metrics`
- `:beam_scheduler_manual_metrics`
To use plugin in your application, add the following to your PromEx module:
```
defmodule MyApp.PromEx do
use PromEx, otp_app: :web_app
@impl true
def plugins do
[
...
PromEx.Plugins.Beam
]
end
@impl true
def dashboards do
[
...
{:prom_ex, "beam.json"}
]
end
end
```
This plugin exposes manual metrics so be sure to configure the PromEx `:delay_manual_start` as necessary.
"""
use PromEx.Plugin
@memory_event [:prom_ex, :plugin, :beam, :memory]
@impl true
def polling_metrics(opts) do
poll_rate = Keyword.get(opts, :poll_rate, 5_000)
otp_app = Keyword.fetch!(opts, :otp_app)
metric_prefix = PromEx.metric_prefix(otp_app, :beam)
# TODO: Investigate Microstate accounting metrics
# http://erlang.org/doc/man/erlang.html#statistics_microstate_accounting
# TODO: Add a metrics group for allocators
# https://erlang.org/doc/man/erts_alloc.html
# :erlang.system_info(:allocator)
[
memory_metrics(metric_prefix, poll_rate),
mnesia_metrics(metric_prefix, poll_rate),
distribution_metrics(metric_prefix, poll_rate),
beam_internal_metrics(metric_prefix, poll_rate)
]
end
@impl true
def manual_metrics(opts) do
otp_app = Keyword.fetch!(opts, :otp_app)
metric_prefix = PromEx.metric_prefix(otp_app, :beam)
[
beam_cpu_topology_info(metric_prefix),
beam_system_limits_info(metric_prefix),
beam_system_info(metric_prefix),
beam_scheduler_info(metric_prefix)
]
end
defp distribution_metrics(_metric_prefix, poll_rate) do
Polling.build(
:beam_distribution_polling_metrics,
poll_rate,
{__MODULE__, :execute_distribution_metrics, []},
[]
)
end
defp mnesia_metrics(_metric_prefix, poll_rate) do
Polling.build(
:beam_mnesia_polling_metrics,
poll_rate,
{__MODULE__, :execute_mnesia_metrics, []},
[]
)
end
defp beam_internal_metrics(metric_prefix, poll_rate) do
Polling.build(
:beam_internal_polling_metrics,
poll_rate,
{__MODULE__, :execute_internal_metrics, []},
[
last_value(
metric_prefix ++ [:stats, :active_task, :count],
event_name: [:prom_ex, :plugin, :beam, :active_task, :count],
description: "The number of processes and ports that are ready to run, or are currently running.",
measurement: :count,
tags: [:type]
),
last_value(
metric_prefix ++ [:stats, :run_queue, :count],
event_name: [:prom_ex, :plugin, :beam, :run_queue, :count],
description: "The number of processes and ports that are ready to run and are in the run queue.",
measurement: :count,
tags: [:type]
),
last_value(
metric_prefix ++ [:stats, :context_switch, :count],
event_name: [:prom_ex, :plugin, :beam, :context_switch, :count],
description: "The total number of context switches since the system started.",
measurement: :count
),
last_value(
metric_prefix ++ [:stats, :reduction, :count],
event_name: [:prom_ex, :plugin, :beam, :reduction, :count],
description: "The total number of reductions since the system started.",
measurement: :count
),
last_value(
metric_prefix ++ [:stats, :gc, :count],
event_name: [:prom_ex, :plugin, :beam, :gc, :count],
description: "The total number of garbage collections since the system started.",
measurement: :count
),
last_value(
metric_prefix ++ [:stats, :gc, :reclaimed, :bytes],
event_name: [:prom_ex, :plugin, :beam, :gc, :bytes_reclaimed],
description: "The total number of bytes reclaimed since the system started.",
measurement: :count,
unit: :byte
),
last_value(
metric_prefix ++ [:stats, :port_io, :byte, :count],
event_name: [:prom_ex, :plugin, :beam, :port_io, :count],
description: "The total number of bytes sent and received through ports since the system started.",
measurement: :count,
tags: [:type],
unit: :byte
),
last_value(
metric_prefix ++ [:stats, :uptime, :milliseconds, :count],
event_name: [:prom_ex, :plugin, :beam, :uptime, :count],
description: "The total number of wall clock milliseconds that have passed since the system started.",
measurement: :count,
unit: :millisecond
),
last_value(
metric_prefix ++ [:stats, :port, :count],
event_name: [:prom_ex, :plugin, :beam, :port, :count],
description: "A count of how many ports are currently active.",
measurement: :count
),
last_value(
metric_prefix ++ [:stats, :process, :count],
event_name: [:prom_ex, :plugin, :beam, :process, :count],
description: "A count of how many Erlang processes are currently running.",
measurement: :count
),
last_value(
metric_prefix ++ [:stats, :atom, :count],
event_name: [:prom_ex, :plugin, :beam, :atom, :count],
description: "A count of how many atoms are currently allocated.",
measurement: :count
),
last_value(
metric_prefix ++ [:stats, :ets, :count],
event_name: [:prom_ex, :plugin, :beam, :ets, :count],
description: "A count of how many ETS tables currently exist.",
measurement: :count
)
]
)
end
defp beam_system_info(metric_prefix) do
Manual.build(
:beam_system_info_manual_metrics,
{__MODULE__, :execute_system_info, []},
[
last_value(
metric_prefix ++ [:system, :version, :info],
event_name: [:prom_ex, :plugin, :beam, :version],
description: "The OTP release major version.",
measurement: :version
),
last_value(
metric_prefix ++ [:system, :smp_support, :info],
event_name: [:prom_ex, :plugin, :beam, :smp_support],
description: "Whether the BEAM instance has been compiled with SMP support.",
measurement: :enabled
),
last_value(
metric_prefix ++ [:system, :thread_support, :info],
event_name: [:prom_ex, :plugin, :beam, :thread_support],
description: "Whether the BEAM instance has been compiled with threading support.",
measurement: :enabled
),
last_value(
metric_prefix ++ [:system, :time_correction_support, :info],
event_name: [:prom_ex, :plugin, :beam, :time_correction_support],
description: "Whether the BEAM instance has time correction support.",
measurement: :enabled
),
last_value(
metric_prefix ++ [:system, :word_size_bytes, :info],
event_name: [:prom_ex, :plugin, :beam, :word_size_bytes],
description: "The size of Erlang term words in bytes.",
measurement: :size
)
]
)
end
defp beam_scheduler_info(metric_prefix) do
Manual.build(
:beam_scheduler_manual_metrics,
{__MODULE__, :execute_scheduler_info, []},
[
last_value(
metric_prefix ++ [:system, :dirty_cpu_schedulers, :info],
event_name: [:prom_ex, :plugin, :beam, :dirty_cpu_schedulers],
description: "The total number of dirty CPU scheduler threads used by the BEAM.",
measurement: :quantity
),
last_value(
metric_prefix ++ [:system, :dirty_cpu_schedulers_online, :info],
event_name: [:prom_ex, :plugin, :beam, :dirty_cpu_schedulers_online],
description: "The total number of dirty CPU schedulers that are online.",
measurement: :quantity
),
last_value(
metric_prefix ++ [:system, :dirty_io_schedulers, :info],
event_name: [:prom_ex, :plugin, :beam, :dirty_io_schedulers],
description: "The total number of dirty I/O schedulers used to execute I/O bound native functions.",
measurement: :quantity
),
last_value(
metric_prefix ++ [:system, :schedulers, :info],
event_name: [:prom_ex, :plugin, :beam, :schedulers],
description: "The number of scheduler threads in use by the BEAM.",
measurement: :quantity
),
last_value(
metric_prefix ++ [:system, :schedulers_online, :info],
event_name: [:prom_ex, :plugin, :beam, :schedulers_online],
description: "The number of scheduler threads that are online.",
measurement: :quantity
)
]
)
end
defp beam_cpu_topology_info(metric_prefix) do
Manual.build(
:beam_cpu_topology_manual_metrics,
{__MODULE__, :execute_cpu_topology_info, []},
[
last_value(
metric_prefix ++ [:system, :logical_processors, :info],
event_name: [:prom_ex, :plugin, :beam, :logical_processors],
description: "The total number of logical processors on the host machine.",
measurement: :quantity
),
last_value(
metric_prefix ++ [:system, :logical_processors_available, :info],
event_name: [:prom_ex, :plugin, :beam, :logical_processors_available],
description: "The total number of logical processors available to the BEAM.",
measurement: :quantity
),
last_value(
metric_prefix ++ [:system, :logical_processors_online, :info],
event_name: [:prom_ex, :plugin, :beam, :logical_processors_online],
description: "The total number of logical processors online on the host machine.",
measurement: :quantity
)
]
)
end
defp beam_system_limits_info(metric_prefix) do
Manual.build(
:beam_system_limits_manual_metrics,
{__MODULE__, :execute_system_limits_info, []},
[
last_value(
metric_prefix ++ [:system, :ets_limit, :info],
event_name: [:prom_ex, :plugin, :beam, :ets_limit],
description:
"The maximum number of ETS tables allowed (this is partially obsolete given that the number of ETS tables is limited by available memory).",
measurement: :limit
),
last_value(
metric_prefix ++ [:system, :port_limit, :info],
event_name: [:prom_ex, :plugin, :beam, :port_limit],
description: "The maximum number of ports that can simultaneously exist on the BEAM instance.",
measurement: :limit
),
last_value(
metric_prefix ++ [:system, :process_limit, :info],
event_name: [:prom_ex, :plugin, :beam, :process_limit],
description: "The maximum number of processes that can simultaneously exist on the BEAM instance.",
measurement: :limit
),
last_value(
metric_prefix ++ [:system, :thread_pool_size, :info],
event_name: [:prom_ex, :plugin, :beam, :thread_pool_size],
description: "The number of async threads in the async threads pool used for async driver calls.",
measurement: :size
),
last_value(
metric_prefix ++ [:system, :atom_limit, :info],
event_name: [:prom_ex, :plugin, :beam, :atom_limit],
description: "The maximum number of atoms allowed.",
measurement: :limit
)
]
)
end
defp memory_metrics(metric_prefix, poll_rate) do
Polling.build(
:beam_memory_polling_metrics,
poll_rate,
{__MODULE__, :execute_memory_metrics, []},
[
# Capture the total memory allocated to the entire Erlang VM (or BEAM for short)
last_value(
metric_prefix ++ [:memory, :allocated, :bytes],
event_name: @memory_event,
description: "The total amount of memory currently allocated.",
measurement: :total,
unit: :byte
),
# Capture the total memory allocated to atoms
last_value(
metric_prefix ++ [:memory, :atom, :total, :bytes],
event_name: @memory_event,
description: "The total amount of memory currently allocated for atoms.",
measurement: :atom,
unit: :byte
),
# Capture the total memory allocated to binaries
last_value(
metric_prefix ++ [:memory, :binary, :total, :bytes],
event_name: @memory_event,
description: "The total amount of memory currently allocated for binaries.",
measurement: :binary,
unit: :byte
),
# Capture the total memory allocated to Erlang code
last_value(
metric_prefix ++ [:memory, :code, :total, :bytes],
event_name: @memory_event,
description: "The total amount of memory currently allocated for Erlang code.",
measurement: :code,
unit: :byte
),
# Capture the total memory allocated to ETS tables
last_value(
metric_prefix ++ [:memory, :ets, :total, :bytes],
event_name: @memory_event,
description: "The total amount of memory currently allocated for ETS tables.",
measurement: :ets,
unit: :byte
),
# Capture the total memory allocated to Erlang processes
last_value(
metric_prefix ++ [:memory, :processes, :total, :bytes],
event_name: @memory_event,
description: "The total amount of memory currently allocated to Erlang processes.",
measurement: :processes,
unit: :byte
),
# Capture the total memory allocated to :persistent_term
last_value(
metric_prefix ++ [:memory, :persistent_term, :total, :bytes],
event_name: @memory_event,
description: "The total amount of memory currently allocated to Erlang :persistent_term.",
measurement: :persistent_term,
unit: :byte
)
]
)
end
@doc false
def execute_memory_metrics do
%{memory: persistent_term_memory} = :persistent_term.info()
memory_measurements =
:erlang.memory()
|> Map.new()
|> Map.put(:persistent_term, persistent_term_memory)
:telemetry.execute(@memory_event, memory_measurements, %{})
end
@doc false
def execute_distribution_metrics do
end
@doc false
def execute_internal_metrics do
total_active_tasks = :erlang.statistics(:total_active_tasks)
total_active_tasks_all = :erlang.statistics(:total_active_tasks_all)
total_run_queue_lengths = :erlang.statistics(:total_run_queue_lengths)
total_run_queue_lengths_all = :erlang.statistics(:total_run_queue_lengths_all)
dirty_active_tasks = total_active_tasks_all - total_active_tasks
dirty_run_queue_lengths = total_run_queue_lengths_all - total_run_queue_lengths
{context_switches, _} = :erlang.statistics(:context_switches)
{total_reductions, _} = :erlang.statistics(:reductions)
word_size = :erlang.system_info(:wordsize)
{number_of_gcs, words_reclaimed, _} = :erlang.statistics(:garbage_collection)
bytes_reclaimed = words_reclaimed * word_size
{{:input, input_port_bytes}, {:output, output_port_bytes}} = :erlang.statistics(:io)
{wall_clock_time, _} = :erlang.statistics(:wall_clock)
:telemetry.execute([:prom_ex, :plugin, :beam, :port, :count], %{count: :erlang.system_info(:port_count)})
:telemetry.execute([:prom_ex, :plugin, :beam, :process, :count], %{count: :erlang.system_info(:process_count)})
:telemetry.execute([:prom_ex, :plugin, :beam, :atom, :count], %{count: :erlang.system_info(:atom_count)})
:telemetry.execute([:prom_ex, :plugin, :beam, :ets, :count], %{count: :erlang.system_info(:ets_count)})
:telemetry.execute([:prom_ex, :plugin, :beam, :active_task, :count], %{count: total_active_tasks}, %{type: :normal})
:telemetry.execute([:prom_ex, :plugin, :beam, :active_task, :count], %{count: dirty_active_tasks}, %{type: :dirty})
:telemetry.execute([:prom_ex, :plugin, :beam, :context_switch, :count], %{count: context_switches})
:telemetry.execute([:prom_ex, :plugin, :beam, :reduction, :count], %{count: total_reductions})
:telemetry.execute([:prom_ex, :plugin, :beam, :gc, :count], %{count: number_of_gcs})
:telemetry.execute([:prom_ex, :plugin, :beam, :gc, :bytes_reclaimed], %{count: bytes_reclaimed})
:telemetry.execute([:prom_ex, :plugin, :beam, :port_io, :count], %{count: input_port_bytes}, %{type: :input})
:telemetry.execute([:prom_ex, :plugin, :beam, :port_io, :count], %{count: output_port_bytes}, %{type: :output})
:telemetry.execute([:prom_ex, :plugin, :beam, :uptime, :count], %{count: wall_clock_time})
:telemetry.execute([:prom_ex, :plugin, :beam, :run_queue, :count], %{count: total_run_queue_lengths}, %{
type: :normal
})
:telemetry.execute([:prom_ex, :plugin, :beam, :run_queue, :count], %{count: dirty_run_queue_lengths}, %{
type: :dirty
})
end
@doc false
def execute_mnesia_metrics do
# https://github.com/deadtrickster/prometheus.erl/blob/master/src/collectors/mnesia/prometheus_mnesia_collector.erl
end
@doc false
def execute_system_limits_info do
:telemetry.execute([:prom_ex, :plugin, :beam, :ets_limit], %{limit: :erlang.system_info(:ets_limit)})
:telemetry.execute([:prom_ex, :plugin, :beam, :port_limit], %{limit: :erlang.system_info(:port_limit)})
:telemetry.execute([:prom_ex, :plugin, :beam, :process_limit], %{limit: :erlang.system_info(:process_limit)})
:telemetry.execute(
[:prom_ex, :plugin, :beam, :thread_pool_size],
%{size: :erlang.system_info(:thread_pool_size)},
%{}
)
:telemetry.execute([:prom_ex, :plugin, :beam, :atom_limit], %{limit: :erlang.system_info(:atom_limit)}, %{})
end
@doc false
def execute_system_info do
smp_enabled = if(:erlang.system_info(:smp_support), do: 1, else: 0)
thread_support_enabled = if(:erlang.system_info(:threads), do: 1, else: 0)
time_correction_enabled = if(:erlang.system_info(:time_correction), do: 1, else: 0)
word_size = :erlang.system_info(:wordsize)
version = :otp_release |> :erlang.system_info() |> :erlang.list_to_binary() |> String.to_integer()
:telemetry.execute([:prom_ex, :plugin, :beam, :smp_support], %{enabled: smp_enabled}, %{})
:telemetry.execute([:prom_ex, :plugin, :beam, :thread_support], %{enabled: thread_support_enabled}, %{})
:telemetry.execute([:prom_ex, :plugin, :beam, :time_correction_support], %{enabled: time_correction_enabled}, %{})
:telemetry.execute([:prom_ex, :plugin, :beam, :word_size_bytes], %{size: word_size}, %{})
:telemetry.execute([:prom_ex, :plugin, :beam, :version], %{version: version}, %{})
end
@doc false
def execute_cpu_topology_info do
:telemetry.execute(
[:prom_ex, :plugin, :beam, :logical_processors],
%{quantity: :erlang.system_info(:logical_processors)},
%{}
)
:telemetry.execute(
[:prom_ex, :plugin, :beam, :logical_processors_available],
%{quantity: :erlang.system_info(:logical_processors_available)},
%{}
)
:telemetry.execute(
[:prom_ex, :plugin, :beam, :logical_processors_online],
%{quantity: :erlang.system_info(:logical_processors_online)},
%{}
)
end
@doc false
def execute_scheduler_info do
:telemetry.execute(
[:prom_ex, :plugin, :beam, :dirty_cpu_schedulers],
%{quantity: :erlang.system_info(:dirty_cpu_schedulers)},
%{}
)
:telemetry.execute(
[:prom_ex, :plugin, :beam, :dirty_cpu_schedulers_online],
%{quantity: :erlang.system_info(:dirty_cpu_schedulers_online)},
%{}
)
:telemetry.execute(
[:prom_ex, :plugin, :beam, :dirty_io_schedulers],
%{quantity: :erlang.system_info(:dirty_io_schedulers)},
%{}
)
:telemetry.execute(
[:prom_ex, :plugin, :beam, :schedulers],
%{quantity: :erlang.system_info(:schedulers)},
%{}
)
:telemetry.execute(
[:prom_ex, :plugin, :beam, :schedulers_online],
%{quantity: :erlang.system_info(:schedulers_online)},
%{}
)
end
end
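# Illustrative sketch, not part of the original file: the polling callbacks
# above only emit :telemetry events, so they can be observed directly by
# attaching a handler and invoking the callback. BeamMemoryProbe and the
# handler id are hypothetical.
defmodule BeamMemoryProbe do
  def attach do
    :telemetry.attach(
      "beam-memory-probe",
      [:prom_ex, :plugin, :beam, :memory],
      &__MODULE__.handle_event/4,
      nil
    )
  end

  # Measurements include :total, :atom, :binary, :code, :ets, :processes and
  # the :persistent_term value added by execute_memory_metrics/0.
  def handle_event(_event, measurements, _metadata, _config) do
    IO.inspect(measurements, label: "BEAM memory (bytes)")
  end
end

# BeamMemoryProbe.attach()
# PromEx.Plugins.Beam.execute_memory_metrics()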
|
lib/prom_ex/plugins/beam.ex
| 0.818845
| 0.78469
|
beam.ex
|
starcoder
|
defmodule Bnf do
@doc """
iex> Bnf.count_valid(Bnf.sample())
2
"""
def count_valid(input) do
[a, b] = String.split(input, "\n\n", trim: true)
regex =
a
|> parse_grammar()
|> build_regex_string()
|> Regex.compile!()
String.split(b, "\n", trim: true)
|> Enum.count(&Regex.match?(regex, &1))
end
@doc """
iex> [a,_b] = String.split(Bnf.sample(), "\\n\\n", trim: true)
iex> grammar = Bnf.parse_grammar(a)
iex> grammar[0]
[4,1,5]
"""
def parse_grammar(input) do
input
|> String.split("\n", trim: true)
|> Enum.map(&parse_rule/1)
|> Map.new()
end
@doc """
iex> [a,_b] = String.split(Bnf.sample(), "\\n\\n", trim: true)
iex> Bnf.parse_grammar(a) |> Bnf.build_regex_string("4")
"^a$"
iex> Bnf.parse_grammar(a) |> Bnf.build_regex_string("(4)(5)")
"^(a)(b)$"
iex> Bnf.parse_grammar(a) |> Bnf.build_regex_string("3")
"^(a)(b)|(b)(a)$"
iex> Bnf.parse_grammar(a) |> Bnf.build_regex_string("1")
"^((a)(a)|(b)(b))((a)(b)|(b)(a))|((a)(b)|(b)(a))((a)(a)|(b)(b))$"
iex> Bnf.parse_grammar(a) |> Bnf.build_regex_string("0")
"^(a)(((a)(a)|(b)(b))((a)(b)|(b)(a))|((a)(b)|(b)(a))((a)(a)|(b)(b)))(b)$"
"""
def build_regex_string(grammar, str \\ "0") do
Regex.replace(~r/\d+/, str, fn ddd ->
case grammar[int(ddd)] do
x when is_binary(x) -> x
{a, b} -> "#{regex(a)}|#{regex(b)}"
list -> regex(list)
end
end)
|> build_regex_string_loop(grammar)
end
def build_regex_string_loop(str, grammar) do
if Regex.match?(~r/\d/, str),
do: build_regex_string(grammar, str),
else: "^#{str}$"
end
def regex(list), do: list |> Enum.map(fn a -> "(#{a})" end) |> Enum.join()
@doc """
iex> Bnf.parse_rule("0: 4 1 5")
{0, [4,1,5]}
iex> Bnf.parse_rule("0: 2 3 | 3 2")
{0, {[2,3], [3,2]}}
iex> Bnf.parse_rule("0: \\"x\\"")
{0, \"x\"}
"""
def parse_rule(str) do
[name, rest] = String.split(str, ": ")
body =
cond do
String.contains?(rest, "|") ->
String.split(rest, "|", trim: true)
|> Enum.map(&to_intlist/1)
|> List.to_tuple()
String.starts_with?(rest, "\"") ->
String.at(rest, 1)
true ->
to_intlist(rest)
end
{int(name), body}
end
def to_intlist(str), do: String.split(str) |> Enum.map(&int/1)
defp int(str), do: String.to_integer(str)
def valid?(_msg, _grammar), do: false
def sample() do
"""
0: 4 1 5
1: 2 3 | 3 2
2: 4 4 | 5 5
3: 4 5 | 5 4
4: "a"
5: "b"
ababbb
bababa
abbbab
aaabbb
aaaabbb
"""
end
end
|
19-Bnf/lib/bnf.ex
| 0.542863
| 0.407923
|
bnf.ex
|
starcoder
|
defmodule Protobuf.JSON do
@moduledoc """
JSON encoding and decoding utilities for Protobuf structs.
It follows Google's [specs](https://developers.google.com/protocol-buffers/docs/proto3#json)
and reference implementation. Some features
such as [well-known](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
types are not fully supported yet.
Proto3 is supported as per the specification. Proto2 is supported in practice, but some of its
features might not work correctly, such as extensions.
## Types
| Protobuf | JSON | Supported |
|------------------------------|-----------------------------|-----------|
| `bool` | `true`/`false` | Yes |
| `int32`, `fixed32`, `uint32` | Number | Yes |
| `int64`, `fixed64`, `uint64` | String | Yes |
| `float`, `double` | Number | Yes |
| `bytes` | Base64 string | Yes |
| `string` | String | Yes |
| `message` | Object (`{…}`) | Yes |
| `enum` | String | Yes |
| `map<K,V>` | Object (`{…}`) | Yes |
| `repeated V` | Array of `[v, …]` | Yes |
| `Any` | Object (`{…}`) | No |
| `Timestamp` | RFC3339 datetime | Yes |
| `Duration` | String (`seconds.fraction`) | Yes |
| `Struct` | Object (`{…}`) | Yes |
| `Wrapper types` | Various types | Yes |
| `FieldMask` | String | Yes |
| `ListValue` | Array | Yes |
| `Value` | Any JSON value | Yes |
| `NullValue` | `null` | Yes |
| `Empty` | Object (`{…}`) | Yes |
## Usage
`Protobuf.JSON` requires a JSON library to work, so first make sure you have `:jason` added
to your dependencies:
defp deps do
[
{:jason, "~> 1.2"},
# ...
]
end
With `encode/1` you can turn any `Protobuf` message struct into a JSON string:
iex> message = Car.new(color: :RED, top_speed: 125.3)
iex> Protobuf.JSON.encode(message)
{:ok, "{\\"color\\":\\"RED\\",\\"topSpeed\\":125.3}"}
And go the other way around with `decode/1`:
iex> json = ~S|{"color":"RED","topSpeed":125.3}|
iex> Protobuf.JSON.decode(json, Car)
{:ok, %Car{color: :RED, top_speed: 125.3}}
JSON keys are encoded as *camelCase strings* by default, specified by the `json_name` field
option. So make sure to *recompile the `.proto` files in your project* before working with
JSON encoding; the compiler will generate all the required `json_name` options. You can set
your own `json_name` for a particular field too:
message GeoCoordinate {
double latitude = 1 [ json_name = "lat" ];
double longitude = 2 [ json_name = "long" ];
}
## Known Issues and Limitations
Currently, the `protoc` compiler won't check for field name collisions. This library won't
check that either. Make sure your field names will be unique when serialized to JSON.
For instance, this message definition will not encode correctly since it will emit just
one of the two fields and the problem might go unnoticed:
message CollidingFields {
int32 f1 = 1 [json_name = "sameName"];
float f2 = 2 [json_name = "sameName"];
}
According to the specification, when duplicated JSON keys are found in maps, the library
should raise a decoding error. It currently ignores duplicates and keeps the last occurrence.
"""
alias Protobuf.JSON.{Encode, EncodeError, Decode, DecodeError}
@type encode_opt() ::
{:use_proto_names, boolean()}
| {:use_enum_numbers, boolean()}
| {:emit_unpopulated, boolean()}
@type json_data() :: %{optional(binary) => any}
@doc """
Generates a JSON representation of the given protobuf `struct`.
Similar to `encode/2` except it will unwrap the error tuple and raise in case of errors.
## Examples
iex> Car.new(top_speed: 80.0) |> Protobuf.JSON.encode!()
~S|{"topSpeed":80.0}|
"""
@spec encode!(struct, [encode_opt]) :: String.t() | no_return
def encode!(struct, opts \\ []) do
case encode(struct, opts) do
{:ok, json} -> json
{:error, error} -> raise error
end
end
@doc """
Generates a JSON representation of the given protobuf `struct`.
## Options
* `:use_proto_names` - use original field `name` instead of the camelCase `json_name` for
JSON keys. Defaults to `false`.
* `:use_enum_numbers` - encode `enum` field values as numbers instead of their labels.
Defaults to `false`.
* `:emit_unpopulated` - emit all fields, even when they are blank, empty, or set to their
default value. Defaults to `false`.
## Examples
Suppose that this is your Protobuf message:
syntax = "proto3";
message Car {
enum Color {
GREEN = 0;
RED = 1;
}
Color color = 1;
float top_speed = 2;
}
Encoding is as simple as:
iex> Car.new(color: :RED, top_speed: 125.3) |> Protobuf.JSON.encode()
{:ok, ~S|{"color":"RED","topSpeed":125.3}|}
iex> Car.new(color: :GREEN) |> Protobuf.JSON.encode()
{:ok, "{}"}
iex> Car.new() |> Protobuf.JSON.encode(emit_unpopulated: true)
{:ok, ~S|{"color":"GREEN","topSpeed":0.0}|}
"""
@spec encode(struct, [encode_opt]) ::
{:ok, String.t()} | {:error, EncodeError.t() | Exception.t()}
def encode(%_{} = struct, opts \\ []) when is_list(opts) do
if jason = load_jason() do
with {:ok, map} <- to_encodable(struct, opts), do: jason.encode(map)
else
{:error, EncodeError.new(:no_json_lib)}
end
end
@doc """
Generates a JSON-encodable map for the given Protobuf `struct`.
Similar to `encode/2` except it will return an intermediate `map` representation.
This is especially useful if you want to use custom JSON encoding or a custom
JSON library.
Supports the same options as `encode/2`.
## Examples
iex> Car.new(color: :RED, top_speed: 125.3) |> Protobuf.JSON.to_encodable()
{:ok, %{"color" => :RED, "topSpeed" => 125.3}}
iex> Car.new(color: :GREEN) |> Protobuf.JSON.to_encodable()
{:ok, %{}}
iex> Car.new() |> Protobuf.JSON.to_encodable(emit_unpopulated: true)
{:ok, %{"color" => :GREEN, "topSpeed" => 0.0}}
"""
boolean_opts = [:use_proto_names, :use_enum_numbers, :emit_unpopulated]
@spec to_encodable(struct, [encode_opt]) :: {:ok, json_data} | {:error, EncodeError.t()}
def to_encodable(%_{} = struct, opts \\ []) when is_list(opts) do
Enum.each(opts, fn
{key, value} when key in unquote(boolean_opts) and is_boolean(value) ->
:ok
{key, value} when key in unquote(boolean_opts) ->
raise ArgumentError, "option #{inspect(key)} must be a boolean, got: #{inspect(value)}"
{key, _value} ->
raise ArgumentError, "unknown option: #{inspect(key)}"
other ->
raise ArgumentError, "invalid element in options list: #{inspect(other)}"
end)
{:ok, Encode.to_encodable(struct, opts)}
catch
error -> {:error, EncodeError.new(error)}
end
@doc """
Decodes a JSON `iodata` into a `module` Protobuf struct.
Similar to `decode/2` except it will unwrap the error tuple and raise in case of errors.
## Examples
iex> Protobuf.JSON.decode!("{}", Car)
%Car{color: :GREEN, top_speed: 0.0}
iex> ~S|{"color":"RED"}| |> Protobuf.JSON.decode!(Car)
%Car{color: :RED, top_speed: 0.0}
iex> ~S|{"color":"GREEN","topSpeed":80.0}| |> Protobuf.JSON.decode!(Car)
%Car{color: :GREEN, top_speed: 80.0}
"""
@spec decode!(iodata, module) :: struct | no_return
def decode!(iodata, module) do
case decode(iodata, module) do
{:ok, json} -> json
{:error, error} -> raise error
end
end
@doc """
Decodes a JSON `iodata` into a `module` Protobuf struct.
## Examples
Given this Protobuf message:
syntax = "proto3";
message Car {
enum Color {
GREEN = 0;
RED = 1;
}
Color color = 1;
float top_speed = 2;
}
You can build its structs from JSON like this:
iex> Protobuf.JSON.decode("{}", Car)
{:ok, %Car{color: :GREEN, top_speed: 0.0}}
iex> ~S|{"color":"RED"}| |> Protobuf.JSON.decode(Car)
{:ok, %Car{color: :RED, top_speed: 0.0}}
iex> ~S|{"color":"GREEN","topSpeed":80.0}| |> Protobuf.JSON.decode(Car)
{:ok, %Car{color: :GREEN, top_speed: 80.0}}
"""
@spec decode(iodata, module) :: {:ok, struct} | {:error, DecodeError.t() | Exception.t()}
def decode(iodata, module) when is_atom(module) do
if jason = load_jason() do
with {:ok, json_data} <- jason.decode(iodata),
do: from_decoded(json_data, module)
else
{:error, DecodeError.new(:no_json_lib)}
end
end
@doc """
Decodes a `json_data` map into a `module` Protobuf struct.
Similar to `decode/2` except it takes a JSON `map` representation of the data.
This is especially useful if you want to use custom JSON encoding or a custom
JSON library.
## Examples
iex> Protobuf.JSON.from_decoded(%{}, Car)
{:ok, %Car{color: :GREEN, top_speed: 0.0}}
iex> Protobuf.JSON.from_decoded(%{"color" => "RED"}, Car)
{:ok, %Car{color: :RED, top_speed: 0.0}}
iex> Protobuf.JSON.from_decoded(%{"color" => "GREEN","topSpeed" => 80.0}, Car)
{:ok, %Car{color: :GREEN, top_speed: 80.0}}
"""
@spec from_decoded(json_data(), module()) :: {:ok, struct()} | {:error, DecodeError.t()}
def from_decoded(json_data, module) when is_atom(module) do
{:ok, Decode.from_json_data(json_data, module)}
catch
error -> {:error, DecodeError.new(error)}
end
defp load_jason, do: Code.ensure_loaded?(Jason) and Jason
end
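# Illustrative sketch, not part of the original file: to_encodable/2 and
# from_decoded/2 make it possible to plug in a JSON library other than Jason.
# Car is the example message from the moduledoc above; CarJSON and MyJSON
# (an encoder/decoder exposing encode/1 and decode/1) are hypothetical.
defmodule CarJSON do
  def dump(%Car{} = car) do
    with {:ok, map} <- Protobuf.JSON.to_encodable(car, emit_unpopulated: true) do
      MyJSON.encode(map)
    end
  end

  def load(json) when is_binary(json) do
    with {:ok, map} <- MyJSON.decode(json) do
      Protobuf.JSON.from_decoded(map, Car)
    end
  end
end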
|
lib/protobuf/json.ex
| 0.943608
| 0.704201
|
json.ex
|
starcoder
|
defmodule GGity.Axis do
@moduledoc false
alias GGity.{Draw, Labels, Plot, Scale}
@spec draw_x_axis(Plot.t()) :: iolist()
def draw_x_axis(%Plot{} = plot) do
[x_axis_line(plot), x_gridlines(plot), x_ticks(plot), draw_x_axis_label(plot)]
end
defp x_axis_line(%Plot{} = plot) do
length = plot.width + plot.area_padding * 2
top_shift = plot.width / plot.aspect_ratio + plot.area_padding * 2
Draw.line(x2: "#{length}", class: "gg-axis-line gg-axis-line-x")
|> Draw.g(opacity: "1", transform: "translate(0, #{top_shift})")
end
defp x_ticks(%Plot{scales: %{x: %{tick_values: [single_tick | []]}}} = plot) do
top_shift = plot.width / plot.aspect_ratio + plot.area_padding * 2
x_tick = draw_x_tick(plot, single_tick)
Draw.g(x_tick,
transform: "translate(#{plot.area_padding},#{top_shift})",
font_size: "10",
text_anchor: "middle"
)
end
defp x_ticks(%Plot{scales: scales} = plot) do
top_shift = plot.width / plot.aspect_ratio + plot.area_padding * 2
scales.x.tick_values
|> Enum.reverse()
|> Enum.map(&draw_x_tick(plot, &1))
|> Draw.g(
transform: "translate(#{plot.area_padding},#{top_shift})",
font_size: "10",
text_anchor: "middle"
)
end
defp x_gridlines(%Plot{scales: %{x: %{tick_values: [single_tick | []]}}} = plot) do
top_shift = plot.width / plot.aspect_ratio + plot.area_padding * 2
tick_label = draw_x_tick_label(plot, single_tick)
major_gridline = draw_x_major_gridline(plot, single_tick)
Draw.g([tick_label, major_gridline],
transform: "translate(#{plot.area_padding},#{top_shift})",
font_size: "10",
text_anchor: "middle"
)
end
defp x_gridlines(%Plot{scales: scales} = plot) do
top_shift = plot.width / plot.aspect_ratio + plot.area_padding * 2
[first, second] =
Enum.slice(scales.x.tick_values, 0..1)
|> Enum.map(scales.x.inverse)
interval = (second - first) / 2
[_last_tick | all_but_last_tick] = ticks = Enum.reverse(scales.x.tick_values)
minor_gridlines = Enum.map(all_but_last_tick, &draw_x_minor_gridline(plot, &1, interval))
tick_labels = Enum.map(ticks, &draw_x_tick_label(plot, &1))
major_gridlines = Enum.map(ticks, &draw_x_major_gridline(plot, &1))
Draw.g(
[tick_labels, major_gridlines, minor_gridlines],
transform: "translate(#{plot.area_padding},#{top_shift})",
font_size: "10",
text_anchor: "middle"
)
end
defp draw_x_tick_label(%Plot{} = plot, value) do
tick_length = plot.theme.axis_ticks_length_x || plot.theme.axis_ticks_length
coord = plot.scales.x.inverse.(value)
plot.scales.x
|> Labels.format(value)
|> to_string()
|> Draw.text(
[
y: "#{9 + tick_length}",
class: "gg-text gg-axis-text gg-axis-text-x"
] ++ attributes_for_angle(plot.theme.axis_text_x.angle, tick_length)
)
|> Draw.g(opacity: "1", transform: "translate(#{coord},0)")
end
defp attributes_for_angle(0, _tick_length) do
[text_anchor: "middle"]
end
defp attributes_for_angle(angle, tick_length) when angle > 0 and angle <= 90 do
x_adjust = angle / 10 * -1
y_adjust = (angle - 45) / 15
[
text_anchor: "end",
transform: "translate(#{x_adjust}, #{y_adjust + tick_length}),rotate(-#{angle})"
]
end
defp attributes_for_angle(_angle, tick_length) do
attributes_for_angle(0, tick_length)
end
defp draw_x_tick(%Plot{} = plot, value) do
tick_length = plot.theme.axis_ticks_length_x || plot.theme.axis_ticks_length
coord = plot.scales.x.inverse.(value)
Draw.line(y2: "#{tick_length}", class: "gg-axis-ticks gg-axis-ticks-x")
|> Draw.g(opacity: "1", transform: "translate(#{coord},0)")
end
defp draw_x_major_gridline(%Plot{} = plot, value) do
gridline_length = plot.width / plot.aspect_ratio + plot.area_padding * 2
coord = plot.scales.x.inverse.(value)
Draw.line(y2: "-#{gridline_length}", class: "gg-panel-grid gg-panel-grid-major")
|> Draw.g(opacity: "1", transform: "translate(#{coord},0)")
end
defp draw_x_minor_gridline(%Plot{} = plot, value, interval) do
gridline_length = plot.width / plot.aspect_ratio + plot.area_padding * 2
coord = plot.scales.x.inverse.(value)
%scale_type{} = plot.scales.x
gridline =
if scale_type == Scale.X.Discrete do
[]
else
Draw.line(
y2: "-#{gridline_length}",
class: "gg-panel-grid gg-panel-grid-minor",
transform: "translate(#{interval}, 0)"
)
end
Draw.g(gridline, opacity: "1", transform: "translate(#{coord},0)")
end
defp draw_x_axis_label(%Plot{labels: %{x: nil}}), do: ""
defp draw_x_axis_label(%Plot{labels: labels} = plot) do
# MAGIC NUMBERS
top_padding = 35 + plot.theme.axis_text_x.angle / 90 * 20
x_position = (plot.width + plot.area_padding * 2) / 2
y_position = plot.width / plot.aspect_ratio + plot.area_padding * 2 + top_padding
Draw.text(to_string(labels.x),
x: x_position,
y: y_position,
class: "gg-text gg-axis-title",
text_anchor: "middle"
)
end
@spec draw_y_axis(Plot.t()) :: iolist()
def draw_y_axis(%Plot{} = plot) do
[y_axis_line(plot), y_gridlines(plot), y_ticks(plot), draw_y_axis_label(plot)]
end
defp y_axis_line(%Plot{} = plot) do
length = plot.width / plot.aspect_ratio + plot.area_padding * 2
Draw.line(y2: "#{length}", class: "gg-axis-line gg-axis-line-y")
|> Draw.g(transform: "translate(0, 0)")
end
defp y_ticks(%Plot{scales: scales} = plot) do
scales.y.tick_values
|> Enum.reverse()
|> Enum.map(&draw_y_tick(plot, &1))
|> Draw.g(
transform: "translate(0, 0)",
font_size: "10",
text_anchor: "end"
)
end
defp y_gridlines(%Plot{scales: scales} = plot) do
transformed_tick_values =
Enum.slice(scales.y.tick_values, 0..1)
|> Enum.map(scales.y.inverse)
interval =
case transformed_tick_values do
[_just_one_y_value] ->
plot.width / plot.aspect_ratio
[first, second] ->
(second - first) / 2 / plot.aspect_ratio
end
[_last_tick | all_but_last_tick] = ticks = Enum.reverse(scales.y.tick_values)
minor_gridlines = Enum.map(all_but_last_tick, &draw_y_minor_gridline(plot, &1, interval))
tick_labels = Enum.map(ticks, &draw_y_tick_label(plot, &1))
major_gridlines = Enum.map(ticks, &draw_y_major_gridline(plot, &1))
Draw.g([tick_labels, major_gridlines, minor_gridlines],
transform: "translate(0, 0)",
font_size: "10",
text_anchor: "end"
)
end
defp draw_y_tick_label(%Plot{} = plot, value) do
tick_length = plot.theme.axis_ticks_length_y || plot.theme.axis_ticks_length
top_shift = plot.width / plot.aspect_ratio + plot.area_padding
coord = plot.scales.y.inverse.(value) / plot.aspect_ratio
plot.scales.y
|> Labels.format(value)
|> to_string()
|> Draw.text(
x: "-#{9 + tick_length}",
dy: "0.32em",
class: "gg-text gg-axis-text gg-axis-text-y"
)
|> Draw.g(opacity: "1", transform: "translate(0,#{top_shift - coord})")
end
defp draw_y_tick(%Plot{} = plot, value) do
tick_length = plot.theme.axis_ticks_length_y || plot.theme.axis_ticks_length
top_shift = plot.width / plot.aspect_ratio + plot.area_padding
coord = plot.scales.y.inverse.(value) / plot.aspect_ratio
Draw.line(x2: "-#{tick_length}", class: "gg-axis-ticks gg-axis-ticks-y")
|> Draw.g(opacity: "1", transform: "translate(0,#{top_shift - coord})")
end
defp draw_y_major_gridline(%Plot{} = plot, value) do
gridline_length = plot.width + plot.area_padding * 2
top_shift = plot.width / plot.aspect_ratio + plot.area_padding
coord = plot.scales.y.inverse.(value) / plot.aspect_ratio
Draw.line(x2: "#{gridline_length}", class: "gg-panel-grid gg-panel-grid-major")
|> Draw.g(opacity: "1", transform: "translate(0,#{top_shift - coord})")
end
defp draw_y_minor_gridline(%Plot{} = plot, value, interval) do
gridline_length = plot.width + plot.area_padding * 2
top_shift = plot.width / plot.aspect_ratio + plot.area_padding
coord = plot.scales.y.inverse.(value) / plot.aspect_ratio
Draw.line(
x2: "#{gridline_length}",
class: "gg-panel-grid gg-panel-grid-minor",
transform: "translate(0, -#{interval})"
)
|> Draw.g(opacity: "1", transform: "translate(0,#{top_shift - coord})")
end
defp draw_y_axis_label(%Plot{labels: %{y: nil}}), do: ""
defp draw_y_axis_label(%Plot{labels: labels} = plot) do
# Magic numbers: centre the label along the (rotated) y-axis and shift it 40px to the left of the axis line
x_position = -1 * (plot.width + plot.area_padding * 2) / 2 / plot.aspect_ratio
y_position = -40
Draw.text(to_string(labels.y),
x: x_position,
y: y_position,
class: "gg-text gg-axis-title",
text_anchor: "middle",
transform: "rotate(-90)"
)
end
end
# source file: lib/ggity/axis.ex
defmodule ZipperTree do
@moduledoc """
Provides traversal and modification functions for variable-arity trees. All
functions maintain an active 'cursor', or focus, in the tree. They also work
on plain lists, if you're into that sort of thing.
All traversal and insertion functions run in constant time, with the exception
of `up/1`, which is proportional to the number of siblings to the left of the
current subtree.
This is an implementation of Gérard Huet's tree with a zipper, essentially a
direct conversion of the published OCaml code to Elixir.
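## Example
A quick traversal sketch: build a tree from a list, step down to the first
child, then move right to its sibling.
iex> [1, [2, 3]] |> ZipperTree.tree() |> ZipperTree.down() |> ZipperTree.right()
%ZipperTree.Loc{loc: [2, 3],
path: %ZipperTree.Node{left: [1], right: [], up: Top}}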
"""
defmodule Node do
@moduledoc """
Represents a breadcrumb, or previous location in the tree of the cursor.
left: all previous siblings of the current node.
up: the previous path.
right: all siblings that come after the current tree node/leaf.
"""
defstruct left: [], up: Top, right: []
end
defmodule Loc do
@moduledoc """
Represents a cursor, or focused location in the tree.
loc: the currently focused subtree.
path: the current path back up the tree, read: ZipperTree.Node
"""
defstruct loc: [], path: Top
end
@doc """
Convenience function to quickly initialize a tree from a list instead of
calling `down/1` on it directly.
"""
@spec tree(list()) :: %Loc{}
def tree(l), do: %Loc{loc: l}
@doc """
Descend into the current subtree, focusing on its first child.
"""
def down(l) when is_list(l) do
down(%Loc{loc: l})
end
def down(%Loc{loc: t, path: p}) do
case t do
[h|trees] ->
%Loc{loc: h, path: %Node{ up: p, right: trees }}
_ ->
{:error, "at leaf"}
end
end
@doc """
Move the cursor up to the parent of the current subtree.
"""
def up %Loc{loc: t, path: p} do
case p do
Top ->
{:error, "at top"}
%Node{left: left, up: up, right: right} ->
%Loc{loc: Enum.reverse(left) ++ [t | right], path: up}
end
end
@doc """
Move to the previous sibling of the current subtree
"""
def left %Loc{loc: t, path: p} do
case p do
Top ->
{:error, "left of top"}
%Node{left: [], up: _, right: _} ->
{:error, "left of first"}
%Node{left: [l|left], up: up, right: right} ->
%Loc{loc: l, path: %Node{left: left, up: up, right: [t|right]}}
end
end
@doc """
Move to the next sibling of the current node.
"""
def right %Loc{loc: t, path: p} do
case p do
Top ->
{:error, "right of top"}
%Node{left: left, up: up, right: [r|right]} ->
%Loc{loc: r, path: %Node{left: [t|left], up: up, right: right}}
_ ->
{:error, "right of last"}
end
end
@doc """
Move to the nth child of the current subtree.
## Examples
iex> [1, 2, [3, 4]] |> ZipperTree.nth(3)
%ZipperTree.Loc{loc: [3, 4],
path: %ZipperTree.Node{left: [2, 1],
right: [], up: Top}}
"""
def nth loc, n do
case n do
1 ->
down loc
_ when n > 0 ->
right(nth(loc, n - 1))
_ ->
{:error, "nth expects a postive integer"}
end
end
@doc """
Recursively move to the topmost node in linear time.
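## Example
For instance, edit a leaf and zip all the way back up:
iex> [1, [2, 3]] |> ZipperTree.tree() |> ZipperTree.nth(2) |> ZipperTree.down() |> ZipperTree.change(9) |> ZipperTree.top()
%ZipperTree.Loc{loc: [1, [9, 3]], path: Top}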
"""
def top l do
case l do
%Loc{loc: _, path: Top} ->
l
_ ->
top(up(l))
end
end
@doc """
Change the value of the current node to t.
"""
def change(%Loc{loc: _, path: p}, t), do: %Loc{loc: t, path: p}
@doc """
Insert r after the current node.
"""
def insert_right %Loc{loc: t, path: p}, r do
case p do
Top ->
{:error, "insert of top"}
%Node{right: right} ->
%Loc{loc: t, path: %Node{p | right: [r|right]}}
end
end
@doc """
Insert l before the current node.
"""
def insert_left %Loc{loc: t, path: p}, l do
case p do
Top ->
{:error, "insert of top"}
%Node{left: left} ->
%Loc{loc: t, path: %Node{p | left: [l|left]}}
end
end
@doc """
Insert t1 as the leftmost child of the current subtree and move the focus to it.
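## Example
For instance, prepending a new first child and then moving back up:
iex> [1, 2] |> ZipperTree.tree() |> ZipperTree.insert_down(0) |> ZipperTree.up()
%ZipperTree.Loc{loc: [0, 1, 2], path: Top}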
"""
def insert_down %Loc{loc: t, path: p}, t1 do
case t do
_ when is_list t ->
%Loc{loc: t1, path: %Node{up: p, right: t}}
_ ->
{:error, "cannot insert below leaf"}
end
end
end
# source file: lib/zipper_tree.ex
defmodule Grakn do
@moduledoc """
The main entry point for interacting with Grakn. All functions take a connection reference.
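### Example
A minimal usage sketch, assuming a Grakn server is reachable on `localhost`:
```
{:ok, conn} = Grakn.start_link(hostname: "localhost")
Grakn.transaction(conn, fn conn ->
Grakn.query(conn, Grakn.Query.graql("match $x isa Person; get;"))
end)
```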
"""
@typedoc """
A connection process name, pid or reference.
A connection reference is used when making multiple requests within a transaction, see `transaction/3`.
"""
@type conn :: DBConnection.conn()
@doc """
Start and link to a Grakn connection process.
### Options
* `:hostname` - The hostname of the Grakn server to connect to (required)
* `:port` - The port of the Grakn server (default: 48555)
"""
@spec start_link(Keyword.t()) :: {:ok, conn()} | {:error, any}
def start_link(opts \\ []) do
DBConnection.start_link(Grakn.Protocol, opts)
end
@doc """
Execute a query on the connection process. Queries can only be run within a transaction, see `transaction/3`.
### Options
* `:include_inferences` - Boolean specifying if inferences should be included in the querying process (default: true)
"""
@spec query(conn(), Grakn.Query.t(), Keyword.t()) :: any()
def query(conn, query, opts \\ []) do
DBConnection.execute(conn, query, [], opts)
end
@doc """
Execute a query on the connection process and raise an exception if there is an error. See `query/3` for documentation.
"""
@spec query!(conn(), Grakn.Query.t(), Keyword.t()) :: any()
def query!(conn, %Grakn.Query{} = query, opts \\ []) do
DBConnection.execute!(conn, query, [], opts)
end
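@doc """
Execute a command on the connection process.
"""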
@spec command(conn(), Grakn.Command.t(), Keyword.t()) :: any()
def command(conn, %Grakn.Command{} = command, opts \\ []) do
DBConnection.execute(conn, command, [], opts)
end
@doc """
Create a new transaction and execute a sequence of statements within the context of the transaction.
### Options
* `:type` - The type of transaction, value must be `Grakn.Transaction.Type.read()` (default), or `Grakn.Transaction.Type.write()`
### Example
```
Grakn.transaction(
conn,
fn conn ->
Grakn.query(conn, Grakn.Query.graql("match $x isa Person; get;"))
end
)
```
"""
@spec transaction(conn(), (conn() -> result), Keyword.t()) :: {:ok, result} | {:error, any}
when result: var
defdelegate transaction(conn, fun, opts \\ []), to: DBConnection
@doc """
Rollback a transaction, does not return.
Aborts the current transaction fun. If inside multiple `transaction/3`
functions, bubbles up to the top level.
## Example
{:error, :oops} = Grakn.transaction(pid, fn(conn) ->
Grakn.rollback(conn, :oops)
IO.puts "never reaches here!"
end)
"""
@spec rollback(DBConnection.t, any) :: no_return()
defdelegate rollback(conn, any), to: DBConnection
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]}
}
end
end
# source file: lib/grakn.ex
defmodule AdventOfCode.Day04 do
@moduledoc """
Day 4.
Repose Record.
Part 1: Find the guard that has the most minutes of sleep.
Part 2: Find the guard and the minute that has the highest frequency of slept time.
"""
@day4 Path.join(["day04.txt"])
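@doc """
Part 1: returns the ID of the guard who slept the most, multiplied by the minute that guard was asleep most often.
"""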
def read do
data =
File.stream!(@day4, [], :line)
|> Stream.map(&String.trim/1)
|> Enum.sort()
|> Enum.map(&parse_input/1)
{id, %{:guard_minutes => guard_minutes}} =
get_minutes(data, 0, %{}, nil)
|> Map.to_list()
|> Enum.sort_by(fn {_id, %{total_time: t}} -> t end)
|> List.last()
{minute, _frequency} =
guard_minutes
|> Map.to_list()
|> Enum.sort_by(fn {_min, frequency} -> frequency end)
|> List.last()
id * minute
end
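@doc """
Part 2: returns the ID of the guard who is most frequently asleep on the same minute, multiplied by that minute.
"""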
def part2 do
data =
File.stream!(@day4, [], :line)
|> Stream.map(&String.trim/1)
|> Enum.sort()
|> Enum.map(&parse_input/1)
{id, {minute, _freq}} =
get_minutes(data, 0, %{}, nil)
|> Map.to_list()
|> Enum.map(fn {id, %{guard_minutes: minutes} = _guard_map} ->
max_minute =
minutes
|> Map.to_list()
|> Enum.sort_by(fn {_minute, frequency} -> frequency end)
|> List.last()
{id, max_minute}
end)
|> Enum.sort_by(fn {_id, {_minute, freq}} -> freq end)
|> List.last()
id * minute
end
@doc """
Parse the initial line of input.
A line of input looks like the following:
"[1518-11-01 23:58] Guard #99 begins shift"
We'll want the date, time, and the log entry (rest of the input).
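## Example
For example:
iex> AdventOfCode.Day04.parse_input("[1518-11-01 23:58] Guard #99 begins shift")
%{"datetime" => ~N[1518-11-01 23:58:00], "rest" => 99}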
"""
def parse_input(input) do
~r/\[(?<datetime>\d{4}-\d{2}-\d{2}\ \d{2}:\d{2})\]\ (?<rest>.+)$/
|> Regex.named_captures(input)
|> Map.update!("datetime", fn date ->
case NaiveDateTime.from_iso8601(date <> ":00") do
{:ok, ret} -> ret
{:error, _reason} = error -> error
end
end)
|> Map.update!("rest", fn rest -> parse_rest(rest) end)
end
@doc """
We will parse the rest of the input string.
The "rest" meaning binaries like the following:
"Guard #99 begins shift"
"wakes up"
"falls asleep"
For the input that has the Guard ID, we'll return the ID only.
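## Examples
For example:
iex> AdventOfCode.Day04.parse_rest("Guard #99 begins shift")
99
iex> AdventOfCode.Day04.parse_rest("falls asleep")
"falls asleep"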
"""
def parse_rest("Guard #" <> rest) do
rest
|> String.split()
|> List.first()
|> String.to_integer()
end
def parse_rest(rest), do: rest
@doc """
Get the minutes recursively.
We will iterate through the parsed logs and take action on them accordingly.
This code smells a bit, but the algorithm gets me where I want it to be. So meh. :-|
The gist is, that we track both the total minutes slept *and* the exact frequency of minutes per guard ID.
This is represented in a map like so:
%{guard_id => %{:guard_minutes => %{}, :total_time => integer}}
e.g. %{234 => %{:total_time => 23423, :guard_minutes => %{11 => 1, 12 => 23, 13 => 3}}}
"""
def get_minutes([], _prev_ts, minutes_map, _current_guard), do: minutes_map
def get_minutes([record | tail], prev_ts, minutes_map, current_guard) do
ts = Map.get(record, "datetime")
case Map.get(record, "rest") do
id when is_integer(id) ->
get_minutes(tail, ts, minutes_map, id)
action when is_binary(action) ->
if String.equivalent?("falls asleep", action) do
get_minutes(tail, ts, minutes_map, current_guard)
else
# NaiveDateTime.diff/2 returns seconds, so convert to whole minutes
sleep_minutes = div(NaiveDateTime.diff(ts, prev_ts), 60)
current_guard_map = Map.get(minutes_map, current_guard, %{:guard_minutes => %{}, :total_time => 0})
current_guard_map =
Map.update(current_guard_map, :total_time, 0, fn minutes ->
minutes + sleep_minutes
end)
guard_minutes =
# the wake-up minute itself does not count as time asleep
update_guard_minutes_map(prev_ts.minute..(ts.minute - 1), Map.get(current_guard_map, :guard_minutes))
current_guard_map = Map.put(current_guard_map, :guard_minutes, guard_minutes)
updated_minutes_map = Map.put(minutes_map, current_guard, current_guard_map)
get_minutes(tail, ts, updated_minutes_map, current_guard)
end
end
end
@doc """
Update the frequency count map.
We want to be able to find out what is the most frequent minute that the guard that slept the most has.
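## Example
For example, a minute that is already present is incremented, and a newly seen minute starts at 1:
iex> AdventOfCode.Day04.update_guard_minutes_map(5..7, %{5 => 1})
%{5 => 2, 6 => 1, 7 => 1}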
"""
def update_guard_minutes_map(minutes, guard_minutes) do
minutes
|> Enum.reduce(guard_minutes, fn (minute, acc) ->
# a minute seen for the first time starts at a count of 1
Map.update(acc, minute, 1, fn count -> count + 1 end)
end)
end
end
# source file: 2018/elixir/advent_of_code/lib/day04/day04.ex
defmodule X.Parser do
@moduledoc """
X template parser module.
"""
alias X.Ast
defguardp is_text_token(token) when elem(token, 0) in [:tag_text, :tag_output]
@doc ~S"""
Converts given tokens into X template AST.
## Example
iex> X.Parser.call([
...> {:tag_start, {1, 1}, 'div', [], nil, nil, false, false, false},
...> {:tag_start, {6, 1}, 'span', [{:tag_attr, {12, 1}, 'class', 'test', false}],
...> nil, nil, false, false, false},
...> {:tag_output, {25, 1}, 'a ', true},
...> {:tag_end, {32, 1}, 'span'},
...> {:tag_end, {39, 1}, 'div'}
...> ])
[
{{:tag_start, {1, 1}, 'div', [], nil, nil, false, false, false},
[
{{:tag_start, {6, 1}, 'span',
[{:tag_attr, {12, 1}, 'class', 'test', false}], nil, nil, false, false,
false},
[
{{:text_group, {25, 1}, 'span'},
[{{:tag_output, {25, 1}, 'a ', true}, []}]}
]}
]}
]
"""
@spec call([Ast.token()]) :: [Ast.leaf()]
def call(tokens) do
{result, _} = parse(tokens, nil, [])
result
end
@spec parse([Ast.token()], charlist() | nil, [Ast.leaf()]) :: {[Ast.leaf()], [Ast.token()]}
defp parse(list = [token | _], scope, acc) when is_text_token(token) do
{children, rest} = parse_text_group(list)
[{head, _} | _] = children
parse(rest, scope, [
{{:text_group, elem(head, 1), scope}, children}
| acc
])
end
defp parse([token = {:tag_start, _, _, _, _, _, singleton, selfclosed, _} | tail], scope, acc)
when singleton or selfclosed do
parse(tail, scope, [{token, []} | acc])
end
defp parse([token = {:tag_start, _, name, _, _, _, _, _, _} | tail], scope, acc) do
{children, rest} = parse(tail, name, [])
parse(rest, scope, [{token, children} | acc])
end
defp parse([{:tag_end, cur, name} | tail], scope, acc) do
case scope do
^name ->
{:lists.reverse(acc), tail}
_ ->
throw({:unexpected_tag, cur, scope, name})
end
end
defp parse([token = {:tag_comment, _, _} | tail], scope, acc) do
parse(tail, scope, [{token, []} | acc])
end
defp parse([], _, acc) do
{:lists.reverse(acc), []}
end
@spec parse_text_group([Ast.token()]) :: {[Ast.leaf()], [Ast.token()]}
defp parse_text_group([token | tail]) when is_text_token(token) do
{acc, rest} = parse_text_group(tail)
{[{token, []} | acc], rest}
end
defp parse_text_group(list) do
{[], list}
end
end
# source file: lib/x/parser.ex