defmodule Mix.Config do
@moduledoc ~S"""
Module for defining, reading and merging app configurations.
Most commonly, this module is used to define your own configuration:
use Mix.Config
config :plug,
key1: "value1",
key2: "value2"
import_config "#{Mix.env}.exs"
All `config/*` macros, including `import_config/1`, are used
to help define such configuration files.
Furthermore, this module provides functions like `read!/1`,
`merge/2` and friends which help manipulate configurations
in general.
Configuration set using `Mix.Config` will set the application env, so
that `Application.get_env/3` and other `Application` functions can be used
at run or compile time to retrieve or change the configuration.
For example, the `:key1` value from application `:plug` (see above) can be
retrieved with:
"value1" = Application.fetch_env!(:plug, :key1)
"""
defmodule LoadError do
defexception [:file, :error]
def message(%LoadError{file: file, error: error}) do
"could not load config #{Path.relative_to_cwd(file)}\n " <>
"#{Exception.format_banner(:error, error)}"
end
end
@doc false
defmacro __using__(_) do
quote do
import Mix.Config, only: [config: 2, config: 3, import_config: 1]
{:ok, agent} = Mix.Config.Agent.start_link
var!(config_agent, Mix.Config) = agent
end
end
@doc """
Configures the given application.
Keyword lists are always deep merged.
## Examples
The given `opts` are merged into the existing configuration
for the given `app`. Conflicting keys are overridden by the
ones specified in `opts`. For example, the declaration below:
config :lager,
log_level: :warn,
mode: :truncate
config :lager,
log_level: :info,
threshold: 1024
Will have a final configuration of:
[log_level: :info, mode: :truncate, threshold: 1024]
This final configuration can be retrieved at run or compile time:
Application.get_all_env(:lager)
"""
defmacro config(app, opts) do
quote do
Mix.Config.Agent.merge var!(config_agent, Mix.Config), [{unquote(app), unquote(opts)}]
end
end
@doc """
Configures the given key for the given application.
Keyword lists are always deep merged.
## Examples
The given `opts` are merged into the existing values for `key`
in the given `app`. Conflicting keys are overridden by the
ones specified in `opts`. For example, given the two configurations
below:
config :ecto, Repo,
log_level: :warn,
adapter: Ecto.Adapters.Postgres
config :ecto, Repo,
log_level: :info,
pool_size: 10
the final value of the configuration for the `Repo` key in the `:ecto`
application will be:
[log_level: :info, pool_size: 10, adapter: Ecto.Adapters.Postgres]
This final value can be retrieved at runtime or compile time with:
Application.get_env(:ecto, Repo)
"""
defmacro config(app, key, opts) do
quote do
Mix.Config.Agent.merge var!(config_agent, Mix.Config),
[{unquote(app), [{unquote(key), unquote(opts)}]}]
end
end
@doc ~S"""
Imports configuration from the given file or files.
If `path_or_wildcard` is a wildcard, then all the files
matching that wildcard will be imported; if no file matches
the wildcard, no errors are raised. If `path_or_wildcard` is
not a wildcard but a path to a single file, then that file is
imported; in case the file doesn't exist, an error is raised.
This behaviour is analogous to the one for `read_wildcard!/1`.
If path/wildcard is a relative path/wildcard, it will be expanded relatively
to the directory the current configuration file is in.
## Examples
This is often used to emulate configuration across environments:
import_config "#{Mix.env}.exs"
Or to import files from children in umbrella projects:
import_config "../apps/*/config/config.exs"
"""
defmacro import_config(path_or_wildcard) do
loaded_paths_quote =
unless {:loaded_paths, Mix.Config} in __CALLER__.vars do
quote do
var!(loaded_paths, Mix.Config) = [__ENV__.file]
end
end
quote do
unquote(loaded_paths_quote)
Mix.Config.Agent.merge(
var!(config_agent, Mix.Config),
Mix.Config.read_wildcard!(Path.expand(unquote(path_or_wildcard), __DIR__), var!(loaded_paths, Mix.Config))
)
end
end
@doc """
Reads and validates a configuration file.
`file` is the path to the configuration file to be read. If that file doesn't
exist or if there's an error loading it, a `Mix.Config.LoadError` exception
will be raised.
`loaded_paths` is a list of configuration files that have been previously
read. If `file` exists in `loaded_paths`, a `Mix.Config.LoadError` exception
will be raised.
"""
def read!(file, loaded_paths \\ []) do
try do
if file in loaded_paths do
raise ArgumentError, message: "recursive load of #{file} detected"
end
{config, binding} = Code.eval_string File.read!(file), [{{:loaded_paths, Mix.Config}, [file | loaded_paths]}], [file: file, line: 1]
config = case List.keyfind(binding, {:config_agent, Mix.Config}, 0) do
{_, agent} -> get_config_and_stop_agent(agent)
nil -> config
end
validate!(config)
config
rescue
e in [LoadError] -> reraise(e, System.stacktrace)
e -> reraise(LoadError, [file: file, error: e], System.stacktrace)
end
end
defp get_config_and_stop_agent(agent) do
config = Mix.Config.Agent.get(agent)
Mix.Config.Agent.stop(agent)
config
end
@doc """
Reads many configuration files given by wildcard into a single config.
Raises an error if `path` is a concrete filename (with no wildcards)
but the corresponding file does not exist; if `path` matches no files,
no errors are raised.
`loaded_paths` is a list of configuration files that have been previously
read.
"""
def read_wildcard!(path, loaded_paths \\ []) do
paths = if String.contains?(path, ~w(* ? [ {)) do
Path.wildcard(path)
else
[path]
end
Enum.reduce(paths, [], &merge(&2, read!(&1, loaded_paths)))
end
@doc """
Persists the given configuration by modifying
the configured applications environment.
`config` should be a list of `{app, app_config}` tuples or a
`%{app => app_config}` map where `app` are the applications to
be configured and `app_config` are the configuration (as key-value
pairs) for each of those applications.
Returns the configured applications.
## Examples
Mix.Config.persist(logger: [level: :error], my_app: [my_config: 1])
#=> [:logger, :my_app]
"""
def persist(config) do
for {app, kw} <- config do
for {k, v} <- kw do
Application.put_env(app, k, v, persistent: true)
end
app
end
end
@doc """
Validates a configuration.
"""
def validate!(config) do
if is_list(config) do
Enum.all?(config, fn
{app, value} when is_atom(app) ->
if Keyword.keyword?(value) do
true
else
raise ArgumentError,
"expected config for app #{inspect app} to return keyword list, got: #{inspect value}"
end
_ ->
false
end)
else
raise ArgumentError,
"expected config file to return keyword list, got: #{inspect config}"
end
end
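  # For example (a sketch, hypothetical app name):
  #
  #     validate!([my_app: [key: :value]])   # passes
  #     validate!([my_app: :not_a_keyword])  # raises ArgumentError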
@doc """
Merges two configurations.
The configuration of each application is merged together
with the values in the second one having higher preference
than the first in case of conflicts.
## Examples
iex> Mix.Config.merge([app: [k: :v1]], [app: [k: :v2]])
[app: [k: :v2]]
iex> Mix.Config.merge([app1: []], [app2: []])
[app1: [], app2: []]
"""
def merge(config1, config2) do
Keyword.merge(config1, config2, fn _, app1, app2 ->
Keyword.merge(app1, app2, &deep_merge/3)
end)
end
defp deep_merge(_key, value1, value2) do
if Keyword.keyword?(value1) and Keyword.keyword?(value2) do
Keyword.merge(value1, value2, &deep_merge/3)
else
value2
end
end
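  # A sketch of the deep merge above (hypothetical keys): nested keyword
  # lists merge recursively, while any other conflicting value is replaced
  # by the one from the second config.
  #
  #     merge([app: [opts: [a: 1]]], [app: [opts: [b: 2]]])
  #     #=> [app: [opts: [a: 1, b: 2]]]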
end
# source: lib/mix/lib/mix/config.ex
defmodule Signs.Utilities.SourceConfig do
@moduledoc """
Configuration for a sign's data sources, via JSON. Configuration of a sign looks like:
## Sign (zone) config
{
"id": "roxbury_crossing_southbound",
"headway_group": "orange_trunk",
"type": "realtime",
"pa_ess_loc": "OROX",
"text_zone": "s",
"audio_zones": ["s"],
"read_loop_offset": 60,
"source_config": [...]
},
* id: the internal ID of this "sign". Really, it's a "zone" and could have a few physical signs
as a part of it in the station, but they all display exactly the same content. Logically
they're a single entity in the ARINC system as far as we can interact with it. This ID is also
used by signs-ui, and is how realtime-signs knows how signs-ui has configured the sign.
* headway_group: this references the signs-ui values that the PIOs set for headways.
* type: always "realtime". (there were other types in previous iterations of the app.)
* pa_ess_loc: the ARINC station code. Starts with the line color B, R, O, G, or M, and then
three letters for the station.
* text_zone: one of the 6 zones ARINC divides a station into, for the purpose of sending text to
the sign.
* audio_zones: a list of those ARINC zones for the purpose of sending audio. This may differ
from the text_zone when the speakers are close enough to cause confusion. For example, at Park
Street for the Red line, we have the center platform text_zone set to "c", but the audio_zones
set to [], while for the north zone the text_zone is set to "n" and the audio_zones set to
["n", "c"]. So the center platform doesn't play audio of its *own*, but rather the north and
south platforms play their audio over the center platform speakers. Since a train that goes to
"ARR" will do so simultaneously for either the north or south platform *and* the center
platform, this configuration prevents simultaneous, slightly overlapping audio of the same
content.
* read_loop_offset: how many seconds to wait after app start-up before entering the "read loop",
which reads things like "The next train to X arrives in 3 minutes. The following train arrives
in 8 minutes". We generally have different read_loop_offsets for different zones, or where
speakers are particularly close, to encourage them to play their audio at different times.
* source_config: see below.
## Source config
A sign's data is provided via the "source_config" key, which is a list of lists of "sources"
(see next section).
A list of sources can be provided: [{...}, {...}, {...}, ...]. The sources determine which
predictions to use in the `Engine.Predictions` process. When a list of multiple sources is
provided, their corresponding predictions are aggregated and sorted by arrival time (or
departure, for terminals), so that the "next" train will be the earliest arriving train from any
of the sources. For example, the JFK mezzanine sign's top line uses a source list of two
sources, with GTFS stop IDs 70086 and 70096, which are the Ashmont and Braintree northbound
platforms. That way the sign will say when the next northbound train will be arriving at JFK,
from either of the Braintree or Ashmont branches.
The "source_config" key currently supports up to two _lists_ of sources. If one list is
provided, then this sign is a "platform" sign and its next two trips will show up on the two
lines of the sign. If two lists are provided, then this sign is a "mezzanine" or "center" sign
and the next trip from each list will appear on different lines of the sign.
The JSON structure for one list of sources is:
[
[{...}, {...}]
]
While the JSON structure for two lists of sources is:
[
[{...}, {...}],
[{...}, {...}]
]
## Source
Allows specifying one of a sign's data sources that it uses to calculate what to display. It
looks like:
{
"stop_id": "70008",
"routes": ["Orange"],
"direction_id": 0,
"headway_direction_name": "Forest Hills",
"platform": null,
"terminal": false,
"announce_arriving": true,
"announce_boarding": false
}
* stop_id: The GTFS stop_id that it uses for prediction data.
* routes: A list of routes that are relevant to this sign regarding alerts.
* direction_id: 0 or 1, used in tandem with the stop ID for predictions
* headway_direction_name: The headsign used to generate the "trains every X minutes" message in
headway mode. Must be a value recognized by `PaEss.Utilities.headsign_to_destination/1`.
* platform: mostly null, but :ashmont | :braintree for JFK/UMass, where it's used for the "next
train to X is approaching, on the Y platform" audio.
* terminal: whether this is a "terminal", and should use arrival or departure times in its
countdown.
* announce_arriving: whether to play audio when a sign goes to ARR.
* announce_boarding: whether to play audio when a sign goes to BRD. Generally we do one or the
other. Considerations include how noisy the station is, what we've done in the past, how
reliable ARR is (BRD is reliable, but especially at Boylston, e.g., ARR can have the "next"
train change frequently, so you don't want to announce the wrong one is arriving), and whether
it's a terminal or not.
"""
require Logger
@enforce_keys [
:stop_id,
:headway_destination,
:direction_id,
:platform,
:terminal?,
:announce_arriving?,
:announce_boarding?
]
defstruct @enforce_keys ++
[:routes, :headway_stop_id, multi_berth?: false, source_for_headway?: false]
@type source :: %__MODULE__{
stop_id: String.t(),
headway_stop_id: String.t() | nil,
headway_destination: PaEss.destination(),
direction_id: 0 | 1,
routes: [String.t()] | nil,
platform: Content.platform() | nil,
terminal?: boolean(),
announce_arriving?: boolean(),
announce_boarding?: boolean(),
multi_berth?: boolean(),
source_for_headway?: boolean()
}
@type config :: {[source()]} | {[source()], [source()]}
@spec parse!([[map()]]) :: config()
def parse!([both_lines_config]) do
{Enum.map(both_lines_config, &parse_source!/1)}
end
def parse!([top_line_config, bottom_line_config]) do
{
Enum.map(top_line_config, &parse_source!/1),
Enum.map(bottom_line_config, &parse_source!/1)
}
end
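  # A minimal sketch of a one-list "platform" config (values borrowed from
  # the moduledoc example above):
  #
  #     parse!([
  #       [
  #         %{
  #           "stop_id" => "70008",
  #           "headway_direction_name" => "Forest Hills",
  #           "direction_id" => 0,
  #           "platform" => nil,
  #           "terminal" => false,
  #           "announce_arriving" => true,
  #           "announce_boarding" => false
  #         }
  #       ]
  #     ])
  #     #=> a `{[source()]}` tuple holding one %__MODULE__{} struct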
defp parse_source!(
%{
"stop_id" => stop_id,
"headway_direction_name" => headway_direction_name,
"direction_id" => direction_id,
"platform" => platform,
"terminal" => terminal?,
"announce_arriving" => announce_arriving?,
"announce_boarding" => announce_boarding?
} = source
) do
platform =
case platform do
nil -> nil
"ashmont" -> :ashmont
"braintree" -> :braintree
end
multi_berth? =
case source["multi_berth"] do
true -> true
_ -> false
end
source_for_headway? =
case source["source_for_headway"] do
true -> true
_ -> false
end
{:ok, headway_destination} = PaEss.Utilities.headsign_to_destination(headway_direction_name)
%__MODULE__{
stop_id: stop_id,
headway_destination: headway_destination,
direction_id: direction_id,
routes: source["routes"],
platform: platform,
terminal?: terminal?,
announce_arriving?: announce_arriving?,
announce_boarding?: announce_boarding?,
multi_berth?: multi_berth?,
source_for_headway?: source_for_headway?
}
end
@spec multi_source?(config) :: boolean()
def multi_source?({_, _}), do: true
def multi_source?({_}), do: false
@spec sign_stop_ids(config) :: [String.t()]
def sign_stop_ids({s1, s2}) do
Enum.map(s1, & &1.stop_id) ++ Enum.map(s2, & &1.stop_id)
end
def sign_stop_ids({s}) do
Enum.map(s, & &1.stop_id)
end
@spec sign_routes(config) :: [String.t()]
def sign_routes({s1, s2}) do
Enum.flat_map(s1, &(&1.routes || [])) ++ Enum.flat_map(s2, &(&1.routes || []))
end
def sign_routes({s}) do
Enum.flat_map(s, &(&1.routes || []))
end
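  # For example (a sketch): for a two-list config `{s1, s2}`, stop IDs and
  # routes from both source lists are concatenated, so a JFK-style mezzanine
  # sign with sources at stops 70086 and 70096 yields ["70086", "70096"].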
end
# source: lib/signs/utilities/source_config.ex
defmodule Minio do
@moduledoc """
This package implements the Minio API.
## Implemented API functions
The following API functions are implemented.
### Presigned Operations
+ presign_put_object
+ presign_get_object
*The package is being developed as needed for my personal use. If
you require any API endpoint to be added, please open an issue and
send a PR if you have the time.*
## Usage
To use the API, a client struct must be created. This can be done the
following way.
```elixir
client = %Minio{
endpoint: "https://play.min.io",
access_key: "<KEY>",
secret_key: "zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG",
region: "us-east-1"
}
```
The region is not automatically retrieved; it defaults to "us-east-1"
and must be set explicitly if your instance uses a different region.
Once the client struct is created, the following API functions can
be used.
### Presigned Put Object
Generates a presigned URL to put an object to a Minio instance.
```elixir
{:ok, put_url} = Minio.presign_put_object(client, bucket_name: "test", object_name: "test3.png")
```
### Presigned Get Object
Generates a presigned URL to get an object from a Minio instance.
```elixir
{:ok, get_url} = Minio.presign_get_object(client, bucket_name: "test", object_name: "test3.png")
```
"""
defstruct endpoint: nil,
access_key: nil,
secret_key: nil,
region: "us-east-1"
@type t :: %__MODULE__{
endpoint: String.t(),
access_key: String.t(),
secret_key: String.t(),
region: String.t()
}
alias Minio.Signer
@spec presign_put_object(t, Keyword.t()) :: {:ok, String.t()} | {:error, String.t()}
@doc """
Presigns a put object and provides a url
## Options
* `:bucket_name` - The name of the minio bucket.
* `:object_name` - The object name or path.
* `:request_datetime` - The datetime of request. Defaults to `DateTime.utc_now()`
* `:link_expiry` - The number of seconds for link expiry. Defaults to `608_400` seconds.
"""
def presign_put_object(%Minio{} = client, opts),
do: Signer.presigned_url(client, :put, opts)
@spec presign_get_object(t, Keyword.t()) :: {:ok, String.t()} | {:error, String.t()}
@doc """
Presigns a get object and provides a url
## Options
* `:bucket_name` - The name of the minio bucket.
* `:object_name` - The object name or path.
* `:request_datetime` - The datetime of request. Defaults to `DateTime.utc_now()`
* `:link_expiry` - The number of seconds for link expiry. Defaults to `608_400` seconds.
"""
def presign_get_object(%Minio{} = client, opts),
do: Signer.presigned_url(client, :get, opts)
end
# source: lib/minio.ex
defmodule AWS.Neptune do
@moduledoc """
Amazon Neptune
Amazon Neptune is a fast, reliable, fully-managed graph database service
that makes it easy to build and run applications that work with highly
connected datasets. The core of Amazon Neptune is a purpose-built,
high-performance graph database engine optimized for storing billions of
relationships and querying the graph with milliseconds latency. Amazon
Neptune supports popular graph models Property Graph and W3C's RDF, and
their respective query languages Apache TinkerPop Gremlin and SPARQL,
allowing you to easily build queries that efficiently navigate highly
connected datasets. Neptune powers graph use cases such as recommendation
engines, fraud detection, knowledge graphs, drug discovery, and network
security.
This interface reference for Amazon Neptune contains documentation for a
programming or command line interface you can use to manage Amazon Neptune.
Note that Amazon Neptune is asynchronous, which means that some interfaces
might require techniques such as polling or callback functions to determine
when a command has been applied. In this reference, the parameter
descriptions indicate whether a command is applied immediately, on the next
instance reboot, or during the maintenance window. The reference structure
is as follows, and we list some related topics from the user guide.
"""
@doc """
Associates an Identity and Access Management (IAM) role with a Neptune DB
cluster.
"""
def add_role_to_d_b_cluster(client, input, options \\ []) do
request(client, "AddRoleToDBCluster", input, options)
end
@doc """
Adds a source identifier to an existing event notification subscription.
"""
def add_source_identifier_to_subscription(client, input, options \\ []) do
request(client, "AddSourceIdentifierToSubscription", input, options)
end
@doc """
Adds metadata tags to an Amazon Neptune resource. These tags can also be
used with cost allocation reporting to track cost associated with Amazon
Neptune resources, or used in a Condition statement in an IAM policy for
Amazon Neptune.
"""
def add_tags_to_resource(client, input, options \\ []) do
request(client, "AddTagsToResource", input, options)
end
@doc """
Applies a pending maintenance action to a resource (for example, to a DB
instance).
"""
def apply_pending_maintenance_action(client, input, options \\ []) do
request(client, "ApplyPendingMaintenanceAction", input, options)
end
@doc """
Copies the specified DB cluster parameter group.
"""
def copy_d_b_cluster_parameter_group(client, input, options \\ []) do
request(client, "CopyDBClusterParameterGroup", input, options)
end
@doc """
Copies a snapshot of a DB cluster.
To copy a DB cluster snapshot from a shared manual DB cluster snapshot,
`SourceDBClusterSnapshotIdentifier` must be the Amazon Resource Name (ARN)
of the shared DB cluster snapshot.
"""
def copy_d_b_cluster_snapshot(client, input, options \\ []) do
request(client, "CopyDBClusterSnapshot", input, options)
end
@doc """
Copies the specified DB parameter group.
"""
def copy_d_b_parameter_group(client, input, options \\ []) do
request(client, "CopyDBParameterGroup", input, options)
end
@doc """
Creates a new Amazon Neptune DB cluster.
You can use the `ReplicationSourceIdentifier` parameter to create the DB
cluster as a Read Replica of another DB cluster or Amazon Neptune DB
instance.
Note that when you create a new cluster using `CreateDBCluster` directly,
deletion protection is disabled by default (when you create a new
production cluster in the console, deletion protection is enabled by
default). You can only delete a DB cluster if its `DeletionProtection`
field is set to `false`.
"""
def create_d_b_cluster(client, input, options \\ []) do
request(client, "CreateDBCluster", input, options)
end
@doc """
Creates a new DB cluster parameter group.
Parameters in a DB cluster parameter group apply to all of the instances in
a DB cluster.
A DB cluster parameter group is initially created with the default
parameters for the database engine used by instances in the DB cluster. To
provide custom values for any of the parameters, you must modify the group
after creating it using `ModifyDBClusterParameterGroup`. Once you've
created a DB cluster parameter group, you need to associate it with your DB
cluster using `ModifyDBCluster`. When you associate a new DB cluster
parameter group with a running DB cluster, you need to reboot the DB
instances in the DB cluster without failover for the new DB cluster
parameter group and associated settings to take effect.
<important> After you create a DB cluster parameter group, you should wait
at least 5 minutes before creating your first DB cluster that uses that DB
cluster parameter group as the default parameter group. This allows Amazon
Neptune to fully complete the create action before the DB cluster parameter
group is used as the default for a new DB cluster. This is especially
important for parameters that are critical when creating the default
database for a DB cluster, such as the character set for the default
database defined by the `character_set_database` parameter. You can use the
*Parameter Groups* option of the [Amazon Neptune
console](https://console.aws.amazon.com/rds/) or the
`DescribeDBClusterParameters` command to verify that your DB cluster
parameter group has been created or modified.
</important>
"""
def create_d_b_cluster_parameter_group(client, input, options \\ []) do
request(client, "CreateDBClusterParameterGroup", input, options)
end
@doc """
Creates a snapshot of a DB cluster.
"""
def create_d_b_cluster_snapshot(client, input, options \\ []) do
request(client, "CreateDBClusterSnapshot", input, options)
end
@doc """
Creates a new DB instance.
"""
def create_d_b_instance(client, input, options \\ []) do
request(client, "CreateDBInstance", input, options)
end
@doc """
Creates a new DB parameter group.
A DB parameter group is initially created with the default parameters for
the database engine used by the DB instance. To provide custom values for
any of the parameters, you must modify the group after creating it using
*ModifyDBParameterGroup*. Once you've created a DB parameter group, you
need to associate it with your DB instance using *ModifyDBInstance*. When
you associate a new DB parameter group with a running DB instance, you need
to reboot the DB instance without failover for the new DB parameter group
and associated settings to take effect.
<important> After you create a DB parameter group, you should wait at least
5 minutes before creating your first DB instance that uses that DB
parameter group as the default parameter group. This allows Amazon Neptune
to fully complete the create action before the parameter group is used as
the default for a new DB instance. This is especially important for
parameters that are critical when creating the default database for a DB
instance, such as the character set for the default database defined by the
`character_set_database` parameter. You can use the *Parameter Groups*
option of the Amazon Neptune console or the *DescribeDBParameters* command
to verify that your DB parameter group has been created or modified.
</important>
"""
def create_d_b_parameter_group(client, input, options \\ []) do
request(client, "CreateDBParameterGroup", input, options)
end
@doc """
Creates a new DB subnet group. DB subnet groups must contain at least one
subnet in at least two AZs in the AWS Region.
"""
def create_d_b_subnet_group(client, input, options \\ []) do
request(client, "CreateDBSubnetGroup", input, options)
end
@doc """
Creates an event notification subscription. This action requires a topic
ARN (Amazon Resource Name) created by either the Neptune console, the SNS
console, or the SNS API. To obtain an ARN with SNS, you must create a topic
in Amazon SNS and subscribe to the topic. The ARN is displayed in the SNS
console.
You can specify the type of source (SourceType) you want to be notified of,
provide a list of Neptune sources (SourceIds) that triggers the events, and
provide a list of event categories (EventCategories) for events you want to
be notified of. For example, you can specify SourceType = db-instance,
SourceIds = mydbinstance1, mydbinstance2 and EventCategories =
Availability, Backup.
If you specify both the SourceType and SourceIds, such as SourceType =
db-instance and SourceIdentifier = myDBInstance1, you are notified of all
the db-instance events for the specified source. If you specify a
SourceType but do not specify a SourceIdentifier, you receive notice of the
events for that source type for all your Neptune sources. If you do not
specify either the SourceType or the SourceIdentifier, you are notified of
events generated from all Neptune sources belonging to your customer
account.
"""
def create_event_subscription(client, input, options \\ []) do
request(client, "CreateEventSubscription", input, options)
end
@doc """
The DeleteDBCluster action deletes a previously provisioned DB cluster.
When you delete a DB cluster, all automated backups for that DB cluster are
deleted and can't be recovered. Manual DB cluster snapshots of the
specified DB cluster are not deleted.
Note that the DB Cluster cannot be deleted if deletion protection is
enabled. To delete it, you must first set its `DeletionProtection` field to
`False`.
"""
def delete_d_b_cluster(client, input, options \\ []) do
request(client, "DeleteDBCluster", input, options)
end
@doc """
Deletes a specified DB cluster parameter group. The DB cluster parameter
group to be deleted can't be associated with any DB clusters.
"""
def delete_d_b_cluster_parameter_group(client, input, options \\ []) do
request(client, "DeleteDBClusterParameterGroup", input, options)
end
@doc """
Deletes a DB cluster snapshot. If the snapshot is being copied, the copy
operation is terminated.
<note> The DB cluster snapshot must be in the `available` state to be
deleted.
</note>
"""
def delete_d_b_cluster_snapshot(client, input, options \\ []) do
request(client, "DeleteDBClusterSnapshot", input, options)
end
@doc """
The DeleteDBInstance action deletes a previously provisioned DB instance.
When you delete a DB instance, all automated backups for that instance are
deleted and can't be recovered. Manual DB snapshots of the DB instance to
be deleted by `DeleteDBInstance` are not deleted.
If you request a final DB snapshot, the status of the Amazon Neptune DB
instance is `deleting` until the DB snapshot is created. The API action
`DescribeDBInstance` is used to monitor the status of this operation. The
action can't be canceled or reverted once submitted.
Note that when a DB instance is in a failure state and has a status of
`failed`, `incompatible-restore`, or `incompatible-network`, you can only
delete it when the `SkipFinalSnapshot` parameter is set to `true`.
You can't delete a DB instance if it is the only instance in the DB
cluster, or if it has deletion protection enabled.
"""
def delete_d_b_instance(client, input, options \\ []) do
request(client, "DeleteDBInstance", input, options)
end
@doc """
Deletes a specified DBParameterGroup. The DBParameterGroup to be deleted
can't be associated with any DB instances.
"""
def delete_d_b_parameter_group(client, input, options \\ []) do
request(client, "DeleteDBParameterGroup", input, options)
end
@doc """
Deletes a DB subnet group.
<note> The specified database subnet group must not be associated with any
DB instances.
</note>
"""
def delete_d_b_subnet_group(client, input, options \\ []) do
request(client, "DeleteDBSubnetGroup", input, options)
end
@doc """
Deletes an event notification subscription.
"""
def delete_event_subscription(client, input, options \\ []) do
request(client, "DeleteEventSubscription", input, options)
end
@doc """
Returns a list of `DBClusterParameterGroup` descriptions. If a
`DBClusterParameterGroupName` parameter is specified, the list will contain
only the description of the specified DB cluster parameter group.
"""
def describe_d_b_cluster_parameter_groups(client, input, options \\ []) do
request(client, "DescribeDBClusterParameterGroups", input, options)
end
@doc """
Returns the detailed parameter list for a particular DB cluster parameter
group.
"""
def describe_d_b_cluster_parameters(client, input, options \\ []) do
request(client, "DescribeDBClusterParameters", input, options)
end
@doc """
Returns a list of DB cluster snapshot attribute names and values for a
manual DB cluster snapshot.
When sharing snapshots with other AWS accounts,
`DescribeDBClusterSnapshotAttributes` returns the `restore` attribute and a
list of IDs for the AWS accounts that are authorized to copy or restore the
manual DB cluster snapshot. If `all` is included in the list of values for
the `restore` attribute, then the manual DB cluster snapshot is public and
can be copied or restored by all AWS accounts.
To add or remove access for an AWS account to copy or restore a manual DB
cluster snapshot, or to make the manual DB cluster snapshot public or
private, use the `ModifyDBClusterSnapshotAttribute` API action.
"""
def describe_d_b_cluster_snapshot_attributes(client, input, options \\ []) do
request(client, "DescribeDBClusterSnapshotAttributes", input, options)
end
@doc """
Returns information about DB cluster snapshots. This API action supports
pagination.
"""
def describe_d_b_cluster_snapshots(client, input, options \\ []) do
request(client, "DescribeDBClusterSnapshots", input, options)
end
@doc """
Returns information about provisioned DB clusters, and supports pagination.
<note> This operation can also return information for Amazon RDS clusters
and Amazon DocDB clusters.
</note>
"""
def describe_d_b_clusters(client, input, options \\ []) do
request(client, "DescribeDBClusters", input, options)
end
@doc """
Returns a list of the available DB engines.
"""
def describe_d_b_engine_versions(client, input, options \\ []) do
request(client, "DescribeDBEngineVersions", input, options)
end
@doc """
Returns information about provisioned instances, and supports pagination.
<note> This operation can also return information for Amazon RDS instances
and Amazon DocDB instances.
</note>
"""
def describe_d_b_instances(client, input, options \\ []) do
request(client, "DescribeDBInstances", input, options)
end
@doc """
Returns a list of `DBParameterGroup` descriptions. If a
`DBParameterGroupName` is specified, the list will contain only the
description of the specified DB parameter group.
"""
def describe_d_b_parameter_groups(client, input, options \\ []) do
request(client, "DescribeDBParameterGroups", input, options)
end
@doc """
Returns the detailed parameter list for a particular DB parameter group.
"""
def describe_d_b_parameters(client, input, options \\ []) do
request(client, "DescribeDBParameters", input, options)
end
@doc """
Returns a list of DBSubnetGroup descriptions. If a DBSubnetGroupName is
specified, the list will contain only the descriptions of the specified
DBSubnetGroup.
For an overview of CIDR ranges, go to the [Wikipedia
Tutorial](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing).
"""
def describe_d_b_subnet_groups(client, input, options \\ []) do
request(client, "DescribeDBSubnetGroups", input, options)
end
@doc """
Returns the default engine and system parameter information for the cluster
database engine.
"""
def describe_engine_default_cluster_parameters(client, input, options \\ []) do
request(client, "DescribeEngineDefaultClusterParameters", input, options)
end
@doc """
Returns the default engine and system parameter information for the
specified database engine.
"""
def describe_engine_default_parameters(client, input, options \\ []) do
request(client, "DescribeEngineDefaultParameters", input, options)
end
@doc """
Displays a list of categories for all event source types, or, if specified,
for a specified source type.
"""
def describe_event_categories(client, input, options \\ []) do
request(client, "DescribeEventCategories", input, options)
end
@doc """
Lists all the subscription descriptions for a customer account. The
description for a subscription includes SubscriptionName, SNSTopicARN,
CustomerID, SourceType, SourceID, CreationTime, and Status.
If you specify a SubscriptionName, lists the description for that
subscription.
"""
def describe_event_subscriptions(client, input, options \\ []) do
request(client, "DescribeEventSubscriptions", input, options)
end
@doc """
Returns events related to DB instances, DB security groups, DB snapshots,
and DB parameter groups for the past 14 days. Events specific to a
particular DB instance, DB security group, database snapshot, or DB
parameter group can be obtained by providing the name as a parameter. By
default, the past hour of events are returned.
"""
def describe_events(client, input, options \\ []) do
request(client, "DescribeEvents", input, options)
end
@doc """
Returns a list of orderable DB instance options for the specified engine.
"""
def describe_orderable_d_b_instance_options(client, input, options \\ []) do
request(client, "DescribeOrderableDBInstanceOptions", input, options)
end
@doc """
Returns a list of resources (for example, DB instances) that have at least
one pending maintenance action.
"""
def describe_pending_maintenance_actions(client, input, options \\ []) do
request(client, "DescribePendingMaintenanceActions", input, options)
end
@doc """
You can call `DescribeValidDBInstanceModifications` to learn what
modifications you can make to your DB instance. You can use this
information when you call `ModifyDBInstance`.
"""
def describe_valid_d_b_instance_modifications(client, input, options \\ []) do
request(client, "DescribeValidDBInstanceModifications", input, options)
end
@doc """
Forces a failover for a DB cluster.
A failover for a DB cluster promotes one of the Read Replicas (read-only
instances) in the DB cluster to be the primary instance (the cluster
writer).
Amazon Neptune will automatically fail over to a Read Replica, if one
exists, when the primary instance fails. You can force a failover when you
want to simulate a failure of a primary instance for testing. Because each
instance in a DB cluster has its own endpoint address, you will need to
clean up and re-establish any existing connections that use those endpoint
addresses when the failover is complete.
"""
def failover_d_b_cluster(client, input, options \\ []) do
request(client, "FailoverDBCluster", input, options)
end
@doc """
Lists all tags on an Amazon Neptune resource.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Modify a setting for a DB cluster. You can change one or more database
configuration parameters by specifying these parameters and the new values
in the request.
"""
def modify_d_b_cluster(client, input, options \\ []) do
request(client, "ModifyDBCluster", input, options)
end
@doc """
Modifies the parameters of a DB cluster parameter group. To modify more
than one parameter, submit a list of the following: `ParameterName`,
`ParameterValue`, and `ApplyMethod`. A maximum of 20 parameters can be
modified in a single request.
<note> Changes to dynamic parameters are applied immediately. Changes to
static parameters require a reboot without failover to the DB cluster
associated with the parameter group before the change can take effect.
</note> <important> After you create a DB cluster parameter group, you
should wait at least 5 minutes before creating your first DB cluster that
uses that DB cluster parameter group as the default parameter group. This
allows Amazon Neptune to fully complete the create action before the
parameter group is used as the default for a new DB cluster. This is
especially important for parameters that are critical when creating the
default database for a DB cluster, such as the character set for the
default database defined by the `character_set_database` parameter. You can
use the *Parameter Groups* option of the Amazon Neptune console or the
`DescribeDBClusterParameters` command to verify that your DB cluster
parameter group has been created or modified.
</important>
"""
def modify_d_b_cluster_parameter_group(client, input, options \\ []) do
request(client, "ModifyDBClusterParameterGroup", input, options)
end
@doc """
Adds an attribute and values to, or removes an attribute and values from, a
manual DB cluster snapshot.
To share a manual DB cluster snapshot with other AWS accounts, specify
`restore` as the `AttributeName` and use the `ValuesToAdd` parameter to add
a list of IDs of the AWS accounts that are authorized to restore the manual
DB cluster snapshot. Use the value `all` to make the manual DB cluster
snapshot public, which means that it can be copied or restored by all AWS
accounts. Do not add the `all` value for any manual DB cluster snapshots
that contain private information that you don't want available to all AWS
accounts. If a manual DB cluster snapshot is encrypted, it can be shared,
but only by specifying a list of authorized AWS account IDs for the
`ValuesToAdd` parameter. You can't use `all` as a value for that parameter
in this case.
To view which AWS accounts have access to copy or restore a manual DB
cluster snapshot, or whether a manual DB cluster snapshot is public or
private, use the `DescribeDBClusterSnapshotAttributes` API action.
"""
def modify_d_b_cluster_snapshot_attribute(client, input, options \\ []) do
request(client, "ModifyDBClusterSnapshotAttribute", input, options)
end
@doc """
Modifies settings for a DB instance. You can change one or more database
configuration parameters by specifying these parameters and the new values
in the request. To learn what modifications you can make to your DB
instance, call `DescribeValidDBInstanceModifications` before you call
`ModifyDBInstance`.
"""
def modify_d_b_instance(client, input, options \\ []) do
request(client, "ModifyDBInstance", input, options)
end
@doc """
Modifies the parameters of a DB parameter group. To modify more than one
parameter, submit a list of the following: `ParameterName`,
`ParameterValue`, and `ApplyMethod`. A maximum of 20 parameters can be
modified in a single request.
<note> Changes to dynamic parameters are applied immediately. Changes to
static parameters require a reboot without failover to the DB instance
associated with the parameter group before the change can take effect.
</note> <important> After you modify a DB parameter group, you should wait
at least 5 minutes before creating your first DB instance that uses that DB
parameter group as the default parameter group. This allows Amazon Neptune
to fully complete the modify action before the parameter group is used as
the default for a new DB instance. This is especially important for
parameters that are critical when creating the default database for a DB
instance, such as the character set for the default database defined by the
`character_set_database` parameter. You can use the *Parameter Groups*
option of the Amazon Neptune console or the *DescribeDBParameters* command
to verify that your DB parameter group has been created or modified.
</important>
"""
def modify_d_b_parameter_group(client, input, options \\ []) do
request(client, "ModifyDBParameterGroup", input, options)
end
@doc """
Modifies an existing DB subnet group. DB subnet groups must contain at
least one subnet in at least two AZs in the AWS Region.
"""
def modify_d_b_subnet_group(client, input, options \\ []) do
request(client, "ModifyDBSubnetGroup", input, options)
end
@doc """
Modifies an existing event notification subscription. Note that you can't
modify the source identifiers using this call; to change source identifiers
for a subscription, use the `AddSourceIdentifierToSubscription` and
`RemoveSourceIdentifierFromSubscription` calls.
You can see a list of the event categories for a given SourceType by using
the **DescribeEventCategories** action.
"""
def modify_event_subscription(client, input, options \\ []) do
request(client, "ModifyEventSubscription", input, options)
end
@doc """
Not supported.
"""
def promote_read_replica_d_b_cluster(client, input, options \\ []) do
request(client, "PromoteReadReplicaDBCluster", input, options)
end
@doc """
You might need to reboot your DB instance, usually for maintenance reasons.
For example, if you make certain modifications, or if you change the DB
parameter group associated with the DB instance, you must reboot the
instance for the changes to take effect.
Rebooting a DB instance restarts the database engine service. Rebooting a
DB instance results in a momentary outage, during which the DB instance
status is set to rebooting.
"""
def reboot_d_b_instance(client, input, options \\ []) do
request(client, "RebootDBInstance", input, options)
end
@doc """
Disassociates an Identity and Access Management (IAM) role from a DB
cluster.
"""
def remove_role_from_d_b_cluster(client, input, options \\ []) do
request(client, "RemoveRoleFromDBCluster", input, options)
end
@doc """
Removes a source identifier from an existing event notification
subscription.
"""
def remove_source_identifier_from_subscription(client, input, options \\ []) do
request(client, "RemoveSourceIdentifierFromSubscription", input, options)
end
@doc """
Removes metadata tags from an Amazon Neptune resource.
"""
def remove_tags_from_resource(client, input, options \\ []) do
request(client, "RemoveTagsFromResource", input, options)
end
@doc """
Modifies the parameters of a DB cluster parameter group to the default
value. To reset specific parameters submit a list of the following:
`ParameterName` and `ApplyMethod`. To reset the entire DB cluster parameter
group, specify the `DBClusterParameterGroupName` and `ResetAllParameters`
parameters.
When resetting the entire group, dynamic parameters are updated immediately
and static parameters are set to `pending-reboot` to take effect on the
next DB instance restart or `RebootDBInstance` request. You must call
`RebootDBInstance` for every DB instance in your DB cluster that you want
the updated static parameter to apply to.
"""
def reset_d_b_cluster_parameter_group(client, input, options \\ []) do
request(client, "ResetDBClusterParameterGroup", input, options)
end
@doc """
Modifies the parameters of a DB parameter group to the engine/system
default value. To reset specific parameters, provide a list of the
following: `ParameterName` and `ApplyMethod`. To reset the entire DB
parameter group, specify the `DBParameterGroup` name and
`ResetAllParameters` parameters. When resetting the entire group, dynamic
parameters are updated immediately and static parameters are set to
`pending-reboot` to take effect on the next DB instance restart or
`RebootDBInstance` request.
"""
def reset_d_b_parameter_group(client, input, options \\ []) do
request(client, "ResetDBParameterGroup", input, options)
end
@doc """
Creates a new DB cluster from a DB snapshot or DB cluster snapshot.
If a DB snapshot is specified, the target DB cluster is created from the
source DB snapshot with a default configuration and default security group.
If a DB cluster snapshot is specified, the target DB cluster is created
from the source DB cluster restore point with the same configuration as the
original source DB cluster, except that the new DB cluster is created with
the default security group.
"""
def restore_d_b_cluster_from_snapshot(client, input, options \\ []) do
request(client, "RestoreDBClusterFromSnapshot", input, options)
end
@doc """
Restores a DB cluster to an arbitrary point in time. Users can restore to
any point in time before `LatestRestorableTime` for up to
`BackupRetentionPeriod` days. The target DB cluster is created from the
source DB cluster with the same configuration as the original DB cluster,
except that the new DB cluster is created with the default DB security
group.
<note> This action only restores the DB cluster, not the DB instances for
that DB cluster. You must invoke the `CreateDBInstance` action to create DB
instances for the restored DB cluster, specifying the identifier of the
restored DB cluster in `DBClusterIdentifier`. You can create DB instances
only after the `RestoreDBClusterToPointInTime` action has completed and the
DB cluster is available.
</note>
"""
def restore_d_b_cluster_to_point_in_time(client, input, options \\ []) do
request(client, "RestoreDBClusterToPointInTime", input, options)
end
@doc """
Starts an Amazon Neptune DB cluster that was stopped using the AWS console,
the AWS CLI stop-db-cluster command, or the StopDBCluster API.
"""
def start_d_b_cluster(client, input, options \\ []) do
request(client, "StartDBCluster", input, options)
end
@doc """
Stops an Amazon Neptune DB cluster. When you stop a DB cluster, Neptune
retains the DB cluster's metadata, including its endpoints and DB parameter
groups.
Neptune also retains the transaction logs so you can do a point-in-time
restore if necessary.
"""
def stop_d_b_cluster(client, input, options \\ []) do
request(client, "StopDBCluster", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, Poison.Parser.t() | nil, Poison.Response.t()}
| {:error, Poison.Parser.t()}
| {:error, HTTPoison.Error.t()}
defp request(client, action, input, options) do
client = %{client | service: "rds"}
host = build_host("rds", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-www-form-urlencoded"}
]
input = Map.merge(input, %{"Action" => action, "Version" => "2014-10-31"})
payload = AWS.Util.encode_query(input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
{:ok, nil, response}
{:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
{:ok, AWS.Util.decode_xml(body), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = AWS.Util.decode_xml(body)
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
# source: lib/aws/neptune.ex
defmodule DgraphEx.Util do
@moduledoc false
alias DgraphEx.Core.Expr.Uid
def as_rendered(value) do
case value do
x when is_list(x) -> x |> Poison.encode!()
%Date{} = x -> x |> Date.to_iso8601() |> Kernel.<>("T00:00:00.0+00:00")
%DateTime{} = x -> x |> DateTime.to_iso8601() |> String.replace("Z", "+00:00")
x -> x |> to_string
end
end
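  # For example (a sketch):
  #
  #     as_rendered(~D[2020-01-02])  #=> "2020-01-02T00:00:00.0+00:00"
  #     as_rendered([1.0, 2.0])      #=> "[1.0,2.0]"
  #     as_rendered(:name)           #=> "name"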
def infer_type(type) do
case type do
x when is_boolean(x) -> :bool
x when is_binary(x) -> :string
x when is_integer(x) -> :int
x when is_float(x) -> :float
x when is_list(x) -> :geo
%DateTime{} -> :datetime
%Date{} -> :date
%Uid{} -> :uid
end
end
def as_literal(value, :int) when is_integer(value), do: {:ok, to_string(value)}
def as_literal(value, :float) when is_float(value), do: {:ok, as_rendered(value)}
def as_literal(value, :bool) when is_boolean(value), do: {:ok, as_rendered(value)}
def as_literal(value, :string) when is_binary(value),
do: {:ok, value |> strip_quotes |> wrap_quotes}
def as_literal(%Date{} = value, :date), do: {:ok, as_rendered(value)}
def as_literal(%DateTime{} = value, :datetime), do: {:ok, as_rendered(value)}
def as_literal(value, :geo) when is_list(value), do: check_and_render_geo_numbers(value)
def as_literal(value, :uid) when is_binary(value), do: {:ok, "<" <> value <> ">"}
def as_literal(value, type), do: {:error, {:invalidly_typed_value, value, type}}
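  # For example (a sketch):
  #
  #     as_literal(42, :int)    #=> {:ok, "42"}
  #     as_literal("0x1", :uid) #=> {:ok, "<0x1>"}
  #     as_literal("a", :int)   #=> {:error, {:invalidly_typed_value, "a", :int}}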
def as_string(value) do
value
|> as_rendered
|> strip_quotes
|> wrap_quotes
end
defp check_and_render_geo_numbers(nums) do
if nums |> List.flatten() |> Enum.all?(&is_float/1) do
{:ok, nums |> as_rendered}
else
{:error, :invalid_geo_json}
end
end
defp wrap_quotes(value) when is_binary(value) do
"\"" <> value <> "\""
end
defp strip_quotes(value) when is_binary(value) do
value
|> String.replace(~r/^"/, "")
|> String.replace(~r/"&/, "")
end
def has_function?(module, func, arity) do
:erlang.function_exported(module, func, arity)
end
def has_struct?(module) when is_atom(module) do
Code.ensure_loaded?(module)
has_function?(module, :__struct__, 0)
end
def get_value(params, key, default \\ nil) when is_atom(key) do
str_key = to_string(key)
cond do
Map.has_key?(params, key) -> Map.get(params, key)
Map.has_key?(params, str_key) -> Map.get(params, str_key)
true -> default
end
end
@spec merge_keyword_lists(target :: list, source :: list) :: list
def merge_keyword_lists(target, source)
when is_list(target) and is_list(source) do
Enum.map(target, fn {k, v} ->
case Keyword.fetch(source, k) do
{:ok, val} -> {k, val}
:error -> {k, v}
end
end)
end
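  # Note that only keys already present in `target` are updated; keys that
  # appear only in `source` are dropped. For example (a sketch):
  #
  #     merge_keyword_lists([a: 1, b: 2], [b: 3, c: 4])
  #     #=> [a: 1, b: 3]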
end
# source: lib/dgraph_ex/util.ex
defmodule FexrYahoo.Utils do
@moduledoc """
Documentation for FexrYahoo.Utils.
"""
@doc false
@spec format({:ok, String.t}, list(String.t) | nil) :: map | {:error, String.t}
def format({:ok, json}, symbols) do
json
|> Poison.decode!
|> extract_rates
|> format_rates
|> serialize
|> map_merge
|> filter(symbols)
end
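  # A sketch of the decoded YQL payload this pipeline expects (hypothetical
  # values):
  #
  #     %{"query" => %{"results" => %{"rate" => [
  #       %{"Name" => "USD/EUR", "Date" => "...", "Time" => "...", "Rate" => "0.9"}
  #     ]}}}
  #
  # which `format/2` reduces to `%{"EUR" => 0.9}` when symbols is ["EUR"].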
@spec extract_rates(map) :: map | nil
def extract_rates(map), do: map["query"]["results"]["rate"]
@spec format_rates(nil | list(map)) :: {:error, String.t} | list({String.t, String.t, String.t, String.t})
defp format_rates(nil), do: {:error, "no rates"}
defp format_rates(rates), do: for rate <- rates, do: {rate["Name"], rate["Date"], rate["Time"], rate["Rate"]}
@spec serialize({:error, String.t} | list({String.t, String.t, String.t, String.t})) :: {:error, String.t} | list(map)
defp serialize({:error, reason}), do: {:error, reason}
defp serialize(rates) do
for {name, _date, _time, rate} <- rates do
[_base, symbol] = String.split(name, "/")
%{symbol => String.to_float(rate)}
end
end
@spec map_merge({:error, String.t} | list(map)) :: {:error, String.t} | map
defp map_merge({:error, reason}), do: {:error, reason}
# Dispatching to the two-argument version explicitly avoids the clause
# conflict that a default argument would cause with the `{:error, reason}`
# clause above (both would define map_merge/1).
defp map_merge(currencies), do: map_merge(currencies, %{})
@spec map_merge(list(map), map) :: map
defp map_merge([], acc), do: acc
defp map_merge([currency | currencies], acc), do: map_merge(currencies, Map.merge(currency, acc))
@spec filter({:error, String.t} | map, list(String.t) | nil) :: {:error, String.t} | map
# The error clause must take two arguments, since the pipeline in `format/2`
# always calls `filter/2`.
defp filter({:error, reason}, _symbols), do: {:error, reason}
defp filter(rates, []), do: rates
defp filter(rates, nil), do: rates
defp filter(rates, symbols), do: Map.take(rates, symbols)
@spec convert_symbols(list(atom | String.t)) :: [] | {:ok, list(String.t)}
def convert_symbols([]), do: []
def convert_symbols(symbols) do
{:ok, symbols
|> Enum.reject(fn(s) -> not is_atom(s) and not is_binary(s) end)
|> Enum.map(fn(s) -> if is_atom(s), do: Atom.to_string(s) |> String.upcase, else: String.upcase(s) end)}
end
end
# source: lib/fexr_yahoo/utils.ex
defmodule Integer do
@moduledoc """
Functions for working with integers.
"""
import Bitwise
@doc """
Determines if an integer is odd.
Returns `true` if `n` is an odd number, otherwise `false`.
Allowed in guard clauses.
## Examples
iex> Integer.is_odd(3)
true
iex> Integer.is_odd(4)
false
"""
defmacro is_odd(n) do
quote do: (unquote(n) &&& 1) == 1
end
@doc """
Determines if an integer is even.
Returns `true` if `n` is an even number, otherwise `false`.
Allowed in guard clauses.
## Examples
iex> Integer.is_even(10)
true
iex> Integer.is_even(5)
false
"""
defmacro is_even(n) do
quote do: (unquote(n) &&& 1) == 0
end
@doc """
Returns the ordered digits for the given non-negative integer.
An optional base value may be provided representing the radix for the returned
digits.
## Examples
iex> Integer.digits(101)
[1, 0, 1]
iex> Integer.digits(58127, 2)
[1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1]
"""
@spec digits(non_neg_integer, pos_integer) :: [non_neg_integer, ...]
def digits(n, base \\ 10) when is_integer(n) and n >= 0
and is_integer(base) and base >= 2 do
do_digits(n, base, [])
end
defp do_digits(0, _base, []), do: [0]
defp do_digits(0, _base, acc), do: acc
defp do_digits(n, base, acc) do
do_digits div(n, base), base, [rem(n, base) | acc]
end
@doc """
Returns the integer represented by the ordered digits.
An optional base value may be provided representing the radix for the digits.
## Examples
iex> Integer.undigits([1, 0, 1])
101
iex> Integer.undigits([1, 4], 16)
20
"""
@spec undigits([integer], integer) :: integer
def undigits(digits, base \\ 10) when is_integer(base) do
do_undigits(digits, base, 0)
end
defp do_undigits([], _base, acc), do: acc
defp do_undigits([digit | tail], base, acc) do
do_undigits(tail, base, acc * base + digit)
end
@doc """
Converts a binary from a text representation of an integer
in an optional base `base` to the corresponding integer.
If the base `base` is not given, base 10 will be used.
If successful, returns a tuple in the form of `{integer, remainder_of_binary}`.
Otherwise `:error`.
Raises an error if `base` is less than 2 or more than 36.
## Examples
iex> Integer.parse("34")
{34, ""}
iex> Integer.parse("34.5")
{34, ".5"}
iex> Integer.parse("three")
:error
iex> Integer.parse("34", 10)
{34, ""}
iex> Integer.parse("f4", 16)
{244, ""}
iex> Integer.parse("Awww++", 36)
{509216, "++"}
iex> Integer.parse("fab", 10)
:error
iex> Integer.parse("a2", 38)
** (ArgumentError) invalid base 38
"""
@spec parse(binary, 2..36) :: {integer, binary} | :error | no_return
def parse(binary, base \\ 10)
def parse(binary, base) when is_integer(base) and base in 2..36 do
parse_in_base(binary, base)
end
def parse(_, base) do
raise ArgumentError, "invalid base #{base}"
end
defp parse_in_base("-" <> bin, base) do
case do_parse(bin, base) do
:error -> :error
{number, remainder} -> {-number, remainder}
end
end
defp parse_in_base("+" <> bin, base) do
do_parse(bin, base)
end
defp parse_in_base(bin, base) when is_binary(bin) do
do_parse(bin, base)
end
defp do_parse(<<char, rest::binary>>, base) do
if valid_digit_in_base?(char, base) do
do_parse(rest, base, parse_digit(char, base))
else
:error
end
end
defp do_parse(_, _), do: :error
defp do_parse(<<char, rest::binary>> = bin, base, acc) do
if valid_digit_in_base?(char, base) do
do_parse(rest, base, base * acc + parse_digit(char, base))
else
{acc, bin}
end
end
defp do_parse(bitstring, _, acc) do
{acc, bitstring}
end
defp parse_digit(char, _) do
cond do
char in ?0..?9 -> char - ?0
char in ?A..?Z -> char - ?A + 10
true -> char - ?a + 10
end
end
defp valid_digit_in_base?(char, base) do
if base <= 10 do
char in ?0..(?0 + base - 1)
else
char in ?0..?9 or char in ?A..(?A + base - 11) or char in ?a..(?a + base - 11)
end
end
@doc """
Returns a binary which corresponds to the text representation
of `some_integer`.
Inlined by the compiler.
## Examples
iex> Integer.to_string(123)
"123"
"""
@spec to_string(integer) :: String.t
def to_string(some_integer) do
:erlang.integer_to_binary(some_integer)
end
@doc """
Returns a binary which corresponds to the text representation
of `some_integer` in base `base`.
Inlined by the compiler.
## Examples
iex> Integer.to_string(100, 16)
"64"
"""
@spec to_string(integer, 2..36) :: String.t
def to_string(some_integer, base) do
:erlang.integer_to_binary(some_integer, base)
end
@doc """
Returns a char list which corresponds to the text representation of the given integer.
Inlined by the compiler.
## Examples
iex> Integer.to_char_list(7)
'7'
"""
@spec to_char_list(integer) :: char_list
def to_char_list(number) do
:erlang.integer_to_list(number)
end
@doc """
Returns a char list which corresponds to the text representation of the
given integer in the given base.
Inlined by the compiler.
## Examples
iex> Integer.to_char_list(1023, 16)
'3FF'
"""
@spec to_char_list(integer, 2..36) :: char_list
def to_char_list(number, base) do
:erlang.integer_to_list(number, base)
end
end
| lib/elixir/lib/integer.ex |
defmodule RomanNumerals do
@romans %{1 => "I", 5 => "V", 10 => "X", 50 => "L", 100 => "C", 500 => "D", 1000 => "M"}
@doc """
Converts a positive integer to its Roman numeral representation.
"""
@spec numeral(pos_integer) :: String.t()
def numeral(number) when number < 4 do
cond do
is_nil(@romans[number]) -> numeral(number - 1) <> "I"
true -> @romans[number]
end
end
def numeral(number) when number >= 4 and number < 9 do
cond do
is_nil(@romans[number]) and (number - 5) < 0 -> numeral(abs(number - 5)) <> "V"
is_nil(@romans[number]) and (number - 5) > 0 -> "V" <> numeral(number - 5)
true -> @romans[number]
end
end
def numeral(number) when number >= 9 and number < 40 do
cond do
is_nil(@romans[number]) and (number - 10) < 0 -> numeral(abs(number - 10)) <> "X"
is_nil(@romans[number]) and (number - 10) > 0 -> "X" <> numeral(number - 10)
true -> @romans[number]
end
end
def numeral(number) when number >= 40 and number < 90 do
cond do
is_nil(@romans[number]) and (number - 50) < 0 -> "XL" <> numeral(abs(number - 40))
is_nil(@romans[number]) and (number - 50) > 0 -> "L" <> numeral(number - 50)
true -> @romans[number]
end
end
def numeral(number) when number >= 90 and number < 400 do
cond do
is_nil(@romans[number]) and (number - 100) < 0 -> "XC" <> numeral(abs(number - 90))
is_nil(@romans[number]) and (number - 100) > 0 -> "C" <> numeral(number - 100)
true -> @romans[number]
end
end
def numeral(number) when number >= 400 and number < 900 do
cond do
is_nil(@romans[number]) and (number - 500) < 0 -> "CD" <> numeral(abs(number - 400))
is_nil(@romans[number]) and (number - 500) > 0 -> "D" <> numeral(number - 500)
true -> @romans[number]
end
end
def numeral(number) when number >= 900 do
cond do
is_nil(@romans[number]) and (number - 1000) < 0 -> "CM" <> numeral(abs(number - 900))
is_nil(@romans[number]) and (number - 1000) > 0 -> "M" <> numeral(number - 1000)
true -> @romans[number]
end
end
end
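# Illustrative usage (not part of the original file); the expected outputs
# follow from tracing the clauses above:
#
#     RomanNumerals.numeral(3)    #=> "III"
#     RomanNumerals.numeral(14)   #=> "XIV"
#     RomanNumerals.numeral(49)   #=> "XLIX"
#     RomanNumerals.numeral(2021) #=> "MMXXI"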
| roman-numerals/lib/roman_numerals.ex |
defmodule Cldr.Normalize.GrammaticalFeatures do
@moduledoc false
def normalize(content) do
content
|> Enum.map(fn
{<<language::binary-size(2), "-targets-nominal">>, case_data} ->
{language, format_case_data(case_data)}
{<<language::binary-size(3), "-targets-nominal">>, case_data} ->
{language, format_case_data(case_data)}
{"root" = language, compound_data} ->
{language, format_compound_data(compound_data)}
{<<language::binary-size(2)>>, compound_data} ->
{language, format_compound_data(compound_data)}
end)
|> Enum.group_by(fn {k, _v} -> k end, fn {_k, v} -> v end)
|> Enum.map(fn {k, v} -> {k, Cldr.Map.merge_map_list(v)} end)
|> Map.new()
end
  def format_case_data(case_data) do
    case Map.get(case_data, "grammaticalCase") do
      nil -> %{}
      data -> %{grammatical_cases: data}
    end
  end
def format_compound_data(compound_data) do
compound_data
|> Enum.map(fn
{"deriveCompound-feature-gender-structure-" <> compound, value} ->
{:gender, compound, String.to_integer(value)}
{"deriveComponent-feature-plural-structure-" <> compound, value} ->
{:plural, compound, format_values(value)}
{"deriveComponent-feature-case-structure-" <> compound, value} ->
{:case, compound, format_values(value)}
end)
|> Enum.group_by(
fn {type, _compound, _values} -> type end,
fn {_type, compound, values} -> {compound, values} end
)
|> Enum.map(fn {k, v} -> {k, Map.new(v)} end)
|> Map.new()
end
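  # Illustrative shape, inferred from the clause below rather than from CLDR
  # documentation: format_values/1 turns %{"_value0" => "a", "_value1" => "b"}
  # into %{0 => "a", 1 => "b"}.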
def format_values(values) do
values
|> Enum.map(fn
{"_value" <> value, v} -> {String.to_integer(value), v}
end)
|> Map.new()
end
def normalize_gender(content) do
content
|> Enum.map(fn
{<<language::binary-size(2), "-targets-nominal">>, gender_data} ->
{language, format_gender_data(gender_data)}
{<<language::binary-size(3), "-targets-nominal">>, gender_data} ->
{language, format_gender_data(gender_data)}
end)
|> Enum.group_by(fn {k, _v} -> k end, fn {_k, v} -> v end)
|> Enum.map(fn {k, v} -> {k, Cldr.Map.merge_map_list(v)} end)
|> Map.new()
end
def format_gender_data(data) do
Map.get(data, "grammaticalGender")
end
end
| mix/support/normalize/normalize_grammatical_features.ex |
defmodule ExDag.Store.MemoryStoreData do
@moduledoc """
Backend data module for ExDag.Store.MemoryStore
"""
@enforce_keys [:dags, :runs, :options]
defstruct dags: %{},
runs: %{},
options: %{}
alias ExDag.DAG
alias ExDag.DAGRun
def new(options) do
struct!(__MODULE__, runs: %{}, dags: %{}, options: options)
end
def add_dag(%__MODULE__{dags: dags} = data, %DAG{} = dag) do
dags = Map.put(dags, dag.dag_id, dag)
%__MODULE__{data | dags: dags}
end
  def get_run(%__MODULE__{runs: runs}, dag_id, run_id) do
    runs
    |> Map.get(dag_id, %{})
    |> Map.get(run_id)
  end
  def get_runs(%__MODULE__{runs: runs}, dag_id) do
    runs
    |> Map.get(dag_id, %{})
    |> Enum.to_list()
  end
def get_dags(%__MODULE__{dags: dags}) do
dags
end
def get_dag(%__MODULE__{dags: dags}, dag_id) do
case Map.get(dags, dag_id) do
%DAG{} = dag ->
dag
nil ->
{:error, "Dag not found"}
end
end
def add_run(%__MODULE__{runs: runs} = data, %DAGRun{dag: %DAG{} = dag} = dag_run) do
dag_id = dag.dag_id
dag_runs =
runs
|> Map.get(dag_id, %{})
|> Map.put(dag_run.id, dag_run)
runs = Map.put(runs, dag_id, dag_runs)
%__MODULE__{data | runs: runs}
end
def delete_dag(%__MODULE__{dags: dags} = data, dag_id) do
dags = Map.delete(dags, dag_id)
%__MODULE__{data | dags: dags}
end
end
defmodule ExDag.Store.MemoryStore do
@moduledoc """
In-memory implementation of ExDag.Store.Adapter
Uses a GenServer to maintain DAGs and DAGRuns
"""
use GenServer
@behaviour ExDag.Store.Adapter
alias ExDag.Store.MemoryStoreData
alias ExDag.DAG
alias ExDag.DAGRun
require Logger
# GenServer stuff
@impl GenServer
def init(%MemoryStoreData{} = state) do
{:ok, state}
end
@impl GenServer
def handle_call(:get_dags, _from, state) do
dags = MemoryStoreData.get_dags(state)
{:reply, dags, state}
end
@impl GenServer
def handle_call({:get_dag, dag_id}, _from, state) do
result =
case MemoryStoreData.get_dag(state, dag_id) do
%DAG{} = dag ->
{:ok, dag}
{:error, error} ->
{:error, error}
_ ->
{:error, "Could not load dag"}
end
{:reply, result, state}
end
def handle_call({:get_dag_runs, %DAG{} = dag, _options}, _from, %MemoryStoreData{} = state) do
runs = MemoryStoreData.get_runs(state, dag.dag_id)
{:reply, runs, state}
end
def handle_call({:get_dag_run, dag_id, run_id, _options}, _from, %MemoryStoreData{} = state) do
{:reply, {:ok, MemoryStoreData.get_run(state, dag_id, run_id)}, state}
end
def handle_call(
{:save_dag_run, %DAGRun{} = dag_run, _options},
_from,
%MemoryStoreData{} = state
) do
Logger.debug("Saving dag run :#{dag_run.id}")
state = MemoryStoreData.add_run(state, dag_run)
{:reply, {:ok, []}, state}
end
def handle_call({:save_dag, %DAG{} = dag, _options}, _from, %MemoryStoreData{} = state) do
Logger.debug("Saving dag :#{dag.dag_id}")
state = MemoryStoreData.add_dag(state, dag)
{:reply, {:ok, []}, state}
end
def handle_call({:delete_dag, %DAG{} = dag, _options}, _from, %MemoryStoreData{} = state) do
Logger.debug("Saving dag :#{dag.dag_id}")
state = MemoryStoreData.delete_dag(state, dag)
{:reply, {:ok, []}, state}
end
# Store stuff
@impl ExDag.Store.Adapter
def init_store(options) do
state = MemoryStoreData.new(options)
GenServer.start(__MODULE__, state, name: __MODULE__)
end
@impl ExDag.Store.Adapter
def get_dags(_options) do
GenServer.call(__MODULE__, :get_dags)
end
@impl ExDag.Store.Adapter
def get_dag(_options, dag_id) do
GenServer.call(__MODULE__, {:get_dag, dag_id})
end
@impl ExDag.Store.Adapter
def save_dag(options, dag) do
GenServer.call(__MODULE__, {:save_dag, dag, options})
end
@impl ExDag.Store.Adapter
def delete_dag(options, dag) do
GenServer.call(__MODULE__, {:delete_dag, dag, options})
end
@impl ExDag.Store.Adapter
def save_dag_run(options, dag_run) do
GenServer.call(__MODULE__, {:save_dag_run, dag_run, options})
end
@impl ExDag.Store.Adapter
def get_dag_runs(options, dag) do
GenServer.call(__MODULE__, {:get_dag_runs, dag, options})
end
@impl ExDag.Store.Adapter
def get_dag_path(_options, _dag) do
{:error, "unsupported"}
end
@impl ExDag.Store.Adapter
def get_dag_run(options, dag_id, run_id) do
GenServer.call(__MODULE__, {:get_dag_run, dag_id, run_id, options})
end
end
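# Hedged usage sketch (not from the original file); assumes an existing
# %ExDag.DAG{} struct bound to `dag`:
#
#     {:ok, _pid} = ExDag.Store.MemoryStore.init_store(%{})
#     {:ok, []} = ExDag.Store.MemoryStore.save_dag([], dag)
#     {:ok, ^dag} = ExDag.Store.MemoryStore.get_dag([], dag.dag_id)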
| lib/ex_dag/store/memory_store.ex |
defmodule Modbux.Tcp.Server do
@moduledoc """
API for Modbus TCP Server.
"""
alias Modbux.Tcp.Server
alias Modbux.Model.Shared
use GenServer, restart: :transient
require Logger
@port 502
@to :infinity
defstruct ip: nil,
model_pid: nil,
tcp_port: nil,
timeout: nil,
listener: nil,
parent_pid: nil,
sup_pid: nil,
acceptor_pid: nil
@doc """
Starts a Modbus TCP Server process.
The following options are available:
  * `port` - the TCP port number the server listens on.
  * `timeout` - the connection timeout.
  * `model` - defines the DB initial state.
  * `sup_opts` - server supervisor OTP options.
  * `active` - (`true` or `false`) enables or disables DB update notifications (mailbox).
The messages (when active mode is true) have the following form:
`{:modbus_tcp, {:slave_request, payload}}`
## Model (DB)
The model or data base (DB) defines the server memory map, the DB is defined by the following syntax:
```elixir
%{slave_id => %{{memory_type, address_number} => value}}
```
where:
* `slave_id` - specifies a unique unit address from 1 to 247.
* `memory_type` - specifies the memory between:
* `:c` - Discrete Output Coils.
* `:i` - Discrete Input Contacts.
* `:ir` - Analog Input Registers.
* `:hr` - Analog Output Registers.
* `address_number` - specifies the memory address.
* `value` - the current value from that memory.
## Example
```elixir
model = %{80 => %{{:c, 20818} => 0, {:hr, 20818} => 0}}
Modbux.Tcp.Server.start_link(model: model, port: 2000)
```
"""
@spec start_link(any, [
{:debug, [:log | :statistics | :trace | {any, any}]}
| {:hibernate_after, :infinity | non_neg_integer}
| {:name, atom | {:global, any} | {:via, atom, any}}
| {:spawn_opt,
:link
| :monitor
| {:fullsweep_after, non_neg_integer}
| {:min_bin_vheap_size, non_neg_integer}
| {:min_heap_size, non_neg_integer}
| {:priority, :high | :low | :normal}}
| {:timeout, :infinity | non_neg_integer}
]) :: :ignore | {:error, any} | {:ok, pid}
def start_link(params, opts \\ []) do
GenServer.start_link(__MODULE__, {params, self()}, opts)
end
@spec stop(atom | pid | {atom, any} | {:via, atom, any}) :: :ok
def stop(pid) do
GenServer.stop(pid)
end
@doc """
Updates the state of the Server DB.
`cmd` is a 4 elements tuple, as follows:
- `{:rc, slave, address, count}` read `count` coils.
- `{:ri, slave, address, count}` read `count` inputs.
- `{:rhr, slave, address, count}` read `count` holding registers.
- `{:rir, slave, address, count}` read `count` input registers.
- `{:fc, slave, address, value}` force single coil.
- `{:phr, slave, address, value}` preset single holding register.
- `{:fc, slave, address, values}` force multiple coils.
- `{:phr, slave, address, values}` preset multiple holding registers.
"""
@spec update(atom | pid | {atom, any} | {:via, atom, any}, any) :: any
def update(pid, cmd) do
GenServer.call(pid, {:update, cmd})
end
@doc """
Gets the current state of the Server DB.
"""
@spec get_db(atom | pid | {atom, any} | {:via, atom, any}) :: any
def get_db(pid) do
GenServer.call(pid, :get_db)
end
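  # Hedged usage sketch (not part of the original file), assuming the server
  # from the moduledoc example is bound to `pid`; the DB shape shown is an
  # assumption based on the moduledoc's model:
  #
  #     Modbux.Tcp.Server.update(pid, {:phr, 80, 20818, 42})
  #     Modbux.Tcp.Server.get_db(pid)
  #     #=> %{80 => %{{:c, 20818} => 0, {:hr, 20818} => 42}}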
def init({params, parent_pid}) do
port = Keyword.get(params, :port, @port)
timeout = Keyword.get(params, :timeout, @to)
parent_pid = if Keyword.get(params, :active, false), do: parent_pid
model = Keyword.fetch!(params, :model)
{:ok, model_pid} = Shared.start_link(model: model)
sup_opts = Keyword.get(params, :sup_opts, [])
{:ok, sup_pid} = Server.Supervisor.start_link(sup_opts)
state = %Server{
tcp_port: port,
model_pid: model_pid,
timeout: timeout,
parent_pid: parent_pid,
sup_pid: sup_pid
}
{:ok, state, {:continue, :setup}}
end
def terminate(:normal, _state), do: nil
def terminate(reason, state) do
Logger.error("(#{__MODULE__}) Error: #{inspect(reason)}")
:gen_tcp.close(state.listener)
end
def handle_call({:update, request}, _from, state) do
res =
case Shared.apply(state.model_pid, request) do
{:ok, values} ->
Logger.debug("(#{__MODULE__}) DB request: #{inspect(request)}, #{inspect(values)}")
values
nil ->
Logger.debug("(#{__MODULE__}) DB update: #{inspect(request)}")
error ->
Logger.debug("(#{__MODULE__}) An error has occur")
error
end
{:reply, res, state}
end
def handle_call(:get_db, _from, state) do
{:reply, Shared.state(state.model_pid), state}
end
def handle_continue(:setup, state) do
new_state = listener_setup(state)
{:noreply, new_state}
end
defp listener_setup(state) do
case :gen_tcp.listen(state.tcp_port, [:binary, packet: :raw, active: true, reuseaddr: true]) do
{:ok, listener} ->
{:ok, {ip, _port}} = :inet.sockname(listener)
accept = Task.async(fn -> accept(state, listener) end)
%Server{state | ip: ip, acceptor_pid: accept, listener: listener}
{:error, :eaddrinuse} ->
Logger.error("(#{__MODULE__}) Error: A listener is still alive")
close_alive_sockets(state.tcp_port)
Process.sleep(100)
listener_setup(state)
{:error, reason} ->
Logger.error("(#{__MODULE__}) Error in Listen: #{reason}")
Process.sleep(1000)
listener_setup(state)
end
end
def close_alive_sockets(port) do
Port.list()
|> Enum.filter(fn x -> Port.info(x)[:name] == 'tcp_inet' end)
|> Enum.filter(fn x ->
{:ok, {{0, 0, 0, 0}, port}} == :inet.sockname(x) || {:ok, {{127, 0, 0, 1}, port}} == :inet.sockname(x)
end)
|> Enum.each(fn x -> :gen_tcp.close(x) end)
end
defp accept(state, listener) do
case :gen_tcp.accept(listener) do
{:ok, socket} ->
{:ok, pid} =
Server.Supervisor.start_child(state.sup_pid, Server.Handler, [
socket,
state.model_pid,
state.parent_pid
])
Logger.debug("(#{__MODULE__}) New Client socket: #{inspect(socket)}, pid: #{inspect(pid)}")
case :gen_tcp.controlling_process(socket, pid) do
:ok ->
nil
error ->
Logger.error("(#{__MODULE__}) Error in controlling process: #{inspect(error)}")
end
accept(state, listener)
{:error, reason} ->
Logger.error("(#{__MODULE__}) Error Accept: #{inspect(reason)}")
exit(reason)
end
end
end
| lib/tcp/server/server.ex |
defmodule Mix.Tasks.Phx.Gen.Elm do
use Mix.Task
@shortdoc "Generates an elm app inside a Phoenix (1.3) app with the necessary scaffolding"
@instructions """
1. add the following to the `plugins` section of your `brunch-config.js`
```js
elmBrunch: {
elmFolder: '.',
mainModules: ['elm/Main.elm'],
outputFile: 'elm.js',
outputFolder: '../assets/js',
makeParameters: ['--debug'] // optional debugger for development
}
```
2. add `elm` to the `watched` array in your `brunch-config.js`
You may also want to add `/elm\\.js/` to the babel ignore pattern to speed up compilation
```js
babel: {
ignore: [/vendor/, /elm\.js/]
}
```
3. in your `app.js` file add the following
```js
import ElmApp from './elm.js'
import elmEmbed from './elm-embed.js'
elmEmbed.init(ElmApp)
```
4. and finally in your `router.ex` file add
```elixir
get "/path-to-elm-app", ElmController, :index
```
"""
@moduledoc """
  Generates an Elm app inside a Phoenix (1.3) app with the necessary scaffolding
adds:
- elm files (`Main`, `Types`, `State`, `View`)
- an embed script
- `elm-package.json`
- A phoenix `controller`, `view` and `template`
- an `elm-test` setup
to run the generator:
```sh
> mix phx.gen.elm
```
then follow post install instructions:
#{@instructions}
"""
@src "priv/templates/phx.gen.elm"
def run(_argv) do
copy_phoenix_files()
copy_elm_files()
install_node_modules()
post_install_instructions()
update_time_created()
end
defp post_install_instructions do
instructions = """
🎉 ✨ Your elm app is almost ready to go! ✨ 🎉
#{@instructions}
"""
Mix.shell.info(instructions)
end
defp copy_phoenix_files do
templates = [
{:eex, "views/elm_view.ex", web_dir("views/elm_view.ex")},
{:eex, "controllers/elm_controller.ex", web_dir("controllers/elm_controller.ex")},
{:text, "templates/elm/index.html.eex", web_dir("templates/elm/index.html.eex")}
]
Mix.shell.info("adding phoenix files 🕊 🔥")
copy_files(templates)
end
defp update_time_created do
[
web_dir("views/elm_view.ex"),
web_dir("controllers/elm_controller.ex")
]
|> Enum.map(&File.touch!(&1))
end
defp web_dir(path) do
Mix.Phoenix.context_app()
|> Mix.Phoenix.web_path()
|> Path.join(path)
end
defp copy_files(templates) do
Mix.Phoenix.copy_from(
[:phoenix_elm_scaffold], @src, [app_name: app_module_name()], templates
)
end
defp copy_elm_files do
files = [
"assets/elm/Main.elm",
"assets/elm/State.elm",
"assets/elm/View.elm",
"assets/elm/Types.elm",
"assets/js/elm-embed.js",
"assets/elm-package.json",
"test/elm/Main.elm",
"test/elm/Sample.elm",
"test/elm/elm-package.json"
]
|> Enum.map(&text_file/1)
Mix.shell.info("adding elm files 🌳")
copy_files(files)
end
defp text_file(path) do
{:text, path, path}
end
defp app_module_name do
Mix.Phoenix.context_app()
|> Atom.to_string
|> String.split("_")
|> Enum.map(&String.capitalize/1)
|> Enum.join("")
end
defp install_node_modules do
deps = [
"elm"
]
dev_deps = [
"elm-brunch",
"elm-test"
]
change_dir = "cd assets"
pre_install = "npm install"
node_install_deps = "npm install -S " <> Enum.join(deps, " ")
node_install_dev_deps = "npm install -D " <> Enum.join(dev_deps, " ")
# TODO: make these not depend on a global version of elm
elm_install = "elm-package install -y"
elm_compile = "elm-make elm/Main.elm --output=js/elm.js"
all_cmds = [
change_dir,
pre_install,
node_install_deps,
node_install_dev_deps,
elm_install,
elm_compile
]
cmd = Enum.join(all_cmds, " && ")
Mix.shell.info("installing node modules for elm-app ⬇️")
Mix.shell.info(cmd)
status = Mix.shell.cmd(cmd, stderr_to_stdout: true)
case status do
0 -> :ok
_ -> raise "Error installing node modules: #{status}"
end
end
end
| lib/mix/tasks/phx.gen.elm.ex |
defmodule Wadm.Model.AppSpec do
@moduledoc """
The root of an OAM Application Specification model
"""
alias __MODULE__
alias Wadm.Model.{
ActorComponent,
CapabilityComponent,
Decoder
}
@enforce_keys [:name]
defstruct [:name, :version, :description, components: []]
@typedoc """
Valid component types
"""
@type component :: ActorComponent.t() | CapabilityComponent.t()
@typedoc """
The root model specification for an application to be managed by the controller
"""
@type t :: %AppSpec{
name: String.t(),
version: String.t(),
description: String.t(),
components: [component()]
}
@doc """
Creates a new wasmCloud OAM application specification
"""
@spec new(String.t(), String.t(), String.t(), [component()]) :: AppSpec.t()
def new(name, version, description, components \\ []) do
%AppSpec{
name: name,
version: version,
description: description,
components: components
}
end
@doc """
Takes a map as returned by either of `YamlElixir`'s parse functions and returns either
a canonical representation of the wasmCloud OAM application specification model or
an error and an accompanying reason indicating the cause of the decode failure
"""
  @spec from_yaml(map()) :: {:ok, AppSpec.t()} | {:error, String.t()}
def from_yaml(yaml = %{}) do
case Decoder.extract_components(yaml) do
{:ok, components} ->
{:ok,
new(
case get_in(yaml, ["metadata", "name"]) do
nil -> "Unnamed"
n -> n
end,
case get_in(yaml, ["metadata", "annotations", "version"]) do
nil -> "v0.0.0"
v -> v
end,
case get_in(yaml, ["metadata", "annotations", "description"]) do
nil -> "Unnamed Application"
d -> d
end,
components
)}
{:error, reason} ->
{:error, reason}
end
end
end
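# Hedged usage sketch (not from the original file): new/4 fills the struct
# directly, with components defaulting to []:
#
#     spec = Wadm.Model.AppSpec.new("echo", "v0.0.1", "Echo example")
#     spec.name        #=> "echo"
#     spec.components  #=> []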
| wadm/lib/wadm/model/app_spec.ex |
defmodule EctoIPRange.Util.CIDR do
@moduledoc false
use Bitwise, skip_operators: true
alias EctoIPRange.Util.Inet
@doc """
  Calculates the first and last address for an IPv4 address in CIDR notation.
## Examples
iex> parse_ipv4("1.2.3.4", 32)
{{1, 2, 3, 4}, {1, 2, 3, 4}}
iex> parse_ipv4("192.168.0.0", 20)
{{192, 168, 0, 0}, {192, 168, 15, 255}}
iex> parse_ipv4("192.168.0.0", 8)
:error
iex> parse_ipv4("a.b.c.d", 32)
:error
iex> parse_ipv4("1.2.3.4", 64)
:error
iex> parse_ipv4("fc00:db20:35b:7399::5", 128)
:error
"""
@spec parse_ipv4(binary, 0..32) :: {:inet.ip4_address(), :inet.ip4_address()} | :error
def parse_ipv4(address, 32) do
case Inet.parse_ipv4_binary(address) do
{:ok, ip4_address} -> {ip4_address, ip4_address}
_ -> :error
end
end
def parse_ipv4(address, maskbits) when maskbits in 0..31 do
with {:ok, ip4_address} <- Inet.parse_ipv4_binary(address),
^ip4_address <- ipv4_start_address(ip4_address, maskbits) do
{ip4_address, ipv4_end_address(ip4_address, maskbits)}
else
_ -> :error
end
end
def parse_ipv4(_, _), do: :error
@doc """
  Calculates the first and last address for an IPv6 address in CIDR notation.
## Examples
iex> parse_ipv6("fc00:db20:35b:7399::5", 128)
{{1, 2, 3, 4, 5, 6, 7, 8}, {1, 2, 3, 4, 5, 6, 7, 8}}
iex> parse_ipv6("1:2:3:4::", 110)
{{1, 2, 3, 4, 0, 0, 0, 0}, {1, 2, 3, 4, 0, 0, 3, 65535}}
iex> parse_ipv6("1:2:3:4::", 64)
{{1, 2, 3, 4, 0, 0, 0, 0}, {1, 2, 3, 4, 65535, 65535, 65535, 65535}}
iex> parse_ipv6("1.2.3.4", 128)
{{0, 0, 0, 0, 0, 65535, 258, 772}, {0, 0, 0, 0, 0, 65535, 258, 772}}
iex> parse_ipv6("192.168.0.0", 110)
{{0, 0, 0, 0, 0, 65535, 49320, 0}, {0, 0, 0, 0, 0, 65535, 49323, 65535}}
iex> parse_ipv6("fc00:db20:35b:7399::5", 256)
:error
iex> parse_ipv6("fc00:db20:35b:7399::5", 64)
:error
iex> parse_ipv6("s:t:u:v:w:x:y:z", 32)
:error
"""
@spec parse_ipv6(binary, 0..128) :: {:inet.ip6_address(), :inet.ip6_address()} | :error
def parse_ipv6(address, 128) do
case Inet.parse_ipv6_binary(address) do
{:ok, ip6_address} -> {ip6_address, ip6_address}
_ -> :error
end
end
def parse_ipv6(address, maskbits) when maskbits in 0..127 do
with {:ok, ip6_address} <- Inet.parse_ipv6_binary(address),
^ip6_address <- ipv6_start_address(ip6_address, maskbits) do
{ip6_address, ipv6_end_address(ip6_address, maskbits)}
else
_ -> :error
end
end
def parse_ipv6(_, _), do: :error
defp ipv4_start_address({start_a, start_b, start_c, start_d}, maskbits) do
{mask_a, mask_b, mask_c, mask_d} =
cond do
maskbits >= 24 -> {0xFF, 0xFF, 0xFF, bnot(bsr(0xFF, maskbits - 24))}
maskbits >= 16 -> {0xFF, 0xFF, bnot(bsr(0xFF, maskbits - 16)), 0}
maskbits >= 8 -> {0xFF, bnot(bsr(0xFF, maskbits - 8)), 0, 0}
true -> {bnot(bsr(0xFF, maskbits)), 0, 0, 0}
end
{
band(mask_a, start_a),
band(mask_b, start_b),
band(mask_c, start_c),
band(mask_d, start_d)
}
end
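  # Worked example (illustrative): for maskbits = 20 the cond above yields
  # {0xFF, 0xFF, bnot(bsr(0xFF, 4)), 0}; bnot(0x0F) bands like 0xF0 on octet
  # values, so 192.168.12.34/20 masks down to {192, 168, 0, 0}, consistent
  # with the parse_ipv4/2 doctest above.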
defp ipv6_start_address(
{start_a, start_b, start_c, start_d, start_e, start_f, start_g, start_h},
maskbits
) do
{mask_a, mask_b, mask_c, mask_d, mask_e, mask_f, mask_g, mask_h} =
cond do
maskbits >= 112 ->
{0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF,
bnot(bsr(0xFFFF, maskbits - 112))}
maskbits >= 96 ->
{0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, bnot(bsr(0xFFFF, maskbits - 96)), 0}
maskbits >= 80 ->
{0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, bnot(bsr(0xFFFF, maskbits - 80)), 0, 0}
maskbits >= 64 ->
{0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, bnot(bsr(0xFFFF, maskbits - 64)), 0, 0, 0}
maskbits >= 48 ->
{0xFFFF, 0xFFFF, 0xFFFF, bnot(bsr(0xFFFF, maskbits - 48)), 0, 0, 0, 0}
maskbits >= 32 ->
{0xFFFF, 0xFFFF, bnot(bsr(0xFFFF, maskbits - 32)), 0, 0, 0, 0, 0}
maskbits >= 16 ->
{0xFFFF, bnot(bsr(0xFFFF, maskbits - 16)), 0, 0, 0, 0, 0, 0}
true ->
{bnot(bsr(0xFFFF, maskbits)), 0, 0, 0, 0, 0, 0, 0}
end
{
band(mask_a, start_a),
band(mask_b, start_b),
band(mask_c, start_c),
band(mask_d, start_d),
band(mask_e, start_e),
band(mask_f, start_f),
band(mask_g, start_g),
band(mask_h, start_h)
}
end
defp ipv4_end_address({start_a, start_b, start_c, start_d}, maskbits) do
{mask_a, mask_b, mask_c, mask_d} =
cond do
maskbits >= 24 -> {0, 0, 0, bsr(0xFF, maskbits - 24)}
maskbits >= 16 -> {0, 0, bsr(0xFF, maskbits - 16), 0xFF}
maskbits >= 8 -> {0, bsr(0xFF, maskbits - 8), 0xFF, 0xFF}
true -> {bsr(0xFF, maskbits), 0xFF, 0xFF, 0xFF}
end
{
bor(mask_a, start_a),
bor(mask_b, start_b),
bor(mask_c, start_c),
bor(mask_d, start_d)
}
end
defp ipv6_end_address(
{start_a, start_b, start_c, start_d, start_e, start_f, start_g, start_h},
maskbits
) do
{mask_a, mask_b, mask_c, mask_d, mask_e, mask_f, mask_g, mask_h} =
cond do
maskbits >= 112 ->
{0, 0, 0, 0, 0, 0, 0, bsr(0xFFFF, maskbits - 112)}
maskbits >= 96 ->
{0, 0, 0, 0, 0, 0, bsr(0xFFFF, maskbits - 96), 0xFFFF}
maskbits >= 80 ->
{0, 0, 0, 0, 0, bsr(0xFFFF, maskbits - 80), 0xFFFF, 0xFFFF}
maskbits >= 64 ->
{0, 0, 0, 0, bsr(0xFFFF, maskbits - 64), 0xFFFF, 0xFFFF, 0xFFFF}
maskbits >= 48 ->
{0, 0, 0, bsr(0xFFFF, maskbits - 48), 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF}
maskbits >= 32 ->
{0, 0, bsr(0xFFFF, maskbits - 32), 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF}
maskbits >= 16 ->
{0, bsr(0xFFFF, maskbits - 16), 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF}
true ->
{bsr(0xFFFF, maskbits), 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF}
end
{
bor(mask_a, start_a),
bor(mask_b, start_b),
bor(mask_c, start_c),
bor(mask_d, start_d),
bor(mask_e, start_e),
bor(mask_f, start_f),
bor(mask_g, start_g),
bor(mask_h, start_h)
}
end
end
| lib/ecto_ip_range/util/cidr.ex |
defmodule Liquex.Filter do
@moduledoc """
Contains all the basic filters for Liquid
"""
@type filter_t :: {:filter, [...]}
@callback apply(any, filter_t, map) :: any
alias Liquex.Context
defmacro __using__(_) do
quote do
@behaviour Liquex.Filter
@spec apply(any, Liquex.Filter.filter_t(), map) :: any
@impl Liquex.Filter
def apply(value, filter, context),
do: Liquex.Filter.apply(__MODULE__, value, filter, context)
end
end
@spec filter_name(filter_t) :: String.t()
def filter_name({:filter, [filter_name | _]}), do: filter_name
def apply(
mod \\ __MODULE__,
value,
{:filter, [function, {:arguments, arguments}]},
context
) do
func = String.to_existing_atom(function)
function_args =
Enum.map(
arguments,
&Liquex.Argument.eval(&1, context)
)
|> merge_keywords()
mod =
if mod != __MODULE__ and Kernel.function_exported?(mod, func, length(function_args) + 2) do
mod
else
__MODULE__
end
Kernel.apply(mod, func, [value | function_args] ++ [context])
rescue
# credo:disable-for-next-line
ArgumentError -> raise Liquex.Error, "Invalid filter #{function}"
end
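  # Hedged sketch of a custom filter module (a hypothetical `MyFilters`, not
  # part of Liquex itself). apply/4 above looks the filter up by name and
  # arity (`length(function_args) + 2`), so a filter that takes no extra
  # arguments receives just the value and the context:
  #
  #     defmodule MyFilters do
  #       use Liquex.Filter
  #       def shout(value, _context), do: String.upcase(value) <> "!"
  #     end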
# Merges the tuples at the end of the argument list into a keyword list, but with string keys
# value, size, {"crop", direction}, {"filter", filter}
# becomes
# value, size, [{"crop", direction}, {"filter", filter}]
defp merge_keywords(arguments) do
{keywords, rest} =
arguments
|> Enum.reverse()
|> Enum.split_while(&is_tuple/1)
case keywords do
[] -> rest
_ -> [Enum.reverse(keywords) | rest]
end
|> Enum.reverse()
end
@doc """
Returns the absolute value of `value`.
## Examples
iex> Liquex.Filter.abs(-1, %{})
1
iex> Liquex.Filter.abs(1, %{})
1
iex> Liquex.Filter.abs("-1.1", %{})
1.1
"""
@spec abs(String.t() | number, any) :: number
def abs(value, _) when is_binary(value) do
{float, ""} = Float.parse(value)
abs(float)
end
def abs(value, _), do: abs(value)
@doc """
Appends `text` to the end of `value`
## Examples
iex> Liquex.Filter.append("myfile", ".html", %{})
"myfile.html"
"""
@spec append(String.t(), String.t(), map()) :: String.t()
def append(value, text, _), do: value <> text
@doc """
Sets a minimum value
## Examples
iex> Liquex.Filter.at_least(3, 5, %{})
5
iex> Liquex.Filter.at_least(5, 3, %{})
5
"""
@spec at_least(number, number, map()) :: number
def at_least(value, min, _) when value > min, do: value
def at_least(_value, min, _), do: min
@doc """
Sets a maximum value
## Examples
iex> Liquex.Filter.at_most(4, 5, %{})
4
iex> Liquex.Filter.at_most(4, 3, %{})
3
"""
@spec at_most(number, number, map()) :: number
def at_most(value, max, _) when value < max, do: value
def at_most(_value, max, _), do: max
@doc """
Capitalizes a string
## Examples
iex> Liquex.Filter.capitalize("title", %{})
"Title"
iex> Liquex.Filter.capitalize("my great title", %{})
"My great title"
"""
@spec capitalize(String.t(), map()) :: String.t()
def capitalize(value, _), do: String.capitalize(value)
@doc """
Rounds `value` up to the nearest whole number. Liquid tries to convert the input to a number before the filter is applied.
## Examples
iex> Liquex.Filter.ceil(1.2, %{})
2
iex> Liquex.Filter.ceil(2.0, %{})
2
iex> Liquex.Filter.ceil(183.357, %{})
184
iex> Liquex.Filter.ceil("3.5", %{})
4
"""
@spec ceil(number | String.t(), map()) :: number
def ceil(value, _) when is_binary(value) do
{num, ""} = Float.parse(value)
Float.ceil(num) |> trunc()
end
def ceil(value, _), do: Float.ceil(value) |> trunc()
@doc """
Removes any nil values from an array.
## Examples
iex> Liquex.Filter.compact([1, 2, nil, 3], %{})
[1,2,3]
iex> Liquex.Filter.compact([1, 2, 3], %{})
[1,2,3]
"""
@spec compact([any], map()) :: [any]
def compact(value, _) when is_list(value),
do: Enum.reject(value, &is_nil/1)
@doc """
  Concatenates (joins together) multiple arrays. The resulting array contains all the items of the input arrays.
## Examples
iex> Liquex.Filter.concat([1,2], [3,4], %{})
[1,2,3,4]
"""
def concat(value, other, _) when is_list(value) and is_list(other),
do: value ++ other
@doc """
Converts `value` timestamp into another date `format`.
The format for this syntax is the same as strftime. The input uses the same format as Ruby’s Time.parse.
## Examples
iex> Liquex.Filter.date(~D[2000-01-01], "%m/%d/%Y", %{})
"01/01/2000"
iex> Liquex.Filter.date("2000-01-01", "%m/%d/%Y", %{})
"01/01/2000"
iex> Liquex.Filter.date("January 1, 2000", "%m/%d/%Y", %{})
"01/01/2000"
iex> Liquex.Filter.date("1/2/2000", "%m/%d/%Y", %{})
"01/02/2000"
iex> Liquex.Filter.date("March 14, 2016", "%b %d, %y", %{})
"Mar 14, 16"
"""
def date(%Date{} = value, format, _), do: Timex.format!(value, format, :strftime)
def date(%DateTime{} = value, format, _), do: Timex.format!(value, format, :strftime)
def date(%NaiveDateTime{} = value, format, _), do: Timex.format!(value, format, :strftime)
def date("now", format, context), do: date(DateTime.utc_now(), format, context)
def date("today", format, context), do: date(Date.utc_today(), format, context)
def date(value, format, context) when is_binary(value) do
# Thanks to the nonspecific definition of the format in the spec, we parse
# some common date formats
case DateTimeParser.parse_datetime(value, assume_time: true) do
{:ok, parsed_date} ->
parsed_date
|> NaiveDateTime.to_date()
|> date(format, context)
_ ->
nil
end
end
@doc """
Allows you to specify a fallback in case a value doesn’t exist. default will show its value
if the left side is nil, false, or empty.
## Examples
iex> Liquex.Filter.default("1.99", "2.99", %{})
"1.99"
iex> Liquex.Filter.default("", "2.99", %{})
"2.99"
"""
def default(value, def_value, _) when value in [nil, "", false, []], do: def_value
def default(value, _, _), do: value
@doc """
Divides a number by another number.
## Examples
The result is rounded down to the nearest integer (that is, the floor) if the divisor is an integer.
iex> Liquex.Filter.divided_by(16, 4, %{})
4
iex> Liquex.Filter.divided_by(5, 3, %{})
1
iex> Liquex.Filter.divided_by(20, 7.0, %{})
2.857142857142857
"""
def divided_by(value, divisor, _) when is_integer(divisor), do: trunc(value / divisor)
def divided_by(value, divisor, _), do: value / divisor
@doc """
Makes each character in a string lowercase. It has no effect on strings
which are already all lowercase.
## Examples
iex> Liquex.Filter.downcase("<NAME>", %{})
"<NAME>"
iex> Liquex.Filter.downcase("apple", %{})
"apple"
"""
def downcase(nil, _), do: nil
def downcase(value, _), do: String.downcase(value)
@doc """
  Escapes a string by replacing special characters with HTML entities, so that
  the string can be rendered safely in HTML. It doesn't change strings that
  don't have anything to escape.
## Examples
iex> Liquex.Filter.escape("Have you read 'James & the Giant Peach'?", %{})
"Have you read 'James & the Giant Peach'?"
iex> Liquex.Filter.escape("Tetsuro Takara", %{})
"Tetsuro Takara"
"""
def escape(value, _),
do: HtmlEntities.encode(value)
@doc """
  Escapes a string without re-escaping characters that have already been
  escaped. It doesn't change strings that don't have anything to escape.
## Examples
iex> Liquex.Filter.escape_once("1 < 2 & 3", %{})
"1 < 2 & 3"
"""
def escape_once(value, _),
do: value |> HtmlEntities.decode() |> HtmlEntities.encode()
@doc """
Returns the first item of an array.
## Examples
iex> Liquex.Filter.first([1, 2, 3], %{})
1
iex> Liquex.Filter.first([], %{})
nil
"""
def first([], _), do: nil
def first([f | _], _), do: f
@doc """
Rounds the input down to the nearest whole number. Liquid tries to convert the input to a
number before the filter is applied.
## Examples
iex> Liquex.Filter.floor(1.2, %{})
1
iex> Liquex.Filter.floor(2.0, %{})
2
"""
def floor(value, _), do: Kernel.trunc(value)
@doc """
Combines the items in `values` into a single string using `joiner` as a separator.
## Examples
      iex> Liquex.Filter.join(~w(John Paul George Ringo), " and ", %{})
"John and Paul and George and Ringo"
"""
def join(values, joiner, _), do: Enum.join(values, joiner)
@doc """
Returns the last item of `arr`.
## Examples
iex> Liquex.Filter.last([1, 2, 3], %{})
3
iex> Liquex.Filter.first([], %{})
nil
"""
@spec last(list, Liquex.Context.t()) :: any
def last(arr, context), do: arr |> Enum.reverse() |> first(context)
@doc """
Removes all whitespace (tabs, spaces, and newlines) from the left side of a string.
It does not affect spaces between words.
## Examples
iex> Liquex.Filter.lstrip(" So much room for activities! ", %{})
"So much room for activities! "
"""
@spec lstrip(String.t(), Context.t()) :: String.t()
def lstrip(value, _), do: value |> String.trim_leading()
@doc """
Creates an array (`arr`) of values by extracting the values of a named property from another object (`key`).
## Examples
iex> Liquex.Filter.map([%{"a" => 1}, %{"a" => 2, "b" => 1}], "a", %{})
[1, 2]
"""
@spec map([any], term, Context.t()) :: [any]
def map(arr, key, _), do: Enum.map(arr, &Liquex.Indifferent.get(&1, key, nil))
@doc """
Subtracts a number from another number.
## Examples
iex> Liquex.Filter.minus(4, 2, %{})
2
iex> Liquex.Filter.minus(183.357, 12, %{})
171.357
"""
@spec minus(number, number, Context.t()) :: number
def minus(left, right, _), do: left - right
@doc """
Returns the remainder of a division operation.
## Examples
iex> Liquex.Filter.modulo(3, 2, %{})
1
iex> Liquex.Filter.modulo(183.357, 12, %{})
3.357
"""
@spec modulo(number, number, Context.t()) :: number
def modulo(left, right, _) when is_float(left) or is_float(right),
do: :math.fmod(left, right) |> Float.round(5)
def modulo(left, right, _), do: rem(left, right)
@doc """
Replaces every newline (\n) in a string with an HTML line break (<br />).
## Examples
iex> Liquex.Filter.newline_to_br("\\nHello\\nthere\\n", %{})
"<br />\\nHello<br />\\nthere<br />\\n"
"""
@spec newline_to_br(String.t(), Context.t()) :: String.t()
def newline_to_br(value, _), do: String.replace(value, "\n", "<br />\n")
@doc """
Adds a number to another number.
## Examples
iex> Liquex.Filter.plus(4, 2, %{})
6
iex> Liquex.Filter.plus(183.357, 12, %{})
195.357
"""
def plus(left, right, _), do: left + right
@doc """
Adds the specified string to the beginning of another string.
## Examples
iex> Liquex.Filter.prepend("apples, oranges, and bananas", "Some fruit: ", %{})
"Some fruit: apples, oranges, and bananas"
iex> Liquex.Filter.prepend("/index.html", "example.com", %{})
"example.com/index.html"
"""
def prepend(value, prepender, _), do: prepender <> value
@doc """
Removes every occurrence of the specified substring from a string.
## Examples
iex> Liquex.Filter.remove("I strained to see the train through the rain", "rain", %{})
"I sted to see the t through the "
"""
def remove(value, original, context), do: replace(value, original, "", context)
@doc """
Removes every occurrence of the specified substring from a string.
## Examples
iex> Liquex.Filter.remove_first("I strained to see the train through the rain", "rain", %{})
"I sted to see the train through the rain"
"""
def remove_first(value, original, context), do: replace_first(value, original, "", context)
@doc """
Replaces every occurrence of the first argument in a string with the second argument.
## Examples
iex> Liquex.Filter.replace("Take my protein pills and put my helmet on", "my", "your", %{})
"Take your protein pills and put your helmet on"
"""
def replace(value, original, replacement, _),
do: String.replace(value, original, replacement)
@doc """
Replaces only the first occurrence of the first argument in a string with the second argument.
## Examples
iex> Liquex.Filter.replace_first("Take my protein pills and put my helmet on", "my", "your", %{})
"Take your protein pills and put my helmet on"
"""
def replace_first(value, original, replacement, _),
do: String.replace(value, original, replacement, global: false)
@doc """
Reverses the order of the items in an array. reverse cannot reverse a string.
## Examples
iex> Liquex.Filter.reverse(~w(apples oranges peaches plums), %{})
["plums", "peaches", "oranges", "apples"]
"""
def reverse(arr, _) when is_list(arr), do: Enum.reverse(arr)
@doc """
Rounds a number to the nearest integer or, if a number is passed as an argument, to that number of decimal places.
## Examples
iex> Liquex.Filter.round(1, %{})
1
iex> Liquex.Filter.round(1.2, %{})
1
iex> Liquex.Filter.round(2.7, %{})
3
iex> Liquex.Filter.round(183.357, 2, %{})
183.36
"""
def round(value, precision \\ 0, context)
def round(value, _, _) when is_integer(value), do: value
def round(value, 0, _), do: value |> Float.round() |> trunc()
def round(value, precision, _), do: Float.round(value, precision)
@doc """
Removes all whitespace (tabs, spaces, and newlines) from the right side of a string.
It does not affect spaces between words.
## Examples
iex> Liquex.Filter.rstrip(" So much room for activities! ", %{})
" So much room for activities!"
"""
def rstrip(value, _), do: value |> String.trim_trailing()
@doc """
Returns the number of characters in a string or the number of items in an array.
## Examples
iex> Liquex.Filter.size("Ground control to Major Tom.", %{})
28
iex> Liquex.Filter.size(~w(apples oranges peaches plums), %{})
4
"""
def size(value, _) when is_list(value), do: length(value)
def size(value, _) when is_binary(value), do: String.length(value)
@doc """
Returns a substring of 1 character beginning at the index specified by the
first argument. An optional second argument specifies the length of the
substring to be returned.
## Examples
iex> Liquex.Filter.slice("Liquid", 0, %{})
"L"
iex> Liquex.Filter.slice("Liquid", 2, %{})
"q"
iex> Liquex.Filter.slice("Liquid", 2, 5, %{})
"quid"
If the first argument is a negative number, the indices are counted from
the end of the string:
## Examples
iex> Liquex.Filter.slice("Liquid", -3, 2, %{})
"ui"
"""
def slice(value, start, length \\ 1, _),
do: String.slice(value, start, length)
@doc """
Sorts items in an array in case-sensitive order.
## Examples
iex> Liquex.Filter.sort(["zebra", "octopus", "giraffe", "Sally Snake"], %{})
["Sally Snake", "giraffe", "octopus", "zebra"]
"""
def sort(list, _), do: Liquex.Collection.sort(list)
def sort(list, field_name, _), do: Liquex.Collection.sort(list, field_name)
@doc """
Sorts items in an array in case-insensitive order.
## Examples
iex> Liquex.Filter.sort_natural(["zebra", "octopus", "giraffe", "Sally Snake"], %{})
["giraffe", "octopus", "Sally Snake", "zebra"]
"""
def sort_natural(list, _), do: Liquex.Collection.sort_case_insensitive(list)
def sort_natural(list, field_name, _),
do: Liquex.Collection.sort_case_insensitive(list, field_name)
@doc """
Divides a string into an array using the argument as a separator. split is
commonly used to convert comma-separated items from a string to an array.
## Examples
iex> Liquex.Filter.split("John, Paul, George, Ringo", ", ", %{})
["John", "Paul", "George", "Ringo"]
"""
def split(value, separator, _), do: String.split(value, separator)
@doc """
Removes all whitespace (tabs, spaces, and newlines) from both the left and
right side of a string. It does not affect spaces between words.
## Examples
iex> Liquex.Filter.strip(" So much room for activities! ", %{})
"So much room for activities!"
"""
def strip(value, _), do: String.trim(value)
@doc """
Removes any HTML tags from a string.
## Examples
iex> Liquex.Filter.strip_html("Have <em>you</em> read <strong>Ulysses</strong>?", %{})
"Have you read Ulysses?"
"""
def strip_html(value, _), do: HtmlSanitizeEx.strip_tags(value)
@doc """
Removes any newline characters (line breaks) from a string.
## Examples
iex> Liquex.Filter.strip_newlines("Hello\\nthere", %{})
"Hellothere"
"""
def strip_newlines(value, _) do
value
|> String.replace("\r", "")
|> String.replace("\n", "")
end
@doc """
Multiplies a number by another number.
## Examples
iex> Liquex.Filter.times(3, 4, %{})
12
iex> Liquex.Filter.times(24, 7, %{})
168
iex> Liquex.Filter.times(183.357, 12, %{})
2200.284
"""
def times(value, divisor, _), do: value * divisor
@doc """
Shortens a string down to the number of characters passed as an argument. If
the specified number of characters is less than the length of the string, an
ellipsis (…) is appended to the string and is included in the character
count.
## Examples
iex> Liquex.Filter.truncate("Ground control to Major Tom.", 20, %{})
"Ground control to..."
iex> Liquex.Filter.truncate("Ground control to Major Tom.", 25, ", and so on", %{})
"Ground control, and so on"
iex> Liquex.Filter.truncate("Ground control to Major Tom.", 20, "", %{})
"Ground control to Ma"
"""
def truncate(value, length, ellipsis \\ "...", _) do
if String.length(value) <= length do
value
else
String.slice(
value,
0,
length - String.length(ellipsis)
) <> ellipsis
end
end
@doc """
  Shortens a string down to the number of words passed as an argument. If the
  specified number of words is less than the number of words in the string, an
  ellipsis (…) is appended to the string.
## Examples
iex> Liquex.Filter.truncatewords("Ground control to Major Tom.", 3, %{})
"Ground control to..."
iex> Liquex.Filter.truncatewords("Ground control to Major Tom.", 3, "--", %{})
"Ground control to--"
iex> Liquex.Filter.truncatewords("Ground control to Major Tom.", 3, "", %{})
"Ground control to"
"""
def truncatewords(value, length, ellipsis \\ "...", _) do
words = value |> String.split()
if length(words) <= length do
value
else
sentence =
words
|> Enum.take(length)
|> Enum.join(" ")
sentence <> ellipsis
end
end
@doc """
Removes any duplicate elements in an array.
## Examples
iex> Liquex.Filter.uniq(~w(ants bugs bees bugs ants), %{})
["ants", "bugs", "bees"]
"""
def uniq(list, _), do: Enum.uniq(list)
@doc """
Makes each character in a string uppercase. It has no effect on strings
which are already all uppercase.
## Examples
iex> Liquex.Filter.upcase("<NAME>", %{})
"<NAME>"
iex> Liquex.Filter.upcase("APPLE", %{})
"APPLE"
"""
def upcase(value, _), do: String.upcase(value)
@doc """
Decodes a string that has been encoded as a URL or by url_encode/2.
## Examples
iex> Liquex.Filter.url_decode("%27Stop%21%27+said+Fred", %{})
"'Stop!' said Fred"
"""
def url_decode(value, _), do: URI.decode_www_form(value)
@doc """
  Encodes a string so that it can be used safely in a URL, converting
  URL-unsafe characters (www-form encoding, where spaces become `+`).
## Examples
iex> Liquex.Filter.url_encode("<EMAIL>", %{})
"john%40liquid.com"
iex> Liquex.Filter.url_encode("Tetsuro Takara", %{})
"Tetsuro+Takara"
"""
def url_encode(value, _), do: URI.encode_www_form(value)
@doc """
Creates an array including only the objects with a given property value, or
any truthy value by default.
## Examples
iex> Liquex.Filter.where([%{"b" => 2}, %{"b" => 1}], "b", 1, %{})
[%{"b" => 1}]
"""
def where(list, key, value, _), do: Liquex.Collection.where(list, key, value)
@doc """
Creates an array including only the objects with a given truthy property value
## Examples
iex> Liquex.Filter.where([%{"b" => true, "value" => 1}, %{"b" => 1, "value" => 2}, %{"b" => false, "value" => 3}], "b", %{})
[%{"b" => true, "value" => 1}, %{"b" => 1, "value" => 2}]
"""
def where(list, key, _), do: Liquex.Collection.where(list, key)
end
| lib/liquex/filter.ex |
defmodule Catalog do
@external_resource "README.md"
@moduledoc "README.md"
|> File.read!()
|> String.split("<!-- MDOC !-->")
|> Enum.fetch!(1)
@doc false
defmacro __using__(_) do
quote do
@before_compile unquote(__MODULE__)
Module.register_attribute(__MODULE__, :catalog_from_with_md5, accumulate: true)
import Catalog
end
end
defmacro __before_compile__(_) do
quote do
def __mix_recompile__? do
Enum.any?(@catalog_from_with_md5, fn {from, md5} ->
from |> Path.wildcard() |> Enum.sort() |> :erlang.md5() != md5
end)
end
end
end
@doc """
Processes all markdown files in `from` and stores them in the
module attribute `as`.
To use this macro, you must install `Earmark` as a dependency in your
application:
{:earmark, "~> 1.14"}
  Additionally, if you want to use `Makeup` syntax highlighting via
the `:highlighters` option outlined below, you will need to install it
along with any relevant language lexers you will need:
{:makeup, "~> 1.0"},
{:makeup_elixir, ">= 0.0.0"}
## Example
defmodule MyApp.Catalog do
use Catalog
markdown(:posts, "posts/**.md", build: Article)
def all_posts(), do: @posts
end
## Options
* `:build` - the name of the module that will build each entry
* `:highlighters` - which code highlighters to use. `Catalog`
uses `Makeup` for syntax highlighting and you will need to add its
`.css` classes. You can generate the CSS classes by calling
`Makeup.stylesheet(:vim_style, "makeup")` inside `iex -S mix`.
You can replace `:vim_style` by any style of your choice
[defined here](https://hexdocs.pm/makeup/Makeup.Styles.HTML.StyleMap.html).
* `:earmark_options` - an [`%Earmark.Options{}`](https://hexdocs.pm/earmark/Earmark.Options.html) struct.
"""
defmacro markdown(as, from, opts \\ []),
do: macro(&Catalog.__extract_markdown__/2, as, from, opts)
@doc """
Processes all json files in `from` and stores them in the
module attribute `as`.
This macro uses `Jason` to process the content of the file.
To use it, you must have `Jason` added as a dependency:
{:jason, "~> 1.2"}
## Example
defmodule MyApp.Catalog do
use Catalog
json(:countries, "countries/**.json")
def all_countries(), do: @countries
end
## Options
* `:build` - the name of the module that will build each entry.
* `:jason_options` - options that will be passed along to the
`Jason.decode!/2` call.
"""
defmacro json(as, from, opts \\ []),
do: macro(&Catalog.__extract_json__/2, as, from, opts)
@doc """
Processes all files in `from` and stores them in the
module attribute `as`. This processor merely reads the file contents
into a string. It is commonly used for text or HTML files.
## Example
defmodule MyApp.Catalog do
use Catalog
file(:notes, "notes/**.txt")
def all_notes(), do: @notes
end
## Options
* `:build` - the name of the module that will build each entry.
"""
defmacro file(as, from, opts \\ []),
do: macro(&Catalog.__extract_file__/2, as, from, opts)
@doc """
Processes all YAML files in `from` and stores them in the
module attribute `as`.
This macro uses `YamlElixir` for processing the content of the
file. To use it, you must have `YamlElixir` added as a dependency:
{:yaml_elixir, "~> 2.8"}
## Example
defmodule MyApp.Catalog do
use Catalog
yaml(:cities, "cities/**.yaml")
def all_cities(), do: @cities
end
## Options
* `:build` - the name of the module that will build each entry.
* `:yaml_options` - options that will be passed along to the
`YamlElixir.read_from_string!/2` call.
"""
defmacro yaml(as, from, opts \\ []),
do: macro(&Catalog.__extract_yaml__/2, as, from, opts)
@doc """
Processes all TOML files in `from` and stores them in the
module attribute `as`.
This macro uses `Toml` for processing the content of the file.
To use it, you must install `Toml` as a dependency:
{:toml, "~> 0.6.2"}
## Example
If we have a `authors.toml` file with the following contents:
["<NAME>"]
best_work = "The Quiet American"
["<NAME>"]
best_work = "Book of Disquiet
We can include it in our module like so:
defmodule MyApp.Catalog do
use Catalog
toml(:authors, "authors.toml")
def all_authors(), do: @authors
end
The value of `@authors` will be:
%{
frontmatter: %{},
path: "authors.toml",
content: %{
"<NAME>" => %{"best_work" => "The Quiet American"},
"<NAME>" => %{"best_work" => "Book of Disquiet}
}
}
## Options
* `:build` - the name of the module that will build each entry.
* `:toml_options` - options that will be passed along to the
`Toml.decode!/2` call.
"""
defmacro toml(as, from, opts \\ []),
do: macro(&Catalog.__extract_toml__/2, as, from, opts)
@doc """
Processes all CSV files in `from` and stores them in the
module attribute `as`.
This macro uses `CSV` for processing the content of the file.
To use it, you must install `CSV` as a dependency:
{:csv, "~> 2.4"}
## Example
If we have a `people.csv` file with the following contents:
name,age
john,27
steve,20
We can include it in our module like so:
defmodule MyApp.Catalog do
use Catalog
csv(:people, "people.csv")
def all_people(), do: @people
end
  The resulting value of `@people` will be:
%{
frontmatter: %{},
path: "people.csv",
content: [
%{"name" => "john", "age" => "27"},
%{"name" => "steve", "age" => "20"}
]
}
## Options
* `:build` - the name of the module that will build each entry.
* `:csv_options` - options that will be passed along to the
`CSV.decode!/2` call. By default we pass along `headers: true`
to the call.
"""
defmacro csv(as, from, opts \\ []),
do: macro(&Catalog.__extract_csv__/2, as, from, opts)
defp macro(fun, as, from, opts) do
quote bind_quoted: [fun: fun, as: as, from: from, opts: opts] do
{paths, entries} = fun.(from, opts)
if [from] == paths do
[entry] = entries
Module.put_attribute(__MODULE__, as, entry)
else
Module.put_attribute(__MODULE__, as, entries)
end
for path <- paths do
@external_resource Path.relative_to_cwd(path)
end
@catalog_from_with_md5 {from, :erlang.md5(paths)}
end
end
def __extract_markdown__(from, opts) do
earmark_opts = Keyword.get(opts, :earmark_options, %Earmark.Options{})
highlighters = Keyword.get(opts, :highlighters, [])
decoder = &(&1 |> Earmark.as_html!(earmark_opts) |> highlight(highlighters))
extract(decoder, from, opts)
end
defp highlight(html, []) do
html
end
defp highlight(html, _) do
Catalog.Highlighter.highlight(html)
end
if Code.ensure_loaded?(Jason) do
def __extract_json__(from, opts) do
jason_options = Keyword.get(opts, :jason_options, [])
parser = &Jason.decode!(&1, jason_options)
extract(parser, from, opts)
end
else
def __extract_json__(_from, _opts),
do: raise(ArgumentError, "json/3 requires :jason to be installed and loaded")
end
def __extract_file__(from, opts) do
extract(& &1, from, opts)
end
if Code.ensure_loaded?(YamlElixir) do
def __extract_yaml__(from, opts) do
yaml_options = Keyword.get(opts, :yaml_options, [])
parser = &YamlElixir.read_from_string!(&1, yaml_options)
extract(parser, from, opts)
end
else
def __extract_yaml__(_from, _opts),
do: raise(ArgumentError, "yaml/3 requires :yaml_elixir to be installed and loaded")
end
if Code.ensure_loaded?(Toml) do
def __extract_toml__(from, opts) do
toml_options = Keyword.get(opts, :toml_options, [])
parser = &Toml.decode!(&1, toml_options)
extract(parser, from, opts)
end
else
def __extract_toml__(_from, _opts),
do: raise(ArgumentError, "toml/3 requires :toml to be installed and loaded")
end
if Code.ensure_loaded?(CSV) do
def __extract_csv__(from, opts) do
      csv_options = Keyword.merge([headers: true], Keyword.get(opts, :csv_options, []))
      parser = &(&1 |> String.split("\n", trim: true) |> CSV.decode!(csv_options) |> Enum.to_list())
extract(parser, from, opts)
end
else
def __extract_csv__(_from, _opts),
do: raise(ArgumentError, "csv/3 requires :csv to be installed and loaded")
end
defp extract(decoder, from, opts) do
builder = Keyword.get(opts, :build)
paths = from |> Path.wildcard() |> Enum.sort()
entries =
for path <- paths do
{frontmatter, content} = Catalog.FrontMatter.process!(File.read!(path), path, opts)
content = decoder.(content)
if builder do
builder.build(path, frontmatter, content)
else
%{
path: path,
frontmatter: frontmatter,
content: content
}
end
end
{paths, entries}
end
end
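# Hedged sketch of a `:build` module (a hypothetical `Article`, inferred from
# the `builder.build(path, frontmatter, content)` call in extract/3 above;
# string frontmatter keys are an assumption):
#
#     defmodule Article do
#       defstruct [:path, :title, :body]
#
#       def build(path, frontmatter, content) do
#         %__MODULE__{path: path, title: frontmatter["title"], body: content}
#       end
#     end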
| lib/catalog.ex |
defmodule AFK.State.Keymap do
@moduledoc false
alias AFK.Keycode.Layer
alias AFK.Keycode.None
alias AFK.Keycode.Transparent
@enforce_keys [:layers, :counter]
defstruct [:layers, :counter]
@type t :: %__MODULE__{
layers: %{
optional(non_neg_integer) => %{
active: bool,
activations: %{optional(atom) => AFK.Keycode.t(), optional(:default) => true},
layer: AFK.Keymap.layer()
}
},
counter: [non_neg_integer]
}
@spec new(keymap :: AFK.Keymap.t()) :: t
def new([]) do
struct!(__MODULE__, layers: %{}, counter: [])
end
def new(keymap) when is_list(keymap) do
layers =
keymap
|> Enum.with_index()
|> Map.new(fn {layer, index} ->
{index,
%{
active: false,
activations: %{},
layer: layer
}}
end)
|> put_in([0, :active], true)
|> put_in([0, :activations, :default], true)
counter = Enum.to_list((Enum.count(layers) - 1)..0)
struct!(__MODULE__, layers: layers, counter: counter)
end
@spec find_keycode(keymap :: t, key :: atom) :: AFK.Keycode.t()
def find_keycode(%__MODULE__{} = keymap, key) do
case do_find_keycode(keymap.layers, keymap.counter, key) do
%_mod{} = keycode -> keycode
%{} -> %None{}
end
end
defp do_find_keycode(layers, counter, key) do
Enum.reduce_while(counter, layers, fn layer_id, acc ->
case Map.fetch!(layers, layer_id) do
%{active: true, layer: %{^key => %Transparent{}}} -> {:cont, acc}
%{active: true, layer: %{^key => keycode}} -> {:halt, keycode}
_else -> {:cont, acc}
end
end)
end
@spec add_activation(keymap :: t, keycode :: AFK.Keycode.Layer.t(), key :: atom) :: t
def add_activation(%__MODULE__{} = keymap, %Layer{} = keycode, key) do
layers =
keymap.layers
|> put_in([keycode.layer, :activations, key], keycode)
      |> Map.update!(keycode.layer, &Map.put(&1, :active, true))
%{keymap | layers: layers}
end
@spec remove_activation(keymap :: t, keycode :: AFK.Keycode.Layer.t(), key :: atom) :: t
def remove_activation(%__MODULE__{} = keymap, %Layer{} = keycode, key) do
layers =
keymap.layers
|> update_in([keycode.layer, :activations], &Map.delete(&1, key))
|> Map.update!(keycode.layer, fn layer ->
if map_size(layer.activations) == 0 do
Map.put(layer, :active, false)
else
layer
end
end)
%{keymap | layers: layers}
end
@spec toggle_activation(keymap :: t, keycode :: AFK.Keycode.Layer.t(), key :: atom) :: t
def toggle_activation(%__MODULE__{} = keymap, %Layer{} = keycode, key) do
case keymap.layers[keycode.layer].activations[key] do
^keycode -> remove_activation(keymap, keycode, key)
nil -> add_activation(keymap, keycode, key)
end
end
@spec set_default(keymap :: t, keycode :: AFK.Keycode.Layer.t(), key :: atom) :: t
def set_default(%__MODULE__{} = keymap, %Layer{} = keycode, _key) do
layers =
Map.new(keymap.layers, fn {layer_id, layer} ->
cond do
layer.activations[:default] ->
{layer_id, layer |> update_in([:activations], &Map.delete(&1, :default)) |> Map.put(:active, false)}
layer_id == keycode.layer ->
{layer_id, layer |> put_in([:activations, :default], true) |> Map.put(:active, true)}
true ->
{layer_id, layer}
end
end)
%{keymap | layers: layers}
end
end
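# Illustrative note (not from the original file): find_keycode/2 scans layers
# from the top of `counter` down; %Transparent{} entries fall through to lower
# active layers, and %None{} is returned when no active layer binds the key.
# `base_keycode` below is a hypothetical placeholder:
#
#     keymap = AFK.State.Keymap.new([%{k_a: base_keycode}, %{k_a: %Transparent{}}])
#     AFK.State.Keymap.find_keycode(keymap, :k_a)
#     #=> base_keycode (layer 1 starts inactive; layer 0 is the default)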
| lib/afk/state/keymap.ex |
defmodule FinanceTS do
alias FinanceTS.OHLCV
alias FinanceTS.TimeSeries
@doc """
iex> FinanceTS.to_list({:ok, [[3600, 68.7, 70.1, 64.7, 67.9, 4.0e7], [7200, 68.3, 73.7, 65.8, 73.2, 3.2e7]], "AAPL", "USD", "NYSE"})
{:ok, %TimeSeries{
format: :list,
data: [%OHLCV{c: 67.9, h: 70.1, l: 64.7, o: 68.7, ts: 3600, v: 4.0e7}, %OHLCV{c: 73.2, h: 73.7, l: 65.8, o: 68.3, ts: 7200, v: 3.2e7}],
symbol: "AAPL",
currency: "USD",
first: %FinanceTS.OHLCV{c: 67.9, h: 70.1, l: 64.7, o: 68.7, ts: 3600, v: 4.0e7},
last: %FinanceTS.OHLCV{c: 73.2, h: 73.7, l: 65.8, o: 68.3, ts: 7200, v: 3.2e7},
size: 2,
source: "NYSE"
}}
"""
def to_list({:ok, stream, symbol, currency, source}) do
list =
stream
|> Stream.map(fn [ts, o, h, l, c, v] -> %OHLCV{ts: ts, o: o, h: h, l: l, c: c, v: v} end)
|> Enum.to_list()
{:ok,
%TimeSeries{
symbol: symbol,
currency: currency,
source: source,
format: :list,
size: length(list),
first: List.first(list),
last: List.last(list),
data: list
}}
end
def to_list({:error, error}), do: {:error, error}
@doc """
iex> FinanceTS.to_csv({:ok, [[3600, 68.7, 70.1, 64.7, 67.9, 4.0e7], [7200, 68.3, 73.7, 65.8, 73.2, 3.2e7]], "AAPL", "USD", "NYSE"})
{:ok, %TimeSeries{
format: :csv,
data: "3600,68.7,70.1,64.7,67.9,4.0e7\n7200,68.3,73.7,65.8,73.2,3.2e7",
symbol: "AAPL",
currency: "USD",
first: %FinanceTS.OHLCV{c: 67.9, h: 70.1, l: 64.7, o: 68.7, ts: 3600, v: 4.0e7},
last: %FinanceTS.OHLCV{c: 73.2, h: 73.7, l: 65.8, o: 68.3, ts: 7200, v: 3.2e7},
size: 2,
source: "NYSE"
}}
iex> FinanceTS.to_csv({:ok, [[3600, 68.7, 70.1, 64.7, 67.9, 4.0e7], [7200, 68.3, 73.7, 65.8, 73.2, 3.2e7]], "AAPL", "USD", "NYSE"}, only: [:t, :c])
{:ok, %TimeSeries{
format: :csv,
data: "3600,67.9\n7200,73.2",
symbol: "AAPL",
currency: "USD",
first: %FinanceTS.OHLCV{c: 67.9, h: 70.1, l: 64.7, o: 68.7, ts: 3600, v: 4.0e7},
last: %FinanceTS.OHLCV{c: 73.2, h: 73.7, l: 65.8, o: 68.3, ts: 7200, v: 3.2e7},
size: 2,
source: "NYSE"
}}
"""
def to_csv(stream, opts \\ [])
def to_csv({:ok, stream, symbol, currency, source}, opts) do
list =
stream
|> Stream.map(fn [t, o, h, l, c, v] -> %OHLCV{ts: t, o: o, h: h, l: l, c: c, v: v} end)
|> Enum.to_list()
    # TODO: generalize the :only option beyond the hard-coded [:t, :c] case
csv =
if opts[:only] == [:t, :c] do
stream
|> Stream.map(fn [t, _o, _h, _l, c, _v] -> Enum.join([t, c], ",") end)
|> Enum.join("\n")
else
stream
|> Stream.map(fn list -> Enum.join(list, ",") end)
|> Enum.join("\n")
end
{:ok,
%TimeSeries{
symbol: symbol,
currency: currency,
source: source,
format: :csv,
size: length(list),
first: List.first(list),
last: List.last(list),
data: csv
}}
end
def to_csv({:error, error}, _opts), do: {:error, error}
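  # A possible generalization of the :only option (a sketch, not wired in):
  # map each requested field to its positional index once, then project every
  # row through those indexes instead of special-casing [:t, :c].
  #
  #     @field_order [:t, :o, :h, :l, :c, :v]
  #     defp project(row, only) do
  #       indexes = Enum.map(only, fn field ->
  #         Enum.find_index(@field_order, &(&1 == field))
  #       end)
  #       Enum.map(indexes, &Enum.at(row, &1))
  #     end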
@doc """
iex> FinanceTS.change_in_percent({:ok, [[3600, 68.7, 70.1, 64.7, 100, 4.0e7], [7200, 68.3, 73.7, 65.8, 150, 3.2e7]], "AAPL", "USD", "NYSE"}, 3600, DateTime.from_unix!(7200))
0.5
iex> FinanceTS.change_in_percent({:ok, [[3600, 68.7, 70.1, 64.7, 100, 4.0e7], [7200, 68.3, 73.7, 65.8, 150, 3.2e7]], "AAPL", "USD", "NYSE"}, 3600, DateTime.from_unix!(7201))
0.5
iex> FinanceTS.change_in_percent({:ok, [[3600, 68.7, 70.1, 64.7, 100, 4.0e7], [7200, 68.3, 73.7, 65.8, 150, 3.2e7]], "AAPL", "USD", "NYSE"}, 3600, DateTime.from_unix!(6000))
0.0
iex> FinanceTS.change_in_percent({:ok, [[3600, 68.7, 70.1, 64.7, 100, 4.0e7]], "AAPL", "USD", "NYSE"}, 3600, DateTime.from_unix!(17201))
0.0
iex> FinanceTS.change_in_percent({:ok, [[3600, 68.7, 70.1, 64.7, 100, 4.0e7]], "AAPL", "USD", "NYSE"}, 3600, DateTime.from_unix!(7201))
0.0
iex> FinanceTS.change_in_percent({:ok, [], "AAPL", "USD", "NYSE"}, 3600, DateTime.from_unix!(7201))
0.0
"""
def change_in_percent(stream, change_in_seconds, now \\ DateTime.utc_now())
def change_in_percent({:ok, [], _symbol, _currency, _source}, _change_in_seconds, _now), do: 0.0
def change_in_percent({:ok, stream, _symbol, _currency, _source}, change_in_seconds, now) do
to_ts = now |> DateTime.to_unix()
from_ts = to_ts - change_in_seconds
reverse_list =
stream
|> Stream.filter(fn [_t, _o, _h, _l, c, _v] -> !is_nil(c) end)
|> Enum.to_list()
|> Enum.reverse()
to_elem = Enum.find(reverse_list, fn [t, _o, _h, _l, _c, _v] -> t <= to_ts end)
from_elem = Enum.find(reverse_list, fn [t, _o, _h, _l, _c, _v] -> t <= from_ts end)
if to_elem && from_elem do
[_t, _o, _h, _l, to_c, _v] = to_elem
[_t, _o, _h, _l, from_c, _v] = from_elem
(to_c - from_c) / from_c
else
0.0
end
end
def change_in_percent({:error, error}, _change_in_seconds, _now), do: {:error, error}
end
|
lib/finance_ts.ex
| 0.691289
| 0.450662
|
finance_ts.ex
|
starcoder
|
defmodule PhoenixComponents.View do
@moduledoc """
This module provides a way to easily generate helper functions to render
components.
  The module can be included by other `Phoenix.View` modules to import components easily.
## Example
When working on a project with several components you can use this module in your `web/web.ex` definition.
defmodule AppWeb do
#...
def view do
quote do
          use Phoenix.View, root: "lib/app_web/templates",
            namespace: AppWeb
use PhoenixComponents.View, namespace: AppWeb.Components
# ...
end
end
end
After you include the module you can use the following helpers
defmodule AppWeb.UserView do
use AppWeb, :view
        import_components [:button, :jumbotron]
end
After you import a component into the view module you can use the component as follows
<div>
<%= button type: :primary do %>
Submit
<% end %>
</div>
Alternatively, you can also render a component without importing it by using the helper function `component`.
<div>
<%= component :button, type: :primary do %>
Submit
<% end %>
</div>
"""
import Phoenix.View, only: [render: 3]
import Phoenix.HTML, only: [html_escape: 1]
import PhoenixComponents.Helpers, only: [to_pascal_case: 1]
@doc """
Helper to render a component by name.
## Example
<%= component :button %>
"""
def component(namespace, name) do
do_component(namespace, name, "", [])
end
@doc """
Helper to render a component by name and specifying the content in a block.
## Example
<%= component :button do %>
Submit
<% end %>
"""
def component(namespace, name, do: block) do
do_component(namespace, name, block, [])
end
@doc """
Helper to render a component by name and a list of attributes.
Note that attributes are available in the template as the map @attrs.
## Example
<%= component :button, color: "red", size: "small", label: "Submit" %>
"""
def component(namespace, name, attrs) when is_list(attrs) do
do_component(namespace, name, "", attrs)
end
@doc """
Helper to render a component by name and a list of attributes.
Note that attributes are available in the template as the map @attrs.
## Example
<%= component :button, color: "red", size: "small" do %>
Submit
<% end %>
"""
def component(namespace, name, attrs, do: block) when is_list(attrs) do
do_component(namespace, name, block, attrs)
end
def component(namespace, name, %Phoenix.HTML.Form{} = form, field) when is_atom(field) do
do_component(namespace, name, "", [form: form, field: field])
end
def component(namespace, name, %Phoenix.HTML.Form{} = form, field, attrs) when is_list(attrs) and is_atom(field) do
do_component(namespace, name, "", Keyword.merge(attrs, [form: form, field: field]))
end
def component(namespace, name, %Phoenix.HTML.Form{} = form, field, attrs, do: block) when is_list(attrs) and is_atom(field) do
do_component(namespace, name, block, Keyword.merge(attrs, [form: form, field: field]))
end
defp do_component(namespace, name, content, attrs) do
safe_content = html_escape(content)
name
|> to_pascal_case
|> prefix_module(namespace)
|> render("template.html", attrs: Enum.into(attrs, %{}), content: safe_content)
end
defp prefix_module(atom, namespace) do
Module.concat(namespace, atom)
end
@doc """
Macro to generate helpers for components inside views.
## Example
import_components [:button, :jumbotron]
import_components [:button, :jumbotron], from: SomeOtherModule
Then you can use the component directly
<%= button type: "submit" %>
"""
defmacro import_components(components, opts \\ []) do
namespace = Keyword.get(opts, :from)
for name <- components do
if namespace do
quote do
def unquote(name)(), do: component(unquote(namespace), unquote(name))
def unquote(name)(attrs), do: component(unquote(namespace), unquote(name), attrs)
def unquote(name)(attrs, block),
do: component(unquote(namespace), unquote(name), attrs, block)
end
else
quote do
def unquote(name)(), do: component(@namespace, unquote(name))
def unquote(name)(attrs), do: component(@namespace, unquote(name), attrs)
def unquote(name)(attrs, block),
do: component(@namespace, unquote(name), attrs, block)
end
end
end
end
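  # Expansion sketch: `import_components [:button], from: UI.Components`
  # defines (roughly):
  #
  #     def button(), do: component(UI.Components, :button)
  #     def button(attrs), do: component(UI.Components, :button, attrs)
  #     def button(attrs, block), do: component(UI.Components, :button, attrs, block)
  #
  # UI.Components is a hypothetical namespace used for illustration.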
defmacro __using__(namespace: namespace) do
quote do
@namespace unquote(namespace)
import PhoenixComponents.View
end
end
end
|
lib/phoenix_components/view.ex
| 0.820037
| 0.611556
|
view.ex
|
starcoder
|
defmodule RDF.NQuads.Encoder do
@moduledoc """
An encoder for N-Quads serializations of RDF.ex data structures.
As for all encoders of `RDF.Serialization.Format`s, you normally won't use these
functions directly, but via one of the `write_` functions on the `RDF.NQuads`
format module or the generic `RDF.Serialization` module.
## Options
- `:default_graph_name`: The graph name to be used as the default for triples
from a `RDF.Graph` or `RDF.Description`. When the input to be encoded is a
`RDF.Description` the default is `nil` for the default graph. In case of a
`RDF.Graph` the default is the `RDF.Graph.name/1`. The option doesn't
have any effect at all when the input to be encoded is a `RDF.Dataset`.
"""
use RDF.Serialization.Encoder
alias RDF.{Statement, Graph}
@impl RDF.Serialization.Encoder
  @spec encode(RDF.Data.t(), keyword) :: {:ok, String.t()} | {:error, any}
def encode(data, opts \\ []) do
default_graph_name = default_graph_name(data, Keyword.get(opts, :default_graph_name, false))
{:ok, Enum.map_join(data, &statement(&1, default_graph_name))}
end
@impl RDF.Serialization.Encoder
@spec stream(RDF.Data.t(), keyword) :: Enumerable.t()
def stream(data, opts \\ []) do
default_graph_name = default_graph_name(data, Keyword.get(opts, :default_graph_name, false))
case Keyword.get(opts, :mode, :string) do
:string -> Stream.map(data, &statement(&1, default_graph_name))
:iodata -> Stream.map(data, &iolist_statement(&1, default_graph_name))
invalid -> raise "Invalid stream mode: #{invalid}"
end
end
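  # Usage sketch (assumes `dataset` is an RDF.Dataset built elsewhere):
  #
  #     {:ok, nquads} = RDF.NQuads.Encoder.encode(dataset)
  #
  #     dataset
  #     |> RDF.NQuads.Encoder.stream(mode: :iodata)
  #     |> Stream.into(File.stream!("out.nq"))
  #     |> Stream.run()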
defp default_graph_name(%Graph{} = graph, false), do: graph.name
defp default_graph_name(_, none) when none in [false, nil], do: nil
defp default_graph_name(_, default_graph_name),
do: Statement.coerce_graph_name(default_graph_name)
@spec statement(Statement.t(), Statement.graph_name()) :: String.t()
def statement(statement, default_graph_name)
def statement({subject, predicate, object, nil}, _) do
"#{term(subject)} #{term(predicate)} #{term(object)} .\n"
end
def statement({subject, predicate, object, graph}, _) do
"#{term(subject)} #{term(predicate)} #{term(object)} #{term(graph)} .\n"
end
def statement({subject, predicate, object}, default_graph_name) do
statement({subject, predicate, object, default_graph_name}, default_graph_name)
end
defdelegate term(value), to: RDF.NTriples.Encoder
@spec iolist_statement(Statement.t(), Statement.graph_name()) :: iolist
def iolist_statement(statement, default_graph_name)
def iolist_statement({subject, predicate, object, nil}, _) do
[iolist_term(subject), " ", iolist_term(predicate), " ", iolist_term(object), " .\n"]
end
def iolist_statement({subject, predicate, object, graph}, _) do
[
iolist_term(subject),
" ",
iolist_term(predicate),
" ",
iolist_term(object),
" ",
iolist_term(graph),
" .\n"
]
end
def iolist_statement({subject, predicate, object}, default_graph_name) do
iolist_statement({subject, predicate, object, default_graph_name}, default_graph_name)
end
defdelegate iolist_term(value), to: RDF.NTriples.Encoder
end
|
lib/rdf/serializations/nquads_encoder.ex
| 0.890491
| 0.616763
|
nquads_encoder.ex
|
starcoder
|
defmodule Rak.Module.Service do
@moduledoc """
Defines the behavior expected from Rak module services.
Module services are responsible for:
1. Receiving client requests
2. Executing, routing and/or forwarding messages
3. Creating and terminating module instances as needed
In the case where a Rak module is a sub-module, the service
  may receive forwarded RPCs and messages from the parent.
"""
defmodule BeforeCompile do
@moduledoc false
    defmacro __before_compile__(_env) do
      # The log level follows the Mix environment, not the caller's macro env.
      level = get_log_level(Mix.env())
quote do
def cast(name, message, session) do
Logger.log(unquote(level), "Received unknown message", [
module: __MODULE__,
name: name,
message: message,
session: session
])
end
def call(name, message, session) do
Logger.log(unquote(level), "Received unknown rpc", [
module: __MODULE__,
name: name,
message: message,
session: session
])
end
end
end
defp get_log_level(:prod), do: :debug
defp get_log_level(_), do: :warn
end
defmacro __using__(config) do
state = get_state(config[:state])
quote do
use GenServer
import Supervisor.Spec
import Rak.Module.Service
require Logger
@behaviour Rak.Module.Service
@before_compile Rak.Module.Service.BeforeCompile
def child_spec(args), do: worker(__MODULE__, args)
def start_link(args) do
GenServer.start_link(__MODULE__, args, name: __MODULE__)
end
def init(_), do: {:ok, unquote(state)}
end
end
@doc """
message macro
```elixir
defmodule MyModule.Service do
use Rak.Module.Service
message messageName %{ count: count } do
{:noreply, state}
end
end
```
"""
  defmacro message({name, _meta, [message]}, do: block) do
    quote do
      @doc false
      def cast(unquote(name), unquote(message)) do
        GenServer.cast(__MODULE__, {:message, unquote(name), unquote(message), nil})
      end
      @doc false
      def cast(unquote(name), unquote(message), session) do
        GenServer.cast(__MODULE__, {:message, unquote(name), unquote(message), session})
      end
      @doc false
      # var!/1 makes `state` and `session` visible to the user-supplied block.
      def handle_cast({:message, unquote(name), unquote(message), var!(session)}, var!(state)) do
        unquote(block)
      end
    end
  end
@doc """
  rpc macro
```elixir
defmodule MyModule.Service do
use Rak.Module.Service
rpc messageName %{ count: count } do
{:reply, %{ count: count + 1}, state}
end
end
```
"""
  defmacro rpc({name, _meta, [message]}, do: block) do
    quote do
      @doc false
      def call(unquote(name), unquote(message)) do
        GenServer.call(__MODULE__, {:rpc, unquote(name), unquote(message), nil})
      end
      @doc false
      def call(unquote(name), unquote(message), session) do
        GenServer.call(__MODULE__, {:rpc, unquote(name), unquote(message), session})
      end
      @doc false
      # var!/1 makes `state` and `session` visible to the user-supplied block.
      def handle_call({:rpc, unquote(name), unquote(message), var!(session)}, _from, var!(state)) do
        unquote(block)
      end
    end
  end
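  # Expansion sketch: the `rpc` example in the @doc above defines (roughly)
  # call(:messageName, %{count: count}), call(:messageName, %{count: count}, session)
  # and a matching handle_call/3 clause whose body is the user-supplied block,
  # with `state` and `session` made visible to that block via var!/1.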
defp get_state(nil), do: %{}
defp get_state(state), do: state
end
|
lib/rak/module/service.ex
| 0.780202
| 0.760117
|
service.ex
|
starcoder
|
defmodule Sanbase.Clickhouse.MetricAdapter.SqlQuery do
@table "daily_metrics_v2"
@moduledoc ~s"""
  Defines the SQL queries used to access the v2 metrics in Clickhouse.
The metrics are stored in the '#{@table}' clickhouse table where each metric
is defined by a `metric_id` and every project is defined by an `asset_id`.
"""
use Ecto.Schema
import Sanbase.DateTimeUtils, only: [str_to_sec: 1]
import Sanbase.Metric.SqlQuery.Helper,
only: [
aggregation: 3,
generate_comparison_string: 3,
asset_id_filter: 2,
additional_filters: 3,
dt_to_unix: 2
]
alias Sanbase.Clickhouse.MetricAdapter.FileHandler
@name_to_metric_map FileHandler.name_to_metric_map()
@table_map FileHandler.table_map()
schema @table do
field(:datetime, :utc_datetime, source: :dt)
field(:asset_id, :integer)
field(:metric_id, :integer)
field(:value, :float)
field(:computed_at, :utc_datetime)
end
def timeseries_data_query(metric, slug_or_slugs, from, to, interval, aggregation, filters) do
args = [
str_to_sec(interval),
Map.get(@name_to_metric_map, metric),
dt_to_unix(:from, from),
dt_to_unix(:to, to),
slug_or_slugs
]
{additional_filters, args} = additional_filters(filters, args, trailing_and: true)
query = """
SELECT
toUnixTimestamp(intDiv(toUInt32(toDateTime(dt)), ?1) * ?1) AS t,
#{aggregation(aggregation, "value", "dt")}
FROM(
SELECT
asset_id,
dt,
value
FROM #{Map.get(@table_map, metric)} FINAL
PREWHERE
#{additional_filters}
#{maybe_convert_to_date(:after, metric, "dt", "toDateTime(?3)")} AND
#{maybe_convert_to_date(:before, metric, "dt", "toDateTime(?4)")} AND
isNotNull(value) AND NOT isNaN(value) AND
#{asset_id_filter(slug_or_slugs, argument_position: 5)} AND
metric_id = ( SELECT metric_id FROM metric_metadata FINAL PREWHERE name = ?2 LIMIT 1 )
)
GROUP BY t
ORDER BY t
"""
{query, args}
end
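  # The positional placeholders in the query above line up with `args`:
  # ?1 interval in seconds, ?2 internal metric name, ?3 from (unix ts),
  # ?4 to (unix ts), ?5 slug or slugs. Any additional filters extend the
  # args list via additional_filters/3.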
def timeseries_data_per_slug_query(
metric,
slug_or_slugs,
from,
to,
interval,
aggregation,
filters
) do
args = [
str_to_sec(interval),
Map.get(@name_to_metric_map, metric),
dt_to_unix(:from, from),
dt_to_unix(:to, to),
slug_or_slugs
]
{additional_filters, args} = additional_filters(filters, args, trailing_and: true)
query = """
SELECT
toUnixTimestamp(intDiv(toUInt32(toDateTime(dt)), ?1) * ?1) AS t,
name AS slug,
#{aggregation(aggregation, "value", "dt")} AS value
FROM(
SELECT
asset_id,
dt,
value
FROM #{Map.get(@table_map, metric)} FINAL
PREWHERE
#{additional_filters}
#{maybe_convert_to_date(:after, metric, "dt", "toDateTime(?3)")} AND
#{maybe_convert_to_date(:before, metric, "dt", "toDateTime(?4)")} AND
isNotNull(value) AND NOT isNaN(value) AND
#{asset_id_filter(slug_or_slugs, argument_position: 5)} AND
metric_id = ( SELECT metric_id FROM metric_metadata FINAL PREWHERE name = ?2 LIMIT 1 )
)
INNER JOIN (
SELECT asset_id, name
FROM asset_metadata FINAL
) USING (asset_id)
GROUP BY t, name
ORDER BY t
"""
{query, args}
end
def aggregated_timeseries_data_query(metric, slugs, from, to, aggregation, filters) do
args = [
slugs,
      # Fetch the internal metric name; fall back to the given name if missing.
      Map.get(@name_to_metric_map, metric, metric),
dt_to_unix(:from, from),
dt_to_unix(:to, to)
]
{additional_filters, args} = additional_filters(filters, args, trailing_and: true)
query = """
SELECT slug, SUM(value), toUInt32(SUM(has_changed))
FROM (
SELECT
arrayJoin([?1]) AS slug,
toFloat64(0) AS value,
toUInt32(0) AS has_changed
UNION ALL
SELECT
name AS slug,
#{aggregation(aggregation, "value", "dt")} AS value,
toUInt32(1) AS has_changed
FROM(
SELECT
dt,
asset_id,
value
FROM #{Map.get(@table_map, metric)} FINAL
PREWHERE
#{additional_filters}
#{asset_id_filter(slugs, argument_position: 1)} AND
metric_id = ( SELECT metric_id FROM metric_metadata FINAL PREWHERE name = ?2 LIMIT 1 ) AND
isNotNull(value) AND NOT isNaN(value) AND
#{maybe_convert_to_date(:after, metric, "dt", "toDateTime(?3)")} AND
#{maybe_convert_to_date(:before, metric, "dt", "toDateTime(?4)")}
)
INNER JOIN (
SELECT asset_id, name
FROM asset_metadata FINAL
) USING (asset_id)
GROUP BY slug
)
GROUP BY slug
"""
{query, args}
end
def slugs_by_filter_query(metric, from, to, operation, threshold, aggregation, filters) do
{query, args} = aggregated_slugs_base_query(metric, from, to, aggregation, filters)
query =
query <>
"""
WHERE #{generate_comparison_string("value", operation, threshold)}
"""
{query, args}
end
def slugs_order_query(metric, from, to, direction, aggregation, filters)
when direction in [:asc, :desc] do
{query, args} = aggregated_slugs_base_query(metric, from, to, aggregation, filters)
query =
query <>
"""
ORDER BY a.value #{direction |> Atom.to_string() |> String.upcase()}
"""
{query, args}
end
defp aggregated_slugs_base_query(metric, from, to, aggregation, filters) do
args = [
      # Fetch the internal metric name; fall back to the given name if missing.
      Map.get(@name_to_metric_map, metric, metric),
dt_to_unix(:from, from),
dt_to_unix(:to, to)
]
{additional_filters, args} = additional_filters(filters, args, trailing_and: true)
query = """
SELECT name AS slug, value
FROM (
SELECT
asset_id,
#{aggregation(aggregation, "value", "dt")} AS value
FROM(
SELECT
dt,
asset_id,
value
FROM #{Map.get(@table_map, metric)} FINAL
PREWHERE
#{additional_filters}
metric_id = ( SELECT metric_id FROM metric_metadata FINAL PREWHERE name = ?1 LIMIT 1 ) AND
#{maybe_convert_to_date(:after, metric, "dt", "toDateTime(?2)")} AND
#{maybe_convert_to_date(:before, metric, "dt", "toDateTime(?3)")}
)
GROUP BY asset_id
) AS a
ALL LEFT JOIN
(
SELECT asset_id, name
FROM asset_metadata FINAL
) AS b USING (asset_id)
"""
{query, args}
end
def available_slugs_query() do
query = """
SELECT DISTINCT(name)
FROM asset_metadata FINAL
PREWHERE
asset_id GLOBAL IN (
SELECT DISTINCT(asset_id)
FROM available_metrics
)
"""
args = []
{query, args}
end
def available_slugs_for_metric_query(metric) do
query = """
SELECT DISTINCT(name)
FROM asset_metadata FINAL
PREWHERE asset_id in (
SELECT DISTINCT(asset_id)
FROM available_metrics
PREWHERE
metric_id = ( SELECT metric_id FROM metric_metadata FINAL PREWHERE name = ?1 LIMIT 1 ) AND
end_dt > now() - INTERVAL 14 DAY
)
"""
args = [Map.get(@name_to_metric_map, metric)]
{query, args}
end
def last_datetime_computed_at_query(metric, slug) do
query = """
SELECT toUnixTimestamp(argMax(computed_at, dt))
FROM #{Map.get(@table_map, metric)} FINAL
PREWHERE
metric_id = ( SELECT metric_id FROM metric_metadata FINAL PREWHERE name = ?1 LIMIT 1 ) AND
asset_id = ( SELECT asset_id FROM asset_metadata FINAL PREWHERE name = ?2 LIMIT 1 )
"""
args = [Map.get(@name_to_metric_map, metric), slug]
{query, args}
end
def first_datetime_query(metric, nil) do
query = """
SELECT
toUnixTimestamp(start_dt)
FROM available_metrics FINAL
PREWHERE
      metric_id = ( SELECT metric_id FROM metric_metadata FINAL PREWHERE name = ?1 LIMIT 1 )
"""
args = [Map.get(@name_to_metric_map, metric)]
{query, args}
end
def first_datetime_query(metric, slug) do
query = """
SELECT
toUnixTimestamp(start_dt)
FROM available_metrics FINAL
PREWHERE
asset_id = ( SELECT asset_id FROM asset_metadata FINAL PREWHERE name = ?1 LIMIT 1 ) AND
metric_id = ( SELECT metric_id FROM metric_metadata FINAL PREWHERE name = ?2 LIMIT 1 )
"""
args = [slug, Map.get(@name_to_metric_map, metric)]
{query, args}
end
def available_metrics_for_slug_query(slug) do
query = """
SELECT name
FROM available_metrics FINAL
INNER JOIN (
SELECT name, metric_id
FROM metric_metadata FINAL
) USING (metric_id)
PREWHERE
asset_id = ( SELECT asset_id FROM asset_metadata FINAL PREWHERE name = ?1 LIMIT 1 ) AND
end_dt > now() - INTERVAL 14 DAY
"""
args = [slug]
{query, args}
end
# Private functions
defp maybe_convert_to_date(:after, metric, dt_column, code) do
case Map.get(@table_map, metric) do
"daily_" <> _rest_of_table -> "#{dt_column} >= toDate(#{code})"
_ -> "#{dt_column} >= #{code}"
end
end
defp maybe_convert_to_date(:before, metric, dt_column, code) do
case Map.get(@table_map, metric) do
"daily_" <> _rest_of_table -> "#{dt_column} <= toDate(#{code})"
_ -> "#{dt_column} <= #{code}"
end
end
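  # Example: for a metric stored in a "daily_" table the generated condition
  # compares dates, e.g. `dt >= toDate(toDateTime(?3))`, while intraday
  # tables keep the full DateTime comparison `dt >= toDateTime(?3)`.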
end
|
lib/sanbase/clickhouse/metric/sql_query/metric_sql_query.ex
| 0.804636
| 0.430686
|
metric_sql_query.ex
|
starcoder
|
defmodule Cldr.Number.Format do
@moduledoc """
Functions to manage the collection of number patterns defined in Cldr.
Number patterns affect how numbers are interpreted in a localized context.
Here are some examples, based on the French locale. The "." shows where the
decimal point should go. The "," shows where the thousands separator should
go. A "0" indicates zero-padding: if the number is too short, a zero (in the
locale's numeric set) will go there. A "#" indicates no padding: if the
number is too short, nothing goes there. A "¤" shows where the currency sign
will go. The following illustrates the effects of different patterns for the
French locale, with the number "1234.567". Notice how the pattern characters
',' and '.' are replaced by the characters appropriate for the locale.
## Number Pattern Examples
| Pattern | Currency | Text |
| ------------- | :-------------: | ----------: |
| #,##0.## | n/a | 1 234,57 |
| #,##0.### | n/a | 1 234,567 |
| ###0.##### | n/a | 1234,567 |
| ###0.0000# | n/a | 1234,5670 |
| 00000.0000 | n/a | 01234,5670 |
| #,##0.00 ¤ | EUR | 1 234,57 € |
The number of # placeholder characters before the decimal do not matter,
since no limit is placed on the maximum number of digits. There should,
however, be at least one zero some place in the pattern. In currency formats,
the number of digits after the decimal also do not matter, since the
information in the supplemental data (see Supplemental Currency Data) is used
  to override the number of decimal places (and the rounding) according to
  the currency that is being formatted. That can be seen in the currency row
  of the above chart, where the number of decimal places comes from the
  currency itself rather than from the pattern.
Details of the number formats are described in the
[Unicode documentation](http://unicode.org/reports/tr35/tr35-numbers.html#Number_Format_Patterns)
"""
@type format :: String.t()
@short_format_styles [:decimal_long, :decimal_short, :currency_short, :currency_long]
@format_styles [:standard, :currency, :accounting, :scientific, :percent] ++
@short_format_styles
defstruct @format_styles ++ [:currency_spacing, :other]
require Cldr
alias Cldr.Number.System
alias Cldr.Locale
alias Cldr.LanguageTag
def short_format_styles do
@short_format_styles
end
@doc """
Returns the list of decimal formats in the configured locales including
the list of locales configured for precompilation in `config.exs`.
This function exists to allow the decimal formatter
to precompile all the known formats at compile time.
## Arguments
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Example
      Cldr.Number.Format.decimal_format_list(MyApp.Cldr)
["#", "#,##,##0%", "#,##,##0.###", "#,##,##0.00¤", "#,##,##0.00¤;(#,##,##0.00¤)",
"#,##,##0 %", "#,##0%", "#,##0.###", "#,##0.00 ¤",
"#,##0.00 ¤;(#,##0.00 ¤)", "#,##0.00¤", "#,##0.00¤;(#,##0.00¤)",
"#,##0 %", "#0%", "#0.######", "#0.00 ¤", "#E0", "%#,##0", "% #,##0",
"0", "0.000000E+000", "0000 M ¤", "0000¤", "000G ¤", "000K ¤", "000M ¤",
"000T ¤", "000mM ¤", "000m ¤", "000 Bio'.' ¤", "000 Bln ¤", "000 Bn ¤",
"000 B ¤", "000 E ¤", "000 K ¤", "000 MRD ¤", "000 Md ¤", "000 Mio'.' ¤",
"000 Mio ¤", "000 Mld ¤", "000 Mln ¤", "000 Mn ¤", "000 Mrd'.' ¤",
"000 Mrd ¤", "000 Mr ¤", "000 M ¤", "000 NT ¤", "000 N ¤", "000 Tn ¤",
"000 Tr ¤", ...]
"""
@spec decimal_format_list(Cldr.backend()) :: list(format())
def decimal_format_list(backend) do
Module.concat(backend, Number.Format).decimal_format_list
end
@doc """
Returns the list of decimal formats for a configured locale.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`. The default
is `Cldr.get_locale/1`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
This function exists to allow the decimal formatter to precompile all
the known formats at compile time. Its use is not otherwise recommended.
## Example
iex> Cldr.Number.Format.decimal_format_list_for("en", MyApp.Cldr)
{:ok, ["#,##0%", "#,##0.###", "#E0", "0 billion", "0 million", "0 thousand",
"0 trillion", "00 billion", "00 million", "00 thousand", "00 trillion",
"000 billion", "000 million", "000 thousand", "000 trillion", "000B", "000K",
"000M", "000T", "00B", "00K", "00M", "00T", "0B", "0K", "0M", "0T",
"¤#,##0.00", "¤#,##0.00;(¤#,##0.00)", "¤000B", "¤000K", "¤000M",
"¤000T", "¤00B", "¤00K", "¤00M", "¤00T", "¤0B", "¤0K", "¤0M", "¤0T"]}
"""
@spec decimal_format_list_for(LanguageTag.t() | Locale.locale_name(), Cldr.backend()) ::
{:ok, list(String.t())} | {:error, {module(), String.t()}}
def decimal_format_list_for(locale, backend) do
Module.concat(backend, Number.Format).decimal_format_list_for(locale)
end
@doc """
Returns the decimal formats defined for a given locale.
## Options
* `locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`. The default
is `Cldr.get_locale/1`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Examples
Cldr.Number.Format.all_formats_for("en", MyApp.Cldr)
#=> {:ok, %{latn: %Cldr.Number.Format{
accounting: "¤#,##0.00;(¤#,##0.00)",
currency: "¤#,##0.00",
percent: "#,##0%",
scientific: "#E0",
standard: "#,##0.###",
currency_short: [{"1000", [one: "¤0K", other: "¤0K"]},
{"10000", [one: "¤00K", other: "¤00K"]},
{"100000", [one: "¤000K", other: "¤000K"]},
{"1000000", [one: "¤0M", other: "¤0M"]},
{"10000000", [one: "¤00M", other: "¤00M"]},
{"100000000", [one: "¤000M", other: "¤000M"]},
{"1000000000", [one: "¤0B", other: "¤0B"]},
{"10000000000", [one: "¤00B", other: "¤00B"]},
{"100000000000", [one: "¤000B", other: "¤000B"]},
{"1000000000000", [one: "¤0T", other: "¤0T"]},
{"10000000000000", [one: "¤00T", other: "¤00T"]},
{"100000000000000", [one: "¤000T", other: "¤000T"]}],
....
}}
"""
@spec all_formats_for(LanguageTag.t() | Locale.locale_name(), Cldr.backend()) ::
{:ok, map()} | {:error, {module(), String.t()}}
def all_formats_for(locale, backend) do
Module.concat(backend, Number.Format).all_formats_for(locale)
end
@doc """
Returns the decimal formats defined for a given locale.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`. The default
is `Cldr.get_locale/1`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Returns
  * a list of decimal formats or
* raises an exception
See `Cldr.Number.Format.all_formats_for/2` for further information.
"""
def all_formats_for!(locale, backend) do
Module.concat(backend, Number.Format).all_formats_for!(locale)
end
@doc """
  Returns the minimum grouping digits for a locale.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`. The default
is `Cldr.get_locale/0`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Returns
  * `{:ok, minimum_digits}` or
* `{:error, {exception, message}}`
## Examples
iex> Cldr.Number.Format.minimum_grouping_digits_for("en", MyApp.Cldr)
{:ok, 1}
"""
@spec minimum_grouping_digits_for(LanguageTag.t(), Cldr.backend()) ::
{:ok, non_neg_integer} | {:error, {module(), String.t()}}
def minimum_grouping_digits_for(locale, backend) do
Module.concat(backend, Number.Format).minimum_grouping_digits_for(locale)
end
@doc """
  Returns the minimum grouping digits for a locale or raises if there is an error.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`. The default
is `Cldr.get_locale/1`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Examples
iex> Cldr.Number.Format.minimum_grouping_digits_for!("en", MyApp.Cldr)
1
Cldr.Number.Format.minimum_grouping_digits_for!(:invalid)
** (Cldr.UnknownLocaleError) The locale :invalid is invalid
"""
def minimum_grouping_digits_for!(locale, backend) do
Module.concat(backend, Number.Format).minimum_grouping_digits_for!(locale)
end
@doc """
Returns the default grouping for a locale.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`. The default
is `Cldr.get_locale/0`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Returns
  * `{:ok, grouping}` or
* `{:error, {exception, message}}`
## Examples
iex> Cldr.Number.Format.default_grouping_for("en", MyApp.Cldr)
{:ok, %{fraction: %{first: 0, rest: 0}, integer: %{first: 3, rest: 3}}}
"""
@spec default_grouping_for(LanguageTag.t() | Cldr.Locale.locale_name(), Cldr.backend()) ::
          {:ok, map()} | {:error, {module(), String.t()}}
def default_grouping_for(locale, backend) do
Module.concat(backend, Number.Format).default_grouping_for(locale)
end
@doc """
Returns the default grouping for a locale or raises if there is an error.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`. The default
is `Cldr.get_locale/1`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Examples
iex> Cldr.Number.Format.default_grouping_for!("en", MyApp.Cldr)
%{fraction: %{first: 0, rest: 0}, integer: %{first: 3, rest: 3}}
Cldr.Number.Format.default_grouping_for!(:invalid)
** (Cldr.UnknownLocaleError) The locale :invalid is invalid
"""
@spec default_grouping_for!(LanguageTag.t() | Cldr.Locale.locale_name(), Cldr.backend()) ::
map() | no_return
def default_grouping_for!(locale, backend) do
Module.concat(backend, Number.Format).default_grouping_for!(locale)
end
@doc """
Returns the currency space for a given locale and
number system.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`. The default
is `Cldr.get_locale/1`
* `number_system` is any valid number system or number system type returned
by `Cldr.Number.System.number_systems_for/2`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
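  ## Example

      Cldr.Number.Format.currency_spacing("en", :latn, MyApp.Cldr)
      #=> %{before: %{...}, after: %{...}}

  (Output abridged; the exact spacing data comes from CLDR.)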
"""
@spec currency_spacing(
LanguageTag.t() | Cldr.Locale.locale_name(),
System.system_name(),
Cldr.backend()
) :: map() | {:error, {module(), String.t()}}
def currency_spacing(locale, number_system, backend) do
backend.currency_spacing(locale, number_system)
end
@doc """
  Returns the predefined formats for a given `locale` and `number_system`.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`. The default
is `Cldr.get_locale/1`
* `number_system` is any valid number system or number system type returned
by `Cldr.Number.System.number_systems_for/2` or `Cldr.Number.System.number_system_names_for/2`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Example
Cldr.Number.Format.formats_for "fr", :native, MyApp.Cldr
#=> {:ok, %Cldr.Number.Format{
accounting: "#,##0.00 ¤;(#,##0.00 ¤)",
currency: "#,##0.00 ¤",
percent: "#,##0 %",
scientific: "#E0",
standard: "#,##0.###"
currency_short: [{"1000", [one: "0 k ¤", other: "0 k ¤"]},
{"10000", [one: "00 k ¤", other: "00 k ¤"]},
{"100000", [one: "000 k ¤", other: "000 k ¤"]},
{"1000000", [one: "0 M ¤", other: "0 M ¤"]},
{"10000000", [one: "00 M ¤", other: "00 M ¤"]},
{"100000000", [one: "000 M ¤", other: "000 M ¤"]},
{"1000000000", [one: "0 Md ¤", other: "0 Md ¤"]},
{"10000000000", [one: "00 Md ¤", other: "00 Md ¤"]},
{"100000000000", [one: "000 Md ¤", other: "000 Md ¤"]},
{"1000000000000", [one: "0 Bn ¤", other: "0 Bn ¤"]},
{"10000000000000", [one: "00 Bn ¤", other: "00 Bn ¤"]},
{"100000000000000", [one: "000 Bn ¤", other: "000 Bn ¤"]}],
...
}}
"""
@spec formats_for(LanguageTag.t() | Locale.locale_name(), atom | String.t(), Cldr.backend()) ::
{:ok, map()} | {:error, {module(), String.t()}}
def formats_for(locale, number_system, backend) do
Module.concat(backend, Number.Format).formats_for(locale, number_system)
end
@doc """
  Returns the predefined formats for a given `locale` and `number_system` or raises
if either the `locale` or `number_system` is invalid.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`. The default
is `Cldr.get_locale/1`
* `number_system` is any valid number system or number system type returned
by `Cldr.Number.System.number_systems_for/2`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
"""
@spec formats_for!(LanguageTag.t(), Cldr.Number.System.system_name(), Cldr.backend()) ::
map() | no_return()
def formats_for!(locale, number_system, backend) do
case formats_for(locale, number_system, backend) do
{:ok, formats} -> formats
{:error, {exception, message}} -> raise exception, message
end
end
@doc """
Returns the format styles available for a `locale`.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`. The default
is `Cldr.get_locale/1`
* `number_system` is any valid number system or number system type returned
by `Cldr.Number.System.number_systems_for/2`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
Format styles standardise the access to a format defined for a common
  use. These types are `:standard`, `:currency`, `:accounting`, `:scientific`,
  `:percent`, `:currency_short`, `:decimal_short` and `:decimal_long`.
These types can be used when formatting a number for output. For example
`Cldr.Number.to_string(123.456, format: :percent)`.
## Example
iex> Cldr.Number.Format.format_styles_for("en", :latn, MyApp.Cldr)
{:ok, [:accounting, :currency, :currency_long, :currency_short,
:decimal_long, :decimal_short, :percent, :scientific, :standard]}
"""
@reject_styles [:__struct__, :currency_spacing, :other]
@spec format_styles_for(
LanguageTag.t() | Locale.locale_name(),
System.system_name(),
Cldr.backend()
) :: {:ok, list(atom())} | {:error, {module(), String.t()}}
def format_styles_for(%LanguageTag{} = locale, number_system, backend) do
with {:ok, formats} <- formats_for(locale, number_system, backend) do
{
:ok,
formats
|> Map.to_list()
|> Enum.reject(fn {k, v} -> is_nil(v) || k in @reject_styles end)
|> Enum.into(%{})
|> Map.keys()
}
end
end
def format_styles_for(locale_name, number_system, backend) when is_binary(locale_name) do
with {:ok, locale} <- Cldr.validate_locale(locale_name, backend) do
format_styles_for(locale, number_system, backend)
end
end
@doc """
Returns the short formats available for a locale.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`. The default
is `Cldr.get_locale/1`
* `number_system` is any valid number system or number system type returned
by `Cldr.Number.System.number_systems_for/2`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Example
iex> Cldr.Number.Format.short_format_styles_for("he", :latn, MyApp.Cldr)
{:ok, [:currency_short, :decimal_long, :decimal_short]}
"""
@isnt_really_a_short_format [:currency_long]
@short_formats MapSet.new(@short_format_styles -- @isnt_really_a_short_format)
@spec short_format_styles_for(
LanguageTag.t() | Cldr.Locale.locale_name(),
binary | atom,
Cldr.backend()
) ::
{:ok, list(atom())} | {:error, {module(), String.t()}}
@dialyzer {:nowarn_function, short_format_styles_for: 3}
def short_format_styles_for(%LanguageTag{} = locale, number_system, backend) do
with {:ok, formats} <- format_styles_for(locale, number_system, backend) do
{
:ok,
formats
|> MapSet.new()
|> MapSet.intersection(@short_formats)
|> MapSet.to_list()
}
end
end
def short_format_styles_for(locale_name, number_system, backend) when is_binary(locale_name) do
with {:ok, locale} <- Cldr.validate_locale(locale_name, backend) do
short_format_styles_for(locale, number_system, backend)
end
end
@doc """
Returns the decimal format styles that are supported by
`Cldr.Number.Formatter.Decimal`.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`. The default
is `Cldr.get_locale/1`
* `number_system` is any valid number system or number system type returned
by `Cldr.Number.System.number_systems_for/2`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Example
iex> Cldr.Number.Format.decimal_format_styles_for("en", :latn, MyApp.Cldr)
{:ok, [:accounting, :currency, :currency_long, :percent,
:scientific, :standard]}
"""
@spec decimal_format_styles_for(
LanguageTag.t() | Locale.locale_name(),
System.system_name(),
Cldr.backend()
) :: {:ok, list(atom())} | {:error, {module(), String.t()}}
@dialyzer {:nowarn_function, decimal_format_styles_for: 3}
def decimal_format_styles_for(%LanguageTag{} = locale, number_system, backend) do
with {:ok, styles} <- format_styles_for(locale, number_system, backend),
{:ok, short_styles} <- short_format_styles_for(locale, number_system, backend) do
{:ok, styles -- short_styles -- [:currency_long, :currency_spacing, :other]}
end
end
def decimal_format_styles_for(locale_name, number_system, backend)
when is_binary(locale_name) do
with {:ok, locale} <- Cldr.validate_locale(locale_name, backend) do
decimal_format_styles_for(locale, number_system, backend)
end
end
@doc """
Returns the number system types available for a `locale`
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`. The default
is `Cldr.get_locale/1`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
A number system type is an identifier that categorises number systems
  that comprise a set of digits or rules for transliterating or translating
digits and a number system name for determining plural rules and format
masks.
If that all sounds a bit complicated then the default `number system type`
called `:default` is probably what you want nearly all the time.
## Examples
iex> Cldr.Number.Format.format_system_types_for("pl", MyApp.Cldr)
{:ok, [:default, :native]}
iex> Cldr.Number.Format.format_system_types_for("ru", MyApp.Cldr)
{:ok, [:default, :native]}
iex> Cldr.Number.Format.format_system_types_for("th", MyApp.Cldr)
{:ok, [:default, :native]}
"""
@spec format_system_types_for(Cldr.Locale.locale_name() | LanguageTag.t(), Cldr.backend()) ::
{:ok, Keyword.t()} | {:error, {module(), String.t()}}
def format_system_types_for(%LanguageTag{} = locale, backend) do
with {:ok, _} <- Cldr.validate_locale(locale, backend) do
{:ok, systems} = System.number_systems_for(locale, backend)
{:ok, Map.keys(systems)}
end
end
def format_system_types_for(locale_name, backend) when is_binary(locale_name) do
with {:ok, locale} <- Cldr.validate_locale(locale_name, backend) do
format_system_types_for(locale, backend)
end
end
@doc """
Returns the names of the number systems for the `locale`.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`. The default
is `Cldr.get_locale/1`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Examples
iex> Cldr.Number.Format.format_system_names_for("th", MyApp.Cldr)
{:ok, [:latn, :thai]}
iex> Cldr.Number.Format.format_system_names_for("pl", MyApp.Cldr)
{:ok, [:latn]}
"""
@spec format_system_names_for(LanguageTag.t() | Cldr.Locale.locale_name(), Cldr.backend()) ::
{:ok, list(atom)} | {:error, {module(), String.t()}}
def format_system_names_for(%LanguageTag{} = locale, backend) do
Cldr.Number.System.number_system_names_for(locale, backend)
end
def format_system_names_for(locale_name, backend) when is_binary(locale_name) do
with {:ok, locale} <- Cldr.validate_locale(locale_name, backend) do
format_system_names_for(locale, backend)
end
end
end
|
lib/cldr/number/format.ex
| 0.820649
| 0.773152
|
format.ex
|
starcoder
|
defmodule Day07.Connector do
@moduledoc """
A `t:Intcode.Computer.handler/0` that connects the input of one computer to the output
of another.
This module has functions for creating collections of handlers with different phase
settings and wiring those handlers together correctly. A Connector will usually have
a `next` connector, to which it will send any outputs it sees. If it doesn't have a
`next` connector (because it's not set up for feedback and is the last in the chain),
then when it receives an output from the computer, it will return it and finish its
task.
When a Connector receives a value from another connector, it adds it to a queue to be
sent the next time the computer requests input.
"""
@typedoc """
A PID for a connector process.
"""
@type t :: pid
@typedoc """
A list of phase settings for the amplifiers in an array.
Each setting should be unique: the list should contain no duplicates.
"""
@type phase_settings :: list(number)
@doc """
Create a list of connectors with the given phase settings.
This will create as many connectors as there are settings in the list.
Each connector will be set up to forward its outputs to the next connector
in the list. The final connector will have no `next` connector and will
simply return the output it receives from its `Intcode.Computer`.
"""
@spec async_with_settings(phase_settings) :: list(Task.t())
def async_with_settings(settings) do
Enum.reverse(settings)
|> Enum.reduce([], fn setting, amps ->
case amps do
[] -> [async(nil, setting)]
[hd | tl] -> [async(hd.pid, setting) | [hd | tl]]
end
end)
end
@doc """
Create a feedback loop of connectors with the given phase settings.
Unlike with `async_with_settings/1`, the final connector in the list will
send its output back to the first connector, creating a feedback loop. Only
when the final `Intcode.Computer` halts will the output be returned.
"""
@spec async_with_feedback(phase_settings) :: list(Task.t())
def async_with_feedback(settings) do
conns = async_with_settings(settings)
set_next(List.last(conns).pid, List.first(conns).pid)
conns
end
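  # Usage sketch (hypothetical settings; each Intcode.Computer must be
  # started separately with the matching connector pid as its handler):
  #
  #     conns = Day07.Connector.async_with_feedback([9, 8, 7, 6, 5])
  #     Day07.Connector.send_input(List.first(conns).pid, 0)
  #     signal = conns |> List.last() |> Task.await(:infinity)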
@spec async(t | nil, number) :: Task.t()
defp async(next, setting) do
Task.async(__MODULE__, :run, [next, setting])
end
@doc false
@spec run(t | nil, number) :: number
def run(next, setting) do
loop(next, nil, [setting], nil)
end
@doc """
Send an input value to a particular connector.
This will add the value to that connector's queue so it can be sent to the computer
the next time it requests input.
Generally, the connectors use this to send information to each other, but it's also
available to send the initial value to the first connector to start the process.
"""
@spec send_input(t, number) :: any
def send_input(connector, value) do
send(connector, {:chain, value})
end
@spec set_next(t, t) :: any
defp set_next(connector, next) do
send(connector, {:set_next, next})
end
defp loop(next, waiting_pid, queue, last_output) do
receive do
{:input, pid} ->
case queue do
[] ->
loop(next, pid, [], last_output)
[hd | tl] ->
Intcode.send_input(pid, hd)
loop(next, nil, tl, last_output)
end
{:chain, value} ->
case waiting_pid do
nil ->
loop(next, nil, queue ++ [value], last_output)
pid ->
Intcode.send_input(pid, value)
loop(next, nil, [], last_output)
end
{:output, _, value} ->
case next do
nil ->
value
_ ->
send_input(next, value)
loop(next, waiting_pid, queue, value)
end
{:set_next, new_next} ->
loop(new_next, waiting_pid, queue, last_output)
{:halt, _} ->
last_output
end
end
end
|
aoc2019_elixir/apps/day07/lib/connector.ex
| 0.819821
| 0.673266
|
connector.ex
|
starcoder
|
defmodule Set do
@moduledoc %S"""
This module specifies the Set API expected to be
implemented by different representations.
It also provides functions that redirect to the
underlying Set, allowing a developer to work with
different Set implementations using one API.
To create a new set, use the `new` functions defined
by each set type:
HashSet.new #=> creates an empty HashSet
For simplicity's sake, in the examples below every time
`new` is used, it implies one of the module-specific
calls like above.
## Protocols
Sets are required to implement the `Enumerable` protocol,
allowing one to write:
Enum.each(set, fn k ->
IO.inspect k
end)
## Match
Sets are required to implement all operations
using the match (`===`) operator. Any deviation from
this behaviour should be avoided and explicitly documented.
"""
use Behaviour
@type value :: any
@type values :: [ value ]
@type t :: tuple
defcallback new :: t
defcallback new(Enum.t) :: t
defcallback new(Enum.t, (any -> any)) :: t
defcallback delete(t, value) :: t
defcallback difference(t, t) :: t
defcallback disjoint?(t, t) :: boolean
defcallback empty(t) :: t
defcallback equal?(t, t) :: boolean
defcallback intersection(t, t) :: t
defcallback member?(t, value) :: boolean
defcallback put(t, value) :: t
defcallback reduce(t, Enumerable.acc, Enumerable.reducer) :: Enumerable.result
defcallback size(t) :: non_neg_integer
defcallback subset?(t, t) :: boolean
defcallback to_list(t) :: list()
defcallback union(t, t) :: t
defmacrop target(set) do
quote do
if is_tuple(unquote(set)) do
elem(unquote(set), 0)
else
unsupported_set(unquote(set))
end
end
end
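  # The first element of a set's record tuple is the implementing module
  # (e.g. HashSet), so target/1 dispatches each Set call to the right
  # implementation at runtime.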
@doc """
Deletes `value` from `set`.
## Examples
iex> s = HashSet.new([1, 2, 3])
...> Set.delete(s, 4) |> Enum.sort
[1, 2, 3]
iex> s = HashSet.new([1, 2, 3])
...> Set.delete(s, 2) |> Enum.sort
[1, 3]
"""
@spec delete(t, value) :: t
def delete(set, value) do
target(set).delete(set, value)
end
@doc """
Returns a set that is `set1` without the members of `set2`.
  Notice this function is polymorphic as it calculates the difference
  for sets of any type. Each set implementation also provides a `difference`
  function, but it can only work with sets of the same type.
## Examples
iex> Set.difference(HashSet.new([1,2]), HashSet.new([2,3,4])) |> Enum.sort
[1]
"""
@spec difference(t, t) :: t
def difference(set1, set2) do
target1 = target(set1)
target2 = target(set2)
if target1 == target2 do
target1.difference(set1, set2)
else
target2.reduce(set2, { :cont, set1 }, fn v, acc ->
{ :cont, target1.delete(acc, v) }
end) |> elem(1)
end
end
@doc """
Checks if `set1` and `set2` have no members in common.
Notice this function is polymorphic as it checks for disjoint sets of
any type. Each set implementation also provides a `disjoint?` function,
  but it can only work with sets of the same type.
## Examples
iex> Set.disjoint?(HashSet.new([1, 2]), HashSet.new([3, 4]))
true
iex> Set.disjoint?(HashSet.new([1, 2]), HashSet.new([2, 3]))
false
"""
@spec disjoint?(t, t) :: boolean
def disjoint?(set1, set2) do
target1 = target(set1)
target2 = target(set2)
if target1 == target2 do
target1.disjoint?(set1, set2)
else
target2.reduce(set2, { :cont, true }, fn member, acc ->
case target1.member?(set1, member) do
false -> { :cont, acc }
_ -> { :halt, false }
end
end) |> elem(1)
end
end
@doc """
Returns an empty set of the same type as `set`.
"""
@spec empty(t) :: t
def empty(set) do
target(set).empty(set)
end
@doc """
Check if two sets are equal using `===`.
Notice this function is polymorphic as it compares sets of
any type. Each set implementation also provides an `equal?`
  function, but it can only work with sets of the same type.
## Examples
iex> Set.equal?(HashSet.new([1, 2]), HashSet.new([2, 1, 1]))
true
iex> Set.equal?(HashSet.new([1, 2]), HashSet.new([3, 4]))
false
"""
@spec equal?(t, t) :: boolean
def equal?(set1, set2) do
target1 = target(set1)
target2 = target(set2)
cond do
target1 == target2 ->
target1.equal?(set1, set2)
target1.size(set1) == target2.size(set2) ->
do_subset?(target1, target2, set1, set2)
true ->
false
end
end
@doc """
Returns a set containing only members in common between `set1` and `set2`.
  Notice this function is polymorphic as it calculates the intersection of
  sets of any type. Each set implementation also provides an `intersection`
  function, but it can only work with sets of the same type.
## Examples
iex> Set.intersection(HashSet.new([1,2]), HashSet.new([2,3,4])) |> Enum.sort
[2]
iex> Set.intersection(HashSet.new([1,2]), HashSet.new([3,4])) |> Enum.sort
[]
"""
@spec intersection(t, t) :: t
def intersection(set1, set2) do
target1 = target(set1)
target2 = target(set2)
if target1 == target2 do
target1.intersection(set1, set2)
else
target1.reduce(set1, { :cont, target1.empty(set1) }, fn v, acc ->
{ :cont, if(target2.member?(set2, v), do: target1.put(acc, v), else: acc) }
end) |> elem(1)
end
end
@doc """
Checks if `set` contains `value`.
## Examples
iex> Set.member?(HashSet.new([1, 2, 3]), 2)
true
iex> Set.member?(HashSet.new([1, 2, 3]), 4)
false
"""
@spec member?(t, value) :: boolean
def member?(set, value) do
target(set).member?(set, value)
end
@doc """
Inserts `value` into `set` if it does not already contain it.
## Examples
iex> Set.put(HashSet.new([1, 2, 3]), 3) |> Enum.sort
[1, 2, 3]
iex> Set.put(HashSet.new([1, 2, 3]), 4) |> Enum.sort
[1, 2, 3, 4]
"""
@spec put(t, value) :: t
def put(set, value) do
target(set).put(set, value)
end
@doc """
Returns the number of elements in `set`.
## Examples
iex> Set.size(HashSet.new([1, 2, 3]))
3
"""
@spec size(t) :: non_neg_integer
def size(set) do
target(set).size(set)
end
@doc """
Checks if `set1`'s members are all contained in `set2`.
  Notice this function is polymorphic as it checks subset membership for
  sets of any type. Each set implementation also provides a `subset?` function,
  but it can only work with sets of the same type.
## Examples
iex> Set.subset?(HashSet.new([1, 2]), HashSet.new([1, 2, 3]))
true
iex> Set.subset?(HashSet.new([1, 2, 3]), HashSet.new([1, 2]))
false
"""
@spec subset?(t, t) :: boolean
def subset?(set1, set2) do
target1 = target(set1)
target2 = target(set2)
if target1 == target2 do
target1.subset?(set1, set2)
else
do_subset?(target1, target2, set1, set2)
end
end
@doc """
Converts `set` to a list.
## Examples
iex> HashSet.to_list(HashSet.new([1, 2, 3])) |> Enum.sort
[1,2,3]
"""
@spec to_list(t) :: list
def to_list(set) do
target(set).to_list(set)
end
@doc """
Returns a set containing all members of `set1` and `set2`.
  Notice this function is polymorphic as it calculates the union of
  sets of any type. Each set implementation also provides a `union` function,
  but it can only work with sets of the same type.
## Examples
iex> Set.union(HashSet.new([1,2]), HashSet.new([2,3,4])) |> Enum.sort
[1,2,3,4]
"""
@spec union(t, t) :: t
def union(set1, set2) do
target1 = target(set1)
target2 = target(set2)
if target1 == target2 do
target1.union(set1, set2)
else
target2.reduce(set2, { :cont, set1 }, fn v, acc ->
{ :cont, target1.put(acc, v) }
end) |> elem(1)
end
end
defp do_subset?(target1, target2, set1, set2) do
target1.reduce(set1, { :cont, true }, fn member, acc ->
case target2.member?(set2, member) do
true -> { :cont, acc }
_ -> { :halt, false }
end
end) |> elem(1)
end
defp unsupported_set(set) do
raise ArgumentError, message: "unsupported set: #{inspect set}"
end
end
|
lib/elixir/lib/set.ex
| 0.937826
| 0.690494
|
set.ex
|
starcoder
|
defmodule Bintreeviz.Renderer.Ascii do
@moduledoc """
  Simple ASCII rendering module which, given a tree structure, renders its nodes to STDOUT using ASCII characters such as dashes and pipes. It's implemented quite naively, but we'll fix that in a future iteration.
"""
@behaviour Bintreeviz.Renderer
alias Bintreeviz.Node
@doc "render/1 takes the starting node and converts it to a rendered representation using ASCII characters"
@spec render(Node.t(), Bintreeviz.render_options()) :: String.t()
@impl true
def render(%Node{} = root, options) do
do_render(Textmatrix.new(), root, options)
end
defp do_render(buffer, nil, _options), do: buffer
defp do_render(buffer, %Node{} = root, options) do
buffer
|> box_node(root, options)
|> do_render(root.left_child, options)
|> do_render(root.right_child, options)
|> connect(root, options)
end
defp box_node(buffer, %Node{} = root, options) do
charset = Keyword.get(options, :ascii_renderer_charset)
horizontal_line =
root
|> Node.width()
|> Kernel.-(2)
|> repeat_char(charset.horizontal_line())
buffer
|> Textmatrix.write(
root.x,
root.y,
"#{charset.top_left_corner()}#{horizontal_line}#{charset.top_right_corner()}"
)
|> Textmatrix.write(
root.x,
root.y + 1,
"#{charset.vertical_line()} #{root.label} #{charset.vertical_line()}"
)
|> Textmatrix.write(
root.x,
root.y + 2,
"#{charset.bottom_left_corner()}#{horizontal_line}#{charset.bottom_right_corner()}"
)
end
defp connect(buffer, %Node{left_child: nil, right_child: nil}, _options), do: buffer
defp connect(buffer, %Node{} = node, options) when is_list(options) do
case node do
%Node{left_child: %Node{} = left_child, right_child: %Node{} = right_child} ->
buffer
|> connect(node, left_child, options)
|> connect(node, right_child, options)
%Node{left_child: %Node{} = left_child, right_child: nil} ->
connect(buffer, node, left_child, options)
%Node{left_child: nil, right_child: %Node{} = right_child} ->
connect(buffer, node, right_child, options)
end
end
defp connect(buffer, %Node{} = root, %Node{} = child, options) when is_list(options) do
root_anchor_x = floor(Node.width(root) / 2 + root.x)
root_anchor_y = root.y + 2
child_anchor_x = floor(Node.width(child) / 2 + child.x)
child_anchor_y = child.y
charset = Keyword.get(options, :ascii_renderer_charset)
    # generate the horizontal connecting line between the nodes' anchor points
line =
(root_anchor_x - child_anchor_x)
|> abs()
|> Kernel.-(1)
|> repeat_char(charset.horizontal_line())
# decide which way the corner pieces should face
child_connection_char = child_connection_char(root_anchor_x, child_anchor_x, options)
buffer
# draw connecting characters
|> Textmatrix.write(root_anchor_x, root_anchor_y, charset.node_connector_bottom())
|> Textmatrix.write(root_anchor_x, root_anchor_y + 1, charset.vertical_line())
|> Textmatrix.write(root_anchor_x, root_anchor_y + 2, parent_connection_char(root, options))
|> Textmatrix.write(child_anchor_x, child_anchor_y - 1, charset.vertical_line())
    # draw connecting corner pieces
|> Textmatrix.write(child_anchor_x, child_anchor_y - 2, child_connection_char)
|> Textmatrix.write(child_anchor_x, child_anchor_y, charset.node_connector_top())
# draw connecting horizontal line
|> Textmatrix.write(min(root_anchor_x, child_anchor_x) + 1, root_anchor_y + 2, "#{line}")
end
defp parent_connection_char(%Node{} = root, options) do
charset = Keyword.get(options, :ascii_renderer_charset)
case root do
%Node{left_child: %Node{} = _left_child, right_child: %Node{} = _right_child} ->
# Got two children
charset.parent_split_char()
%Node{left_child: %Node{} = _left_child, right_child: nil} ->
# Got only left child
charset.parent_left_turn_char()
%Node{left_child: nil, right_child: %Node{} = _right_child} ->
# Got only right child
charset.parent_right_turn_char()
end
end
defp child_connection_char(root_anchor_x, child_anchor_x, options)
when root_anchor_x > child_anchor_x do
    # child is offset to the left
Keyword.get(options, :ascii_renderer_charset).child_right_turn_char()
end
defp child_connection_char(root_anchor_x, child_anchor_x, options)
when root_anchor_x < child_anchor_x do
    # child is offset to the right
Keyword.get(options, :ascii_renderer_charset).child_left_turn_char()
end
defp repeat_char(times, char) do
String.duplicate(char, times)
end
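  # Rough shape of the rendered output (actual characters depend on the
  # configured charset): each node is a three-line box, and parent/child
  # anchors are joined by a vertical drop, a horizontal run, and corner
  # pieces, e.g.
  #
  #       +------+
  #       | root |
  #       +--+---+
  #          |
  #     +----+
  #     |
  #   +-+----+
  #   | left |
  #   +------+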
end
|
lib/renderer/ascii.ex
| 0.835249
| 0.505981
|
ascii.ex
|
starcoder
|
defmodule PhoenixInlineSvg.Helpers do
@moduledoc """
The module that adds the view helpers to fetch
and render SVG files into safe HTML.
## New Way
The preferred way of using this library is to add the helpers to the quoted
`view` in your `web.ex` file.
```elixir
def view do
quote do
use PhoenixInlineSvg.Helpers, otp_app: :phoenix_inline_svg
end
end
```
Using the new way you can get SVG images using the functions:
```elixir
# Get an image with the default collection
svg_image("image_name")
# Get an image with a different collection
svg_image("image_name", "collection_name")
```
## Old Way
As an alternative, this module can be imported in the quoted `view` def of
`web/web.ex`, which will always read the SVG files from disk (unless you
are using a caching layer).
```
def view do
quote do
import PhoenixInlineSvg.Helpers
end
end
```
*Note:* If you are setting a custom directory for the SVG files and are using
Exrm or Distillery, you will need to ensure that the directory you set is in
the outputted `lib` directory of your application.
## In Both Configurations
By default SVG files are loaded from:
```
priv/static/svg/
```
The directory where SVG files are loaded from can be configured
by setting the configuration variable:
```
config :phoenix_inline_svg, dir: "some/other/dir"
```
Where `some/other/dir` is a directory located in the Phoenix
application directory.
"""
@doc """
The `__using__/1` macro for the Inline SVG library precompiles the SVG images
into static function definitions.
**Note** These cannot be changed at runtime, as the contents of the SVG files
are loaded directly into the build of the application.
If you want to support changing SVGs on the fly without a new deployment,
`import` the module instead.
Using this macro requires you to tell the `use` statement the name of your
OTP app.
## Examples
In the quoted `view` def of `web/web.ex` you should add:
```elixir
use PhoenixInlineSvg.Helpers, otp_app: :my_app_name
```
This will create pre-built functions:
```elixir
# Default collection
svg_image("image_name")
# Named collection
svg_image("image_name", "collection_name")
```
"""
defmacro __using__([otp_app: app_name]) do
svgs_path = Application.app_dir(app_name,
config_or_default(:dir, "priv/static/svg/"))
svgs_path
|> find_collection_sets
|> Enum.map(&create_cached_svg_image(&1, svgs_path))
end
defmacro __using__(_) do
raise "You must specify an OTP app!"
end
@doc """
Returns the contents of the SVG file `name` from the default collection.
Returns a safe HTML string with the contents of the SVG file
wrapped in an `i` HTML element with classes.
## Examples
```
<%= svg_image(@conn, "home") %>
```
Will result in output of:
```
<i class="generic-svgs generic-home-svg">
<svg>...</svg>
</i>
```
"""
def svg_image(conn, name) do
svg_image(conn, name, config_or_default(:default_collection, "generic"))
end
@doc """
Returns the contents of the SVG file `name` from the given collection.
Returns a safe HTML string with the contents of the SVG file
wrapped in an `i` HTML element with classes.
## Examples
```
<%= svg_image(@conn, "user", "fontawesome") %>
```
Will result in the output:
```
<i class="fontawesome-svgs fontawesome-user-svg">
<svg>...</svg>
</i>
```
"""
def svg_image(conn, name, collection) do
"#{collection}/#{name}.svg"
|> read_svg_file(conn)
|> safety_string
end
defp safety_string(html) do
{:safe, html}
end
defp read_svg_from_path(path) do
case File.read(path) do
{:ok, result} ->
result
{:error, _} ->
config_or_default(:not_found,
"<svg viewbox='0 0 60 60'>" <>
"<text x='0' y='40' font-size='30' font-weight='bold'" <>
"font-family='monospace'>Err</text></svg>")
end
end
defp read_svg_file(icon_path, conn) do
[
Application.app_dir(Phoenix.Controller.endpoint_module(conn).config(:otp_app)),
config_or_default(:dir, "priv/static/svg/"),
icon_path
]
|> Path.join
|> read_svg_from_path
end
defp config_or_default(config, default) do
case Application.fetch_env(:phoenix_inline_svg, config) do
:error ->
default
{:ok, data} ->
data
end
end
defp find_collection_sets(svgs_path) do
case File.ls(svgs_path) do
{:ok, listed_files} ->
listed_files
|> Stream.filter(fn(e) -> File.dir?(Path.join(svgs_path, e)) end)
|> Stream.flat_map(&map_collection(&1, svgs_path))
|> Enum.into([])
_ -> []
end
end
defp map_collection(coll, svgs_path) do
coll_path = Path.join(svgs_path, coll)
coll_path
|> File.ls!
|> Stream.map(&Path.join(coll_path, &1))
|> Stream.filter(&File.regular?(&1))
|> Stream.map(fn(e) -> {coll, e} end)
|> Enum.into([])
end
defp create_cached_svg_image({collection, name}, svgs_path) do
filename = name |> String.split(".") |> List.first
quote do
def svg_image(unquote(filename), unquote(collection)) do
unquote(
[svgs_path, collection, name]
|> Path.join
|> read_svg_from_path
|> safety_string
)
end
end
end
end
|
lib/phoenix_inline_svg/helpers.ex
| 0.898235
| 0.907926
|
helpers.ex
|
starcoder
|
defmodule PollutionDataStream do
@moduledoc false
def loadData() do
:pollution_sup.start_link()
data = importLinesFromCSV() |> Stream.map(&parseLine/1)
stations = identifyStations(data)
IO.puts("Loading stations time: #{getTime(fn -> loadStations(stations) end)}s")
IO.puts("Loading measurements time: #{getTime(fn -> loadValues(data) end)}s")
IO.puts("Station mean time: #{getTime(fn -> :pollution_gen_server.getStationMean({20.06, 49.986}, 'PM10') end)}s")
IO.puts("Daily mean time: #{getTime(fn -> :pollution_gen_server.getDailyMean('PM10', {2017, 5, 3}) end)}s")
IO.puts("Station mean value: #{:pollution_gen_server.getStationMean({20.06, 49.986}, 'PM10')}")
IO.puts("Daily mean value: #{:pollution_gen_server.getDailyMean('PM10', {2017, 5, 3})}")
end
defp loadStations(data) do
data |> Stream.map(fn {latitude,longitude} -> ['station_#{latitude}_#{longitude}',{latitude,longitude}] end)
|> Enum.each(fn [name, location] -> :pollution_gen_server.addStation(name,location) end)
end
defp loadValues(data) do
data |> Stream.map(fn x -> {x.location, x.datetime, 'PM10', x.pollutionLevel} end)
|> Enum.each(fn {location,datetime,type,value} -> :pollution_gen_server.addValue(location,datetime,type,value)end )
end
defp importLinesFromCSV() do
File.stream!("../res/pollution.csv")
end
defp parseLine(line) do
[date, time, latitude, longitude, value] = String.split(line, ",")
%{
:datetime => parseDatetime(date, time),
:location => parseLocation(latitude, longitude),
:pollutionLevel => parseValue(value)
}
end
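# Assuming a DD-MM-YYYY date such as "03-05-2017" and an HH:MM time such as
# "12:30" (inferred from the usage above), parseDatetime/2 yields
# {{2017, 5, 3}, {12, 30, 0}}.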
defp parseDatetime(date, time) do
date = String.split(date, "-") |> Enum.reverse |> Stream.map(&(Integer.parse(&1) |> elem(0)))
|> Enum.reduce({}, fn(element, tuple) -> Tuple.append(tuple, element) end)
{h, m} = String.split(time, ":") |> Stream.map(&(Integer.parse(&1) |> elem(0)))
|> Enum.reduce({}, fn(element, tuple) -> Tuple.append(tuple, element) end)
{date, {h, m, 0}}
end
defp parseLocation(latitude, longitude) do
{Float.parse(latitude) |> elem(0), Float.parse(longitude) |> elem(0)}
end
defp parseValue(value) do
Integer.parse(value) |> elem(0)
end
defp identifyStations(data) do
data |> Stream.map(fn x -> x.location end)
|> Stream.uniq() |> Enum.to_list()
end
defp getTime(fun) do
fun |> :timer.tc |> elem(0)
|> Kernel./(1_000_000)
end
end
|
src/pollution_data_stream.ex
| 0.706596
| 0.450903
|
pollution_data_stream.ex
|
starcoder
|
defmodule Wargaming.Warships.Ship do
@moduledoc """
Ship provides functions for interacting with the
WarGaming.net World of Warships Warships API.
"""
use Wargaming.ApiEndpoint, api: Wargaming.Warships
@ship_stats "/ships/stats/"
@doc """
Ship.stats_for_all_ships/2 searches WarGaming ship stats (in the configured region) and returns all ship statistics for the given account id.
[Official Reference](https://developers.wargaming.net/reference/all/wows/ships/stats/?application_id=123456&r_realm=na)
Returns `{:ok, response_map}` or `{:error, error_map}`
## Available Options
* `access_token` : Token for accessing private data on account.
* `extra` : Additional fields to add to the response. See [Official Reference](https://developers.wargaming.net/reference/all/wows/ships/stats/?application_id=123456&r_realm=na) for more information.
* `fields` : Comma separated list of fields. Embedded fields separated by periods. To exclude a field, prefix it with a `-`. Returns all fields if one of the fields is undefined.
* `in_garage` : Filter by ship's availability in the port. If not specified, all ships are returned. Available options (options are strings):
- "1" — Available in port.
- "0" — Ship not in port.
* `language` : Default "en". Available options:
- "cs" — Čeština
- "de" — Deutsch
- "en" — English (by default)
- "es" — Español
- "fr" — Français
- "ja" — 日本語
- "pl" — Polski
- "ru" — Русский
- "th" — ไทย
- "zh-tw" — 繁體中文
* `ship_id` : Ship id(s). Max limit of 100.
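## Example
A hypothetical call (the account id and option values are illustrative):
    Wargaming.Warships.Ship.stats_for_all_ships(1_234_567, %{language: "en"})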
"""
def stats_for_all_ships(account_id, opts \\ %{}) do
constructed_get(:account_id, account_id, @ship_stats, opts)
end
@doc """
See [Ship.stats_for_all_ships/2](#stats_for_all_ships/2). Convenience function for specifying specific ship(s).
"""
def stats_for_ship(account_id, ship_ids, opts \\ %{})
def stats_for_ship(account_id, ship_ids, opts) when is_list(ship_ids) do
opts =
opts
|> Map.merge(%{ship_id: Enum.join(ship_ids, ",")})
stats_for_all_ships(account_id, opts)
end
def stats_for_ship(account_id, ship_id, opts) do
opts =
opts
|> Map.merge(%{ship_id: ship_id})
stats_for_all_ships(account_id, opts)
end
end
|
lib/wargaming/warships/ship.ex
| 0.87582
| 0.506408
|
ship.ex
|
starcoder
|
defmodule ExTectonicdb.Connection do
@moduledoc """
Handles connection to the database socket
`tdb-server` uses the first byte of the reply to denote success/failure, so `:gen_tcp` needs to connect with `packet: :raw`.
Incoming message format: 1 byte for success/failure, 8 bytes big endian (64 bit) for the length n, and n bytes for the body.
Outgoing message format: 4 bytes big endian (32 bit) for the length n, and n bytes for the body.
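For example, sending the 5-byte body `"hello"` is encoded as `<<5::32-big, "hello">>`, and a successful 5-byte reply arrives as `<<1, 5::64-big, "hello">>` (a leading `0` byte marks failure instead).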
"""
require Logger
use GenServer
defmodule State do
@type config :: ExTectonicdb.Config.t()
@type socket :: :gen_tcp.socket()
@type message_length :: non_neg_integer
@type buffered_message :: list(non_neg_integer) | nil
@type t :: %State{
socket: socket,
config: config,
queue: :queue.queue(),
buffer: {message_length, buffered_message}
}
@enforce_keys ~w[config buffer queue]a
defstruct ~w[config buffer socket queue]a
end
def send_message(pid, message) do
GenServer.call(pid, {:message, message})
end
def start_link(args \\ []) do
state = %State{
config: Keyword.get(args, :config, %ExTectonicdb.Config{}),
queue: :queue.new(),
buffer: {0, nil}
}
opts = Keyword.take(args, [:name])
GenServer.start_link(__MODULE__, state, opts)
end
def init(state) do
{:ok, state, {:continue, :connect}}
end
def handle_continue(:connect, %{config: config} = state) do
:ok = Logger.info("Connecting to #{:inet.ntoa(config.host)}:#{config.port}")
case :gen_tcp.connect(
config.host,
config.port,
Keyword.merge(config.tcp_opts, packet: :raw, active: true)
) do
{:ok, socket} -> {:noreply, %{state | socket: socket}}
{:error, reason} -> disconnect(state, reason)
end
end
# start of new message
def handle_info({:tcp, socket, data}, %{socket: socket, buffer: {0, _buf}} = state) do
{:ok, [buffer: buffer_size]} = :inet.getopts(socket, [:buffer])
{_success, _msg, msg_len} = from_packet(data)
new_state =
if msg_len < buffer_size do
process_and_reply(state, data)
else
%{state | buffer: {msg_len, data}}
end
{:noreply, new_state}
end
# buffered message
def handle_info({:tcp, socket, data}, %{socket: socket, buffer: {msg_len, buf}} = state) do
agg_data = buf ++ data
new_state =
if length(agg_data) >= msg_len do
process_and_reply(state, agg_data)
else
%{state | buffer: {msg_len, agg_data}}
end
{:noreply, new_state}
end
def handle_info({:tcp_closed, _}, state), do: {:stop, :normal, state}
def handle_info({:tcp_error, _}, state), do: {:stop, :normal, state}
def handle_call({:message, message}, from, %{socket: socket, queue: queue} = state) do
# format message to binary and send over tcp
packet = to_packet(message)
:ok = :gen_tcp.send(socket, packet)
# queue client for later reply
q = :queue.in(from, queue)
state = %{state | queue: q}
{:noreply, state}
end
def disconnect(state, reason) do
:ok = Logger.info("Disconnected: #{reason}")
{:stop, :normal, state}
end
defp process_and_reply(state, data) do
{{:value, from}, new_queue} = :queue.out(state.queue)
{success, msg, _length} = from_packet(data)
GenServer.reply(from, {success, msg})
%{state | queue: new_queue, buffer: {0, nil}}
end
# length prefixes below are bit sizes, for use in binary pattern matches
@send_length_bits 32
defp to_packet(msg) do
size = byte_size(msg)
:binary.bin_to_list(<<size::@send_length_bits, msg::binary>>)
end
@recv_length_bits 64
defp from_packet(packet) do
case :binary.list_to_bin(packet) do
<<1, len::@recv_length_bits, msg::binary>> -> {:ok, msg, len}
<<0, len::@recv_length_bits, msg::binary>> -> {:error, msg, len}
end
end
end
|
lib/ex_tectonicdb/connection.ex
| 0.706596
| 0.401834
|
connection.ex
|
starcoder
|
defmodule Himamo.BaumWelch.StepM do
@moduledoc ~S"""
Defines components of the M-step of the Baum-Welch algorithm (Maximization).
Maximizes the model's parameters.
"""
alias Himamo.{Matrix, Model, ObsSeq, Logzero}
alias Himamo.BaumWelch.Stats
import Logzero
@doc ~S"""
Re-estimates the `A` variable.
Each entry in `A=[a_{i,j}]` is recomputed as: the expected number of
transitions from state `S_i` to state `S_j` divided by the expected number
of transitions out of state `S_i`.
This is part of the _M_ step of Baum-Welch.
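In the usual Baum-Welch notation, summing over `t` (and over all observation
sequences, each weighted by its probability):
    â(i, j) = Σ_t ξ_t(i, j) / Σ_t γ_t(i)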
"""
@spec reestimate_a(Model.t, Himamo.BaumWelch.stats_list) :: Matrix.t
def reestimate_a(%Model{n: num_states}, stats_list) do
states_range = 0..num_states-1
Stream.flat_map(stats_list, fn({
%ObsSeq{len: obs_len},
prob_k,
%Stats{xi: xi, gamma: gamma}
}) ->
for i <- states_range, j <- states_range do
{numerator, denominator} =
Enum.map(0..obs_len-2, fn (t) ->
curr_log_xi = Matrix.get(xi, {t, i, j})
curr_log_gamma = Matrix.get(gamma, {t, i})
{curr_log_xi, curr_log_gamma}
end)
|> Enum.reduce({Logzero.const, Logzero.const}, fn ({numer, denom}, {numer_sum, denom_sum}) ->
{ext_log_sum(numer_sum, numer), ext_log_sum(denom_sum, denom)}
end)
{{i, j}, {ext_log_product(numerator, prob_k), ext_log_product(denominator, prob_k)}}
end
end)
|> sum_fraction_parts
|> fractions_to_numbers
|> Enum.into(Himamo.Model.A.new(num_states))
end
@doc ~S"""
Re-estimates the `B` variable.
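In the usual notation, with `o_t` denoting the observed symbol at time `t`:
    b̂_j(k) = Σ_{t : o_t = k} γ_t(j) / Σ_t γ_t(j)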
"""
@spec reestimate_b(Model.t, Himamo.BaumWelch.stats_list) :: Matrix.t
def reestimate_b(%Model{n: num_states, m: num_symbols}, stats_list) do
states_range = 0..num_states-1
symbols_range = 0..num_symbols-1
Enum.flat_map(stats_list, fn({
%ObsSeq{seq: observations},
prob_k,
%Stats{gamma: gamma}
}) ->
observations = List.delete_at(observations, -1)
for j <- states_range, k <- symbols_range do
{numerator, denominator} =
Stream.with_index(observations)
|> Enum.reduce({Logzero.const, Logzero.const}, fn({o, t}, {numer, denom}) ->
curr_log_gamma = Matrix.get(gamma, {t, j})
denom = ext_log_sum(denom, curr_log_gamma)
numer = if o == k do
ext_log_sum(numer, curr_log_gamma)
else
numer
end
{numer, denom}
end)
{{j, k}, {ext_log_product(numerator, prob_k), ext_log_product(denominator, prob_k)}}
end
end)
|> sum_fraction_parts
|> fractions_to_numbers
|> Enum.into(Himamo.Model.B.new(n: num_states, m: num_symbols))
end
defp sum_fraction_parts(fractions) do
Enum.reduce(fractions, Map.new, fn({{_i, _j} = key, {numer, denom}}, sums) ->
{curr_numer, curr_denom} = Map.get(sums, key, {Logzero.const, Logzero.const})
Map.put(sums, key, {ext_log_sum(numer, curr_numer), ext_log_sum(denom, curr_denom)})
end)
end
defp fractions_to_numbers(fractions) do
Stream.map(fractions, fn({key, {numerator, denominator}}) ->
{key, ext_exp(ext_log_product(numerator, -denominator))}
end)
end
@doc ~S"""
Re-estimates the `π` variable.
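Uses the expected state distribution at the first time step:
    π̂_i = Σ_j ξ_1(i, j) = γ_1(i)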
"""
@spec reestimate_pi(Model.t, Himamo.BaumWelch.stats_list) :: Model.Pi.t
def reestimate_pi(model, [{obs_seq, _, stats} |_]) do
%ObsSeq{prob: obs_prob} = obs_seq
%Stats{alpha: alpha, beta: beta} = stats
states_range = 0..(model.n - 1)
row =
Himamo.BaumWelch.StepE.compute_xi_row(model, alpha, beta, obs_prob, 0)
|> Enum.into(Matrix.new({1, model.n, model.n}))
for i <- states_range do
for j <- states_range do
Matrix.get(row, {0, i, j})
end
|> Logzero.sum_log_values
|> Logzero.ext_exp
end
|> Model.Pi.new
end
end
|
lib/himamo/baum_welch/step_m.ex
| 0.830732
| 0.819713
|
step_m.ex
|
starcoder
|
defmodule Day24 do
@moduledoc """
AoC 2019, Day 24 - Planet of Discord
"""
@doc """
Calculate biodiversity rating for input
"""
def part1 do
Util.priv_file(:day24, "day24_input.txt")
|> File.read!()
|> first_repeated_bio()
end
@doc """
Count number of bugs in recursive grid after 200 minutes
"""
def part2 do
Util.priv_file(:day24, "day24_input.txt")
|> File.read!()
|> recursive_bug_count(200)
end
def recursive_bug_count(str, steps) do
parse(str)
|> Enum.into([])
|> Enum.map(fn {{x, y}, val} -> {{x, y, 0}, val} end)
|> Enum.into(%{})
|> recursive_step(steps)
|> Map.values()
|> Enum.filter(fn v -> v == :bug end)
|> Enum.count()
end
def recursive_step(map, 0), do: map
def recursive_step(map, step) do
new_map = add_next_levels(map)
new_map
|> Enum.map(&(update_node(&1, map)))
|> Enum.into(%{})
|> recursive_step(step-1)
end
defp add_next_levels(map) do
{{_, _, z_min}, {_, _, z_max}} = Enum.min_max_by(Map.keys(map), fn {_x, _y, z} -> z end)
for z <- [z_min-1, z_max+1], x <- 0..4, y <- 0..4 do
{{x, y, z}, :empty}
end
|> Enum.into(%{})
|> Map.merge(map)
end
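# Part 2 neighbor rules: level z nests a finer grid (z - 1) inside its center
# cell {2, 2}, while its outer edge borders cells of the coarser grid (z + 1).
# The clauses below spell out those special cases; bare {x, y} tuples belong
# to the flat part 1 grid.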
defp neighbors({x, y}), do: [{x+1, y}, {x-1, y}, {x, y+1}, {x, y-1}]
defp neighbors({2, 2, _z}), do: []
defp neighbors({0, 0, z}) do
[{1, 0, z}, {0, 1, z}, {2, 1, z+1}, {1, 2, z+1}]
end
defp neighbors({4, 0, z}) do
[{3, 0, z}, {4, 1, z}, {2, 1, z+1}, {3, 2, z+1}]
end
defp neighbors({0, 4, z}) do
[{0, 3, z}, {1, 4, z}, {1, 2, z+1}, {2, 3, z+1}]
end
defp neighbors({4, 4, z}) do
[{3, 4, z}, {4, 3, z}, {3, 2, z+1}, {2, 3, z+1}]
end
defp neighbors({0, y, z}) do
[{0, y-1, z}, {1, y, z}, {0, y+1, z}, {1, 2, z+1}]
end
defp neighbors({4, y, z}) do
[{4, y-1, z}, {3, y, z}, {4, y+1, z}, {3, 2, z+1}]
end
defp neighbors({x, 0, z}) do
[{x-1, 0, z}, {x+1, 0, z}, {x, 1, z}, {2, 1, z+1}]
end
defp neighbors({x, 4, z}) do
[{x-1, 4, z}, {x+1, 4, z}, {x, 3, z}, {2, 3, z+1}]
end
defp neighbors({2, 1, z}) do
this = [{2, 0, z}, {1, 1, z}, {3, 1, z}]
down = for x <- 0..4 do
{x, 0, z-1}
end
List.flatten([this | down])
end
defp neighbors({1, 2, z}) do
this = [{1, 1, z}, {0, 2, z}, {1, 3, z}]
right = for y <- 0..4 do
{0, y, z-1}
end
List.flatten([this | right])
end
defp neighbors({3, 2, z}) do
this = [{3, 1, z}, {3, 3, z}, {4, 2, z}]
left = for y <- 0..4 do
{4, y, z-1}
end
List.flatten([this | left])
end
defp neighbors({2, 3, z}) do
this = [{1, 3, z}, {3, 3, z}, {2, 4, z}]
up = for x <- 0..4 do
{x, 4, z-1}
end
List.flatten([this | up])
end
defp neighbors({x, y, z}), do: [{x+1, y, z}, {x-1, y, z}, {x, y+1, z}, {x, y-1, z}]
def first_repeated_bio(str) do
parse(str)
|> step(MapSet.new())
end
defp step(map, seen) do
bio = biodiversity(map)
if MapSet.member?(seen, bio) do
bio
else
step(next_map(map), MapSet.put(seen, bio))
end
end
defp next_map(map) do
map
|> Enum.map(&(update_node(&1, map)))
|> Enum.into(%{})
end
defp update_node({loc, val}, map) do
cnt = neighbors(loc)
|> Enum.map(fn pt -> Map.get(map, pt, :empty) end)
|> Enum.reduce(0, fn (v, acc) -> if :empty == v, do: acc, else: acc+1 end)
new_val = cond do
val == :bug && cnt != 1 -> :empty
val == :empty && (cnt == 1 || cnt == 2) -> :bug
true -> val
end
{loc, new_val}
end
defp biodiversity(map) do
Map.keys(map)
|> Enum.sort_by(&sort_keys/1)
|> Enum.with_index()
|> Enum.reduce(0, &(bio_node(map, &1, &2)))
end
defp bio_node(map, {loc, idx}, acc) do
v = Map.get(map, loc, :empty)
if v == :empty do
acc
else
acc + :math.pow(2, idx)
end
end
defp sort_keys({x, y}), do: {y, x}
defp parse(str) do
String.split(str, "\n", trim: true)
|> Enum.with_index()
|> Enum.map(&parse_row/1)
|> List.flatten()
|> Enum.into(%{})
end
defp parse_row({str, row}) do
String.graphemes(str)
|> Enum.map(fn c -> if c == ".", do: :empty, else: :bug end)
|> Enum.with_index()
|> Enum.map(fn {v, c} -> {{c, row}, v} end)
end
end
|
apps/day24/lib/day24.ex
| 0.647464
| 0.543954
|
day24.ex
|
starcoder
|
defmodule Mix.Tasks.Compile.Unused do
use Mix.Task.Compiler
@shortdoc "Find unused public functions"
@moduledoc ~S"""
Compile project and find uncalled public functions.
### Warning
This isn't a perfect solution, as it will not find dynamic calls of the form:
apply(mod, func, args)
This means that, for example, if you have a custom `child_spec/1` definition,
that function will be reported as unused even when you are using it
indirectly through your supervisor.
## Configuration
You can mark functions as used by adding patterns under `unused: [ignore: [⋯]]`
in your project configuration:
def project do
[
# ⋯
unused: [
ignore: [
{MyApp.Foo, :child_spec, 1}
]
],
# ⋯
]
end
### Patterns
`unused` patterns are similar to the match specs from Erlang, but extend
their API to be much more flexible. The simplest possible pattern matches
exactly one function, using a 3-tuple with the module, function name, and
arity as the respective elements, e.g.:
[{Foo, :bar, 1}]
This will match the function `Foo.bar/1`. However, we often want to use
broader patterns, and there are a few tricks for that. The first is `:_`,
a wildcard that matches any value, e.g.:
[{:_, :child_spec, 1}]
This will ignore all `child_spec/1` functions in your application (you
should probably add it, as `unused` cannot tell that such a function is
used by a supervisor, since that is a dynamic call).
In addition to wildcard matches, which often aren't what we really want, we
can use regular expressions for the module and function name, or a range
for the arity:
[
{:_, ~r/^__.+__\??$/, :_},
{~r/^MyAppWeb\..*Controller/, :_, 2},
{MyApp.Test, :foo, 1..2}
]
To make the ignore specification list less verbose there is also an option
to omit a trailing `:_`, i.e. `{Foo, :bar, :_}` is the same as `{Foo, :bar}`.
If you want to ignore a whole module, you can just use `Foo` (this also
works for regular expressions).
To ignore warnings about unused structs you need to use a "special" syntax
of the form `{StructModule, :__struct__, 0}`.
### Documentation metadata
Functions that have `export: true` in their metadata will be automatically
treated as exports for usage by external parties and will not be marked as
unused.
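For example, a function intended for external consumers (the name below is
illustrative) can be marked as exported via its documentation metadata:
    @doc export: true
    def fetch_data(id), do: {:ok, id}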
## Options
- `severity` - severity of the reported messages, defaults to `hint`.
Other allowed levels are `information`, `warning`, and `error`.
- `warnings-as-errors` - if the `severity` is set to `:warning` and there is
any report, then fail compilation with exit code `1`.
"""
alias Mix.Task.Compiler.Diagnostic
@recursive true
@manifest "unused.manifest"
alias MixUnused.Tracer
alias MixUnused.Filter
alias MixUnused.Exports
@impl true
def run(argv) do
{:ok, _pid} = Tracer.start_link()
mix_config = Mix.Project.config()
config = MixUnused.Config.build(argv, Keyword.get(mix_config, :unused, []))
tracers = Code.get_compiler_option(:tracers)
[manifest] = manifests()
Mix.Task.Compiler.after_compiler(
:app,
&after_compiler(&1, mix_config[:app], tracers, config, manifest)
)
Code.put_compiler_option(:tracers, [Tracer | tracers])
{:ok, []}
end
@impl true
def manifests, do: [Path.join(Mix.Project.manifest_path(), @manifest)]
@impl true
def clean, do: Enum.each(manifests(), &File.rm/1)
defp after_compiler({status, diagnostics}, app, tracers, config, manifest) do
# Cleanup tracers after compilation
Code.put_compiler_option(:tracers, tracers)
cache =
case File.read(manifest) do
{:ok, data} -> :erlang.binary_to_term(data)
_ -> %{}
end
data = Map.merge(cache, Tracer.get_data())
_ = File.write!(manifest, :erlang.term_to_binary(data))
all_functions =
app
|> Exports.application()
|> Filter.reject_matching(config.ignore)
error_on_messages =
config.severity == :error or
(config.severity == :warning and config.warnings_as_errors)
config.checks
|> MixUnused.Analyze.analyze(data, all_functions, config)
|> Enum.sort()
|> tap_all(&print_diagnostic/1)
|> case do
[] ->
{status, diagnostics}
messages when error_on_messages ->
{:error, messages ++ diagnostics}
messages ->
{status, messages ++ diagnostics}
end
end
defp print_diagnostic(%Diagnostic{details: %{mfa: {_, :__struct__, 1}}}),
do: nil
defp print_diagnostic(diag) do
file = Path.relative_to_cwd(diag.file)
Mix.shell().info([
level(diag.severity),
diag.message,
"\n ",
file,
?:,
Integer.to_string(diag.position),
"\n"
])
end
# Elixir < 1.12 do not have tap, so we provide custom implementation
defp tap_all(list, fun) do
Enum.each(list, fun)
list
end
defp level(level), do: [:bright, color(level), "#{level}: ", :reset]
defp color(:error), do: :red
defp color(:warning), do: :yellow
defp color(_), do: :blue
end
|
lib/mix/tasks/compile.unused.ex
| 0.783326
| 0.539287
|
compile.unused.ex
|
starcoder
|
defmodule Oban.Worker do
@moduledoc """
Defines a behavior and macro to guide the creation of worker modules.
Worker modules do the work of processing a job. At a minimum they must define a `perform/1`
function, which will be called with an `args` map.
## Defining Workers
Define a worker to process jobs in the `events` queue:
defmodule MyApp.Workers.Business do
use Oban.Worker, queue: "events", max_attempts: 10
@impl true
def perform(%Oban.Job{attempt: attempt}) when attempt > 3 do
IO.inspect(attempt)
end
def perform(args) do
IO.inspect(args)
end
end
The `perform/1` function receives an `Oban.Job` struct as the sole argument. If no clause
matches on `%Oban.Job{}` then the `args` are extracted and `perform/1` is called again with the
args. This allows workers to change the behavior of `perform/1` based on attributes of the Job,
e.g. the number of attempts or when it was inserted.
A job is considered complete if `perform/1` returns a non-error value, and it doesn't raise an
exception or have an unhandled exit.
Any of these return values or error events will fail the job:
* return `{:error, error}`
* return `:error`
* an unhandled exception
* an unhandled exit or throw
As an example of error tuple handling, this worker may return an error tuple when the value is
less than one:
defmodule MyApp.Workers.ErrorExample do
use Oban.Worker
@impl true
def perform(%{value: value}) do
if value > 1 do
:ok
else
{:error, "invalid value given: " <> inspect(value)}
end
end
end
## Enqueuing Jobs
All workers implement a `new/2` function that converts an args map into a job changeset
suitable for inserting into the database for later execution:
%{in_the: "business", of_doing: "business"}
|> MyApp.Workers.Business.new()
|> MyApp.Repo.insert()
The worker's defaults may be overridden by passing options:
%{vote_for: "none of the above"}
|> MyApp.Workers.Business.new(queue: "special", max_attempts: 5)
|> MyApp.Repo.insert()
See `Oban.Job` for all available options.
## Customizing Backoff
When jobs fail they may be retried again in the future using a backoff algorithm. By default
the backoff is exponential with a fixed padding of 15 seconds. This may be too aggressive for
jobs that are resource intensive or need more time between retries. To make backoff scheduling
flexible a worker module may define a custom backoff function.
This worker defines a backoff function that delays retries using a variant of the historic
Resque/Sidekiq algorithm:
defmodule MyApp.SidekiqBackoffWorker do
use Oban.Worker
@impl true
def backoff(attempt) do
:math.pow(attempt, 4) + 15 + :rand.uniform(30) * attempt
end
@impl true
def perform(args) do
:do_business
end
end
Here are some alternative backoff strategies to consider:
* **constant** — delay by a fixed number of seconds, e.g. 1→15, 2→15, 3→15
* **linear** — delay for the same number of seconds as the current attempt, e.g. 1→1, 2→2, 3→3
* **squared** — delay by attempt number squared, e.g. 1→1, 2→4, 3→9
* **sidekiq** — delay by a base amount plus some jitter, e.g. 1→32, 2→61, 3→135
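For example, the linear strategy above could be implemented as follows (a
minimal sketch; the module name is illustrative):
    defmodule MyApp.LinearBackoffWorker do
      use Oban.Worker
      @impl true
      # Wait N seconds before the Nth retry.
      def backoff(attempt), do: attempt
      @impl true
      def perform(_args), do: :ok
    end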
"""
@moduledoc since: "0.1.0"
alias Oban.Job
@doc """
Build a job changeset for this worker with optional overrides.
See `Oban.Job.new/2` for the available options.
"""
@callback new(args :: Job.args(), opts :: [Job.option()]) :: Ecto.Changeset.t()
@doc """
Calculate the execution backoff, or the number of seconds to wait before retrying a failed job.
"""
@callback backoff(attempt :: pos_integer()) :: pos_integer()
@doc """
The `perform/1` function is called when the job is executed.
The function is passed a job's args, which is always a map with string keys.
The return value is not important. If the function executes without raising an exception it is
considered a success. If the job raises an exception it is a failure and the job may be
scheduled for a retry.
"""
@callback perform(job_or_args :: Job.t() | Job.args()) :: term()
@doc false
defmacro __before_compile__(_env) do
quote do
@impl true
def perform(%Job{args: args}), do: perform(args)
def perform(args) when is_map(args), do: :ok
end
end
@doc false
defmacro __using__(opts) do
quote location: :keep do
alias Oban.{Job, Worker}
@before_compile Worker
@behaviour Worker
@opts unquote(opts)
|> Keyword.take([:queue, :max_attempts])
|> Keyword.put(:worker, to_string(__MODULE__))
@impl Worker
def new(args, opts \\ []) when is_map(args) do
Job.new(args, Keyword.merge(@opts, opts))
end
@impl Worker
def backoff(attempt) when is_integer(attempt) do
Worker.default_backoff(attempt)
end
defoverridable Worker
end
end
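# With the default base of 15, default_backoff/2 yields 17, 19, 23 and 31
# seconds for attempts 1 through 4.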
@doc false
@spec default_backoff(pos_integer(), non_neg_integer()) :: pos_integer()
def default_backoff(attempt, base_backoff \\ 15) when is_integer(attempt) do
trunc(:math.pow(2, attempt) + base_backoff)
end
end
|
lib/oban/worker.ex
| 0.903457
| 0.680209
|
worker.ex
|
starcoder
|
defmodule Day12.Moon do
defstruct position: %Day12.Vector{}, velocity: %Day12.Vector{}
def run(1) do
# Hardcode positions, instead of trying to parse the input file
[
at(-7, -1, 6),
at(6, -9, -9),
at(-12, 2, -7),
at(4, -17, -12)
]
|> step(1_000)
|> total_energy
|> IO.puts
end
def run(2) do
# Hardcode positions, instead of trying to parse the input file
moons = [
at(-7, -1, 6),
at(6, -9, -9),
at(-12, 2, -7),
at(4, -17, -12)
]
{x, y, z} = detect_periods(moons)
ElixirMath.lcm(x, y) |> ElixirMath.lcm(z) |> IO.puts
end
def at(x, y, z), do: %__MODULE__{position: Day12.Vector.new(x, y, z)}
def step(moons, 0), do: moons
def step(moons, n) do
moons
|> apply_gravity([])
|> Enum.map(&move/1)
|> step(n - 1)
end
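# The x, y and z axes evolve independently, so part 2 finds each axis' cycle
# length separately and combines them with the least common multiple.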
def detect_periods(moons) do
{xk, yk, zk} = keys_for(moons)
do_detect_periods(moons, %{xk => 0}, %{yk => 0}, %{zk => 0}, {0, 0, 0}, 1)
end
defp do_detect_periods(_, _, _, _, {x, y, z}, _) when x != 0 and y != 0 and z != 0, do: {x, y, z}
defp do_detect_periods(moons, xs, ys, zs, {xp, yp, zp}, n) do
moons = step(moons, 1)
{xk, yk, zk} = keys_for(moons)
xp = if xp == 0 && Map.has_key?(xs, xk) do
n - Map.get(xs, xk)
else
xp
end
yp = if yp == 0 && Map.has_key?(ys, yk) do
n - Map.get(ys, yk)
else
yp
end
zp = if zp == 0 && Map.has_key?(zs, zk) do
n - Map.get(zs, zk)
else
zp
end
do_detect_periods(moons, Map.put(xs, xk, n), Map.put(ys, yk, n), Map.put(zs, zk, n), {xp, yp, zp}, n + 1)
end
defp keys_for([a, b, c, d]) do
{
[a.position.x, a.velocity.x, b.position.x, b.velocity.x, c.position.x, c.velocity.x, d.position.x, d.velocity.x],
[a.position.y, a.velocity.y, b.position.y, b.velocity.y, c.position.y, c.velocity.y, d.position.y, d.velocity.y],
[a.position.z, a.velocity.z, b.position.z, b.velocity.z, c.position.z, c.velocity.z, d.position.z, d.velocity.z]
}
end
def total_energy(moons) do
moons
|> Enum.map(&energy_of/1)
|> Enum.reduce(&Kernel.+/2)
end
def gravitate(a, b) do
vec = Day12.Vector.new(velo_adjust(a.position.x, b.position.x),
velo_adjust(a.position.y, b.position.y),
velo_adjust(a.position.z, b.position.z))
%Day12.Moon{a | velocity: Day12.Vector.adjust(a.velocity, vec)}
end
def move(moon), do: %__MODULE__{moon | position: Day12.Vector.adjust(moon.position, moon.velocity)}
defp apply_gravity([], moons), do: moons |> Enum.reverse
defp apply_gravity([moon | rest], done) do
moon = Enum.reduce(rest, moon, fn x, acc -> gravitate(acc, x) end)
moon = Enum.reduce(done, moon, fn x, acc -> gravitate(acc, x) end)
apply_gravity(rest, [moon | done])
end
defp energy_of(moon) do
potential = abs(moon.position.x) + abs(moon.position.y) + abs(moon.position.z)
kinetic = abs(moon.velocity.x) + abs(moon.velocity.y) + abs(moon.velocity.z)
potential * kinetic
end
defp velo_adjust(a, a), do: 0
defp velo_adjust(a, b) when a > b, do: -1
defp velo_adjust(a, b) when a < b, do: 1
end
|
year_2019/lib/day_12/moon.ex
| 0.732974
| 0.61057
|
moon.ex
|
starcoder
|
defmodule Routemaster.Drains.Notify do
@moduledoc """
Drain plug to declare listener modules that will be notified
of the received events. Listeners must implement a `call/1`
function, which will be invoked with a list of `Routemaster.Drain.Event`
structs as its argument.
The listeners' `call/1` function is invoked in a supervised `Task`,
so all listeners are to be considered asynchronous and independent.
By default a listener module will be notified of all events in the
current payload, but it's possible to filter by topic. It's also
possible to subscribe multiple listeners at the same time.
This is convenient when topic filters are applied, because the
filtering function will be executed only once for all the listeners
declared together.
### Options
* `:listener` (or `:listeners`, plural): either the listener module
or a list of listener modules.
* `:only`: either a binary or a list of binaries. The listener will
only be notified of events for this topic or topics.
* `:except`: either a binary or a list of binaries. The opposite
of `:only`.
If no events match the filters, listeners are not notified. This
means that the listeners' `call/1` functions are never invoked
with empty lists.
After notifying the listener(s), the full event payload is passed
down in the pipeline unchanged, which means that multiple `Notify`
drains can be configured together.
### Examples
```elixir
alias Routemaster.Drains.Notify
# listen to all events from all topics
drain Notify, listener: GlobalListener
# only listen to one or some topics
drain Notify, listener: BurgerListener, only: "burgers"
drain Notify, listener: FruitListener, only: ~w(apple orange)
# listen to all but some topics
drain Notify, listener: VeggieListener, except: "meat"
drain Notify, listener: NoFishListener, except: ~w(cod seabass)
# notify multiple listeners for a selection of topics
drain Notify,
listeners: [DessertListener, SweetListener],
only: ~w(pies cakes ice_creams)
```
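A listener is any module exposing a `call/1` function, e.g. (a minimal
sketch):
```elixir
defmodule GlobalListener do
  # Receives a non-empty list of Routemaster.Drain.Event structs.
  def call(events) do
    Enum.each(events, &IO.inspect/1)
  end
end
```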
"""
@supervisor DrainEvents.TaskSupervisor
def init(opts) do
listeners = fetch_listeners!(opts)
filter =
case {Keyword.fetch(opts, :only), Keyword.fetch(opts, :except)} do
{:error, :error} ->
:all
{{:ok, only}, :error} ->
{:only, normalize(only)}
{:error, {:ok, except}} ->
{:except, normalize(except)}
_ ->
raise "The #{__MODULE__} drain can't be configured with both :only and :except filters"
end
[listeners: listeners, filter: filter]
end
defp fetch_listeners!(kw) do
case Keyword.get(kw, :listener, Keyword.get(kw, :listeners)) do
nil ->
raise KeyError, key: ":listener or :listeners", term: kw
listeners ->
List.wrap(listeners)
end
end
defp normalize(topics) when is_list(topics) do
topics
|> List.flatten()
|> Enum.map(&normalize/1)
end
defp normalize(topic) when is_binary(topic), do: topic
defp normalize(topic), do: to_string(topic)
def call(conn, [listeners: listeners, filter: filter]) do
events = select(conn.assigns.events, filter)
notify(events, listeners)
conn
end
defp notify([], _), do: nil
defp notify(events, listeners) do
for listener <- listeners do
Task.Supervisor.start_child(@supervisor, fn() ->
listener.call(events)
end)
end
end
defp select(events, :all), do: events
defp select(events, {:only, target}) do
Enum.filter(events, &match(&1.topic, target))
end
defp select(events, {:except, target}) do
Enum.reject(events, &match(&1.topic, target))
end
defp match(item, target) when is_list(target) do
item in target
end
defp match(item, target) do
item == target
end
end
|
lib/routemaster/drain/drains/notify.ex
| 0.848941
| 0.880746
|
notify.ex
|
starcoder
|
defmodule FE.Review do
@moduledoc """
`FE.Review` is a data type similar to `FE.Result`, made for representing
output of a computation that either succeed (`accepted`) or fail (`rejected`),
but that might continue despite of issues encountered (`issues`).
One could say that the type is a specific implementation of a writer monad,
that collects issues encountered during some computation.
For instance, it might be used for validation of a user input, when we don't
want to stop the process of validation when we encounter the first mistake,
but rather we would like to collect all the user's mistakes before returning
feedback to her.
"""
@type t(a, b) :: {:accepted, a} | {:issues, a, [b]} | {:rejected, [b]}
alias FE.{Maybe, Result}
defmodule Error do
defexception [:message]
end
@doc """
Creates a `FE.Review` representing a successful output of a computation.
"""
@spec accepted(a) :: t(a, any) when a: var
def accepted(value), do: {:accepted, value}
@doc """
Creates a `FE.Review` representing an erroneous output of a computation with
a list of issues encountered during the computation.
"""
@spec rejected([b]) :: t(any, b) when b: var
def rejected(issues) when is_list(issues), do: {:rejected, issues}
@doc """
Creates a `FE.Review` representing a problematic output of a computation
that completed with some issues.
"""
@spec issues(a, [b]) :: t(a, b) when a: var, b: var
def issues(value, issues) when is_list(issues), do: {:issues, value, issues}
@doc """
Transforms a successful or a problematic value in a `FE.Review` using
a provided function.
## Examples
iex> FE.Review.map(FE.Review.rejected(["foo"]), &String.length/1)
FE.Review.rejected(["foo"])
iex> FE.Review.map(FE.Review.issues("foo", ["b", "ar"]), &String.length/1)
FE.Review.issues(3, ["b", "ar"])
iex> FE.Review.map(FE.Review.accepted("baz"), &String.length/1)
FE.Review.accepted(3)
"""
@spec map(t(a, b), (a -> c)) :: t(c, b) when a: var, b: var, c: var
def map(review, f)
def map({:accepted, value}, f), do: accepted(f.(value))
def map({:issues, value, issues}, f), do: issues(f.(value), issues)
def map({:rejected, issues}, _), do: rejected(issues)
@doc """
Transform issues stored in a `FE.Review` using a provided function.
## Examples
iex> FE.Review.map_issues(FE.Review.accepted("ack!"), &String.length/1)
FE.Review.accepted("ack!")
iex> FE.Review.map_issues(FE.Review.issues("a", ["bb", "ccc"]), &String.length/1)
FE.Review.issues("a", [2, 3])
iex> FE.Review.map_issues(FE.Review.rejected(["dddd", "eeeee"]), &String.length/1)
FE.Review.rejected([4, 5])
"""
@spec map_issues(t(a, b), (b -> c)) :: t(a, c) when a: var, b: var, c: var
def map_issues(review, f)
def map_issues({:accepted, value}, _), do: accepted(value)
def map_issues({:issues, value, issues}, f) do
issues(value, Enum.map(issues, f))
end
def map_issues({:rejected, issues}, f) do
rejected(Enum.map(issues, f))
end
@doc """
Returns the accepted value stored in a `FE.Review`, or the provided default
if a rejected value or a value with issues is passed.
## Examples
iex> FE.Review.unwrap_or(FE.Review.rejected(["no", "way"]), :default)
:default
iex> FE.Review.unwrap_or(FE.Review.issues(1, ["no", "way"]), :default)
:default
iex> FE.Review.unwrap_or(FE.Review.accepted(123), :default)
123
"""
@spec unwrap_or(t(a, any), a) :: a when a: var
def unwrap_or(review, default)
def unwrap_or({:rejected, _}, default), do: default
def unwrap_or({:issues, _, _}, default), do: default
def unwrap_or({:accepted, value}, _), do: value
@doc """
Returns the accepted value stored in a `FE.Review`, or raises an
`FE.Review.Error` if a rejected value or a value with issues is passed.
## Examples
iex> FE.Review.unwrap!(FE.Review.accepted("foo"))
"foo"
"""
@spec unwrap!(t(a, any)) :: a when a: var
def unwrap!(review)
def unwrap!({:accepted, value}), do: value
def unwrap!({:rejected, issues}) do
raise(Error, "unwrapping rejected Review with issues: #{inspect(issues)}")
end
def unwrap!({:issues, _, issues}) do
raise(Error, "unwrapping Review with issues: #{inspect(issues)}")
end
@doc """
Applies the accepted value of a `FE.Review` to the provided function and
returns its return value, which should be of the `FE.Review` type.
When a value with issues is applied to the provided function:
if an accepted value is returned, the value is replaced but the current
issues remain;
if a new value with issues is returned, the value is replaced and the new
issues are appended to the current ones;
if a rejected value is returned, its issues are appended to the current ones.
Useful for chaining together a computation consisting of multiple steps,
each of which takes either a success value or a value with issues wrapped
in a `FE.Review` as an argument and returns a `FE.Review`.
## Examples
iex> FE.Review.and_then(
...> FE.Review.rejected(["foo"]),
...> &FE.Review.accepted(String.length(&1)))
FE.Review.rejected(["foo"])
iex> FE.Review.and_then(
...> FE.Review.issues("foo", ["bar", "baz"]),
...> &FE.Review.accepted(String.length(&1)))
FE.Review.issues(3, ["bar", "baz"])
iex> FE.Review.and_then(
...> FE.Review.issues("foo", ["bar", "baz"]),
...> &FE.Review.issues(String.length(&1), ["qux"]))
FE.Review.issues(3, ["bar", "baz", "qux"])
iex> FE.Review.and_then(FE.Review.accepted(1), &FE.Review.issues(&1, [:one]))
FE.Review.issues(1, [:one])
"""
@spec and_then(t(a, b), (a -> t(c, b))) :: t(c, b) when a: var, b: var, c: var
def and_then(review, f)
def and_then({:accepted, value}, f) do
case f.(value) do
{:accepted, value} -> accepted(value)
{:issues, value, issues} -> issues(value, issues)
{:rejected, issues} -> rejected(issues)
end
end
def and_then({:issues, value, issues}, f) do
case f.(value) do
{:accepted, value} -> issues(value, issues)
{:issues, value, new_issues} -> issues(value, issues ++ new_issues)
{:rejected, new_issues} -> rejected(issues ++ new_issues)
end
end
def and_then({:rejected, value}, _), do: {:rejected, value}
@doc """
Folds over the provided list of elements, applying each element and the
current accumulator to the provided function.
The next accumulator is the same as the result of calling `and_then` with the
current accumulator and the provided function.
The provided `FE.Review` is the initial accumulator.
## Examples
iex> FE.Review.fold(FE.Review.rejected([:error]), [],
...> &FE.Review.accepted(&1 + &2))
FE.Review.rejected([:error])
iex> FE.Review.fold(FE.Review.accepted(5), [1, 2, 3],
...> &FE.Review.accepted(&1 * &2))
FE.Review.accepted(30)
iex> FE.Review.fold(FE.Review.accepted(5), [1, 2, 3],
...> &FE.Review.issues(&1 * &2, [&1]))
FE.Review.issues(30, [1, 2, 3])
iex> FE.Review.fold(FE.Review.issues(5, [:five]), [1, 2, 3],
...> &FE.Review.accepted(&1 * &2))
FE.Review.issues(30, [:five])
iex> FE.Review.fold(FE.Review.accepted(5), [1, 2, 3], fn
...> x, 10 -> FE.Review.issues(x * 10, ["it's a ten!"])
...> x, y -> FE.Review.accepted(x * y)
...> end)
FE.Review.issues(30, ["it's a ten!"])
iex> FE.Review.fold(FE.Review.accepted(5), [1, 2, 3], fn
...> _, 10 -> FE.Review.rejected(["it's a ten!"])
...> x, y -> FE.Review.accepted(x * y)
...> end)
FE.Review.rejected(["it's a ten!"])
"""
@spec fold(t(a, b), [c], (c, a -> t(a, b))) :: t(a, b) when a: var, b: var, c: var
def fold(review, elems, f) do
Enum.reduce_while(elems, review, fn elem, acc ->
case and_then(acc, fn value -> f.(elem, value) end) do
{:accepted, _} = accepted -> {:cont, accepted}
{:issues, _, _} = issues -> {:cont, issues}
{:rejected, _} = rejected -> {:halt, rejected}
end
end)
end
@doc """
Works like `fold/3`, except that the first element of the provided list is removed
from it, converted to an accepted `FE.Review` and treated as the initial accumulator.
Then, fold is executed over the remainder of the provided list.
## Examples
iex> FE.Review.fold([1], fn _, _ -> FE.Review.rejected([:foo]) end)
FE.Review.accepted(1)
iex> FE.Review.fold([1, 2, 3], &FE.Review.accepted(&1 + &2))
FE.Review.accepted(6)
iex> FE.Review.fold([1, 2, 3], &FE.Review.issues(&1 + &2, [&2]))
FE.Review.issues(6, [1, 3])
iex> FE.Review.fold([1, 2, 3, 4], fn
...> _, 6 -> FE.Review.rejected(["six"])
...> x, y -> FE.Review.issues(x + y, [y])
...> end)
FE.Review.rejected([1, 3, "six"])
iex> FE.Review.fold([1, 2, 3, 4], fn
...> x, 6 -> FE.Review.issues(x + 6, ["six"])
...> x, y -> FE.Review.accepted(x + y)
...> end)
FE.Review.issues(10, ["six"])
"""
@spec fold([c], (c, a -> t(a, b))) :: t(a, b) when a: var, b: var, c: var
def fold([], _), do: raise(Enum.EmptyError)
def fold([head | tail], f), do: fold(accepted(head), tail, f)
@doc """
Transforms `FE.Review` to a `FE.Result`.
Any accepted value of a `FE.Review` becomes a successful value of a `FE.Result`.
If there are any issues either in a rejected `FE.Review` or coupled with a value,
all the issues become the erroneous output of the resulting `FE.Result`.
## Examples
iex> FE.Review.to_result(FE.Review.issues(1, [2, 3]))
FE.Result.error([2, 3])
iex> FE.Review.to_result(FE.Review.accepted(4))
FE.Result.ok(4)
iex> FE.Review.to_result(FE.Review.rejected([5, 6, 7]))
FE.Result.error([5, 6, 7])
"""
@spec to_result(t(a, b)) :: Result.t(a, [b]) when a: var, b: var
def to_result(review)
def to_result({:accepted, value}), do: Result.ok(value)
def to_result({:rejected, issues}), do: Result.error(issues)
def to_result({:issues, _, issues}), do: Result.error(issues)
@doc """
Transforms `FE.Review` to a `FE.Maybe`.
Any accepted value of a `FE.Review` becomes a `FE.Maybe` with the same value.
If there are any issues either in a rejected `FE.Review` or coupled with a value,
a `FE.Maybe` without a value is returned.
## Examples
iex> FE.Review.to_maybe(FE.Review.issues(1, [2, 3]))
FE.Maybe.nothing()
iex> FE.Review.to_maybe(FE.Review.accepted(4))
FE.Maybe.just(4)
iex> FE.Review.to_maybe(FE.Review.rejected([5, 6, 7]))
FE.Maybe.nothing()
"""
@spec to_maybe(t(a, any)) :: Maybe.t(a) when a: var
def to_maybe(review)
def to_maybe({:accepted, value}), do: Maybe.just(value)
def to_maybe({:rejected, _}), do: Maybe.nothing()
def to_maybe({:issues, _, _}), do: Maybe.nothing()
end
|
lib/fe/review.ex
| 0.933317
| 0.645413
|
review.ex
|
starcoder
|
defmodule Exceptional.TaggedStatus do
@moduledoc ~S"""
Convert back to conventional Erlang/Elixir `{:ok, _}` tuples
## Convenience `use`s
Everything:
use Exceptional.TaggedStatus
Only named functions (`to_tagged_status`, `ok`):
use Exceptional.TaggedStatus, only: :named_functions
Only operators (`~~~`):
use Exceptional.TaggedStatus, only: :operators
"""
defmacro __using__(only: :named_functions) do
quote do
import unquote(__MODULE__), except: [~~~: 1]
end
end
defmacro __using__(only: :operators) do
quote do
import unquote(__MODULE__), only: [~~~: 1]
end
end
defmacro __using__(_) do
quote do
import unquote(__MODULE__)
end
end
@doc ~S"""
Convert unraised exceptions to `{:error, message}`, and other values to
`{:ok, value}`.
## Examples
iex> to_tagged_status([1,2,3])
{:ok, [1,2,3]}
iex> Enum.OutOfBoundsError.exception("error message") |> to_tagged_status()
{:error, "error message"}
"""
def to_tagged_status(maybe_exception) do
case maybe_exception do
tuple when is_tuple(tuple) ->
tuple
|> Tuple.to_list()
|> List.first()
|> case do
tag when is_atom(tag) -> tuple
_ -> {:ok, tuple}
end
value ->
if Exception.exception?(value) do
{:error, Exception.message(value)}
else
{:ok, value}
end
end
end
@doc ~S"""
Alias for `to_tagged_status`
## Examples
iex> [1,2,3] |> ok()
{:ok, [1,2,3]}
iex> Enum.OutOfBoundsError.exception("error message") |> ok
{:error, "error message"}
"""
defdelegate ok(maybe_exception), to: __MODULE__, as: :to_tagged_status
@doc ~S"""
Operator alias for `to_tagged_status`
## Examples
iex> ~~~[1,2,3]
{:ok, [1,2,3]}
iex> ~~~Enum.OutOfBoundsError.exception("error message")
{:error, "error message"}
iex> exc = Enum.OutOfBoundsError.exception("error message")
...> ~~~exc
{:error, "error message"}
"""
defdelegate ~~~(maybe_exception), to: __MODULE__, as: :to_tagged_status
end
|
lib/exceptional/tagged_status.ex
| 0.749271
| 0.436322
|
tagged_status.ex
|
starcoder
|
defmodule Temple do
@engine Application.compile_env(:temple, :engine, EEx.SmartEngine)
@moduledoc """
Temple syntax is available inside the `temple`, and is compiled into efficient Elixir code at compile time using the configured `EEx.Engine`.
You should checkout the [guides](https://hexdocs.pm/temple/your-first-template.html) for a more in depth explanation.
## Usage
```elixir
defmodule MyApp.HomePage do
import Temple
def render() do
assigns = %{title: "My Site | Sign Up", logged_in: false}
temple do
"<!DOCTYPE html>"
html do
head do
meta charset: "utf-8"
meta http_equiv: "X-UA-Compatible", content: "IE=edge"
meta name: "viewport", content: "width=device-width, initial-scale=1.0"
link rel: "stylesheet", href: "/css/app.css"
title do: @title
end
body do
header class: "header" do
ul do
li do
a href: "/", do: "Home"
end
li do
if @logged_in do
a href: "/logout", do: "Logout"
else
a href: "/login", do: "Login"
end
end
end
end
main do
"Hi! Welcome to my website."
end
end
end
end
end
end
```
## Configuration
### Engine
By default Temple will use the `EEx.SmartEngine`, but you can configure it to use any other engine. Examples could be `Phoenix.HTML.Engine` or `Phoenix.LiveView.Engine`.
```elixir
config :temple, engine: Phoenix.HTML.Engine
```
### Aliases
You can add an alias for an element if there is a namespace collision with a function. If you are using `Phoenix.HTML`, there will be namespace collisions with the `<link>` and `<label>` elements.
```elixir
config :temple, :aliases,
label: :label_tag,
link: :link_tag,
select: :select_tag
temple do
label_tag do
"Email"
end
link_tag href: "/css/site.css"
end
```
This will result in:
```html
<label>
Email
</label>
<link href="/css/site.css">
```
"""
defmacro temple(block) do
opts = [engine: engine()]
quote do
require Temple.Renderer
Temple.Renderer.compile(unquote(opts), unquote(block))
end
end
@doc false
def component(func, assigns) do
apply(func, [assigns])
end
@doc false
def engine(), do: @engine
end
|
lib/temple.ex
| 0.835047
| 0.641394
|
temple.ex
|
starcoder
|
defmodule Indicado.ADI do
@moduledoc """
This is the ADI module used for calculating Accumulation Distribution Line.
"""
@typedoc """
The argument passed to eval functions should be a list of adi_data_map type.
"""
@type adi_data_map :: %{
low: float,
high: float,
close: float,
volume: float
}
@doc """
Calculates ADI for the list. The list argument passed to the eval function should be a list of the adi_data_map type.
Returns `{:ok, adi_list}` or `{:error, reason}`
## Examples
iex> Indicado.ADI.eval([%{low: 1.0, high: 11.0, close: 10.0, volume: 5.7}, %{low: 2.0, high: 6.0, close: 3.0, volume: 11.5}, %{low: 3.0, high: 7.0, close: 4.0, volume: 2.0}, %{low: 4.0, high: 20.3, close: 18.0, volume: 20.2}])
{:ok, [4.5600000000000005, -1.1899999999999995, -2.1899999999999995, 12.309386503067484]}
iex> Indicado.ADI.eval([%{low: 1.0, high: 4.0, close: 3.0, volume: 2.0}])
{:ok, [0.6666666666666666]}
iex> Indicado.ADI.eval([])
{:error, :not_enough_data}
"""
@spec eval(nonempty_list(adi_data_map)) :: {:ok, nonempty_list(float)} | {:error, atom}
def eval(list), do: calc(list)
@doc """
Calculates ADI for the list. The list argument passed to the eval function should be a list of the adi_data_map type. Raises an exception when the arguments do not satisfy the needed conditions.
Returns `adi_list` or,
Raises `NotEnoughDataError` if the given list length is zero.
## Examples
iex> Indicado.ADI.eval!([%{low: 1.0, high: 4.0, close: 3.0, volume: 2.0}])
[0.6666666666666666]
iex> Indicado.ADI.eval!([])
** (NotEnoughDataError) not enough data
"""
@spec eval!(nonempty_list(adi_data_map)) :: nonempty_list(float) | no_return
def eval!(list) do
case calc(list) do
{:ok, result} -> result
{:error, :not_enough_data} -> raise NotEnoughDataError
end
end
defp calc(list, results \\ [])
defp calc([], []), do: {:error, :not_enough_data}
defp calc([], results), do: {:ok, Enum.reverse(results)}
defp calc([head | tail], []) do
calc(tail, [clv(head) * head.volume])
end
defp calc([head | tail], [rhead | _rtail] = results) do
calc(tail, [rhead + clv(head) * head.volume | results])
end
defp clv(row) when row.high == row.low do
0
end
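# Close Location Value: ((close - low) - (high - close)) / (high - low)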
defp clv(row) do
(row.close - row.low - (row.high - row.close)) / (row.high - row.low)
end
end
|
lib/indicado/adi.ex
| 0.869424
| 0.598928
|
adi.ex
|
starcoder
|
defmodule GenServerAsync do
@moduledoc ~S"""
A GenServer extension that prevents blocking the GenServer process in long-running `c:handle_call/3` callbacks.
See more in `GenServer`.
## Example
```elixir
defmodule Queue do
use GenServerAsync
@server_name __MODULE__
def start_link(default) do
GenServerAsync.start_link(__MODULE__, default, name: @server_name)
end
def register(user) do
GenServerAsync.call_async(@server_name, {:register, user})
end
# blocking call
def handle_call({:register, user}, from, state) do
with :not_found <- found_user(state, user) do
# call async callback
{:noreply, state}
else
{:found, user} ->
# send reply to from
{:reply, {:already_registered, user}, state}
end
end
# called async
def handle_call_async({:register, user}, from, state) do
result = heavy_function(user)
{:reply, result, state}
end
# called on finish `handle_call_async` with result
def handle_cast_async({:register, user}, result, state) do
# update state if needed
{:noreply, state}
end
end
```
## Debugging
Use `async: false` for disable asynchonious calling `c:handle_call_async/2` for debuging.
```elixir
GenServerAsync.call_async(server, request, async: false)
```
"""
@typedoc "Result of calculation in `handle_call_async` method."
@type result :: term()
@typedoc "Request message for mathcing callback"
@type request :: term()
@typedoc "GenServer state"
@type state :: term()
@doc """
Invoked in a separate task when `c:handle_call/3` returns `{:noreply, state}`.
Receives the request and the current state, and must return `{:reply, result}`;
the result is then sent as the reply to the waiting caller.
"""
@callback handle_call_async(request, state) :: {:reply, result}
@doc """
Invoked after `c:handle_call_async/2` finishes. `request` is the original
request, `result` is the value it produced, and `state` is the current state
of the GenServer.
If this callback is not implemented, the default implementation by
`use GenServerAsync` will return `{:noreply, state}`.
"""
@callback handle_cast_async(request :: term(), result, state) :: {:noreply, state}
@optional_callbacks handle_cast_async: 3
@doc """
Starts a `GenServerAsync` process linked to the current process.
See `GenServer.start_link/3`.
"""
@spec start_link(module(), any(), GenServer.options()) :: GenServer.on_start()
defdelegate start_link(module, args, options \\ []), to: GenServer
@doc """
Makes a synchronous call to the `server` and waits for its reply.
See `GenServer.call/3`.
"""
@spec call(GenServer.server(), term(), timeout()) :: term()
defdelegate call(server, request, timeout \\ 5000), to: GenServer
@doc """
Sends an asynchronous request to the `server`.
See `GenServer.cast/2`.
"""
@spec cast(GenServer.server(), term()) :: :ok
defdelegate cast(server, request), to: GenServer
@doc """
Replies to a client.
See `GenServer.reply/2`.
"""
@spec reply(GenServer.from(), term()) :: :ok
defdelegate reply(client, reply), to: GenServer
@doc false
defmacro __using__(opts) do
quote location: :keep, bind_quoted: [opts: opts] do
use GenServer, Macro.escape(opts)
@behaviour GenServerAsync
@impl GenServer
def init(state) do
{:ok, state}
end
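# Try the synchronous handle_call/3 first; if it returns {:noreply, state},
# handle_call_async/2 runs in a linked Task and the caller gets its reply
# later, from the {:async_cast, ...} handle_cast/2 clause below.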
@doc false
@impl true
def handle_call({:call_async, genserver_pid, request, _opts}, from, state) do
case handle_call(request, from, state) do
{:reply, result, state} ->
{:reply, result, state}
{:noreply, updated_state} ->
Task.start_link(fn ->
try do
{:reply, result} = handle_call_async(request, updated_state)
GenServer.cast(genserver_pid, {:async_cast, from, request, result})
rescue
error ->
GenServer.cast(genserver_pid, {:async_cast, from, request, {:error, error}})
end
end)
{:noreply, updated_state}
end
end
@doc false
def handle_call({:call_no_async, _genserver_pid, request, _opts}, from, state) do
case handle_call(request, from, state) do
{:reply, response, state} ->
{:reply, response, state}
{:noreply, call_state} ->
{:reply, result} = handle_call_async(request, call_state)
{:noreply, updated_state} = handle_cast_async(request, result, call_state)
{:reply, result, updated_state}
end
end
@doc false
@impl true
def handle_cast({:async_cast, from, request, result}, state) do
GenServer.reply(from, result)
handle_cast_async(request, result, state)
end
def handle_cast_async(_request, _result, state) do
{:noreply, state}
end
defoverridable init: 1, handle_cast_async: 3
end
end
@doc ~S"""
Makes a synchronous call to the server and waits for its reply.
The client sends the given request to the `server` and waits until a reply
arrives or a timeout occurs. `c:handle_call/3` will be called on the server to
handle the request.
`c:handle_call_async/2` will be called asynchronously if `c:handle_call/3` returns `{:noreply, state}`.
`c:handle_cast_async/3` is called after the `c:handle_call_async/2` callback finishes.
"""
@spec call_async(GenServer.server(), request) :: result()
def call_async(pid, request, opts \\ []) do
timeout = config(opts, :timeout, 2_000)
event_name = (config(opts, :async, true) && :call_async) || :call_no_async
call(pid, {event_name, pid, request, opts}, timeout)
end
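# Resolves an option from `opts`, falling back to the :gen_server_async
# application environment and finally to `default`.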
@spec config(Keyword.t(), atom(), any()) :: any()
def config(opts, name, default) do
case Keyword.get(opts, name) do
nil -> Application.get_env(:gen_server_async, name, default)
value -> value
end
end
end
|
lib/gen_server_async.ex
| 0.811788
| 0.618435
|
gen_server_async.ex
|
starcoder
|
defmodule Xandra.Compressor do
@moduledoc """
A behaviour to compress and decompress binary data.
Modules implementing this behaviour can be used to compress and decompress
data using one of the compression algorithms supported by Cassandra (see below).
## Supported algorithms and implementations
Native protocol versions v4 and earlier support two compression algorithms:
[LZ4](https://en.wikipedia.org/wiki/LZ4_(compression_algorithm)) and
[Snappy](https://en.wikipedia.org/wiki/Snappy_(compression)).
Native protocol versions v5 and later only support LZ4.
### LZ4
If you implement a compressor module for the LZ4 algorithm, then:
* The `c:compress/1` callback **must** return the compressed payload
*preceded* by a 32-bit big-endian unsigned integer representing the
length (in bytes) of the **uncompressed body**.
* Xandra will call the `c:decompress/1` callback with the compressed
payload, also preceded by the uncompressed body size (in bytes) as
a 32-bit big-endian unsigned integer.
That is, the result of compression when using the LZ4 algorithm must look like
this:
<<uncompressed_payload_length::32-big-unsigned, compressed_payload::binary>>
Snappy is self-sufficient so it doesn't need the prepended uncompressed-payload
size.
## Example
Let's imagine you implemented the LZ4 compression algorithm in your application:
defmodule MyApp.LZ4 do
def compress(binary), do: # ...
def decompress(binary, uncompressed_size), do: # ...
end
You can then implement a module that implements the `Xandra.Compressor`
behaviour and can be used to compress and decompress data flowing through the
connection to Cassandra:
defmodule LZ4XandraCompressor do
@behaviour Xandra.Compressor
@impl true
def algorithm(), do: :lz4
@impl true
def compress(body) do
[<<IO.iodata_length(body)::4-unit(8)-integer>>, MyApp.LZ4.compress(body)]
end
@impl true
def decompress(<<uncompressed_size::4-unit(8)-integer, compressed_body::binary>>) do
MyApp.LZ4.decompress(compressed_body, uncompressed_size)
end
end
Now, this module can be passed as the value of the `:compressor` option to
many functions in `Xandra`:
Xandra.start_link(compressor: LZ4XandraCompressor)
For more information on compression, see the "Compression" section in the
documentation for `Xandra`.
"""
@doc """
Specifies which algorithm this module will use to compress and decompress
data.
"""
@callback algorithm() :: :lz4 | :snappy
@doc """
Compresses the given iodata according to the algorithm returned by
`c:algorithm/0`.
"""
@callback compress(iodata) :: iodata
@doc """
Decompresses the given binary according to the algorithm returned by
`c:algorithm/0`.
"""
@callback decompress(binary) :: binary
end
|
lib/xandra/compressor.ex
| 0.925592
| 0.784938
|
compressor.ex
|
starcoder
|
defmodule SecureHeaders.XXssProtection do
@moduledoc """
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 0]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 1]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 1, mode: "block"]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 1, report: "http://google.com/"]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 1, mode: "block", report: "http://google.com/report"]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 1, mode: "block", report: "/report/"]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: "0"])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: "0;"])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: "1"])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: "1;"])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: "1;mode=block"])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: "1; mode=block"])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: "1; report=http://google.com/"])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: "1; report=/google.com/"])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: "1; mode=block; report=/google.com/"])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: "1; mode=block; report=http://google.com/"])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 0, mode: "block"]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 0, report: "http://google.com/"]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 0, mode: "block", report: "http://google.com/"]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 0, mode: "block", report: "/report/"]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 1, mode: "allow"]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 1, mode: "allow", report: "http://google.com/"]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 2, mode: "block"]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 2, modes: "block"]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 2, mode: "allow"]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 2, report: "google.com"]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 2, mode: "allow", report: "http://google.com/"]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 2, mode: "block", report: "google.com"]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [values: 2]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [value: 2, reportz: "google.com"]])
IO.inspect( SecureHeaders.XXssProtection.validate [x_xss_protection: [valuez: 2, mode: "block", reportz: "google.com"]])
"""
@error_msg "Invalid configuration for x-xss-protection"
@secure_config [
value: 0,
mode: "block",
report: ""
]
def validate(options) when is_list(options) do
case Keyword.has_key?(options, :config) do
false -> {:ok, options}
true ->
case Keyword.has_key?(options[:config], :x_xss_protection) do
# No x-xss-protection configuration found - return config
false -> {:ok, options}
true ->
case validate_config(options[:config][:x_xss_protection]) do
false -> {:error, @error_msg}
true -> {:ok, config_to_string(options, options[:config][:x_xss_protection])}
end
end
end
end
def validate(_), do: {:error, @error_msg}
defp config_to_string(options, xss_config) when is_list(xss_config) do
config = Keyword.drop(options[:config], [:x_xss_protection])
config = config ++ [x_xss_protection: make_string(xss_config)]
Keyword.drop(options, [:config]) ++ [config: config]
end
defp config_to_string(options, xss_config) when is_bitstring(xss_config) do
options
end
defp validate_config(xss_config) when is_list(xss_config) do
case validate_keys(xss_config) do
false -> false
true -> validate_config(make_string(xss_config))
end
end
defp validate_config(xss_config) when is_bitstring(xss_config) do
v = ~r/^[01](;)?\z/i
vm = ~r/^[1](;|; ){1,1}mode=block\z/i
vr= ~r/^[1](;|; ){1,1}report=.+\z/i
vmr = ~r/^[1](;|; ){1,1}mode=block(;|; ){1,1}report=.+\z/i
Regex.match?(v, xss_config) || Regex.match?(vm, xss_config)|| Regex.match?(vr, xss_config) || Regex.match?(vmr, xss_config)
end
defp validate_config(_), do: false
defp validate_keys(xss_config) when is_list(xss_config) do
List.foldl(Keyword.keys(xss_config), true, fn key, acc -> List.keymember?(@secure_config, key, 0) && acc end)
end
defp make_string(xss_config) do
result = ""
result = if Keyword.get(xss_config, :value), do: result <> Integer.to_string(xss_config[:value]), else: result
result = if Keyword.get(xss_config, :mode), do: result <> "; mode=" <> xss_config[:mode], else: result
if Keyword.get(xss_config, :report), do: result <> "; report=" <> xss_config[:report], else: result
end
end
|
lib/headers/x_xss_protection.ex
| 0.550003
| 0.664613
|
x_xss_protection.ex
|
starcoder
|
defmodule FlowAssertions.Ecto.SchemaA do
use FlowAssertions.Define
use FlowAssertions
alias FlowAssertions.Ecto.Messages
@moduledoc """
Assertions for values defined by the macros in `Ecto.Schema`.
"""
@doc """
Assert that an association has been loaded.
Animal.typical(id, preload: [:service_gaps])
|> assert_assoc_loaded(:service_gaps)
The second argument can either be a single key or a list of keys.
"""
defchain assert_assoc_loaded(struct, key_or_keys) when is_list(key_or_keys) do
for k <- key_or_keys, do: assert_assoc_loaded(struct, k)
end
defchain assert_assoc_loaded(struct, key_or_keys) when is_struct(struct) do
struct_must_have_key!(struct, key_or_keys)
value = Map.get(struct, key_or_keys)
case value do
%Ecto.Association.NotLoaded{} ->
elaborate_flunk(Messages.assoc_not_loaded(key_or_keys), left: value)
_ ->
:ok
end
end
@doc """
Fail when an association has been loaded.
Animal.typical(id, preload: [:species])
|> refute_assoc_loaded(:service_gaps)
The second argument can either be a single key or a list of keys.
"""
defchain refute_assoc_loaded(struct, key_or_keys) when is_list(key_or_keys) do
for k <- key_or_keys, do: refute_assoc_loaded(struct, k)
end
defchain refute_assoc_loaded(struct, key_or_keys) do
struct_must_have_key!(struct, key_or_keys)
value = Map.get(struct, key_or_keys)
case value do
%Ecto.Association.NotLoaded{} ->
:ok
_ ->
elaborate_flunk(Messages.assoc_loaded(key_or_keys), left: value)
end
end
@doc """
Check that the given value matches a schema's name.
We consider a schema's name to be that found inside its
`Ecto.Schema.Metadata`, which is - by default - the module
it was defined in. Embedded schemas don't have metadata, so
`FlowAssertions.StructA.assert_struct_named/2` is the appropriate
assertion for them.
"""
defchain assert_schema_name(value, name) when is_struct(value) do
if Map.has_key?(value, :__meta__) do
elaborate_assert_equal(value.__meta__.schema, name)
else
elaborate_flunk(Messages.maybe_embedded, left: value)
end
end
def assert_schema_name(value, _module_name) do
elaborate_flunk(Messages.never_a_schema, left: value)
end
@doc """
Map comparison that auto-ignores fields typically irrelevant when working with schemas.
Works just like `FlowAssertions.MapA.assert_same_map/3`, except that
it ignores the `:updated_at`, `:inserted_at`, and `:__meta__` fields
(if present).
old
|> VM.Animal.change(name: "bossie")
|> assert_same_schema(old, except: [name: "bossie"])
You can compare one or more of those three fields by using the `comparing:` or
`except:` options:
assert_same_schema(new, old, except: newer_than(old))
"""
defchain assert_same_schema(new, old, opts \\ []) do
default_ignore = those_in(new, [:inserted_at, :updated_at, :__meta__])
ignore = Keyword.get(opts, :ignoring, []) ++ default_ignore
except = Keyword.get(opts, :except, [])
if Keyword.has_key?(opts, :comparing) do
assert_same_map(new, old, opts)
else
assert_same_map(new, old, ignoring: ignore, except: except)
end
end
defp those_in(struct, keys), do: Enum.filter(keys, &(Map.has_key?(struct, &1)))
end
|
lib/schema_a.ex
| 0.843509
| 0.686035
|
schema_a.ex
|
starcoder
|
defmodule Sanbase.Signal.Scheduler do
@moduledoc ~s"""
This module is the entrypoint to the user custom signals.
Its main job is to glue together all modules related to signal processing
into one pipeline (the `run_signal/1` function):
> Get the user triggers from the database
> Evaluate the signals
> Send the signals to the user
> Update the `last_triggered` in the database
> Log stats messages
"""
@signal_modules Sanbase.Signal.List.get()
alias Sanbase.Signal.{UserTrigger, HistoricalActivity}
alias Sanbase.Signal.Evaluator
alias Sanbase.Signal
require Logger
defguard is_non_empty_map(map) when is_map(map) and map != %{}
for module <- @signal_modules do
def run_signal(unquote(module)) do
unquote(module).type() |> run()
end
end
# Private functions
defp run(type) do
{updated_user_triggers, sent_list_results} =
type
|> UserTrigger.get_active_triggers_by_type()
|> Evaluator.run(type)
|> send_and_mark_as_sent()
fired_signals =
updated_user_triggers
|> get_fired_signals_data()
fired_signals |> persist_historical_activity()
fired_signals |> persist_timeline_events()
updated_user_triggers |> deactivate_non_repeating()
sent_list_results
|> List.flatten()
|> log_sent_messages_stats(type)
end
defp deactivate_non_repeating(triggers) do
for %UserTrigger{id: id, user: user, trigger: %{is_repeating: false}} <- triggers do
UserTrigger.update_user_trigger(user, %{
id: id,
is_active: false
})
end
end
# returns a tuple {updated_user_triggers, send_result_list}
defp send_and_mark_as_sent(triggers) do
triggers
|> Sanbase.Parallel.map(
fn %UserTrigger{} = user_trigger ->
case Signal.send(user_trigger) do
[] ->
{user_trigger, []}
# Tried to send a signal that was not triggered
{:error, _} ->
{user_trigger, []}
list when is_list(list) ->
{:ok, updated_user_trigger} = update_last_triggered(user_trigger, list)
user_trigger =
put_in(
user_trigger.trigger.last_triggered,
updated_user_trigger.trigger.last_triggered
)
{user_trigger, list}
end
end,
max_concurrency: 20,
ordered: false,
map_type: :map
)
|> Enum.unzip()
end
defp update_last_triggered(
%{user: user, id: trigger_id, trigger: %{last_triggered: last_triggered}},
send_results_list
) do
# Round the datetimes to minutes because the `last_triggered` is used as
# part of a cache key. If `now` is left as is the last triggered time of
# all signals will be different, sometimes only by a second
now = Timex.now() |> Timex.set(second: 0, microsecond: {0, 0})
# Update all triggered_at regardless of whether the send to the channel
# succeeded, because the signal will be stored in the timeline events.
last_triggered =
send_results_list
|> Enum.reduce(last_triggered, fn
{list, _}, acc when is_list(list) ->
Enum.reduce(list, acc, fn elem, inner_acc ->
Map.put(inner_acc, elem, now)
end)
{identifier, _}, acc ->
Map.put(acc, identifier, now)
end)
UserTrigger.update_user_trigger(user, %{
id: trigger_id,
last_triggered: last_triggered
})
end
defp get_fired_signals_data(user_triggers) do
user_triggers
|> Enum.map(fn
%UserTrigger{
id: id,
user_id: user_id,
trigger: %{
settings: %{triggered?: true, payload: payload, template_kv: template_kv},
last_triggered: last_triggered
}
}
when is_non_empty_map(last_triggered) ->
identifier_kv_map =
template_kv
|> Enum.into(%{}, fn {identifier, {_template, kv}} -> {identifier, kv} end)
%{
user_trigger_id: id,
user_id: user_id,
payload: payload,
triggered_at: max_last_triggered(last_triggered) |> DateTime.to_naive(),
data: %{user_trigger_data: identifier_kv_map}
}
_ ->
nil
end)
|> Enum.reject(&is_nil/1)
end
# Fixme: remove after frontend migrates to use only Timeline Events
defp persist_historical_activity(fired_triggers) do
fired_triggers
|> Enum.chunk_every(200)
|> Enum.each(fn chunk ->
Sanbase.Repo.insert_all(HistoricalActivity, chunk)
end)
end
defp persist_timeline_events(fired_triggers) do
fired_triggers
|> Sanbase.Timeline.TimelineEvent.create_trigger_fired_events()
end
defp log_sent_messages_stats([], type) do
Logger.info("There were no signals triggered of type #{type}")
end
defp log_sent_messages_stats(list, type) do
successful_messages = list |> Enum.count(fn {_elem, status} -> status == :ok end)
for {_, {:error, error}} <- list do
Logger.warn("Cannot send a signal. Reason: #{inspect(error)}")
end
Logger.info(
"In total #{successful_messages}/#{length(list)} #{type} signals were sent successfully"
)
end
defp max_last_triggered(last_triggered) when is_non_empty_map(last_triggered) do
last_triggered
|> Map.values()
|> Enum.map(fn
%DateTime{} = dt ->
dt
datetime_str when is_binary(datetime_str) ->
Sanbase.DateTimeUtils.from_iso8601!(datetime_str)
end)
|> Enum.max_by(&DateTime.to_unix/1)
end
end
|
lib/sanbase/signals/evaluator/scheduler.ex
| 0.722918
| 0.47098
|
scheduler.ex
|
starcoder
|
defmodule Bonbon.Model.Store do
use Bonbon.Web, :model
@moduledoc """
A model representing the different stores.
## Fields
### :id
Is the unique reference to the store entry. Is an `integer`.
### :public
Whether the store is publicly listed or whether it is private (for use by [this](https://trello.com/c/K2HFzzo0)). Is a `boolean`.
### :status
Is the current operating status of the store. Is a `Bonbon.Type.Store.StatusEnum`.
### :name
Is the name of the store. Is a `string`.
### :phone
Is the contact phone number of the store. Is a `string`.
### :address
Is the address the store is located at. Is a `string`.
### :suburb
Is the suburb the store is located in. Is a `string`.
### :state
Is the state the store is located in. Is a `string`.
### :zip_code
Is the zip code for where the store is located. Is a `string`.
### :country
Is the country the store is located in. Is a `string`.
### :geo
Is the geospatial coordinate the store is located at. Is a `Geo.Point`.
### :coordinates
Is the longitude/latitude coordinate the store is located at. Is a `map`.
### :place
Is the place/landmark/building the store is located inside (i.e. if it is
inside a shopping centre's food court). Is a `string`.
### :pickup
Whether the store allows for customer pickup. Is a `boolean`.
### :reservation
Whether or not the store accepts reservations. Is a `boolean`.
"""
schema "stores" do
field :public, :boolean
field :status, Bonbon.Type.Store.StatusEnum
field :name, :string
field :phone, :string
field :address, :string
field :suburb, :string
field :state, :string
field :zip_code, :string
field :country, :string
field :geo, Geo.Point
field :coordinates, { :map, :float }, virtual: true
field :place, :string
field :pickup, :boolean
field :reservation, :boolean
timestamps()
end
defp changeset(struct, _params \\ %{}) do
struct
|> validate_phone_number(:phone)
|> validate_map(:coordinates, [:latitude, :longitude])
|> format_coordinate(:coordinates, :geo)
end
@doc """
Builds a changeset for insertion based on the `struct` and `params`.
Enforces:
* `status` field is required
* `name` field is required
* `phone` field is required
* `address` field is required
* `suburb` field is required
* `state` field is required
* `country` field is required
* `coordinates` field is required
* `coordinates` field is a map containing the required fields `:latitude`
and `:longitude`
* `pickup` field is required
* `reservation` field is required
* `phone` field is a valid phone number
"""
def insert_changeset(struct, params \\ %{}) do
struct
|> cast(params, [:public, :status, :name, :phone, :address, :suburb, :state, :zip_code, :country, :coordinates, :place, :pickup, :reservation])
|> validate_required([:status, :name, :phone, :address, :suburb, :state, :country, :coordinates, :pickup, :reservation])
|> changeset(params)
end
@doc """
Builds a changeset for updates based on the `struct` and `params`.
Enforces:
* `status` field is not empty
* `name` field is not empty
* `phone` field is not empty
* `address` field is not empty
* `suburb` field is not empty
* `state` field is not empty
* `country` field is not empty
* `coordinates` field is not empty
* `coordinates` field is a map containing the required fields `:latitude`
and `:longitude`
* `pickup` field is not empty
* `reservation` field is not empty
* `phone` field is a valid phone number
"""
def update_changeset(struct, params \\ %{}) do
struct
|> cast(params, [:public, :status, :name, :phone, :address, :suburb, :state, :zip_code, :country, :coordinates, :place, :pickup, :reservation])
|> validate_emptiness(:status)
|> validate_emptiness(:name)
|> validate_emptiness(:phone)
|> validate_emptiness(:address)
|> validate_emptiness(:suburb)
|> validate_emptiness(:state)
|> validate_emptiness(:country)
|> validate_emptiness(:coordinates)
|> validate_emptiness(:pickup)
|> validate_emptiness(:reservation)
|> changeset(params)
end
def get_coordinates(%{ geo: %Geo.Point{ coordinates: { lng, lat }, srid: 4326 } }) do
%{ latitude: lat, longitude: lng }
end
end
|
web/models/store.ex
| 0.848486
| 0.788257
|
store.ex
|
starcoder
|
defmodule Flex.EngineAdapter.ANFIS do
@moduledoc """
An adaptive network-based fuzzy inference system (ANFIS) is a kind of artificial neural network based on the Takagi–Sugeno fuzzy inference system.
This implementation uses backpropagation; only Gaussian membership functions are allowed.
Reference:
https://upcommons.upc.edu/bitstream/handle/2099.1/20296/Annex%201%20-%20Introduction%20to%20Adaptive%20Neuro-Fuzzy%20Inference%20Systems%20%28ANFIS%29.pdf
Jang, J-SR. "ANFIS: adaptive-network-based fuzzy inference system." IEEE transactions on systems, man, and cybernetics 23.3 (1993): 665-685.
"""
alias Flex.{EngineAdapter, EngineAdapter.State, MembershipFun, Variable}
@behaviour EngineAdapter
import Flex.Rule, only: [statement: 2, get_rule_parameters: 3]
import MembershipFun, only: [derivative: 4]
@impl EngineAdapter
def validation(engine_state, _antecedent, _rules, _consequent),
do: engine_state
@impl EngineAdapter
def fuzzification(%State{input_vector: input_vector} = engine_state, antecedent) do
fuzzy_antecedent = EngineAdapter.default_fuzzification(input_vector, antecedent, %{})
%{engine_state | fuzzy_antecedent: fuzzy_antecedent}
end
@impl EngineAdapter
def inference(
%State{fuzzy_antecedent: fuzzy_antecedent, input_vector: input_vector} = engine_state,
rules,
consequent
) do
fuzzy_consequent =
fuzzy_antecedent
|> inference_engine(rules, consequent)
|> compute_output_level(input_vector)
%{engine_state | fuzzy_consequent: fuzzy_consequent}
end
@impl EngineAdapter
def defuzzification(%State{fuzzy_consequent: fuzzy_consequent} = engine_state) do
%{engine_state | crisp_output: weighted_average_method(fuzzy_consequent)}
end
def inference_engine(_fuzzy_antecedent, [], consequent), do: consequent
def inference_engine(fuzzy_antecedent, [rule | tail], consequent) do
rule_parameters = get_rule_parameters(rule.antecedent, fuzzy_antecedent, []) ++ [consequent]
consequent =
if is_function(rule.statement) do
rule.statement.(rule_parameters)
else
args = Map.merge(fuzzy_antecedent, %{consequent.tag => consequent})
statement(rule.statement, args)
end
inference_engine(fuzzy_antecedent, tail, consequent)
end
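# Gradient-descent update of the consequent (TSK) parameters: for each rule the
# partial derivative dy/db is the normalized firing strength times [inputs, 1],
# scaled by the propagated output error de_dy.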
def forward_pass(de_dy, learning_rate, %{
fuzzy_consequent: fuzzy_consequent,
input_vector: input_vector
}) do
w = get_w(fuzzy_consequent) |> List.flatten()
n_w = Enum.map(w, fn w_i -> w_i / Enum.sum(w) end)
dy_dbc =
Enum.map(n_w, fn n_w_i -> Enum.map(input_vector ++ [1], fn input -> input * n_w_i end) end)
de_dbc =
Enum.map(dy_dbc, fn dy_dbc_f ->
Enum.map(dy_dbc_f, fn dy_dbc_fi -> de_dy * dy_dbc_fi end)
end)
Variable.update(fuzzy_consequent, de_dbc, learning_rate)
end
defp get_w(fuzzy_consequent) do
Enum.reduce(fuzzy_consequent.fuzzy_sets, [], fn output_fuzzy_set, acc ->
acc ++ [fuzzy_consequent.mf_values[output_fuzzy_set.tag]]
end)
end
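# Backpropagates the output error de_dy to the premise (antecedent) parameters
# via the chain rule:
# de/da_ij = de_dy * (dy/dnw_i) * (dnw_i/dw_i) * (dw_i/dmu_ij) * (dmu_ij/da_ij)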
def backward_pass(
de_dy,
%{
antecedent: antecedent,
sets_in_rules: sets_in_rules,
learning_rate: learning_rate
},
%{
fuzzy_antecedent: fuzzy_antecedent,
fuzzy_consequent: fuzzy_consequent,
input_vector: input_vector
}
) do
ant_list =
antecedent
|> Enum.map(fn antecedent -> antecedent.tag end)
|> Enum.with_index()
w = get_w(fuzzy_consequent) |> List.flatten()
n_w = Enum.map(w, fn w_i -> w_i / Enum.sum(w) end)
# inputs loop
for {ant_tag, i_index} <- ant_list, reduce: [] do
acc ->
# Sets loop
de_da =
for fuzzy_set <- fuzzy_antecedent[ant_tag].fuzzy_sets, reduce: [] do
acc ->
# Get dependent rules.
sets = Enum.map(sets_in_rules, fn sets -> Enum.at(sets, i_index) end)
w_d =
for {{w_i, set}, w_index} <- Enum.zip(w, sets) |> Enum.with_index(),
fuzzy_set.tag == set,
do: {w_i, w_index}
muij = fuzzy_antecedent[ant_tag].mf_values[fuzzy_set.tag]
# Premise parameters loop
de_dag =
for {_aij, g_index} <- Enum.with_index(fuzzy_set.mf_params), reduce: [] do
acc ->
dmuij_daij =
derivative(fuzzy_set, Enum.at(input_vector, i_index), muij, g_index)
de_daij =
for {w_i, w_index} <- w_d, reduce: 0 do
acc ->
dwi_dmuij = dwi_dmuij(w_i, muij)
sum_dy_dwi =
for {fi, k_index} <- Enum.with_index(fuzzy_consequent.rule_output),
reduce: 0 do
acc ->
dy_dnwi = fi
dnwi_dwi = dnwi_dwi(n_w, w, w_index, k_index)
acc + dy_dnwi * dnwi_dwi
end
acc + de_dy * sum_dy_dwi * dwi_dmuij * dmuij_daij
end
acc ++ [de_daij]
end
acc ++ [de_dag]
end
acc ++ [Variable.update(fuzzy_antecedent[ant_tag], de_da, learning_rate)]
end
end
defp dnwi_dwi(n_w, w, w_index, k_index) when w_index == k_index do
with n_w_k <- Enum.at(n_w, k_index),
w_k <- Enum.at(w, k_index),
true <- w_k != 0 do
n_w_k * (1 - n_w_k) / w_k
else
_ ->
0
end
end
defp dnwi_dwi(n_w, w, _w_index, k_index) do
with n_w_k <- Enum.at(n_w, k_index),
w_k <- Enum.at(w, k_index),
true <- w_k != 0 do
-:math.pow(n_w_k, 2) / w_k
else
_ ->
0
end
end
defp dwi_dmuij(0.0, 0.0), do: 1
defp dwi_dmuij(w_i, 0.0), do: w_i / 1.0e-10
defp dwi_dmuij(w_i, muij), do: w_i / muij
defp compute_output_level(cons_var, input_vector) do
rules_output =
Enum.reduce(cons_var.fuzzy_sets, [], fn output_fuzzy_set, acc ->
output_value =
for _ <- cons_var.mf_values[output_fuzzy_set.tag], into: [] do
output_fuzzy_set.mf.(input_vector)
end
acc ++ output_value
end)
%{cons_var | rule_output: rules_output}
end
def weighted_average_method(%Variable{type: type} = fuzzy_var) when type == :consequent do
fuzzy_var
|> build_fuzzy_sets_strength_list()
|> fuzzy_to_crisp(fuzzy_var.rule_output, 0, 0)
end
defp build_fuzzy_sets_strength_list(%Variable{fuzzy_sets: fuzzy_sets, mf_values: mf_values}) do
Enum.reduce(fuzzy_sets, [], fn fuzzy_set, acc -> acc ++ mf_values[fuzzy_set.tag] end)
end
defp fuzzy_to_crisp([], _input, nom, den), do: nom / den
defp fuzzy_to_crisp([fs_strength | f_tail], [input | i_tail], nom, den) do
nom = nom + fs_strength * input
den = den + fs_strength
fuzzy_to_crisp(f_tail, i_tail, nom, den)
end
def least_square_estimate(a_matrix, b_matrix, initial_gamma, state) do
consequent_args_size = a_matrix |> Enum.at(0) |> Enum.count()
s_matrix = Nx.eye(consequent_args_size) |> Nx.multiply(initial_gamma)
x_vector = List.duplicate([0], consequent_args_size) |> Nx.tensor()
{_s_matrix, x_vector} =
for {at, bt} <- Enum.zip(a_matrix, b_matrix), reduce: {s_matrix, x_vector} do
{s_matrix, x_vector} ->
at = Nx.tensor([at])
a = Nx.transpose(at)
at_s = multiply(at, s_matrix)
s_a_at_s =
s_matrix
|> multiply(a)
|> multiply(at_s)
at_s_a_p1 =
at_s
|> multiply(a)
|> Nx.add(1)
s_matrix = Nx.subtract(s_matrix, Nx.divide(s_a_at_s, at_s_a_p1))
at_x = multiply(at, x_vector)
x_vector =
s_matrix
|> multiply(a)
|> multiply(Nx.subtract(bt, at_x))
|> Nx.add(x_vector)
{s_matrix, x_vector}
end
x_vector_lt = x_vector |> Nx.to_flat_list()
Variable.update(state.consequent, x_vector_lt)
end
defp multiply(a, b), do: Nx.dot(a, [1], b, [0])
end
|
lib/engine_adapters/anfis.ex
| 0.803675
| 0.525917
|
anfis.ex
|
starcoder
|
defmodule OptionsTrackerWeb.PositionLive.Helpers do
alias OptionsTracker.Accounts
alias OptionsTracker.Accounts.Position
alias OptionsTrackerWeb.Router.Helpers, as: Routes
@spec type_display(Position.t()) :: String.t()
def type_display(%Position{type: :stock, basis: basis}) when not is_nil(basis), do: "#{OptionsTrackerWeb.LiveHelpers.currency_string(basis)} basis"
def type_display(%Position{type: :stock, basis: nil}), do: "$0.00 basis"
def type_display(%Position{type: :call}), do: "Call"
def type_display(%Position{type: :put}), do: "Put"
def type_display(%Position{type: :call_spread}), do: "Call Spread"
def type_display(%Position{type: :put_spread}), do: "Put Spread"
def type_display_class(%Position{type: :stock}), do: "is-info"
def type_display_class(%Position{type: :call}), do: "is-success"
def type_display_class(%Position{type: :call_spread}), do: "is-success is-light"
def type_display_class(%Position{type: :put}), do: "is-danger"
def type_display_class(%Position{type: :put_spread}), do: "is-danger is-light"
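# Break-even is the strike shifted by the absolute premium (up for calls,
# down for puts), further adjusted by the accumulated profit/loss from rolls,
# which is stored per contract and therefore divided by 100.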
def break_even(%Position{type: call, strike: price, short: short, accumulated_profit_loss: accumulated_profit_loss, premium: premium}) when call in [:call, :call_spread] do
accumulated = Decimal.div(accumulated_profit_loss || Decimal.from_float(0.0), Decimal.from_float(100.0))
if short do
Decimal.add(price, Decimal.abs(premium)) |> Decimal.add(accumulated)
else
Decimal.add(price, Decimal.abs(premium)) |> Decimal.sub(accumulated)
end
end
def break_even(%Position{type: put, strike: price, short: short, accumulated_profit_loss: accumulated_profit_loss, premium: premium}) when put in [:put, :put_spread] do
accumulated = Decimal.div(accumulated_profit_loss || Decimal.from_float(0.0), Decimal.from_float(100.0))
if short do
Decimal.sub(price, Decimal.abs(premium)) |> Decimal.sub(accumulated)
else
Decimal.sub(price, Decimal.abs(premium)) |> Decimal.add(accumulated)
end
end
def break_even_premium(%Position{short: true, accumulated_profit_loss: accumulated_profit_loss, premium: premium}) do
accumulated = Decimal.div(accumulated_profit_loss || Decimal.from_float(0.0), Decimal.from_float(100.0))
Decimal.add(Decimal.abs(premium), accumulated)
end
def break_even_premium(%Position{short: false, accumulated_profit_loss: accumulated_profit_loss, premium: premium}) do
accumulated = Decimal.div(accumulated_profit_loss || Decimal.from_float(0.0), Decimal.from_float(100.0))
Decimal.sub(Decimal.abs(premium), accumulated)
end
def rolled_cost(rolled_premium, exit_price, true) do
Decimal.sub(Decimal.abs(rolled_premium), Decimal.abs(exit_price))
end
def rolled_cost(rolled_premium, exit_price, false) do
Decimal.sub(Decimal.abs(exit_price), Decimal.abs(rolled_premium))
end
def count_type(%Position{type: :stock, count: 1}), do: "share"
def count_type(%Position{type: :stock, count: c}) when c > 1, do: "shares"
def count_type(_position), do: "lot"
@spec position_type_map :: Keyword.t()
def position_type_map() do
Accounts.list_position_types()
|> Keyword.keys()
# [:call, :call_spread, :put, :put_spread, :stock] This order makes hitting `c` or `p` take you to call and put first, and then to the spreads
|> Enum.sort(:asc)
|> Enum.map(fn type -> {Accounts.name_for_position_type(type), type} end)
end
@spec position_type_map(atom | nil) :: non_neg_integer | nil
def position_type_map(nil), do: nil
def position_type_map(type) do
Accounts.list_position_types()
|> Enum.find(fn {t, value} -> t == type || value == type end)
|> elem(1)
end
@spec position_status_map(atom | non_neg_integer, atom | boolean) ::
Keyword.t() | non_neg_integer()
def position_status_map(type, past_tense \\ false)
def position_status_map(type, past_tense) when is_boolean(past_tense) do
Accounts.list_position_statuses(type)
|> Enum.reject(fn {s, _value} -> s in [:rolled] end)
|> Enum.map(fn {status, _value} ->
{Accounts.name_for_position_status(status, past_tense), status}
end)
end
def position_status_map(_type, nil), do: nil
def position_status_map(type, status) when is_atom(status) do
Accounts.list_position_statuses(type)
|> Enum.find(fn {s, value} -> s == status || value == status end)
|> elem(1)
end
@spec position_status_display(atom, atom | non_neg_integer, boolean) :: String.t()
def position_status_display(type, nil, past_tense) do
position_status_display(type, Accounts.position_status_open(), past_tense)
end
def position_status_display(type, status, past_tense) do
Accounts.list_position_statuses(type)
|> Enum.find(fn {s, value} -> s == status || value == status end)
|> elem(0)
|> Accounts.name_for_position_status(past_tense)
end
@spec max_profit(OptionsTracker.Accounts.Position.t()) :: Decimal.t()
def max_profit(%Position{} = position) do
Accounts.calculate_max_profit(position)
end
@spec credit_debit_display(Decimal.t()) :: String.t()
def credit_debit_display(%Decimal{} = value) do
value_string =
value
|> Decimal.abs()
|> OptionsTrackerWeb.LiveHelpers.currency_string()
credit_debit_str = if(Decimal.cmp(value, Decimal.new(0)) in [:eq, :gt], do: "cr", else: "db")
"#{value_string}#{credit_debit_str}"
end
@spec is_option?(
%{
data: OptionsTracker.Accounts.Position.t() | map,
params: nil | maybe_improper_list | map
}
| OptionsTracker.Accounts.Position.t()
) :: boolean
def is_option?(%Phoenix.HTML.Form{params: params, data: %Position{} = position}) do
type = params["type"] || position.type
cond do
is_atom(type) -> type != OptionsTracker.Accounts.Position.TransType.stock_key()
is_binary(type) -> type != to_string(OptionsTracker.Accounts.Position.TransType.stock_key())
is_number(type) -> type != OptionsTracker.Accounts.Position.TransType.stock()
end
end
def is_option?(%Phoenix.HTML.Form{data: %{}}) do
true
end
def is_option?(%Position{type: type}) do
type != OptionsTracker.Accounts.Position.TransType.stock_key()
end
@spec is_naked?(OptionsTracker.Accounts.Position.t()) :: boolean
def is_naked?(%Position{type: type}) when type in [:call, :put], do: true
def is_naked?(_position), do: false
@spec is_short?(%{
data: OptionsTracker.Accounts.Position.t() | map,
params: nil | maybe_improper_list | map
}) :: any
def is_short?(%{params: params, data: %Position{} = position}) do
short = params["short"] || position.short
if is_binary(short) do
short != "" && short != "false"
else
short
end
end
def is_short?(%{data: %{}}) do
true
end
def calculate_roi(%Decimal{} = max_profit, %Decimal{} = max_loss) do
if Decimal.cmp(max_loss, 0) == :eq do
Decimal.from_float(0.0)
else
Decimal.div(max_profit, max_loss)
end
end
@spec is_closed?(OptionsTracker.Accounts.Position.t()) :: boolean
def is_closed?(%Position{status: status}) do
status != Accounts.position_status_open() && status != Accounts.position_status_open_key()
end
@spec is_open?(OptionsTracker.Accounts.Position.t()) :: boolean
def is_open?(%Position{status: status}) do
status == Accounts.position_status_open() || status == Accounts.position_status_open_key()
end
@spec is_spread?(
%{
data: OptionsTracker.Accounts.Position.t() | map,
params: nil | maybe_improper_list | map
}
| OptionsTracker.Accounts.Position.t()
) :: boolean
def is_spread?(%Phoenix.HTML.Form{params: params, data: %Position{} = position}) do
type = params["type"] || position.type
OptionsTracker.Accounts.Position.TransType.call_spread?(type) ||
OptionsTracker.Accounts.Position.TransType.put_spread?(type)
end
def is_spread?(%Phoenix.HTML.Form{data: %{}}) do
false
end
def is_spread?(%Position{type: type}) do
OptionsTracker.Accounts.Position.TransType.call_spread?(type) ||
OptionsTracker.Accounts.Position.TransType.put_spread?(type)
end
def return_to_path(socket, :all) do
Routes.position_index_path(socket, :index)
end
def return_to_path(socket, current_account_id) do
Routes.position_account_index_path(socket, :index, current_account_id)
end
@spec row_class_for_status(:closed | :exercised | :open | :rolled) :: binary
def row_class_for_status(:closed), do: "has-background-grey-lighter"
def row_class_for_status(:open), do: ""
def row_class_for_status(:rolled), do: "has-background-warning-light"
def row_class_for_status(:exercised), do: "has-background-grey-lighter"
@spec date_display(Date.t(), boolean) :: String.t()
def date_display(%Date{year: year, month: month, day: day}, show_year) do
if show_year do
"#{month}/#{day}/#{year}"
else
"#{month}/#{day}"
end
end
def date_display(nil, _), do: ""
end
|
lib/options_tracker_web/live/position_live/helpers.ex
| 0.834069
| 0.494446
|
helpers.ex
|
starcoder
|
defmodule Pongo.Match.Game do
alias Pongo.Match.Vec
@parameters %{
paddle_length: 100,
paddle_height: 20,
ball_radius: 8,
field_width: 920,
field_height: 690,
wall_width: 15
}
@paddle_speed 700 / 1_000_000
@ball_initial_speed 300 / 1_000_000
@ball_max_speed 700 / 1_000_000
@ball_acceleration 10 / 1_000_000 / 1_000_000
@iterations 4
@direction_y_limit 0.33
@ball_x_impulse_on_hit 0.33
@derive Jason.Encoder
defstruct ball: Vec.new(@parameters.field_width / 2, @parameters.field_height / 2),
direction: Vec.new(0, 0),
speed: @ball_initial_speed,
player1: @parameters.field_width / 2,
player2: @parameters.field_width / 2,
last_collision: nil,
id: nil,
sound: nil
def parameters(), do: @parameters
def new(id), do: %__MODULE__{id: id}
def start(state) do
direction = Vec.new(2 * :rand.uniform() - 1, Enum.random([1, -1])) |> Vec.normalize()
%__MODULE__{state | direction: direction, sound: :start}
end
def invert(state) do
%__MODULE__{
state
| player1: state.player2,
player2: state.player1,
ball: Vec.new(state.ball.x, @parameters.field_height - state.ball.y),
direction: Vec.new(state.direction.x, -state.direction.y)
}
end
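# Advances the simulation by time_step (presumably in microseconds, given the
# speed constants above), sub-stepping @iterations times so a fast ball does
# not tunnel through paddles or walls between collision checks.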
def advance(state, time_step, player1_speed, player2_speed) do
state = %{state | sound: nil}
iteration_step = time_step / @iterations
state =
Enum.reduce(1..@iterations, state, fn _i, state ->
objects = objects(state.player1, player1_speed, state.player2, player2_speed)
speed = min(state.speed + @ball_acceleration * iteration_step, @ball_max_speed)
object_index = collide(state.ball, objects)
state =
if object_index not in [nil, state.last_collision] do
direction = objects |> Enum.at(object_index) |> bounce(state.direction)
%{state | direction: direction, last_collision: object_index, sound: :bounce}
else
state
end
ball = state.direction |> Vec.mul(speed * iteration_step) |> Vec.add(state.ball)
player1 = limit_player(state.player1 + player1_speed * @paddle_speed * iteration_step)
player2 = limit_player(state.player2 + player2_speed * @paddle_speed * iteration_step)
%__MODULE__{state | player1: player1, player2: player2, ball: ball, speed: speed}
end)
cond do
state.ball.y < -@parameters.ball_radius ->
:player1_wins
state.ball.y > @parameters.field_height + @parameters.ball_radius ->
:player2_wins
true ->
{:ok, state}
end
end
@left_edge 0 + @parameters.wall_width + @parameters.paddle_length / 2
@right_edge @parameters.field_width - @parameters.wall_width - @parameters.paddle_length / 2
defp limit_player(position), do: clamp(position, @left_edge, @right_edge)
defp clamp(value, min, max), do: min(max, max(min, value))
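# Segment/circle intersection: solves |from + t * (to - from) - ball|^2 = r^2
# for t in [0, 1]. Here a and b each carry an extra factor of 2, so the
# discriminant is written b*b - 2*a*c (equal to the usual b'^2 - 4*a'*c).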
def hits?(ball, from, to) do
r = @parameters.ball_radius
d = Vec.sub(to, from)
f = Vec.sub(from, ball)
a = 2 * Vec.dot(d, d)
b = 2 * Vec.dot(f, d)
c = Vec.dot(f, f) - r * r
discriminant = b * b - 2 * a * c
if discriminant < 0 do
false
else
discriminant = :math.sqrt(discriminant)
t1 = (-b - discriminant) / a
t2 = (-b + discriminant) / a
(t1 >= 0 and t1 <= 1) or (t2 >= 0 and t2 <= 1)
end
end
@wall_left {
Vec.new(@parameters.wall_width, 0),
Vec.new(@parameters.wall_width, @parameters.field_height),
Vec.new(1, 0),
Vec.new(0, 0)
}
@wall_right {
Vec.new(@parameters.field_width - @parameters.wall_width, 0),
Vec.new(@parameters.field_width - @parameters.wall_width, @parameters.field_height),
Vec.new(-1, 0),
Vec.new(0, 0)
}
defp objects(player1, player1_speed, player2, player2_speed) do
paddle1 = {
Vec.new(
player1 - @parameters.paddle_length / 2,
@parameters.field_height - @parameters.paddle_height
),
Vec.new(
player1 + @parameters.paddle_length / 2,
@parameters.field_height - @parameters.paddle_height
),
Vec.new(0, 1),
Vec.new(@ball_x_impulse_on_hit * player1_speed, 0)
}
paddle2 = {
Vec.new(player2 - @parameters.paddle_length / 2, @parameters.paddle_height),
Vec.new(player2 + @parameters.paddle_length / 2, @parameters.paddle_height),
Vec.new(0, -1),
Vec.new(@ball_x_impulse_on_hit * player2_speed, 0)
}
[@wall_left, @wall_right, paddle1, paddle2]
end
defp collide(ball, objects) do
Enum.find_index(objects, fn {from, to, _normal, _impulse} -> hits?(ball, from, to) end)
end
defp limit_direction(%{x: x, y: y}) do
y = if y >= 0, do: max(y, @direction_y_limit), else: min(y, -@direction_y_limit)
Vec.new(x, y)
end
defp bounce({_from, _to, normal, impulse}, direction) do
direction |> Vec.reflect(normal) |> Vec.add(impulse) |> limit_direction() |> Vec.normalize()
end
end
|
lib/pongo/match/game.ex
| 0.781372
| 0.5526
|
game.ex
|
starcoder
|
defmodule Curvy.Signature do
@moduledoc """
Module for converting signature R and S values to DER encoded or compact
binaries.
"""
use Bitwise, only_operators: true
alias Curvy.Curve
defstruct crv: :secp256k1,
r: nil,
s: nil,
recid: nil
@typedoc "ECDSA Signature"
@type t :: %__MODULE__{
crv: atom,
r: integer,
s: integer,
recid: recovery_id | nil
}
@typedoc "Recovery ID"
@type recovery_id :: 0 | 1 | 2 | 3
@crv Curve.secp256k1
@doc """
Parses the given binary signature into a [`Signature`](`t:t`) struct.
Parses DER-encoded and compact signatures. Returns `:error` if unable to parse.
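
For example, parsing a synthetic 65-byte compact signature (prefix `31`
encodes recovery ID 0 with the compressed flag; the `r` and `s` values here
are illustrative only):

    sig = <<31, 1::big-size(256), 2::big-size(256)>>
    %Curvy.Signature{r: 1, s: 2, recid: 0} = Curvy.Signature.parse(sig)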
"""
@spec parse(binary) :: t | :error
def parse(<<0x30, _len, 0x02, rlen, rbin::bytes-size(rlen), 0x02, slen, sbin::bytes-size(slen)>>) do
%__MODULE__{
r: :binary.decode_unsigned(rbin),
s: :binary.decode_unsigned(sbin)
}
end
def parse(<<prefix::integer, r::big-size(256), s::big-size(256)>>) do
recid = case prefix - 27 - 4 do
recid when recid < 0 ->
recid + 4
recid ->
recid
end
%__MODULE__{r: r, s: s, recid: recid}
end
def parse(_sig), do: :error
@doc """
Normalizes the signature by enforcing Low-S values.
Returns a [`Signature`](`t:t`).
See [BIP 62](https://github.com/bitcoin/bips/blob/master/bip-0062.mediawiki)
for more info.
"""
@spec normalize(t) :: t
def normalize(%__MODULE__{s: s} = sig) when s > (@crv.n >>> 1) do
sig
|> Map.put(:s, @crv.n - s)
|> case do
%__MODULE__{recid: recid} = sig when recid in 0..3 ->
Map.put(sig, :recid, Bitwise.bxor(recid, 1))
sig ->
sig
end
end
def normalize(%__MODULE__{} = sig), do: sig
@doc """
Returns the signature as a DER-encoded binary.
"""
@spec to_der(t) :: binary
def to_der(%__MODULE__{r: r, s: s}, _opts \\ []) do
rbin = der_encode_int(r)
sbin = der_encode_int(s)
rlen = byte_size(rbin)
slen = byte_size(sbin)
<<
0x30, # header
2 + rlen + 2 + slen, # length
0x02, # r header
rlen, # r length
rbin::binary, # r
0x02, # s header
slen, # s length
sbin::binary # s
>>
end
@doc """
Returns the signature as a 65 byte compact binary.
"""
@spec to_compact(t, keyword) :: binary
def to_compact(%__MODULE__{r: r, s: s, recid: recid}, opts \\ []) do
with recid when recid in 0..3 <- Keyword.get(opts, :recovery_id, recid) do
prefix = case Keyword.get(opts, :compressed, true) do
true -> recid + 27 + 4
false -> recid + 27
end
<<
prefix, # recovery
r::big-size(256), # r
s::big-size(256) # s
>>
else
_ ->
raise "Recovery ID not in range 0..3"
end
end
# DER encodes the given integer
defp der_encode_int(int) when is_integer(int) do
<<n::integer, _::binary>> = bin = :binary.encode_unsigned(int)
case n &&& 0x80 do
0 -> bin
_ -> <<0, bin::binary>>
end
end
end
|
lib/curvy/signature.ex
| 0.788909
| 0.442155
|
signature.ex
|
starcoder
|
defmodule ICalex.Props.VRecur do
@moduledoc false
use ICalex.Props
alias ICalex.Props
@canonical_keys [
"freq",
"until",
"count",
"interval",
"bysecond",
"byminute",
"byhour",
"byday",
"bymonthday",
"byyearday",
"byweekno",
"bymonth",
"bysetpos",
"wkst"
]
def get_type(type_name, value) do
type_name = String.downcase(type_name)
cond do
type_name == "freq" ->
Props.VFrequency.of(value)
type_name == "until" ->
Props.VDDDTypes.of(value)
type_name in ["wkst", "byday"] ->
Props.VWeekday.of(value)
type_name in [
"count",
"interval",
"bysecond",
"byminute",
"byhour",
"byweekno",
"bymonthday",
"byyearday",
"bymonth",
"bysetpos"
] ->
Props.VInt.of(value)
end
end
def parse_type(type_name, value) do
type_name = String.downcase(type_name)
cond do
type_name == "freq" ->
Props.VFrequency.from(value)
type_name == "until" ->
Props.VDDDTypes.from(value)
type_name in ["wkst", "byday"] ->
Props.VWeekday.from(value)
type_name in [
"count",
"interval",
"bysecond",
"byminute",
"byhour",
"byweekno",
"bymonthday",
"byyearday",
"bymonth",
"bysetpos"
] ->
Props.VInt.from(value)
end
end
defp map_keys_to_downcase(m),
do: Enum.reduce(m, %{}, fn {key, value}, acc -> Map.put(acc, String.downcase(key), value) end)
@enforce_keys [:value]
defstruct ICalex.Props.common_fields()
def of(%{} = value), do: %__MODULE__{value: map_keys_to_downcase(value)}
def from(value) when is_bitstring(value) do
String.split(value, ";")
|> Enum.reduce(%{}, fn key_value, acc ->
[key, value] = String.split(key_value, "=")
key = String.downcase(key)
values = String.split(value, ",") |> Enum.map(&parse_type(key, &1))
Map.put(acc, key, values)
end)
end
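# Serializes known rule parts in canonical order first (FREQ, UNTIL, COUNT, ...),
# followed by any non-canonical keys sorted alphabetically, joined with ";".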
def to_ical(%{value: values} = _data) do
keys = for key <- Map.keys(values), do: String.downcase(key)
non_canonical_keys = keys -- @canonical_keys
sorted_keys = @canonical_keys ++ Enum.sort(non_canonical_keys)
sorted_keys
|> Enum.reduce([], fn key, acc ->
if Map.has_key?(values, key) do
value = Map.get(values, key)
value = if is_list(value), do: value, else: [value]
value = value |> Enum.map(fn v -> ICal.to_ical(get_type(key, v)) end) |> Enum.join(",")
["#{String.upcase(key)}=#{value}" | acc]
else
acc
end
end)
|> Enum.reverse()
|> Enum.join(";")
end
defimpl ICal do
def to_ical(data), do: ICalex.Props.VRecur.to_ical(data)
end
end
|
lib/props/v_recur.ex
| 0.587233
| 0.419886
|
v_recur.ex
|
starcoder
|
defmodule AWS.Route53RecoveryCluster do
@moduledoc """
Welcome to the Routing Control (Recovery Cluster) API Reference Guide for Amazon
Route 53 Application Recovery Controller.
With Route 53 ARC, you can use routing control with extreme reliability to
recover applications by rerouting traffic across Availability Zones or Amazon
Web Services Regions. Routing controls are simple on/off switches hosted on a
highly available cluster in Route 53 ARC. A cluster provides a set of five
redundant Regional endpoints against which you can run API calls to get or
update the state of routing controls. To implement failover, you set one routing
control On and another one Off, to reroute traffic from one Availability Zone or
Amazon Web Services Region to another.
*Be aware that you must specify a Regional endpoint for a cluster when you work
with API cluster operations to get or update routing control states in Route 53
ARC.* In addition, you must specify the US West (Oregon) Region for Route 53 ARC
API calls. For example, use the parameter `--region us-west-2` with AWS CLI
commands. For more information, see [ Get and update routing control states using the
API](https://docs.aws.amazon.com/r53recovery/latest/dg/routing-control.update.api.html)
in the Amazon Route 53 Application Recovery Controller Developer Guide.
This API guide includes information about the API operations for how to get and
update routing control states in Route 53 ARC. To work with routing control in
Route 53 ARC, you must first create the required components (clusters, control
panels, and routing controls) using the recovery cluster configuration API.
For more information about working with routing control in Route 53 ARC, see the
following:
* Create clusters, control panels, and routing controls by using API
operations. For more information, see the [Recovery Control Configuration API Reference Guide for Amazon Route 53 Application Recovery
Controller](https://docs.aws.amazon.com/recovery-cluster/latest/api/).
* Learn about the components in recovery control, including
clusters, routing controls, and control panels, and how to work with Route 53
ARC in the Amazon Web Services console. For more information, see [ Recovery control
components](https://docs.aws.amazon.com/r53recovery/latest/dg/introduction-components.html#introduction-components-routing)
in the Amazon Route 53 Application Recovery Controller Developer Guide.
* Route 53 ARC also provides readiness checks that continually audit
resources to help make sure that your applications are scaled and ready to
handle failover traffic. For more information about the related API operations,
see the [Recovery Readiness API Reference Guide for Amazon Route 53 Application Recovery
Controller](https://docs.aws.amazon.com/recovery-readiness/latest/api/).
* For more information about creating resilient applications and
preparing for recovery readiness with Route 53 ARC, see the [Amazon Route 53 Application Recovery Controller Developer
Guide](https://docs.aws.amazon.com/r53recovery/latest/dg/).
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2019-12-02",
content_type: "application/x-amz-json-1.0",
credential_scope: nil,
endpoint_prefix: "route53-recovery-cluster",
global?: false,
protocol: "json",
service_id: "Route53 Recovery Cluster",
signature_version: "v4",
signing_name: "route53-recovery-cluster",
target_prefix: "ToggleCustomerAPI"
}
end
@doc """
Get the state for a routing control.
A routing control is a simple on/off switch that you can use to route traffic to
cells. When a routing control state is On, traffic flows to a cell. When the
state is Off, traffic does not flow.
Before you can create a routing control, you must first create a cluster, and
then host the control in a control panel on the cluster. For more information,
see [ Create routing control structures](https://docs.aws.amazon.com/r53recovery/latest/dg/routing-control.create.html)
in the Amazon Route 53 Application Recovery Controller Developer Guide. You
access one of the endpoints for the cluster to get or update the routing control
state to redirect traffic for your application.
*You must specify Regional endpoints when you work with API cluster operations
to get or update routing control states in Route 53 ARC.*
To see a code example for getting a routing control state, including accessing
Regional cluster endpoints in sequence, see [API examples](https://docs.aws.amazon.com/r53recovery/latest/dg/service_code_examples_actions.html)
in the Amazon Route 53 Application Recovery Controller Developer Guide.
Learn more about working with routing controls in the following topics in the
Amazon Route 53 Application Recovery Controller Developer Guide:
* [ Viewing and updating routing control states](https://docs.aws.amazon.com/r53recovery/latest/dg/routing-control.update.html)
* [Working with routing controls in Route 53 ARC](https://docs.aws.amazon.com/r53recovery/latest/dg/routing-control.html)
"""
def get_routing_control_state(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetRoutingControlState", input, options)
end
@doc """
List routing control names and Amazon Resource Names (ARNs), as well as the
routing control state for each routing control, along with the control panel
name and control panel ARN for the routing controls.
If you specify a control panel ARN, this call lists the routing controls in the
control panel. Otherwise, it lists all the routing controls in the cluster.
A routing control is a simple on/off switch in Route 53 ARC that you can use to
route traffic to cells. When a routing control state is On, traffic flows to a
cell. When the state is Off, traffic does not flow.
Before you can create a routing control, you must first create a cluster, and
then host the control in a control panel on the cluster. For more information,
see [ Create routing control structures](https://docs.aws.amazon.com/r53recovery/latest/dg/routing-control.create.html)
in the Amazon Route 53 Application Recovery Controller Developer Guide. You
access one of the endpoints for the cluster to get or update the routing control
state to redirect traffic for your application.
*You must specify Regional endpoints when you work with API cluster operations
to use this API operation to list routing controls in Route 53 ARC.*
Learn more about working with routing controls in the following topics in the
Amazon Route 53 Application Recovery Controller Developer Guide:
* [ Viewing and updating routing control states](https://docs.aws.amazon.com/r53recovery/latest/dg/routing-control.update.html)
* [Working with routing controls in Route 53 ARC](https://docs.aws.amazon.com/r53recovery/latest/dg/routing-control.html)
"""
def list_routing_controls(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListRoutingControls", input, options)
end
@doc """
Set the state of the routing control to reroute traffic.
You can set the value to be On or Off. When the state is On, traffic flows to a
cell. When the state is Off, traffic does not flow.
With Route 53 ARC, you can add safety rules for routing controls, which are
safeguards for routing control state updates that help prevent unexpected
outcomes, like fail open traffic routing. However, there are scenarios when you
might want to bypass the routing control safeguards that are enforced with
safety rules that you've configured. For example, you might want to fail over
quickly for disaster recovery, and one or more safety rules might be
unexpectedly preventing you from updating a routing control state to reroute
traffic. In a "break glass" scenario like this, you can override one or more
safety rules to change a routing control state and fail over your application.
The `SafetyRulesToOverride` property enables you to override one or more safety
rules and update routing control states. For more information, see [ Override safety rules to reroute
traffic](https://docs.aws.amazon.com/r53recovery/latest/dg/routing-control.override-safety-rule.html)
in the Amazon Route 53 Application Recovery Controller Developer Guide.
*You must specify Regional endpoints when you work with API cluster operations
to get or update routing control states in Route 53 ARC.*
To see a code example for getting a routing control state, including accessing
Regional cluster endpoints in sequence, see [API examples](https://docs.aws.amazon.com/r53recovery/latest/dg/service_code_examples_actions.html)
in the Amazon Route 53 Application Recovery Controller Developer Guide.
* [ Viewing and updating routing control states](https://docs.aws.amazon.com/r53recovery/latest/dg/routing-control.update.html)
* [Working with routing controls overall](https://docs.aws.amazon.com/r53recovery/latest/dg/routing-control.html)
"""
def update_routing_control_state(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateRoutingControlState", input, options)
end
@doc """
Set multiple routing control states.
You can set the value for each state to be On or Off. When the state is On,
traffic flows to a cell. When it's Off, traffic does not flow.
With Route 53 ARC, you can add safety rules for routing controls, which are
safeguards for routing control state updates that help prevent unexpected
outcomes, like fail open traffic routing. However, there are scenarios when you
might want to bypass the routing control safeguards that are enforced with
safety rules that you've configured. For example, you might want to fail over
quickly for disaster recovery, and one or more safety rules might be
unexpectedly preventing you from updating a routing control state to reroute
traffic. In a "break glass" scenario like this, you can override one or more
safety rules to change a routing control state and fail over your application.
The `SafetyRulesToOverride` property enables you to override one or more safety
rules and update routing control states. For more information, see [ Override safety rules to reroute
traffic](https://docs.aws.amazon.com/r53recovery/latest/dg/routing-control.override-safety-rule.html)
in the Amazon Route 53 Application Recovery Controller Developer Guide.
*You must specify Regional endpoints when you work with API cluster operations
to get or update routing control states in Route 53 ARC.*
To see a code example for getting a routing control state, including accessing
Regional cluster endpoints in sequence, see [API examples](https://docs.aws.amazon.com/r53recovery/latest/dg/service_code_examples_actions.html)
in the Amazon Route 53 Application Recovery Controller Developer Guide.
* [ Viewing and updating routing control states](https://docs.aws.amazon.com/r53recovery/latest/dg/routing-control.update.html)
* [Working with routing controls overall](https://docs.aws.amazon.com/r53recovery/latest/dg/routing-control.html)
"""
def update_routing_control_states(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateRoutingControlStates", input, options)
end
end
|
lib/aws/generated/route53_recovery_cluster.ex
| 0.937045
| 0.612773
|
route53_recovery_cluster.ex
|
starcoder
|
defmodule TypeCheck.Options do
import TypeCheck.Internals.Bootstrap.Macros
@moduledoc """
Defines the options that TypeCheck supports on calls to `use TypeCheck`.
Supported options:
- `:overrides`: A list of overrides for remote types. (default: `[]`)
- `:default_overrides`: A boolean. If false, will not include any of the overrides of the types of Elixir's standard library (c.f. `TypeCheck.DefaultOverrides.default_overrides/0`). (default: `true`)
- `:enable_runtime_checks`: When true, functions that contain a `@spec!` will be wrapped with a runtime check which will check the input to and result returned from the function. (Default: `true`).
- `:debug`: When true, will (at compile-time) print the generated TypeCheck-checking code. (Default: `false`)
These options are usually specified as passed to `use TypeCheck`,
although they may also be passed in direct calls to `TypeCheck.conforms/3` (and its variants).
These options are module-specific and are read/used at compile-time.
## The supported options in detail
### Overrides:
The `:overrides` field contains a list of remote types to be overridden by a replacement.
This is useful to be able to specify TypeCheck-types for types that you do not have control over
(because they are for instance defined in a library that is not itself using TypeCheck).
For obvious reasons, using TypeCheck directly should be preferred over overriding types.
Each of the elements in the `:overrides` list should be written as `{original_type, replacement_type}`.
Both of these can take the shape of either `&Module.type/arity` or the longer form `{Module, :type, arity}`.
An example:
```elixir
use TypeCheck, overrides: [
  {&Ecto.Schema.t/0, &MyProject.TypeCheckOverrides.Ecto.Schema.t/0}
]
```
### Enabling/Disabling runtime checks
By default, runtime checks are enabled.
In the case where the runtime checks turn out to be too slow (for instance, because of working with very large or deeply nested collections) in a particular module,
they can be turned off completely.
It is recommended to:
- Only turn them off after benchmarking has shown that this will make a significant difference.
- Only turn them off in e.g. the production environment, keeping them on in the development and test environments.
An example:
```elixir
use TypeCheck, enable_runtime_checks: Mix.env() != :prod
```
### Debugging
Passing the option `debug: true` will at compile-time print the generated code
for all added `@spec`s, as well as `TypeCheck.conforms/3`/`TypeCheck.conforms?/3`/`TypeCheck.conforms!/3` calls.
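For example (an illustrative sketch):

```elixir
use TypeCheck, debug: true
```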
"""
if_recompiling? do
use TypeCheck
@type! remote_type() :: mfa() | function
@typedoc """
An extra check is performed to ensure that the original type
and the replacement type have the same arity.
"""
@type! type_override :: {original :: remote_type(), replacement :: remote_type()}
@type! type_overrides :: list(type_override())
@type! t :: %TypeCheck.Options{
overrides: type_overrides(),
default_overrides: boolean(),
enable_runtime_checks: boolean(),
debug: boolean(),
}
else
@type remote_type() :: mfa | function
@type type_override :: {remote_type(), remote_type()}
@type t :: %TypeCheck.Options{
overrides: list(type_override()),
default_overrides: boolean(),
enable_runtime_checks: boolean(),
debug: boolean(),
}
end
defstruct [overrides: [], default_overrides: true, enable_runtime_checks: true, debug: false]
def new() do
%__MODULE__{overrides: default_overrides()}
end
def new(already_struct = %__MODULE__{}) do
# IO.inspect(already_struct, label: "Inside first new clause")
already_struct
end
if_recompiling? do
@spec! new(enum :: any()) :: t()
end
def new(enum) do
# IO.inspect(enum, label: "Inside second new clause")
raw_overrides = Keyword.get(enum, :overrides, [])
debug = Keyword.get(enum, :debug, false)
enable_runtime_checks = Keyword.get(enum, :enable_runtime_checks, true)
overrides = check_overrides!(raw_overrides)
overrides =
if Access.get(enum, :default_overrides, true) do
overrides ++ default_overrides()
else
overrides
end
%__MODULE__{
overrides: overrides,
enable_runtime_checks: enable_runtime_checks,
debug: debug
}
end
if_recompiling? do
@spec! check_overrides!(overrides :: type_overrides()) :: type_overrides()
end
def check_overrides!(overrides) do
Enum.map(overrides, &check_override!/1)
end
defp default_overrides() do
case Code.ensure_loaded(TypeCheck.DefaultOverrides) do
{:error, _problem} -> []
{:module, _} -> apply(TypeCheck.DefaultOverrides, :default_overrides, [])
end
end
defp check_override!({original, override}) do
{module_k, function_k, arity_k} = ensure_external_function!(original)
{module_v, function_v, arity_v} = ensure_external_function!(override)
if arity_k != arity_v do
raise TypeCheck.CompileError, "Error while parsing TypeCheck overides: override #{inspect(override)} does not have same arity as original type #{inspect(original)}."
else
{
{module_k, function_k, arity_k},
{module_v, function_v, arity_v}
}
end
end
defp check_override!(other) do
raise TypeCheck.CompileError, "`check_overrides!` expects a list of two-element tuples `{mfa, mfa}` where `mfa` is either `{Module, function, arity}` or `&Module.function/arity`. However, an element not adhering to the `{mfa, mfa}` format was found: `#{inspect(other)}`."
end
defp ensure_external_function!(fun) when is_function(fun) do
case Function.info(fun, :type) do
{:type, :external} ->
info = Function.info(fun)
{info[:module], info[:name], info[:arity]}
_other ->
raise TypeCheck.CompileError, "Error while parsing TypeCheck overides: #{inspect(fun)} is not an external function of the format `&Module.function/arity`!"
end
end
defp ensure_external_function!({module, function, arity}) when is_atom(module) and is_atom(function) and arity >= 0 do
{module, function, arity}
end
defp ensure_external_function!(fun) do
raise TypeCheck.CompileError, "Error while parsing TypeCheck overides: #{inspect(fun)} is not a function!"
end
end
|
lib/type_check/options.ex
|
defmodule JrtpBridge do
@moduledoc """
JrtpBridge - JSON/REST Transport Protocol Bridge
Supports the REST methodology for accessing points on the Hub, using JSON as
the notation for the state.
GET /a/point Maps to Hub.deltas
PUT /a/point Maps to Hub.update
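An illustrative long-poll request (host and point path are placeholders; the `x-since-version`, `x-long-poll` and `x-long-poll-timeout` headers are the ones handled by `rfc7386_provider/2` below):

    curl -H "x-since-version: <vlock>:42" -H "x-long-poll: true" http://device.local/a/some/point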
"""
alias :cowboy_req, as: CowboyReq
alias Nerves.Hub, as: Hub
@doc false
def init(_transport, _req, _state) do
{:upgrade, :protocol, :cowboy_rest} #, req, state}
end
@doc false
def rest_init(req, handler_opts) do
{:ok, req, handler_opts}
end
@doc false
def resource_exists(req, state) do
case Hub.fetch(request_path(req)) do
{_, :error} -> {false, req, state}
{_, _} -> {true, req, state}
end
end
@doc false
def allowed_methods(req, state) do
{["GET", "PUT", "POST", "OPTIONS", "DELETE"], req, state}
end
@doc false
def content_types_provided(req, state) do
{[{"application/merge-patch+json", :rfc7386_provider},
{"application/json", :json_provider},
{"text/html", :html_provider},
{"text/plain", :text_provider}
], req, state}
end
@doc """
is_authorized/2 is a callback in the cowboy request process. To
enable authorization, pass a function under the `:auth_fun` key in state;
it is used to determine authorization. If no function is passed, all
requests will be accepted.
Example function requiring authorization for PUT and POST commands:
```elixir
{method, req} = :cowboy_req.method(req)
if method == "PUT" or method == "POST" do
case :cowboy_req.parse_header("authorization", req) do
{:ok, {"basic", {user, password}}, req} ->
if (user == "test" and password == "<PASSWORD>") do
{true, req, state}
else
{{false, <<"Basic realm=\"Authorized Area\"">>}, req, state}
end
_ -> {{false, <<"Basic realm=\"Authorized Area\"">>}, req, state}
end
else
{true, req, state}
end
```
"""
def is_authorized(req, state) do
if state[:auth_fun] do
state.auth_fun.(req, state)
else
{true, req, state}
end
end
@doc false
def rfc7386_provider(req, state) do
path = request_path(req)
{vers_header, req} = CowboyReq.header("x-since-version", req)
vreq = vheader_to_ver(vers_header)
{long_poll, req} = CowboyReq.header("x-long-poll", req)
{long_poll_header_value, req} = CowboyReq.header("x-long-poll-timeout", req)
long_poll_timeout = case long_poll_header_value do
:undefined -> 30000
n -> String.to_integer n
end
{vres, tree} = case long_poll do
:undefined -> Hub.deltas(vreq, path)
_ ->
start_result = case Dict.get(state, :on_wait_start) do
nil -> nil
wait_start_fn -> wait_start_fn.()
end
Hub.watch(path, [])
r = wait_for_version_after(vreq, path, long_poll_timeout)
Hub.unwatch(path)
case Dict.get(state, :on_wait_end) do
nil -> nil
wait_end_fn -> wait_end_fn.(start_result)
end
r
end
{vlock_req, _} = vreq
{vlock_res, _} = vres
#{set_time, req} = CowboyReq.header("x-set-time", req)
req = CowboyReq.set_resp_header("x-version", ver_to_vheader(vres), req)
req = invoke_response_hook_if_present(req, state)
req = case vlock_res do
v when v == vlock_req -> req
_ -> CowboyReq.set_resp_header("content-type", "application/json", req)
end
case {vreq, tree} do
{{:undefined, 0}, []} -> {"", req, state}
{_, []} ->
{:ok, req} = CowboyReq.reply(304, [], req)
{:halt, req, state}
_ ->
body = erl_to_json tree
{ body <> "\n", req, state}
end
end
@doc false
def json_provider(req, state) do
path = request_path(req)
{vres, tree} = Hub.deltas({:undefined, 0}, path)
req = CowboyReq.set_resp_header("x-version", ver_to_vheader(vres), req)
req = CowboyReq.set_resp_header("access-control-allow-methods", "GET, OPTIONS", req)
req = CowboyReq.set_resp_header("access-control-allow-origin", "*", req)
req = invoke_response_hook_if_present(req, state)
case tree do
[] -> {"", req, state}
_ ->
body = erl_to_json tree
{ body <> "\n", req, state}
end
end
@doc false
def html_provider(req, state) do
header = "<html><head><meta charset=\"utf-8\"><title>#{Dict.get(state, :webpage_title)}</title></head><body><pre>"
footer = "</pre></body></html>"
{body, reply, state} = json_provider(req, state)
{header <> body <> footer, reply, state}
end
@doc false
def text_provider(req, state) do
json_provider(req, state)
end
@doc false
def content_types_accepted(req, state) do
{[
{{"application", "merge-patch+json", []}, :rfc7386_acceptor},
{{"application", "json", []}, :json_acceptor},
{{"application", "x-firmware", []}, :firmware_acceptor},
{{"application", "file", []}, :file_acceptor},
{{"application", "x-device-lock", []}, :device_lock_acceptor}
], req, state}
end
def rfc7386_acceptor(req, state) do
json_acceptor(req, state)
end
def json_acceptor(req, state) do
{:ok, request_body, req} = CowboyReq.body(req)
proposed_changes = json_to_erl(request_body)
{method, req} = CowboyReq.method(req)
case Hub.request(request_path(req), proposed_changes, %{http_method: method}) do
{:changes, vres, changes} ->
change_json = erl_to_json(changes)
response_body = change_json <> "\n"
bver = ver_to_vheader(vres)
req = CowboyReq.set_resp_header("x-version", bver, req)
req = CowboyReq.set_resp_body(response_body, req)
{true, req, state}
{:nochanges, vres, _changes} ->
bver = ver_to_vheader(vres)
req = CowboyReq.set_resp_header("x-version", bver, req)
{:ok, req} = CowboyReq.reply(304, [], req)
{:halt, req, state}
{:accepted, location} ->
req = CowboyReq.set_resp_header("Location", location, req)
{:ok, req} = CowboyReq.reply(202, [], req)
{:halt, req, state}
:ok ->
{:ok, req} = CowboyReq.reply(202, [], req)
{:halt, req, state}
_ ->
{:ok, req} = CowboyReq.reply(400, [], req)
{:halt, req, state}
end
end
def firmware_acceptor(req, state) do
case Dict.get(state, :firmware_acceptor) do
nil ->
{:ok, req} = CowboyReq.reply(404, [], req)
{:halt, req, state}
fa -> fa.upload_acceptor(req, state)
end
end
def device_lock_acceptor(req, state) do
case Dict.get(state, :device_lock_acceptor) do
nil ->
{:ok, req} = CowboyReq.reply(404, [], req)
{:halt, req, state}
dla -> dla.device_lock_acceptor(req, state)
end
end
# Handle DELETE method
def delete_resource(req, state) do
case Hub.request(request_path(req), nil, %{http_method: "DELETE"}) do
:ok ->
{:ok, req} = CowboyReq.reply(202, [], req)
{true, req, state}
_ ->
{:ok, req} = CowboyReq.reply(400, [], req)
{true, req, state}
end
end
defp vheader_to_ver(version_header_value) do
case version_header_value do
:undefined -> {:undefined, 0}
s ->
case String.split(s, ":") do
[vlock, vs] -> {vlock, String.to_integer(vs)}
_ -> {:undefined, 0}
end
end
end
defp ver_to_vheader({vlock, ver}) do
bver = Integer.to_string ver
"#{vlock}:#{bver}"
end
defp invoke_response_hook_if_present(req, state) do
case Dict.get(state, :json_provider_hook) do
nil -> req
resp_hook_fn -> resp_hook_fn.(req)
end
end
defp request_path(req) do
{tokens, _} = CowboyReq.path_info(req)
tokens
end
defp wait_for_version_after(vreq, path, long_poll_timeout) do
case Hub.deltas(vreq, path) do
{vq, _} when vq == vreq ->
receive do
_ -> wait_for_version_after(vq, path, long_poll_timeout)
after
long_poll_timeout ->
Hub.deltas(vq, path)
end
{vres, []} ->
wait_for_version_after(vres, path, long_poll_timeout)
{vres, change_tree} ->
{vres, change_tree}
end
end
def erl_to_json(term) do
case JSX.encode(term, [{:space, 1}, {:indent, 2}]) do
{:ok, json} -> json
{:error, _} -> throw(:error)
end
end
def json_to_erl(json) do
case JSX.decode(json, [{:labels, :atom}]) do
{:error, _} -> throw(:error)
{:ok, erl} -> erl |> Enum.into []
end
end
end
|
lib/jrtp_bridge.ex
|
defmodule ExPlasma.Output do
@moduledoc """
An Output.
`output_id` - The identifier scheme for the Output. We currently have two: Position and Id.
`output_type` - An integer value of what type of output data is associated.
`output_data` - The main data for the output. This can be decoded by the different output types.
"""
alias ExPlasma.Output.Position
alias ExPlasma.Transaction.TypeMapper
alias ExPlasma.Utils.RlpDecoder
@type output_id() :: map() | nil
@type output_type() :: non_neg_integer() | nil
@type output_data() :: map() | nil
@type rlp() :: [output_type() | output_data()]
@type t() :: %__MODULE__{
output_id: output_id(),
output_type: output_type(),
output_data: output_data()
}
@type input_position() :: %__MODULE__{
output_id: Position.with_position(),
output_type: nil,
output_data: nil
}
@type output() :: %__MODULE__{
output_id: nil,
output_type: output_type(),
output_data: output_data()
}
@type decoding_error() :: :malformed_output_rlp | mapping_error()
@type mapping_error() :: :malformed_outputs | :unrecognized_output_type | atom()
@type validation_responses() :: :ok | validation_errors()
@type validation_errors() :: {:error, {atom(), atom()}}
# Output Types and Identifiers should implement these.
@callback to_map(any()) :: {:ok, map()} | {:error, atom()}
@callback to_rlp(map()) :: list() | binary() | nil
@callback validate(map()) :: validation_responses()
@output_types_modules TypeMapper.output_type_modules()
defstruct output_id: nil, output_type: nil, output_data: nil
@doc """
Decode RLP data into an Output.
## Examples
# Generate an Output from an RLP list
iex> encoded = <<245, 1, 243, 148, 205, 193, 229, 59, 220, 116, 187, 245, 181, 247, 21, 214,
...> 50, 125, 202, 87, 133, 226, 40, 180, 148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
...> 0, 0, 0, 0, 0, 0, 0, 0, 136, 13, 224, 182, 179, 167, 100, 0, 0>>
iex> ExPlasma.Output.decode(encoded)
{:ok, %ExPlasma.Output{
output_data: %{
amount: 1000000000000000000,
output_guard: <<205, 193, 229, 59, 220, 116, 187, 245, 181, 247, 21, 214, 50, 125, 202, 87, 133, 226, 40, 180>>,
token: <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>},
output_id: nil,
output_type: 1
}}
"""
@spec decode(binary()) :: {:ok, output()} | {:error, decoding_error()}
def decode(data) do
case RlpDecoder.decode(data) do
{:ok, rlp} -> to_map(rlp)
{:error, :malformed_rlp} -> {:error, :malformed_output_rlp}
end
end
@doc """
Throwing version of decode/1
"""
@spec decode!(binary()) :: output() | no_return()
def decode!(data) do
{:ok, output} = decode(data)
output
end
@doc """
Decode RLP input position into an Output.
## Example
iex> encoded_position = <<59, 154, 202, 0>>
iex> ExPlasma.Output.decode_id(encoded_position)
{:ok, %ExPlasma.Output{
output_data: nil,
output_id: %{
blknum: 1,
oindex: 0,
position: 1000000000,
txindex: 0
},
output_type: nil
}}
"""
@spec decode_id(binary()) :: {:ok, input_position()} | {:error, :malformed_input_position_rlp}
def decode_id(data) do
with {:ok, pos} <- Position.decode(data),
{:ok, output_id} <- Position.to_map(pos) do
{:ok, %__MODULE__{output_id: output_id}}
end
end
@doc """
Throwing version of decode_id/1
"""
@spec decode_id!(binary()) :: input_position() | no_return()
def decode_id!(data) do
{:ok, output} = decode_id(data)
output
end
@doc """
Maps the given RLP list into an output.
The RLP list must start with the output type and follow with its data.
Only validates that the RLP is structurally correct.
Does not perform any other kind of validation, use validate/1 for that.
## Examples
iex> rlp = [
...> <<1>>,
...> [
...> <<11, 246, 22, 41, 33, 46, 44, 159, 55, 132, 157, 153, 217, 206, 65, 226, 241, 55, 0, 110>>,
...> <<46, 38, 45, 41, 28, 46, 150, 159, 176, 132, 157, 153, 217, 206, 65, 226, 241, 55, 0, 110>>,
...> <<1>>
...> ]
...>]
iex> ExPlasma.Output.to_map(rlp)
{:ok,
%ExPlasma.Output{
output_data: %{
amount: 1,
output_guard: <<11, 246, 22, 41, 33, 46, 44, 159, 55, 132, 157, 153, 217, 206, 65, 226, 241, 55, 0, 110>>,
token: <<46, 38, 45, 41, 28, 46, 150, 159, 176, 132, 157, 153, 217, 206, 65, 226, 241, 55, 0, 110>>
},
output_id: nil,
output_type: 1}
}
"""
@spec to_map(list()) :: {:ok, output()} | {:error, mapping_error()}
def to_map([raw_output_type | _output_rlp_items] = rlp) do
with {:ok, output_module} <- parse_output_type(raw_output_type),
{:ok, output_data} <- output_module.to_map(rlp) do
{:ok, struct(__MODULE__, output_data)}
end
end
def to_map(_), do: {:error, :malformed_outputs}
@doc """
Maps the given integer position into an output.
Only validates that the RLP is structurally correct.
Does not perform any other kind of validation, use validate/1 for that.
## Examples
iex> pos = 1_000_000_000
iex> ExPlasma.Output.to_map_id(pos)
{:ok, %ExPlasma.Output{output_id: %{position: 1_000_000_000, blknum: 1, txindex: 0, oindex: 0}}}
"""
@spec to_map_id(Position.position()) :: {:ok, input_position()} | {:error, :malformed_output_position}
def to_map_id(position) do
case Position.to_map(position) do
{:ok, output_id} -> {:ok, %__MODULE__{output_id: output_id}}
error -> error
end
end
@doc """
## Examples
# Encode as an Output
iex> output = %ExPlasma.Output{
...> output_data: %{
...> amount: 1000000000000000000,
...> output_guard: <<205, 193, 229, 59, 220, 116, 187, 245, 181, 247, 21, 214, 50, 125, 202, 87, 133, 226, 40, 180>>,
...> token: <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>},
...> output_id: nil,
...> output_type: 1
...> }
iex> ExPlasma.Output.encode(output)
{:ok, <<245, 1, 243, 148, 205, 193, 229, 59, 220, 116, 187, 245, 181, 247, 21, 214,
50, 125, 202, 87, 133, 226, 40, 180, 148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 136, 13, 224, 182, 179, 167, 100, 0, 0>>}
# Encode as an Input
iex> output = %ExPlasma.Output{
...> output_data: nil,
...> output_id: %{
...> blknum: 1,
...> oindex: 0,
...> position: 1000000000,
...> txindex: 0
...> },
...> output_type: nil
...> }
iex> ExPlasma.Output.encode(output, as: :input)
{:ok, <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 59, 154, 202, 0>>}
"""
@spec encode(t() | list()) ::
        {:ok, binary()} | {:error, :invalid_output_id | :invalid_output_data | :unrecognized_output_type}
@spec encode(t(), as: :input) :: {:ok, binary()} | {:error, :invalid_output_id}
def encode(%__MODULE__{} = output, as: :input), do: to_rlp_id(output)
def encode(%__MODULE__{} = output) do
with {:ok, rlp} <- to_rlp(output),
{:ok, encoded} <- encode(rlp) do
{:ok, encoded}
end
end
def encode(rlp_items), do: {:ok, ExRLP.encode(rlp_items)}
@doc """
Encode an Output into RLP bytes
## Example
iex> output = %ExPlasma.Output{
...> output_id: nil,
...> output_type: 1,
...> output_data: %{output_guard: <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1>>, token: <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>, amount: 1}
...> }
iex> ExPlasma.Output.to_rlp(output)
{:ok, [<<1>>, [<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1>>, <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>, <<1>>]]}
"""
@spec to_rlp(output()) :: {:ok, list()} | {:error, :invalid_output_data | :unrecognized_output_type}
def to_rlp(%__MODULE__{output_type: nil}), do: {:error, :invalid_output_data}
def to_rlp(output) do
case get_output_module(output.output_type) do
{:ok, module} -> {:ok, module.to_rlp(output)}
{:error, :unrecognized_output_type} = error -> error
end
end
@doc """
Transforms an Output identifier into an RLP encoded position. This is to generate
the `inputs` in a Transaction.
## Example
iex> output = %ExPlasma.Output{
...> output_data: nil,
...> output_id: %{
...> blknum: 1,
...> oindex: 0,
...> position: 1000000000,
...> txindex: 0
...> },
...> output_type: nil
...> }
iex> ExPlasma.Output.to_rlp_id(output)
{:ok, <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 59, 154, 202, 0>>}
"""
@spec to_rlp_id(input_position()) :: {:ok, binary()} | {:error, :invalid_output_id}
def to_rlp_id(%__MODULE__{output_id: id}) when is_map(id), do: {:ok, Position.to_rlp(id)}
def to_rlp_id(_), do: {:error, :invalid_output_id}
@doc """
Validates the Output
## Example
# Validate a Payment v1 Output
iex> encoded = <<245, 1, 243, 148, 205, 193, 229, 59, 220, 116, 187, 245, 181, 247, 21, 214,
...> 50, 125, 202, 87, 133, 226, 40, 180, 148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
...> 0, 0, 0, 0, 0, 0, 0, 0, 136, 13, 224, 182, 179, 167, 100, 0, 0>>
iex> encoded |> ExPlasma.Output.decode!() |> ExPlasma.Output.validate()
:ok
# Validate a Output position
iex> encoded_position = <<59, 154, 202, 0>>
iex> encoded_position |> ExPlasma.Output.decode_id!() |> ExPlasma.Output.validate()
:ok
"""
@spec validate(t()) :: validation_responses()
def validate(%__MODULE__{} = output) do
with :ok <- do_validate_integrity(output),
:ok <- do_validate_data(output),
:ok <- do_validate_id(output) do
:ok
end
end
# Validate that we have either output_type or output_id.
defp do_validate_integrity(%__MODULE__{output_type: nil, output_id: output_id}) when is_map(output_id), do: :ok
defp do_validate_integrity(%__MODULE__{output_type: type, output_id: nil}) when is_integer(type), do: :ok
defp do_validate_integrity(_), do: {:error, {:output, :invalid_output}}
# Validate the output type and data. Bypass the validation if it doesn't
# exist in the output body.
defp do_validate_data(%__MODULE__{output_type: nil}), do: :ok
defp do_validate_data(output) do
case get_output_module(output.output_type) do
{:ok, module} -> module.validate(output)
{:error, :unrecognized_output_type} -> {:error, {:output_type, :unrecognized_output_type}}
end
end
# Validate the output ID. Bypass the validation if it doesn't
# exist in the output body.
defp do_validate_id(output), do: Position.validate(output.output_id)
defp parse_output_type(output_type_rlp) do
with {:ok, output_type} <- RlpDecoder.parse_uint256(output_type_rlp),
{:ok, module} <- get_output_module(output_type) do
{:ok, module}
else
_ -> {:error, :unrecognized_output_type}
end
end
defp get_output_module(type) do
case Map.get(@output_types_modules, type) do
nil -> {:error, :unrecognized_output_type}
module -> {:ok, module}
end
end
end
|
lib/ex_plasma/output.ex
|
defmodule Membrane.AudioMixer.ClipPreventingAdder do
@moduledoc """
Module responsible for mixing audio tracks (all in the same format, with the same number of
channels and sample rate). The result is a single track in the format the mixed tracks are encoded in.
If overflow happens during mixing, a wave will be scaled down to the max sample value.
Description of the algorithm:
- Start with an empty queue
- Put merged values while the sign of the values remains the same
- If the sign of the values changes or the adder is flushed:
  - If none of the values overflows the limits of the format, convert the queued
    values to binary samples and return them
  - Otherwise, scale down the queued values so that the peak of the wave becomes
    the maximal (minimal) allowed value, then convert them to binary samples and
    return them.
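For example (an illustrative sketch, assuming a signed 8-bit format with a +127 ceiling), queued values peaking at 180 are scaled by `127 / 180`, mirroring `do_scale/2` below:

    values = [100, 180, 60]
    coefficient = 127 / 180
    Enum.map(values, &trunc(&1 * coefficient))
    #=> [70, 127, 42]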
"""
@behaviour Membrane.AudioMixer.Mixer
alias Membrane.AudioMixer.Helpers
alias Membrane.Caps.Audio.Raw
@enforce_keys [:caps, :sample_size]
defstruct @enforce_keys ++ [is_wave_positive: true, queue: []]
@type t :: %__MODULE__{
caps: Raw.t(),
is_wave_positive: boolean(),
sample_size: integer(),
queue: [integer()]
}
@impl true
def init(caps) do
size = Raw.sample_size(caps)
%__MODULE__{caps: caps, sample_size: size}
end
@impl true
def mix(buffers, %__MODULE__{caps: caps, sample_size: sample_size} = state) do
buffers
|> Helpers.zip_longest_binary_by(sample_size, fn buf -> do_mix(buf, caps) end)
|> add_values(false, state)
end
@impl true
def flush(state), do: add_values([], true, state)
defp do_mix(samples, caps) do
samples
|> Enum.map(&Raw.sample_to_value(&1, caps))
|> Enum.sum()
end
defp add_values(values, is_last_wave, state, buffer \\ <<>>) do
split_fun = if state.is_wave_positive, do: &(&1 >= 0), else: &(&1 <= 0)
{values, rest} = Enum.split_while(values, split_fun)
if !is_last_wave && rest == [] do
state = %__MODULE__{state | queue: state.queue ++ values}
{buffer, state}
else
buffer = [buffer | get_iodata(values, state)] |> IO.iodata_to_binary()
state =
state
|> Map.put(:is_wave_positive, !state.is_wave_positive)
|> Map.put(:queue, [])
if is_last_wave && rest == [] do
{buffer, state}
else
add_values(rest, is_last_wave, state, buffer)
end
end
end
defp get_iodata([], %__MODULE__{queue: []}), do: <<>>
defp get_iodata(values, %__MODULE__{caps: caps, queue: queue}) do
(queue ++ values)
|> scale(caps)
|> Enum.map(&Raw.value_to_sample(&1, caps))
end
defp scale(values, caps) do
{min, max} = Enum.min_max(values)
max_sample_value = Raw.sample_max(caps)
min_sample_value = Raw.sample_min(caps)
cond do
min < min_sample_value -> do_scale(values, min_sample_value / min)
max > max_sample_value -> do_scale(values, max_sample_value / max)
true -> values
end
end
defp do_scale(values, coefficient), do: Enum.map(values, &trunc(&1 * coefficient))
end
|
lib/membrane_audio_mixer/clip_preventing_adder.ex
|
defmodule Tensorflow.DebugTensorWatch do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
node_name: String.t(),
output_slot: integer,
debug_ops: [String.t()],
debug_urls: [String.t()],
tolerate_debug_op_creation_failures: boolean
}
defstruct [
:node_name,
:output_slot,
:debug_ops,
:debug_urls,
:tolerate_debug_op_creation_failures
]
field(:node_name, 1, type: :string)
field(:output_slot, 2, type: :int32)
field(:debug_ops, 3, repeated: true, type: :string)
field(:debug_urls, 4, repeated: true, type: :string)
field(:tolerate_debug_op_creation_failures, 5, type: :bool)
end
defmodule Tensorflow.DebugOptions do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
debug_tensor_watch_opts: [Tensorflow.DebugTensorWatch.t()],
global_step: integer,
reset_disk_byte_usage: boolean
}
defstruct [:debug_tensor_watch_opts, :global_step, :reset_disk_byte_usage]
field(:debug_tensor_watch_opts, 4,
repeated: true,
type: Tensorflow.DebugTensorWatch
)
field(:global_step, 10, type: :int64)
field(:reset_disk_byte_usage, 11, type: :bool)
end
defmodule Tensorflow.DebuggedSourceFile do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
host: String.t(),
file_path: String.t(),
last_modified: integer,
bytes: integer,
lines: [String.t()]
}
defstruct [:host, :file_path, :last_modified, :bytes, :lines]
field(:host, 1, type: :string)
field(:file_path, 2, type: :string)
field(:last_modified, 3, type: :int64)
field(:bytes, 4, type: :int64)
field(:lines, 5, repeated: true, type: :string)
end
defmodule Tensorflow.DebuggedSourceFiles do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
source_files: [Tensorflow.DebuggedSourceFile.t()]
}
defstruct [:source_files]
field(:source_files, 1, repeated: true, type: Tensorflow.DebuggedSourceFile)
end
|
lib/tensorflow/core/protobuf/debug.pb.ex
|
defmodule Knigge do
@moduledoc """
An opinionated way of dealing with behaviours.
Opinionated means that it offers an easy way of defining a "facade" for a
behaviour. This facade then delegates calls to the real implementation, which
is either given directly to `Knigge` or fetched from the configuration.
`Knigge` can be `use`d directly in a behaviour, or in a separate module by
passing the behaviour which should be "facaded" as an option.
## Overview
- [Motivation](#module-motivation)
- [Examples](#module-examples)
- [Options](#module-options)
- [Knigge and Compiler Warnings](#module-knigge-and-compiler-warnings)
## Motivation
`Knigge` was born out of a desire to standardize dealing with behaviours and
their implementations.
As great fans of [`mox`](https://github.com/plataformatec/mox) we longed for
an easy way to swap out implementations from the configuration which lead us
to introduce a facade pattern, where a module's sole responsibility was
loading the correct implementation and delegating calls.
This pattern turned out to be very flexible and useful but required a fair bit
of boilerplate code. `Knigge` was born out of an attempt to reduce this
boilerplate to the absolute minimum.
You can read about our motivation in depth [in our devblog](https://dev.betterdoc.org/elixir/friday_project/behaviour/2019/07/30/how-we-deal-with-behaviours-and-boilerplate.html).
## Examples
Imagine a behaviour looking like this:
defmodule MyGreatBehaviour do
@callback my_great_callback(my_argument :: any()) :: any()
end
Now imagine you want to delegate calls to this behaviour like this:
defmodule MyGreatBehaviourFacade do
@behaviour MyGreatBehaviour
@implementation Application.fetch_env!(:my_application, __MODULE__)
defdelegate my_great_callback(my_argument), to: @implementation
end
With this in place you can simply reference the "real implementation" by
calling functions on your facade:
MyGreatBehaviourFacade.my_great_callback(:with_some_argument)
`Knigge` allows you to reduce this boilerplate to the absolute minimum:
defmodule MyGreatBehaviourFacade do
use Knigge,
behaviour: MyGreatBehaviour,
otp_app: :my_application
end
Under the hood this compiles down to the explicit delegation visible on the top.
In case you don't want to fetch your implementation from the configuration,
`Knigge` also allows you to explicitly pass the implementation of the
behaviour with the aptly named key `implementation`:
defmodule MyGreatBehaviourFacade do
use Knigge,
behaviour: MyGreatBehaviour,
implementation: MyGreatImplementation
end
### `defdefault` - Fallback implementations for optional callbacks
Now imagine you have a more sophisticated behaviour with some optional callbacks:
defmodule MySophisticatedBehaviour do
@callback an_optional_callback() :: any()
@callback a_required_callback() :: any()
@optional_callbacks an_optional_callback: 0
end
As you would expect `Knigge` delegates calls to this callback as usual. But
since it's optional this delegation might fail. A common pattern is to check
if the implementation exports the function in question:
if function_exported?(MyImplementation, :an_optional_callback, 0) do
MyImplementation.an_optional_callback()
else
:my_fallback_implementation
end
`Knigge` offers an easy way to specify these fallback implementations with
`defdefault`:
defmodule MySophisticatedFacade do
use Knigge,
behaviour: MySophisticatedBehaviour,
otp_app: :my_application
defdefault an_optional_callback do
:my_fallback_implementation
end
end
`Knigge` tries to determine at compile-time if the implementation exports
the function in question and only uses the default if this is not the case.
As such `defdefault` incurs no runtime overhead and compiles to a simple `def`.
Of course `defdefault`s can accept arguments as any usual function:
defdefault my_optional_callback_with_arguments(first_argument, another_argument) do
case first_argument do
# ...
end
end
## Options
`Knigge` expects either the `otp_app` key or the `implementation` key. If
neither is provided an error will be raised at compile time.
When using the `otp_app` configuration you can also pass `config_key`, which
results in a call looking like this: `Application.fetch_env!(otp_app, config_key)`.
`config_key` defaults to `__MODULE__`.
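For example (illustrative module and key names), a facade reading from a custom
configuration key could look like this:

    defmodule MyGreatBehaviourFacade do
      use Knigge,
        behaviour: MyGreatBehaviour,
        otp_app: :my_application,
        config_key: :my_great_implementation
    end

    # config/config.exs
    config :my_application, my_great_implementation: MyGreatImplementation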
By default `Knigge` does as much work as possible at compile time. This will
be fine most of the time. In case you want to swap out the implementation at
runtime - by calling `Application.put_env/2` - you can force `Knigge` to do all
delegation at runtime. As you might expect this incurs runtime overhead,
since the implementing module will have to be loaded for each call.
If you want to do delegation at runtime simply pass `delegate_at_runtime?: true`
as an option - by default `Knigge` delegates at runtime in your `:test` environment.
For further information about options check the `Knigge.Options` module.
## Verifying your Implementations - `mix knigge.verify`
Before version 1.2.0 `Knigge` tried to check at compile time if the implementation of your facade existed.
Due to the way the Elixir compiler goes about compiling your modules this didn't work as expected - [check out this page if you're interested in the details](https://hexdocs.pm/knigge/the-existence-check.html).
As an alternative `Knigge` now offers the `mix knigge.verify` task which verifies that the implementation modules of your facades actually exist.
The task returns with an error code when an implementation is missing, which allows you to plug it into your CI pipeline - for example as `MIX_ENV=prod mix knigge.verify`.
For details check the documentation of `mix knigge.verify` by running `mix help knigge.verify`.
## Knigge and the `:test` environment
To give the maximum amount of flexibility `Knigge` delegates at runtime in your
`:test` environment and at compile time everywhere else.
This allows you to easily swap out your behaviour implementation - for example by
calling `Application.put_env/3` - and it also avoids a bunch of compiler warnings.
### Compiler Warnings
With the default configuration `Knigge` does not generate any compiler warnings.
In case you change the `delegate_at_runtime?` configuration to anything which
excludes the `:test` environment you will - most likely - encounter compiler
warnings like this:
warning: function MyMock.my_great_callback/1 is undefined (module MyMock is not available)
lib/my_facade.ex:1
warning: function MyMock.another_callback/0 is undefined (module MyMock is not available)
lib/my_facade.ex:1
This can quickly become quite unnerving. Luckily you can explicitly tell the
compiler to ignore this module in your `mix.exs` file.
To disable the check simply add a single line to your `mix.exs`' `project/0` function:
def project do
[
# ...
xref: [exclude: [MyMock]]
]
end
Where `MyMock` is the name of your configured module in question.
"""
@type key :: :behaviour | :implementation | :options
@spec __using__(Knigge.Options.raw()) :: no_return
defmacro __using__(options) do
quote bind_quoted: [options: options] do
import Knigge.Code, only: [defdefault: 2]
@before_compile Knigge.Code
Module.register_attribute(__MODULE__, :__knigge__, accumulate: true)
options =
options
|> Keyword.put_new(:behaviour, __MODULE__)
|> Keyword.put_new(:config_key, __MODULE__)
|> Knigge.Options.new()
behaviour =
options
|> Knigge.Behaviour.fetch!()
|> Knigge.Module.ensure_exists!(__ENV__)
@__knigge__ {:options, options}
@doc "Acts as a \"flag\" to mark this module as a Knigge module."
@spec __knigge__() :: :ok
def __knigge__, do: :ok
@doc "Access Knigge internal values, such as the implementation being delegated to etc."
@spec __knigge__(:behaviour) :: module()
@spec __knigge__(:implementation) :: module()
@spec __knigge__(:options) :: Knigge.Options.t()
def __knigge__(:behaviour), do: unquote(behaviour)
def __knigge__(:options), do: @__knigge__[:options]
if options.delegate_at_runtime? do
def __knigge__(:implementation) do
Knigge.Implementation.fetch!(__knigge__(:options))
end
else
implementation = Knigge.Implementation.fetch!(options)
def __knigge__(:implementation) do
unquote(implementation)
end
end
end
end
@doc "Access the options passed to Knigge for a module"
@spec options!(module()) :: Knigge.Options.t()
def options!(module) do
cond do
Module.open?(module) ->
Module.get_attribute(module, :__knigge__)[:options]
function_exported?(module, :__knigge__, 1) ->
module.__knigge__(:options)
true ->
raise ArgumentError, "expected a module using Knigge but #{inspect(module)} does not."
end
end
end
|
lib/knigge.ex
|
defmodule Blockchain.Transaction.Receipt do
@moduledoc """
This module specifies functions to create and
interact with the transaction receipt, defined
in Section 4.4.1 of the Yellow Paper.
Transaction receipts track incremental state changes
after each transaction (e.g. how much gas has been
expended).
"""
# Defined in Eq.(19)
defstruct [
state: <<>>,
cumulative_gas: 0,
bloom_filter: <<>>,
logs: <<>>,
]
# Types defined in Eq.(20)
@type t :: %__MODULE__{
state: EVM.trie_root,
cumulative_gas: EVM.Gas.t, # Defined in Eq.(21)
bloom_filter: binary(), # TODO: Bloom filter
logs: EVM.SubState.logs,
}
@doc """
Encodes a transaction receipt such that it can be
RLP encoded. This is defined in Eq.(20) of the Yellow
Paper.
## Examples
iex> Blockchain.Transaction.Receipt.serialize(%Blockchain.Transaction.Receipt{})
[<<>>, 0, <<>>, <<>>]
iex> Blockchain.Transaction.Receipt.serialize(%Blockchain.Transaction.Receipt{state: <<1,2,3>>, cumulative_gas: 5, bloom_filter: <<2,3,4>>, logs: "hi mom"})
[<<1,2,3>>, 5, <<2,3,4>>, "hi mom"]
"""
@spec serialize(t) :: ExRLP.t
def serialize(trx_receipt) do
[
trx_receipt.state,
trx_receipt.cumulative_gas,
trx_receipt.bloom_filter,
trx_receipt.logs,
]
end
@doc """
Decodes a transaction receipt based on the serialization format
defined in Eq.(20). This is the inverse of `serialize/1`.
## Examples
iex> Blockchain.Transaction.Receipt.deserialize([<<1,2,3>>, <<5>>, <<2,3,4>>, "hi mom"])
%Blockchain.Transaction.Receipt{state: <<1,2,3>>, cumulative_gas: 5, bloom_filter: <<2,3,4>>, logs: "hi mom"}
iex> Blockchain.Transaction.Receipt.deserialize([<<>>, <<0>>, <<>>, <<>>])
%Blockchain.Transaction.Receipt{}
"""
@spec deserialize(ExRLP.t) :: t
def deserialize(rlp) do
[
state,
cumulative_gas,
bloom_filter,
logs
] = rlp
%__MODULE__{
state: state,
cumulative_gas: :binary.decode_unsigned(cumulative_gas),
bloom_filter: bloom_filter,
logs: logs
}
end
end
|
lib/blockchain/transaction/receipt.ex
|
defmodule SnowplowTracker.Payload do
@moduledoc """
Represents the data structure used to store event information
"""
@derive Jason.Encoder
alias __MODULE__
alias SnowplowTracker.Payloads.Helper
@keys [
pairs: %{}
]
defstruct @keys
@type t :: %__MODULE__{
pairs: map()
}
def new(payload) do
struct(__MODULE__, pairs: payload["pairs"])
end
@doc """
This function is used to add a key-value pair to the payload object
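A minimal usage sketch (assuming `Helper.sanitize/1` passes plain strings through unchanged):

    payload = SnowplowTracker.Payload.add(%SnowplowTracker.Payload{}, "e", "pv")
    SnowplowTracker.Payload.get(payload)
    #=> %{"e" => "pv"}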
"""
@spec add(t, String.t() | atom(), String.t() | atom()) :: t
def add(%Payload{} = payload, key, value) do
{key, value} = {
Helper.sanitize(key),
Helper.sanitize(value)
}
payload
|> Map.get(:pairs)
|> Map.put(key, value)
|> (&%Payload{pairs: &1}).()
end
@doc """
This function is used to add a map containing one or many key-value pairs to a payload
object.
"""
@spec add_map(t, map()) :: t
def add_map(%Payload{} = payload, map) do
payload
|> Map.get(:pairs)
|> Map.merge(map)
|> (&%Payload{pairs: &1}).()
end
@doc """
This function is used to convert a map to JSON, encode it as a base64 string depending on the
`encode` flag, and add it as a key-value pair to a payload object.
"""
@spec add_json(t, map(), String.t(), String.t(), boolean()) :: t
def add_json(payload, map, _, key_not_encoded, encode) when encode == false do
map
|> Helper.convert_to_json(encode)
|> (&add(payload, key_not_encoded, &1)).()
end
def add_json(%Payload{} = payload, map, key_encoded, _, encode) when encode == true do
map
|> Helper.convert_to_json(encode)
|> (&add(payload, key_encoded, &1)).()
end
@doc """
This function is used to get the map containing key-value pairs stored in a payload object.
"""
@spec get(t) :: map()
def get(%Payload{} = payload), do: payload.pairs
@doc """
This function is used to convert a payload object to string and encode it to base64 depending
on the `encode` flag.
"""
@spec string(t, boolean()) :: String.t()
def string(%Payload{} = payload, encode), do: Helper.convert_to_json(payload.pairs, encode)
@doc """
Decode the JSON payload, storing the decoded pairs as part of the struct.
"""
@spec decode(binary) :: {:ok, %__MODULE__{}} | {:error, Jason.DecodeError.t()}
def decode(json_payload) do
case Jason.decode(json_payload) do
{:ok, response} ->
  {:ok, __MODULE__.new(response)}
{:error, error} ->
{:error, error}
end
end
@doc """
Decode the JSON payload, storing the decoded pairs as part of the struct, raising if there is an error
"""
@spec decode!(binary) :: %__MODULE__{}
def decode!(payload) do
response = Jason.decode!(payload)
%{}
|> Map.put("pairs", response)
|> __MODULE__.new()
end
end
|
lib/snowplow_tracker/payload.ex
|
defmodule Estated.Property do
@moduledoc "A property record."
@moduledoc since: "0.1.0"
alias Estated.Property.Address
alias Estated.Property.Assessment
alias Estated.Property.Deed
alias Estated.Property.MarketAssessment
alias Estated.Property.Metadata
alias Estated.Property.Owner
alias Estated.Property.Parcel
alias Estated.Property.Structure
alias Estated.Property.Tax
alias Estated.Property.Valuation
defstruct [
:metadata,
:address,
:parcel,
:structure,
:taxes,
:assessments,
:market_assessments,
:valuation,
:owner,
:deeds
]
@typedoc "A property record."
@typedoc since: "0.1.0"
@type t :: %__MODULE__{
metadata: Metadata.t() | nil,
address: Address.t() | nil,
parcel: Parcel.t() | nil,
structure: Structure.t() | nil,
taxes: [Tax.t()],
assessments: [Assessment.t()],
market_assessments: [MarketAssessment.t()],
valuation: Valuation.t() | nil,
owner: Owner.t() | nil,
deeds: Deed.deeds()
}
@doc false
@doc since: "0.1.0"
@spec cast(map()) :: t()
def cast(%{} = property) do
Enum.reduce(property, %__MODULE__{}, &cast_field/2)
end
@spec cast(nil) :: nil
def cast(nil) do
nil
end
defp cast_field({"metadata", metadata}, acc) do
%__MODULE__{acc | metadata: Metadata.cast(metadata)}
end
defp cast_field({"address", address}, acc) do
%__MODULE__{acc | address: Address.cast(address)}
end
defp cast_field({"parcel", parcel}, acc) do
%__MODULE__{acc | parcel: Parcel.cast(parcel)}
end
defp cast_field({"structure", structure}, acc) do
%__MODULE__{acc | structure: Structure.cast(structure)}
end
defp cast_field({"taxes", taxes}, acc) do
%__MODULE__{acc | taxes: Tax.cast_list(taxes)}
end
defp cast_field({"assessments", assessments}, acc) do
%__MODULE__{acc | assessments: Assessment.cast_list(assessments)}
end
defp cast_field({"market_assessments", market_assessments}, acc) do
%__MODULE__{acc | market_assessments: MarketAssessment.cast_list(market_assessments)}
end
defp cast_field({"valuation", valuation}, acc) do
%__MODULE__{acc | valuation: Valuation.cast(valuation)}
end
defp cast_field({"owner", owner}, acc) do
%__MODULE__{acc | owner: Owner.cast(owner)}
end
defp cast_field({"deeds", deeds}, acc) do
%__MODULE__{acc | deeds: Deed.cast_list(deeds)}
end
defp cast_field(_map_entry, acc) do
acc
end
end
|
lib/estated/property.ex
|
defmodule Cizen.Saga do
@moduledoc """
The saga behaviour
## Example
defmodule SomeSaga do
use Cizen.Saga
defstruct []
@impl true
def on_start(%__MODULE__{} = saga) do
saga
end
@impl true
def handle_event(_event, state) do
state
end
end
"""
alias Cizen.CizenSagaRegistry
alias Cizen.Dispatcher
alias Cizen.Event
alias Cizen.Pattern
alias Cizen.SagaID
require Pattern
@type t :: struct
@type state :: any
# `pid | {atom, node} | atom` is the same as the Process.monitor/1's argument.
@type lifetime :: pid | {atom, node} | atom | nil
@type start_option ::
{:saga_id, SagaID.t()}
| {:lifetime, pid | SagaID.t() | nil}
| {:return, :pid | :saga_id}
| {:resume, term}
@doc """
Invoked when the saga is started.
Saga.Started event will be dispatched after this callback.
Returned value will be used as the next state to pass `c:handle_event/2` callback.
"""
@callback on_start(t()) :: state
@doc """
Invoked when the saga receives an event.
Returned value will be used as the next state to pass `c:handle_event/2` callback.
"""
@callback handle_event(Event.t(), state) :: state
@doc """
Invoked when the saga is resumed.
Returned value will be used as the next state to pass `c:handle_event/2` callback.
This callback is predefined. The default implementation is here:
```
def on_resume(saga, state) do
on_start(saga)
state
end
```
"""
@callback on_resume(t(), state) :: state
@doc """
The handler for `Saga.call/2`.
You should call `Saga.reply/2` with `from`, otherwise the call will time out.
You can reply from any process, at any time.
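For example (an illustrative handler):

    def handle_call(:get_counter, from, state) do
      Saga.reply(from, state.counter)
      state
    end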
"""
@callback handle_call(message :: term, from :: GenServer.from(), state) :: state
@doc """
The handler for `Saga.cast/2`.
"""
@callback handle_cast(message :: term, state) :: state
@internal_prefix :"$cizen.saga"
@saga_id_key :"$cizen.saga.id"
@lazy_init :"$cizen.saga.lazy_init"
defmacro __using__(_opts) do
alias Cizen.{CizenSagaRegistry, Dispatcher, Saga}
quote do
@behaviour Saga
@impl Saga
def on_resume(saga, state) do
on_start(saga)
state
end
# @impl GenServer
def init({:start, id, saga, lifetime}) do
Saga.init_with(id, saga, lifetime, %Saga.Started{saga_id: id}, :on_start, [
saga
])
end
# @impl GenServer
def init({:resume, id, saga, state, lifetime}) do
Saga.init_with(id, saga, lifetime, %Saga.Resumed{saga_id: id}, :on_resume, [
saga,
state
])
end
# @impl GenServer
def handle_info({:DOWN, _, :process, _, _}, state) do
{:stop, {:shutdown, :finish}, state}
end
# @impl GenServer
def handle_info(event, state) do
id = Saga.self()
case event do
%Saga.Finish{saga_id: ^id} ->
{:stop, {:shutdown, :finish}, state}
event ->
state = handle_event(event, state)
{:noreply, state}
end
rescue
reason -> {:stop, {:shutdown, {reason, __STACKTRACE__}}, state}
end
# @impl GenServer
def terminate(:shutdown, _state) do
:shutdown
end
def terminate({:shutdown, :finish}, _state) do
Dispatcher.dispatch(%Saga.Finished{saga_id: Saga.self()})
:shutdown
end
def terminate({:shutdown, {reason, trace}}, _state) do
id = Saga.self()
saga =
case Saga.get_saga(id) do
{:ok, saga} ->
saga
# nil -> should not happen
end
Dispatcher.dispatch(%Saga.Crashed{
saga_id: id,
saga: saga,
reason: reason,
stacktrace: trace
})
:shutdown
end
# @impl GenServer
@impl Saga
def handle_call({:"$cizen.saga", :get_saga_id}, _from, state) do
[saga_id] = Registry.keys(CizenSagaRegistry, Kernel.self())
{:reply, saga_id, state}
end
def handle_call({:"$cizen.saga", :request, request}, _from, state) do
result = Saga.handle_request(request)
{:reply, result, state}
end
def handle_call({:"$cizen.saga", message}, from, state) do
state = handle_call(message, from, state)
{:noreply, state}
end
# @impl GenServer
@impl Saga
def handle_cast({:"$cizen.saga", :dummy_to_prevent_dialyzer_errors}, state), do: state
def handle_cast({:"$cizen.saga", message}, state) do
state = handle_cast(message, state)
{:noreply, state}
end
defoverridable on_resume: 2
end
end
defmodule Finish do
@moduledoc "A event fired to finish"
defstruct([:saga_id])
end
defmodule Started do
@moduledoc "A event fired on start"
defstruct([:saga_id])
end
defmodule Resumed do
@moduledoc "A event fired on resume"
defstruct([:saga_id])
end
defmodule Finished do
@moduledoc "A event fired on finish"
defstruct([:saga_id])
end
defmodule Crashed do
@moduledoc "A event fired on crash"
defstruct([:saga_id, :saga, :reason, :stacktrace])
end
@doc """
Returns the pid for the given saga ID.
"""
@spec get_pid(SagaID.t()) :: {:ok, pid} | :error
defdelegate get_pid(saga_id), to: CizenSagaRegistry
@doc """
Returns the saga struct for the given saga ID.
"""
@spec get_saga(SagaID.t()) :: {:ok, t()} | :error
defdelegate get_saga(saga_id), to: CizenSagaRegistry
def lazy_init, do: @lazy_init
@doc """
Returns the module for a saga.
"""
@spec module(t) :: module
def module(saga) do
saga.__struct__
end
@doc """
Resumes a saga with the given state.
Options:
- `{:lifetime, lifetime_saga_or_pid}` the lifetime saga ID or pid. (Default: the saga lives forever)
- `{:return, return_type}` when `:saga_id` is given, `{:ok, saga_id}` is returned instead of `{:ok, pid}`. (Default: `:pid`)
"""
@spec resume(SagaID.t(), t(), state, [start_option]) :: GenServer.on_start()
def resume(id, saga, state, opts \\ []) do
start(saga, Keyword.merge(opts, saga_id: id, resume: state))
end
@doc """
Starts a saga.
Options:
- `{:saga_id, saga_id}` starts with the specified saga ID. (Default: randomly generated)
- `{:lifetime, lifetime_saga_or_pid}` the lifetime saga ID or pid. (Default: the saga lives forever)
- `{:return, return_type}` when `:saga_id` is given, `{:ok, saga_id}` is returned instead of `{:ok, pid}`. (Default: `:pid`)
- `{:resume, state}` when given, resumes the saga with the specified state.
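For example (an illustrative sketch, assuming the `SomeSaga` module from the docs above):

    {:ok, pid} = Cizen.Saga.start(%SomeSaga{})
    {:ok, saga_id} = Cizen.Saga.start(%SomeSaga{}, return: :saga_id)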
"""
@spec start(t(), opts :: [start_option]) :: GenServer.on_start()
def start(%module{} = saga, opts \\ []) do
{saga_id, return, init} = handle_opts(saga, opts)
result = GenServer.start(module, init)
handle_opts_return(result, saga_id, return)
end
@doc """
Starts a saga linked to the current process.
See `Saga.start/2` for details.
"""
@spec start_link(t(), opts :: [start_option]) :: GenServer.on_start()
def start_link(%module{} = saga, opts \\ []) do
{saga_id, return, init} = handle_opts(saga, opts)
result = GenServer.start_link(module, init)
handle_opts_return(result, saga_id, return)
end
defp handle_opts(saga, opts) do
{saga_id, opts} = Keyword.pop(opts, :saga_id, SagaID.new())
{lifetime, opts} = Keyword.pop(opts, :lifetime, nil)
{return, opts} = Keyword.pop(opts, :return, :pid)
mode =
case Keyword.fetch(opts, :resume) do
{:ok, state} -> {:resume, state}
_ -> :start
end
opts = Keyword.delete(opts, :resume)
if opts != [],
do: raise(ArgumentError, message: "invalid argument(s): #{inspect(Keyword.keys(opts))}")
lifetime =
case lifetime do
nil ->
nil
pid when is_pid(pid) ->
pid
saga_id ->
get_lifetime_pid_from_saga_id(saga_id)
end
init =
case mode do
:start -> {:start, saga_id, saga, lifetime}
{:resume, state} -> {:resume, saga_id, saga, state, lifetime}
end
{saga_id, return, init}
end
defp get_lifetime_pid_from_saga_id(saga_id) do
case get_pid(saga_id) do
{:ok, pid} -> pid
_ -> spawn(fn -> nil end)
end
end
defp handle_opts_return(result, saga_id, return) do
case result do
{:ok, pid} ->
{:ok, if(return == :pid, do: pid, else: saga_id)}
other ->
other
end
end
@spec stop(SagaID.t()) :: :ok
def stop(id) do
GenServer.stop({:via, Registry, {CizenSagaRegistry, id}}, :shutdown)
catch
:exit, _ -> :ok
end
def send_to(id, message) do
Registry.dispatch(CizenSagaRegistry, id, fn entries ->
for {pid, _} <- entries, do: send(pid, message)
end)
end
def exit(id, reason, trace) do
GenServer.stop({:via, Registry, {CizenSagaRegistry, id}}, {:shutdown, {reason, trace}})
end
def call(id, message) do
GenServer.call({:via, Registry, {CizenSagaRegistry, id}}, {@internal_prefix, message})
end
def cast(id, message) do
GenServer.cast({:via, Registry, {CizenSagaRegistry, id}}, {@internal_prefix, message})
end
def self do
Process.get(@saga_id_key)
end
def reply(from, reply) do
GenServer.reply(from, reply)
end
@doc false
def init_with(id, saga, lifetime, event, function, arguments) do
Registry.register(CizenSagaRegistry, id, saga)
Dispatcher.listen(Pattern.new(fn %Finish{saga_id: ^id} -> true end))
module = module(saga)
unless is_nil(lifetime), do: Process.monitor(lifetime)
Process.put(@saga_id_key, id)
state =
case apply(module, function, arguments) do
{@lazy_init, state} ->
state
state ->
Dispatcher.dispatch(event)
state
end
{:ok, state}
end
@doc false
def handle_request({:register, registry, saga_id, key, value}) do
Registry.register(registry, key, {saga_id, value})
end
def handle_request({:unregister, registry, key}) do
Registry.unregister(registry, key)
end
def handle_request({:unregister_match, registry, key, pattern, guards}) do
Registry.unregister_match(registry, key, pattern, guards)
end
def handle_request({:update_value, registry, key, callback}) do
Registry.update_value(registry, key, fn {saga_id, value} -> {saga_id, callback.(value)} end)
end
@doc false
def saga_id_key, do: @saga_id_key
@doc false
def internal_prefix, do: @internal_prefix
end
|
lib/cizen/saga.ex
|
defmodule Phoenix.PubSub.Redis do
@moduledoc """
Phoenix PubSub adapter based on Redis.
To start it, list it in your supervision tree as:
{Phoenix.PubSub,
adapter: Phoenix.PubSub.Redis,
host: "192.168.1.100",
node_name: System.get_env("NODE")}
You will also need to add `:phoenix_pubsub_redis` to your deps:
defp deps do
[{:phoenix_pubsub_redis, "~> 2.1.0"}]
end
## Options
* `:url` - The url to the redis server, e.g. `redis://username:password@host:port` (see the example after this list)
* `:name` - The required name to register the PubSub processes, ie: `MyApp.PubSub`
* `:node_name` - The required name of the node; defaults to the Erlang `--sname` value. It must be unique.
* `:host` - The redis-server host IP, defaults `"127.0.0.1"`
* `:port` - The redis-server port, defaults `6379`
* `:password` - The redis-server password, defaults `""`
* `:ssl` - The redis-server ssl option, defaults `false`
* `:redis_pool_size` - The size of the redis connection pool. Defaults `5`
* `:compression_level` - Compression level applied to serialized terms - from `0` (no compression), to `9` (highest). Defaults `0`
* `:socket_opts` - List of options that are passed to the network layer when connecting to the Redis server. Default `[]`
* `:sentinel` - Redix sentinel configuration. Defaults to `nil`
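For example (illustrative credentials), the connection can be configured through a
single `:url` instead of discrete host options:

    {Phoenix.PubSub,
     adapter: Phoenix.PubSub.Redis,
     url: "redis://user:secret@192.168.1.100:6379",
     node_name: System.get_env("NODE")}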
"""
use Supervisor
@behaviour Phoenix.PubSub.Adapter
@redis_pool_size 5
@redis_opts [:host, :port, :password, :database, :ssl, :socket_opts, :sentinel]
@defaults [host: "127.0.0.1", port: 6379]
## Adapter callbacks
@impl true
defdelegate node_name(adapter_name),
to: Phoenix.PubSub.RedisServer
@impl true
defdelegate broadcast(adapter_name, topic, message, dispatcher),
to: Phoenix.PubSub.RedisServer
@impl true
defdelegate direct_broadcast(adapter_name, node_name, topic, message, dispatcher),
to: Phoenix.PubSub.RedisServer
## GenServer callbacks
@doc false
def start_link(opts) do
adapter_name = Keyword.fetch!(opts, :adapter_name)
supervisor_name = Module.concat(adapter_name, "Supervisor")
Supervisor.start_link(__MODULE__, opts, name: supervisor_name)
end
@impl true
def init(opts) do
pubsub_name = Keyword.fetch!(opts, :name)
adapter_name = Keyword.fetch!(opts, :adapter_name)
compression_level = Keyword.get(opts, :compression_level, 0)
opts = handle_url_opts(opts)
opts = Keyword.merge(@defaults, opts)
redis_opts = Keyword.take(opts, @redis_opts)
node_name = opts[:node_name] || node()
validate_node_name!(node_name)
:ets.new(adapter_name, [:public, :named_table, read_concurrency: true])
:ets.insert(adapter_name, {:node_name, node_name})
:ets.insert(adapter_name, {:compression_level, compression_level})
pool_opts = [
name: {:local, adapter_name},
worker_module: Redix,
size: opts[:redis_pool_size] || @redis_pool_size,
max_overflow: 0
]
children = [
{Phoenix.PubSub.RedisServer, {pubsub_name, adapter_name, node_name, redis_opts}},
:poolboy.child_spec(adapter_name, pool_opts, redis_opts)
]
Supervisor.init(children, strategy: :rest_for_one)
end
defp handle_url_opts(opts) do
if opts[:url] do
merge_url_opts(opts)
else
opts
end
end
defp merge_url_opts(opts) do
info = URI.parse(opts[:url])
user_opts =
case String.split(info.userinfo || "", ":") do
[""] -> []
[username] -> [username: username]
[username, password] -> [username: username, password: password]
end
opts
|> Keyword.merge(user_opts)
|> Keyword.merge(host: info.host, port: info.port || @defaults[:port])
end
defp validate_node_name!(node_name) do
if node_name in [nil, :nonode@nohost] do
raise ArgumentError, ":node_name is a required option for unnamed nodes"
end
:ok
end
end
|
lib/phoenix_pubsub_redis/redis.ex
|
defmodule Queue do
@moduledoc """
This module crawls all pages and returns a list of pages as tuples.
The crawler will never go outside of the given URL host.
"""
@doc """
Asynchronously crawls all pages linked from the initial URL.
It returns a list of tuples, each tuple containing:
- status code
- page url
- map with CSS selectors and their count
## Parameters
- `url`: the initial URL to crawl
- `query`: list of valid CSS selectors as strings
- `options`: Keyword list of options like `[{:connections, 10}]`
- `sputnik_pid`: the pid which will receive the output
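## Example
An illustrative call; the calling process later receives `{:ok, pages}`:

    Queue.start("https://example.com", ["a", "h1"], [connections: 10], self())
    receive do
      {:ok, pages} -> pages
    end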
"""
def start(url, query, options, sputnik_pid) do
spawn __MODULE__, :init, [url, query, options, sputnik_pid]
end
@doc false
def init(url, query, options, sputnik_pid) do
Page.start(url, query, self())
%URI{host: host} = URI.parse(url)
done = loop(host, [], [url], [], query, options)
send sputnik_pid, {:ok, done}
end
defp loop(_, [], [], done, _, _), do: done
defp loop(domain, to_do, processing, done, query, options) do
receive do
{:ok, status_code, request_url, links, result} ->
{to_do, processing, done} = set_as_done(request_url, to_do, processing, done, status_code, result)
to_do = enqueue(links, to_do, processing, done, domain, query)
{to_do, processing} = fill_processing(to_do, processing, query, options)
IO.write "."
loop(domain, to_do, processing, done, query, options)
{:error, url, _error} ->
IO.write "x"
# NOTE: when we get an error from HTTPoison, we use the status code 999
{to_do, processing, done} = set_as_done(url, to_do, processing, done, 999, %{})
loop(domain, to_do, processing, done, query, options)
_ ->
Greetings.error
raise "Unknown message"
end
end
defp set_as_done(request_url, to_do, processing, done, status_code, result) do
done = done ++ [{status_code, request_url, result}]
processing = processing -- [request_url]
to_do = to_do -- [request_url]
{to_do, processing, done}
end
defp enqueue(links, to_do, processing, done, domain, _query) do
  done_urls = Enum.map(done, fn({_, url, _}) -> url end) ++ processing ++ to_do
  filtered_links = select_same_domain_links(links, domain) -- done_urls
  to_do ++ filtered_links
end
defp fill_processing(to_do, processing, query, options) do
urls_amount = (options[:connections] || 10) - Enum.count(processing)
to_be_processing = Enum.take(to_do, urls_amount)
Enum.each(to_be_processing, fn(link) -> Page.start(link, query, self()) end)
{to_do -- to_be_processing, processing ++ to_be_processing}
end
defp same_domain(link, domain) do
%URI{host: host} = URI.parse(link)
host == domain
end
defp select_same_domain_links(links, domain) do
Enum.filter(links, (fn(link) -> same_domain(link, domain) end))
end
end
|
lib/queue.ex
| 0.735071
| 0.49823
|
queue.ex
|
starcoder
|
defmodule Replug do
@moduledoc """
```
# ---- router.ex ----
plug Replug,
plug: Corsica,
opts: {MyAppWeb.PlugConfigs, :corsica}
# ---- plug_configs.ex ----
defmodule MyAppWeb.PlugConfigs do
def corsica do
[
max_age: System.get_env("CORSICA_MAX_AGE"),
expose_headers: ~w(X-Foo),
origins: System.get_env("VALID_ORIGINS")
]
end
end
```
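
A function plug can be wrapped as well by passing a `{{module, function}, static_opts}`
tuple (a sketch; `MyAppWeb.Plugs.auth/2` is hypothetical):

```
plug Replug,
  plug: {{MyAppWeb.Plugs, :auth}, []},
  opts: {MyAppWeb.PlugConfigs, :auth}
```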
"""
@behaviour Plug
@impl true
def init(opts) do
plug = parse_plug_opt(opts)
opts_opt = parse_opts_opt(opts)
%{
plug: plug,
opts: opts_opt
}
end
@impl true
def call(conn, %{plug: {plug_type, plug_module, plug_opts}, opts: opts}) do
opts = build_plug_opts(plug_type, plug_module, plug_opts, opts)
call_plug(conn, plug_type, plug_module, opts)
end
defp call_plug(conn, :fn, {plug_module, plug_function}, opts) do
apply(plug_module, plug_function, [conn, opts])
end
defp call_plug(conn, :mod, plug_module, opts) do
plug_module.call(conn, opts)
end
defp parse_plug_opt(opts) do
case Keyword.get(opts, :plug) do
nil ->
raise("Replug requires a :plug entry with a module or tuple value")
{{plug_module, plug_function} = plug, opts} when is_atom(plug_module) and is_atom(plug_function) ->
{:fn, plug, opts}
{plug, opts} when is_atom(plug) ->
{:mod, plug, opts}
plug when is_atom(plug) ->
{:mod, plug, :only_dynamic_opts}
end
end
defp parse_opts_opt(opts) do
case Keyword.get(opts, :opts) do
nil ->
raise("Replug requires a :opts entry")
{opts_module, opts_function} ->
{opts_module, opts_function, []}
{opts_module, opts_function, opts_args} ->
{opts_module, opts_function, opts_args}
end
end
defp build_plug_opts(plug_type, plug, plug_opts, opts) do
dynamic_opts = call_opts(opts)
case plug_opts do
:only_dynamic_opts ->
dynamic_opts
static_opts ->
merge_opts(static_opts, dynamic_opts)
end
|> maybe_plug_init(plug_type, plug)
end
defp maybe_plug_init(opts, :fn, _plug_fn) do
opts
end
defp maybe_plug_init(opts, :mod, plug_module) do
if function_exported?(plug_module, :init, 1) do
plug_module.init(opts)
else
opts
end
end
defp call_opts({opts_module, opts_function, opt_args}) do
apply(opts_module, opts_function, opt_args)
end
defp merge_opts(static_opts, dynamic_opts)
when is_list(static_opts) and is_list(dynamic_opts) do
Keyword.merge(static_opts, dynamic_opts)
end
defp merge_opts(static_opts, dynamic_opts) when is_map(static_opts) and is_map(dynamic_opts) do
Map.merge(static_opts, dynamic_opts)
end
end
|
lib/replug.ex
| 0.602529
| 0.570182
|
replug.ex
|
starcoder
|
defmodule Ueberauth.Strategy.FreeAgent.OAuth do
@moduledoc """
An implementation of OAuth2 for FreeAgent, using the v2 API.
See `Ueberauth.Strategy.FreeAgent` for configuration details.
"""
use OAuth2.Strategy
@defaults [
strategy: __MODULE__,
site: "https://api.freeagent.com/v2",
authorize_url: "https://api.freeagent.com/v2/approve_app",
token_url: "https://api.freeagent.com/v2/token_endpoint",
]
@sandbox_defaults [
strategy: __MODULE__,
site: "https://api.sandbox.freeagent.com/v2",
authorize_url: "https://api.sandbox.freeagent.com/v2/approve_app",
token_url: "https://api.sandbox.freeagent.com/v2/token_endpoint",
]
@use_sandbox_defaults [
true: @sandbox_defaults,
false: @defaults
]
@doc """
Construct a client for requests to FreeAgent.
Optionally include any OAuth2 options here to be merged with the defaults.
These options are only useful for usage outside the normal callback phase of Ueberauth.
## Examples
```
profile =
Ueberauth.Strategy.FreeAgent.OAuth.client(token: "THE_ACCESS_TOKEN")
|> OAuth2.Client.get("/users/me")
```
"""
def client(opts \\ []) do
defaults = @use_sandbox_defaults[Application.get_env(:ueberauth_freeagent, :sandbox, false)]
config = Application.get_env(:ueberauth, Ueberauth.Strategy.FreeAgent.OAuth)
client_opts =
defaults
|> Keyword.merge(config)
|> Keyword.merge(opts)
client = OAuth2.Client.new(client_opts)
client
# FreeAgent doesn't seem to currently like basic auth
|> put_param("client_id", client.client_id())
|> put_param("client_secret", client.client_secret())
end
@doc false
def authorize_url!(params \\ [], opts \\ []) do
opts
|> client
|> OAuth2.Client.authorize_url!(params)
end
@doc false
def get_token!(params \\ [], options \\ []) do
headers = Keyword.get(options, :headers, [])
options = Keyword.get(options, :options, [])
client_options = Keyword.get(options, :client_options, [])
client = OAuth2.Client.get_token!(client(client_options), params, headers, options)
client.token
end
# Strategy Callbacks
@doc false
def authorize_url(client, params) do
OAuth2.Strategy.AuthCode.authorize_url(client, params)
end
@doc false
def get_token(client, params, headers) do
client
|> put_param("grant_type", "authorization_code")
|> put_header("Accept", "application/json")
|> OAuth2.Strategy.AuthCode.get_token(params, headers)
end
end
|
lib/ueberauth/strategy/freeagent/oauth.ex
| 0.851876
| 0.65902
|
oauth.ex
|
starcoder
|
defmodule Dwarlixir.World do
alias Dwarlixir.World
@type t :: [World.Location.t]
use GenServer
@ets_name :world
@world_map_key :world_map
def start_link(opts \\ %{}) do
GenServer.start_link(__MODULE__, opts, name: __MODULE__)
end
def init(%{init: false}) do
:ets.new(@ets_name, [:set, :named_table, :protected])
common_init([])
end
def init(%{init: :simple}) do
:ets.new(@ets_name, [:set, :named_table, :protected])
children = map_data_old()
common_init(children)
end
def init(%{init: :new}) do
:ets.new(@ets_name, [:set, :named_table, :protected])
children = map_data()
common_init(children)
end
def init(%{init: world}) do
{:ok, @ets_name} = :ets.file2tab(@ets_name)
[{{:world_map, _world}, children}] = :ets.lookup(@ets_name, {@world_map_key, world})
common_init(children)
end
def save_world do
GenServer.call(__MODULE__, :save_world)
end
defp common_init(children) do
Enum.each(children, &World.Supervisor.start_child/1)
{:ok, %{}}
end
def handle_call(:save_world, _from, state) do
world_identifier = UUID.uuid4(:hex)
world =
Supervisor.which_children(World.Supervisor)
|> Enum.map(fn({_id, pid, _type, _module}) -> pid end)
# capture placeholders (&1) cannot be used inside a nested fn, so bind the pid explicitly
|> Enum.map(fn pid -> Task.async(fn -> GenServer.call(pid, :location_data) end) end)
|> Enum.map(&Task.await/1)
:ets.insert(@ets_name, {{@world_map_key, world_identifier}, world})
:ets.tab2file(@ets_name, @ets_name)
{:reply, world_identifier, state}
end
@spec map_data() :: World.t
def map_data, do: World.Generator.call
@spec map_data_old() :: World.t
def map_data_old do
[
location("1", "The Broken Drum", "A tired bar that has seen too many fights",
[
partial_pathway("2", "upstairs"),
partial_pathway("3", "out"),
]),
location("2", "A quiet room", "This room is above the main room of the Broken Drum, and surprisingly all the noise dies down up here",
[
partial_pathway("1","down"),
]),
location("3", "outside", "This is the street outside the Broken Drum",
[
partial_pathway("1", "drum"),
partial_pathway("4", "east")
]),
location("4", "a busy street", "The Broken Drum is West of here.",
[
partial_pathway("3", "west"),
partial_pathway("5", "north")
]),
location("5", "a dark alley", "It is dark and you are likely to be eaten by a grue.",
[
partial_pathway("4", "south")
])
]
end
def location(id, name, desc, pathways) do
%World.Location{
id: id,
name: name,
description: desc,
pathways: pathways
}
end
def partial_pathway(from_id, name) do
%{from_id: from_id, name: name}
end
def random_room_id do
1
# Registry.match(World.Registry, "location", :_)
# |> Enum.map(fn({_, id}) -> id end)
# |> Enum.random
end
end
|
lib/dwarlixir/world/world.ex
| 0.578329
| 0.444203
|
world.ex
|
starcoder
|
defmodule ExPrompt do
@moduledoc """
ExPrompt is a helper package to add interactivity to your
command line applications as easy as possible.
It allows common operations such as:
- Asking for an answer.
- Asking for a "required" answer.
- Choosing between several options.
- Asking for confirmation.
- Asking for a password.
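For instance (prompts and defaults are illustrative):
    name = ExPrompt.string_required("What is your name? ")
    color = ExPrompt.choose("Favorite color?", ~w(red green blue))
    save? = ExPrompt.confirm("Save changes?", true)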
"""
@type prompt :: String.t()
@type choices :: list(String.t())
@doc """
Reads a line from `:stdio` displaying the prompt that is passed in.
In case of any errors or `:eof` this function will return the default value if present, or an empty string otherwise.
## Examples
To ask a user for their name and await for the input:
ExPrompt.string("What is your name?\n")
To ask for a hostname defaulting to `localhost`:
ExPrompt.string("What is the hostname?\n", "localhost")
"""
@spec string(prompt) :: String.t()
@spec string(prompt, String.t()) :: String.t()
def string(prompt, default \\ "") do
string_prompt(prompt, default)
|> ask(default)
end
@doc "Alias for `string/2`."
@spec get(prompt, String.t()) :: String.t()
def get(prompt, default \\ ""), do: string(prompt, default)
@doc """
Same as `string/2`, but it will continue to prompt the user while the response is empty.
"""
@spec string_required(prompt) :: String.t()
def string_required(prompt) do
case string(prompt) do
"" -> string_required(prompt)
str -> str
end
end
@doc "Alias for `string_required/1`."
@spec get_required(prompt) :: String.t()
def get_required(prompt), do: string_required(prompt)
@doc """
Asks for confirmation to the user.
It allows the user to answer or respond with the following options:
- Yes, yes, YES, Y, y
- No, no, NO, N, n
If the answer is none of the above, it will prompt again until a valid answer is given; an empty answer returns the default value if one is present.
## Examples
To ask whether the user wants to delete a file or not:
ExPrompt.confirm("Are you sure you want to delete this file?")
The same example as above, returning `false` by default:
ExPrompt.confirm("Are you sure you want to delete this file?", false)
"""
@spec confirm(prompt) :: boolean()
@spec confirm(prompt, boolean() | nil) :: boolean()
def confirm(prompt, default \\ nil) do
answer =
prompt
|> boolean_prompt(default)
|> ask()
|> String.downcase()
cond do
answer in ~w(yes y) -> true
answer in ~w(no n) -> false
answer == "" and default != nil -> default
true -> confirm(prompt, default)
end
end
@doc "Alias for `confirm/2`."
@spec yes?(prompt) :: boolean()
@spec yes?(prompt, boolean() | nil) :: boolean()
def yes?(prompt, default \\ nil), do: confirm(prompt, default)
@doc """
Asks the user to select from a list of choices.
It returns either the index of the element in the list
or -1 if it's not found.
This function first tries to interpret the answer as the number shown next to a
choice; if that fails, it falls back to looking up the index of the choice whose
text matches what the user typed.
## Examples
To ask for a favorite color from a predefined list:
    ExPrompt.choose("Favorite color?", ~w(red green blue))
The same example as above, but with the second option (green, index `1`) as the default value if no answer is given:
    ExPrompt.choose("Favorite color?", ~w(red green blue), 1)
"""
@spec choose(prompt, choices) :: integer()
@spec choose(prompt, choices, integer()) :: integer()
def choose(prompt, choices, default \\ -1) do
IO.puts("")
if default < -1 || default >= length(choices) do
raise "Invalid default value, the value must be between -1 (none) and #{length(choices) - 1}"
end
answer =
list_prompt(prompt, choices, default)
|> ask()
try do
n = String.to_integer(answer)
if n > 0 and n <= length(choices), do: n - 1, else: -1
rescue
_e in ArgumentError ->
case answer do
"" ->
default
_ ->
case Enum.find_index(choices, &(&1 == answer)) do
nil -> -1
idx -> idx
end
end
end
end
@doc """
Asks for a password.
This method will hide the password by default as the user types.
If that's not the desired behavior, it accepts `false`, and the password
will be shown as it's being typed.
## Examples
ExPrompt.password("Password: ")
ExPrompt.password("Password: ", false)
"""
@spec password(prompt) :: String.t()
@spec password(prompt, hide :: boolean()) :: String.t()
def password(prompt, hide \\ true) do
prompt = String.trim(prompt)
case hide do
true ->
pid = spawn_link(fn -> pw_loop(prompt) end)
ref = make_ref()
value = IO.gets(prompt <> " ")
send(pid, {:done, self(), ref})
receive do: ({:done, ^pid, ^ref} -> :ok)
value
false ->
IO.gets(prompt <> " ")
end
|> String.trim()
end
defp pw_loop(prompt) do
receive do
{:done, parent, ref} ->
send(parent, {:done, self(), ref})
IO.write(:stderr, "\e[2K\r")
after
1 ->
IO.write(:stderr, "\e[2K\r#{prompt} ")
pw_loop(prompt)
end
end
defp format_trailing(prompt, default \\ nil) do
case Regex.named_captures(~r/(?<spaces>[[:space:]]*)$/, prompt) do
%{"spaces" => ""} when default != nil ->
"#{String.trim_trailing(prompt)} #{default} "
%{"spaces" => ""} ->
"#{String.trim_trailing(prompt)} "
%{"spaces" => spaces} when default != nil ->
"#{String.trim_trailing(prompt)} #{default}#{spaces}"
%{"spaces" => spaces} ->
"#{String.trim_trailing(prompt)}#{spaces}"
end
end
defp format_choices(choices) do
choices
|> Enum.with_index()
|> Enum.reduce("", fn {c, i}, acc ->
"#{acc} #{i + 1}) #{c}\n"
end)
end
defp string_prompt(prompt, ""), do: format_trailing(prompt)
defp string_prompt(prompt, nil), do: format_trailing(prompt)
defp string_prompt(prompt, default), do: format_trailing(prompt, "(#{default})")
defp boolean_prompt(prompt, nil), do: format_trailing(prompt, "[yn]")
defp boolean_prompt(prompt, true), do: format_trailing(prompt, "[Yn]")
defp boolean_prompt(prompt, false), do: format_trailing(prompt, "[yN]")
defp list_prompt(prompt, choices, -1) do
"#{format_choices(choices)}\n#{format_trailing(prompt)}"
end
defp list_prompt(prompt, choices, default) do
"#{format_choices(choices)}\n" <> format_trailing(prompt, "(#{Enum.at(choices, default)})")
end
defp ask(prompt, default \\ nil) do
case IO.gets(prompt) do
:eof ->
""
{:error, _reason} ->
""
str ->
case String.trim_trailing(str) do
"" -> default || ""
value -> value
end
end
end
end
|
lib/ex_prompt.ex
| 0.781331
| 0.523177
|
ex_prompt.ex
|
starcoder
|
defmodule J1605.Device do
use GenServer
@enforce_keys [:socket]
defstruct [:socket, :time_to_wait, relays: nil]
def start_link(arg) do
GenServer.start_link(__MODULE__, arg, name: __MODULE__)
end
@impl true
def init({address, port, time_to_wait}) do
with {:ok, socket} <- :gen_tcp.connect(address, port, [:binary, active: true]) do
{:ok, %__MODULE__{socket: socket, time_to_wait: time_to_wait, relays: nil}}
else
error -> {:stop, error}
end
end
def init(_) do
{:stop, :bad_args}
end
@impl true
def handle_cast(request, state = %{socket: socket}) do
with :ok <- perform(socket, request) do
time = state.time_to_wait
if(is_integer(time)) do
Process.sleep(time)
end
{:noreply, state}
else
{:error, reason} -> {:stop, reason, %{state | socket: nil}}
end
end
defp perform(socket, request) do
case request do
:states -> check_states(socket)
{action, number} -> perform_switch(socket, action, number)
end
end
defp perform_switch(socket, value, number) when number in 0..15 do
  case value do
    true ->
      # switch relay `number` on (command byte 0x25)
      :gen_tcp.send(socket, <<0x25, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, number, 0x00>>)
    false ->
      # switch relay `number` off (command byte 0x26)
      :gen_tcp.send(socket, <<0x26, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, number, 0x00>>)
  end
end
defp check_states(socket) do
  # ask the device to report the current state of all relays
  :gen_tcp.send(socket, <<0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00>>)
end
@impl true
def handle_info(
{:tcp, socket, message},
state = %__MODULE__{socket: state_socket}
)
when socket === state_socket do
case message do
<<0x01, _, _, _, 0x05, _, _, 0x20, relay_bits::little-integer-size(16), 0x00, 0x00>> ->
relays =
<<relay_bits::size(16)>> |> relay_bits_to_list |> Enum.reverse() |> List.to_tuple()
Registry.dispatch(J1605.Registry, "subscribers", fn entries ->
Enum.each(entries, fn {pid, _} -> send(pid, {:states, relays}) end)
end)
{:noreply, %{state | relays: relays}}
_ ->
{:noreply, state}
end
end
defp relay_bits_to_list(bits) when is_bitstring(bits) do
for <<b::size(1) <- bits>> do
case b do
0 -> false
1 -> true
end
end
end
end
|
lib/j1605/device.ex
| 0.5794
| 0.401219
|
device.ex
|
starcoder
|
defmodule SparkPost.SuppressionList do
@moduledoc """
The SparkPost Suppression List API for working with suppression lists.
Use `SparkPost.SuppressionList.delete/1` to delete a single entry from a list,
`SparkPost.SuppressionList.upsert_one/3` to insert or update a single list entry,
or `SparkPost.SuppressionList.search/1` to search through your account's suppression list.
Check out the documentation for each function
or use the [SparkPost API reference](https://developers.sparkpost.com/api/suppression_list.html) for details.
Returned by `SparkPost.SuppressionList.delete/1`:
- {:ok, ""}
Returned by `SparkPost.SuppressionList.upsert_one/3`:
- {:ok, message} (a success message string)
Returned by `SparkPost.SuppressionList.search/1`:
- %SparkPost.SuppressionList.SearchResult{}
"""
alias SparkPost.Endpoint
@doc """
Insert or update a single entry in the suppression list.
Returns a single string with the success message if the entry
was updated or inserted. Returns a %SparkPost.Endpoint.Error{} with a 400
if there was an issue with the request format.
Parameters:
- recipient: the email to insert or update in the suppression list
- type: one of "transactional" or "non_transactional"
- description (optional): optional description of this entry in the suppression list
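## Example
A hypothetical call (the address and description are illustrative):
    SparkPost.SuppressionList.upsert_one("test@example.com", "transactional", "hard bounce")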
"""
def upsert_one(recipient, type, description \\ nil) do
body = if description == nil do
%{type: type}
else
%{type: type, description: description}
end
response = Endpoint.request(:put, "suppression-list/#{recipient}", body)
case response do
%SparkPost.Endpoint.Response{status_code: 200, results: results} ->
{:ok, Map.get(results, :message, "")}
_ -> {:error, response}
end
end
@doc """
Deletes a specific entry from the list. Returns an empty string if
the deletion was successful. Returns a %SparkPost.Endpoint.Error{} with a 404
if the specified entry is not in the list. Returns a %SparkPost.Endpoint.Error{}
with a 403 if the entry could not be removed for any reason (such as Compliance).
Parameters:
- recipient: the entry to delete from the suppression list
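## Example
A hypothetical call (the address is illustrative):
    SparkPost.SuppressionList.delete("test@example.com")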
"""
def delete(recipient) do
response = Endpoint.request(:delete, "suppression-list/#{recipient}", %{}, %{}, [], false)
case response do
%SparkPost.Endpoint.Response{status_code: 204} ->
{:ok, ""}
_ -> {:error, response}
end
end
@doc """
Execute a search of the suppression list based on the provided
parameters.
### Possible Parameters
- to: Datetime the entries were last updated, in the format YYYY-MM-DDTHH:mm:ssZ (defaults to now)
- from: Datetime the entries were last updated, in the format YYYY-MM-DDTHH:mm:ssZ
- domain: Domain of entries to include in search
- cursor: Results cursor (first query should use the value "initial")
- per_page: Max number of results to return per page (between 1 and 10,000)
- page: Results page number to return. Use if looking for less than 10,000 results. Otherwise
use the cursor param.
- sources: Sources of entries to include in the search.
- types: Types of entries to include in the search (transactional and/or non_transactional)
- description: Description of entries to include in the search.
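## Example
A hypothetical search (parameter values are illustrative):
    SparkPost.SuppressionList.search(types: "transactional", domain: "example.com", per_page: 25)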
"""
def search(params \\ []) do
response = Endpoint.request(:get, "suppression-list", %{}, %{}, [params: params], false)
case response do
%SparkPost.Endpoint.Response{results: body} ->
mapped_results = Enum.map(body.results, fn res -> struct(SparkPost.SuppressionList.ListEntry, res) end)
%SparkPost.SuppressionList.SearchResult{
results: mapped_results,
links: body.links,
total_count: body.total_count
}
_ -> response
end
end
end
|
lib/suppression_list.ex
| 0.878673
| 0.701048
|
suppression_list.ex
|
starcoder
|
defmodule Membrane.H264.FFmpeg.Encoder do
@moduledoc """
Membrane element that encodes raw video frames to H264 format.
The element expects each frame to be received in a separate buffer, so the parser
(`Membrane.Element.RawVideo.Parser`) may be required in a pipeline before
the encoder (e.g. when input is read from `Membrane.File.Source`).
Additionally, the encoder has to receive proper caps with picture format and dimensions
before any encoding takes place.
Please check `t:t/0` for available options.
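A rough sketch of such a pipeline (the parser options and the source/sink modules
are illustrative and depend on your Membrane version):
    children = [
      source: %Membrane.File.Source{location: "input.yuv"},
      parser: %Membrane.Element.RawVideo.Parser{format: :I420, width: 1280, height: 720},
      encoder: %Membrane.H264.FFmpeg.Encoder{preset: :fast, crf: 23},
      sink: %Membrane.File.Sink{location: "output.h264"}
    ]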
"""
use Membrane.Filter
use Bunch.Typespec
alias __MODULE__.Native
alias Membrane.Buffer
alias Membrane.H264
alias Membrane.H264.FFmpeg.Common
alias Membrane.RawVideo
def_input_pad :input,
demand_mode: :auto,
demand_unit: :buffers,
caps: {RawVideo, pixel_format: one_of([:I420, :I422]), aligned: true}
def_output_pad :output,
demand_mode: :auto,
caps: {H264, stream_format: :byte_stream, alignment: :au}
@default_crf 23
@list_type presets :: [
:ultrafast,
:superfast,
:veryfast,
:faster,
:fast,
:medium,
:slow,
:slower,
:veryslow,
:placebo
]
def_options crf: [
description: """
Constant rate factor that affects the quality of output stream.
Value of 0 is lossless compression while 51 (for 8-bit samples)
or 63 (10-bit) offers the worst quality.
The range is exponential, so increasing the CRF value +6 results
in roughly half the bitrate / file size, while -6 leads
to roughly twice the bitrate.
""",
type: :int,
default: @default_crf
],
preset: [
description: """
A collection of predefined options that trade encoding speed for compression efficiency.
The slower the chosen preset, the higher the compression that can be achieved
for the same quality.
""",
type: :atom,
spec: presets(),
default: :medium
],
profile: [
description: """
Sets a limit on the features that the encoder will use to the ones supported in a provided H264 profile.
Said features will have to be supported by the decoder in order to decode the resulting video.
It may override other, more specific options affecting compression (e.g. setting `max_b_frames` to 2
while profile is set to `:baseline` will have no effect and no B-frames will be present).
""",
type: :atom,
spec: H264.profile_t() | nil,
default: nil
],
use_shm?: [
type: :boolean,
description:
"If true, native encoder will use shared memory (via `t:Shmex.t/0`) for storing frames",
default: false
],
max_b_frames: [
type: :int,
description:
"Maximum number of B-frames between non-B-frames. Set to 0 to encode video without b-frames",
default: nil
]
@impl true
def handle_init(opts) do
state =
opts
|> max_b_frames_to_native_format()
|> Map.put(:encoder_ref, nil)
{:ok, state}
end
defp max_b_frames_to_native_format(%{max_b_frames: nil} = opts), do: %{opts | max_b_frames: -1}
defp max_b_frames_to_native_format(opts), do: opts
@impl true
def handle_process(:input, buffer, _ctx, state) do
%{encoder_ref: encoder_ref, use_shm?: use_shm?} = state
pts = buffer.pts || 0
case Native.encode(
buffer.payload,
Common.to_h264_time_base_truncated(pts),
use_shm?,
encoder_ref
) do
{:ok, dts_list, frames} ->
bufs = wrap_frames(dts_list, frames)
{{:ok, bufs}, state}
{:error, reason} ->
{{:error, reason}, state}
end
end
@impl true
def handle_caps(:input, %RawVideo{} = caps, _ctx, state) do
{framerate_num, framerate_denom} = caps.framerate
with {:ok, buffers} <- flush_encoder_if_exists(state),
{:ok, new_encoder_ref} <-
Native.create(
caps.width,
caps.height,
caps.pixel_format,
state.preset,
state.profile,
state.max_b_frames,
framerate_num,
framerate_denom,
state.crf
) do
caps = create_new_caps(caps, state)
actions = buffers ++ [caps: caps]
{{:ok, actions}, %{state | encoder_ref: new_encoder_ref}}
else
{:error, reason} -> {{:error, reason}, state}
end
end
@impl true
def handle_end_of_stream(:input, _ctx, state) do
case flush_encoder_if_exists(state) do
{:ok, buffers} ->
actions = buffers ++ [end_of_stream: :output, notify: {:end_of_stream, :input}]
{{:ok, actions}, state}
{:error, reason} ->
{{:error, reason}, state}
end
end
@impl true
def handle_prepared_to_stopped(_ctx, state) do
{:ok, %{state | encoder_ref: nil}}
end
defp flush_encoder_if_exists(%{encoder_ref: nil}) do
{:ok, []}
end
defp flush_encoder_if_exists(%{encoder_ref: encoder_ref, use_shm?: use_shm?}) do
with {:ok, dts_list, frames} <- Native.flush(use_shm?, encoder_ref) do
buffers = wrap_frames(dts_list, frames)
{:ok, buffers}
end
end
defp wrap_frames([], []), do: []
defp wrap_frames(dts_list, frames) do
Enum.zip(dts_list, frames)
|> Enum.map(fn {dts, frame} ->
%Buffer{dts: Common.to_membrane_time_base_truncated(dts), payload: frame}
end)
|> then(&[buffer: {:output, &1}])
end
defp create_new_caps(caps, state) do
{:output,
%H264{
alignment: :au,
framerate: caps.framerate,
height: caps.height,
width: caps.width,
profile: state.profile,
stream_format: :byte_stream
}}
end
end
|
lib/membrane_h264_ffmpeg/encoder.ex
| 0.881405
| 0.553928
|
encoder.ex
|
starcoder
|
defmodule Adventofcode.Day15OxygenSystem do
use Adventofcode
alias __MODULE__.{Direction, Droid, Maze, Position, Printer, Program, Runner, Tile}
def part_1(input) do
input
|> Program.parse()
|> Maze.new(view: {-19..21, -21..19})
|> Runner.find_oxygen_system()
|> Map.get(:oxygen_system)
|> Map.get(:steps)
end
def part_2(input) do
input
|> Program.parse()
|> Maze.new(view: {-19..21, -21..19})
|> Runner.find_oxygen_system()
|> Runner.fill_with_oxygen()
# |> Printer.print()
|> Map.get(:last_empty)
|> Map.get(:steps)
end
defmodule Position do
@enforce_keys [:x, :y]
defstruct x: 0, y: 0
def new(x, y), do: %Position{x: x, y: y}
def move(position, direction) do
%{position | x: position.x + direction.x, y: position.y + direction.y}
end
end
defmodule Direction do
@enforce_keys [:x, :y, :code]
defstruct x: 0, y: 0, code: 0
def north, do: %Direction{x: 0, y: -1, code: 1}
def south, do: %Direction{x: 0, y: 1, code: 2}
def west, do: %Direction{x: 1, y: 0, code: 3}
def east, do: %Direction{x: -1, y: 0, code: 4}
def all, do: [west(), east(), north(), south()]
def code(%Direction{code: code}), do: code
end
defmodule Tile do
@enforce_keys [:type, :code]
defstruct type: :empty, code: 1
def wall, do: %Tile{type: :wall, code: 0}
def empty, do: %Tile{type: :empty, code: 1}
def oxygen_system, do: %Tile{type: :oxygen_system, code: 2}
def all, do: [wall(), empty(), oxygen_system()]
def wall?(%Tile{type: :wall}), do: true
def wall?(_tile), do: false
def parse(code) do
Enum.find(all(), &(&1.code == code)) || raise("Unknown tile with code #{code}")
end
end
defmodule Program do
alias Adventofcode.IntcodeComputer
def parse(input) do
input
|> IntcodeComputer.parse()
|> IntcodeComputer.fallback_input(nil)
end
def prepare_move(program, direction) do
IntcodeComputer.input(program, Direction.code(direction))
end
def run(program) do
program
|> IntcodeComputer.run()
|> IntcodeComputer.pop_outputs()
|> tile_from_output
end
defp tile_from_output({[code], program}), do: {Tile.parse(code), program}
end
defmodule Droid do
@enforce_keys [:program]
defstruct program: nil, position: Position.new(0, 0), steps: 0, tile: nil
def program(%Droid{program: program}), do: program
def position(%Droid{position: position}), do: position
def tile(%Droid{tile: tile}), do: tile
def move(droid, direction) do
program = Program.prepare_move(droid.program, direction)
position = Position.move(droid.position, direction)
%{droid | program: program, position: position, steps: droid.steps + 1}
end
def run_program(droid) do
{tile, program} = Program.run(droid.program)
%{droid | program: program, tile: tile}
end
def crashed_into_wall?(%Droid{tile: %Tile{type: :wall}}), do: true
def crashed_into_wall?(_droid), do: false
end
defmodule Maze do
@default_view {0..0, 0..0}
@enforce_keys [:droids]
defstruct tiles: %{Position.new(0, 0) => Tile.empty()},
view: @default_view,
droids: [],
oxygen_system: nil,
last_empty: nil
def new(program, options) do
view = Keyword.get(options, :view, @default_view)
%Maze{view: view, droids: [%Droid{program: program}]}
end
def clear_empty_tiles(maze) do
  # keep only wall tiles so previously visited empty tiles can be explored again
  %{maze | tiles: maze.tiles |> Enum.filter(&Tile.wall?(elem(&1, 1))) |> Enum.into(%{})}
end
def tile_visited?(%Maze{tiles: tiles}, position) do
Map.has_key?(tiles, position)
end
def get_tile(%Maze{tiles: tiles}, position) do
Map.get(tiles, position)
end
def get_droid(maze, position) do
Enum.find(maze.droids, &(&1.position == position))
end
def get_droids(%Maze{droids: droids}), do: droids
def set_droids(maze, droids), do: %{maze | droids: droids}
def update_droids(maze, droids) do
droids
|> Enum.reduce(maze, &add_droid_tile(&2, &1))
|> do_update_droids(droids)
end
defp do_update_droids(maze, droids) do
%{maze | droids: Enum.reject(droids, &Droid.crashed_into_wall?/1)}
end
defp add_droid_tile(maze, %{tile: %{type: :oxygen_system}} = droid) do
maze
|> Map.update!(:oxygen_system, &(&1 || droid))
|> do_add_droid_tile(droid)
end
defp add_droid_tile(maze, %{tile: %{type: :empty}} = droid) do
%{maze | last_empty: droid}
|> do_add_droid_tile(droid)
end
defp add_droid_tile(maze, droid) do
maze
|> do_add_droid_tile(droid)
end
defp do_add_droid_tile(maze, droid) do
tiles = Map.put(maze.tiles, droid.position, droid.tile)
%{maze | tiles: tiles, view: update_view(maze.view, droid.position)}
end
defp update_view({x1..x2, y1..y2}, %{x: x, y: y}) do
{update_range(x1..x2, x), update_range(y1..y2, y)}
end
defp update_range(n1..n2, n) when n in n1..n2, do: n1..n2
defp update_range(n1..n2, n) when n < n1, do: n..n2
defp update_range(n1..n2, n) when n > n2, do: n1..n
end
defmodule Explorer do
def explore(droids, maze) do
droids = Enum.map(droids, &Droid.run_program/1)
Maze.update_droids(maze, droids)
end
end
defmodule Runner do
def find_oxygen_system(maze), do: run(maze)
def fill_with_oxygen(maze) do
maze
|> Maze.clear_empty_tiles()
|> Maze.set_droids([%{maze.oxygen_system | steps: 0}])
|> run()
end
def run(%{droids: []} = maze), do: maze
def run(maze) do
maze
|> Maze.get_droids()
|> Enum.flat_map(&unvisited_directions(maze, &1))
|> Enum.uniq_by(&Droid.position/1)
|> Explorer.explore(maze)
|> run()
end
defp unvisited_directions(maze, droid) do
Direction.all()
|> Enum.map(&Droid.move(droid, &1))
|> Enum.reject(&Maze.tile_visited?(maze, Droid.position(&1)))
end
end
defmodule Printer do
def print(maze) do
IO.puts("\n" <> s_print(maze))
maze
end
def s_print(%{view: {_, y1..y2}} = maze) do
y1..y2
|> Enum.to_list()
|> Enum.map_join("\n", &print_row(maze, &1))
end
defp print_row(%{view: {x1..x2, _}} = maze, y) do
x1..x2
|> Enum.to_list()
|> Enum.map(&Position.new(&1, y))
|> Enum.map_join(&do_print_row(maze, &1))
end
defp do_print_row(maze, position) do
droid = Maze.get_droid(maze, position)
print_tile(droid, Maze.get_tile(maze, position))
end
defp print_tile(%Droid{}, _), do: "<>"
defp print_tile(nil, nil), do: " "
defp print_tile(nil, %Tile{type: :wall}), do: "██"
defp print_tile(nil, %Tile{type: :empty}), do: ".."
defp print_tile(nil, %Tile{type: :oxygen_system}) do
IO.ANSI.format([IO.ANSI.blue(), "██", IO.ANSI.reset()])
end
end
end
defimpl Inspect, for: Adventofcode.Day15OxygenSystem.Position do
import Inspect.Algebra
def inspect(%{x: x, y: y}, _opts) do
concat(["#Position{", to_string(x), " ", to_string(y), "}"])
end
end
defimpl Inspect, for: Adventofcode.Day15OxygenSystem.Tile do
import Inspect.Algebra
def inspect(%{type: type}, _opts) do
concat(["#Tile{", to_string(type), "}"])
end
end
|
lib/day_15_oxygen_system.ex
| 0.683208
| 0.570391
|
day_15_oxygen_system.ex
|
starcoder
|
defmodule Commands.ListCommands do
alias Interp.Functions
alias Interp.Interpreter
alias Commands.IntCommands
alias Commands.GeneralCommands
alias Commands.MatrixCommands
require Interp.Functions
def prefixes(a) do
case a do
_ when Functions.is_iterable(a) ->
a |> Stream.scan([], fn (x, y) -> y ++ [x] end) |> Functions.stream
_ ->
String.to_charlist(to_string(a)) |> Stream.scan([], fn (x, y) -> y ++ [x] end) |> Stream.map(fn x -> to_string(x) end)
end
end
def suffixes(a) do
cond do
Functions.is_iterable(a) -> a |> Enum.reverse |> prefixes |> Stream.map(fn x -> x |> Enum.reverse end)
true -> String.graphemes(to_string(a)) |> Enum.reverse |> prefixes |> Stream.map(fn x -> x |> Enum.reverse |> Enum.join("") end)
end
end
def listify(a, b) do
cond do
a == :infinity ->
throw("Invalid head value for list. Value cannot be infinity.")
b == :infinity ->
Stream.scan(Stream.cycle([a]), fn (_, y) -> y + 1 end)
true ->
a..b
end
end
def rangify(a) do
case Stream.take(a, 1) |> Enum.to_list |> List.first do
nil -> []
_ ->
a |> Stream.transform(nil, fn (element, acc) ->
case acc do
nil -> {[element], element}
x when element == x -> {[element], element}
x when element > x -> {acc + 1..element, element}
x when element < x -> {acc - 1..element, element}
end
end)
|> Stream.map(fn x -> x end)
end
end
def split_individual(value) do
cond do
is_integer(value) -> split_individual(to_string(value))
Functions.is_iterable(value) -> value |> Stream.flat_map(&split_individual/1) |> Stream.map(fn x -> x end)
true -> String.graphemes(value)
end
end
def permute_by_function([], _, _), do: [[]]
def permute_by_function([head | remaining], commands, environment) do
for sub <- permute_by_function(remaining, commands, environment), curr <- [head, Interpreter.flat_interp(commands, [head], environment)] do
[curr] ++ sub
end
end
def take_first(value, count) do
cond do
count == 0 and Functions.is_iterable(value) -> []
count == 0 -> ""
is_number(value) -> take_first(to_string(value), count)
Functions.is_iterable(count) -> take_split(value, count)
Functions.is_iterable(value) and count < 0 -> Stream.take(value, length(Enum.to_list(value)) + count) |> Stream.map(fn x -> x end)
Functions.is_iterable(value) -> Stream.take(value, Functions.to_number(count)) |> Stream.map(fn x -> x end)
true -> String.slice(to_string(value), 0..count - 1)
end
end
defp take_split(value, counts) do
cond do
Functions.is_iterable(value) -> Stream.transform(counts, value, fn (x, acc) -> {[Stream.take(acc, Functions.to_number(x))], Stream.drop(acc, Functions.to_number(x))} end)
|> Stream.map(fn x -> x end)
true -> take_split(String.to_charlist(value), counts) |> Stream.map(fn x -> List.to_string(Enum.to_list(x)) end)
end
end
def enumerate(value) do
cond do
Functions.is_iterable(value) -> value |> Stream.with_index(1) |> Stream.map(fn {_, index} -> index end)
true -> 1..String.length(to_string(value))
end
end
def deltas(value) do
cond do
Functions.is_iterable(value) -> Stream.chunk_every(value, 2, 1, :discard) |> Stream.map(fn [x, y] -> Functions.to_number(y) - Functions.to_number(x) end)
true -> deltas(String.graphemes(to_string(value)))
end
end
def sum(value) do
case Enum.take(value, 1) do
[] -> 0
x when Functions.is_iterable(hd(x)) -> value |> Stream.map(fn x -> sum(x) end)
_ -> value |> Enum.reduce(0, fn (x, acc) -> acc + Functions.to_number(x) end)
end
end
def reduce_subtraction(value) do
case Enum.take(value, 1) do
[] -> 0
x when Functions.is_iterable(hd(x)) -> value |> Stream.map(fn x -> reduce_subtraction(x) end)
_ -> value |> Functions.to_number |> Enum.reduce(fn (x, acc) -> acc - x end)
end
end
def product(value) do
case Enum.take(value, 1) do
[] -> 1
x when Functions.is_iterable(hd(x)) -> value |> Stream.map(fn x -> product(x) end)
_ -> value |> Enum.reduce(1, fn (x, acc) -> acc * Functions.to_number(x) end)
end
end
def join(value, _) when not Functions.is_iterable(value), do: to_string(value)
def join(value, joiner) do
cond do
Enum.take(value, 1) == [] -> ""
value |> Enum.any?(fn x -> Functions.is_iterable(x) end) -> value |> Stream.map(fn x -> x |> join(joiner) end)
true -> value |> Enum.to_list |> Enum.map(&Functions.flat_string/1) |> Enum.join(joiner)
end
end
def grid_join(list) do
list |> Stream.map(fn x ->
if Functions.is_iterable(x) do
x |> Enum.to_list |> Enum.map(&Functions.flat_string/1) |> Enum.join(" ")
else
x
end
end) |> Enum.to_list |> Enum.join("\n")
end
def contains(value, element) do
cond do
Functions.is_iterable(value) -> Enum.find(value, fn x -> GeneralCommands.equals(x, element) end) != nil
true -> String.contains?(to_string(value), to_string(element))
end
end
def take_last(value, count) when Functions.is_iterable(value), do: value |> Stream.take(-count) |> Stream.map(fn x -> x end)
def take_last(value, count), do: Enum.join(take_last(String.graphemes(to_string(value)), count), "")
def drop_from(value, count) when Functions.is_iterable(value), do: value |> Stream.drop(count)
def drop_from(value, count), do: Enum.join(drop_from(String.graphemes(to_string(value)), count), "")
def surround(value, element) when Functions.is_iterable(value) and Functions.is_iterable(element), do: Stream.concat([element, value, element]) |> Stream.map(fn x -> x end)
def surround(value, element) when Functions.is_iterable(value), do: Stream.concat([[element], value, [element]]) |> Stream.map(fn x -> x end)
def surround(value, element) when Functions.is_iterable(element), do: Stream.concat([element, String.graphemes(to_string(value)), element]) |> Stream.map(fn x -> x end)
def surround(value, element), do: to_string(element) <> to_string(value) <> to_string(element)
def undelta(value) when Functions.is_iterable(value), do: Stream.concat([[0], value]) |> Stream.scan(fn (x, acc) -> Functions.to_number(x) + acc end)
def undelta(value), do: undelta(String.graphemes(to_string(value)))
def remove_from(value, filter_elements) do
cond do
Functions.is_iterable(value) and Functions.is_iterable(filter_elements) -> value |> Stream.filter(fn x -> !contains(filter_elements, x) end)
Functions.is_iterable(value) -> value |> Stream.filter(fn x -> !GeneralCommands.equals(filter_elements, x) end)
Functions.is_iterable(filter_elements) -> Enum.reduce(filter_elements, to_string(value), fn (x, acc) -> String.replace(acc, to_string(x), "") end)
true -> remove_from(value, [filter_elements])
end
end
def split_into(value, size) do
cond do
Functions.is_iterable(value) -> value |> Stream.chunk_every(size)
true -> to_charlist(to_string(value)) |> Stream.chunk_every(size) |> Stream.map(&to_string/1)
end
end
def split_on(value, split) do
split_chars = cond do
Functions.is_iterable(split) -> Enum.to_list split
true -> [to_string(split)]
end
cond do
Functions.is_iterable(value) ->
value |> Stream.chunk_while([],
fn (x, acc) -> if contains(split_chars, x) do {:cont, Enum.reverse(acc), []} else {:cont, [x | acc]} end end,
fn [] -> {:cont, []}; acc -> {:cont, Enum.reverse(acc), []} end)
true ->
String.split(to_string(value), split_chars)
end
end
def flat_index_in_list(list, element) when not Functions.is_iterable(element), do: flat_index_in_list(list, String.graphemes(to_string(element)))
def flat_index_in_list(list, element) when not Functions.is_iterable(list), do: flat_index_in_list(String.graphemes(to_string(list)), element)
def flat_index_in_list(list, element), do: flat_index_in_list(list, Enum.to_list(element), 0)
defp flat_index_in_list(list, element, index) do
curr_head = Stream.take(list, length(element)) |> Enum.to_list
cond do
length(curr_head) < length(element) -> -1
GeneralCommands.equals(curr_head, element) -> index
true -> flat_index_in_list(Stream.drop(list, 1), element, index + 1)
end
end
def list_multiply(value, len) do
cond do
Functions.is_iterable(value) -> value |> Stream.cycle |> Stream.take(length(Enum.to_list(value)) * len)
true -> Stream.cycle([value]) |> Stream.take(len)
end
end
def closest_to(value, element) when Functions.is_iterable(value), do: closest_to(Functions.to_number(value), Functions.to_number(element), nil, nil)
def closest_to(value, element), do: closest_to(Functions.to_number(String.graphemes(to_string(value))), Functions.to_number(element), nil, nil)
def closest_to(value, element, acc, min_distance) do
head = Enum.take(value, 1) |> Enum.to_list |> List.first
cond do
head == nil and acc == nil -> []
head == nil -> acc
abs(element - head) < min_distance -> closest_to(Enum.drop(value, 1), element, head, abs(element - head))
true -> closest_to(Enum.drop(value, 1), element, acc, min_distance)
end
end
def extract_every(value, n) do
cond do
Functions.is_iterable(value) -> 0..n - 1 |> Stream.map(fn x -> value |> Stream.drop(x) |> Stream.take_every(n) end)
true -> extract_every(String.graphemes(to_string(value)), n) |> Stream.map(fn x -> Enum.join(Enum.to_list(x), "") end)
end
end
def uniques(value) do
cond do
Functions.is_iterable(value) -> value |> Stream.transform([], fn (x, acc) -> if contains(acc, x) do {[], acc} else {[x], [x | acc]} end end) |> Stream.map(fn x -> x end)
true -> Enum.join(uniques(String.graphemes(to_string(value))))
end
end
def filter_to_front(value, filter_chars) do
cond do
Functions.is_iterable(value) and Functions.is_iterable(filter_chars) -> value |> Enum.sort_by(fn x -> not contains(filter_chars, x) end)
Functions.is_iterable(value) -> value |> Enum.sort_by(fn x -> not GeneralCommands.equals(x, filter_chars) end)
true -> filter_to_front(String.graphemes(to_string(value)), filter_chars) |> Enum.join("")
end
end
def keep_truthy_indices(value, indices) when is_bitstring(value), do: Enum.join(keep_truthy_indices(String.graphemes(to_string(value)), indices))
def keep_truthy_indices(value, indices) when is_bitstring(indices), do: keep_truthy_indices(value, String.graphemes(to_string(indices)))
def keep_truthy_indices(value, indices) do
Stream.zip(value, indices) |> Stream.filter(fn {_, index} -> GeneralCommands.equals(index, 1) end) |> Stream.map(fn {element, _} -> element end)
end
def deduplicate(string) when is_bitstring(string) or is_number(string), do: Enum.join(deduplicate(String.graphemes(to_string(string))), "")
def deduplicate(list) do
list |> Stream.transform(nil, fn (x, acc) -> if GeneralCommands.equals(x, acc) do {[], acc} else {[x], x} end end) |> Stream.map(fn x -> x end)
end
def index_in(value, element) when Functions.is_single?(value) do
case String.split(to_string(value), to_string(element), parts: 2) do
[left, _] -> String.length(left)
[_] -> -1
end
end
def index_in(value, element) when Functions.is_iterable(value) do
case first_where(value |> Stream.with_index, fn {x, _} -> GeneralCommands.equals(x, element) end) do
nil -> -1
{_, index} -> index
end
end
def first_where(stream, function) do
stream |> Stream.filter(function) |> Stream.take(1) |> Enum.to_list |> List.first
end
def index_in_stream(stream, element) do
case stream |> Stream.with_index |> Stream.filter(fn {x, _} -> GeneralCommands.equals(x, element) end) |> Stream.take(1) |> Enum.to_list |> List.first do
nil -> -1
{_, index} -> index
end
end
def lift(value) do
cond do
Functions.is_iterable(value) -> value |> Stream.with_index(1) |> Stream.map(fn {x, index} -> Functions.call_binary(fn a, b ->
Functions.to_number(a) * Functions.to_number(b) end, x, index) end)
true -> String.graphemes(to_string(value)) |> Stream.with_index(1) |> Stream.map(fn {x, index} -> String.duplicate(x, index) end)
end
end
def even_split(value, size) when is_number(value) or is_bitstring(value), do: even_split(String.graphemes(to_string(value)), size) |> Stream.map(fn x -> Enum.join(x, "") end)
def even_split(list, size) do
list_length = length(Enum.to_list list)
{final_size, remainder} = {IntCommands.divide(list_length, size), IntCommands.mod(list_length, size)}
if remainder == 0 do
split_into(list, final_size)
else
take_split(list, Stream.concat(Stream.cycle([final_size + 1]) |> Stream.take(remainder), Stream.cycle([final_size]) |> Stream.take(size - remainder)))
end
end
def remove_leading(value, element) when is_number(value) or is_bitstring(value), do: Enum.join(remove_leading(String.graphemes(to_string(value)), element), "")
def remove_leading(value, element) when Functions.is_iterable(element), do: value |> Stream.drop_while(fn x -> contains(element, x) end)
def remove_leading(value, element), do: value |> Stream.drop_while(fn x -> GeneralCommands.equals(x, element) end)
def remove_trailing(value, element) when is_number(value) or is_bitstring(value), do: Enum.join(remove_trailing(String.graphemes(to_string(value)), element), "")
def remove_trailing(value, element), do: value |> Enum.to_list |> Enum.reverse |> remove_leading(element) |> Enum.reverse
@doc """
Rotate the given value to the left or to the right depending on the given shift.
If the shift is larger than 0, the value is shifted that many times to the left.
If the shift is smaller than 0, the value is shifted by abs(shift) many times to the right.
## Parameters
- value: The value that will be shifted
- shift: The number of times the value will be shifted.
## Returns
The shifted result from the value.
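## Examples
A couple of illustrative calls:
    rotate([1, 2, 3, 4], 1)
    #=> [2, 3, 4, 1]
    rotate("abcd", -1)
    #=> "dabc"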
"""
def rotate(value, shift) when shift == 0, do: value
def rotate(value, shift) when not Functions.is_iterable(value), do: Enum.join(rotate(String.graphemes(to_string(value)), shift), "")
def rotate(value, shift) when shift > 0 do
case Enum.count value do
0 -> []
x ->
shift = rem(shift, x)
Stream.concat(value |> Stream.drop(shift), value |> Stream.take(shift)) |> Enum.to_list
end
end
def rotate(value, shift) when shift < 0 do
case Enum.count value do
0 -> []
x ->
shift = rem(shift, x)
Stream.concat(value |> Stream.take(shift), value |> Stream.drop(shift)) |> Enum.to_list
end
end
def zip(a) do
Stream.zip(a) |> Stream.map(fn x -> Tuple.to_list x end)
end
def zip(a, b) do
cond do
Functions.is_iterable(a) and Functions.is_iterable(b) -> Stream.zip(a, b) |> Stream.map(fn x -> Tuple.to_list x end)
Functions.is_iterable(a) -> Stream.zip(a, String.graphemes(to_string(b))) |> Stream.map(fn x -> Tuple.to_list x end)
Functions.is_iterable(b) -> Stream.zip(String.graphemes(to_string(a)), b) |> Stream.map(fn x -> Tuple.to_list x end)
true -> Stream.zip(String.graphemes(to_string(a)), String.graphemes(to_string(b))) |> Stream.map(fn x -> Enum.join(Tuple.to_list(x), "") end)
end
end
@doc """
Zip with filler for a single list. Zipping is done internally within the first given argument and
fills the remaining spaces with the given filler character. Since Elixir does not have a zip with filler
function, this is done using a resource generator for a new stream, repeatedly taking the first element of
the given list of lists and dropping one element for the next iteration. It checks each iteration whether
at least one element of the intermediate results is not equal to [] and replaces any occurrence of [] with
the filler element. If all elements of the intermediate result equals [], it halts the stream generation.
## Parameters
- a: The element that will be zipped. Assuming that this element is a list of lists.
- filler: The filler character, which can be of any type.
## Returns
Returns the resulting zipped list as a stream.
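## Examples
An illustrative call (the result is a stream of streams; shown here fully evaluated):
    zip_with_filler([[1, 2, 3], [4]], 0)
    #=> [[1, 4], [2, 0], [3, 0]]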
"""
def zip_with_filler(a, filler) do
Stream.resource(
# Initialize the accumulator with the given list of lists.
fn -> a end,
# With the intermediate accumulator as a parameter..
fn acc ->
# Take the first element of each sublist.
elements = acc |> Stream.map(fn n -> n |> Stream.take(1) |> Enum.to_list end)
# Check if there exists at least one element that does not equal []. If all elements equal [],
# we would now know that the zipping is done.
if Enum.any?(elements, fn n -> n != [] end) do
{[elements |> Stream.flat_map(fn n -> if n == [] do [filler] else n end end) |> Stream.map(fn x -> x end)], acc |> Stream.map(fn n -> n |> Stream.drop(1) end)}
else
{:halt, nil}
end
end,
fn _ -> nil end)
|> Stream.map(fn x -> x end)
end
def zip_with_filler(a, b, filler) do
a = cond do
Functions.is_iterable(a) -> a
true -> String.graphemes(to_string(a))
end
b = cond do
Functions.is_iterable(b) -> b
true -> String.graphemes(to_string(b))
end
Stream.resource(
# Initialize the accumulator with the given list of lists.
fn -> {a, b} end,
# With the intermediate accumulator as a parameter..
fn {left, right} ->
# Take the first element of each sublist.
elements = [left |> Stream.take(1) |> Enum.to_list, right |> Stream.take(1) |> Enum.to_list]
# Check if there exists at least one element that does not equal []. If all elements equal [],
# we would now know that the zipping is done.
if Enum.any?(elements, fn n -> n != [] end) do
{[elements |> Stream.flat_map(fn n -> if n == [] do [filler] else n end end) |> Stream.map(fn x -> x end)], {left |> Stream.drop(1), right |> Stream.drop(1)}}
else
{:halt, nil}
end
end,
fn _ -> nil end)
|> Stream.map(fn x -> x end)
end
def deep_flatten(list) do
list |> Stream.flat_map(fn x -> if Functions.is_iterable(x) do deep_flatten(x) else [x] end end)
|> Stream.map(fn x -> x end)
end
def substrings(list) do
list |> suffixes |> Enum.reverse |> Stream.flat_map(fn y -> prefixes(y) end) |> Stream.map(fn x -> x end)
end
def reverse(list) when Functions.is_iterable(list), do: list |> Enum.to_list |> Enum.reverse
def reverse(string), do: String.reverse(to_string(string))
def group_equal(list) do
cond do
Functions.is_iterable(list) -> Stream.chunk_while(list, {[], nil},
fn (x, {acc, last}) -> if last == nil or GeneralCommands.equals(x, last) do {:cont, {[x | acc], x}} else {:cont, acc, {[x], x}} end end,
fn ({acc, _}) -> case acc do [] -> {:cont, []}; acc -> {:cont, acc, []} end end)
true -> String.graphemes(to_string(list)) |> group_equal |> Stream.map(fn x -> x |> Enum.join("") end)
end
end
def keep_with_length(list, length) do
cond do
Functions.is_iterable(list) -> list |> Stream.filter(fn x -> GeneralCommands.length_of(x) == length end)
true -> []
end
end
def permutations([]), do: [[]]
def permutations(list) do
list = Enum.to_list list
for element <- list, remaining <- permutations(list -- [element]), do: [element | remaining]
end
# Powerset example: [1, 2, 3, 4] → [[], [1], [2], [1, 2], [3], [1, 3], [2, 3], [1, 2, 3], [4], [1, 4], [2, 4], ...]
def powerset(list) do
Stream.concat([[]], Stream.transform(list, [[]], fn (x, acc) ->
current_result = Enum.map(acc, fn n -> n ++ [x] end)
{current_result, acc ++ current_result}
end))
|> Stream.map(fn x -> x end)
end
def shape_like(a, b) when Functions.is_iterable(a) and Functions.is_iterable(b), do: a |> Stream.cycle |> Stream.take(length(Enum.to_list b)) |> Enum.to_list
def shape_like(a, b) when Functions.is_iterable(a), do: a |> Stream.cycle |> Stream.take(if Functions.is_number?(b) do Functions.to_integer(b) else String.length(to_string(b)) end) |> Enum.to_list
def shape_like(a, b), do: Enum.join(shape_like(Functions.to_list(a), b), "")
def cartesian(a, b) do
cond do
Functions.is_iterable(a) and Functions.is_iterable(b) -> Stream.flat_map(a, fn x -> b |> Stream.map(fn y -> [x, y] end) end) |> Stream.map(fn x -> x end)
Functions.is_iterable(a) -> cartesian(a, String.graphemes(to_string(b)))
Functions.is_iterable(b) -> cartesian(String.graphemes(to_string(a)), b)
true -> cartesian(String.graphemes(to_string(a)), String.graphemes(to_string(b))) |> Stream.map(fn x -> Enum.to_list(x) |> Enum.join("") end)
end
end
def cartesian_repeat(_, 0), do: [[]]
def cartesian_repeat(value, n) do
cond do
Functions.is_iterable(value) -> cartesian_repeat(value, n - 1) |> Stream.flat_map(fn x -> value |> Stream.map(fn y -> x ++ [y] end) end) |> Stream.map(fn x -> x end)
true -> cartesian_repeat(String.graphemes(to_string(value)), n) |> Stream.map(fn x -> Enum.join(Enum.to_list(x), "") end)
end
end
def enumerate_inner(list) when Functions.is_iterable(list), do: list |> Stream.with_index |> Stream.map(fn {element, index} -> [element, index] end)
def enumerate_inner(value), do: enumerate_inner(String.graphemes(to_string(value)))
def divide_into(list, n) when is_number(n), do: divide_into(list, [List.duplicate([], n)])
def divide_into([], containers), do: containers
def divide_into(_, []), do: []
def divide_into([head | remaining], acc) do
acc |> Enum.flat_map(fn x -> divide_into(remaining, insert_anywhere([head], x)) end)
end
defp insert_anywhere(element, containers), do: insert_anywhere(element, containers, [])
defp insert_anywhere(_, [], _), do: []
defp insert_anywhere(element, [head_container | remaining], parsed) do
[parsed ++ [(head_container ++ element) | remaining] | insert_anywhere(element, remaining, parsed ++ [head_container])]
end
def combinations(_, 0), do: [[]]
def combinations([], _), do: []
def combinations(list, n) when length(list) == n, do: [list]
def combinations(list, n) when length(list) < n, do: []
def combinations([head | remaining], n), do: (for element <- combinations(remaining, n - 1), do: [head | element]) ++ combinations(remaining, n)
def partitions(list), do: partitions(list, [])
defp partitions([], acc), do: [acc |> Enum.reverse]
defp partitions([head | remaining], []), do: partitions(remaining, [[head]])
defp partitions([head | remaining], [head_acc | remaining_acc]) do
partitions(remaining, [[head]] ++ [head_acc | remaining_acc]) ++ partitions(remaining, [head_acc ++ [head] | remaining_acc])
end
def integer_partitions(number), do: integer_partitions(number, [], 1)
defp integer_partitions(0, acc, _), do: [acc |> Enum.reverse]
defp integer_partitions(x, _, _) when x < 0, do: []
defp integer_partitions(x, _, min_index) when min_index > x, do: []
defp integer_partitions(number, acc, min_index), do: min_index..number |> Enum.flat_map(fn index -> integer_partitions(number - index, [index | acc], index) end)
def increasing_contains([], _), do: false
def increasing_contains(list, value), do: increasing_contains(list, value, 5)
# a default argument on the /3 clause would conflict with the /2 clause above,
# so both arities are defined explicitly
def increasing_contains(list, value, certainty), do: increasing_contains(list, value, certainty, certainty)
def increasing_contains(_, _, _, 0), do: false
def increasing_contains(list, value, certainty, allowance) do
head = Functions.to_number GeneralCommands.head(list)
cond do
head == nil -> false
head < value -> increasing_contains(list |> Stream.drop(1), value, certainty, certainty)
head == value -> true
head > value -> increasing_contains(list |> Stream.drop(1), value, certainty, allowance - 1)
end
end
def unfold_up_to(start, function, limit) do
Stream.unfold(start,
fn index ->
result = function.(index)
cond do
result > limit -> nil
true -> {result, index + 1}
end
end) |> Stream.map(fn x -> x end)
end
def generate_n(start, function, n) do
Stream.unfold({start, n},
fn
{_, 0} -> nil
{acc, size} ->
result = function.(acc)
{acc, {result, size - 1}}
end) |> Stream.map(fn x -> x end)
end
def list_subtraction(left, []), do: left
def list_subtraction(left, [curr | remaining]) do
case left |> Enum.find_index(fn x -> GeneralCommands.equals(x, curr) end) do
nil -> left |> list_subtraction(remaining)
index -> ((left |> Enum.take(index)) ++ (left |> Enum.drop(index + 1))) |> list_subtraction(remaining)
end
end
def interleave(left, right) do
Stream.unfold({left, right}, fn
{left, right} ->
case Stream.take(left, 1) |> Enum.to_list do
[] ->
case Stream.take(right, 1) |> Enum.to_list do
[] -> nil
[element] -> {element, right |> Stream.drop(1)}
end
[element] -> {element, {right, left |> Stream.drop(1)}}
end
acc ->
case Stream.take(acc, 1) |> Enum.to_list do
[] -> nil
[element] -> {element, acc |> Stream.drop(1)}
end
end) |> Stream.map(fn x -> x end)
end
def continue(list), do: Stream.concat(list, Stream.cycle([List.last(Enum.to_list(list))])) |> Functions.as_stream
def deck_shuffle(list) do
[left, right] = list |> even_split(2) |> Enum.to_list
interleave(left, right)
end
def deck_unshuffle(list) do
list |> split_into(2) |> MatrixCommands.columns_of |> Stream.concat |> Functions.as_stream
end
def permutation_index(range, index) when Functions.is_single?(range), do: permutation_index(1..Functions.to_integer(range), index)
def permutation_index(range, index), do: permutation_index(Enum.to_list(range), index, [])
defp permutation_index([], _, parsed), do: parsed |> Enum.reverse
defp permutation_index(range, index, parsed) do
curr_index = Enum.at(range, div(index, IntCommands.factorial(length(range) - 1)))
permutation_index(range -- [curr_index], rem(index, IntCommands.factorial(length(range) - 1)), [curr_index | parsed])
end
def middle_of(string) when Functions.is_single?(string) do
case middle_of(Functions.to_list(string)) do
[left, right] -> left <> right
middle -> middle
end
end
def middle_of(list) do
list = Enum.to_list(list)
len = length(list)
mid = div(len, 2)
cond do
len == 0 -> []
rem(len, 2) == 0 -> [Enum.at(list, mid - 1), Enum.at(list, mid)]
true -> Enum.at(list, mid)
end
end
def split_on_truthy_indices(a, b) do
Stream.zip(a, Stream.concat(b, Stream.cycle([0]))) |> Stream.chunk_while(
[],
fn {item, index}, acc ->
if GeneralCommands.equals(index, 1) do
{:cont, Enum.reverse(acc), [item]}
else
{:cont, [item | acc]}
end
end,
fn
[] -> {:cont, []}
acc -> {:cont, Enum.reverse(acc), []}
end
) |> Stream.map(fn x -> x end)
end
def non_vectorizing_index_in(a, b) do
case Functions.to_list(a) |> Stream.with_index |> first_where(fn {x, _} -> GeneralCommands.equals(x, b) end) do
nil -> -1
{_, index} -> index
end
end
end
|
lib/commands/list_commands.ex
| 0.612773
| 0.684449
|
list_commands.ex
|
starcoder
|
defmodule GraphTh.Path do
@moduledoc """
GraphTh.Path is a path.
"""
defstruct path: []
@doc """
Generate an empty path.
## Examples
iex> GraphTh.Path.empty()
%GraphTh.Path{path: []}
"""
def empty() do
%GraphTh.Path{path: []}
end
@doc """
Generate a path from the given `path_list`.
## Examples
iex> GraphTh.Path.path([:a, :b])
%GraphTh.Path{path: [:a, :b]}
"""
def path(path_list) when is_list(path_list) do
%GraphTh.Path{path: path_list}
end
@doc """
Returns whether the given `path` is simple, that is, all vertices on `path` are distinct.
## Examples
iex> GraphTh.Path.is_simple?(GraphTh.Path.path([:a, :b]))
true
iex> GraphTh.Path.is_simple?(GraphTh.Path.path([:a, :b, :a]))
false
"""
def is_simple?(path) when is_struct(path) do
length(Enum.uniq(path.path)) == length(path.path)
end
@doc """
Returns the length of `path`.
## Examples
iex> GraphTh.Path.length_p(GraphTh.Path.empty())
0
iex> GraphTh.Path.length_p(GraphTh.Path.path([:a]))
0
iex> GraphTh.Path.length_p(GraphTh.Path.path([:a, :b]))
1
iex> GraphTh.Path.length_p(GraphTh.Path.path([:a, :b, :c]))
2
"""
def length_p(%GraphTh.Path{path: []}) do
0
end
def length_p(path) when is_struct(path) do
length(path.path) - 1
end
@doc """
  Returns whether the given `path` is trivial, that is, it has zero length (it is empty or consists of a single vertex).
## Examples
iex> GraphTh.Path.is_trivial?(GraphTh.Path.empty())
true
iex> GraphTh.Path.is_trivial?(GraphTh.Path.path([:a]))
true
iex> GraphTh.Path.is_trivial?(GraphTh.Path.path([:a, :b]))
false
iex> GraphTh.Path.is_trivial?(GraphTh.Path.path([:a, :b, :c]))
false
"""
def is_trivial?(path) when is_struct(path) do
length_p(path) == 0
end
@doc """
Returns a directed graph representation of the given `path`.
## Examples
iex> GraphTh.Path.induced_graph(GraphTh.Path.empty())
%GraphTh.Digraph{arcs: %{}}
iex> GraphTh.Path.induced_graph(GraphTh.Path.path([:a]))
%GraphTh.Digraph{arcs: %{a: []}}
iex> GraphTh.Path.induced_graph(GraphTh.Path.path([:a, :b]))
%GraphTh.Digraph{arcs: %{a: [:b], b: []}}
iex> GraphTh.Path.induced_graph(GraphTh.Path.path([:a, :b, :c]))
%GraphTh.Digraph{arcs: %{a: [:b], b: [:c], c: []}}
"""
def induced_graph(%GraphTh.Path{path: [vertice]}) do
GraphTh.Digraph.empty() |> GraphTh.Digraph.add_vertice(vertice)
end
def induced_graph(path) when is_struct(path) do
Enum.zip([nil] ++ path.path, path.path ++ [nil])
|> Enum.reduce(
GraphTh.Digraph.empty(),
fn
{_, nil}, g -> g
{nil, _}, g -> g
{v1, v2}, g -> GraphTh.Digraph.add_arc(g, {v1, v2})
end
)
end
end
|
lib/graph_th/path.ex
| 0.923902
| 0.742282
|
path.ex
|
starcoder
|
defmodule Immortal.Ghost do
@moduledoc """
A process that persists temporarily after an original process dies.
## Rationale
Suppose you need to track users' online status. You have one process
per user per connection, and automatically kill the process if the user
disconnects.
This strategy is good as far as it goes, but it assumes that the user is
offline the instant that they drop their connection. This may not be
accurate. Users can drop their connection for many reasons, such as by
driving through an area with poor cell reception or opening another app on
their mobile device, causing the app to go to the background and drop
connections.
In these cases, it is often better to assume a user is offline if they
drop their connection _and_ do not reconnect after a period of time.
This is what Ghost processes are for.
Instead of tying a user's status to a process that dies immediately on
disconnects, you should instead tie the status to a ghost: a process that
will remain alive for a period of time after the connection process dies.
This way, the user will be considered offline only after all their
connections _and_ their ghosts are dead.
## Example
Create a ghost when your connection process starts and use it to interface
with whatever presence tracking system you are using. This example uses
`Phoenix.Presence`, and the ghost will stay alive for 60 seconds after the
connection process dies.
# self() refers to the calling process here
{:ok, ghost} = Ghost.start(self(), 60_000, fn ->
# self() refers to the ghost here
Presence.track(self(), topic, user_id, %{status: "online"})
end)
Since the presence tracker will be watching the ghost, not the connection,
it will only consider the user offline 60 seconds after the connection
drops. If the user reconnects during this time, a new connection and new
ghost will start, causing the user to continue to appear online.
Save the ghost's `pid` in the connection process so that you can update
statuses later when users change it manually:
Ghost.update(ghost, fn _state ->
Presence.update(self(), topic, user_id, %{status: "busy"})
end)
"""
use GenServer
@typedoc """
A pid referencing an active Ghost.
"""
@type t :: pid
@typedoc """
A pid referencing the process the ghost should watch before dying.
"""
@type target_pid :: pid
@typedoc """
The number of milliseconds a Ghost should remain active after its
target process dies.
"""
@type milliseconds :: integer
@doc """
Starts a Ghost process, monitoring the given `pid`.
The timeout determines how long the Ghost will remain alive after the
`pid` process dies, in milliseconds.
The optional function determines the internal state that the Ghost process
will hold. You can use and modify this state using `update/2`.
## Examples
Ghosts remain alive after the target process dies.
iex> {:ok, target} = Agent.start(fn -> nil end)
...> {:ok, ghost} = Ghost.start(target, 1000)
...> Process.exit(target, :kill)
...> Process.alive?(ghost)
true
But they fade after the given timeout:
iex> {:ok, target} = Agent.start(fn -> nil end)
...> {:ok, ghost} = Ghost.start(target, 100)
...> Process.exit(target, :kill)
...> :timer.sleep(200)
...> Process.alive?(ghost)
false
"""
@spec start(target_pid, milliseconds, fun) :: {:ok, t}
def start(pid, timeout, fun \\ fn -> nil end)
when is_pid(pid) and is_integer(timeout) and is_function(fun) do
GenServer.start(__MODULE__, [pid, timeout, fun])
end
@doc """
Updates the internal state of the ghost using a function.
The return value of the function becomes the new state.
## Example
iex> {:ok, ghost} = Ghost.start(self(), 100, fn -> "hello" end)
...> Ghost.update(ghost, fn word -> word <> " world" end)
{:ok, "hello world"}
"""
@spec update(t, fun) :: {:ok, any}
def update(ghost, fun) when is_pid(ghost) and is_function(fun) do
GenServer.call(ghost, {:update, fun})
end
@doc false
def init([source, timeout, fun]) do
Process.monitor(source)
{:ok, %{timeout: timeout, source: source, value: fun.()}}
end
@doc false
def handle_call({:update, fun}, _from, state) do
value = fun.(state.value)
{:reply, {:ok, value}, %{state | value: value}}
end
@doc false
def handle_info({:DOWN, _ref, :process, _pid, _reason}, state) do
:timer.kill_after(state.timeout, self())
{:noreply, state}
end
end
|
lib/immortal/ghost.ex
| 0.800185
| 0.565419
|
ghost.ex
|
starcoder
|
defmodule Exq.Middleware.Server do
@moduledoc """
  Middleware Server is responsible for storing the middleware chain that is evaluated
  when performing a particular job. The middleware chain defaults to the Stats, Job and Manager middlewares.
  To push new middleware you must create a module with a common interface. The interface is similar to a `Plug`
  implementation: it has three functions, each of which receives an `Exq.Middleware.Pipeline` structure
  and must return the same structure, modified or not.
  Basically, the `before_work/1` function may update worker state, while `after_processed_work/1` and
  `after_failed_work/1` are for cleanup and notification.
For example, here is a valid middleware module:
```elixir
defmodule MyMiddleware do
    @behaviour Exq.Middleware.Behaviour
def before_work(pipeline) do
# some functionality goes here...
pipeline
end
def after_processed_work(pipeline) do
# some functionality goes here...
pipeline
end
def after_failed_work(pipeline) do
# some functionality goes here...
pipeline
end
end
```
To add this module to middleware chain:
```elixir
Exq.Middleware.Server.push(middleware_server_pid, MyMiddleware)
```
"""
use GenServer
@doc """
Starts middleware server
"""
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, default_middleware(opts), name: server_name(opts[:name]))
end
@doc """
Adds specified `middleware` module into the end of middleware list. `middleware` should have
`Exq.Middleware.Behaviour` behaviour
"""
def push(pid, middleware) do
GenServer.cast(pid, {:push, middleware})
end
@doc """
Retrieves list of middleware modules
"""
def all(pid) do
GenServer.call(pid, :all)
end
@doc """
Returns middleware server name
"""
def server_name(name) do
name = name || Exq.Support.Config.get(:name)
"#{name}.Middleware.Server" |> String.to_atom
end
@doc false
def terminate(_reason, _state) do
:ok
end
##===========================================================
## gen server callbacks
##===========================================================
def handle_cast({:push, middleware}, state) do
{:noreply, List.insert_at(state, -1, middleware)}
end
def handle_call(:all, _from, state) do
{:reply, state, state}
end
def init(args) do
{:ok, args}
end
##===========================================================
## Internal Functions
##===========================================================
defp default_middleware([]), do: []
defp default_middleware(opts), do: opts[:default_middleware]
end
|
lib/exq/middleware/server.ex
| 0.842037
| 0.776369
|
server.ex
|
starcoder
|
defmodule GameOfLifeCore.Universe do
@neighbours_places [:top_left, :top, :top_right, :left, :right, :bottom_left, :bottom, :bottom_right]
@valid_cells_values [:dead, :alive]
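  # A universe is a map of row index => row, where each row is a map of
  # column index => :dead | :alive. Indexes must run 0..n-1 and all rows must
  # have the same length, e.g.:
  #   %{0 => %{0 => :alive, 1 => :dead}, 1 => %{0 => :dead, 1 => :alive}}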
def evolve(universe) do
case check(universe) do
:ok -> evolve_safe(universe)
{error, extra_info} -> {:error, error, extra_info}
error -> {:error, error}
end
end
  defp check(universe) when not is_map(universe), do: :universe_is_not_a_map
  defp check(universe) do
    cond do
      universe === %{} -> :universe_empty
      !has_ordered_indexes?(universe) -> :universe_has_not_ordered_indexes
      !is_well_formed?(universe) -> :universe_is_not_well_formed
      true ->
        # Only inspect cell values once the structure is known to be sound,
        # otherwise check_values/1 could crash on malformed input.
        case check_values(universe) do
          {:ok, []} -> :ok
          {:not_valid, invalid_cells} -> {:universe_contains_unrecognized_values, invalid_cells}
        end
    end
  end
defp has_ordered_indexes?(universe) do
cols_ixs = Map.keys(universe) |> Enum.sort
cols_ixs === Enum.to_list(0..(Enum.count(universe) - 1))
&& Enum.all?(universe, fn({_, rows}) ->
      Enum.sort(Map.keys(rows)) === Enum.to_list(0..(Enum.count(rows) - 1))
end)
end
defp is_well_formed?(universe), do:
Enum.all?(universe, fn({_, row}) -> Enum.count(row) === Enum.count(universe[0]) end)
defp check_values(universe) do
invalid_cells = for y <- Map.keys(universe), x <- Map.keys(universe[y]), into: [] do
value = get_in(universe, [y, x])
if !is_a_valid_cell_value?(value), do: {y, x, value}
end
|> Enum.filter(&(&1 !== nil))
if(invalid_cells === []) do
{:ok, []}
else
{:not_valid, invalid_cells}
end
end
defp is_a_valid_cell_value?(value), do: Enum.member?(@valid_cells_values, value)
defp evolve_safe(universe) do
evolved_universe = map(universe, fn(x, y, cell) ->
neighbours = get_all_neighbours(universe, {y, x})
{:ok, evolved_cell} = GameOfLifeCore.Cell.evolve(cell, neighbours)
evolved_cell
end)
{:ok, evolved_universe}
end
defp map(universe, callback) do
for y <- Map.keys(universe), into: %{} do
mapped_cells = for x <- Map.keys(universe[y]), into: %{} do
{x, callback.(x, y, universe[y][x])}
end
{y, mapped_cells}
end
end
defp get_all_neighbours(universe, cell_coordinate) do
Enum.reduce(@neighbours_places, [], fn(p, n_acc) ->
case get_neighbour(universe, cell_coordinate, p) do
nil -> n_acc
neighbour -> [neighbour | n_acc]
end
end)
end
defp get_neighbour(universe, {x, y}, :top_left), do: universe[x - 1][y - 1]
defp get_neighbour(universe, {x, y}, :top), do: universe[x][y - 1]
defp get_neighbour(universe, {x, y}, :top_right), do: universe[x + 1][y - 1]
defp get_neighbour(universe, {x, y}, :left), do: universe[x - 1][y]
defp get_neighbour(universe, {x, y}, :right), do: universe[x + 1][y]
defp get_neighbour(universe, {x, y}, :bottom_left), do: universe[x - 1][y + 1]
defp get_neighbour(universe, {x, y}, :bottom), do: universe[x][y + 1]
defp get_neighbour(universe, {x, y}, :bottom_right), do: universe[x + 1][y + 1]
def get_random_one({rows, cols}) do
0..(rows - 1)
|> Enum.reduce(%{}, fn(row_index, row_acc) ->
row = 0..(cols - 1)
|> Enum.reduce(%{}, fn(col_index, col_acc) -> Map.put_new(col_acc, col_index, get_random_cell_status()) end)
Map.put_new(row_acc, row_index, row)
end)
end
defp get_random_cell_status do
r = :rand.uniform(100)
if (rem(r, 2) === 0) do
:alive
else
:dead
end
end
end
|
apps/game_of_life_core/lib/game_of_life/universe.ex
| 0.512693
| 0.538983
|
universe.ex
|
starcoder
|
defmodule Nebulex.Telemetry.StatsHandler do
@moduledoc """
Telemetry handler for aggregating cache stats; it relies on the default stats
implementation based on Erlang counters. See `Nebulex.Adapter.Stats`.
This handler is used by the built-in local adapter when the option `:stats`
is set to `true`.
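  ## Example
  As an illustration only (the adapter normally attaches this handler itself),
  a manual attachment via `:telemetry.attach/4` might look like the following,
  where the event name and `counter_ref` are placeholders for your cache's
  event and its stats counter reference:
      :telemetry.attach(
        "my-stats-handler",
        [:my_cache, :command, :stop],
        &Nebulex.Telemetry.StatsHandler.handle_event/4,
        counter_ref
      )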
"""
alias Nebulex.Adapter.Stats
## Handler
@doc false
def handle_event(_event, _measurements, %{adapter_meta: %{stats_counter: ref}} = metadata, ref) do
update_stats(metadata)
end
# coveralls-ignore-start
def handle_event(_event, _measurements, _metadata, _ref) do
:ok
end
# coveralls-ignore-stop
defp update_stats(%{
function_name: action,
result: :"$expired",
adapter_meta: %{stats_counter: ref}
})
when action in [:get, :take, :ttl] do
:ok = Stats.incr(ref, :misses)
:ok = Stats.incr(ref, :evictions)
:ok = Stats.incr(ref, :expirations)
end
defp update_stats(%{function_name: action, result: nil, adapter_meta: %{stats_counter: ref}})
when action in [:get, :take, :ttl] do
:ok = Stats.incr(ref, :misses)
end
defp update_stats(%{function_name: action, result: _, adapter_meta: %{stats_counter: ref}})
when action in [:get, :ttl] do
:ok = Stats.incr(ref, :hits)
end
defp update_stats(%{function_name: :take, result: _, adapter_meta: %{stats_counter: ref}}) do
:ok = Stats.incr(ref, :hits)
:ok = Stats.incr(ref, :evictions)
end
defp update_stats(%{
function_name: :put,
args: [_, _, _, :replace, _],
result: true,
adapter_meta: %{stats_counter: ref}
}) do
:ok = Stats.incr(ref, :updates)
end
defp update_stats(%{function_name: :put, result: true, adapter_meta: %{stats_counter: ref}}) do
:ok = Stats.incr(ref, :writes)
end
defp update_stats(%{
function_name: :put_all,
result: true,
args: [entries | _],
adapter_meta: %{stats_counter: ref}
}) do
:ok = Stats.incr(ref, :writes, length(entries))
end
defp update_stats(%{function_name: :delete, result: _, adapter_meta: %{stats_counter: ref}}) do
:ok = Stats.incr(ref, :evictions)
end
defp update_stats(%{
function_name: :execute,
args: [:delete_all | _],
result: result,
adapter_meta: %{stats_counter: ref}
}) do
:ok = Stats.incr(ref, :evictions, result)
end
defp update_stats(%{function_name: action, result: true, adapter_meta: %{stats_counter: ref}})
when action in [:expire, :touch] do
:ok = Stats.incr(ref, :updates)
end
defp update_stats(%{
function_name: :update_counter,
args: [_, amount, _, default, _],
result: result,
adapter_meta: %{stats_counter: ref}
}) do
offset = if amount >= 0, do: -1, else: 1
if result + amount * offset === default do
:ok = Stats.incr(ref, :writes)
else
:ok = Stats.incr(ref, :updates)
end
end
defp update_stats(_), do: :ok
end
|
lib/nebulex/telemetry/stats_handler.ex
| 0.764935
| 0.406126
|
stats_handler.ex
|
starcoder
|
defmodule ChromicPDF.ChromeError do
@moduledoc """
Exception in the communication with Chrome.
"""
defexception [:error, :opts, :message]
@impl true
def message(%__MODULE__{error: error, opts: opts}) do
"""
#{title_for_error(error)}
#{hint_for_error(error, opts)}
"""
end
defp title_for_error({:evaluate, _error}) do
"Exception in :evaluate expression"
end
defp title_for_error(error) do
error
end
defp hint_for_error("net::ERR_INTERNET_DISCONNECTED", _opts) do
"""
You are trying to navigate to a remote URL but Chrome is not able to establish a connection
to the remote host. Please make sure that you have access to the internet and that Chrome is
allowed to open a connection to the remote host by your firewall policy.
In case you are running ChromicPDF in "offline mode" this error is to be expected.
"""
end
defp hint_for_error("net::ERR_CERT" <> _, _opts) do
"""
You are trying to navigate to a remote URL via HTTPS and Chrome is not able to verify the
remote host's SSL certificate. If the remote is a production system, please make sure its
certificate is valid and has not expired.
In case you are connecting to a development/test system with a self-signed certificate, you
can disable certificate verification by passing the `:ignore_certificate_errors` flag.
{ChromicPDF, ignore_certificate_errors: true}
"""
end
defp hint_for_error({:evaluate, error}, opts) do
%{
"exception" => %{"description" => description},
"lineNumber" => line_number
} = error
%{expression: expression} = Keyword.fetch!(opts, :evaluate)
"""
Exception:
#{indent(description)}
Evaluated expression:
#{indent(expression, line_number)}
"""
end
defp hint_for_error(_other, _opts) do
"""
    Chrome has responded with the above error while you were trying to print a PDF.
"""
end
defp indent(expression, line_number \\ nil) do
expression
|> String.trim()
|> String.split("\n")
|> Enum.with_index()
|> Enum.map(fn
{line, ^line_number} -> "!!! #{line}"
{line, _line_number} -> " #{line}"
end)
|> Enum.join("\n")
end
end
|
lib/chromic_pdf/api/chrome_error.ex
| 0.717903
| 0.440409
|
chrome_error.ex
|
starcoder
|
defmodule Hui do
@moduledoc """
Hui 辉 ("shine" in Chinese) is an [Elixir](https://elixir-lang.org) client and library for
[Solr enterprise search platform](http://lucene.apache.org/solr/).
### Usage
- Searching Solr: `q/1`, `q/6`, `search/2`, `search/7`
- Updating: `update/3`, `delete/3`, `delete_by_query/3`, `commit/2`
- Other: `suggest/2`, `suggest/5`
- [README](https://hexdocs.pm/hui/readme.html#usage)
"""
import Hui.Guards
# deprecated
alias Hui.Request
alias Hui.Query
alias HTTPoison.Response
@type url :: binary | atom | Hui.URL.t()
@type query :: Query.solr_query()
@type update_query :: binary | map | list(map) | Query.Update.t()
# invalid / non existing host or domain
@error_nxdomain %Hui.Error{reason: :nxdomain}
@doc """
Issue a keyword list or structured query to the default Solr endpoint.
The query can either be a keyword list or a list of Hui structs - see `t:Hui.Query.solr_struct/0`.
This function is a shortcut for `search/2` with `:default` as URL key.
### Example
```
Hui.q(q: "loch", rows: 5, facet: true, "facet.field": ["year", "subject"])
# supply a list of Hui structs for more complex query, e.g. faceting
alias Hui.Query
Hui.q([%Query.Standard{q: "author:I*"}, %Query.Facet{field: ["cat", "author"], mincount: 1}])
# DisMax
  x = %Query.DisMax{q: "run", qf: "description^2.3 title", mm: "2<-25% 9<-3"}
y = %Query.Common{rows: 10, start: 10, fq: ["edited:true"]}
z = %Query.Facet{field: ["cat", "author"], mincount: 1}
Hui.q([x, y, z])
```
"""
@spec q(query) :: {:ok, Response.t()} | {:error, Hui.Error.t()}
# deprecated - to be removed in a future version
# coveralls-ignore-start
def q(%Hui.Q{} = query), do: Request.search(:default, [query])
# coveralls-ignore-stop
def q(q) when is_list(q), do: query(q, :default)
@doc """
Issue a keyword list or structured query to the default Solr endpoint, raising an exception in case of failure.
See `q/1`.
"""
@spec q!(Hui.Q.t() | Request.query_struct_list() | Keyword.t()) :: Response.t()
# deprecated - to be removed in a future version
# coveralls-ignore-start
def q!(%Hui.Q{} = query), do: Request.search(:default, true, [query])
# coveralls-ignore-stop
def q!(q) when is_list(q), do: query!(q, :default)
@doc """
Convenience function for issuing various typical queries to the default Solr endpoint.
### Example
```
Hui.q("scott")
# keywords
Hui.q("loch", 10, 20)
# .. with paging parameters
Hui.q("\\\"apache documentation\\\"~5", 1, 0, "stream_content_type_str:text/html", ["subject"])
# .. plus filter(s) and facet fields
```
"""
@spec q(
binary,
nil | integer,
nil | integer,
nil | binary | list(binary),
nil | binary | list(binary),
nil | binary
) :: {:ok, Response.t()} | {:error, Hui.Error.t()}
def q(keywords, rows \\ nil, start \\ nil, filters \\ nil, facet_fields \\ nil, sort \\ nil)
def q(keywords, _, _, _, _, _) when is_nil_empty(keywords),
do: {:error, %Hui.Error{reason: :einval}}
def q(keywords, rows, start, filters, facet_fields, sort) do
search(:default, keywords, rows, start, filters, facet_fields, sort)
end
@doc """
Convenience function for issuing various typical queries to the default Solr endpoint,
raise an exception in case of failure.
"""
@spec q!(
binary,
nil | integer,
nil | integer,
nil | binary | list(binary),
nil | binary | list(binary),
nil | binary
) :: Response.t()
def q!(keywords, rows \\ nil, start \\ nil, filters \\ nil, facet_fields \\ nil, sort \\ nil)
def q!(keywords, _, _, _, _, _) when is_nil_empty(keywords),
do: raise(%Hui.Error{reason: :einval})
def q!(keywords, rows, start, filters, facet_fields, sort) do
search!(:default, keywords, rows, start, filters, facet_fields, sort)
end
@doc """
Issue a keyword list or structured query to a specified Solr endpoint.
### Example - parameters
```
url = "http://localhost:8983/solr/collection"
# a keyword list of arbitrary parameters
Hui.search(url, q: "edinburgh", rows: 10)
# supply a list of Hui structs for more complex query e.g. DisMax
alias Hui.Query
x = %Query.DisMax{q: "run", qf: "description^2.3 title", mm: "2<-25% 9<-3"}
y = %Query.Common{rows: 10, start: 10, fq: ["edited:true"]}
z = %Query.Facet{field: ["cat", "author_str"], mincount: 1}
Hui.search(url, [x, y, z])
# SolrCloud query
x = %Query.DisMax{q: "john"}
y = %Query.Common{collection: "library,commons", rows: 10, distrib: true, "shards.tolerant": true, "shards.info": true}
Hui.search(url, [x,y])
# With results highlighting (snippets)
x = %Query.Standard{q: "features:photo"}
y = %Query.Highlight{fl: "features", usePhraseHighlighter: true, fragsize: 250, snippets: 3 }
Hui.search(url, [x, y])
```
### Example - faceting
```
alias Hui.Query
range1 = %Query.FacetRange{range: "price", start: 0, end: 100, gap: 10, per_field: true}
range2 = %Query.FacetRange{range: "popularity", start: 0, end: 5, gap: 1, per_field: true}
x = %Query.DisMax{q: "ivan"}
y = %Query.Facet{field: ["cat", "author_str"], mincount: 1, range: [range1, range2]}
Hui.search(:default, [x, y])
```
The above `Hui.search(:default, [x, y])` example issues a request that resulted in
the following Solr response header showing the corresponding generated and encoded parameters.
```json
"responseHeader" => %{
"QTime" => 106,
"params" => %{
"f.popularity.facet.range.end" => "5",
"f.popularity.facet.range.gap" => "1",
"f.popularity.facet.range.start" => "0",
"f.price.facet.range.end" => "100",
"f.price.facet.range.gap" => "10",
"f.price.facet.range.start" => "0",
"facet" => "true",
"facet.field" => ["cat", "author_str"],
"facet.mincount" => "1",
"facet.range" => ["price", "popularity"],
"q" => "ivan"
},
"status" => 0,
"zkConnected" => true
}
```
"""
@spec search(url, query) :: {:ok, Response.t()} | {:error, Hui.Error.t()}
# deprecated - will be removed in a future version
# coveralls-ignore-start
def search(url, %Hui.Q{} = query), do: Request.search(url, [query])
# coveralls-ignore-stop
def search(url, q) when is_list(q) or is_map(q), do: query(q, url)
@doc """
Issue a keyword list or structured query to a specified Solr endpoint, raise an exception in case of failure.
See `search/2`.
"""
@spec search!(url, Hui.Q.t() | Request.query_struct_list() | Keyword.t()) :: Response.t()
# deprecated - will be removed in a future version
# coveralls-ignore-start
def search!(url, %Hui.Q{} = query), do: Request.search(url, true, [query])
# coveralls-ignore-stop
def search!(url, q) when is_list(q) or is_map(q), do: query!(q, url)
@doc """
Convenience function for issuing various typical queries to a specified Solr endpoint.
See `q/6`.
"""
@spec search(
url,
binary,
nil | integer,
nil | integer,
nil | binary | list(binary),
nil | binary | list(binary),
nil | binary
) :: {:ok, Response.t()} | {:error, Hui.Error.t()}
def search(
url,
keywords,
rows \\ nil,
start \\ nil,
filters \\ nil,
facet_fields \\ nil,
sort \\ nil
)
def search(url, keywords, _, _, _, _, _) when is_nil_empty(keywords) or is_nil_empty(url),
do: {:error, %Hui.Error{reason: :einval}}
def search(url, keywords, rows, start, filters, facet_fields, sort) do
x = %Query.Standard{q: keywords}
y = %Query.Common{rows: rows, start: start, fq: filters, sort: sort}
z = %Query.Facet{field: facet_fields}
[x, y, z] |> query(url)
end
@doc """
Convenience function for issuing various typical queries to a specified Solr endpoint,
raise an exception in case of failure.
See `q/6`.
"""
@spec search!(
url,
binary,
nil | integer,
nil | integer,
nil | binary | list(binary),
nil | binary | list(binary),
nil | binary
) :: Response.t()
def search!(
url,
keywords,
rows \\ nil,
start \\ nil,
filters \\ nil,
facet_fields \\ nil,
sort \\ nil
)
def search!(url, keywords, _, _, _, _, _) when is_nil_empty(keywords) or is_nil_empty(url),
do: raise(%Hui.Error{reason: :einval})
def search!(url, keywords, rows, start, filters, facet_fields, sort) do
x = %Query.Standard{q: keywords}
y = %Query.Common{rows: rows, start: start, fq: filters, sort: sort}
z = %Query.Facet{field: facet_fields}
[x, y, z] |> query!(url)
end
# coveralls-ignore-start
@doc false
@spec spellcheck(url, Query.SpellCheck.t()) :: {:ok, Response.t()} | {:error, Hui.Error.t()}
@deprecated "Please use search/2 with Hui.Query.SpellCheck query struct."
def spellcheck(url, %Hui.Sp{} = query), do: Request.search(url, [query])
@doc false
@spec spellcheck!(url, Hui.Sp.t()) :: Response.t()
@deprecated "Please use search!/2 with Hui.Query.SpellCheck struct."
def spellcheck!(url, %Hui.Sp{} = query), do: Request.search(url, true, [query])
@doc false
@spec spellcheck(url, Hui.Sp.t(), Hui.Q.t()) :: {:ok, Response.t()} | {:error, Hui.Error.t()}
@deprecated "Please use search/2 with Hui.Query.SpellCheck struct."
def spellcheck(url, %Hui.Sp{} = query_sp, %Hui.Q{} = query),
do: Request.search(url, [query, query_sp])
@doc false
@spec spellcheck!(url, Hui.Sp.t(), Hui.Q.t()) :: Response.t()
@deprecated "Please use search/2 with Hui.Query.SpellCheck struct."
def spellcheck!(url, %Hui.Sp{} = query_sp, %Hui.Q{} = query),
do: Request.search(url, true, [query, query_sp])
# coveralls-ignore-stop
@doc """
Issue a structured suggest query to a specified Solr endpoint.
### Example
```
suggest_query = %Hui.Query.Suggest{q: "ha", count: 10, dictionary: "name_infix"}
Hui.suggest(:library, suggest_query)
```
"""
@spec suggest(url, Query.Suggest.t()) :: {:ok, Response.t()} | {:error, Hui.Error.t()}
def suggest(url, %Query.Suggest{} = q), do: q |> query(url)
@doc """
Issue a structured suggester query to a specified Solr endpoint, raise an exception in case of failure.
"""
@spec suggest!(url, Query.Suggest.t()) :: Response.t()
def suggest!(url, %Query.Suggest{} = q), do: q |> query!(url)
@doc """
Convenience function for issuing a suggester query to a specified Solr endpoint.
### Example
```
Hui.suggest(:autocomplete, "t")
Hui.suggest(:autocomplete, "bo", 5, ["name_infix", "ln_prefix", "fn_prefix"], "1939")
```
"""
@spec suggest(url, binary, nil | integer, nil | binary | list(binary), nil | binary) ::
{:ok, Response.t()} | {:error, Hui.Error.t()}
def suggest(url, q, count \\ nil, dictionaries \\ nil, context \\ nil)
def suggest(url, q, _, _, _) when is_nil_empty(q) or is_nil_empty(url),
do: {:error, %Hui.Error{reason: :einval}}
def suggest(url, q, count, dictionaries, context) do
%Query.Suggest{q: q, count: count, dictionary: dictionaries, cfq: context}
|> query(url)
end
@doc """
Convenience function for issuing a suggester query to a specified Solr endpoint,
raise an exception in case of failure.
"""
@spec suggest!(url, binary, nil | integer, nil | binary | list(binary), nil | binary) ::
Response.t()
def suggest!(url, q, count \\ nil, dictionaries \\ nil, context \\ nil)
def suggest!(url, q, _, _, _) when is_nil_empty(q) or is_nil_empty(url),
do: raise(%Hui.Error{reason: :einval})
def suggest!(url, q, count, dictionaries, context) do
%Query.Suggest{q: q, count: count, dictionary: dictionaries, cfq: context}
|> query!(url)
end
# coveralls-ignore-start
@doc false
@spec mlt(url, Hui.Q.t(), Hui.M.t()) :: {:ok, Response.t()} | {:error, Hui.Error.t()}
@deprecated "Please use search/2 with Hui.Query.MoreLikeThis struct."
def mlt(url, %Hui.Q{} = query_struct, %Hui.M{} = mlt_query_struct),
do: Request.search(url, [query_struct, mlt_query_struct])
@doc false
@deprecated "Please use search/2 with Hui.Query.MoreLikeThis struct."
@spec mlt!(url, Hui.Q.t(), Hui.M.t()) :: Response.t()
def mlt!(url, %Hui.Q{} = query_struct, %Hui.M{} = mlt_query_struct),
do: Request.search(url, true, [query_struct, mlt_query_struct])
# coveralls-ignore-stop
@doc """
Updates or adds Solr documents to an index or collection.
This function accepts documents as map (single or a list) and commits the docs
to the index immediately by default - set `commit` to `false` for manual or
auto commits later.
It can also operate in update struct and binary modes,
the former uses the `t:Hui.Query.Update.t/0` struct
  while the latter accepts text containing any valid Solr update data or commands.
An index/update handler endpoint should be specified through a `t:Hui.URL.t/0` struct
or a URL config key. A content type header is required so that Solr knows the
incoming data format (JSON, XML etc.) and can process data accordingly.
### Example - map, list and binary data
```
# Index handler for JSON-formatted update
headers = [{"Content-type", "application/json"}]
url = %Hui.URL{url: "http://localhost:8983/solr/collection", handler: "update", headers: headers}
# Solr docs in maps
doc1 = %{
"actors" => ["<NAME>", "<NAME>", "<NAME>", "<NAME>"],
"desc" => "A married daughter who longs for her mother's love is visited by the latter, a successful concert pianist.",
"directed_by" => ["<NAME>"],
"genre" => ["Drama", "Music"],
"id" => "tt0077711",
"initial_release_date" => "1978-10-08",
"name" => "<NAME>"
}
doc2 = %{
"actors" => ["<NAME>", "<NAME>", "<NAME>"],
"desc" => "A nurse is put in charge of a mute actress and finds that their personas are melding together.",
"directed_by" => ["<NAME>"],
"genre" => ["Drama", "Thriller"],
"id" => "tt0060827",
"initial_release_date" => "1967-09-21",
"name" => "Persona"
}
Hui.update(url, doc1) # add a single doc
Hui.update(url, [doc1, doc2]) # add a list of docs
# Don't commit the docs e.g. mass ingestion when index handler is setup for autocommit.
Hui.update(url, [doc1, doc2], false)
# Send to a configured endpoint
Hui.update(:updater, [doc1, doc2])
# Binary mode, add and commit a doc
Hui.update(url, "{\\\"add\\\":{\\\"doc\\\":{\\\"name\\\":\\\"Blade Runner\\\",\\\"id\\\":\\\"tt0083658\\\",..}},\\\"commit\\\":{}}")
# Binary mode, delete a doc via XML
headers = [{"Content-type", "application/xml"}]
url = %Hui.URL{url: "http://localhost:8983/solr/collection", handler: "update", headers: headers}
Hui.update(url, "<delete><id>9780141981727</id></delete>")
```
### Example - `t:Hui.Query.Update.t/0` and other update options
```
# url, doc1, doc2 from the above example
...
# Hui.Query.Update struct command for updating and committing the docs to Solr within 5 seconds
alias Hui.Query
x = %Query.Update{doc: [doc1, doc2], commitWithin: 5000, overwrite: true}
{status, resp} = Hui.update(url, x)
# Delete the docs by IDs, with a URL key from configuration
{status, resp} = Hui.update(:library_update, %Query.Update{delete_id: ["tt1316540", "tt1650453"]})
# Commit and optimise index, keep max index segments at 10
{status, resp} = Hui.update(url, %Query.Update{commit: true, waitSearcher: true, optimize: true, maxSegments: 10})
# Commit index, expunge deleted docs
{status, resp} = Hui.update(url, %Query.Update{commit: true, expungeDeletes: true})
```
"""
@spec update(url, update_query, boolean) :: {:ok, Response.t()} | {:error, Hui.Error.t()}
def update(url, q, commit \\ true) do
cond do
is_binary(q) -> q
is_map(q) and Map.has_key?(q, :__struct__) -> q
is_map(q) or is_list(q) -> %Query.Update{doc: q, commit: commit}
end
|> query(url, :post)
end
@doc """
Updates or adds Solr documents to an index or collection, raise an exception in case of failure.
"""
@spec update!(binary | Hui.URL.t(), update_query, boolean) :: Response.t()
def update!(url, q, commit \\ true) do
cond do
is_binary(q) -> q
is_map(q) and Map.has_key?(q, :__struct__) -> q
is_map(q) or is_list(q) -> %Query.Update{doc: q, commit: commit}
end
|> query!(url, :post)
end
@doc """
Deletes Solr documents.
  This function accepts a single ID or a list of IDs and immediately deletes the corresponding
documents from the Solr index (commit by default).
An index/update handler endpoint should be specified through a `t:Hui.URL.t/0` struct
or a URL config key. A JSON content type header for the URL is required so that Solr knows the
incoming data format and can process data accordingly.
### Example
```
# Index handler for JSON-formatted update
headers = [{"Content-type", "application/json"}]
url = %Hui.URL{url: "http://localhost:8983/solr/collection", handler: "update", headers: headers}
Hui.delete(url, "tt2358891") # delete a single doc
Hui.delete(url, ["tt2358891", "tt1602620"]) # delete a list of docs
Hui.delete(url, ["tt2358891", "tt1602620"], false) # delete without immediate commit
```
"""
@spec delete(binary | Hui.URL.t(), binary | list(binary), boolean) ::
{:ok, Response.t()} | {:error, Hui.Error.t()}
def delete(url, ids, commit \\ true) when is_binary(ids) or is_list(ids) do
%Query.Update{delete_id: ids, commit: commit}
|> query(url, :post)
end
@doc """
Deletes Solr documents, raise an exception in case of failure.
"""
@spec delete!(binary | Hui.URL.t(), binary | list(binary), boolean) :: Response.t()
def delete!(url, ids, commit \\ true) when is_binary(ids) or is_list(ids) do
%Query.Update{delete_id: ids, commit: commit}
|> query!(url, :post)
end
@doc """
Deletes Solr documents by filter queries.
  This function accepts a single filter query or a list of filter queries and immediately deletes the corresponding
documents from the Solr index (commit by default).
An index/update handler endpoint should be specified through a `t:Hui.URL.t/0` struct
or a URL config key. A JSON content type header for the URL is required so that Solr knows the
incoming data format and can process data accordingly.
### Example
```
# Index handler for JSON-formatted update
headers = [{"Content-type", "application/json"}]
url = %Hui.URL{url: "http://localhost:8983/solr/collection", handler: "update", headers: headers}
Hui.delete_by_query(url, "name:Persona") # delete with a single filter
Hui.delete_by_query(url, ["genre:Drama", "name:Persona"]) # delete with a list of filters
```
"""
@spec delete_by_query(binary | Hui.URL.t(), binary | list(binary), boolean) ::
{:ok, Response.t()} | {:error, Hui.Error.t()}
def delete_by_query(url, q, commit \\ true) when is_binary(q) or is_list(q) do
%Query.Update{delete_query: q, commit: commit}
|> query(url, :post)
end
@doc """
Deletes Solr documents by filter queries, raise an exception in case of failure.
"""
@spec delete_by_query!(binary | Hui.URL.t(), binary | list(binary), boolean) :: Response.t()
def delete_by_query!(url, q, commit \\ true) when is_binary(q) or is_list(q) do
%Query.Update{delete_query: q, commit: commit}
|> query!(url, :post)
end
@doc """
Commit any added or deleted Solr documents to the index.
This provides a (separate) mechanism to commit previously added or deleted documents to
Solr index for different updating and index maintenance scenarios. By default, the commit
waits for a new Solr searcher to be regenerated, so that the commit result is made available
for search.
An index/update handler endpoint should be specified through a `t:Hui.URL.t/0` struct
or a URL config key. A JSON content type header for the URL is required so that Solr knows the
incoming data format and can process data accordingly.
### Example
```
# Index handler for JSON-formatted update
headers = [{"Content-type", "application/json"}]
url = %Hui.URL{url: "http://localhost:8983/solr/collection", handler: "update", headers: headers}
Hui.commit(url) # commits, make new docs available for search
Hui.commit(url, false) # commits op only, new docs to be made available later
```
Use `t:Hui.Query.Update.t/0` struct for other types of commit and index optimisation, e.g. expunge deleted docs to
physically remove docs from the index, which could be a system-intensive operation.
"""
@spec commit(binary | Hui.URL.t(), boolean) :: {:ok, Response.t()} | {:error, Hui.Error.t()}
def commit(url, wait_searcher \\ true) do
%Query.Update{commit: true, waitSearcher: wait_searcher}
|> query(url, :post)
end
@doc """
Commit any added or deleted Solr documents to the index, raise an exception in case of failure.
"""
@spec commit!(binary | Hui.URL.t(), boolean) :: Response.t()
def commit!(url, wait_searcher \\ true) do
%Query.Update{commit: true, waitSearcher: wait_searcher}
|> query!(url, :post)
end
defp query(q, url, method \\ :get) do
{status, url} = parse_url(url)
case {status, method} do
{:ok, :get} -> _parse_resp(Query.get(url, q))
{:ok, :post} -> _parse_resp(Query.post(url, q))
{:error, _} -> {:error, @error_nxdomain}
end
end
defp query!(q, url, method \\ :get) do
{status, url} = parse_url(url)
case {status, method} do
{:ok, :get} -> _parse_resp(Query.get!(url, q))
{:ok, :post} -> _parse_resp(Query.post!(url, q))
{:error, _} -> raise(@error_nxdomain)
end
end
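  # Resolves the accepted URL forms: a %Hui.URL{} struct is used as-is, an atom
  # is looked up in the application configuration, and a binary is validated as
  # an absolute URL.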
defp parse_url(%Hui.URL{} = url), do: {:ok, url}
defp parse_url(url) when is_atom(url), do: Hui.URL.configured_url(url)
defp parse_url(url) when is_binary(url) do
uri = URI.parse(url)
    if uri.scheme != nil && uri.host != nil && uri.host =~ ~r/./ do
{:ok, %Hui.URL{url: url}}
else
{:error, @error_nxdomain}
end
end
defp parse_url(_), do: {:error, @error_nxdomain}
defp _parse_resp({:error, %HTTPoison.Error{id: _, reason: reason}}),
do: {:error, %Hui.Error{reason: reason}}
defp _parse_resp(resp), do: resp
end
|
lib/hui.ex
| 0.895082
| 0.774114
|
hui.ex
|
starcoder
|
defmodule WorkerPool do
@moduledoc """
A pool module for user-defined workers.
## Example
  The following module is a sample worker module which, when called, sends a tuple of `:ok` and the input list with each element doubled back to the host process.
```elixir
defmodule SampleWorker do
use WorkerPool.Worker
@impl true
def work({pid, list}), do: send(pid, {:ok, Enum.map(list, & &1 * 2)})
end
```
```elixir
WorkerPool.start_link(SampleWorker)
WorkerPool.get_worker(SampleWorker) |> send({:work, {self(), [1, 2, 3]}})
receive do
{:ok, result} -> IO.inspect result
after 1000 -> IO.puts "Timeout"
end
```
"""
use GenServer
@doc """
Gets the server name of the given worker module.
## Parameters
- `worker_module`: Module of the worker that implements WorkerPool.Worker.
"""
@spec server_name(module) :: atom
def server_name(worker_module) do
:"#{__MODULE__}.#{worker_module}"
end
@doc """
Starts a `WorkerPool` process for the given worker module that linked to the current process.
## Parameters
- `worker_module`: Module of the worker that implements WorkerPool.Worker.
"""
@spec start_link(module) :: {:ok, pid}
def start_link(worker_module) do
GenServer.start_link(__MODULE__, {worker_module, []}, name: server_name(worker_module))
end
@doc """
Gets a worker of the given module.
The worker has been instantiated and cached before calling `get_worker`.
"""
@spec get_worker(module) :: pid
def get_worker(worker_module) do
GenServer.call(server_name(worker_module), :get_worker)
end
@doc false
@spec alive(pid, module) :: :ok
def alive(pid, worker_module) do
GenServer.cast(server_name(worker_module), {:alive, pid})
end
@impl true
@spec init({module, list}) :: {:ok, {module, list, non_neg_integer}}
def init({worker_module, pool}) do
{:ok, {worker_module, pool, 0}}
end
@impl true
@spec handle_call(atom, {pid, any}, {module, list, non_neg_integer}) ::
{:reply, pid, {module, list, non_neg_integer}}
def handle_call(:get_worker, _from, {worker_module, [], wid}) do
{:reply, WorkerPool.Worker.create(worker_module, wid), {worker_module, [], wid + 1}}
end
def handle_call(:get_worker, _from, {worker_module, [head | tail], wid}) do
{:reply, head, {worker_module, tail, wid}}
end
@impl true
@spec handle_cast({atom, pid}, {module, list, non_neg_integer}) ::
{:noreply, {module, list, non_neg_integer}}
def handle_cast({:alive, pid}, {worker_module, pool, wid}) do
{:noreply, {worker_module, pool ++ [pid], wid}}
end
end
|
lib/worker_pool.ex
| 0.883601
| 0.774669
|
worker_pool.ex
|
starcoder
|
defmodule Game.Session.Effects do
@moduledoc """
Handle effects on a user
"""
use Game.Zone
require Logger
alias Game.Character
alias Game.Effect
alias Game.Environment
alias Game.Events.CharacterDied
alias Game.Format.Effects, as: FormatEffects
alias Game.Player
alias Game.Session.Process
alias Game.Socket
import Game.Session, only: [echo: 2]
import Game.Character.Helpers, only: [update_effect_count: 2, is_alive?: 1]
@doc """
Apply effects after receiving them from a targeter
Used from the character callback `{:apply_effects, effects, from, description}`.
"""
  @spec apply([Data.Effect.t()], tuple, String.t(), map) :: map
def apply(effects, from, description, state = %{save: save}) do
{stats, effects, continuous_effects} =
Character.Effects.apply_effects(
Character.to_simple(state.character),
save.stats,
state,
effects,
from
)
state = Player.update_save(state, %{save | stats: stats})
state.character |> echo_effects(from, description, effects)
state.character |> maybe_died(state, from)
case is_alive?(state.save) do
true ->
state |> Map.put(:continuous_effects, state.continuous_effects ++ continuous_effects)
false ->
state |> Map.put(:continuous_effects, [])
end
end
@doc """
Check for health < 0 and perform actions if it is
"""
@spec maybe_died(User.t(), State.t(), Character.t()) :: :ok
def maybe_died(player, state, from)
def maybe_died(player = %{save: %{stats: %{health_points: health_points}}}, state, from)
when health_points < 1 do
player |> maybe_transport_to_graveyard()
event = %CharacterDied{character: Character.to_simple(player), killer: from}
Environment.notify(state.save.room_id, Character.to_simple(player), event)
:ok
end
def maybe_died(_player, _state, _from), do: :ok
@doc """
Check if there is a graveyard to teleport to. The zone will have it set
"""
@spec maybe_transport_to_graveyard(User.t()) :: :ok
def maybe_transport_to_graveyard(player)
def maybe_transport_to_graveyard(%{save: %{room_id: room_id}}) do
{:ok, room} = room_id |> Environment.look()
case @zone.graveyard(room.zone_id) do
{:ok, graveyard_id} ->
send(self(), {:resurrect, graveyard_id})
{:error, :no_graveyard} ->
:ok
end
end
@doc """
Echo effects to the player's session
"""
def echo_effects(player, from, description, effects) do
player = Character.to_simple(player)
case Character.equal?(player, from) do
true ->
:ok
false ->
description = [description | FormatEffects.effects(effects, player)]
echo(self(), description |> Enum.join("\n"))
end
end
@doc """
  Find a continuous effect on the player by its id and apply it
"""
@spec handle_continuous_effect(State.t(), String.t()) :: State.t()
def handle_continuous_effect(state, effect_id) do
case Effect.find_effect(state, effect_id) do
{:ok, effect} ->
apply_continuous_effect(state, effect)
{:error, :not_found} ->
state
end
end
@doc """
Apply a continuous effect to the player
"""
@spec apply_continuous_effect(State.t(), Effect.t()) :: State.t()
def apply_continuous_effect(state = %{save: save}, {from, effect}) do
{stats, effects} = Character.Effects.apply_continuous_effect(save.stats, state, effect)
state = Player.update_save(state, %{save | stats: stats})
effects_message =
effects
|> FormatEffects.effects(Character.to_simple(state.character))
|> Enum.join("\n")
state |> Socket.echo(effects_message)
state.character |> maybe_died(state, from)
state |> Process.prompt()
case is_alive?(state.save) do
true ->
state |> update_effect_count({from, effect})
false ->
state |> Map.put(:continuous_effects, [])
end
end
end
|
lib/game/session/effects.ex
| 0.891941
| 0.407451
|
effects.ex
|
starcoder
|
defmodule Carrier.Messaging.Tracker do
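  @moduledoc """
  Tracks message bus subscribers: per-process reply endpoints, topic
  subscriptions (including wildcard topics), process monitors, and topics that
  have become unused and may be unsubscribed from the broker.
  """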
defstruct [reply_endpoints: %{}, subscriptions: %{}, monitors: %{}, unused_topics: []]
@spec add_reply_endpoint(Tracker.t, pid()) :: {Tracker.t, String.t}
def add_reply_endpoint(%__MODULE__{reply_endpoints: reps}=tracker, subscriber) do
case Map.get(reps, subscriber) do
nil ->
topic = make_reply_endpoint_topic()
reps = reps
|> Map.put(subscriber, topic)
|> Map.put(topic, subscriber)
{maybe_monitor_subscriber(%{tracker | reply_endpoints: reps}, subscriber), topic}
topic ->
{tracker, topic}
end
end
@spec get_reply_endpoint(Tracker.t, pid()) :: String.t | nil
def get_reply_endpoint(%__MODULE__{reply_endpoints: reps}, subscriber) do
Map.get(reps, subscriber)
end
@spec add_subscription(Tracker.t, String.t, pid()) :: Tracker.t
def add_subscription(%__MODULE__{subscriptions: subs}=tracker, topic, subscriber) do
subs = Map.update(subs, topic, {subscription_matcher(topic), [subscriber]},
fn({matcher, subscribed}) -> {matcher, Enum.uniq([subscriber|subscribed])} end)
maybe_monitor_subscriber(%{tracker | subscriptions: subs}, subscriber)
end
@spec del_subscription(Tracker.t, String.t, pid()) :: {Tracker.t, boolean()}
def del_subscription(%__MODULE__{subscriptions: subs, unused_topics: ut}=tracker, topic, subscriber) do
case Map.get(subs, topic) do
{_matcher, [^subscriber]} ->
subs = Map.delete(subs, topic)
{maybe_unmonitor_subscriber(%{tracker | subscriptions: subs, unused_topics: Enum.uniq([topic|ut])}, subscriber), true}
{matcher, subscribed} ->
case List.delete(subscribed, subscriber) do
^subscribed ->
{tracker, false}
updated ->
{maybe_unmonitor_subscriber(%{tracker | subscriptions: Map.put(subs, topic, {matcher, updated})}, subscriber), true}
end
nil ->
{tracker, false}
end
end
@spec find_reply_subscriber(Tracker.t, String.t) :: pid() | nil
def find_reply_subscriber(%__MODULE__{reply_endpoints: reps}, topic) do
Map.get(reps, topic)
end
@spec find_subscribers(Tracker.t, String.t) :: [] | [pid()]
def find_subscribers(%__MODULE__{subscriptions: subs}, topic) do
Enum.reduce(subs, [], &(find_matching_subscriptions(&1, topic, &2)))
end
@spec del_subscriber(Tracker.t, pid()) :: Tracker.t
def del_subscriber(tracker, subscriber) do
tracker
|> del_reply_endpoint(subscriber)
|> del_all_subscriptions(subscriber)
|> unmonitor_subscriber(subscriber)
end
@spec unused?(Tracker.t) :: boolean()
def unused?(%__MODULE__{reply_endpoints: reps, subscriptions: subs, monitors: monitors}) do
Enum.empty?(reps) and Enum.empty?(subs) and Enum.empty?(monitors)
end
@spec get_and_reset_unused_topics(Tracker.t) :: {Tracker.t, [String.t]}
def get_and_reset_unused_topics(tracker) do
{%{tracker | unused_topics: []}, tracker.unused_topics}
end
defp make_reply_endpoint_topic() do
id = UUID.uuid4(:hex)
"carrier/call/reply/#{id}"
end
defp find_matching_subscriptions({_, {matcher, subscribed}}, topic, accum) do
if Regex.match?(matcher, topic) do
accum ++ subscribed
else
accum
end
end
  # Tracker uses regexes to find subscribers for a given MQTT topic because
  # subscriptions may contain wildcards (note that this codebase uses "*"
  # where the MQTT spec uses "#" for the multi-level wildcard):
  # * "foo/+" means "subscribe to all topics exactly one level below foo"
  # * "foo/*" means "subscribe to all subtopics of foo, regardless of depth"
defp subscription_matcher(sub_topic) do
regex = case String.slice(sub_topic, -2, 2) do
"/+" ->
"^#{String.slice(sub_topic, 0, String.length(sub_topic) - 1)}[a-zA-Z0-9_\-]+$"
"/*" ->
"^#{String.slice(sub_topic, 0, String.length(sub_topic) - 1)}.*"
_ ->
"^#{sub_topic}$"
end
Regex.compile!(regex)
end
def maybe_monitor_subscriber(%__MODULE__{monitors: monitors}=tracker, subscriber) do
case Map.get(monitors, subscriber) do
nil ->
mref = :erlang.monitor(:process, subscriber)
%{tracker | monitors: Map.put(monitors, subscriber, mref)}
_ ->
tracker
end
end
def maybe_unmonitor_subscriber(%__MODULE__{monitors: monitors}=tracker, subscriber) do
if has_subscriptions?(tracker, subscriber) do
tracker
else
      {_, monitors} =
        Map.get_and_update(monitors, subscriber, fn
          nil -> :pop
          mref ->
            :erlang.demonitor(mref, [:flush])
            :pop
        end)
%{tracker | monitors: monitors}
end
end
def unmonitor_subscriber(%__MODULE__{monitors: monitors}=tracker, subscriber) do
case Map.pop(monitors, subscriber) do
{nil, _monitors} ->
tracker
{mref, monitors} ->
:erlang.demonitor(mref, [:flush])
%{tracker | monitors: monitors}
end
end
defp has_subscriptions?(tracker, subscriber) do
Map.has_key?(tracker.reply_endpoints, subscriber) or
Enum.any?(tracker.subscriptions, &(has_subscription?(&1, subscriber)))
end
defp has_subscription?({_, {_, subscribed}}, subscriber) do
Enum.member?(subscribed, subscriber)
end
defp del_reply_endpoint(%__MODULE__{reply_endpoints: reps, unused_topics: ut}=tracker, subscriber) do
case Map.get(reps, subscriber) do
nil ->
tracker
rep ->
reps = reps
|> Map.delete(rep)
|> Map.delete(subscriber)
%{tracker | reply_endpoints: reps, unused_topics: Enum.uniq([rep|ut])}
end
end
defp del_all_subscriptions(%__MODULE__{subscriptions: subs}=tracker, subscriber) do
{subs, unused_topics} = Enum.reduce(Map.keys(subs), {subs, []}, &(delete_subscription(&1, subscriber, &2)))
%{tracker | subscriptions: subs, unused_topics: tracker.unused_topics ++ unused_topics}
end
defp delete_subscription(topic, subscriber, {subs, unused_topics}) do
case Map.get(subs, topic) do
nil ->
{subs, unused_topics}
{_matcher, [^subscriber]} ->
{Map.delete(subs, topic), [topic|unused_topics]}
{matcher, subscribed} ->
{Map.put(subs, topic, {matcher, List.delete(subscribed, subscriber)}), unused_topics}
end
end
end
|
lib/carrier/messaging/tracker.ex
| 0.759047
| 0.40751
|
tracker.ex
|
starcoder
|
defmodule Exsolr.Suggest do
@moduledoc """
  Provides suggest functions for Solr
"""
require Logger
alias Exsolr.Config
alias Exsolr.HttpResponse
@doc """
  Receives the query params, converts them to a URL, queries the Solr suggest endpoint and builds
  the response.
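  ## Example
      # Illustrative only: the exact params depend on your suggester
      # configuration (the dictionary name below is hypothetical).
      Exsolr.Suggest.suggest(q: "eli", suggest: true, "suggest.dictionary": "mySuggester")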
"""
def suggest(params) do
params
|> build_solr_query()
|> do_suggest()
|> extract_response()
end
@doc """
Builds the solr url query. It will use the following default values if they
  are not specified:
wt: "json"
q: "*:*"
start: 0
rows: 10
## Examples
  iex> Exsolr.Suggest.build_solr_query(q: "roses", fq: ["blue", "violet"])
  "?wt=json&start=0&rows=10&q=roses&fq=blue&fq=violet"
  iex> Exsolr.Suggest.build_solr_query(q: "roses", fq: ["blue", "violet"], start: 0, rows: 10)
  "?wt=json&q=roses&fq=blue&fq=violet&start=0&rows=10"
  iex> Exsolr.Suggest.build_solr_query(q: "roses", fq: ["blue", "violet"], wt: "xml")
  "?start=0&rows=10&q=roses&fq=blue&fq=violet&wt=xml"
"""
def build_solr_query(params) do
"?" <> build_solr_query_params(params)
end
defp build_solr_query_params(params) do
params
|> add_default_params()
|> Enum.map(fn {key, value} -> build_solr_query_parameter(key, value) end)
|> Enum.join("&")
end
defp add_default_params(params) do
default_parameters()
|> Keyword.merge(params)
end
defp default_parameters do
[wt: "json", q: "*:*", start: 0, rows: 10]
end
defp build_solr_query_parameter(_, []), do: nil
defp build_solr_query_parameter(key, [head | tail]) do
[build_solr_query_parameter(key, head), build_solr_query_parameter(key, tail)]
|> Enum.reject(fn x -> x == nil end)
|> Enum.join("&")
end
defp build_solr_query_parameter(:q, value) do
"q=#{URI.encode_www_form(value)}"
end
defp build_solr_query_parameter(:cursorMark, value) do
["cursorMark", value]
|> Enum.join("=")
|> URI.encode()
|> String.replace("+", "%2B")
end
  defp build_solr_query_parameter(key, value) do
    # Logger is already required at the top of this module; inspect/1 keeps
    # the debug output safe for non-string keys and values.
    Logger.debug(fn -> inspect({key, value}) end)

    [Atom.to_string(key), value]
    |> Enum.join("=")
    |> URI.encode()
  end
def do_suggest(solr_query) do
solr_query
|> build_solr_url()
|> HTTPoison.get()
|> HttpResponse.body()
end
defp build_solr_url(solr_query) do
url = Config.suggest_url() <> solr_query
Logger.debug(fn -> url end)
url
end
defp extract_response(solr_response) do
case solr_response do
{:ok, solr_response} ->
case parse_response(solr_response) do
{:ok, solr_response} ->
{:ok, solr_response}
{:error, reason} ->
Logger.error(fn -> "Solr response parse: #{inspect(reason)}" end)
{:error, reason}
end
{:error, reason, message} ->
Logger.error(fn -> "Solr request failed: #{inspect(reason)}" end)
{:error, reason, message}
{:error, reason} ->
Logger.error(fn -> "Solr request failed: #{inspect(reason)}" end)
{:error, reason}
end
end
defp parse_response(solr_response) do
case Poison.decode(solr_response) do
{:ok, %{"suggest" => suggesters}} ->
{:ok, suggesters}
{:error, reason} ->
{:error, reason}
end
end
defp extract_mlt_result(mlt) do
result = for k <- Map.keys(mlt), do: get_in(mlt, [k, "docs"])
result |> List.flatten()
end
end
|
lib/exsolr/suggest.ex
| 0.781372
| 0.466603
|
suggest.ex
|
starcoder
|
defmodule VisNetwork do
@moduledoc """
Elixir bindings to [vis-network](https://github.com/visjs/vis-network).
"""
defstruct spec: %{}
alias VisNetwork.Utils
@type t :: %__MODULE__{
spec: spec()
}
@type spec :: map()
@doc """
Returns a new specification wrapped in the `VisNetwork` struct.
  All provided options are converted to properties of the
  specification's `"options"` object.
## Examples
      Vn.new(
title: "My graph",
width: 200,
height: 200
)
|> ...
  See [the docs](https://visjs.github.io/vis-network/docs/network/#options) for more details.
"""
@spec new(keyword()) :: t()
def new(opts \\ []) do
vn = %VisNetwork{spec: %{"options" => %{"height" => "400px"}}}
options(vn, opts)
end
@compile {:no_warn_undefined, {Jason, :decode!, 1}}
@doc """
Parses the given vis-network JSON specification
and wraps in the `VisNetwork` struct for further processing.
## Examples
Vn.from_json(\"\"\"
{
"data": {
"nodes": [{"id": "1", "label": "lbl1", "color": "rgb(68,0,0)"}, {"id": "2", "label": "lbl2", "color": "rgb(0,100,0)"}],
"edges": [{"id": "1-2", "from": "1", "to": "2", "value": 1, "arrows": "to"}]
},
"options": {
"configure": {
"enabled": false,
},
"nodes": {
"shape": "dot",
"size": 16,
},
"physics": {
"forceAtlas2Based": {
"gravitationalConstant": -26,
"centralGravity": 0.005,
"springLength": 230,
"springConstant": 0.18,
},
"maxVelocity": 146,
"solver": "forceAtlas2Based",
"timestep": 0.35,
"stabilization": { "iterations": 150 },
},
}
}
\"\"\")
See [the docs](https://visjs.github.io/vis-network/docs/network/) for more details.
"""
@spec from_json(String.t()) :: t()
def from_json(json) do
Utils.assert_jason!("from_json/1")
json
|> Jason.decode!()
|> from_spec()
end
@doc """
Sets data nodes in the specification.
## Examples
Vn.new()
|> Vn.nodes([])
|> ...
See [the docs](https://visjs.github.io/vis-network/docs/network/nodes.html) for more details.
"""
@spec nodes(t(), list(map())) :: t()
def nodes(vn, nodes_data) when is_list(nodes_data) do
put_in(vn, [Access.key(:spec), Access.key("data", %{}), "nodes"], to_vn(nodes_data))
end
@doc """
Sets data edges in the specification.
## Examples
Vn.new()
      |> Vn.edges([])
|> ...
See [the docs](https://visjs.github.io/vis-network/docs/network/edges.html) for more details.
"""
@spec edges(t(), list(map())) :: t()
def edges(vn, edges_data) when is_list(edges_data) do
put_in(vn, [Access.key(:spec), Access.key("data", %{}), "edges"], to_vn(edges_data))
end
@doc """
Sets options in the specification.
## Examples
Vn.new()
|> Vn.options(%{})
|> ...
See [the docs](https://visjs.github.io/vis-network/docs/network/#options) for more details.
"""
@spec options(t(), keyword()) :: t()
def options(vn, opts) do
vn_props = opts_to_vn_props(opts)
update_in(vn, [Access.key(:spec), Access.key("options", %{})], &(Map.merge(&1, vn_props)))
end
@doc """
Wraps the given vis-network specification in the `VisNetwork`
struct for further processing.
There is also `from_json/1` that handles JSON parsing for you.
  See [the docs](https://visjs.github.io/vis-network/docs/network/) for more details.
"""
@spec from_spec(spec()) :: t()
def from_spec(spec) do
%VisNetwork{spec: spec}
end
@doc """
Returns the underlying vis-network specification.
The result is a nested Elixir datastructure that serializes
to vis-network JSON specification.
  See [the docs](https://visjs.github.io/vis-network/docs/network/) for more details.
"""
@spec to_spec(t()) :: spec()
def to_spec(vn) do
vn.spec
end
# Helpers
defp opts_to_vn_props(opts) do
opts |> Map.new() |> to_vn()
end
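  # Recursively converts keyword lists and maps to plain maps, turning
  # snake_case atom keys into camelCase strings and leaving other values as-is.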
defp to_vn(value) when value in [true, false, nil], do: value
defp to_vn(atom) when is_atom(atom), do: to_vn_key(atom)
defp to_vn(map) when is_map(map) do
Map.new(map, fn {key, value} ->
{to_vn(key), to_vn(value)}
end)
end
defp to_vn([{key, _} | _] = keyword) when is_atom(key) do
Map.new(keyword, fn {key, value} ->
{to_vn(key), to_vn(value)}
end)
end
defp to_vn(list) when is_list(list) do
Enum.map(list, &to_vn/1)
end
defp to_vn(value), do: value
defp to_vn_key(key) when is_atom(key) do
key |> to_string() |> snake_to_camel()
end
defp snake_to_camel(string) do
[part | parts] = String.split(string, "_")
Enum.join([String.downcase(part, :ascii) | Enum.map(parts, &String.capitalize(&1, :ascii))])
end
end
|
lib/vis_network.ex
| 0.805326
| 0.601125
|
vis_network.ex
|
starcoder
|
require Logger
defmodule Tail do
@moduledoc """
  Tail implements simple file-tailing functionality.
Given a file, a function, and an interval, Tail will execute the function with a list of new lines found
in the file and continue checking for additional lines on the interval.
## Usage
{:ok, pid} = Tail.start_link("test.txt", &IO.inspect(&1), 1000)
Tail.stop(pid)
"""
use GenServer
@type state :: {File.Stream.t(), ([String.t()] -> nil), integer, term, integer, integer}
@doc """
Public interface. Starts a Tail Genserver for the given file, function, and interval (in ms)
"""
@spec start_link(String.t(), ([String.t()] -> nil), integer) :: GenServer.on_start()
def start_link(file, fun, interval \\ 1000) do
GenServer.start_link(__MODULE__, {file, fun, interval})
end
@doc """
Public interface. Sends a call to kill the GenServer
"""
@spec stop(pid) :: :ok
def stop(pid) do
GenServer.call(pid, :kill)
end
@doc """
Init callback. Starts the check loop by casting :check to self and then returns the initial state
"""
@spec init({String.t(), ([String.t()] -> nil), integer}) :: {:ok, state}
def init({file, fun, interval}) do
stream = File.stream!(file)
GenServer.cast(self(), :check)
{:ok, {stream, fun, interval, nil, 0, 0}}
end
@doc """
Main loop. Calls check_for_lines, sleeps, then continues the loop by casting :check to self
and returning with the (possibly updated) last_modified and position
"""
@spec handle_cast(:check, state) :: {:noreply, state}
def handle_cast(:check, state = {_stream, _fun, interval, _last_modified, _position, _size}) do
state = check_for_lines(state)
:timer.sleep(interval)
GenServer.cast(self(), :check)
{:noreply, state}
end
@doc """
Handles :kill call. Checks for any final lines before stopping the genserver
"""
@spec handle_call(:kill, {pid, term}, state) :: {:stop, :normal, :ok, state}
def handle_call(:kill, _from, state) do
state = check_for_lines(state)
{:stop, :normal, :ok, state}
end
# Implementation of line checking. If the file doesn't exist or has shrunk, the stream and position are
# reset, so reading starts from the top once the file (re)appears. If the file hasn't been modified since
# the last check, the current state is returned unchanged. Otherwise, Stream.drop(position) skips lines
# previously read and Enum.into([]) collects the new lines. Returns the state with updated last_modified,
# position, and size.
@spec check_for_lines(state) :: state
defp check_for_lines(state = {stream, fun, interval, last_modified, position, size}) do
with {:exists, true} <- {:exists, File.exists?(stream.path)},
{:ok, stat} <- File.stat(stream.path),
{:mtime, true} <- {:mtime, stat.mtime != last_modified},
{:size, true} <- {:size, stat.size >= size} do
lines =
stream
|> Stream.drop(position)
|> Enum.into([])
if lines != [] do
fun.(lines)
end
{stream, fun, interval, stat.mtime, position + length(lines), stat.size}
else
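# File missing, unreadable, or truncated: reset to a fresh stream and read from the top.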
{:exists, false} -> {File.stream!(stream.path), fun, interval, last_modified, 0, 0}
{:error, _} -> {File.stream!(stream.path), fun, interval, last_modified, 0, 0}
{:size, false} -> {File.stream!(stream.path), fun, interval, last_modified, 0, 0}
{:mtime, false} -> state
end
end
end
|
lib/tail.ex
| 0.839372
| 0.566498
|
tail.ex
|
starcoder
|
defmodule Dagex.Repo do
@moduledoc """
Adds Dagex-specific functionality to your application's `Ecto.Repo` module.
```elixir
defmodule MyApp.Repo do
use Ecto.Repo, otp_app: :my_app, adapter: Ecto.Adapters.Postgres
use Dagex.Repo
end
```
"""
alias Dagex.Operations.{CreateEdge, RemoveEdge}
@spec dagex_update(Ecto.Repo.t(), CreateEdge.t() | RemoveEdge.t()) ::
CreateEdge.result() | RemoveEdge.result()
@doc false
def dagex_update(_repo, {:error, _reason} = error), do: error
def dagex_update(repo, %CreateEdge{} = op) do
result =
case repo.query("SELECT dagex_create_edge($1, $2, $3)", [
op.node_type,
op.parent_id,
op.child_id
]) do
{:ok, _result} ->
:ok
{:error, %Postgrex.Error{postgres: %{constraint: constraint_name}}}
when is_bitstring(constraint_name) ->
{:error, constraint_name}
end
CreateEdge.process_result(result, op)
end
def dagex_update(repo, %RemoveEdge{} = op) do
result =
case repo.query("SELECT dagex_remove_edge($1, $2, $3)", [
op.node_type,
op.parent_id,
op.child_id
]) do
{:ok, _result} ->
:ok
{:error, %Postgrex.Error{postgres: %{constraint: constraint_name}}}
when is_bitstring(constraint_name) ->
{:error, constraint_name}
end
RemoveEdge.process_result(result, op)
end
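# A minimal usage sketch (hypothetical values; in practice operations are built by
# Dagex's public API rather than constructed by hand):
#
#     %Dagex.Operations.CreateEdge{node_type: "category", parent_id: "1", child_id: "2"}
#     |> MyApp.Repo.dagex_update()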
@doc """
Executes a Dagex repo operation such as `Dagex.Operations.CreateEdge` and processes the result.
"""
@callback dagex_update(operation :: CreateEdge.t() | RemoveEdge.t()) ::
CreateEdge.result() | RemoveEdge.result()
@spec dagex_paths(repo :: Ecto.Repo.t(), queryable :: Ecto.Queryable.t()) ::
list(list(Ecto.Schema.t()))
@doc false
def dagex_paths(repo, queryable) do
queryable
|> repo.all()
|> Enum.group_by(fn node -> node.path end)
|> Enum.map(fn {_path, nodes} ->
nodes
|> Enum.sort(&(&1.position <= &2.position))
|> Enum.map(fn node -> node.node end)
end)
end
@doc """
Executes the query generated by `c:Dagex.all_paths/2` and processes the result
into a list of paths where each path is a list of the nodes between (and
including) the `ancestor` and the `descendant` nodes.
"""
@callback dagex_paths(all_paths_query :: Ecto.Queryable.t()) :: list(list(Ecto.Schema.t()))
@spec __using__(any()) :: Macro.t()
defmacro __using__(_opts) do
quote do
@behaviour Dagex.Repo
@impl Dagex.Repo
@spec dagex_update(CreateEdge.t() | RemoveEdge.t()) ::
CreateEdge.result() | RemoveEdge.result()
def dagex_update(operation), do: Dagex.Repo.dagex_update(__MODULE__, operation)
@impl Dagex.Repo
@spec dagex_paths(Ecto.Queryable.t()) :: list(list(Ecto.Schema.t()))
def dagex_paths(queryable), do: Dagex.Repo.dagex_paths(__MODULE__, queryable)
end
end
end
|
lib/dagex/repo.ex
| 0.864882
| 0.693473
|
repo.ex
|
starcoder
|
defmodule Baiji.Request.Query.Encoder do
@moduledoc """
Encodes an input map into a query string based on the input shape
"""
import Baiji.Core.Utilities
alias Baiji.Operation
@doc """
Encode an input map into a query string
"""
def encode(%Operation{input_shape: nil} = op) do
[]
|> encode_action(op)
|> encode_version(op)
|> to_query_string
end
def encode(%Operation{input: input} = op) when map_size(input) == 0 do
[]
|> encode_action(op)
|> encode_version(op)
|> to_query_string
end
def encode(%Operation{input: input, input_shape: shape, endpoint: %{shapes: shapes}} = op) do
[]
|> encode_action(op)
|> encode_version(op)
|> encode_input(input, shapes[shape], shapes, [])
|> to_query_string
end
@doc """
Encode the action name into the component list
"""
def encode_action(components, %Operation{action: action}) do
[{"Action", action} | components]
end
@doc """
Encode the API version into the component list
"""
def encode_version(components, %Operation{endpoint: %{version: version}}) do
[{"Version", version} | components]
end
def encode_input(query, input, %{"type" => "string"}, _shapes, keys), do: [to_component(keys, input) | query]
def encode_input(query, input, %{"type" => "integer"}, _shapes, keys), do: [to_component(keys, input) | query]
def encode_input(query, input, %{"type" => "long"}, _shapes, keys), do: [to_component(keys, input) | query]
def encode_input(query, input, %{"type" => "boolean"}, _shapes, keys), do: [to_component(keys, input) | query]
def encode_input(query, input, %{"type" => "list", "member" => %{"shape" => member_shape}, "flattened" => true}, shapes, keys) do
input
|> Enum.reduce({query, 1}, fn(member, {query, index}) ->
{encode_input(query, member, shapes[member_shape], shapes, [index | keys]), index + 1}
end)
|> then(fn {query, _} -> query end)
end
def encode_input(query, input, %{"type" => "list", "member" => %{"shape" => member_shape}}, shapes, keys) do
input
|> Enum.reduce({query, 1}, fn(member, {query, index}) ->
{encode_input(query, member, shapes[member_shape], shapes, [index, "member" | keys]), index + 1}
end)
|> then(fn {query, _} -> query end)
end
def encode_input(query, input, %{"type" => "structure", "members" => members}, shapes, keys) do
members
|> Enum.reduce(query, fn({name, %{"shape" => member_shape} = val}, query) ->
location = Map.get(val, "locationName", name)
if Map.has_key?(input, location) do
encode_input(query, input[location], shapes[member_shape], shapes, [location | keys])
else
query
end
end)
end
@doc """
Given a key component list and a value, generate a key-value tuple
"""
def to_component(keys, value) do
{to_key(keys), value}
end
@doc """
Generate a query string key name from a set of components
"""
def to_key(keys) when is_list(keys) do
keys
|> :lists.reverse
|> Enum.join(".")
end
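# e.g. to_key([2, "member", "InstanceIds"]) produces "InstanceIds.member.2",
# since key components are accumulated in reverse while walking the shape.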
@doc """
Take a component list and turn it into a query string
"""
def to_query_string([]), do: ""
def to_query_string(components) do
components
|> Enum.sort_by(fn {key, _} -> key end)
|> URI.encode_query
|> then(fn query -> "?" <> query end)
end
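# e.g. to_query_string([{"Version", "2010-05-08"}, {"Action", "ListUsers"}])
# returns "?Action=ListUsers&Version=2010-05-08" (components are sorted by key).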
end
|
lib/baiji/request/query/encoder.ex
| 0.806472
| 0.541954
|
encoder.ex
|
starcoder
|
defmodule Throttlex do
@moduledoc """
Throttlex implements a token-bucket rate limiter (a leaky bucket used as a meter): tokens accrue at a fixed rate up to a maximum, and each request spends some. It uses Erlang ETS for storage.
"""
use GenServer
@buckets Application.get_env(:throttlex, :buckets) || []
@type key :: integer | binary | tuple | atom
@spec check(atom, key) :: :ok | :error
def check(name, id), do: check(name, id, nil)
@spec check(atom, key, integer | nil) :: :ok | :error
Enum.map(@buckets, fn {name, config} ->
[rate_per_second: rate, max_accumulated: max, cost: cost] = config
def check(unquote(name), id, cost) do
Throttlex.do_check(unquote(name), id, unquote(rate), unquote(max), cost || unquote(cost))
end
end)
def check(name, _id, _cost) do
raise "#{name} is not configured"
end
@doc false
def start_link() do
if Application.get_env(:throttlex, :verbose, false) == true do
IO.inspect @buckets
end
new_table(Keyword.keys(@buckets))
GenServer.start_link(__MODULE__, [], [name: __MODULE__])
end
@spec new_table([atom]) :: nil
defp new_table([]), do: nil
defp new_table([name | names]) do
:ets.new(name, [:public, :named_table, :set, write_concurrency: true, read_concurrency: true])
new_table(names)
end
@doc """
Check user's rate, same `rate_per_second`, `max_accumulated` should be passed to check functions
in order to inspect user's rate.
##Arguments:
- `table`: an atom representing bucket name.
- `id`: id.
- `rate_per_second`: how many rates should be added to bucket per second.
- `max_accumulated`: maximum rates allowed in the bucket.
- `cost`: costs of each request.
##Examples:
# For user id 1, one extra request will be added to bucket, maximum accumulated requests number
is 4, and every request will cost 1 token. First request will be permitted.
iex> Throttlex.check(:user_request, 1, 1, 2, 1)
:ok
# Second request is permitted also since we allowed 2 requests maximum.
iex> Throttlex.check(:user_request, 1, 1, 2, 1)
:ok
# If the third request is made within 1 second (the recovery time), it will return :error.
iex> Throttlex.check(:user_request, 1, 1, 2, 1)
:error
"""
@spec do_check(atom, key, integer, integer, integer) :: :ok | :error
def do_check(table, id, rate_per_second, max_accumulated, cost) do
now = :erlang.system_time(:millisecond)
case :ets.lookup(table, id) do
[] ->
:ets.insert(table, {id, max_accumulated - cost, now})
:ok
[{id, tokens_left, last_time}] ->
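# Refill: credit tokens for the milliseconds elapsed since the last check, capped at max_accumulated.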
tokens = tokens_left + (now - last_time)/1000 * rate_per_second
tokens = case tokens > max_accumulated do
true -> max_accumulated
false -> tokens
end
case tokens < cost do
true -> :error
false ->
:ets.update_element(table, id, [{2, tokens - cost}, {3, now}])
:ok
end
end
end
@doc """
Clear given ets table, this is often needed in tests
"""
@spec clear(atom | [atom] ) :: :ok
def clear([]), do: :ok
def clear([table | tables]) do
clear(table)
clear(tables)
end
def clear(table) do
:ets.delete_all_objects(table)
:ok
end
def clear_all() do
clear(Keyword.keys(@buckets))
end
def inspect(table, id) do
case :ets.lookup(table, id) do
[] -> nil
[{_id, tokens_left, _last_time}] -> tokens_left
end
end
end
|
lib/throttlex.ex
| 0.890809
| 0.454775
|
throttlex.ex
|
starcoder
|
defmodule Is.Validators.Length do
@moduledoc """
Length validator.
You can implement the protocol `Is.Validators.Length.Of` to get custom structure length.
## Examples
iex> Is.validate("1", length: 1)
[]
iex> Is.validate("123", length: [min: 3])
[]
iex> Is.validate("123", length: [max: 3])
[]
iex> Is.validate("123", length: [min: 1, max: 3])
[]
iex> Is.validate([1, 2, 3], length: [min: 1, max: 3])
[]
iex> Is.validate([1, 2, 3], length: [1, 3])
[]
iex> Is.validate("12", length: 4)
[{:error, [], "length must equals to 4"}]
iex> Is.validate("12", length: [min: 3])
[{:error, [], "length must at least be 3"}]
iex> Is.validate("1234", length: [max: 3])
[{:error, [], "length must at most be 3"}]
iex> Is.validate("1234", length: [min: 1, max: 3])
[{:error, [], "length must be between 1 and 3 inclusive"}]
iex> Is.validate("1234", length: [1, 3])
[{:error, [], "length must be between 1 and 3 inclusive"}]
"""
def validate(data, num) when is_number(num) do
with {:ok, size} <- Is.Validators.Length.Of.get_length(data) do
case size === num do
true -> :ok
false -> {:error, "length must equals to #{num}"}
end
else
{:error, error} -> {:error, error}
end
end
def validate(data, [min, max]) when is_number(min) and is_number(max) do
validate(data, [min: min, max: max])
end
def validate(data, options) when is_list(options) do
min = Keyword.get(options, :min)
max = Keyword.get(options, :max)
with {:ok, size} <- Is.Validators.Length.Of.get_length(data) do
validate_with_range(size, [min, max])
else
{:error, error} -> {:error, error}
end
end
def validate(_data, _options) do
{:error, "length: value is not a binary or list, or options are invalid"}
end
defp validate_with_range(size, [min, max]) do
cond do
is_number(min) and is_number(max) ->
if size >= min and size <= max do
:ok
else
{:error, "length must be between #{min} and #{max} inclusive"}
end
is_number(min) and not is_number(max) ->
if size >= min do
:ok
else
{:error, "length must at least be #{min}"}
end
is_number(max) and not is_number(min) ->
if size <= max do
:ok
else
{:error, "length must at most be #{max}"}
end
true -> {:error, "length: value is not a binary or options are invalid"}
end
end
end
defprotocol Is.Validators.Length.Of do
@fallback_to_any true
@spec get_length(any) :: {:ok, integer} | {:error, binary}
def get_length(data)
end
defimpl Is.Validators.Length.Of, for: Any do
def get_length(data) when is_binary(data) do
{:ok, String.length(data)}
end
def get_length(data) when is_list(data) do
{:ok, length(data)}
end
def get_length(data) when is_tuple(data) do
{:ok, data |> Tuple.to_list() |> length()}
end
def get_length(_data) do
{:error, "value length cannot be determined. Please implement protocol Is.Validators.Length.Of"}
end
end
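# A sketch of a custom implementation for your own struct (`MyStruct` and its
# `items` field are hypothetical):
#
#     defimpl Is.Validators.Length.Of, for: MyStruct do
#       def get_length(%MyStruct{items: items}), do: {:ok, length(items)}
#     end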
|
lib/is/validators/length.ex
| 0.887966
| 0.693019
|
length.ex
|
starcoder
|
defmodule MapTileRenderer.TileGrid do
defstruct width: 0, height: 0, lat: 0, lon: 0, resolution: 0, tiles: %{}, bbox: {{0.0, 0.0}, {0.0, 0.0}}, row_bboxes: []
use GenServer
def start_link(width, height, lat, lon, resolution, default_tile) do
bbox = calc_bbox(lon, lat, width, height, resolution)
row_bboxes = make_row_bboxes(lon, lat, width, height, resolution)
tile_grid = %MapTileRenderer.TileGrid{width: width, height: height, lat: lat, lon: lon, bbox: bbox,
resolution: resolution, tiles: make_tile_grid(width, height, default_tile),
row_bboxes: row_bboxes}
GenServer.start_link(__MODULE__, tile_grid, [])
end
def stop(grid) do
GenServer.stop(grid)
end
defp calc_bbox(lon, lat, width, height, resolution) do
moved_lon = MapTileRenderer.Coordinates.move_lon({lat, lon}, resolution, width)
moved_lat = MapTileRenderer.Coordinates.move_lat({lat, lon}, resolution, height)
min_bounds = {min(lon, moved_lon), min(lat, moved_lat)}
max_bounds = {max(lon, moved_lon), max(lat, moved_lat)}
{min_bounds, max_bounds}
end
defp make_row_bboxes(lon, lat, width, height, resolution) do
{{min_lon, _}, {max_lon, _}} = calc_bbox(lon, lat, width, height, resolution)
Enum.map(0..height - 1, fn row ->
row_lat = MapTileRenderer.Coordinates.move_lat({lat, lon}, resolution, row)
{{min_lon, row_lat}, {max_lon, row_lat}}
end)
end
defp make_tile_grid(width, height, default_tile) do
Map.new for row <- 0..height - 1, col <- 0..width - 1, do: {{row, col}, [default_tile]}
end
def render_polygon(grid, polygon) do
GenServer.call(grid, {:render, polygon})
end
def get_tiles(grid) do
GenServer.call(grid, {:get_tiles})
end
def get_bbox(grid) do
GenServer.call(grid, {:get_bbox})
end
def init(tile_grid) do
{:ok, tile_grid}
end
def handle_call({:render, polygon}, _from, grid) do
grid = %{grid | tiles: update_tiles(grid, polygon)}
{:reply, :ok, grid}
end
def handle_call({:get_tiles}, _from, grid) do
{:reply, grid.tiles, grid}
end
def handle_call({:get_bbox}, _from, grid) do
{:reply, grid.bbox, grid}
end
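# Scanline rasterization: for each tile row, intersect the polygon with the row's
# latitude line and fill the tiles between successive pairs of intersections.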
defp update_tiles(grid, polygon) do
Enum.with_index(grid.row_bboxes)
|> Enum.reduce(grid.tiles, fn {row_bbox, row}, tiles ->
lat = MapTileRenderer.Coordinates.move_lat({grid.lat, grid.lon}, grid.resolution, row)
apply_intersections(row, lat, polygon, grid, tiles, row_bbox)
end)
end
defp apply_intersections(row, lat, polygon, grid, tiles, bbox) do
polygon_intersections(lat, polygon, bbox)
|> to_grid_space(grid)
|> Enum.sort_by(fn {_, intersection} -> intersection end)
|> Enum.chunk_every(2, 2, :discard)
|> Enum.filter(fn [{_, start}, {_, stop}] -> stop > 0 && start < grid.width end)
|> Enum.reduce(tiles, fn [{start_tile, start}, {_, stop}], tiles ->
Enum.reduce(start..stop, tiles, fn col, tiles ->
case Map.has_key?(tiles, {row, col}) do
true ->
{_, tiles} = Map.get_and_update!(tiles, {row, col}, &({&1, [start_tile | &1]}))
tiles
_ -> tiles
end
end)
end)
end
defp to_grid_space(intersections, %{lat: lat, lon: lon, resolution: resolution, width: width}) do
Enum.map(intersections, fn {type, intersection} ->
lon_width = MapTileRenderer.Coordinates.move_lon({lat, lon}, resolution, width) - lon
{type, round((intersection - lon) / lon_width * width)}
end)
end
defp polygon_intersections(lat, polygon, bbox) do
Enum.filter(polygon.areas, fn {_, _, area_bbox} ->
MapTileRenderer.Intersection.box_vs_box?(bbox, area_bbox)
end)
|> Enum.flat_map(fn {tile, vertices, _bbox} ->
MapTileRenderer.Intersection.polygon_scanline_intersections(lat, vertices)
|> Enum.map(&({tile, &1}))
end)
end
end
|
lib/map_tile_renderer/tile_grid.ex
| 0.76708
| 0.737016
|
tile_grid.ex
|
starcoder
|
defmodule PlayfabEx.Server.Default.SharedGroupData do
@doc """
Adds users to the set of those able to update both the shared data, as well as the set of users in the group. Only users in the group (and the server) can add new members. Shared Groups are designed for sharing data between a very small number of players, please see our guide:
@link https://api.playfab.com/documentation/server/method/AddSharedGroupMembers
"""
@spec add_shared_group_members(map()) :: {:ok, map} | {:error, String.t}
def add_shared_group_members(params) do
fetch("Server/AddSharedGroupMembers", params)
end
@doc """
Requests the creation of a shared group object, containing key/value pairs which may be updated by all members of the group. Shared Groups are designed for sharing data between a very small number of players, please see our guide:
@link https://api.playfab.com/documentation/server/method/CreateSharedGroup
"""
@spec create_shared_group(map()) :: {:ok, map} | {:error, String.t}
def create_shared_group(params) do
fetch("Server/CreateSharedGroup", params)
end
@doc """
Deletes a shared group, freeing up the shared group ID to be reused for a new group. Shared Groups are designed for sharing data between a very small number of players, please see our guide:
@link https://api.playfab.com/documentation/server/method/DeleteSharedGroup
"""
@spec delete_shared_group(map()) :: {:ok, map} | {:error, String.t}
def delete_shared_group(params) do
fetch("Server/DeleteSharedGroup", params)
end
@doc """
@link https://api.playfab.com/documentation/server/method/GetSharedGroupData
"""
@spec get_shared_group_data(map()) :: {:ok, map} | {:error, String.t}
def get_shared_group_data(params) do
fetch("Server/GetSharedGroupData", params)
end
@doc """
@link https://api.playfab.com/documentation/server/method/RemoveSharedGroupMembers
"""
@spec remove_shared_group_members(map()) :: {:ok, map} | {:error, String.t}
def remove_shared_group_members(params) do
fetch("Server/RemoveSharedGroupMembers", params)
end
@doc """
@link https://api.playfab.com/documentation/server/method/UpdateSharedGroupData
"""
@spec update_shared_group_data(map()) :: {:ok, map} | {:error, String.t}
def update_shared_group_data(params) do
fetch("Server/UpdateSharedGroupData", params)
end
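# POSTs the JSON-encoded params to the PlayFab Server API and decodes the JSON body;
# any exception is normalized to an {:error, _} tuple.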
defp fetch(path, params) do
string_params = Poison.encode!(params)
try do
response = Tesla.post("https://#{title_id()}.playfabapi.com/#{path}", string_params, headers: [{"Content-Type", "application/json"}, {"X-SecretKey", secret_key()}])
|> Map.get(:body)
|> Poison.decode!
{:ok, response}
rescue
_ -> {:error, "Unable to fetch data from playfab"}
end
end
defp title_id() do
Application.get_env(:playfab_ex, :title_id)
end
defp secret_key() do
Application.get_env(:playfab_ex, :secret_key)
end
end
|
lib/server/default/shared_group_data.ex
| 0.61115
| 0.46557
|
shared_group_data.ex
|
starcoder
|
defmodule Tyyppi.Stats do
@moduledoc """
Process caching the loaded types information.
If your application uses the types information often, it makes sense
to cache it in a process state, because gathering it takes some time. In such
a case your application should start this process in the application’s
supervision tree, and call `#{inspect(__MODULE__)}.rehash!/0` every time a
new module is compiled at runtime.
"""
alias Tyyppi.T
use GenServer
@typedoc "Types information cache"
@type info :: %{fun() => Tyyppi.T.t(term())}
@typedoc """
Function to be called upon rehashing. When the arity is `1`, the full new state
is passed; for arity `2`, the `added` and `removed` types are passed; for
arity `3`, `added`, `removed`, and the full state are passed.
"""
@type callback ::
(info() -> any()) | (info(), info() -> any()) | (info(), info(), info() -> any())
@spec start_link(types :: info(), meta :: keyword()) :: GenServer.on_start()
@doc """
Starts the cache process. The optional parameter might contain any payload
that will be stored in the process’ state.
If the payload contains a `callback` parameter (see `t:callback/0`), it will
be called every time the types information gets rehashed.
"""
def start_link(types \\ %{}, meta \\ []),
do: GenServer.start_link(__MODULE__, %{meta: meta, types: types}, name: __MODULE__)
@spec types :: info()
@doc """
Retrieves all the types information currently available in the system.
"""
def types do
case Process.whereis(__MODULE__) do
pid when is_pid(pid) -> GenServer.call(__MODULE__, :types)
nil -> __MODULE__ |> :ets.info() |> types_from_ets()
end
end
@doc false
@spec dump(Path.t()) :: :ok | {:error, File.posix()}
def dump(file) do
case Process.whereis(__MODULE__) do
pid when is_pid(pid) ->
File.write(file, :erlang.term_to_binary(types()))
nil ->
File.rm(file)
with {:ok, dets} <- :dets.open_file(__MODULE__, file: to_charlist(file)),
_info <- __MODULE__ |> :ets.info() |> types_from_ets(),
__MODULE__ <- :ets.to_dets(__MODULE__, dets),
do: :dets.close(dets)
end
end
@doc false
@spec load(:process | :ets, Path.t(), keyword()) :: GenServer.on_start()
def load(kind \\ :ets, file, meta \\ [])
def load(:process, file, meta) do
file
|> File.read!()
|> :erlang.binary_to_term()
|> start_link(meta)
end
def load(:ets, file, _meta) do
with true <- File.exists?(file),
{:ok, dets} <- file |> to_charlist() |> :dets.open_file(),
_ <- types_from_ets(:undefined),
true <- :ets.from_dets(__MODULE__, dets),
do: :dets.close(dets)
end
@spec type(fun() | atom() | T.ast() | T.raw()) :: Tyyppi.T.t(wrapped) when wrapped: term()
@doc """
Retrieves the type information for the type given.
"""
def type(fun) when is_function(fun) do
__MODULE__
|> Process.whereis()
|> case do
pid when is_pid(pid) -> __MODULE__ |> GenServer.call(:types) |> Map.get(fun)
nil -> __MODULE__ |> :ets.info() |> type_from_ets(fun)
end
|> case do
# FIXME FIXME
nil -> Tyyppi.any()
%T{} = t -> t
end
end
def type({module, fun, arity}) when is_atom(module) and is_atom(fun) and is_integer(arity),
do: module |> Function.capture(fun, arity) |> type()
def type(definition) when is_tuple(definition) do
%T{
type: :built_in,
module: nil,
name: nil,
params: [],
source: nil,
definition: definition,
quoted: definition_to_quoted(definition)
}
end
@spec rehash! :: :ok
@doc """
Rehashes the types information currently available in the system. This function
should be called after the application has created a module in runtime for this
module information to appear in the cache.
"""
def rehash! do
case Process.whereis(__MODULE__) do
pid when is_pid(pid) ->
GenServer.cast(__MODULE__, :rehash!)
nil ->
if :ets.info(__MODULE__) != :undefined, do: :ets.delete(__MODULE__)
spawn_link(fn -> types_from_ets(:undefined) end)
:ok
end
end
@impl GenServer
@doc false
def init(state), do: {:ok, state, {:continue, :load}}
@impl GenServer
@doc false
def handle_continue(:load, state),
do: {:noreply, %{state | types: loaded_types(state.types, state.meta[:callback])}}
@impl GenServer
@doc false
def handle_cast(:rehash!, state),
do: {:noreply, %{state | types: loaded_types(state.types, state.meta[:callback])}}
@impl GenServer
@doc false
def handle_call(:types, _from, state), do: {:reply, state.types, state}
@spec type_to_map(module(), charlist(), {atom(), Tyyppi.T.ast()}) ::
{fun(), Tyyppi.T.t(wrapped)}
when wrapped: term()
defp type_to_map(module, source, {type, {name, definition, params}}) do
param_names = T.param_names(params)
{Function.capture(module, name, length(params)),
%T{
type: type,
module: module,
name: name,
params: params,
source: to_string(source),
definition: definition,
quoted: quote(do: unquote(module).unquote(name)(unquote_splicing(param_names)))
}}
end
@spec loaded_types(types :: nil | info(), callback :: nil | callback()) :: info()
defp loaded_types(_types, nil) do
:code.all_loaded()
|> Enum.flat_map(fn {module, source} ->
case Code.Typespec.fetch_types(module) do
{:ok, types} -> Enum.map(types, &type_to_map(module, source, &1))
:error -> []
end
end)
|> Map.new()
end
defp loaded_types(_types, callback) when is_function(callback, 1) do
result = loaded_types(nil, nil)
callback.(result)
result
end
defp loaded_types(types, callback) when is_function(callback, 2) do
result = loaded_types(nil, nil)
added = Map.take(result, Map.keys(result) -- Map.keys(types))
removed = Map.take(types, Map.keys(types) -- Map.keys(result))
callback.(added, removed)
result
end
defp loaded_types(types, callback) when is_function(callback, 3) do
result = loaded_types(nil, nil)
added = Map.take(result, Map.keys(result) -- Map.keys(types))
removed = Map.take(types, Map.keys(types) -- Map.keys(result))
callback.(added, removed, result)
result
end
defp definition_to_quoted({:type, _, name, params}),
do: quote(do: unquote(name)(unquote_splicing(params)))
defp definition_to_quoted({:atom, _, name}),
do: quote(do: unquote(name))
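# Builds the ETS cache on first use (:undefined); otherwise folds the existing table back into a map.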
@spec types_from_ets(:undefined | keyword()) :: info()
defp types_from_ets(:undefined) do
try do
:ets.new(__MODULE__, [:set, :named_table, :public])
rescue
_ in [ArgumentError] -> :ok
end
result = loaded_types(nil, nil)
Enum.each(result, &:ets.insert(__MODULE__, &1))
result
end
defp types_from_ets(_),
do: :ets.foldl(fn {k, v}, acc -> Map.put(acc, k, v) end, %{}, __MODULE__)
@spec type_from_ets(:undefined | keyword(), fun()) :: Tyyppi.T.t(wrapped) when wrapped: term()
defp type_from_ets(:undefined, key),
do: :undefined |> types_from_ets() |> Map.get(key)
defp type_from_ets(_, key),
do: __MODULE__ |> :ets.select([{{key, :"$1"}, [], [:"$1"]}]) |> List.first()
end
|
lib/tyyppi/stats.ex
| 0.711932
| 0.425665
|
stats.ex
|
starcoder
|
defmodule PandaDoc.PhoenixController do
@moduledoc """
Implements a PhoenixController that can be easily wired up and used.
## Examples
```elixir
defmodule YourAppWeb.PandaDocController do
use PandaDoc.PhoenixController
def handle_document_change(id, status, _details) do
id
|> Documents.get_by_pandadoc_id!()
|> Documents.update_document(%{status: status})
end
def handle_document_complete(id, pdf, status, _details) do
id
|> Documents.get_by_pandadoc_id!()
|> Documents.update_document(%{data: pdf, status: status})
end
end
```
Put the following lines into your `router.ex` and configure the webhook in the PandaDoc portal.
```elixir
post "/callbacks/pandadoc", YourAppWeb.PandaDocController, :webhook
```
"""
@doc """
Triggers when a PandaDoc document changed.
"""
@callback handle_document_change(String.t(), String.t(), map()) :: any()
@doc """
Triggers when a PandaDoc document has been completed/signed.
"""
@callback handle_document_complete(String.t(), binary(), String.t(), map()) :: any()
defmacro __using__(_) do
quote do
@moduledoc "Implements a PhoenixController with callbacks for PandaDoc."
@behaviour PandaDoc.PhoenixController
require Logger
use Phoenix.Controller
alias Plug.Conn
@doc "default webhook that should match."
@spec webhook(Conn.t(), map()) :: Conn.t()
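# Phoenix's JSON parser nests a top-level JSON array under the "_json" key,
# which is the shape pandadoc posts for webhook events.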
def webhook(conn, %{"_json" => data}) do
Enum.each(data, &parse_document/1)
Conn.send_resp(conn, 200, "")
end
@doc "fallback webhook that should not match."
@spec webhook(Conn.t(), map()) :: Conn.t()
def webhook(conn, _), do: Conn.send_resp(conn, 406, "")
# parsing valid document state changes
defp parse_document(%{
"event" => "document_state_changed",
"data" =>
%{
"id" => id,
"status" => status
} = details
}) do
spawn(fn ->
case status do
"document.completed" ->
pdf = download_data(id)
handle_document_complete(id, pdf, status, details)
_ ->
handle_document_change(id, status, details)
end
end)
end
# failsafe for parsing bad documents
defp parse_document(_), do: :ok
# downloads the document from pandadoc
defp download_data(id) do
if Mix.env() == :test do
# no way of testing the pandadoc api programmatically
Logger.info("[PandaDoc] Using dummy data for tests")
:crypto.strong_rand_bytes(128)
else
# we wait here since PandaDoc is not the fastest
Logger.info("[PandaDoc] Downloading document #{id} in 30 seconds")
:timer.sleep(30_000)
id
|> PandaDoc.download_document()
|> ok_data(id)
end
end
# just returns the pdf document
defp ok_data({:ok, pdf}, id) do
Logger.info("[PandaDoc] Successfully downloaded document #{id}")
pdf
end
# retries the request in 2 seconds
defp ok_data({:error, error}, id) do
Logger.info(
"[PandaDoc] Retrying download of document #{id} in 2 seconds: #{inspect(error)}"
)
:timer.sleep(2_000)
download_data(id)
end
end
end
end
|
lib/panda_doc/phoenix_controller.ex
| 0.776962
| 0.599163
|
phoenix_controller.ex
|
starcoder
|
defmodule JWT.Algorithm.Hmac do
@moduledoc """
Sign or verify a JSON Web Signature (JWS) structure using HMAC with SHA-2 algorithms
see http://tools.ietf.org/html/rfc7518#section-3.2
"""
require JWT.Algorithm.SHA, as: SHA
@doc """
Return a Message Authentication Code (MAC)
## Example
iex> shared_key = "<KEY>"
...> JWT.Algorithm.Hmac.sign(:sha256, shared_key, "signing_input")
<<90, 34, 44, 252, 147, 130, 167, 173, 86, 191, 247, 93, 94, 12, 200, 30, 173, 115, 248, 89, 246, 222, 4, 213, 119, 74, 70, 20, 231, 194, 104, 103>>
"""
def sign(sha_bits, shared_key, signing_input) when SHA.valid?(sha_bits) do
validate_key_size!(sha_bits, shared_key)
:crypto.mac(:hmac, sha_bits, shared_key, signing_input)
end
# http://tools.ietf.org/html/rfc7518#section-3.2
defp validate_key_size!(sha_bits, key) do
bits = SHA.fetch_length!(sha_bits)
if byte_size(key) * 8 < bits do
raise JWT.SecurityError, type: :hmac, message: "Key size smaller than the hash output size"
end
end
@doc """
Predicate to verify the signing_input by comparing a given `mac` to the `mac` for a newly
signed message; comparison done in a constant-time manner to thwart timing attacks
## Example
iex> mac = <<90, 34, 44, 252, 147, 130, 167, 173, 86, 191, 247, 93, 94, 12, 200, 30, 173, 115, 248, 89, 246, 222, 4, 213, 119, 74, 70, 20, 231, 194, 104, 103>>
...> shared_key = "<KEY>"
...> JWT.Algorithm.Hmac.verify?(mac, :sha256, shared_key, "signing_input")
true
"""
def verify?(mac, sha_bits, shared_key, signing_input) when SHA.valid?(sha_bits) do
mac_match?(mac, sign(sha_bits, shared_key, signing_input))
end
# compares two strings for equality in constant-time to avoid timing attacks
defp mac_match?(expected, actual) do
byte_size(expected) == byte_size(actual) && arithmetic_compare(expected, actual) == 0
end
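# Folds the binaries byte by byte, OR-ing the XOR of each byte pair into the
# accumulator, so the result is 0 only when every byte matched.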
defp arithmetic_compare(left, right, acc \\ 0)
defp arithmetic_compare(<<x, left::binary>>, <<y, right::binary>>, acc) do
import Bitwise
arithmetic_compare(left, right, acc ||| bxor(x, y))
end
defp arithmetic_compare("", "", acc), do: acc
end
|
lib/jwt/algorithm/hmac.ex
| 0.883544
| 0.475605
|
hmac.ex
|
starcoder
|
defmodule Que.Job do
defstruct [:id, :arguments, :worker, :status, :ref, :pid, :created_at, :updated_at]
## Note: Update Que.Persistence.Mnesia after changing these values
@moduledoc """
Module to manage a Job's state and execute the worker's callbacks.
Defines a `Que.Job` struct and keeps track of the Job's worker, arguments,
status and more. Meant for internal usage, so you shouldn't use this
unless you absolutely know what you're doing.
"""
@statuses [:queued, :started, :failed, :completed]
@typedoc "One of the atoms in `#{inspect(@statuses)}`"
@type status :: atom
@typedoc "A `Que.Job` struct"
@type t :: %Que.Job{}
@doc """
Returns a new Job struct with defaults
"""
@spec new(worker :: Que.Worker.t, args :: list | nil) :: Que.Job.t
def new(worker, args \\ nil) do
%Que.Job{
status: :queued,
worker: worker,
arguments: args
}
end
@doc """
Update the Job status to one of the predefined values in `@statuses`
"""
@spec set_status(job :: Que.Job.t, status :: status) :: Que.Job.t
def set_status(job, status) when status in @statuses do
%{ job | status: status }
end
@doc """
Updates the Job struct with new status and spawns & monitors a new Task
under the TaskSupervisor which executes the perform method with supplied
arguments
"""
@spec perform(job :: Que.Job.t) :: Que.Job.t
def perform(job) do
Que.Helpers.log("Starting #{job}")
{:ok, pid} =
Que.Helpers.do_task(fn ->
job.worker.perform(job.arguments)
end)
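# Monitoring the task lets the queue match the eventual :DOWN message back to this job via `ref`.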
%{ job | status: :started, pid: pid, ref: Process.monitor(pid) }
end
@doc """
Handles Job Success, Calls appropriate worker method and updates the job
status to :completed
"""
@spec handle_success(job :: Que.Job.t) :: Que.Job.t
def handle_success(job) do
Que.Helpers.log("Completed #{job}")
Que.Helpers.do_task(fn ->
job.worker.on_success(job.arguments)
end)
%{ job | status: :completed, pid: nil, ref: nil }
end
@doc """
Handles Job Failure, Calls appropriate worker method and updates the job
status to :failed
"""
@spec handle_failure(job :: Que.Job.t, error :: term) :: Que.Job.t
def handle_failure(job, error) do
Que.Helpers.log("Failed #{job}")
Que.Helpers.do_task(fn ->
job.worker.on_failure(job.arguments, error)
end)
%{ job | status: :failed, pid: nil, ref: nil }
end
end
## Implementing the String.Chars protocol for Que.Job structs
defimpl String.Chars, for: Que.Job do
def to_string(job) do
"Job # #{job.id} with #{ExUtils.Module.name(job.worker)}"
end
end
|
lib/que/job.ex
| 0.617282
| 0.542257
|
job.ex
|
starcoder
|
defmodule ExAdvent.Day02 do
@moduledoc """
# Day 2: Inventory Management System
You stop falling through time, catch your breath, and check the screen on the device. "Destination reached. Current Year: 1518. Current Location: North Pole Utility Closet 83N10." You made it! Now, to find those anomalies.
Outside the utility closet, you hear footsteps and a voice. "...I'm not sure either. But now that so many people have chimneys, maybe he could sneak in that way?" Another voice responds, "Actually, we've been working on a new kind of suit that would let him fit through tight spaces like that. But, I heard that a few days ago, they lost the prototype fabric, the design plans, everything! Nobody on the team can even seem to remember important details of the project!"
"Wouldn't they have had enough fabric to fill several boxes in the warehouse? They'd be stored together, so the box IDs should be similar. Too bad it would take forever to search the warehouse for two similar box IDs..." They walk too far away to hear any more.
"""
@input_path Path.join(:code.priv_dir(:ex_advent), "day_02_input")
def clean_sequence(input_path \\ @input_path) do
input_path
|> File.stream!()
|> Stream.map(&String.trim_trailing/1)
|> Enum.into([])
end
@doc """
Late at night, you sneak to the warehouse - who knows what kinds of paradoxes you could cause if you were discovered - and use your fancy wrist device to quickly scan every box and produce a list of the likely candidates (your puzzle input).
To make sure you didn't miss any, you scan the likely candidate boxes again, counting the number that have an ID containing exactly two of any letter and then separately counting those with exactly three of any letter. You can multiply those two counts together to get a rudimentary checksum and compare it to what your device predicts.
For example, if you see the following box IDs:
abcdef contains no letters that appear exactly two or three times.
bababc contains two a and three b, so it counts for both.
abbcde contains two b, but no letter appears exactly three times.
abcccd contains three c, but no letter appears exactly two times.
aabcdd contains two a and two d, but it only counts once.
abcdee contains two e.
ababab contains three a and three b, but it only counts once.
Of these box IDs, four of them contain a letter which appears exactly twice, and three of them contain a letter which appears exactly three times. Multiplying these together produces a checksum of 4 * 3 = 12.
What is the checksum for your list of box IDs?
"""
def calculate_checksum(input_list) do
{p, t} =
input_list
|> Stream.map(&letter_count(&1, %{}))
|> Stream.map(fn counts ->
# FIXME I hate this
{Enum.count(counts, fn c -> c == 2 end), Enum.count(counts, fn c -> c == 3 end)}
end)
|> Enum.reduce({0, 0}, fn {p, t}, {pairs, triplets} ->
{pairs + p, triplets + t}
end)
# FIXME This was not necessary for my input, but I would
# like to integrate this a bit better with above.
{p, t} =
case {p, t} do
{0, t} when t > 0 -> {1, t}
{p, 0} when p > 0 -> {p, 1}
_ -> {p, t}
end
p * t
end
def letter_count(<<>>, map) do
map
|> Map.values()
# FIXME can I do most of the work here to get a
# useful output - {pair, triplet} as {0 or 1, 0 or 1}
# would be ideal
|> Enum.uniq()
end
def letter_count(<<hd, tl::binary>>, map) do
case Map.has_key?(map, <<hd>>) do
true -> letter_count(tl, Map.update!(map, <<hd>>, &(&1 + 1)))
false -> letter_count(tl, Map.put(map, <<hd>>, 1))
end
end
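# e.g. letter_count("bababc", %{}) returns the distinct letter counts,
# here [1, 2, 3] in some order (one c, two a's, three b's).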
@doc """
# Part Two
Confident that your list of box IDs is complete, you're ready to find the boxes full of prototype fabric.
The boxes will have IDs which differ by exactly one character at the same position in both strings. For example, given the following box IDs:
abcde
fghij
klmno
pqrst
fguij
axcye
wvxyz
The IDs abcde and axcye are close, but they differ by two characters (the second and fourth). However, the IDs fghij and fguij differ by exactly one character, the third (h and u). Those must be the correct boxes.
What letters are common between the two correct box IDs? (In the example above, this is found by removing the differing character from either ID, producing fgij.)
"""
def common_letters(input_list) do
input_list
|> create_pairings()
|> Enum.reduce_while("", fn {a, b}, _ ->
case String.myers_difference(a, b) do
[eq: common_a, del: <<_diff_a::8>>, ins: <<_diff_b::8>>, eq: common_b] ->
{:halt, common_a <> common_b}
_ ->
{:cont, ""}
end
end)
end
def create_pairings(input_list) do
for id_a <- input_list,
id_b <- input_list,
id_a != id_b,
do: {id_a, id_b}
end
end
|
lib/ex_advent/day_02.ex
| 0.504883
| 0.631537
|
day_02.ex
|
starcoder
|
defmodule Faker.DateTime do
@moduledoc false
@microseconds_per_day 86_400_000_000
@doc """
Returns a random date in the past up to N days, today not included
## Examples
iex> Faker.DateTime.backward(4)
#=> %DateTime{calendar: Calendar.ISO, day: 20, hour: 6,
#=> microsecond: {922180, 6}, minute: 2, month: 12, second: 17,
#=> std_offset: 0, time_zone: "Etc/UTC", utc_offset: 0, year: 2016,
#=> zone_abbr: "UTC"}
"""
@spec backward(integer) :: DateTime.t()
def backward(days) do
forward(-days)
end
@doc """
Returns a random date in the future up to N days, today not included
## Examples
iex> Faker.DateTime.forward(4)
#=> %DateTime{calendar: Calendar.ISO, day: 25, hour: 6,
#=> microsecond: {922180, 6}, minute: 2, month: 12, second: 17,
#=> std_offset: 0, time_zone: "Etc/UTC", utc_offset: 0, year: 2016,
#=> zone_abbr: "UTC"}
"""
@spec forward(integer) :: DateTime.t()
def forward(days) do
sign = if days < 0, do: -1, else: 1
today = DateTime.utc_now() |> to_timestamp
# add or subtract extra day to avoid returning today
from = today + sign * @microseconds_per_day
to = from + @microseconds_per_day * days
unix_between(from, to)
end
@doc """
Returns a random DateTime between two dates
## Examples
iex> Faker.DateTime.between(~N[2016-12-20 00:00:00], ~N[2016-12-25 00:00:00])
#=> %DateTime{calendar: Calendar.ISO, day: 22, hour: 7,
#=> microsecond: {753572, 6}, minute: 56, month: 12, second: 26,
#=> std_offset: 0, time_zone: "Etc/UTC", utc_offset: 0, year: 2016,
#=> zone_abbr: "UTC"}
"""
@spec between(
Date.t() | NaiveDateTime.t() | DateTime.t(),
Date.t() | NaiveDateTime.t() | DateTime.t()
) :: DateTime.t()
def between(%Date{} = from, %Date{} = to) do
between(date_to_datetime(from), date_to_datetime(to))
end
def between(%NaiveDateTime{} = from, %NaiveDateTime{} = to) do
between(naivedatetime_to_datetime(from), naivedatetime_to_datetime(to))
end
def between(from, to) do
unix_between(to_timestamp(from), to_timestamp(to))
end
# private
@spec date_to_datetime(Date.t()) :: DateTime.t()
defp date_to_datetime(date) do
%DateTime{
calendar: Calendar.ISO,
day: date.day,
hour: 0,
minute: 0,
month: date.month,
second: 0,
time_zone: "Etc/UTC",
utc_offset: 0,
std_offset: 0,
year: date.year,
zone_abbr: "UTC"
}
end
@spec naivedatetime_to_datetime(NaiveDateTime.t()) :: DateTime.t()
defp naivedatetime_to_datetime(naivedatetime) do
%DateTime{
calendar: naivedatetime.calendar,
day: naivedatetime.day,
hour: naivedatetime.hour,
minute: naivedatetime.minute,
month: naivedatetime.month,
second: naivedatetime.second,
time_zone: "Etc/UTC",
utc_offset: 0,
std_offset: 0,
year: naivedatetime.year,
zone_abbr: "UTC"
}
end
defp to_timestamp(datetime) do
DateTime.to_unix(datetime, :microsecond)
end
defp unix_between(from, to) do
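# Works regardless of argument order: pick a random offset within |diff| and
# apply it in the direction of `to`.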
diff = to - from
sign = if diff < 0, do: -1, else: 1
date = from + sign * Faker.random_between(0, abs(diff))
DateTime.from_unix!(date, :microsecond)
end
end
|
lib/faker/datetime.ex
| 0.846403
| 0.44077
|
datetime.ex
|
starcoder
|
defmodule Optimal.Schema do
@moduledoc """
Functions for generating and validating the opts that generate a schema.
"""
defstruct opts: [],
defaults: [],
describe: [],
required: [],
extra_keys?: false,
types: [],
custom: [],
annotations: []
@type t :: %__MODULE__{
opts: [atom],
defaults: Keyword.t(),
describe: Keyword.t(),
required: [atom],
extra_keys?: boolean,
types: Keyword.t(),
custom: Keyword.t(),
annotations: Keyword.t()
}
# These opts cannot be auto-documented, so must be regenerated manually
@doc """
Create a new schema.
---
## Opts
* `opts`(`[{:list, :atom}, :keyword]`): A list of opts accepted, or a keyword of opt name to opt type - Default: []
* `required`(`{:list, :atom}`): A list of required opts (all of which must be in `opts` as well) - Default: []
* `defaults`(`:keyword`): A keyword list of option name to a default value. Values must pass type rules - Default: []
* `extra_keys?`(`:boolean`): If enabled, extra keys not specified by the schema do not fail validation - Default: false
* `custom`(`:keyword`): A keyword list of option name (for errors) and custom validations. See README - Default: []
* `describe`(`:keyword`): A keyword list of option names to short descriptions (like these) - Default: []
---
A custom validation is run on the types provided at schema creation time, to ensure they are all valid types.
"""
@spec new() :: t()
def new() do
new(opts: [])
end
@spec new(opts :: Keyword.t()) :: t() | no_return
def new([]), do: new(opts: [])
def new(opts) do
opts = Optimal.validate!(opts, Optimal.SchemaHelpers.schema_schema())
%__MODULE__{
opts: opt_keys(opts[:opts]),
types: to_keyword(opts[:opts]),
defaults: opts[:defaults],
describe: opts[:describe],
extra_keys?: opts[:extra_keys?],
required: opts[:required],
custom: opts[:custom],
annotations: []
}
end
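# A small sketch of schema creation (option names and types are illustrative):
#
#     Optimal.Schema.new(opts: [name: :string, count: :integer], required: [:name])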
# These opts cannot be auto-documented, so must be regenerated manually
@doc """
Merges two optimal schemas to create a superset schema.
---
## Opts
* `annotate`(`:string`): Annotates the source of the opt, to be used in displaying documentation.
* `add_required?`(`:boolean`): If true, all required fields from left/right are marked as required. Otherwise, only takes required fields from the left.
---
"""
@spec merge(left :: t(), right :: t(), opts :: Keyword.t()) :: t()
def merge(left, right, opts \\ []) do
opts = Optimal.validate!(opts, Optimal.SchemaHelpers.merge_schema())
%__MODULE__{
opts: Enum.uniq(left.opts ++ right.opts),
defaults: Keyword.merge(left.defaults, right.defaults),
extra_keys?: right.extra_keys? || left.extra_keys?,
describe:
Keyword.merge(left.describe, right.describe, fn _, v1, v2 -> v1 <> " | " <> v2 end),
types: merge_types(left.types, right.types),
custom: left.custom ++ right.custom,
required: merge_required(left, right, opts),
annotations: merge_annotations(left, right, opts[:annotate])
}
end
defp opt_keys(opts) do
Enum.map(opts, fn
{opt, _} -> opt
opt -> opt
end)
end
defp to_keyword(opts) do
Enum.map(opts, fn
{opt, value} -> {opt, value}
opt -> {opt, :any}
end)
end
defp merge_types(left, right) do
Keyword.merge(left, right, fn _, v1, v2 -> Optimal.Type.merge(v1, v2) end)
end
defp merge_required(left, right, opts) do
if opts[:add_required?] do
Enum.uniq(left.required ++ right.required)
else
left.required
end
end
defp merge_annotations(left, right, annotation) when is_bitstring(annotation) do
base_annotations = Keyword.merge(left.annotations, right.annotations)
right.opts
|> Enum.map(&{&1, annotation})
|> Keyword.merge(base_annotations)
end
defp merge_annotations(left, right, nil) do
Keyword.merge(left.annotations, right.annotations)
end
end
|
lib/optimal/schema.ex
| 0.897328
| 0.62134
|
schema.ex
|
starcoder
|
defmodule AWS.ResourceGroupsTaggingAPI do
@moduledoc """
Resource Groups Tagging API
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2017-01-26",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "tagging",
global?: false,
protocol: "json",
service_id: "Resource Groups Tagging API",
signature_version: "v4",
signing_name: "tagging",
target_prefix: "ResourceGroupsTaggingAPI_20170126"
}
end
@doc """
Describes the status of the `StartReportCreation` operation.
You can call this operation only from the organization's management account and
from the us-east-1 Region.
"""
def describe_report_creation(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeReportCreation", input, options)
end
@doc """
Returns a table that shows counts of resources that are noncompliant with their
tag policies.
For more information on tag policies, see [Tag Policies](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies_tag-policies.html)
in the *Organizations User Guide.*
You can call this operation only from the organization's management account and
from the us-east-1 Region.
This operation supports pagination, where the response can be sent in multiple
pages. You should check the `PaginationToken` response parameter to determine if
there are additional results available to return. Repeat the query, passing the
`PaginationToken` response parameter value as an input to the next request until
you receive a `null` value. A null value for `PaginationToken` indicates that
there are no more results waiting to be returned.
"""
def get_compliance_summary(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetComplianceSummary", input, options)
end
@doc """
Returns all the tagged or previously tagged resources that are located in the
specified Amazon Web Services Region for the account.
Depending on what information you want returned, you can also specify the
following:
* *Filters* that specify what tags and resource types you want
returned. The response includes all tags that are associated with the requested
resources.
* Information about compliance with the account's effective tag
policy. For more information on tag policies, see [Tag Policies](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies_tag-policies.html)
in the *Organizations User Guide.*
This operation supports pagination, where the response can be sent in multiple
pages. You should check the `PaginationToken` response parameter to determine if
there are additional results available to return. Repeat the query, passing the
`PaginationToken` response parameter value as an input to the next request until
you receive a `null` value. A null value for `PaginationToken` indicates that
there are no more results waiting to be returned.
"""
def get_resources(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetResources", input, options)
end
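# A pagination sketch (assuming the usual aws-elixir `{:ok, result, response}`
# return shape; `client` is a configured `%AWS.Client{}` and `all_resources/3`
# is hypothetical):
#
#     defp all_resources(client, input \\ %{}, acc \\ []) do
#       {:ok, result, _response} = AWS.ResourceGroupsTaggingAPI.get_resources(client, input)
#       acc = acc ++ Map.get(result, "ResourceTagMappingList", [])
#       case Map.get(result, "PaginationToken") do
#         token when token in [nil, ""] -> acc
#         token -> all_resources(client, Map.put(input, "PaginationToken", token), acc)
#       end
#     end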
@doc """
Returns all tag keys currently in use in the specified Amazon Web Services
Region for the calling account.
This operation supports pagination, where the response can be sent in multiple
pages. You should check the `PaginationToken` response parameter to determine if
there are additional results available to return. Repeat the query, passing the
`PaginationToken` response parameter value as an input to the next request until
you receive a `null` value. A null value for `PaginationToken` indicates that
there are no more results waiting to be returned.
"""
def get_tag_keys(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetTagKeys", input, options)
end
@doc """
Returns all tag values for the specified key that are used in the specified
Amazon Web Services Region for the calling account.
This operation supports pagination, where the response can be sent in multiple
pages. You should check the `PaginationToken` response parameter to determine if
there are additional results available to return. Repeat the query, passing the
`PaginationToken` response parameter value as an input to the next request until
you receive a `null` value. A null value for `PaginationToken` indicates that
there are no more results waiting to be returned.
"""
def get_tag_values(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetTagValues", input, options)
end
@doc """
Generates a report that lists all tagged resources in the accounts across your
organization and tells whether each resource is compliant with the effective tag
policy.
Compliance data is refreshed daily. The report is generated asynchronously.
The generated report is saved to the following location:
`s3://example-bucket/AwsTagPolicies/o-exampleorgid/YYYY-MM-ddTHH:mm:ssZ/report.csv`
You can call this operation only from the organization's management account and
from the us-east-1 Region.
"""
def start_report_creation(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartReportCreation", input, options)
end
@doc """
Applies one or more tags to the specified resources.
Note the following:
* Not all resources can have tags. For a list of services with
resources that support tagging using this operation, see [Services that support the Resource Groups Tagging
API](https://docs.aws.amazon.com/resourcegroupstagging/latest/APIReference/supported-services.html).
If the resource doesn't yet support this operation, the resource's service might
support tagging using its own API operations. For more information, refer to the
documentation for that service.
* Each resource can have up to 50 tags. For other limits, see [Tag Naming and Usage
Conventions](https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html#tag-conventions)
in the *Amazon Web Services General Reference.*
* You can only tag resources that are located in the specified
Amazon Web Services Region for the Amazon Web Services account.
* To add tags to a resource, you need the necessary permissions for
the service that the resource belongs to as well as permissions for adding tags.
For more information, see the documentation for each service.
Do not store personally identifiable information (PII) or other confidential or
sensitive information in tags. We use tags to provide you with billing and
administration services. Tags are not intended to be used for private or
sensitive data.
## Minimum permissions
In addition to the `tag:TagResources` permission required by this operation, you
must also have the tagging permission defined by the service that created the
resource. For example, to tag an Amazon EC2 instance using the `TagResources`
operation, you must have both of the following permissions:
* `tag:TagResource`
* `ec2:CreateTags`
"""
def tag_resources(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResources", input, options)
end
@doc """
Removes the specified tags from the specified resources.
When you specify a tag key, the action removes both that key and its associated
value. The operation succeeds even if you attempt to remove tags from a resource
that were already removed. Note the following:
* To remove tags from a resource, you need the necessary permissions
for the service that the resource belongs to as well as permissions for removing
tags. For more information, see the documentation for the service whose resource
you want to untag.
* You can only tag resources that are located in the specified
Amazon Web Services Region for the calling Amazon Web Services account.
## Minimum permissions
In addition to the `tag:UntagResources` permission required by this operation,
you must also have the remove tags permission defined by the service that
created the resource. For example, to remove the tags from an Amazon EC2
instance using the `UntagResources` operation, you must have both of the
following permissions:
* `tag:UntagResource`
* `ec2:DeleteTags`
"""
def untag_resources(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResources", input, options)
end
end
|
lib/aws/generated/resource_groups_tagging_api.ex
| 0.907499
| 0.40439
|
resource_groups_tagging_api.ex
|
starcoder
|