code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule GenQueue do
  @moduledoc """
  A behaviour module for implementing queues.

  GenQueue relies on adapters to handle the specifics of how the queues
  are run. At its most simple, this can mean basic memory FIFO queues. At its
  most advanced, this can mean full async job queues with retries and
  backoffs. By providing a standard interface for such tools - ease in
  switching between different implementations is assured.

  ## Example

  The GenQueue behaviour abstracts the common queue interactions.
  Developers are only required to implement the callbacks and functionality
  they are interested in via adapters.

  Let's start with a simple FIFO queue:

      defmodule Queue do
        use GenQueue
      end

      # Start the queue
      Queue.start_link()

      # Push items into the queue
      Queue.push(:hello)
      #=> {:ok, :hello}
      Queue.push(:world)
      #=> {:ok, :world}

      # Pop items from the queue
      Queue.pop()
      #=> {:ok, :hello}
      Queue.pop()
      #=> {:ok, :world}

  We start our enqueuer by calling `start_link/1`. This call is then
  forwarded to our adapter. In this case, we don't specify an adapter
  anywhere, so it defaults to the simple FIFO queue implemented with
  the included `GenQueue.Adapters.Simple`.

  We can then add items into our simple FIFO queues with `push/2`, as
  well as remove them with `pop/1`.

  ## use GenQueue and adapters

  As we can see from above - implementing a simple queue is easy. But
  we can further extend our queues by creating our own adapters or by using
  external libraries. Simply specify the adapter name in your config.

      config :my_app, MyApp.Enqueuer, [
        adapter: MyApp.MyAdapter
      ]

      defmodule MyApp.Enqueuer do
        use GenQueue, otp_app: :my_app
      end

  The adapter can also be specified for the module in line:

      defmodule MyApp.Enqueuer do
        use GenQueue, adapter: MyApp.MyAdapter
      end

  We can then create our own adapter by creating an adapter module that handles
  the callbacks specified by `GenQueue.Adapter`.

      defmodule MyApp.MyAdapter do
        use GenQueue.Adapter

        def handle_push(gen_queue, item) do
          IO.inspect(item)
          {:ok, item}
        end
      end

  ## Current adapters

  Currently, the following adapters are available:

  * [GenQueue Exq](https://github.com/nsweeting/gen_queue_exq) - Redis-backed job queue.
  * [GenQueue TaskBunny](https://github.com/nsweeting/gen_queue_task_bunny) - RabbitMQ-backed job queue.
  * [GenQueue Verk](https://github.com/nsweeting/gen_queue_verk) - Redis-backed job queue.
  * [GenQueue OPQ](https://github.com/nsweeting/gen_queue_opq) - GenStage-backed job queue.

  ## Job queues

  One of the benefits of using `GenQueue` is that it can abstract common tasks
  like job enqueueing. We can then provide a common API for the various forms
  of job enqueing we would like to implement, as well as easily swap
  implementations.

  Please refer to the documentation for each adapter for more details.
  """

  @callback start_link(opts :: Keyword.t()) :: GenServer.on_start()

  @doc """
  Pushes an item to a queue

  ## Example

      case MyQueue.push(value) do
        {:ok, value} -> # Pushed with success
        {:error, _} -> # Something went wrong
      end
  """
  @callback push(item :: any, opts :: Keyword.t()) :: {:ok, any} | {:error, any}

  @doc """
  Same as `push/2` but returns the item or raises if an error occurs.
  """
  @callback push!(item :: any, opts :: Keyword.t()) :: any | no_return

  @doc """
  Pops an item from a queue

  ## Example

      case MyQueue.pop() do
        {:ok, value} -> # Popped with success
        {:error, _} -> # Something went wrong
      end
  """
  @callback pop(opts :: Keyword.t()) :: {:ok, any} | {:error, any}

  @doc """
  Same as `pop/1` but returns the item or raises if an error occurs.
  """
  @callback pop!(opts :: Keyword.t()) :: any | no_return

  @doc """
  Removes all items from a queue

  ## Example

      case MyQueue.flush() do
        {:ok, number_of_items} -> # Flushed with success
        {:error, _} -> # Something went wrong
      end
  """
  @callback flush(opts :: Keyword.t()) :: {:ok, integer} | {:error, any}

  @doc """
  Gets the number of items in a queue

  ## Example

      case MyQueue.length() do
        {:ok, number_of_items} -> # Counted with success
        {:error, _} -> # Something went wrong
      end
  """
  @callback length(opts :: Keyword.t()) :: {:ok, integer} | {:error, any}

  @doc """
  Returns the application config for a queue
  """
  @callback config :: Keyword.t()

  @doc """
  Returns the adapter for a queue
  """
  @callback adapter :: GenQueue.Adapter.t()

  @type t :: module

  # Used when neither an `:adapter` option nor application config provides one.
  @default_adapter GenQueue.Adapters.Simple

  defmacro __using__(opts) do
    quote bind_quoted: [opts: opts] do
      @behaviour GenQueue

      # Adapter and config are resolved once at compile time of the using module.
      @adapter GenQueue.adapter(__MODULE__, opts)
      @config GenQueue.config(__MODULE__, opts)

      def child_spec(arg) do
        %{
          id: __MODULE__,
          start: {__MODULE__, :start_link, [arg]}
        }
      end

      defoverridable child_spec: 1

      def start_link(opts \\ []) do
        apply(@adapter, :start_link, [__MODULE__, opts])
      end

      def push(item, opts \\ []) do
        apply(@adapter, :handle_push, [__MODULE__, item, opts])
      end

      def push!(item, opts \\ []) do
        case push(item, opts) do
          {:ok, item} -> item
          _ -> raise GenQueue.Error, "Failed to push item."
        end
      end

      def pop(opts \\ []) do
        apply(@adapter, :handle_pop, [__MODULE__, opts])
      end

      def pop!(opts \\ []) do
        case pop(opts) do
          {:ok, item} -> item
          _ -> raise GenQueue.Error, "Failed to pop item."
        end
      end

      def flush(opts \\ []) do
        apply(@adapter, :handle_flush, [__MODULE__, opts])
      end

      def length(opts \\ []) do
        apply(@adapter, :handle_length, [__MODULE__, opts])
      end

      def config do
        @config
      end

      def adapter do
        @adapter
      end
    end
  end

  @doc false
  @deprecated "Use adapter/2 instead"
  @spec config_adapter(GenQueue.t(), opts :: Keyword.t()) :: GenQueue.Adapter.t()
  # Kept only for backwards compatibility; delegates to `adapter/2` so the
  # lookup logic lives in a single place instead of being duplicated.
  def config_adapter(gen_queue, opts \\ []), do: adapter(gen_queue, opts)

  @doc """
  Get the adapter for a GenQueue module based on the options provided.

  If no adapter if specified, the default `GenQueue.Adapters.Simple` is returned.

  ## Options:
  * `:adapter` - The adapter to be returned.
  * `:otp_app` - An OTP application that has your GenQueue adapter configuration.

  ## Example

      GenQueue.adapter(MyQueue, [otp_app: :my_app])
  """
  @since "0.1.7"
  @spec adapter(GenQueue.t(), opts :: Keyword.t()) :: GenQueue.Adapter.t()
  def adapter(gen_queue, opts \\ [])

  def adapter(_gen_queue, adapter: adapter) when is_atom(adapter), do: adapter

  def adapter(gen_queue, otp_app: app) when is_atom(app) do
    app
    |> Application.get_env(gen_queue, [])
    |> Keyword.get(:adapter, @default_adapter)
  end

  def adapter(_gen_queue, _opts), do: @default_adapter

  @doc """
  Get the config for a GenQueue module based on the options provided.

  If an `:otp_app` option is provided, this will return the application config.
  Otherwise, it will return the options given.

  ## Options
  * `:otp_app` - An OTP application that has your GenQueue configuration.

  ## Example

      # Get the application config
      GenQueue.config(MyQueue, [otp_app: :my_app])

      # Returns the provided options
      GenQueue.config(MyQueue, [adapter: MyAdapter])
  """
  @since "0.1.7"
  # Spec fixed: this function returns a keyword list, not an adapter module.
  @spec config(GenQueue.t(), opts :: Keyword.t()) :: Keyword.t()
  def config(gen_queue, opts \\ [])

  def config(gen_queue, otp_app: app) when is_atom(app) do
    Application.get_env(app, gen_queue, [])
  end

  def config(_gen_queue, opts) when is_list(opts), do: opts
end
|
lib/gen_queue.ex
| 0.888822
| 0.587529
|
gen_queue.ex
|
starcoder
|
defmodule ScrapyCloudEx.Endpoints.Storage do
  @moduledoc """
  Documents commonalities between all storage endpoint-related functions.

  ## Format

  The `:format` option given as an optional parameter must be one of
  `:json`, `:csv`, `:html`, `:jl`, `:text`, `:xml`. If none is given, it
  defaults to `:json`. Note that not all functions will accept all format
  values.

  ### CSV options

  When requesting results in CSV format with `format: :csv`, additional
  configuration parameters must be provided within the value associated
  to the `:csv` key:

  * `:fields` - required, list of binaries indicating the fields to include,
    in order from left to right.
  * `:include_headers` - optional, boolean indicating whether to include the
    header names in the first row.
  * `:sep` - optional, separator character to use between cells.
  * `:quote` - optional, quote character.
  * `:escape` - optional, escape character.
  * `:lineend` - line end string.

  #### Example

  ```
  params = [format: :csv, csv: [fields: ~w(foo bar), include_headers: true]]
  ```

  ## Pagination

  The `:pagination` option must be a keyword list containing pagination-relevant
  options. Note that not all functions will accept all pagination options.
  Providing pagination options outside of the `:pagination` keyword list will
  result in a warning.

  Parameters:

  * `:count` - number of results to provide.
  * `:start` - skip results before the given one. See a note about format below.
  * `:startafter` - return results after the given one. See a note about format below.
  * `:index` - a non-zero positive offset to retrieve specific records. May be
    provided multiple times.

  While the `index` parameter is just a short `<entity_id>` (ex: `[index: 4]`), `start`
  and `startafter` parameters should have the full form with 4 sections
  `<project_id>/<spider_id>/<job_id>/<entity_id>` (ex: `[start: "1/2/3/4"]`, `[startafter: "1/2/3/3"]`).

  ### Example

  ```
  params = [format: :json, pagination: [count: 100, index: 101]]
  ```

  ## Meta parameters

  You can use the `:meta` parameter to return metadata for the record in addition to its core data.

  The following values are available:

  * `:_key` - the item key in the format `:project_id/:spider_id/:job_id/:item_no` (`t:String.t/0`).
  * `:_project` - the project id (`t:integer/0`).
  * `:_ts` - timestamp in milliseconds for when the item was added (`t:integer/0`).

  ### Example

  ```
  params = [meta: [:_key, :_ts]]
  ```
  """

  alias ScrapyCloudEx.Endpoints.Helpers

  # Recognized option atoms, kept as attributes so the accessors and the
  # format guard share a single source of truth.
  @pagination_params [:count, :index, :start, :startafter]
  @csv_params [:fields, :include_headers, :sep, :quote, :escape, :lineend]
  @meta_params [:_key, :_project, :_ts]
  @valid_formats [:json, :jl, :xml, :csv, :text, :html]

  @doc false
  @spec pagination_params() :: [atom, ...]
  def pagination_params(), do: @pagination_params

  @doc false
  @spec csv_params() :: [atom, ...]
  def csv_params(), do: @csv_params

  @doc false
  @spec meta_params() :: [atom, ...]
  def meta_params(), do: @meta_params

  @doc false
  # `nil` (format not provided) is accepted and falls back to the default
  # format downstream; anything else must be one of the known format atoms.
  @spec validate_format(any) :: :ok | {:invalid_param, {atom, any}}
  def validate_format(format) when is_nil(format) or format in @valid_formats, do: :ok

  def validate_format(format) do
    "expected format '#{inspect(format)}' to be one of: #{inspect(@valid_formats)}"
    |> Helpers.invalid_param_error(:format)
  end
end
|
lib/endpoints/storage.ex
| 0.944035
| 0.958343
|
storage.ex
|
starcoder
|
defmodule Mix.Tasks.Renew do
  use Mix.Task

  @shortdoc "Creates a new Elixir project based on Nebo #15 requirements."

  @moduledoc """
  Creates a new Elixir project.
  It expects the path of the project as argument.

      mix renew PATH [--module MODULE] [--app APP] [--umbrella | --ecto --amqp --sup --phoenix] [--ci --docker]

  A project at the given PATH will be created. The
  application name and module name will be retrieved
  from the path, unless `--module` or `--app` is given.

  When you run command from `apps/` path within umbrella application,
  different project structure will be applied.

  A `--sup` option can be given to generate an OTP application
  skeleton including a supervision tree. Normally an app is
  generated without a supervisor and without the app callback.

  An `--umbrella` option can be given to generate an
  umbrella project. When you add this flag `--ecto`, `--sup`,
  `--amqp`, `--phoenix` options will be ignored.

  An `--app` option can be given in order to
  name the OTP application for the project.

  A `--module` option can be given in order
  to name the modules in the generated code skeleton.

  A `--docker` option can be given in order
  to add Docker build strategy in the generated code skeleton.

  A `--ci` option can be given in order
  to add CI tools in the generated code skeleton.

  A `--heroku` option will setup a Heroku auto-deploy and
  easy install script (works only when both `--ci` and `--docker`
  options are enabled).

  A `--ecto` option can be given in order
  to add Ecto in the generated code skeleton.

  `--ecto-db` - specify the database adapter for ecto.
  Values can be `postgres`, `mysql`. Defaults to `postgres`.

  A `--phoenix` option can be given in order
  to add Phoenix Framework in the generated code skeleton.

  A `--amqp` option can be given in order
  to add Rabbit MQ client (AMQP) in the generated code skeleton.

  A `--docs` option will auto-generate API and deployment
  documentation templates.

  A `--repo-slug` option will set GitHub repo slug in app
  description and docs.

  ## Examples

      mix renew hello_world

  Is equivalent to:

      mix renew hello_world --module HelloWorld

  To generate an app with supervisor and application callback:

      mix renew hello_world --sup

  Recommended usage:

      mix renew hello_world --ci --docker --sup
  """

  # Optional generators; each is consulted via `apply?/1` against the
  # assembled assigns and applied only when it opts in.
  @generator_plugins [
    Renew.Generators.Supervisor,
    Renew.Generators.Ecto,
    Renew.Generators.Phoenix,
    Renew.Generators.Docker,
    Renew.Generators.CI,
    Renew.Generators.Heroku,
    Renew.Generators.AMQP,
    Renew.Generators.Docs,
  ]

  # Command-line switches accepted by `mix renew`.
  @switches [
    docker: :boolean,
    ci: :boolean,
    sup: :boolean,
    ecto: :boolean,
    ecto_db: :string,
    amqp: :boolean,
    phoenix: :boolean,
    docs: :boolean,
    heroku: :boolean,
    umbrella: :boolean,
    app: :string,
    module: :string,
    repo_slug: :string,
  ]

  @spec run(OptionParser.argv) :: :ok
  def run(argv) do
    {opts, argv} = OptionParser.parse!(argv, strict: @switches, aliases: [db: :ecto_db])

    # Normalize opts structure. The computed defaults are appended to the
    # parsed options; when the keyword list is collected into a map with
    # `Enum.into/2` below, the later (appended) values win for duplicate keys.
    opts = opts ++ [
      docker: opts[:docker] || false,
      ci: opts[:ci] || false,
      sup: opts[:phoenix] || opts[:sup] || false, # Phoenix requires supervisor
      ecto: opts[:ecto] || false,
      amqp: opts[:amqp] || false,
      heroku: opts[:heroku] || false,
      # Keyword.get/3 (not `opts[:docs] || true`) so an explicit `--no-docs`
      # is honored; `false || true` would silently turn it back into `true`.
      docs: Keyword.get(opts, :docs, true),
      ecto_db: opts[:ecto_db] || "postgres",
      phoenix: opts[:phoenix] || false,
      umbrella: opts[:umbrella] || false,
      repo_slug: opts[:repo_slug] || false,
    ]

    case argv do
      [] ->
        Mix.raise ~S(Expected PATH to be given, please use "mix renew PATH")

      [path | _] ->
        # Get module and app names
        dirname = opts[:app] || Path.basename(Path.expand(path))
        app = String.replace(dirname, ["-", "."], "_")
        check_application_name!(app, !!opts[:app])
        mod = opts[:module] || Macro.camelize(app)
        check_module_name_validity!(mod)
        check_module_name_availability!(mod)
        check_directory_existence!(app)

        repo =
          if opts[:repo_slug],
            do: "https://github.com/" <> opts[:repo_slug],
            else: "https://github.com/Nebo15/#{dirname}"

        # Create project path
        File.mkdir_p!(path)

        # Assigns for EEx templates used by the generators.
        assigns = opts
        |> Enum.into(%{})
        |> Map.merge(%{
          module_name: mod,
          directory_name: dirname,
          application_name: app,
          repo: repo,
          in_umbrella: in_umbrella?(path),
          elixir_version: get_version(System.version),
          elixir_minor_version: get_minor_version(System.version),
          project_dependencies: [],
          project_applications: [],
          project_start_module: "",
          project_settings: [],
          project_compilers: [],
          config: "",
          config_test: "",
          config_dev: "",
          config_prod: "",
          erlang_cookie: random_string(64),
          secret_key_base: random_string(64),
          secret_key_base_prod: random_string(64),
          signing_salt: random_string(8),
          has_custom_module_name?: Macro.camelize(app) != mod
        })

        # Keep only the generators that opt in for this set of assigns.
        gens = @generator_plugins
        |> Enum.filter(fn module -> apply(module, :apply?, [assigns]) end)

        # Print begin message
        get_begin_message(assigns)
        |> Mix.shell.info

        # Apply project templates: base Mix generator first, then the
        # selected plugins, for both the settings and the template phases.
        {path, assigns}
        |> Renew.Generators.Mix.apply_settings
        |> (&apply_generators_settings(gens, &1)).()
        |> Renew.Generators.Mix.apply_template
        |> (&apply_generators_templates(gens, &1)).()

        # Print success message
        !!opts[:umbrella]
        |> get_success_message(dirname)
        |> EEx.eval_string(assigns: Enum.to_list(opts))
        |> String.trim_trailing()
        |> Mix.shell.info
    end
  end

  # Folds each generator's `apply_settings/1` over the {path, assigns} pair.
  defp apply_generators_settings(generators, {path, assigns}) do
    generators
    |> Enum.reduce({path, assigns}, fn module, acc -> apply(module, :apply_settings, [acc]) end)
  end

  # Folds each generator's `apply_template/1` over the {path, assigns} pair.
  defp apply_generators_templates(generators, {path, assigns}) do
    generators
    |> Enum.reduce({path, assigns}, fn module, acc -> apply(module, :apply_template, [acc]) end)
  end

  # Ensures the OTP application name is a valid lowercase snake_case atom-like
  # name; the hint about `--app` is only shown when the name was inferred.
  defp check_application_name!(name, from_app_flag) do
    unless name =~ ~r/^[a-z][\w_]*$/ do
      extra =
        if !from_app_flag do
          ". The application name is inferred from the path, if you'd like to " <>
            "explicitly name the application then use the `--app APP` option."
        else
          ""
        end

      Mix.raise "Application name must start with a letter and have only lowercase " <>
                  "letters, numbers and underscore, got: #{inspect name}" <> extra
    end
  end

  # Ensures the module name is a valid Elixir alias (e.g. Foo.Bar).
  defp check_module_name_validity!(name) do
    unless name =~ ~r/^[A-Z]\w*(\.[A-Z]\w*)*$/ do
      Mix.raise "Module name must be a valid Elixir alias (for example: Foo.Bar), got: #{inspect name}"
    end
  end

  # Refuses to shadow a module that is already loadable in this VM.
  defp check_module_name_availability!(name) do
    name = Module.concat(Elixir, name)

    if Code.ensure_loaded?(name) do
      Mix.raise "Module name #{inspect name} is already taken, please choose another name"
    end
  end

  # Asks for confirmation before generating into an existing directory.
  def check_directory_existence!(name) do
    if File.dir?(name) && !Mix.shell.yes?("The directory #{name} already exists. Are you sure you want to continue?") do
      Mix.raise "Please select another directory for installation."
    end
  end

  # NOTE(review): despite the name, this returns the full "major.minor.patch"
  # string, while get_version/1 returns only "major.minor[-pre]".
  defp get_minor_version(version) do
    {:ok, version} = Version.parse(version)
    "#{version.major}.#{version.minor}.#{version.patch}"
  end

  defp get_version(version) do
    {:ok, version} = Version.parse(version)

    "#{version.major}.#{version.minor}" <>
      case version.pre do
        [h | _] -> "-#{h}"
        [] -> ""
      end
  end

  # Cryptographically random, base64-encoded string truncated to `length`.
  defp random_string(length) do
    :crypto.strong_rand_bytes(length) |> Base.encode64 |> binary_part(0, length)
  end

  # True when `app_path` sits two levels under an umbrella root (its
  # grandparent has a mix.exs whose :apps_path points at the parent dir).
  defp in_umbrella?(app_path) do
    try do
      umbrella = Path.expand(Path.join [app_path, "..", ".."]) # TODO debug

      File.exists?(Path.join(umbrella, "mix.exs")) &&
        Mix.Project.in_project(:umbrella_check, umbrella, fn _ ->
          path = Mix.Project.config[:apps_path]
          path && Path.expand(path) == Path.join(umbrella, "apps")
        end)
    catch
      _, _ -> false
    end
  end

  def web_prefix do
    app = to_string(otp_app())

    if in_umbrella?(File.cwd!()) do
      Path.join("lib", app)
    else
      Path.join(["lib", app, "web"])
    end
  end

  def test_prefix do
    if in_umbrella?(File.cwd!()) do
      "test"
    else
      "test/web"
    end
  end

  def otp_app do
    Mix.Project.config |> Keyword.fetch!(:app)
  end

  defp get_begin_message(%{umbrella: true} = opts) do
    """
    Starting generation of Elixir umbrella project..
    Your settings will include:
    - Distillery release manager
    - Confex environment variables helper<%= if @ci do %>
    - Code Coverage, Analysis and Benchmarking tools
    - Setup for Travis-CI Continuous Integration<% end %><%= if @docker do %>
    - Docker container build config and scripts<% end %>
    """
    |> EEx.eval_string(assigns: Enum.to_list(opts))
  end

  defp get_begin_message(opts) do
    """
    Starting generation of Elixir project..
    Your settings will include:
    - Distillery release manager
    - Confex environment variables helper<%= if @in_umbrella do %>
    - Parent umbrella application bindings<% end %><%= if @sup do %>
    - Application supervisor<% end %><%= if @ecto do %>
    - Ecto database wrapper with <%= @ecto_db %> adapter.<% end %><%= if @phoenix do %>
    - Phoenix Framework
    - Multiverse response compatibility layers<% end %><%= if @amqp do %>
    - AMQP RabbitMQ wrapper<% end %><%= if @ci do %>
    - Code Coverage, Analysis and Benchmarking tools
    - Setup for Travis-CI Continuous Integration
    - Pre-Commit hooks to keep code clean, run `$ ./bin/install-git-hooks.sh`.<% end %><%= if @docker do %>
    - Docker container build config and scripts<% end %><%= if @heroku do %>
    - Heroku auto-deploy scripts, see `./bin/ci/deploy.sh`.<% end %>
    """
    |> EEx.eval_string(assigns: Enum.to_list(opts))
  end

  defp get_success_message(true, application_dir) do
    """
    Your umbrella project was created successfully.
    Inside your project, you will find an apps/ directory
    where you can create and host many apps:
        cd #{application_dir}
        cd apps
        mix renew my_app
    Commands like "mix compile" and "mix test" when executed
    in the umbrella project root will automatically run
    for each application in the apps/ directory.
    """
  end

  defp get_success_message(false, application_dir) do
    """
    Your Mix project was created successfully.
    You can use "mix" to compile it, test it, and more:
        cd #{application_dir}
        git init<%= if @docker do %>
        git submodule add -b elixir https://github.com/Nebo15/ci-utils.git bin/ci/release<% end %>
        mix test
    Run "mix help" for more commands.
    """
  end
end
|
lib/mix/renew.ex
| 0.805326
| 0.509703
|
renew.ex
|
starcoder
|
defmodule AdventOfCode.Day13 do
  import AdventOfCode.Utils

  @typep coords :: {integer(), integer()}
  @typep fold :: {:x | :y, integer()}

  @doc false
  @spec part1([binary()]) :: integer()
  def part1(args) do
    # Only the first fold instruction matters for part 1.
    {dots, [first_fold | _]} = parse_args(args)

    first_fold
    |> apply_fold(dots)
    |> Enum.count()
  end

  @doc false
  @spec part2([binary()]) :: String.t()
  def part2(args) do
    {dots, folds} = parse_args(args)

    folds
    |> Enum.reduce(dots, &apply_fold/2)
    |> render_solution()
  end

  # Folds the paper along the given axis: dots below/left of the crease stay
  # put, the rest are mirrored across it. Overlapping dots collapse via the set.
  @spec apply_fold(fold(), MapSet.t(coords())) :: MapSet.t(coords)
  defp apply_fold({:x, axis}, dots) do
    dots
    |> Enum.map(fn
      {x, y} when x < axis -> {x, y}
      {x, y} -> {axis - (x - axis), y}
    end)
    |> MapSet.new()
  end

  defp apply_fold({:y, axis}, dots) do
    dots
    |> Enum.map(fn
      {x, y} when y < axis -> {x, y}
      {x, y} -> {x, axis - (y - axis)}
    end)
    |> MapSet.new()
  end

  # Draws the dot grid as text, one row per line.
  @spec render_solution(MapSet.t(coords())) :: String.t()
  defp render_solution(dots) do
    max_x = dots |> Enum.map(&elem(&1, 0)) |> Enum.max()
    max_y = dots |> Enum.map(&elem(&1, 1)) |> Enum.max()

    Enum.map_join(0..max_y, "\n", fn y ->
      Enum.map_join(0..max_x, fn x ->
        if(MapSet.member?(dots, {x, y}), do: "•", else: " ")
      end)
    end)
  end

  # Splits the input at the blank line: coordinates above, fold lines below.
  @spec parse_args([binary()]) :: {MapSet.t(coords()), [fold()]}
  defp parse_args(args) do
    {coord_lines, [_blank | fold_lines]} = Enum.split_while(args, &(String.length(&1) != 0))

    dots = coord_lines |> Enum.map(&parse_coord/1) |> MapSet.new()
    folds = Enum.map(fold_lines, &parse_instruction/1)

    {dots, folds}
  end

  # Skips the "fold along " prefix (11 bytes), then reads the axis letter and value.
  defp parse_instruction(<<_::binary-size(11), type, _, axis::binary>>),
    do: {parse_type(type), parse_int!(axis)}

  defp parse_coord(line) do
    line
    |> String.split(",")
    |> Enum.map(&parse_int!/1)
    |> List.to_tuple()
  end

  defp parse_type(?x), do: :x
  defp parse_type(?y), do: :y
end
|
lib/advent_of_code/day_13.ex
| 0.734215
| 0.428831
|
day_13.ex
|
starcoder
|
defmodule Brook.Storage do
  @moduledoc """
  Defines the `Brook.Storage` behaviour that must be implemented by
  storage driver processes. Starts a process and defines a child specification
  for including the driver in Brook's supervision tree.

  Implements the CRUD functionality for persisting events to the application
  view state within the storage driver and subsequent retrieval.
  """

  @doc """
  Start the storage driver and link it to the current process.
  """
  @callback start_link(instance: Brook.instance()) :: GenServer.on_start()

  @doc """
  Define a child specification for including the storage driver in the Brook
  supervision tree.
  """
  @callback child_spec(term()) :: Supervisor.child_spec()

  @doc """
  Save a value from a processed event to the application view state
  stored on the underlying storage system. Events are saved to a collection of
  related events under a given identifying key.

  The event is simultaneously stored under a different key to serve as a log of
  all events that produced or modified the value saved to the given key and collection.
  """
  @callback persist(Brook.instance(), Brook.Event.t(), Brook.view_collection(), Brook.view_key(), Brook.view_value()) ::
              :ok | {:error, Brook.reason()}

  @doc """
  Delete the record of a saved value from the view state within a given collection and
  identified by a given key.
  """
  @callback delete(Brook.instance(), Brook.view_collection(), Brook.view_key()) :: :ok | {:error, Brook.reason()}

  @doc """
  Return a value from the persisted view state stored within a collection and
  identified by a key.
  """
  @callback get(Brook.instance(), Brook.view_collection(), Brook.view_key()) ::
              {:ok, Brook.view_value()} | {:error, Brook.reason()}

  @doc """
  Return all values saved to the application view state within the storage system
  under a given collection. Events are returned as a map with the identifying keys as keys and the
  saved values as values.
  """
  @callback get_all(Brook.instance(), Brook.view_collection()) ::
              {:ok, %{required(Brook.view_key()) => Brook.view_value()}} | {:error, Brook.reason()}

  @doc """
  Return a list of events that produced a value saved to the application view state
  within the storage system under a given collection and identifying key.
  """
  @callback get_events(Brook.instance(), Brook.view_collection(), Brook.view_key()) ::
              {:ok, list(Brook.Event.t())} | {:error, Brook.reason()}

  @doc """
  Same as `get_events/3` but takes an additional `Brook.event_type()` argument —
  presumably restricting the returned events to that type; confirm against
  driver implementations.
  """
  @callback get_events(Brook.instance(), Brook.view_collection(), Brook.view_key(), Brook.event_type()) ::
              {:ok, list(Brook.Event.t())} | {:error, Brook.reason()}
end
|
lib/brook/storage.ex
| 0.8477
| 0.551211
|
storage.ex
|
starcoder
|
defmodule Cldr.LanguageTag.Parser do
  @moduledoc """
  Parses a CLDR language tag (also referred to as locale string).

  The applicable specification is from [CLDR](http://unicode.org/reports/tr35/#Unicode_Language_and_Locale_Identifiers)
  which is similar based upon [RFC5646](https://tools.ietf.org/html/rfc5646) with some variations.
  """

  alias Cldr.LanguageTag
  alias Cldr.Locale
  alias Cldr.LanguageTag.{U, T}

  @doc """
  Parse a locale name into a `t:Cldr.LanguageTag.t/0`

  * `locale_name` is a string representation of a language tag
    as defined by RFC5646

  Returns

  * `{:ok, language_tag}` or

  * `{:error, reasons}`
  """
  def parse(locale) do
    # A non-matching {:error, reason} from the RFC5646 parser falls through
    # the `with` and is returned unchanged.
    with {:ok, parsed} <- Cldr.Rfc5646.Parser.parse(normalize_locale_name(locale)) do
      normalized =
        LanguageTag
        |> struct(parsed)
        |> Map.put(:requested_locale_name, locale)
        |> normalize_language()
        |> normalize_script()
        |> normalize_variants()
        |> normalize_territory()
        |> U.canonicalize_locale_keys()
        |> T.canonicalize_transform_keys()

      {:ok, normalized}
    end
  end

  @doc """
  Parse a locale name into a `t:Cldr.LanguageTag.t/0`

  * `locale_name` is a string representation of a language tag
    as defined by RFC5646

  Returns

  * `language_tag` or

  * raises an exception
  """
  def parse!(locale) do
    case parse(locale) do
      {:ok, tag} -> tag
      {:error, {exception, reason}} -> raise exception, reason
    end
  end

  defp normalize_locale_name(name) do
    name
    |> String.downcase()
    |> Locale.locale_name_from_posix()
  end

  # Language subtags are canonically lowercase.
  defp normalize_language(%LanguageTag{language: nil} = tag), do: tag

  defp normalize_language(%LanguageTag{language: language} = tag),
    do: %{tag | language: String.downcase(language)}

  # Script subtags are canonically titlecase (e.g. "Latn").
  defp normalize_script(%LanguageTag{script: nil} = tag), do: tag

  defp normalize_script(%LanguageTag{script: script} = tag),
    do: %{tag | script: script |> String.downcase() |> String.capitalize()}

  defp normalize_territory(%LanguageTag{territory: nil} = tag), do: tag

  # Numeric (UN M49) territories are zero-padded to three digits before
  # validation; an unrecognized code is kept in its padded string form.
  defp normalize_territory(%LanguageTag{territory: territory} = tag)
       when is_integer(territory) do
    padded =
      cond do
        territory < 10 -> "00#{territory}"
        territory < 100 -> "0#{territory}"
        true -> "#{territory}"
      end

    validated =
      case Cldr.validate_territory(padded) do
        {:ok, valid} -> valid
        {:error, _} -> padded
      end

    %{tag | territory: validated}
  end

  # Alpha territories that fail validation are dropped (set to nil).
  defp normalize_territory(%LanguageTag{territory: territory} = tag) do
    case Cldr.validate_territory(territory) do
      {:ok, valid} -> %{tag | territory: valid}
      {:error, _} -> %{tag | territory: nil}
    end
  end

  # Variant subtags are canonically uppercase.
  defp normalize_variants(%LanguageTag{language_variant: nil} = tag), do: tag

  defp normalize_variants(%LanguageTag{language_variant: variant} = tag),
    do: %{tag | language_variant: String.upcase(variant)}

  @doc false
  # Canonicalizes a `{key, value}` pair against a `[key, validator, default]`
  # spec: a function validator is applied, `:any` accepts anything (falling
  # back to the default when nil), and a list validator acts as a membership check.
  def canonicalize_key([key, validator, default], param) when is_function(validator) do
    case validator.(param) do
      {:ok, value} -> {key, value}
      {:error, _} -> {key, default}
    end
  end

  def canonicalize_key([key, :any, default], param), do: {key, param || default}

  def canonicalize_key([key, valid_values, default], param) do
    if param in valid_values do
      {key, param}
    else
      {key, default}
    end
  end

  def canonicalize_key(key, value) when is_atom(key), do: {key, value}
end
|
lib/cldr/language_tag/parser.ex
| 0.897131
| 0.634458
|
parser.ex
|
starcoder
|
defmodule NioDokku do
  @moduledoc """
  Thin wrapper for running `dokku` commands on a remote host over SSH
  (via `SSHEx`).
  """

  @doc """
  Connects to a specific IP address using SSH. Returns the PID for the invoked
  connection or returns the atom describing the error if there was an error
  connecting.
  """
  def dokku(ip) when is_binary(ip), do: connect(ip)

  @doc """
  Takes a PID and a string and runs the string as a command appended to
  dokku --force. Returns a list of Strings
  """
  def dokku(pid, command) when is_pid(pid) do
    # SSHEx expects a charlist command; String.to_charlist/1 replaces the
    # long-deprecated (and removed) String.to_char_list/1.
    cmd = String.to_charlist("dokku --force " <> sanitize_input(command))

    case SSHEx.cmd!(pid, cmd, channel_timeout: 60_000, exec_timeout: 60_000) do
      {_, stderr} -> {pid, stderr |> clean_output}
      stdout -> {pid, stdout |> clean_output}
    end
  end

  @doc """
  Takes the PID and output from a previous dokku/2 call and chains the next
  command to it
  """
  def dokku({pid, _output}, command) when is_pid(pid) do
    dokku(pid, command)
  end

  @doc """
  Convenience function to not return the PID and just the output
  """
  def dokku!(pid, command) when is_pid(pid) do
    {_, output} = dokku(pid, command)
    output
  end

  @doc """
  Convenience function to not return the PID and just the output
  """
  def dokku!({pid, _output}, command) when is_pid(pid) do
    {_, output} = dokku(pid, command)
    output
  end

  @doc """
  Returns a list of all the commands available to the dokku command line.
  """
  def available_commands(pid) do
    {:ok, output, _} = SSHEx.run(pid, String.to_charlist("dokku help"))

    output
    |> String.split("\n")
    |> Enum.filter(fn line ->
      # Command lines in `dokku help` output are indented by 4+ spaces.
      String.match?(line, ~r/\s{4}/)
    end)
    |> Enum.map(fn cmd ->
      cmd
      |> String.trim()
      |> String.split(" ")
      |> Enum.at(0)
    end)
  end

  # Formats the SSH output: trims each line and drops empty ones.
  # String.trim/1 replaces the deprecated String.strip/1.
  defp clean_output(output) do
    output
    |> String.split("\n")
    |> Stream.map(&String.trim/1)
    |> Enum.filter(&(&1 != ""))
  end

  # Connects to passed IP using specified SSH private keys
  defp connect(ip) do
    case SSHEx.connect(
           ip: String.to_charlist(ip),
           user: 'root',
           user_dir: Application.get_env(:nio_dokku, :ssh_key_path) |> String.to_charlist()
         ) do
      {:ok, conn} -> conn
      {:error, reason} -> reason
    end
  end

  # Sanitizes the input trying to avoid 'help; rm -rf /'
  defp sanitize_input(string) do
    String.replace(string, ~r/[^a-zA-Z0-9_\-:\/.\s=]/, "")
  end
end
|
lib/nio_dokku.ex
| 0.784732
| 0.462776
|
nio_dokku.ex
|
starcoder
|
defmodule ArangoXEcto.GeoData do
  @moduledoc """
  Methods for interacting with ArangoDB GeoJSON and geo related functions

  The methods within this module are really just helpers to generate `Geo` structs.
  """

  @type coordinate :: number()

  defguard is_coordinate(coordinate) when is_float(coordinate) or is_integer(coordinate)

  defguard is_latitude(coordinate)
           when is_coordinate(coordinate) and coordinate >= -90 and coordinate <= 90

  defguard is_longitude(coordinate)
           when is_coordinate(coordinate) and coordinate >= -180 and coordinate <= 180

  @doc """
  Generates a Geo point
  """
  @spec point(coordinate(), coordinate()) :: Geo.Point.t()
  def point(lat, lon) when is_latitude(lat) and is_longitude(lon) do
    # Geo (and GeoJSON) stores coordinates as {longitude, latitude}.
    %Geo.Point{coordinates: {lon, lat}}
  end

  def point(_, _), do: raise(ArgumentError, "Invalid coordinates provided")

  @doc """
  Generates a Geo multi point
  """
  @spec multi_point([{coordinate(), coordinate()}]) :: Geo.MultiPoint.t()
  def multi_point(coords) do
    %Geo.MultiPoint{coordinates: convert_each(coords)}
  end

  @doc """
  Generates a Geo linestring
  """
  @spec linestring([{coordinate(), coordinate()}]) :: Geo.LineString.t()
  def linestring(coords) do
    %Geo.LineString{coordinates: convert(coords)}
  end

  @doc """
  Generates a Geo multi linestring
  """
  @spec multi_linestring([[{coordinate(), coordinate()}]]) :: Geo.MultiLineString.t()
  def multi_linestring(coords) do
    %Geo.MultiLineString{coordinates: convert_each(coords)}
  end

  @doc """
  Generates a Geo polygon
  """
  @spec polygon([[{coordinate(), coordinate()}]]) :: Geo.Polygon.t()
  def polygon(coords) do
    %Geo.Polygon{coordinates: coords |> convert_each() |> nest_unless_nested()}
  end

  @doc """
  Generates a Geo multi polygon
  """
  @spec multi_polygon([[[{coordinate(), coordinate()}]]]) :: Geo.MultiPolygon.t()
  def multi_polygon(coords) do
    %Geo.MultiPolygon{coordinates: coords |> convert_each() |> nest_unless_nested()}
  end

  @doc """
  Sanitizes coordinates to ensure they are valid

  This function is not automatically applied to Geo constructors and must be applied before hand
  """
  @spec sanitize(list() | {coordinate(), coordinate()}) :: list() | {coordinate(), coordinate()}
  def sanitize({lat, lon} = coords) when is_latitude(lat) and is_longitude(lon), do: coords
  # Out-of-range values are wrapped back into range by repeated shifting.
  def sanitize({lat, lon}) when lat < -90, do: sanitize({lat + 180, lon})
  def sanitize({lat, lon}) when lat > 90, do: sanitize({lat - 180, lon})
  def sanitize({lat, lon}) when lon < -180, do: sanitize({lat, lon + 360})
  def sanitize({lat, lon}) when lon > 180, do: sanitize({lat, lon - 360})
  def sanitize(coord_list) when is_list(coord_list), do: Enum.map(coord_list, &sanitize/1)

  # Applies convert/1 to every element of a list of coordinate groups.
  defp convert_each(coords_list), do: Enum.map(coords_list, &convert/1)

  # Validates a {lat, lon} pair and swaps it into Geo's {lon, lat} order;
  # recurses through nested lists.
  defp convert({lat, lon}) when is_latitude(lat) and is_longitude(lon), do: {lon, lat}

  defp convert({lat, lon}),
    do: raise(ArgumentError, "Invalid coordinates provided: {#{lat}, #{lon}}")

  defp convert(coords) when is_tuple(coords),
    do: raise(ArgumentError, "Invalid number of coordinate tuple")

  defp convert([head | tail]), do: [convert(head) | convert(tail)]
  defp convert([]), do: []

  # Wraps a single flat ring of coordinates in an outer list (as Geo
  # polygons expect a list of rings); already-nested input passes through.
  defp nest_unless_nested([{_, _} | _] = ring), do: [ring]
  defp nest_unless_nested(coords), do: coords
end
|
lib/arangox_ecto/geodata.ex
| 0.911574
| 0.799677
|
geodata.ex
|
starcoder
|
defmodule Elsol do
  use HTTPoison.Base

  @default_recv_timeout 30_000

  # NOTE(fix): the original `def query(query_arg, headers \\ [], options \\ [])`
  # (and its `query!` twin) generated query/1 and query/2 clauses that collide
  # with the explicit query/1 and query/2 definitions above them — "def query/1
  # has multiple clauses and also declares default values" does not compile.
  # The defaults are expanded into explicit arities below; dispatch semantics
  # are unchanged (integer second arg = recv_timeout, anything else = headers).

  @doc "Runs a GET query with a default receive timeout of 30s. (deprecated arity)"
  def query(query_arg), do: _query(query_arg, false, [], recv_timeout: @default_recv_timeout)

  @doc "Second argument is a `recv_timeout` when integer, otherwise a header list."
  def query(query_arg, timeout) when is_integer(timeout),
    do: _query(query_arg, false, [], recv_timeout: timeout)

  def query(query_arg, headers), do: _query(query_arg, false, headers, [])

  @doc "Runs a GET query with explicit headers and HTTPoison options."
  def query(query_arg, headers, options), do: _query(query_arg, false, headers, options)

  @doc "Same as `query/1` but raises on failure."
  def query!(query_arg), do: _query(query_arg, true, [], recv_timeout: @default_recv_timeout)

  def query!(query_arg, timeout) when is_integer(timeout),
    do: _query(query_arg, true, [], recv_timeout: timeout)

  def query!(query_arg, headers), do: _query(query_arg, true, headers, [])

  def query!(query_arg, headers, options), do: _query(query_arg, true, headers, options)

  @doc false
  def _query(query_arg, bang \\ false, headers \\ [], options \\ []) do
    method = if bang, do: :get!, else: :get

    # Binary arguments are taken as ready-made URLs; structs go through build_query/1.
    url = if is_binary(query_arg), do: query_arg, else: build_query(query_arg)
    apply(__MODULE__, method, [url, headers, options])
  end

  @doc false
  def _update(struct, docs \\ [], bang \\ false) do
    {method, {status, json_docs}} =
      cond do
        # No documents: issue a plain GET (direct commands such as commit/optimize).
        is_list(docs) and docs == [] ->
          {if(bang, do: :get!, else: :get), {:ok, []}}

        bang ->
          {:post!, _decoded(docs)}

        true ->
          {:post, _decoded(docs)}
      end

    if status == :ok do
      apply(__MODULE__, method, [
        build_query(struct),
        json_docs,
        [{"Content-type", "application/json"}]
      ])
    else
      # Encoding failed — propagate {:error, reason} without hitting the wire.
      {status, json_docs}
    end
  end

  @doc false
  def _decoded(docs) do
    cond do
      is_list(docs) and is_map(hd(docs)) -> Poison.encode(docs)
      is_binary(docs) -> {:ok, docs}
      true -> {:error, "Unknown solr documents"}
    end
  end

  @doc """
  Send a list of solr_docs to an update handler using `%Elsol.Query.Update{}` struct,
  e.g. `Elsol.update(%Elsol.Query.Update{url: config_key, name: "/update"})`. See `build_query`
  for more details.
  solr_docs can be:
  - a List of field-value documents (in Map)
  - encoded JSON field-value array string
  - see `https://wiki.apache.org/solr/UpdateJSON`
  Other update message formats such as CSV, XML are currently not supported.
  Raw 'add doc' update messages (atomic updates), and other update commands
  such as 'delete', 'commit' can also be issued as part of the encoded
  JSON string (`solr_docs`) for JSON update handler.
  Direct update commands can also be issued using the `%Elsol.Query.Update{}` struct:
  - `Elsol.update(%Elsol.Query.Update{url: config_key, commit: "true", expungeDeletes: "true"})`
  - `Elsol.update(%Elsol.Query.Update{url: config_key, optimize: "true", maxSegments: 10})`
  """
  def update(struct, docs \\ []), do: _update(struct, docs, false)

  @doc "Same as `update/2` but raises on failure."
  def update!(struct, docs \\ []), do: _update(struct, docs, true)

  @doc """
  Build solr query with `%Elsol.Query{}` structs. See `Elsol.Query` for more details.
  Configuring endpoints:
  - default `url` setting in application config (`config :elsol`), in `config/config.exs` or other config files
  - configure multiple Solr endpoints in application config with custom keys
  Using endpoints during runtime:
  - `url` setting in app config is applied by default
  - specify custom key in query struct (`%Elsol.Query{url: config_key}`) for other pre-defined endpoints in app config
  - directly specify any Solr endpoint via `%Elsol.Query{url: "http://solr_endpoint"}`
  """
  def build_query(query_struct) when is_map(query_struct) do
    url = Map.get(query_struct, :url)

    full_url =
      cond do
        is_bitstring(url) && String.match?(url, ~r/^http(s)?:\/\//) -> url
        is_bitstring(url) -> "http://" <> url
        is_nil(url) -> Application.get_env(:elsol, :url)
        is_atom(url) -> Application.get_env(:elsol, url)
        true -> "" # we must just not have a host?
      end

    full_url <> Elsol.Query.build(query_struct)
  end

  # Decode Solr JSON responses (recognised by the responseHeader prefix).
  def process_response_body("{\"responseHeader\":{" <> body) do
    Poison.decode!("{\"responseHeader\":{" <> body)
  end

  # to fix: decode other types of Solr data, returns iodata for now
  # https://cwiki.apache.org/confluence/display/solr/Response+Writers
  def process_response_body(body) do
    body
  end
end
|
lib/elsol.ex
| 0.77552
| 0.450722
|
elsol.ex
|
starcoder
|
# abs/1 is shadowed below, so exclude Kernel's auto-import.
import Kernel, except: [abs: 1]

defmodule RationalNumbers do
  @type rational :: {integer, integer}

  @doc """
  Add two rational numbers
  """
  @spec add(a :: rational, b :: rational) :: rational
  def add({a1, b}, {a2, b}) when a1 === a2 * -1, do: {0, 1}

  def add({a1, b1}, {a2, b2}) do
    reduce({a1 * b2 + a2 * b1, b1 * b2})
  end

  @doc """
  Subtract two rational numbers
  """
  @spec subtract(a :: rational, b :: rational) :: rational
  def subtract(a, a), do: {0, 1}

  def subtract({a1, b1}, {a2, b2}) do
    reduce({a1 * b2 - a2 * b1, b1 * b2})
  end

  @doc """
  Multiply two rational numbers
  """
  @spec multiply(a :: rational, b :: rational) :: rational
  def multiply({a1, b1}, {a2, b2}) do
    reduce({a1 * a2, b1 * b2})
  end

  @doc """
  Divide two rational numbers
  """
  @spec divide_by(num :: rational, den :: rational) :: rational
  def divide_by({_, _}, {0, _}), do: :error

  def divide_by({a1, b1}, {a2, b2}) do
    reduce({a1 * b2, a2 * b1})
  end

  @doc """
  Absolute value of a rational number
  """
  @spec abs(a :: rational) :: rational
  def abs({numerator, denominator}) do
    reduce({Kernel.abs(numerator), Kernel.abs(denominator)})
  end

  @doc """
  Exponentiation of a rational number by an integer
  """
  @spec pow_rational(a :: rational, n :: integer) :: rational
  def pow_rational({a, b}, n) when n >= 0 do
    reduce({pow(a, n), pow(b, n)})
  end

  # Negative exponent: invert the fraction and raise to -n.
  def pow_rational({a, b}, n) do
    reduce({pow(b, n * -1), pow(a, n * -1)})
  end

  @doc """
  Exponentiation of a real number by a rational number
  """
  @spec pow_real(x :: integer, n :: rational) :: float
  def pow_real(x, {a, b}) when a < 0 do
    # x^(a/b) with a < 0 is computed as ((1/x)^(-a))^(1/b).
    (1 / x) |> pow(a * -1.0) |> pow(1 / b)
  end

  def pow_real(x, {a, b}) do
    x |> pow(a) |> pow(1 / b)
  end

  @doc """
  Reduce a rational number to its lowest terms
  """
  @spec reduce(a :: rational) :: rational
  def reduce({a, b}) when b < 0, do: reduce({a * -1, b * -1})

  def reduce({a, b}) do
    divisor = Integer.gcd(a, b)
    {div(a, divisor), div(b, divisor)}
  end

  # Dispatch to integer or float exponentiation based on argument types.
  defp pow(base, exp) when is_integer(base) and is_integer(exp), do: Integer.pow(base, exp)
  defp pow(base, exp) when is_integer(base) and is_float(exp), do: Float.pow(base / 1, exp)
  defp pow(base, exp) when is_float(base) and is_number(exp), do: Float.pow(base, exp)
end
|
exercism/elixir/rational-numbers/lib/rational_numbers.ex
| 0.848157
| 0.489503
|
rational_numbers.ex
|
starcoder
|
defmodule Numato.Gpio do
  @moduledoc """
  GenStage producer that drives a Numato GPIO module over a serial (UART)
  connection. Synchronous commands are sent with `GenStage.call/2`; the
  device's response line arrives asynchronously in `handle_info/2` and is
  routed back to the waiting caller via `state.last_command`. Input-change
  notifications from the device are emitted as GenStage events.
  """
  use GenStage

  defmodule State do
    # uart_pid     - pid of the Circuits.UART process owning the serial port
    # last_command - {command_tag, from} for the in-flight call, or nil when idle
    defstruct [
      uart_pid: nil,
      last_command: nil
    ]
  end

  # Accepts either an already-open UART pid or a COM port name (see init/1).
  def start_link(uart_pid) do
    GenStage.start_link(__MODULE__, uart_pid)
  end

  # Reuse an externally managed UART process.
  def init(uart_pid) when is_pid(uart_pid) do
    {:producer, %State{uart_pid: uart_pid}}
  end

  # Open the named COM port ourselves: 115200 baud, active mode, CRLF framing.
  def init(com_port) when is_bitstring(com_port) do
    {:ok, uart_pid} = Circuits.UART.start_link()
    :ok = Circuits.UART.open(uart_pid, com_port,
      speed: 115200,
      active: true,
      framing: {Numato.UART.Framing, separator: "\r\n"})
    {:producer, %State{uart_pid: uart_pid}}
  end

  # Events are driven by device notifications, not consumer demand.
  def handle_demand(_demand, state) do
    {:noreply, [], state}
  end

  @doc """
  Returns Numato firmware version.
  """
  def ver(pid) when is_pid(pid) do
    GenStage.call(pid, :ver)
  end

  @doc """
  Reads ID of the Numato module.
  """
  def id_get(pid) when is_pid(pid) do
    GenStage.call(pid, :id_get)
  end

  @doc """
  Writes ID of the Numato module. The ID must be a string with exactly 8 characters.
  """
  def id_set(pid, id) when is_pid(pid) and is_bitstring(id) and byte_size(id) == 8 do
    GenStage.call(pid, {:id_set, id})
  end

  @doc """
  Reads the digital input status for the given GPIO. Returns 0 for low and 1 for high state.
  """
  def gpio_read(pid, gpio) when is_pid(pid) and is_integer(gpio) do
    GenStage.call(pid, {:gpio_read, gpio})
  end

  @doc """
  Sets the GPIO output status to either low (value `0`) or high (value `1`).
  """
  def gpio_write(pid, gpio, value) when is_pid(pid) and is_integer(gpio) and (value == 0 or value == 1) do
    GenStage.call(pid, {:gpio_write, gpio, value})
  end

  @doc """
  Sets mask for subsequent GPIO `Numato.Gpio.gpio_writeall` and `Numato.Gpio.gpio_iodir` commands.
  A 0 in a bit position mask the corresponding GPIO and any update to that GPIO is ignored
  during `Numato.Gpio.gpio_iodir` and `Numato.Gpio.gpio_writeall` operations.
  """
  def gpio_iomask(pid, iomask) when is_bitstring(iomask) do
    GenStage.call(pid, {:gpio_iomask, iomask})
  end

  @doc """
  Sets the direction of all GPIOs in a single operation.
  A 0 in a bit position configures that GPIO as output and 1 configures it as input.
  This operation respects the `iomask`, set using `Numato.Gpio.gpio_iomask()` function.
  """
  def gpio_iodir(pid, iodir) when is_bitstring(iodir) do
    GenStage.call(pid, {:gpio_iodir, iodir})
  end

  @doc """
  Reads the status of all GPIOs in a single operation.
  """
  def gpio_readall(pid) when is_pid(pid) do
    GenStage.call(pid, :gpio_readall)
  end

  @doc """
  Enables GPIOs input change notifications. When notifications are enabled, this `GenStage` process
  will produce events that are tuples with three elements: `{current_value, previous_value, iodir}`.
  """
  def gpio_notify_on(pid) when is_pid(pid) do
    GenStage.call(pid, :gpio_notify_on)
  end

  @doc """
  Disables GPIOs input change notifications.
  """
  def gpio_notify_off(pid) when is_pid(pid) do
    GenStage.call(pid, :gpio_notify_off)
  end

  @doc """
  Controls all GPIOs in a single operation.
  This operation respects the `iomask`, set using `Numato.Gpio.gpio_iomask()` function.
  """
  def gpio_writeall(pid, value) when is_pid(pid) do
    GenStage.call(pid, {:gpio_writeall, value})
  end

  @doc """
  Returns information whether notifications are enabled (`true`) or disabled ('false`).
  """
  def gpio_notify_get(pid) when is_pid(pid) do
    GenStage.call(pid, :gpio_notify_get)
  end

  @doc """
  Reads the analog voltage present at the given ADC input. Responses are
  integers in range 0 - 1023.
  """
  def adc_read(pid, input) when is_pid(pid) and is_integer(input) do
    GenStage.call(pid, {:adc_read, input})
  end

  # -- Server callbacks ------------------------------------------------------
  # Commands that expect a response line use send_call/3 (caller is replied to
  # later, from handle_info); fire-and-forget commands use send_info/2.

  def handle_call(:ver, from, state) do
    command_text = Numato.Commands.ver()
    send_call(command_text, {:ver, from}, state)
  end

  def handle_call(:id_get, from, state) do
    command_text = Numato.Commands.id_get()
    send_call(command_text, {:id_get, from}, state)
  end

  def handle_call({:id_set, id}, _from, state) do
    command_text = Numato.Commands.id_set(id)
    send_info(command_text, state)
  end

  def handle_call({:gpio_read, gpio}, from, state) do
    command_text = Numato.Commands.gpio_read(gpio)
    send_call(command_text, {:gpio_read, from}, state)
  end

  def handle_call({:gpio_write, gpio, value}, _from, state) do
    command_text = Numato.Commands.gpio_write(gpio, value)
    send_info(command_text, state)
  end

  def handle_call({:gpio_iomask, iomask}, _from, state) do
    command_text = Numato.Commands.gpio_iomask(iomask)
    send_info(command_text, state)
  end

  def handle_call({:gpio_iodir, iodir}, _from, state) do
    command_text = Numato.Commands.gpio_iodir(iodir)
    send_info(command_text, state)
  end

  def handle_call(:gpio_readall, from, state) do
    command_text = Numato.Commands.gpio_readall()
    send_call(command_text, {:gpio_readall, from}, state)
  end

  def handle_call(:gpio_notify_on, from, state) do
    command_text = Numato.Commands.gpio_notify_on()
    send_call(command_text, {:gpio_notify_on, from}, state)
  end

  def handle_call(:gpio_notify_off, from, state) do
    command_text = Numato.Commands.gpio_notify_off()
    send_call(command_text, {:gpio_notify_off, from}, state)
  end

  def handle_call(:gpio_notify_get, from, state) do
    command_text = Numato.Commands.gpio_notify_get()
    send_call(command_text, {:gpio_notify_get, from}, state)
  end

  def handle_call({:gpio_writeall, values}, _from, state) do
    command_text = Numato.Commands.gpio_writeall(values)
    send_info(command_text, state)
  end

  def handle_call({:adc_read, input}, from, state) do
    command_text = Numato.Commands.adc_read(input)
    send_call(command_text, {:adc_read, from}, state)
  end

  # A line arrived from the UART: either our own command echo (ignored),
  # an unsolicited change notification (emitted as an event), or the
  # response to the command recorded in state.last_command.
  def handle_info({:circuits_uart, _, line}, state) do
    response = Numato.Responses.parse(line)
    case response do
      :echo ->
        {:noreply, [], state}
      {:notification, previous, current, iodir} ->
        {events, new_state} = process_notification(previous, current, iodir, state)
        {:noreply, events, new_state}
      _ ->
        :ok = reply_to_command(state.last_command, response)
        {:noreply, [], %State{state | last_command: nil}}
    end
  end

  # Write a command whose result is not awaited; reply immediately with the
  # UART write result and clear any pending command.
  defp send_info(command_text, state) do
    response = Circuits.UART.write(state.uart_pid, command_text)
    {:reply, response, [], %State{state | last_command: nil}}
  end

  # Write a command and defer the reply until its response line arrives in
  # handle_info; on write failure, reply with the error right away.
  defp send_call(command_text, command_tuple, state) do
    case Circuits.UART.write(state.uart_pid, command_text) do
      :ok -> {:noreply, [], %State{state | last_command: command_tuple}}
      error -> {:reply, error, [], %State{state | last_command: nil}}
    end
  end

  # Turn a raw device notification into a GenStage event describing which
  # ports changed.
  defp process_notification(previous, current, iodir, state) do
    changes = Numato.Utils.get_changed_ports(previous, current, iodir)
    {[{:notification, changes}], state}
  end

  # Route a parsed response to the caller waiting on the matching command.
  defp reply_to_command({:gpio_read, from}, {:int, value}) when value == 0 or value == 1 do
    GenStage.reply(from, value)
  end

  defp reply_to_command({:id_get, from}, {:id, value}) do
    GenStage.reply(from, value)
  end

  defp reply_to_command({:id_get, from}, {:bits, value}) do
    GenStage.reply(from, Base.encode16(value))
  end

  defp reply_to_command({:ver, from}, {:bits, value}) do
    GenStage.reply(from, Base.encode16(value))
  end

  defp reply_to_command({:gpio_readall, from}, {:bits, value}) do
    GenStage.reply(from, value)
  end

  defp reply_to_command({:adc_read, from}, {:int, value}) do
    GenStage.reply(from, value)
  end

  defp reply_to_command({:gpio_notify_get, from}, {:notify, value}) do
    GenStage.reply(from, value)
  end

  defp reply_to_command({:gpio_notify_on, from}, {:notify, value}) do
    GenStage.reply(from, value)
  end

  defp reply_to_command({:gpio_notify_off, from}, {:notify, value}) do
    GenStage.reply(from, value)
  end

  # No command in flight (e.g. response to a send_info command) — drop it.
  defp reply_to_command(nil, _) do
    :ok
  end
end
|
lib/numato_gpio.ex
| 0.771499
| 0.495484
|
numato_gpio.ex
|
starcoder
|
defmodule Membrane.WebRTC.EndpointBin do
@moduledoc """
Module responsible for interacting with a WebRTC peer.
To send or receive tracks from a WebRTC peer, specify them with
`:inbound_tracks` and `:outbound_tracks` options, and link corresponding
`:input` and `:output` pads with ids matching the declared tracks' ids.
The tracks can be manipulated by sending `t:track_message/0`.
To initiate or modify the connection, the bin sends and expects to receive
`t:signal_message/0`.
"""
use Membrane.Bin
use Bunch
alias ExSDP.Media
alias ExSDP.Attribute.{FMTP, RTPMapping}
alias Membrane.WebRTC.{SDP, Track, TrackFilter}
@type signal_message ::
        {:signal, {:sdp_offer | :sdp_answer, String.t()} | {:candidate, String.t()}}

@type track_message :: alter_tracks_message() | enable_track_message() | disable_track_message()

@typedoc """
Message that adds or removes tracks.
"""
@type alter_tracks_message :: {:add_tracks, [Track.t()]} | {:remove_tracks, [Track.id()]}

@typedoc """
Message that enables track.
"""
# NOTE(fix): this type was declared as {:disable_track, Track.id()} — a
# copy-paste of the type below. The implementation handles {:enable_track, id}
# (see handle_other/3), so the spec now matches the actual message.
@type enable_track_message :: {:enable_track, Track.id()}

@typedoc """
Message that disables track.
"""
@type disable_track_message :: {:disable_track, Track.id()}
def_options inbound_tracks: [
              spec: [Membrane.WebRTC.Track.t()],
              default: [],
              description: "List of initial inbound tracks"
            ],
            outbound_tracks: [
              spec: [Membrane.WebRTC.Track.t()],
              default: [],
              description: "List of initial outbound tracks"
            ],
            stun_servers: [
              type: :list,
              spec: [ExLibnice.stun_server()],
              default: [],
              description: "List of stun servers"
            ],
            turn_servers: [
              type: :list,
              spec: [ExLibnice.relay_info()],
              default: [],
              description: "List of turn servers"
            ],
            port_range: [
              spec: Range.t(),
              default: 0..0,
              description: "Port range to be used by `Membrane.ICE.Bin`"
            ],
            handshake_opts: [
              type: :list,
              spec: Keyword.t(),
              default: [],
              description: """
              Keyword list with options for handshake module. For more information please
              refer to `Membrane.ICE.Bin`
              """
            ],
            video_codecs: [
              type: :list,
              spec: [ExSDP.Attribute.t()],
              default: [],
              description: "Video codecs that will be passed for SDP offer generation"
            ],
            audio_codecs: [
              type: :list,
              spec: [ExSDP.Attribute.t()],
              default: [],
              description: "Audio codecs that will be passed for SDP offer generation"
            ],
            filter_codecs: [
              spec: ({RTPMapping.t(), FMTP.t() | nil} -> boolean()),
              default: &SDP.filter_mappings(&1),
              description: "Defines function which will filter SDP m-line by codecs"
            ],
            log_metadata: [
              # NOTE(fix): the entry previously declared `spec:` twice
              # (`spec: :list, spec: Keyword.t()`); keyword lookups return the
              # first match, so the intended Keyword.t() spec was shadowed.
              spec: Keyword.t(),
              default: [],
              description: "Logger metadata used for endpoint bin and all its descendants"
            ],
            use_integrated_turn: [
              # NOTE(fix): was `spec: binary()` although the default is `true`
              # and the value is used as a flag.
              spec: boolean(),
              default: true,
              description: "Indicator, if use integrated TURN"
            ],
            integrated_turns_pids: [
              spec: [pid()],
              default: [],
              description: "Pids of running integrated TURN servers"
            ]
# Dynamic input pads: one per outbound track, linked by track id.
def_input_pad :input,
  demand_unit: :buffers,
  caps: :any,
  availability: :on_request,
  options: [
    encoding: [
      spec: :OPUS | :H264 | :VP8,
      description: "Track encoding"
    ],
    track_enabled: [
      spec: boolean(),
      default: true,
      description: "Enable or disable track"
    ],
    use_payloader?: [
      spec: boolean(),
      default: true,
      description: """
      Defines if incoming stream should be payloaded based on given encoding.
      Otherwise the stream is assumed be in RTP format.
      """
    ]
  ]

# Dynamic output pads: one per inbound track, linked by track id.
def_output_pad :output,
  demand_unit: :buffers,
  caps: :any,
  availability: :on_request,
  options: [
    track_enabled: [
      spec: boolean(),
      default: true,
      description: "Enable or disable track"
    ],
    packet_filters: [
      spec: [Membrane.RTP.SessionBin.packet_filter_t()],
      default: [],
      description: "List of packet filters that will be applied to the SessionBin's output pad"
    ],
    extensions: [
      spec: [Membrane.RTP.SessionBin.extension_t()],
      default: [],
      description: "List of tuples representing rtp extensions"
    ],
    use_depayloader?: [
      spec: boolean(),
      default: true,
      description: """
      Defines if the outgoing stream should get depayloaded.
      This option should be used as a convenience, it is not necessary as the new track notification
      returns a depayloading filter's definition that can be attached to the output pad
      to work the same way as with the option set to true.
      """
    ]
  ]
@impl true
def handle_init(opts) do
  # Static children: ICE transport, SRTP-enabled RTP session bin, and a
  # funnel that merges RTP and RTCP streams into ICE's single input pad.
  children = %{
    ice: %Membrane.ICE.Bin{
      stun_servers: opts.stun_servers,
      turn_servers: opts.turn_servers,
      use_integrated_turn: opts.use_integrated_turn,
      integrated_turns_pids: opts.integrated_turns_pids,
      port_range: opts.port_range,
      controlling_mode: true,
      handshake_module: Membrane.DTLS.Handshake,
      handshake_opts: opts.handshake_opts
    },
    rtp: %Membrane.RTP.SessionBin{secure?: true},
    ice_funnel: Membrane.Funnel
  }

  rtp_input_ref = make_ref()

  links = [
    # RTCP feedback from the RTP session back out through ICE.
    link(:rtp)
    |> via_out(Pad.ref(:rtcp_output, rtp_input_ref))
    |> to(:ice_funnel),
    # Incoming ICE payload into the RTP session.
    link(:ice)
    |> via_out(Pad.ref(:output, 1))
    |> via_in(Pad.ref(:rtp_input, rtp_input_ref))
    |> to(:rtp),
    # Funnelled outgoing data into ICE.
    link(:ice_funnel)
    |> via_out(:output)
    |> via_in(Pad.ref(:input, 1))
    |> to(:ice)
  ]

  spec = %ParentSpec{
    children: children,
    links: links
  }

  state =
    %{
      inbound_tracks: %{},
      outbound_tracks: %{},
      audio_codecs: opts.audio_codecs,
      video_codecs: opts.video_codecs,
      candidates: [],
      candidate_gathering_state: nil,
      dtls_fingerprint: nil,
      ssrc_to_track_id: %{},
      filter_codecs: opts.filter_codecs,
      # ICE sub-state: restart bookkeeping plus cached local credentials;
      # first? marks that no offer/answer exchange has happened yet.
      ice: %{restarting?: false, waiting_restart?: false, pwd: nil, ufrag: nil, first?: true}
    }
    |> add_tracks(:inbound_tracks, opts.inbound_tracks)
    |> add_tracks(:outbound_tracks, opts.outbound_tracks)

  {{:ok, spec: spec, log_metadata: opts.log_metadata}, state}
end
@impl true
# An :input pad was linked for an outbound track: build the sending branch
# input -> (H264 parser?) -> track filter -> RTP session -> ICE funnel.
def handle_pad_added(Pad.ref(:input, track_id) = pad, ctx, state) do
  # TODO: check this one
  %{track_enabled: track_enabled, encoding: encoding, use_payloader?: use_payloader?} =
    ctx.options

  %Track{ssrc: ssrc, rtp_mapping: mapping} = Map.fetch!(state.outbound_tracks, track_id)

  options = [
    encoding: encoding,
    clock_rate: mapping.clock_rate,
    payload_type: mapping.payload_type
  ]

  # H264 needs NAL-aligned parsing before payloading; other encodings pass
  # through unchanged (identity link function).
  encoding_specific_links =
    case encoding do
      :H264 when use_payloader? ->
        &to(&1, {:h264_parser, ssrc}, %Membrane.H264.FFmpeg.Parser{alignment: :nal})

      _other ->
        & &1
    end

  payloader =
    if use_payloader? do
      {:ok, payloader} = Membrane.RTP.PayloadFormatResolver.payloader(encoding)
      payloader
    else
      # nil payloader: the incoming stream is assumed to already be RTP.
      nil
    end

  links = [
    link_bin_input(pad)
    |> then(encoding_specific_links)
    |> to({:track_filter, track_id}, %TrackFilter{enabled: track_enabled})
    |> via_in(Pad.ref(:input, ssrc), options: [payloader: payloader])
    |> to(:rtp)
    |> via_out(Pad.ref(:rtp_output, ssrc), options: options)
    |> to(:ice_funnel)
  ]

  {{:ok, spec: %ParentSpec{links: links}}, state}
end
@impl true
# An :output pad was linked for an inbound track: build the receiving branch
# RTP session -> track filter -> bin output, then mark the track as linked.
def handle_pad_added(Pad.ref(:output, track_id) = pad, ctx, state) do
  %Track{ssrc: ssrc, encoding: encoding, rtp_mapping: rtp_mapping} =
    track = Map.fetch!(state.inbound_tracks, track_id)

  %{track_enabled: track_enabled, use_depayloader?: use_depayloader?} = ctx.options

  depayloader =
    if use_depayloader? do
      {:ok, depayloader} = Membrane.RTP.PayloadFormatResolver.depayloader(encoding)
      depayloader
    else
      # nil depayloader: raw RTP is forwarded to the consumer.
      nil
    end

  # Pad options for the RTP session output: caller-supplied extensions and
  # packet filters plus the clock rate and depayloader derived above.
  output_pad_options =
    ctx.options
    |> Map.take([:extensions, :packet_filters])
    |> Map.merge(%{
      clock_rate: rtp_mapping.clock_rate,
      depayloader: depayloader
    })
    |> Map.to_list()

  spec = %ParentSpec{
    links: [
      link(:rtp)
      |> via_out(Pad.ref(:output, ssrc), options: output_pad_options)
      |> to({:track_filter, track_id}, %TrackFilter{
        enabled: track_enabled
      })
      |> to_bin_output(pad)
    ]
  }

  state = put_in(state, [:inbound_tracks, track_id], %{track | status: :linked})
  {{:ok, spec: spec}, state}
end
@impl true
# A new RTP stream appeared: resolve its track via the SSRC mapping and
# notify the parent with a ready-made depayloading filter definition.
def handle_notification({:new_rtp_stream, ssrc, _pt}, _from, _ctx, state) do
  track_id = Map.fetch!(state.ssrc_to_track_id, ssrc)
  track = Map.fetch!(state.inbound_tracks, track_id)
  track = %Track{track | ssrc: ssrc}
  state = put_in(state, [:inbound_tracks, track.id], track)
  depayloading_filter = depayloading_filter_for(track)
  {{:ok, notify: {:new_track, track.id, track.encoding, depayloading_filter}}, state}
end

@impl true
# DTLS handshake produced our certificate fingerprint; cache it for SDP answers.
def handle_notification({:handshake_init_data, _component_id, fingerprint}, _from, _ctx, state) do
  {:ok, %{state | dtls_fingerprint: {:sha256, hex_dump(fingerprint)}}}
end

@impl true
# Local ICE credentials arrived ("ufrag pwd"). Unless this is the very first
# negotiation with no outbound tracks yet, kick off an offer.
def handle_notification({:local_credentials, credentials}, _from, _ctx, state) do
  [ice_ufrag, ice_pwd] = String.split(credentials, " ")

  {actions, state} =
    if state.ice.first? and state.outbound_tracks == %{} do
      {[], state}
    else
      state = Map.update!(state, :ice, &%{&1 | first?: false})
      get_offer_data(state)
    end

  state = %{state | ice: %{state.ice | ufrag: ice_ufrag, pwd: ice_pwd}}
  {{:ok, actions}, state}
end

@impl true
# Remember each gathered candidate (for late joiners) and signal it upward.
def handle_notification({:new_candidate_full, cand}, _from, _ctx, state) do
  state = Map.update!(state, :candidates, &[cand | &1])
  {{:ok, notify_candidates([cand])}, state}
end

@impl true
def handle_notification(:candidate_gathering_done, _from, _ctx, state) do
  {:ok, %{state | candidate_gathering_state: :done}}
end

@impl true
# Voice-activity detection events are forwarded verbatim.
def handle_notification({:vad, _val} = msg, _from, _ctx, state) do
  {{:ok, notify: msg}, state}
end

@impl true
# Connection failed: clear the restarting flag and schedule a restart.
def handle_notification({:connection_failed, _stream_id, _component_id}, _from, _ctx, state) do
  state = %{state | ice: %{state.ice | restarting?: false}}
  {action, state} = maybe_restart_ice(state, true)
  {{:ok, action}, state}
end

@impl true
# Connection established while an ICE restart was in progress: report which
# ready outbound tracks can now be linked, mark them linked, and run any
# restart that queued up in the meantime.
def handle_notification({:connection_ready, _stream_id, _component_id}, _from, _ctx, state)
    when state.ice.restarting? do
  outbound_tracks = Map.values(state.outbound_tracks) |> Enum.filter(&(&1.status != :pending))

  get_encoding = fn track_id -> Map.get(state.outbound_tracks, track_id).encoding end

  outbound_tracks_id_to_link =
    outbound_tracks
    |> Enum.filter(&(&1.status === :ready))
    |> Enum.map(& &1.id)

  tracks_id_to_link_with_encoding =
    outbound_tracks_id_to_link
    |> Enum.map(&{&1, get_encoding.(&1)})

  negotiations = [notify: {:negotiation_done, tracks_id_to_link_with_encoding}]

  state = %{state | outbound_tracks: change_tracks_status(state, :ready, :linked)}

  state = %{state | ice: %{state.ice | restarting?: false}}
  {restart_action, state} = maybe_restart_ice(state)

  actions = negotiations ++ restart_action
  {{:ok, actions}, state}
end

@impl true
# Connection ready without a restart in flight: treat as a restart trigger.
def handle_notification({:connection_ready, _stream_id, _component_id}, _from, _ctx, state)
    when not state.ice.restarting? do
  {action, state} = maybe_restart_ice(state, true)
  {{:ok, action}, state}
end

@impl true
def handle_notification(_notification, _from, _ctx, state), do: {:ok, state}
@impl true
# Remote SDP offer received: derive track changes from the SDP, produce an
# answer, and (once tracks exist) start/resume candidate gathering.
def handle_other({:signal, {:sdp_offer, sdp, mid_to_track_id}}, _ctx, state) do
  {:ok, sdp} = sdp |> ExSDP.parse()

  {new_inbound_tracks, removed_inbound_tracks, inbound_tracks, outbound_tracks} =
    get_tracks_from_sdp(sdp, mid_to_track_id, state)

  state = %{
    state
    | outbound_tracks: Map.merge(state.outbound_tracks, Map.new(outbound_tracks, &{&1.id, &1})),
      inbound_tracks: Map.merge(state.inbound_tracks, Map.new(inbound_tracks, &{&1.id, &1}))
  }

  {link_notify, state} = add_inbound_tracks(new_inbound_tracks, state)

  answer =
    SDP.create_answer(
      inbound_tracks: inbound_tracks,
      outbound_tracks: outbound_tracks,
      ice_ufrag: state.ice.ufrag,
      ice_pwd: state.ice.pwd,
      fingerprint: state.dtls_fingerprint,
      video_codecs: state.video_codecs,
      audio_codecs: state.audio_codecs
    )

  # Gather candidates only once there is at least one track and gathering
  # hasn't started; if gathering already ran, re-send the cached candidates.
  {actions, state} =
    withl tracks_check: true <- state.inbound_tracks != %{} or outbound_tracks != %{},
          candidate_gathering_check: nil <- state.candidate_gathering_state do
      {[forward: [ice: :gather_candidates]], %{state | candidate_gathering_state: :in_progress}}
    else
      tracks_check: _ -> {[], state}
      candidate_gathering_check: _ -> {notify_candidates(state.candidates), state}
    end

  mid_to_track_id = Map.new(inbound_tracks ++ outbound_tracks, &{&1.mid, &1.id})

  actions =
    if Enum.empty?(removed_inbound_tracks),
      do: actions,
      else: actions ++ [notify: {:removed_tracks, removed_inbound_tracks}]

  actions =
    link_notify ++
      [notify: {:signal, {:sdp_answer, to_string(answer), mid_to_track_id}}] ++
      set_remote_credentials(sdp) ++
      actions

  {{:ok, actions}, state}
end

@impl true
# Remote ICE candidate from signaling — forward to the ICE bin.
def handle_other({:signal, {:candidate, candidate}}, _ctx, state) do
  {{:ok, forward: {:ice, {:set_remote_candidate, "a=" <> candidate, 1}}}, state}
end

@impl true
# Renegotiation request: on the very first negotiation we can offer directly
# (credentials permitting); afterwards an ICE restart is required.
def handle_other({:signal, :renegotiate_tracks}, _ctx, state) do
  {action, state} =
    cond do
      state.ice.first? and state.ice.pwd != nil ->
        state = Map.update!(state, :ice, &%{&1 | first?: false})
        get_offer_data(state)

      state.ice.first? ->
        # Credentials not ready yet; the :local_credentials notification
        # will trigger the offer instead.
        state = Map.update!(state, :ice, &%{&1 | first?: false})
        {[], state}

      state.ice.pwd == nil ->
        {[], state}

      true ->
        maybe_restart_ice(state, true)
    end

  {{:ok, action}, state}
end

@impl true
# Add outbound tracks; unknown tracks start as :pending until negotiated.
def handle_other({:add_tracks, tracks}, _ctx, state) do
  outbound_tracks = state.outbound_tracks

  tracks =
    Enum.map(tracks, fn track ->
      if Map.has_key?(outbound_tracks, track.id),
        do: track,
        else: %{track | status: :pending, mid: nil}
    end)

  state = add_tracks(state, :outbound_tracks, tracks)

  {action, state} =
    if state.ice.first? and state.ice.pwd != nil do
      state = Map.update!(state, :ice, &%{&1 | first?: false})
      outbound_tracks = change_tracks_status(state, :pending, :ready)
      state = %{state | outbound_tracks: outbound_tracks}
      get_offer_data(state)
    else
      maybe_restart_ice(state, true)
    end

  {{:ok, action}, state}
end

@impl true
# Removing tracks marks them :disabled and renegotiates via ICE restart.
def handle_other({:remove_tracks, tracks_to_remove}, _ctx, state) do
  outbound_tracks = state.outbound_tracks

  new_outbound_tracks =
    Enum.map(tracks_to_remove, &Map.get(outbound_tracks, &1.id))
    |> Map.new(fn track -> {track.id, %{track | status: :disabled}} end)

  {actions, state} =
    state
    |> Map.update!(:outbound_tracks, &Map.merge(&1, new_outbound_tracks))
    |> maybe_restart_ice(true)

  {{:ok, actions}, state}
end

@impl true
def handle_other({:enable_track, track_id}, _ctx, state) do
  {{:ok, forward: {{:track_filter, track_id}, :enable_track}}, state}
end

@impl true
def handle_other({:disable_track, track_id}, _ctx, state) do
  {{:ok, forward: {{:track_filter, track_id}, :disable_track}}, state}
end
# Restarts ICE when a restart is queued and none is currently running;
# pending outbound tracks are promoted to :ready as part of the restart.
# With set_waiting_restart? the restart is (also) queued for later.
defp maybe_restart_ice(state, set_waiting_restart? \\ false) do
  state =
    if set_waiting_restart?,
      do: %{state | ice: %{state.ice | waiting_restart?: true}},
      else: state

  if not state.ice.restarting? and state.ice.waiting_restart? do
    state = %{state | ice: %{state.ice | restarting?: true, waiting_restart?: false}}

    outbound_tracks = change_tracks_status(state, :pending, :ready)

    state = %{state | outbound_tracks: outbound_tracks}

    {[forward: {:ice, :restart_stream}], state}
  else
    {[], state}
  end
end

# Counts non-pending outbound audio/video tracks and notifies the parent so
# it can prepare an SDP offer; marks ICE as restarting.
defp get_offer_data(state) do
  tracks_types =
    Map.values(state.outbound_tracks)
    |> Enum.filter(&(&1.status != :pending))
    |> Enum.map(& &1.type)

  media_count = %{
    audio: Enum.count(tracks_types, &(&1 == :audio)),
    video: Enum.count(tracks_types, &(&1 == :video))
  }

  actions = [notify: {:signal, {:offer_data, media_count}}]
  state = Map.update!(state, :ice, &%{&1 | restarting?: true})

  {actions, state}
end

# Returns the outbound-tracks map with every track in prev_status moved to
# new_status (other tracks unchanged).
defp change_tracks_status(state, prev_status, new_status) do
  state.outbound_tracks
  |> Map.values()
  |> Map.new(fn track ->
    {track.id, if(track.status === prev_status, do: %{track | status: new_status}, else: track)}
  end)
end

# Delegates SDP track diffing to SDP.get_tracks, excluding pending tracks.
defp get_tracks_from_sdp(sdp, mid_to_track_id, state) do
  old_inbound_tracks = Map.values(state.inbound_tracks)

  outbound_tracks = Map.values(state.outbound_tracks) |> Enum.filter(&(&1.status != :pending))

  SDP.get_tracks(
    sdp,
    state.filter_codecs,
    old_inbound_tracks,
    outbound_tracks,
    mid_to_track_id
  )
end

# Registers new inbound tracks (and their SSRC mapping) and returns the
# {:new_tracks, ...} notification when there is anything to report.
defp add_inbound_tracks(new_tracks, state) do
  track_id_to_track = Map.new(new_tracks, &{&1.id, &1})
  state = Map.update!(state, :inbound_tracks, &Map.merge(&1, track_id_to_track))

  ssrc_to_track_id = Map.new(new_tracks, fn track -> {track.ssrc, track.id} end)
  state = Map.update!(state, :ssrc_to_track_id, &Map.merge(&1, ssrc_to_track_id))

  actions = if Enum.empty?(new_tracks), do: [], else: [notify: {:new_tracks, new_tracks}]
  {actions, state}
end

# Merges tracks into the given direction's map; outbound tracks get fresh
# SSRCs that avoid collisions with every already-known track.
defp add_tracks(state, direction, tracks) do
  tracks =
    case direction do
      :outbound_tracks ->
        Track.add_ssrc(
          tracks,
          Map.values(state.inbound_tracks) ++ Map.values(state.outbound_tracks)
        )

      :inbound_tracks ->
        tracks
    end

  tracks = Map.new(tracks, &{&1.id, &1})
  Map.update!(state, direction, &Map.merge(&1, tracks))
end

# Wraps each candidate in a {:signal, {:candidate, cand, 0}} notification.
defp notify_candidates(candidates) do
  Enum.flat_map(candidates, fn cand ->
    [notify: {:signal, {:candidate, cand, 0}}]
  end)
end

# Extracts ICE credentials from the first m-line of the remote SDP and
# forwards them to the ICE bin; no-op for SDP without media sections.
defp set_remote_credentials(sdp) do
  case List.first(sdp.media) do
    nil ->
      []

    media ->
      {_key, ice_ufrag} = Media.get_attribute(media, :ice_ufrag)
      {_key, ice_pwd} = Media.get_attribute(media, :ice_pwd)
      remote_credentials = ice_ufrag <> " " <> ice_pwd
      [forward: {:ice, {:set_remote_credentials, remote_credentials}}]
  end
end

# Renders a binary digest as colon-separated uppercase hex (DTLS fingerprint
# presentation format).
defp hex_dump(digest_str) do
  digest_str
  |> :binary.bin_to_list()
  |> Enum.map_join(":", &Base.encode16(<<&1>>))
end

# Builds the depayloader bin definition advertised with :new_track, or nil
# when no depayloader exists for the track's encoding.
defp depayloading_filter_for(track) do
  case Membrane.RTP.PayloadFormatResolver.depayloader(track.encoding) do
    {:ok, depayloader} ->
      %Membrane.RTP.DepayloaderBin{
        depayloader: depayloader,
        clock_rate: track.rtp_mapping.clock_rate
      }

    :error ->
      nil
  end
end
end
|
lib/membrane_webrtc_plugin/endpoint_bin.ex
| 0.896849
| 0.478773
|
endpoint_bin.ex
|
starcoder
|
defmodule Litmus.Type.DateTime do
  @moduledoc """
  This type validates DateTimes. It accepts either `DateTime` structs or
  ISO-8601 strings. ISO-8601 datetime with timezone strings will be converted
  into `DateTime`s.

  ## Options

    * `:default` - Setting `:default` will populate a field with the provided
      value, assuming that it is not present already. If a field already has a
      value present, it will not be altered.

    * `:required` - Setting `:required` to `true` will cause a validation error
      when a field is not present or the value is `nil`. Allowed values for
      required are `true` and `false`. The default is `false`.

  ## Examples

      iex> schema = %{"start_date" => %Litmus.Type.DateTime{}}
      iex> {:ok, %{"start_date" => datetime}} = Litmus.validate(%{"start_date" => "2017-06-18T05:45:33Z"}, schema)
      iex> DateTime.to_iso8601(datetime)
      "2017-06-18T05:45:33Z"

      iex> {:ok, default_datetime, _} = DateTime.from_iso8601("2019-05-01T06:25:00-0700")
      ...> schema = %{
      ...>   "start_date" => %Litmus.Type.DateTime{
      ...>     default: default_datetime
      ...>   }
      ...> }
      iex> {:ok, %{"start_date" => datetime}} = Litmus.validate(%{}, schema)
      iex> DateTime.to_iso8601(datetime)
      "2019-05-01T13:25:00Z"
  """

  alias Litmus.{Default, Required}
  alias Litmus.Type

  defstruct default: Litmus.Type.Any.NoDefault,
            required: false

  @type t :: %__MODULE__{
          default: any,
          required: boolean
        }

  # Runs the required-presence check, then datetime conversion; a field that
  # is absent falls through to default handling.
  @spec validate_field(t, term, map) :: {:ok, map} | {:error, String.t()}
  def validate_field(type, field, data) do
    case Required.validate(type, field, data) do
      {:ok, data} -> convert(type, field, data)
      {:ok_not_present, data} -> Default.validate(type, field, data)
      {:error, msg} -> {:error, msg}
    end
  end

  # Accepts nil and DateTime structs as-is, parses binaries as ISO-8601,
  # rejects everything else.
  @spec convert(t, term, map) :: {:ok, map} | {:error, String.t()}
  defp convert(%__MODULE__{}, field, params) do
    case params[field] do
      nil -> {:ok, params}
      %DateTime{} -> {:ok, params}
      value when is_binary(value) -> parse_datetime(value, field, params)
      _other -> error_tuple(field)
    end
  end

  # Parses an ISO-8601 string and replaces the field's value with the
  # resulting DateTime (the UTC offset is discarded by from_iso8601/1).
  @spec parse_datetime(String.t(), term, map) :: {:ok, map} | {:error, String.t()}
  defp parse_datetime(value, field, params) do
    case DateTime.from_iso8601(value) do
      {:ok, date_time, _utc_offset} -> {:ok, Map.put(params, field, date_time)}
      {:error, _reason} -> error_tuple(field)
    end
  end

  @spec error_tuple(String.t()) :: {:error, String.t()}
  defp error_tuple(field) do
    {:error, "#{field} must be a valid ISO-8601 datetime"}
  end

  defimpl Litmus.Type do
    alias Litmus.Type

    @spec validate(Type.t(), term, map) :: {:ok, map} | {:error, String.t()}
    def validate(type, field, data), do: Type.DateTime.validate_field(type, field, data)
  end
end
|
lib/litmus/type/date_time.ex
| 0.925188
| 0.455986
|
date_time.ex
|
starcoder
|
defmodule PinElixir.Charge do
  import PinElixir.Utils.RequestOptions
  import PinElixir.Utils.Response

  @moduledoc """
  Handles the creation and retrieval of charges via the Pin Payments API.
  """

  @doc """
  Retrieves all charges.

  Returns a tuple:

  ```
  {:ok,
   %{charges: [%{amount: 500, amount_refunded: 0, authorisation_expired: false,
       captured: true,
       card: %{address_city: "Hogwarts", address_country: "Straya",
         display_number: "XXXX-XXXX-XXXX-0000", expiry_month: 10,
         expiry_year: 2016, scheme: "visa", token: "card_..."},
       created_at: "2015-11-15T08:33:04Z", currency: "AUD",
       description: "Dragon Eggs", status_message: "Success", success: true,
       token: "ch_...", total_fees: 45, transfer: []},
     %{amount: ...}],
     pagination: %{count: 42, current: 1, next: 2, pages: 2, per_page: 25,
       previous: nil}}}
  ```

  OR

      {:error, error_map}
  """
  def get_all do
    HTTPotion.get(charges_url(), with_auth())
    |> handle_get_all
  end

  # 200: decode the JSON body, rename the API's `response` field to
  # `charges` and wrap in an {:ok, _} tuple.
  defp handle_get_all(%{status_code: 200, body: body}) do
    decode(body)
    |> rename_response_field
    |> wrap_in_success_tuple
  end

  # Any non-200 status is treated as an error.
  defp handle_get_all(%{status_code: _status, body: body}) do
    body |> to_error_tuple
  end

  defp rename_response_field(response) do
    %{charges: response.response,
      pagination: response.pagination}
  end

  defp wrap_in_success_tuple(map) do
    {:ok, map}
  end

  @doc """
  Given a charge token, returns a tuple representing the charge.

  ```
  {:ok, %{charge: %{amount: 500, currency: "AUD", description: "Dragon Eggs",
    status_message: "Success", success: true, token: "ch_...", ...}}}
  ```

  OR

  ```
  {:error, error_details}
  ```
  """
  def get(token) do
    HTTPotion.get(charges_url() <> "/#{token}", with_auth())
    |> handle_get
  end

  defp handle_get(%{status_code: 200, body: body}) do
    decoded = decode(body)
    {:ok, %{charge: decoded.response}}
  end

  # Previously only 404 was matched here, so any other failure status
  # (401, 422, 500, ...) crashed with a FunctionClauseError. Treat every
  # non-200 response as an error.
  defp handle_get(%{status_code: _status, body: body}) do
    body |> to_error_tuple
  end

  @doc """
  Takes a map with a `:charge` key plus one of `:card`, `:card_token` or
  `:customer_token`, and creates a charge.

  **Note that amount is in the base unit of the currency, e.g $5 would be
  represented by an amount of 500 (cents).**

  ```
  charge = %{
    amount: 500,
    currency: "AUD", # Optional (default: "AUD")
    description: "Dragon Eggs",
    email: "<EMAIL>",
    ip_address: "127.0.0.1"
  }

  card = %{
    number: 4200000000000000,
    expiry_month: "10",
    expiry_year: 2016,
    cvc: 456,
    name: "<NAME>",
    address_line1: "The Game Keepers Cottage",
    address_city: "Hogwarts",
    address_postcode: "H0G",
    address_state: "WA",
    address_country: "England"
  }

  Charge.create(%{charge: charge, card: card})
  Charge.create(%{charge: charge, card_token: "card_abcd1234"})
  Charge.create(%{charge: charge, customer_token: "cust_123"})
  ```

  Returns a tuple representing the outcome of the charge:

  ```
  {:ok, %{charge: %{amount: 500, captured: true, success: true,
    token: "ch_...", ...}}}
  ```

  OR

      {:error, error_message}
  """
  def create(%{charge: charge, card: card}) do
    Poison.encode!(Map.put(charge, :card, card))
    |> post_to_api
    |> handle_charge_response
  end

  def create(%{charge: charge, customer_token: customer_token}) do
    Poison.encode!(Map.put(charge, :customer_token, customer_token))
    |> post_to_api
    |> handle_charge_response
  end

  def create(%{charge: charge, card_token: card_token}) do
    Poison.encode!(Map.put(charge, :card_token, card_token))
    |> post_to_api
    |> handle_charge_response
  end

  defp post_to_api(json) do
    HTTPotion.post(
      charges_url(),
      with_auth(headers: ["Content-Type": "application/json"], body: json)
    )
  end

  # 201 Created: decode, rename `response` to `charge`, wrap in {:ok, _}.
  defp handle_charge_response(%{status_code: 201, body: body}) do
    decode(body)
    |> rename_charge_field
    |> wrap_in_success_tuple
  end

  defp handle_charge_response(%{status_code: _status, body: body}) do
    body |> to_error_tuple
  end

  defp rename_charge_field(map) do
    %{charge: map.response}
  end

  @doc """
  Given a token, processes (captures) a previously authorised payment.

  Returns `{:ok, %{charge: charge_map}}` on success, or `{:error, error_map}`.
  """
  def capture(token) do
    HTTPotion.put(charges_url() <> "/#{token}/capture", with_auth())
    |> handle_charge_response
  end

  # The endpoint is read at runtime rather than baked in via a module
  # attribute at compile time, so configuration changes take effect without
  # recompiling this library.
  defp charges_url do
    "https://#{Application.get_env(:pin_elixir, :pin_url)}/charges"
  end
end
|
lib/charges/charge.ex
| 0.76708
| 0.726583
|
charge.ex
|
starcoder
|
defmodule Hound.Helpers.Element do
  @moduledoc "Provides functions related to elements."

  @type element_selector :: {atom, String.t}
  @type element :: element_selector | String.t

  @doc """
  Gets visible text of element. Requires the element ID.

      element_id = find_element(:css, ".example")
      visible_text(element_id)

  You can also directly pass the selector as a tuple.

      visible_text({:css, ".example"})
  """
  @spec visible_text(element) :: String.t
  def visible_text(element), do: element_module().visible_text(element)

  @doc """
  Enters value into field. It does not clear the field before entering the
  new value; anything passed is added to the value already present.

      element_id = find_element(:id, "example")
      input_into_field(element_id, "John Doe")

  You can also pass the selector as a tuple, for the first argument.

      input_into_field({:id, "example"}, "John Doe")
  """
  @spec input_into_field(element, String.t) :: :ok
  def input_into_field(element, input), do: element_module().input_into_field(element, input)

  @doc """
  Sets a field's value. Unlike `input_into_field/2`, the field is cleared
  before entering the new value.

      element_id = find_element(:id, "example")
      fill_field(element_id, "John Doe")

  You can also pass the selector as a tuple, for the first argument.

      fill_field({:id, "example"}, "John Doe")
  """
  @spec fill_field(element, String.t) :: :ok
  def fill_field(element, input), do: element_module().fill_field(element, input)

  @doc """
  Gets an element's tag name.

      element_id = find_element(:class, "example")
      tag_name(element_id)

  You can also directly pass the selector as a tuple.

      tag_name({:class, "example"})
  """
  @spec tag_name(element) :: String.t
  def tag_name(element), do: element_module().tag_name(element)

  @doc """
  Clears a textarea or input field's value.

      element_id = find_element(:class, "example")
      clear_field(element_id)

  You can also directly pass the selector as a tuple.

      clear_field({:class, "example"})
  """
  @spec clear_field(element) :: :ok
  def clear_field(element), do: element_module().clear_field(element)

  @doc """
  Checks if a radio input group or checkbox has any value selected.

      element_id = find_element(:name, "example")
      selected?(element_id)

  You can also pass the selector as a tuple.

      selected?({:name, "example"})
  """
  @spec selected?(element) :: :true | :false
  def selected?(element), do: element_module().selected?(element)

  @doc """
  Checks if an input field is enabled.

      element_id = find_element(:name, "example")
      element_enabled?(element_id)

  You can also pass the selector as a tuple.

      element_enabled?({:name, "example"})
  """
  @spec element_enabled?(element) :: :true | :false
  def element_enabled?(element), do: element_module().element_enabled?(element)

  @doc """
  Gets an element's attribute value.

      element_id = find_element(:name, "example")
      attribute_value(element_id, "data-greeting")

  You can also pass the selector as a tuple, for the first argument.

      attribute_value({:name, "example"}, "data-greeting")
  """
  @spec attribute_value(element, String.t) :: String.t | :nil
  def attribute_value(element, attribute_name),
    do: element_module().attribute_value(element, attribute_name)

  @doc """
  Checks if two element IDs refer to the same DOM element.

      element_id1 = find_element(:name, "username")
      element_id2 = find_element(:id, "user_name")
      same_element?(element_id1, element_id2)
  """
  @spec same_element?(String.t, String.t) :: :true | :false
  def same_element?(element_id1, element_id2),
    do: element_module().same_element?(element_id1, element_id2)

  @doc """
  Checks if an element is currently displayed.

      element_id = find_element(:name, "example")
      element_displayed?(element_id)

  You can also pass the selector as a tuple.

      element_displayed?({:name, "example"})
  """
  @spec element_displayed?(element) :: :true | :false
  def element_displayed?(element), do: element_module().element_displayed?(element)

  @doc """
  Gets an element's location on page, as a tuple of the form `{x, y}`.

      element_id = find_element(:name, "example")
      element_location(element_id)

  You can also pass the selector as a tuple.

      element_location({:name, "example"})
  """
  @spec element_location(element) :: tuple
  def element_location(element), do: element_module().element_location(element)

  @doc """
  Gets an element's size in pixels, as a tuple of the form `{width, height}`.

      element_id = find_element(:name, "example")
      element_size(element_id)

  You can also pass the selector as a tuple.

      element_size({:name, "example"})
  """
  @spec element_size(element) :: tuple
  def element_size(element), do: element_module().element_size(element)

  @doc """
  Gets an element's computed CSS property.

      element_id = find_element(:name, "example")
      css_property(element_id, "display")

  You can also pass the selector as a tuple, for the first argument.

      css_property({:name, "example"}, "display")
  """
  @spec css_property(element, String.t) :: String.t
  def css_property(element, property_name),
    do: element_module().css_property(element, property_name)

  @doc """
  Click on an element. You can also use this to click on checkboxes and
  radio buttons.

      element_id = find_element(:id, ".example")
      click(element_id)

  You can also directly pass the selector as a tuple.

      click({:id, "example"})
  """
  @spec click(element) :: :ok
  def click(element), do: element_module().click(element)

  @doc """
  Sends a submit event to any field or form element.

      element_id = find_element(:name, "username")
      submit(element_id)

  You can also directly pass the selector as a tuple.

      submit({:name, "username"})
  """
  @spec submit_element(element) :: :ok
  def submit_element(element), do: element_module().submit_element(element)

  # Resolves the Element module of the currently configured driver.
  # `driver_info[:driver_type]` is an alias atom, so `.Element` yields the
  # driver's Element submodule (e.g. Driver.Element).
  defp element_module do
    {:ok, driver_info} = Hound.driver_info
    driver_info[:driver_type].Element
  end
end
|
lib/hound/helpers/element.ex
| 0.726523
| 0.442757
|
element.ex
|
starcoder
|
defmodule Flop.Meta do
  @moduledoc """
  Defines a struct that holds the meta information of a query result.
  """

  @typedoc """
  Meta information for a query result.

  - `:flop` - The `Flop` struct used in the query.
  - `:current_offset` - The `:offset` value used in the query when using
    offset-based pagination or a derived value when using page-based pagination.
    Always `nil` when using cursor-based pagination.
  - `:current_page` - The `:page` value used in the query when using page-based
    pagination or a derived value when using offset-based pagination. Note that
    the value will be rounded if the offset lies between pages. Always `nil`
    when using cursor-based pagination.
  - `:previous_offset`, `:next_offset`, `:previous_page`, `:next_page` - Values
    based on `:current_page` and `:current_offset`/`page_size`. Always `nil`
    when using cursor-based pagination.
  - `:start_cursor`, `:end_cursor` - The cursors of the first and last record
    in the result set. Only set when using cursor-based pagination with
    `:first`/`:after` or `:last`/`:before`.
  - `:has_previous_page?`, `:has_next_page?` - Set in all pagination types.
    Note that `:has_previous_page?` is always `false` when using cursor-based
    pagination with `:first`/`after` and `:has_next_page?` is always `false`
    when using cursor-based pagination with `:last`/`:before`.
  - `:page_size` - The page size or limit of the query. Set to the `:first`
    or `:last` parameter when using cursor-based pagination.
  - `:total_count` - The total count of records for the given query. Always
    `nil` when using cursor-based pagination.
  - `:total_pages` - The total page count based on the total record count and
    the page size. Always `nil` when using cursor-based pagination.
  """
  @type t :: %__MODULE__{
          current_offset: non_neg_integer | nil,
          current_page: pos_integer | nil,
          end_cursor: String.t() | nil,
          flop: Flop.t(),
          has_next_page?: boolean,
          has_previous_page?: boolean,
          next_offset: non_neg_integer | nil,
          next_page: pos_integer | nil,
          page_size: pos_integer | nil,
          previous_offset: non_neg_integer | nil,
          previous_page: pos_integer | nil,
          start_cursor: String.t() | nil,
          total_count: non_neg_integer | nil,
          total_pages: non_neg_integer | nil
        }

  # All fields default to nil; kept in one attribute so the struct and any
  # future field-driven logic share a single source of truth.
  @struct_fields [
    :current_offset,
    :current_page,
    :end_cursor,
    :flop,
    :has_next_page?,
    :has_previous_page?,
    :next_offset,
    :next_page,
    :page_size,
    :previous_offset,
    :previous_page,
    :start_cursor,
    :total_count,
    :total_pages
  ]

  defstruct @struct_fields
end
|
lib/flop/meta.ex
| 0.91358
| 0.861072
|
meta.ex
|
starcoder
|
defmodule Snitch.Data.Model.TaxRate do
  @moduledoc """
  Model functions TaxRate.
  """
  use Snitch.Data.Model
  alias Snitch.Data.Schema.TaxRate

  @doc """
  Creates a TaxRate with supplied `params`.

  > ### Note
  The `calculator` field should be converted to an `atom`
  before passing in the `params` map.
  """
  @spec create(map) :: {:ok, TaxRate.t()} | {:error, Ecto.Changeset.t()}
  def create(params), do: QH.create(TaxRate, params, Repo)

  @doc """
  Updates an existing TaxRate with supplied `params`.

  > ### Note
  The `calculator` field should be converted to an `atom`
  before passing in the `params` map.
  """
  @spec update(map, TaxRate.t() | nil) ::
          {:ok, TaxRate.t()}
          | {:error, Ecto.Changeset.t()}
  def update(params, instance \\ nil), do: QH.update(TaxRate, params, instance, Repo)

  @doc """
  Soft deletes a TaxRate by setting its `deleted_at` timestamp.

  Takes as input the `instance` or `id` of the TaxRate to be deleted.
  """
  @spec delete(TaxRate.t() | integer) ::
          {:ok, TaxRate.t()}
          | {:error, Ecto.Changeset.t()}
  def delete(id) when is_integer(id) do
    QH.update(TaxRate, %{deleted_at: DateTime.utc_now(), id: id}, Repo)
  end

  def delete(instance) do
    QH.update(TaxRate, %{deleted_at: DateTime.utc_now()}, instance, Repo)
  end

  @doc """
  Returns a TaxRate.

  Takes as input an `id` field and an `active` flag. When `active` is
  false, a TaxRate is returned even if it is _soft deleted_.

  > Note, by default only a tax rate which is present in the table
  and is __not soft deleted__ is returned.
  """
  @spec get(integer, boolean) :: TaxRate.t() | nil
  def get(id, active \\ true) do
    if active do
      from(tax_rate in TaxRate, where: is_nil(tax_rate.deleted_at) and tax_rate.id == ^id)
      |> Repo.one()
    else
      QH.get(TaxRate, id, Repo)
    end
  end

  @doc """
  Returns a `list` of available tax rates.

  Takes an `active` flag. When `active` is false, all tax rates are
  returned, including those which are _soft deleted_.

  > Note the function returns only those tax rates which are not soft
  deleted by default or if `active` is set to true.
  """
  @spec get_all(boolean) :: [TaxRate.t()]
  def get_all(active \\ true) do
    if active do
      from(tax_rate in TaxRate, where: is_nil(tax_rate.deleted_at))
      |> Repo.all()
    else
      Repo.all(TaxRate)
    end
  end
end
|
apps/snitch_core/lib/core/data/model/tax_rate.ex
| 0.865153
| 0.677787
|
tax_rate.ex
|
starcoder
|
defmodule Gradient.Tokens do
  @moduledoc """
  Functions useful for token management.
  """

  alias Gradient.Types, as: T

  @typedoc "Type of conditional with following tokens"
  @type conditional_t() ::
          {:case, T.tokens()}
          | {:cond, T.tokens()}
          | {:unless, T.tokens()}
          | {:if, T.tokens()}
          | {:with, T.tokens()}
          | :undefined

  @doc """
  Drop tokens to the first conditional occurrence. Returns type of the encountered
  conditional and the following tokens.
  """
  @spec get_conditional(T.tokens(), integer(), T.options()) :: conditional_t()
  def get_conditional(tokens, line, opts) do
    conditionals = [:if, :unless, :cond, :case, :with]
    {:ok, limit_line} = Keyword.fetch(opts, :end_line)

    # Skip tokens until an identifier naming one of the conditionals shows up.
    # NOTE(review): tokens appear to be 3-tuples {type, {line, col, _}, value}
    # as produced by the Elixir tokenizer — confirm against Gradient.Types.
    drop_tokens_while(tokens, limit_line, fn
      {:do_identifier, _, c} -> c not in conditionals
      {:paren_identifier, _, c} -> c not in conditionals
      {:identifier, _, c} -> c not in conditionals
      _ -> true
    end)
    |> case do
      # Only accept a conditional that starts exactly on the requested line.
      [token | _] = tokens when elem(elem(token, 1), 0) == line -> {elem(token, 2), tokens}
      _ -> :undefined
    end
  end

  @doc """
  Drop tokens to the first list occurrence. Returns type of the encountered
  list and the following tokens.
  """
  @spec get_list(T.tokens(), T.options()) ::
          {:list, T.tokens()} | {:keyword, T.tokens()} | {:charlist, T.tokens()} | :undefined
  def get_list(tokens, opts) do
    # Flatten first so tokens nested in interpolations/binaries are visible.
    tokens = flatten_tokens(tokens)
    {:ok, limit_line} = Keyword.fetch(opts, :end_line)

    res =
      drop_tokens_while(tokens, limit_line, fn
        {:"[", _} -> false
        {:list_string, _, _} -> false
        # `do:` opens a block, not a keyword list literal, so it is skipped.
        {:kw_identifier, _, id} when id not in [:do] -> false
        _ -> true
      end)

    case res do
      [{:"[", _} | _] = list -> {:list, list}
      [{:list_string, _, _} | _] = list -> {:charlist, list}
      [{:kw_identifier, _, _} | _] = list -> {:keyword, list}
      _ -> :undefined
    end
  end

  @doc """
  Drop tokens to the first tuple occurrence. Returns type of the encountered
  list and the following tokens.
  """
  @spec get_tuple(T.tokens(), T.options()) ::
          {:tuple, T.tokens()} | :undefined
  def get_tuple(tokens, opts) do
    {:ok, limit_line} = Keyword.fetch(opts, :end_line)

    # A bare keyword head (kw_identifier) is also accepted as a tuple start,
    # since `a: 1` desugars to the tuple {:a, 1}.
    res =
      drop_tokens_while(tokens, limit_line, fn
        {:"{", _} -> false
        {:kw_identifier, _, _} -> false
        _ -> true
      end)

    case res do
      [{:"{", _} | _] = tuple -> {:tuple, tuple}
      [{:kw_identifier, _, _} | _] = tuple -> {:tuple, tuple}
      _ -> :undefined
    end
  end

  @doc """
  Drop tokens till the matcher returns false or the token's line exceeds the limit.
  """
  @spec drop_tokens_while(T.tokens(), integer(), (T.token() -> boolean())) :: T.tokens()
  # A negative limit_line (default -1) disables the line limit entirely.
  def drop_tokens_while(tokens, limit_line \\ -1, matcher)

  def drop_tokens_while([], _, _), do: []

  def drop_tokens_while([token | tokens] = all, limit_line, matcher) do
    line = get_line_from_token(token)
    limit_passed = limit_line < 0 or line < limit_line

    cond do
      # Matcher says "keep dropping" and we are still within the limit.
      matcher.(token) and limit_passed ->
        drop_tokens_while(tokens, limit_line, matcher)

      # Past the limit line: nothing qualifies, return no tokens.
      not limit_passed ->
        []

      # Matcher returned false: this token starts the sought construct.
      true ->
        all
    end
  end

  @doc """
  Drop tokens while the token's line is lower than the given location.
  """
  @spec drop_tokens_to_line(T.tokens(), integer()) :: T.tokens()
  def drop_tokens_to_line(tokens, line) do
    Enum.drop_while(tokens, fn t ->
      elem(elem(t, 1), 0) < line
    end)
  end

  @doc """
  Get line from token.
  """
  @spec get_line_from_token(T.token()) :: integer()
  def get_line_from_token(token), do: elem(elem(token, 1), 0)

  # Extracts the line number from an Erlang abstract-format form, whose
  # second element is an annotation understood by :erl_anno.
  def get_line_from_form(form) do
    form
    |> elem(1)
    |> :erl_anno.line()
  end

  @doc """
  Drop the tokens to binary occurrence and then collect all belonging tokens.
  Return tuple where the first element is a list of tokens making up the binary, and the second
  element is a list of tokens after the binary.
  """
  @spec cut_tokens_to_bin(T.tokens(), integer()) :: {T.tokens(), T.tokens()}
  def cut_tokens_to_bin(tokens, line) do
    tokens = drop_tokens_to_line(tokens, line)

    # Uses the drop_tokens_while/2 default (-1), i.e. no line limit here.
    drop_tokens_while(tokens, fn
      {:"<<", _} -> false
      {:bin_string, _, _} -> false
      _ -> true
    end)
    |> case do
      # Explicit `<<...>>` construct: collect tokens up to the matching `>>`.
      [{:"<<", _} | _] = ts -> cut_bottom(ts, 0)
      # A plain string literal is a complete binary on its own.
      [{:bin_string, _, _} = t | ts] -> {[t], ts}
      # No binary found; return the tokens from `line` onward untouched.
      [] -> {[], tokens}
    end
  end

  @doc """
  Flatten the tokens, mostly binaries or string interpolation.
  """
  @spec flatten_tokens(T.tokens()) :: T.tokens()
  def flatten_tokens(tokens) do
    Enum.map(tokens, &flatten_token/1)
    |> Enum.concat()
  end

  # Private

  # Expands a single token into a flat list of tokens, recursing into
  # interpolated string segments.
  defp flatten_token(token) do
    case token do
      # Simple binary string without interpolation: keep as-is.
      {:bin_string, _, [s]} = t when is_binary(s) ->
        [t]

      # Interpolated string: flatten its segment tokens.
      {:bin_string, _, ts} ->
        flatten_tokens(ts)

      # Interpolation segment wrapper `{start_loc, end_loc, tokens}`.
      {{_, _, nil}, {_, _, nil}, ts} ->
        flatten_tokens(ts)

      # Bare string segment: wrap into a synthetic :str token with a dummy
      # location so downstream code can treat it uniformly.
      str when is_binary(str) ->
        [{:str, {0, 0, nil}, str}]

      _otherwise ->
        [token]
    end
  end

  # Collects tokens of a `<<...>>` binary, tracking `<<`/`>>` nesting depth;
  # returns {binary_tokens, remaining_tokens}.
  defp cut_bottom([{:"<<", _} = t | ts], deep) do
    {ts, cut_ts} = cut_bottom(ts, deep + 1)
    {[t | ts], cut_ts}
  end

  defp cut_bottom([{:">>", _} = t | ts], deep) do
    if deep - 1 > 0 do
      # Still inside a nested binary; keep collecting.
      {ts, cut_ts} = cut_bottom(ts, deep - 1)
      {[t | ts], cut_ts}
    else
      # Outermost `>>` closes the binary; the rest is the remainder.
      {[t], ts}
    end
  end

  defp cut_bottom([t | ts], deep) do
    {ts, cut_ts} = cut_bottom(ts, deep)
    {[t | ts], cut_ts}
  end
end
|
lib/gradient/tokens.ex
| 0.848471
| 0.521715
|
tokens.ex
|
starcoder
|
defmodule LiveProps.LiveView do
  @moduledoc ~S'''
  Use this module inside a Phoenix.LiveView to add state to your LiveView.

  ### LiveView Lifecycle with LiveProps

  LiveProps injects lightweight `c:Phoenix.LiveView.mount/3` and
  `c:Phoenix.LiveView.handle_info/2` callbacks to help manage state.

  If you define your own mount, it will be run after defaults states have been assigned
  but before computed states.

  The reason we assign computed states last is because they may depend on data from params
  or session. LiveProps does not handle params and session so you will need to manually
  assign them in your own mount callback, if needed.

  ### Example

      defmodule ThermostatLive do
        # If you generated an app with mix phx.new --live,
        # the line below would be: use MyAppWeb, :live_view
        use Phoenix.LiveView
        use LiveProps.LiveView

        state :user_id, :integer
        state :scale, :atom, default: :fahrenheit
        state :temperature, :float, compute: :get_temperature

        def render(assigns) do
          ~L"""
          <div>
            Current temperature is <%= @temperature %>
          </div>
          """
        end

        def mount(_, %{"current_user_id" => user_id}, socket) do
          # socket.assigns.scale already has a default value
          {:ok, assign(socket, :user_id, user_id)}
        end

        def get_temperature(assigns) do
          Temperature.get_user_reading(assigns.user_id, assigns.scale)
        end
      end

  First we defined a `:user_id` state. This doesn't really do anything other than serve
  as documentation, since we assign it manually in the mount callback.
  Still, depending on your preferences, you may find it helpful to have a list of all assigns in one place.

  Next we defined the `:scale` state and gave it a default value. This value will be assigned automatically
  on mount and will be available in any custom mount you define.

  Finally we defined the `:temperature` state, with the options `compute: :get_temperature`. This means
  this state will be calculated by the `get_temperature/1` function, which takes the current assigns
  as an argument and returns the value to be assigned.
  '''
  import Phoenix.LiveView

  defmacro __using__(_) do
    quote do
      # Pull in only the `state` macros and register the hook that injects
      # the default mount/3 and handle_info/2 callbacks below.
      use LiveProps, include: [:state]
      @before_compile unquote(__MODULE__)
    end
  end

  defmacro __before_compile__(env) do
    [
      quoted_handle_info(env),
      quoted_mount(env)
    ]
  end

  # Injects a handle_info/2 clause that routes LiveProps-internal messages
  # (tagged {:liveprops, event, args}) to __handle_info__/3.
  defp quoted_handle_info(_env) do
    quote do
      def handle_info({:liveprops, event, args}, socket) do
        LiveProps.LiveView.__handle_info__({event, args}, socket, __MODULE__)
      end
    end
  end

  # Injects mount/3. If the module already defines mount/3, the user's
  # implementation is wrapped (via defoverridable/super) so it runs between
  # default-state and computed-state assignment; otherwise a plain mount is
  # generated.
  defp quoted_mount(env) do
    if Module.defines?(env.module, {:mount, 3}) do
      quote do
        defoverridable mount: 3

        def mount(params, session, socket) do
          callback = fn socket -> super(params, session, socket) end
          LiveProps.LiveView.__mount__(params, session, socket, __MODULE__, callback)
        end
      end
    else
      quote do
        def mount(params, session, socket) do
          LiveProps.LiveView.__mount__(params, session, socket, __MODULE__)
        end
      end
    end
  end

  # Runtime mount implementation: assigns default states, runs the user's
  # mount (if any), then assigns computed states last.
  # NOTE(review): assumes the user callback returns {:ok, socket} or
  # {:ok, socket, options} — any other return raises a CaseClauseError.
  def __mount__(_params, _session, socket, module, callback \\ nil) do
    # Schedule async state assignment once the client connection is live.
    if connected?(socket), do: send(self(), {:liveprops, :after_connect, []})

    socket
    |> LiveProps.__assign_states__(:defaults, module)
    |> maybe_call_callback(callback)
    |> case do
      {:ok, socket} ->
        socket = LiveProps.__assign_states__(socket, :computed, module)
        {:ok, socket}

      {:ok, socket, options} ->
        socket = LiveProps.__assign_states__(socket, :computed, module)
        {:ok, socket, options}
    end
  end

  # Dispatches a {:liveprops, event, args} message to the matching function
  # in this module (e.g. after_connect/2).
  def __handle_info__({event, args}, socket, module) do
    apply(__MODULE__, event, [socket, module] ++ args)
  end

  @doc false
  # Runs once after the client connects; assigns async-computed states.
  def after_connect(socket, module) do
    {:noreply,
     socket
     |> LiveProps.__assign_states__(:async, module)}
  end

  # With no user mount defined, the default-assigned socket is passed
  # through unchanged in {:ok, _} form.
  defp maybe_call_callback(socket, nil), do: {:ok, socket}

  defp maybe_call_callback(socket, callback) do
    callback.(socket)
  end
end
|
lib/live_props/live_view.ex
| 0.858125
| 0.457924
|
live_view.ex
|
starcoder
|
defmodule ExAliyunOts do
@moduledoc ~S"""
The `ExAliyunOts` module provides a tablestore-based API as a client for working with Alibaba TableStore product servers.
Here are links to official documents in [Chinese](https://help.aliyun.com/document_detail/27280.html) | [English](https://www.alibabacloud.com/help/product/27278.html)
## Configuration
config :ex_aliyun_ots, :my_instance
name: "MyInstanceName",
endpoint: "MyInstanceEndpoint",
access_key_id: "MyAliyunRAMKeyID",
access_key_secret: "MyAliyunRAMKeySecret"
config :ex_aliyun_ots,
instances: [:my_instance],
debug: false,
enable_tunnel: false
* `debug`, optional, specifies whether to enable debug logger, by default it's false, and please DO NOT use debug mode in production.
* `enable_tunnel`, optional, specifies whether to enable tunnel functions, there will startup tunnel related `Supervisor` and `Registry` when enable it, by default it's false.
## Using ExAliyunOts
To use `ExAliyunOts`, a module that calls `use ExAliyunOts` has to be defined:
defmodule MyApp.TableStore do
use ExAliyunOts, instance: :my_instance
end
This automatically defines some macros and functions in the `MyApp.TableStore` module, here are some examples:
import MyApp.TableStore
# Create table
create_table "table",
[{"pk1", :integer}, {"pk2", :string}]
# Put row
put_row "table",
[{"pk1", "id1"}],
[{"attr1", 10}, {"attr2", "attr2_value"}],
condition: condition(:expect_not_exist),
return_type: :pk
# Search index
search "table", "index_name",
search_query: [
query: match_query("age", 28),
sort: [
field_sort("age", order: :desc)
]
]
# Local transaction
start_local_transaction "table", {"partition_key", "partition_value"}
## ExAliyunOts API
There are two ways to use ExAliyunOts:
* using macros and functions from your own ExAliyunOts module, like `MyApp.TableStore`.
* using macros and functions from the `ExAliyunOts` module.
All defined functions and macros in `ExAliyunOts` are available and referable for your own ExAliyunOts module as well, except that the given arity of functions may
different, because the `instance` parameter of each invoke request is NOT needed from your own ExAliyunOts module although the `ExAliyunOts` module defines it.
"""
require ExAliyunOts.Const.OperationType, as: OperationType
alias ExAliyunOts.{Var, Client, Utils}
alias ExAliyunOts.TableStore.{ReturnType, Direction}
@before_compile ExAliyunOts.MergeCompiler
@type instance :: atom
@type table_name :: String.t()
@type primary_keys :: list
@type inclusive_start_primary_keys :: list
@type exclusive_end_primary_keys :: list
@type index_name :: String.t()
@type options :: Keyword.t()
@type result :: {:ok, map()} | {:error, ExAliyunOts.Error.t()}
require Logger
# Injects the per-instance client API into the calling module.
# Stores the configured `:instance` in @instance, pulls in the constants
# and DSL macros, and registers the compiler hook that generates the
# instance-bound wrapper functions.
defmacro __using__(opts \\ []) do
  # Expand aliases/macros in `opts` in the caller's context so values such
  # as module aliases are resolved before being embedded in the quote.
  opts = Macro.prewalk(opts, &Macro.expand(&1, __CALLER__))

  quote do
    @instance Keyword.get(unquote(opts), :instance)
    use ExAliyunOts.Constants
    import ExAliyunOts.DSL
    @before_compile ExAliyunOts.Compiler
  end
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27312.html) | [English](https://www.alibabacloud.com/help/doc-detail/27312.html)

## Example

    create_table "table_name2",
      [{"key1", :string}, {"key2", :auto_increment}]

    create_table "table_name3",
      [{"key1", :string}],
      reserved_throughput_write: 1,
      reserved_throughput_read: 1,
      time_to_live: 100_000,
      max_versions: 3,
      deviation_cell_version_in_sec: 6_400,
      stream_spec: [is_enabled: true, expiration_time: 2]

    create_table "table_name",
      [{"key1", :string}],
      defined_columns: [
        {"attr1", :string},
        {"attr2", :integer},
        {"attr3", :boolean},
        {"attr4", :double},
        {"attr5", :binary}
      ]

    create_table "table_name",
      [{"key1", :string}],
      index_metas: [
        {"indexname1", ["key1"], ["attr1", "attr2"]},
        {"indexname2", ["key1"], ["attr4"]}
      ]

## Options

  * `:reserved_throughput_write`, optional, the reserved throughput write of table, by default it is 0.
  * `:reserved_throughput_read`, optional, the reserved throughput read of table, by default it is 0.
  * `:time_to_live`, optional, the data storage time to live in seconds, the minimum settable value is 864_000 seconds (one day), by default it is -1 (for permanent).
  * `:max_versions`, optional, the version of table, by default it is 1 that specifies there is only one version for columns.
  * `:deviation_cell_version_in_sec`, optional, maximum version deviation, by default it is 864_000 seconds (one day).
  * `:stream_spec`, specifies whether enable stream, by default it is not enable stream feature.
    - `:is_enabled`, enable or not enable stream, use `true` or `false`;
    - `:expiration_time`, the expiration time of stream.
  * `:index_metas`, optional, the index meta of table, each item of `:index_metas` is in {String.t(), list(), list()} format, by default it is [].
  * `:defined_columns`, optional, the indexed attribute column, which is a combination of predefined columns of the base table, each item of `:defined_columns`
    is in {String.t(), :integer | :double | :boolean | :string | :binary} format, by default it is [].
"""
@doc table: :table
@spec create_table(instance, table_name, primary_keys, options) ::
        :ok | {:error, ExAliyunOts.Error.t()}
def create_table(instance, table_name, primary_keys, options \\ []) do
  # Fold the keyword options into the request struct, then issue the request.
  prepared_var =
    %Var.CreateTable{table_name: table_name, primary_keys: primary_keys}
    |> map_options(options)

  Client.create_table(instance, prepared_var)
end
@doc """
Create global secondary indexes. Official document in [Chinese](https://help.aliyun.com/document_detail/91947.html) | [English](https://www.alibabacloud.com/help/doc-detail/91947.html)

## Example

    create_index "table_name",
      "table_index_name1",
      ["pk1", "pk2", "col0"],
      ["col1", "col2"]

    create_index "table_name",
      "table_index_name2",
      ["col0", "pk1"],
      ["col1", "col2", "col3"],
      include_base_data: false

## Options

  * `:index_update_mode`, the update mode of the index table, optional, currently only support `:IUM_ASYNC_INDEX`,
    by default it is `:IUM_ASYNC_INDEX`;
  * `:index_type`, the type of the index table, optional, currently only support `:IT_GLOBAL_INDEX`,
    by default it is `:IT_GLOBAL_INDEX`;
  * `:include_base_data`, specifies whether the index table includes the existing data in the base table, if set it to
    `true` means the index includes the existing data, if set it to `false` means the index excludes the existing data,
    optional, by default it is `true`.
"""
@doc table: :table
@spec create_index(
        instance,
        table_name,
        index_name,
        primary_keys :: [String.t()],
        defined_columns :: [String.t()],
        options
      ) :: :ok | {:error, ExAliyunOts.Error.t()}
def create_index(instance, table_name, index_name, primary_keys, defined_columns, options \\ []) do
  # Pure pass-through: the low-level client builds the CreateIndex request.
  Client.create_index(instance, table_name, index_name, primary_keys, defined_columns, options)
end
@doc """
Delete a global secondary index. Official document in [Chinese](https://help.aliyun.com/document_detail/94558.html) | [English](https://www.alibabacloud.com/help/doc-detail/94558.html)

## Example

    import MyApp.TableStore

    delete_index("table_name", "index_name")
"""
@doc table: :table
@spec delete_index(instance, table_name, index_name) :: :ok | {:error, ExAliyunOts.Error.t()}
def delete_index(instance, table_name, index_name) do
  Client.delete_index(instance, table_name, index_name)
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27314.html) | [English](https://www.alibabacloud.com/help/doc-detail/27314.html)
## Example
import MyApp.TableStore
delete_table("table_name")
"""
@doc table: :table
@spec delete_table(instance, table_name) :: :ok | {:error, ExAliyunOts.Error.t()}
# Thin delegate: the low-level client issues the DeleteTable request.
defdelegate delete_table(instance, table_name), to: Client
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27313.html) | [English](https://www.alibabacloud.com/help/doc-detail/27313.html)
## Example
import MyApp.TableStore
list_table()
"""
@doc table: :table
@spec list_table(instance) :: result
# Thin delegate: the low-level client issues the ListTable request.
defdelegate list_table(instance), to: Client
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27315.html) | [English](https://www.alibabacloud.com/help/doc-detail/27315.html)

## Example

    import MyApp.TableStore

    update_table "table_name",
      reserved_throughput_write: 10,
      time_to_live: 200_000,
      stream_spec: [is_enabled: false]

## Options

Please see options of `create_table/4`.
"""
@doc table: :table
@spec update_table(instance, table_name, options) :: result
def update_table(instance, table_name, options \\ []) do
  prepared_var = map_options(%Var.UpdateTable{table_name: table_name}, options)
  Client.update_table(instance, prepared_var)
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27307.html) | [English](https://www.alibabacloud.com/help/doc-detail/27307.html)
## Example
import MyApp.TableStore
describe_table(table_name)
"""
@doc table: :table
@spec describe_table(instance, table_name) :: result
# NOTE(review): the document id 27307 above is also used by `update_row/4` —
# confirm which of the two links points at the correct official page.
# Thin delegate: the low-level client issues the DescribeTable request.
defdelegate describe_table(instance, table_name), to: Client
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/53813.html) | [English](https://www.alibabacloud.com/help/doc-detail/53813.html)
"""
@doc table: :table
@spec compute_split_points_by_size(instance, table_name, splits_size :: integer()) ::
result
# Thin delegate to the low-level client.
defdelegate compute_split_points_by_size(instance, table_name, splits_size), to: Client
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27310.html) | [English](https://www.alibabacloud.com/help/doc-detail/27310.html)
## Example
import MyApp.TableStore
batch_get [
get(table_name1, [[{"key1", 1}, {"key2", "1"}]]),
get(
table_name2,
[{"key1", "key1"}],
columns_to_get: ["name", "age"],
filter: filter "age" >= 10
)
]
The batch get operation can be considered as a collection of multiple `get/3` operations.
"""
@doc row: :row
@spec batch_get(instance, requests :: list()) :: result
# Delegates to the client's BatchGetRow API; build each request with `get/3`.
defdelegate batch_get(instance, requests), to: Client, as: :batch_get_row
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27311.html) | [English](https://www.alibabacloud.com/help/doc-detail/27311.html)

## Example

    import MyApp.TableStore

    batch_write [
      {"table1", [
        write_delete([{"key1", 5}, {"key2", "5"}],
          return_type: :pk,
          condition: condition(:expect_exist, "attr1" == 5)),
        write_put([{"key1", 6}, {"key2", "6"}],
          [{"new_put_val1", "val1"}, {"new_put_val2", "val2"}],
          condition: condition(:expect_not_exist),
          return_type: :pk)
      ]},
      {"table2", [
        write_update([{"key1", "new_tab3_id2"}],
          put: [{"new_put1", "u1"}, {"new_put2", 2.5}],
          condition: condition(:expect_not_exist)),
        write_put([{"key1", "new_tab3_id3"}],
          [{"new_put1", "put1"}, {"new_put2", 10}],
          condition: condition(:expect_not_exist))
      ]}
    ]

The batch write operation can be considered as a collection of multiple `write_put/3`, `write_update/2` and `write_delete/2` operations.

## Options

  * `:transaction_id`, optional, batch write operation within local transaction.
  * `:is_atomic`, optional, defaults to false, whether set this batch write request be with an atomic operation, if this option is `true`,
    keep the partition key of each table in the batch write operation is unique, or the corresponding write operation of the table will fail.
"""
@doc row: :row
# NOTE: a single `{table_name, rows}` tuple is accepted as well as a list,
# so the spec covers both shapes (the original spec only allowed a list).
@spec batch_write(instance, requests :: list() | {table_name, list()}, options) :: result
def batch_write(instance, requests, options \\ [])

def batch_write(instance, requests, options) when is_list(requests) do
  # One BatchWriteRequest per {table_name, rows} pair.
  batch_write_requests =
    Enum.map(requests, fn {table_name, write_rows} ->
      %Var.BatchWriteRequest{table_name: table_name, rows: write_rows}
    end)

  Client.batch_write_row(instance, batch_write_requests, options)
end

def batch_write(instance, {table_name, write_rows}, options) do
  # Single-table convenience form.
  Client.batch_write_row(
    instance,
    %Var.BatchWriteRequest{table_name: table_name, rows: write_rows},
    options
  )
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27305.html) | [English](https://www.alibabacloud.com/help/doc-detail/27305.html)

## Example

    import MyApp.TableStore

    get_row "table1",
      [{"key1", "id1"}, {"key2", "id2"}],
      columns_to_get: ["name", "level"],
      filter: filter(("name[ignore_if_missing: true, latest_version_only: true]" == var_name and "age" > 1) or ("class" == "1"))

    get_row "table2",
      [{"key", "1"}],
      start_column: "room",
      filter: pagination(offset: 0, limit: 3)

    get_row "table3",
      [{"key", "1"}],
      transaction_id: "transaction_id"

## Options

  * `:columns_to_get`, optional, fetch the special fields, by default it returns all fields, pass a field list to specify the expected return fields
    e.g. `["field1", "field2"]`.
  * `:start_column`, optional, specifies the start column when using for wide-row-read, the returned result contains this `:start_column`.
  * `:end_column`, optional, specifies the end column when using for wide-row-read, the returned result does not contain this `:end_column`.
  * `:filter`, optional, filter the return results in the server side, please see `filter/1` for details.
  * `:max_versions`, optional, how many versions need to return in results, by default it is 1.
  * `:time_range`, optional, read data by timestamp range, support two ways to use it:
    - `time_range: {start_timestamp, end_timestamp}`, the timestamp in the range (include `start_timestamp` but exclude `end_timestamp`)
      and then will return in the results.
    - `time_range: special_timestamp`, exactly match and then will return in the results.
    - `:time_range` and `:max_versions` are mutually exclusive, by default use `max_versions: 1` and `time_range: nil`.
  * `:transaction_id`, optional, read operation within local transaction.
"""
@doc row: :row
@spec get_row(instance, table_name, primary_keys, options) :: result
def get_row(instance, table_name, primary_keys, options \\ []) do
  # `get/3` builds the request struct shared with `batch_get/2`.
  Client.get_row(instance, get(table_name, primary_keys, options))
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27306.html) | [English](https://www.alibabacloud.com/help/doc-detail/27306.html)

## Example

    import MyApp.TableStore

    put_row "table1",
      [{"key1", "id1"}],
      [{"name", "name1"}, {"age", 20}],
      condition: condition(:expect_not_exist),
      return_type: :pk

    put_row "table2",
      [{"key1", "id1"}],
      [{"name", "name1"}, {"age", 20}],
      condition: condition(:expect_not_exist),
      transaction_id: "transaction_id",
      return_type: :pk

## Options

  * `:condition`, required, please see `condition/1` or `condition/2` for details.
  * `:return_type`, optional, whether return the primary keys after put row, available options are `:pk` | `:none`, by default it is `:none`.
  * `:transaction_id`, optional, write operation within local transaction.
"""
@doc row: :row
# The original spec omitted the `attrs` argument (4 params for a 5-arity
# function); it is included here so the spec matches the definition.
@spec put_row(instance, table_name, primary_keys, attrs :: list(), options) :: result
def put_row(instance, table_name, primary_keys, attrs, options \\ []) do
  prepared_var =
    %Var.PutRow{
      table_name: table_name,
      primary_keys: primary_keys,
      attribute_columns: attrs
    }
    |> map_options(options)

  Client.put_row(instance, prepared_var)
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27307.html) | [English](https://www.alibabacloud.com/help/doc-detail/27307.html)

## Example

    import MyApp.TableStore

    value = "1"
    update_row "table1",
      [{"key1", 2}, {"key2", "2"}],
      delete: [{"attr2", nil, 1524464460}],
      delete_all: ["attr1"],
      put: [{"attr3", "put_attr3"}],
      return_type: :pk,
      condition: condition(:expect_exist, "attr2" == value)

    update_row "table2",
      [{"key1", 1}],
      put: [{"attr1", "put_attr1"}],
      increment: [{"count", 1}],
      return_type: :after_modify,
      return_columns: ["count"],
      condition: condition(:ignore)

    update_row "table3",
      [partition_key],
      put: [{"new_attr1", "a1"}],
      delete_all: ["level", "size"],
      condition: condition(:ignore),
      transaction_id: "transaction_id"

## Options

  * `:put`, optional, require to be valid value, e.g. `[{"field1", "value"}, {...}]`, insert a new column if this field is not existed, or overwrite this field if existed.
  * `:delete`, optional, delete the special version of a column or columns, please pass the column's version (timestamp) in `:delete` option, e.g. [{"field1", nil, 1524464460}, ...].
  * `:delete_all`, optional, delete all versions of a column or columns, e.g. ["field1", "field2", ...].
  * `:increment`, optional, attribute column(s) base on atomic counters for increment or decrement, require the value of column is integer.
    - for increment, `increment: [{"count", 1}]`;
    - for decrement, `increment: [{"count", -1}]`.
  * `:return_type`, optional, whether return the primary keys after update row, available options are `:pk` | `:none` | `:after_modify`, by default it is `:none`.
    - if use atomic counters, must set `return_type: :after_modify`.
  * `:condition`, required, please see `condition/1` or `condition/2` for details.
  * `:transaction_id`, optional, write operation within local transaction.
"""
@doc row: :row
@spec update_row(instance, table_name, primary_keys, options) :: result
def update_row(instance, table_name, primary_keys, options \\ []) do
  var =
    map_options(
      %Var.UpdateRow{table_name: table_name, primary_keys: primary_keys},
      options
    )

  # The update operations (:put/:delete/:delete_all/:increment) are collected
  # separately from the generic options.
  Client.update_row(instance, Map.put(var, :updates, map_updates(options)))
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27308.html) | [English](https://www.alibabacloud.com/help/doc-detail/27308.html)

## Example

    import MyApp.TableStore

    delete_row "table1",
      [{"key1", 3}, {"key2", "3"}],
      condition: condition(:expect_exist, "attr2" == "value2")

    delete_row "table1",
      [{"key1", 3}, {"key2", "3"}],
      condition: condition(:expect_exist, "attr2" == "value2"),
      transaction_id: "transaction_id"

## Options

  * `:condition`, required, please see `condition/1` or `condition/2` for details.
  * `:transaction_id`, optional, write operation within local transaction.
"""
@doc row: :row
@spec delete_row(instance, table_name, primary_keys, options) :: result
def delete_row(instance, table_name, primary_keys, options \\ []) do
  var =
    map_options(
      %Var.DeleteRow{table_name: table_name, primary_keys: primary_keys},
      options
    )

  Client.delete_row(instance, var)
end
@doc """
Used in batch get operation, please see `batch_get/2` for details.

## Options

The available options are same as `get_row/4`.
"""
@doc row: :row
@spec get(table_name, primary_keys, options) :: map()
def get(table_name, primary_keys, options \\ []) do
  map_options(%Var.GetRow{table_name: table_name, primary_keys: primary_keys}, options)
end
@doc """
Used in batch write operation, please see `batch_write/2` for details.

## Options

The available options are same as `put_row/5`.
"""
@doc row: :row
@spec write_put(primary_keys, attrs :: list(), options) :: map()
def write_put(primary_keys, attrs, options \\ []) do
  request = %Var.RowInBatchWriteRequest{
    type: :PUT,
    primary_keys: primary_keys,
    updates: attrs
  }

  map_options(request, options)
end
@doc """
Used in batch write operation, please see `batch_write/2` for details.

## Options

The available options are same as `update_row/4`.
"""
@doc row: :row
@spec write_update(primary_keys, options) :: map()
def write_update(primary_keys, options \\ []) do
  request = %Var.RowInBatchWriteRequest{
    type: :UPDATE,
    primary_keys: primary_keys,
    updates: map_updates(options)
  }

  map_options(request, options)
end
@doc """
Used in batch write operation, please see `batch_write/2` for details.

## Options

The available operation same as `delete_row/4`.
"""
@doc row: :row
@spec write_delete(primary_keys, options) :: map()
def write_delete(primary_keys, options \\ []) do
  map_options(
    %Var.RowInBatchWriteRequest{type: :DELETE, primary_keys: primary_keys},
    options
  )
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/27309.html) | [English](https://www.alibabacloud.com/help/doc-detail/27309.html)

## Example

    import MyApp.TableStore

    get_range "table_name",
      [{"key1", 1}, {"key2", :inf_min}],
      [{"key1", 4}, {"key2", :inf_max}],
      direction: :forward

    get_range "table_name",
      [{"key1", 1}, {"key2", :inf_min}],
      [{"key1", 4}, {"key2", :inf_max}],
      time_range: {1525922253224, 1525923253224},
      direction: :forward

    get_range "table_name",
      [{"key1", 1}, {"key2", :inf_min}],
      [{"key1", 4}, {"key2", :inf_max}],
      time_range: 1525942123224,
      direction: :forward

Also, there is an alternative `stream_range/5` to iteratively get range of rows in stream.

## Options

  * `:direction`, required, the order of fetch data, available options are `:forward` | `:backward`, by default it is `:forward`.
    - `:forward`, this query is performed in the order of primary key in ascending, in this case, input `inclusive_start_primary_keys` should less
      than `exclusive_end_primary_keys`;
    - `:backward`, this query is performed in the order of primary key in descending, in this case, input `inclusive_start_primary_keys` should greater
      than `exclusive_end_primary_keys`.
  * `:columns_to_get`, optional, fetch the special fields, by default it returns all fields, pass a field list to specify the expected return fields,
    e.g. `["field1", "field2"]`.
  * `:start_column`, optional, specifies the start column when using for wide-row-read, the returned result contains this `:start_column`.
  * `:end_column`, optional, specifies the end column when using for wide-row-read, the returned result does not contain this `:end_column`.
  * `:filter`, optional, filter the return results in the server side, please see `filter/1` for details.
  * `:max_versions`, optional, how many versions need to return in results, by default it is 1.
  * `:transaction_id`, optional, read operation within local transaction.
  * `:limit`, optional, the maximum number of rows of data to be returned, this value must be greater than 0, whether this option is set or not, there
    returns a maximum of 5,000 data rows and the total data size never exceeds 4 MB.
  * `:time_range`, optional, read data by timestamp range, support two ways to use it:
    - `time_range: {start_timestamp, end_timestamp}`, the timestamp in the range (include `start_timestamp` but exclude `end_timestamp`)
      and then will return in the results.
    - `time_range: special_timestamp`, exactly match and then will return in the results.
    - `:time_range` and `:max_versions` are mutually exclusive, by default use `max_versions: 1` and `time_range: nil`.
"""
@doc row: :row
# The original spec omitted `table_name` (4 params for a 5-arity function).
# The start keys may also be a binary continuation token (second clause).
@spec get_range(
        instance,
        table_name,
        inclusive_start_primary_keys | binary(),
        exclusive_end_primary_keys,
        options
      ) :: result
def get_range(
      instance,
      table_name,
      inclusive_start_primary_keys,
      exclusive_end_primary_keys,
      options \\ []
    )

def get_range(
      instance,
      table_name,
      inclusive_start_primary_keys,
      exclusive_end_primary_keys,
      options
    )
    when is_list(inclusive_start_primary_keys) do
  prepared_var =
    prepared_get_range(
      table_name,
      inclusive_start_primary_keys,
      exclusive_end_primary_keys,
      options
    )

  Client.get_range(instance, prepared_var, nil)
end

def get_range(
      instance,
      table_name,
      inclusive_start_primary_keys,
      exclusive_end_primary_keys,
      options
    )
    when is_binary(inclusive_start_primary_keys) do
  # Binary start keys are an encoded continuation token from a previous
  # response; they are passed through to the client as-is.
  prepared_var =
    %Var.GetRange{
      table_name: table_name,
      exclusive_end_primary_keys: exclusive_end_primary_keys
    }
    |> map_options(options)

  Client.get_range(instance, prepared_var, inclusive_start_primary_keys)
end
@doc """
As a wrapper built on `get_range/5` to fetch a full matched data set by iterate, if process a large items,
recommend to use `stream_range/5`.

## Example

    import MyApp.TableStore

    iterate_all_range table_name1,
      [{"key1", 1}, {"key2", :inf_min}],
      [{"key1", 4}, {"key2", :inf_max}],
      direction: :forward

## Options

Please see options of `get_range/5` for details.
"""
@doc row: :row
@spec iterate_all_range(
        instance,
        table_name,
        inclusive_start_primary_keys,
        exclusive_end_primary_keys,
        options
      ) :: result
def iterate_all_range(
      instance,
      table_name,
      inclusive_start_primary_keys,
      exclusive_end_primary_keys,
      options \\ []
    ) do
  Client.iterate_get_all_range(
    instance,
    prepared_get_range(
      table_name,
      inclusive_start_primary_keys,
      exclusive_end_primary_keys,
      options
    )
  )
end
@doc """
As a wrapper built on `get_range/5` to create composable and lazy enumerable stream for iteration.

## Example

    import MyApp.TableStore

    stream =
      stream_range table_name1,
        [{"key1", 1}, {"key2", :inf_min}],
        [{"key1", 4}, {"key2", :inf_max}],
        direction: :forward

    Enum.map(stream, fn
      {:ok, %{rows: rows} = response} ->
        # process rows
      {:error, error} ->
        # occur error
    end)

## Options

Please see options of `get_range/5` for details.
"""
@doc row: :row
# The original spec omitted `table_name` (4 params for a 5-arity function),
# and the doc example called the nonexistent `Enum.to_list/2` (now `Enum.map/2`).
@spec stream_range(
        instance,
        table_name,
        inclusive_start_primary_keys,
        exclusive_end_primary_keys,
        options
      ) :: Enumerable.t()
def stream_range(
      instance,
      table_name,
      inclusive_start_primary_keys,
      exclusive_end_primary_keys,
      options \\ []
    ) do
  prepared_var =
    prepared_get_range(
      table_name,
      inclusive_start_primary_keys,
      exclusive_end_primary_keys,
      options
    )

  Client.stream_range(instance, prepared_var)
end
@compile {:inline, prepared_get_range: 4}
# Builds the `Var.GetRange` request struct shared by `get_range/5`,
# `iterate_all_range/5` and `stream_range/5`.
defp prepared_get_range(
table_name,
inclusive_start_primary_keys,
exclusive_end_primary_keys,
options
) do
map_options(
%Var.GetRange{
table_name: table_name,
inclusive_start_primary_keys: inclusive_start_primary_keys,
exclusive_end_primary_keys: exclusive_end_primary_keys
},
options
)
end
@doc """
The one entrance to use search index functions, please see `ExAliyunOts.Search` module for details.

Official document in [Chinese](https://help.aliyun.com/document_detail/91974.html) | [English](https://www.alibabacloud.com/help/doc-detail/91974.html)

## Options

  * `:search_query`, required, the main option to use query and sort.
    - `:query`, required, bind to the query functions:
      - `ExAliyunOts.Search.bool_query/1`
      - `ExAliyunOts.Search.exists_query/1`
      - `ExAliyunOts.Search.geo_bounding_box_query/3`
      - `ExAliyunOts.Search.geo_distance_query/3`
      - `ExAliyunOts.Search.geo_polygon_query/2`
      - `ExAliyunOts.Search.match_all_query/0`
      - `ExAliyunOts.Search.match_phrase_query/2`
      - `ExAliyunOts.Search.match_query/3`
      - `ExAliyunOts.Search.nested_query/3`
      - `ExAliyunOts.Search.prefix_query/2`
      - `ExAliyunOts.Search.range_query/2`
      - `ExAliyunOts.Search.term_query/2`
      - `ExAliyunOts.Search.terms_query/2`
      - `ExAliyunOts.Search.wildcard_query/2`
    - `:sort`, optional, by default it is use `pk_sort/1`, bind to the Sort functions:
      - `ExAliyunOts.Search.field_sort/2`
      - `ExAliyunOts.Search.geo_distance_sort/3`
      - `ExAliyunOts.Search.nested_filter/2`
      - `ExAliyunOts.Search.pk_sort/1`
      - `ExAliyunOts.Search.score_sort/1`
    - `:aggs`, optional, please see official document in [Chinese](https://help.aliyun.com/document_detail/132191.html) | [English](https://www.alibabacloud.com/help/doc-detail/132191.html).
    - `:group_bys`, optional, please see official document in [Chinese](https://help.aliyun.com/document_detail/132210.html) | [English](https://www.alibabacloud.com/help/doc-detail/132210.html).
    - `:limit`, optional, the limited size of query.
    - `:offset`, optional, the offset size of query. When the total rows are less or equal than 2000, both `:limit` and `:offset` can be used for pagination.
    - `:get_total_count`, optional, return the total count of the all matched rows, by default it is `true`.
    - `:token`, optional, when do not load all the matched rows in a single request, there will return a `next_token` value in that result,
      and then we can pass it to `:token` in the next same search query to continue load the rest rows.
    - `:collapse`, optional, duplicate removal by the specified field, please see official document in [Chinese](https://help.aliyun.com/document_detail/154172.html), please NOTICE that currently there does not support use `:collapse` with `:token` together.
  * `:columns_to_get`, optional, fetch the special fields, by default it returns all fields, here are available options:
    - `:all`, return all attribute column fields;
    - `:none`, do not return any attribute column fields;
    - `["field1", "field2"]`, specifies the expected return attribute column fields.
"""
@doc search: :search
@spec search(instance, table_name, index_name, options) :: result
def search(instance, table_name, index_name, options) do
  Client.search(instance, prepared_search(table_name, index_name, options))
end
@doc """
As a wrapper built on `search/4` to create composable and lazy enumerable stream for iteration.

## Options

Please see options of `search/4` for details.
"""
@doc search: :search
@spec stream_search(instance, table_name, index_name, options) :: Enumerable.t()
def stream_search(instance, table_name, index_name, options) do
  Client.stream_search(instance, prepared_search(table_name, index_name, options))
end
@doc """
As a wrapper built on `stream_search/4` to fetch a full matched data set as a stream, then use `Enum.reduce/2` to iteratively
format all data into a list, if process a large items, recommend to use `stream_search/4`.

## Options

Please see options of `search/4` for details.
"""
@doc search: :search
@spec iterate_search(instance, table_name, index_name, options) :: result
def iterate_search(instance, table_name, index_name, options) do
  Client.iterate_search(instance, prepared_search(table_name, index_name, options))
end
# Builds the search request struct shared by `search/4`, `stream_search/4`
# and `iterate_search/4`.
defp prepared_search(table_name, index_name, options) do
ExAliyunOts.Search.map_search_options(
%Var.Search.SearchRequest{table_name: table_name, index_name: index_name},
options
)
end
@doc """
Query current supported maximum number of concurrent tasks to `parallel_scan/4` request.
Official document in [Chinese](https://help.aliyun.com/document_detail/153862.html) | [English](https://www.alibabacloud.com/help/doc-detail/153862.htm)
"""
@doc search: :search
@spec compute_splits(instance, table_name, index_name) :: result
# Thin delegate to the low-level client.
defdelegate compute_splits(instance, table_name, index_name), to: Client
@doc """
Leverage concurrent tasks to query matched raw data (still be with search function) more quickly, in this use case, this function is improved for speed up
scan query, but no guarantee to the order of query results, and does not support the aggregation of scan query.

In general, recommend to use `iterate_parallel_scan/5` or `iterate_parallel_scan/7` for the common use case of parallel scan.

Official document in [Chinese](https://help.aliyun.com/document_detail/153862.html) | [English](https://www.alibabacloud.com/help/doc-detail/153862.htm)

## Options

  * `:scan_query`, required, the main option to use query.
    - `:query`, required, bind to the query functions, the same as query option of `search/4`.
    - `:limit`, optional, the limited size of query, defaults to 2000, the maximum value of limit is 2000.
    - `:token`, optional, when do not load all the matched rows in a single request, there will return a `next_token` value in that result,
      and then we can pass it to `:token` in the next same scan query to continue load the rest rows.
    - `:max_parallel`, required, the maximum number of concurrent, as the `splits_size` value from the response of `compute_splits/3`.
    - `:current_parallel_id`, required, refer the official document, the available value is in [0, max_parallel).
  * `:columns_to_get`, optional, fetch the special fields, by default it returns all fields of the search index, here are available options:
    - `:all_from_index`, return all attribute column fields of search index;
    - `:none`, do not return any attribute column fields;
    - `["field1", "field2"]`, specifies the expected return attribute column fields.
  * `:session_id`, as usual, this option is required from the response of `compute_splits/3`, if not set this option, the query result may contain
    duplicate data, refer the official document, once occurs an `OTSSessionExpired` error, must initiate another parallel scan task to re-query data.
"""
@doc search: :search
@spec parallel_scan(instance, table_name, index_name, options) :: result
def parallel_scan(instance, table_name, index_name, options) do
  request = ExAliyunOts.Search.map_scan_options(table_name, index_name, options)
  Client.parallel_scan(instance, request)
end
@doc """
A simple wrapper of `stream_parallel_scan/4` to take care `OTSSessionExpired` error with retry, make parallel scan
as a stream that applies the given function to the complete result of scan query.

In general, recommend to use this function for the common use case of parallel scan.

## Options

  * `:scan_query`, required, the main option to use query.
    - `:query`, required, bind to the query functions, the same as query option of `search/4`.
    - `:limit`, optional, the limited size of query, defaults to 2000, the maximum value of limit is 2000.
  * `:columns_to_get`, optional, fetch the special fields, by default it returns all fields of the search index, here are available options:
    - `:all_from_index`, return all attribute column fields of search index;
    - `:none`, do not return any attribute column fields;
    - `["field1", "field2"]`, specifies the expected return attribute column fields.
  * `:timeout`, optional, the `:timeout` option of `Task.async_stream/3`, defaults to `:infinity`.

## Example

    def iterate_stream(stream) do
      Enum.map(stream, fn
        {:ok, response} ->
          response
        {:error, error} ->
          error
      end)
    end

    iterate_parallel_scan(
      "table",
      "index",
      &iterate_stream/1,
      scan_query: [
        query: match_query("is_actived", "true"),
        limit: 1000
      ],
      columns_to_get: ["is_actived", "name", "score"]
    )
"""
@doc search: :search
@spec iterate_parallel_scan(instance, table_name, index_name, fun :: (term -> term), options) ::
        term()
def iterate_parallel_scan(instance, table_name, index_name, fun, options)
    when is_function(fun) do
  result =
    instance
    |> stream_parallel_scan(table_name, index_name, options)
    |> fun.()

  case result do
    {:error, %ExAliyunOts.Error{code: "OTSSessionExpired"}} ->
      # An expired scan session cannot be resumed; start a fresh scan task.
      Logger.info("scan_query session expired, will renew a parallel scan task.")
      iterate_parallel_scan(instance, table_name, index_name, fun, options)

    other ->
      other
  end
end
@doc """
A simple wrapper of `stream_parallel_scan/4` to take care `OTSSessionExpired` error with retry, make parallel scan
as a stream that applies the given function from `module` with the list of arguments `args` to the complete result of scan query.

In general, recommend to use this function for the common use case of parallel scan.

## Options

Please see options of `iterate_parallel_scan/5`.

## Example

    defmodule StreamHandler do
      def iterate_stream(stream) do
        Enum.map(stream, fn
          {:ok, response} ->
            response
          {:error, error} ->
            error
        end)
      end
    end

    iterate_parallel_scan(
      "table",
      "index",
      StreamHandler,
      :iterate_stream,
      [],
      scan_query: [
        query: match_query("is_actived", "true"),
        limit: 1000
      ],
      columns_to_get: ["field1", "field2"]
    )
"""
@doc search: :search
@spec iterate_parallel_scan(
        instance,
        table_name,
        index_name,
        mod :: module(),
        fun :: atom(),
        args :: [term],
        options
      ) :: term()
def iterate_parallel_scan(instance, table_name, index_name, mod, fun, args, options) do
  stream = stream_parallel_scan(instance, table_name, index_name, options)

  # The stream is passed as the first argument to `mod.fun/N`.
  mod
  |> apply(fun, [stream | args])
  |> case do
    {:error, %ExAliyunOts.Error{code: "OTSSessionExpired"}} ->
      # An expired scan session cannot be resumed; start a fresh scan task.
      Logger.info("scan_query session expired, will renew a parallel scan task.")
      iterate_parallel_scan(instance, table_name, index_name, mod, fun, args, options)

    other ->
      other
  end
end
@doc """
Integrate `parallel_scan/4` with `compute_splits/3` as a complete use, base on the response of `compute_splits/3` to create the corresponding
number of concurrency task(s), use `Task.async_stream/3` to make parallel scan as a stream which properly process `token`
in every request of the internal, when use this function need to consider the possibility of the `OTSSessionExpired` error in the external.
## Options
Please see options of `iterate_parallel_scan/5`.
"""
@doc search: :search
@spec stream_parallel_scan(instance, table_name, index_name, options) :: Enumerable.t()
defdelegate stream_parallel_scan(instance, table_name, index_name, options),
to: ExAliyunOts.Search
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/117477.html) | [English](https://www.alibabacloud.com/help/doc-detail/117477.html)
## Example
import MyApp.TableStore
list_search_index("table")
"""
@doc search: :search
@spec list_search_index(instance, table_name) :: result
defdelegate list_search_index(instance, table_name), to: Client
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/117452.html) | [English](https://www.alibabacloud.com/help/doc-detail/117452.html)
## Example
import MyApp.TableStore
create_search_index "table", "index_name",
field_schemas: [
field_schema_keyword("name"),
field_schema_integer("age")
]
create_search_index "table", "index_name",
field_schemas: [
field_schema_keyword("name"),
field_schema_geo_point("location"),
field_schema_integer("value")
]
create_search_index "table", "index_name",
field_schemas: [
field_schema_nested(
"content",
field_schemas: [
field_schema_keyword("header"),
field_schema_keyword("body")
]
)
]
## Options
* `:field_schemas`, required, a list of predefined search-index schema fields, please see the following helper functions:
- `ExAliyunOts.Search.field_schema_integer/2`
- `ExAliyunOts.Search.field_schema_float/2`
- `ExAliyunOts.Search.field_schema_boolean/2`
- `ExAliyunOts.Search.field_schema_keyword/2`
- `ExAliyunOts.Search.field_schema_text/2`
- `ExAliyunOts.Search.field_schema_nested/2`
- `ExAliyunOts.Search.field_schema_geo_point/2`
* `:index_sorts`, optional, a list of predefined sort-index schema fields, please see the following helper functions:
- `ExAliyunOts.Search.pk_sort/1`
- `ExAliyunOts.Search.field_sort/2`
- `ExAliyunOts.Search.geo_distance_sort/3`
"""
@doc search: :search
@spec create_search_index(instance, table_name, index_name, options) :: result
def create_search_index(instance, table_name, index_name, options) do
var_request = %Var.Search.CreateSearchIndexRequest{
table_name: table_name,
index_name: index_name,
index_schema: %Var.Search.IndexSchema{
field_schemas: Keyword.fetch!(options, :field_schemas),
index_sorts: Keyword.get(options, :index_sorts)
}
}
Client.create_search_index(instance, var_request)
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/117478.html) | [English](https://www.alibabacloud.com/help/doc-detail/117478.html)
## Example
import MyApp.TableStore
delete_search_index("table", "index_name")
"""
@doc search: :search
@spec delete_search_index(instance, table_name, index_name) :: result
def delete_search_index(instance, table_name, index_name) do
var_delete_request = %Var.Search.DeleteSearchIndexRequest{
table_name: table_name,
index_name: index_name
}
Client.delete_search_index(instance, var_delete_request)
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/117475.html) | [English](https://www.alibabacloud.com/help/doc-detail/117475.html)
## Example
import MyApp.TableStore
describe_search_index("table", "index_name")
"""
@doc search: :search
@spec describe_search_index(instance, table_name, index_name) :: result
def describe_search_index(instance, table_name, index_name) do
var_describe_request = %Var.Search.DescribeSearchIndexRequest{
table_name: table_name,
index_name: index_name
}
Client.describe_search_index(instance, var_describe_request)
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/93819.html) | [English](https://www.alibabacloud.com/help/doc-detail/93819.html)
## Example
import MyApp.TableStore
partition_key = {"key", "key1"}
start_local_transaction("table", partition_key)
"""
@doc local_transaction: :local_transaction
@spec start_local_transaction(instance, table_name, partition_key :: tuple()) :: result
def start_local_transaction(instance, table_name, partition_key) do
var_start_local_transaction = %Var.Transaction.StartLocalTransactionRequest{
table_name: table_name,
partition_key: partition_key
}
Client.start_local_transaction(instance, var_start_local_transaction)
end
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/93819.html) | [English](https://www.alibabacloud.com/help/doc-detail/93819.html)
## Example
import MyApp.TableStore
commit_transaction("transaction_id")
"""
@doc local_transaction: :local_transaction
@spec commit_transaction(instance, transaction_id :: String.t()) :: result
defdelegate commit_transaction(instance, transaction_id), to: Client
@doc """
Official document in [Chinese](https://help.aliyun.com/document_detail/93819.html) | [English](https://www.alibabacloud.com/help/doc-detail/93819.html)
## Example
import MyApp.TableStore
abort_transaction("transaction_id")
"""
@doc local_transaction: :local_transaction
defdelegate abort_transaction(instance, transaction_id), to: Client
# Copies option values onto the request struct `var`, but only for keys the
# struct actually defines; unknown keys and nil values are silently skipped.
# A few keys get normalized from their user-friendly form into the
# protocol-level representation (see the dedicated map_* helpers below).
defp map_options(var, nil), do: var
defp map_options(var, options) do
  options
  |> Keyword.keys()
  |> Enum.reduce(var, fn key, acc ->
    value = Keyword.get(options, key)
    if value != nil and Map.has_key?(var, key) do
      case key do
        :return_type ->
          Map.put(acc, key, map_return_type(value))
        :direction ->
          Map.put(acc, key, map_direction(value))
        :stream_spec ->
          # Keyword list -> %Var.StreamSpec{} struct.
          Map.put(acc, key, struct(Var.StreamSpec, value))
        :time_range ->
          Map.put(acc, key, map_time_range(value))
        _ ->
          # Everything else is copied verbatim.
          Map.put(acc, key, value)
      end
    else
      acc
    end
  end)
end
# Normalizes the `:return_type` option into its protocol constant; `nil`
# means "return nothing".
defp map_return_type(nil), do: :RT_NONE
# Compile-time code generation: for every constant known to `ReturnType`,
# define two accepting clauses — one for the user-friendly downcased form
# (derived by stripping the leading "RT_" prefix, e.g. :pk) and one for the
# constant itself (e.g. :RT_PK).
# NOTE(review): `String.slice(3..-1)` uses the negative-range form that newer
# Elixir versions deprecate in favor of `3..-1//1` — confirm against the
# project's minimum supported Elixir version.
ReturnType.constants()
|> Enum.map(fn {_value, type} ->
  downcase_type = type |> to_string() |> String.slice(3..-1) |> Utils.downcase_atom()
  defp map_return_type(unquote(downcase_type)), do: unquote(type)
  defp map_return_type(unquote(type)), do: unquote(type)
end)
# Anything else is a caller error.
defp map_return_type(invalid_return_type) do
  raise ExAliyunOts.RuntimeError, "invalid return_type: #{inspect(invalid_return_type)}"
end
# Compile-time code generation mirroring map_return_type/1: for every
# constant known to `Direction`, accept both the downcased user-facing atom
# and the constant itself.
Direction.constants()
|> Enum.map(fn {_value, type} ->
  defp map_direction(unquote(Utils.downcase_atom(type))), do: unquote(type)
  defp map_direction(unquote(type)), do: unquote(type)
end)
# Anything else is a caller error.
defp map_direction(invalid_direction) do
  raise ExAliyunOts.RuntimeError, "invalid direction: #{inspect(invalid_direction)}"
end
# Builds a %Var.TimeRange{} from the `:time_range` option: a single integer
# selects one specific timestamp, while a {start_time, end_time} tuple of
# integers selects a range.
defp map_time_range(specific_time) when is_integer(specific_time) do
  %Var.TimeRange{specific_time: specific_time}
end
defp map_time_range({start_time, end_time})
     when is_integer(start_time) and is_integer(end_time) do
  %Var.TimeRange{start_time: start_time, end_time: end_time}
end
# Compile-time mapping from the user-facing downcased option key
# (e.g. :put) to its OperationType constant (e.g. :PUT).
@operation_type_mapping OperationType.updates_supported()
                        |> Enum.map(fn type -> {Utils.downcase_atom(type), type} end)

# Collects the update operations present in `options` into a map keyed by
# the corresponding OperationType constant; absent (or nil) options are
# skipped. Uses Keyword.get/2 — the original used Keyword.pop/2 and threw
# the popped remainder away.
defp map_updates(options) do
  Enum.reduce(@operation_type_mapping, %{}, fn {update_operation, operation_type}, acc ->
    case Keyword.get(options, update_operation) do
      nil -> acc
      matched_update -> Map.put(acc, operation_type, matched_update)
    end
  end)
end
end
|
lib/ex_aliyun_ots.ex
| 0.882965
| 0.465145
|
ex_aliyun_ots.ex
|
starcoder
|
defmodule Money.Subscription.Change do
  @moduledoc """
  Defines the structure of a plan changeset.
  * `:first_interval_starts` which is the start date of the first interval for the new
    plan
  * `:first_billing_amount` is the amount to be billed, net of any credit, at
    the `:first_interval_starts`
  * `:next_interval_starts` is the start date of the next interval after the
    first interval, including any `:credit_days_applied`
  * `:credit_amount` is the amount of unconsumed credit of the current plan
  * `:credit_amount_applied` is the amount of credit applied to the new plan. If
    the `:prorate` option is `:price` (the default) the `:first_billing_amount`
    is the plan `:price` reduced by the `:credit_amount_applied`. If the `:prorate`
    option is `:period` then the `:first_billing_amount` is the plan `:price` and
    the `:next_interval_starts` is extended by the `:credit_days_applied`
    instead.
  * `:credit_days_applied` is the number of days credit applied to the first
    interval by adding days to the `:first_interval_starts` date.
  * `:credit_period_ends` is the date on which any applied credit is consumed or `nil`
  * `:carry_forward` is any amount of credit carried forward to a subsequent period.
    If non-zero this amount is a negative `Money.t`. It is non-zero when the credit
    amount for the current plan is greater than the price of the new plan. In
    this case the `:first_billing_amount` is zero.
  """
  @typedoc "A plan change record struct."
  @type t :: %__MODULE__{
          first_billing_amount: Money.t(),
          first_interval_starts: Date.t(),
          next_interval_starts: Date.t(),
          credit_amount_applied: Money.t(),
          credit_amount: Money.t(),
          credit_days_applied: non_neg_integer(),
          credit_period_ends: Date.t(),
          carry_forward: Money.t()
        }
  @doc """
  A struct defining the changes between two plans.
  """
  defstruct first_billing_amount: nil,
            first_interval_starts: nil,
            next_interval_starts: nil,
            credit_amount_applied: nil,
            credit_amount: nil,
            credit_days_applied: 0,
            credit_period_ends: nil,
            carry_forward: nil
end
|
lib/money/subscription/change.ex
| 0.886181
| 0.906777
|
change.ex
|
starcoder
|
defmodule AshPostgres.Functions.TrigramSimilarity do
  @moduledoc """
  A filter predicate that filters based on trigram similarity.
  See the postgres docs on [https://www.postgresql.org/docs/9.6/pgtrgm.html](trigram) for more information.
  Requires the pg_trgm extension. Configure which extensions you have installed in your `AshPostgres.Repo`
  At least one of the `equals`, `greater_than` and `less_than` options must be provided.
  # Example
  filter(query, [trigram_similarity: [:name, "geoff", [greater_than: 0.4]]])
  """
  use Ash.Query.Function, name: :trigram_similarity

  def args, do: [:ref, :term, {:options, [:less_than, :greater_than, :equals]}]

  # The term to compare against must be a string.
  def new([_, text | _]) when not is_binary(text) do
    # Bug fix: interpolate with inspect/1 — `#{text}` raised a
    # Protocol.UndefinedError for terms not implementing String.Chars.
    {:error, "#{Ash.Query.Function.ordinal(2)} argument must be a string, got #{inspect(text)}"}
  end

  def new([%Ref{} = ref, text, opts]) do
    with :ok <- required_options_provided(opts),
         {:ok, less_than} <- validate_similarity(opts[:less_than]),
         {:ok, greater_than} <- validate_similarity(opts[:greater_than]),
         {:ok, equals} <- validate_similarity(opts[:equals]) do
      new_options = [
        less_than: less_than,
        greater_than: greater_than,
        equals: equals
      ]

      {:ok,
       %__MODULE__{
         arguments: [
           ref,
           text,
           new_options
         ]
       }}
    else
      _ ->
        {:error,
         "Invalid options for `trigram_similarity` in the #{Ash.Query.Function.ordinal(3)} argument"}
    end
  end

  # A similarity predicate on a reference can never hold when that same
  # reference is nil.
  def compare(%__MODULE__{arguments: [ref]}, %Ash.Query.Operator.IsNil{left: ref}) do
    :mutually_exclusive
  end

  def compare(_, _), do: :unknown

  # Accepts nil (option absent), integer 0/1, floats within [0.0, 1.0], and
  # numeric strings (".5" is read as "0.5"). Anything else yields :error.
  defp validate_similarity(nil), do: {:ok, nil}
  defp validate_similarity(1), do: {:ok, 1.0}
  defp validate_similarity(0), do: {:ok, 0.0}

  defp validate_similarity(similarity)
       when is_float(similarity) and similarity <= 1.0 and similarity >= 0.0 do
    {:ok, similarity}
  end

  defp validate_similarity(similarity) when is_binary(similarity) do
    sanitized =
      case similarity do
        "." <> decimal_part -> "0." <> decimal_part
        other -> other
      end

    case Float.parse(sanitized) do
      {float, ""} -> {:ok, float}
      _ -> :error
    end
  end

  # Robustness fix: out-of-range floats and other types previously raised a
  # FunctionClauseError; now they flow into the friendly error in new/1.
  defp validate_similarity(_), do: :error

  # At least one of :greater_than, :less_than or :equals must be given.
  # Bug fix: the original only matched a fixed set of combinations and
  # crashed with a CaseClauseError on the rest (e.g. greater_than + equals);
  # any non-empty combination is now accepted.
  defp required_options_provided(opts) do
    case {opts[:greater_than], opts[:less_than], opts[:equals]} do
      {nil, nil, nil} -> :error
      _ -> :ok
    end
  end
end
|
lib/functions/trigram_similarity.ex
| 0.88504
| 0.681707
|
trigram_similarity.ex
|
starcoder
|
defmodule Seasonal.Pool do
  @moduledoc """
  Provides a set of functions for using worker pools.
  Unlike the main `Seasonal` module, this module allows for working directly with
  unnamed and unsupervised pools.
  """
  use GenServer

  defmodule State do
    @moduledoc false
    # workers     - maximum number of jobs allowed to run at once
    # active_jobs - map of job key => running Task
    # queued_jobs - :queue of {key, fun} entries waiting for a free worker
    # queued_keys - MapSet of keys currently queued (MapSet replaces the
    #               deprecated HashSet/Set the original used)
    # waiters     - unused by this module; kept for struct compatibility
    # joiners     - callers parked in join/2 until the pool drains
    defstruct [
      workers: 1,
      active_jobs: %{},
      queued_jobs: :queue.new(),
      queued_keys: MapSet.new(),
      waiters: %{},
      joiners: [],
    ]
  end

  ### Client API

  @doc """
  Start an unnamed, unsupervised pool with the given number of workers.
  """
  def start_link(workers) do
    state = %State{workers: workers}
    GenServer.start_link(__MODULE__, state)
  end

  @doc """
  Start a named unsupervised pool with the given number of workers.
  """
  def start_link(name, workers) do
    state = %State{workers: workers}
    GenServer.start_link(__MODULE__, state, name: to_address(name))
  end

  @doc """
  Wait until all jobs are finished.
  """
  def join(name_or_pid, timeout \\ :infinity) do
    GenServer.call(to_address(name_or_pid), {:join}, timeout)
  end

  @doc """
  Queue a job. Returns the unique key identifying the job.
  """
  def queue(name_or_pid, func) do
    GenServer.call(to_address(name_or_pid), {:queue, func})
  end

  @doc """
  Gets the pid of the named pool.
  """
  def whereis(name_or_pid) do
    GenServer.whereis(to_address(name_or_pid))
  end

  @doc """
  Get the number of workers for the given pool.
  """
  def workers(name_or_pid) do
    GenServer.call(to_address(name_or_pid), :workers)
  end

  @doc """
  Stops the given pool.
  """
  def stop(name_or_pid) do
    GenServer.stop(to_address(name_or_pid))
  end

  ### Server API

  @doc false
  def handle_call({:join}, from, state) do
    # Reply immediately only when nothing is running or queued; otherwise
    # park the caller until the pool drains (see run_next_job/1).
    if Map.size(state.active_jobs) > 0 or :queue.len(state.queued_jobs) > 0 do
      {:noreply, add_joiner(state, from)}
    else
      {:reply, :done, state}
    end
  end

  @doc false
  def handle_call({:queue, func}, _from, state) do
    key = create_key()
    state = run_job(state, func, key)
    {:reply, key, state}
  end

  @doc false
  def handle_call(:workers, _from, state = %State{workers: workers}) do
    {:reply, workers, state}
  end

  @doc false
  def handle_info(message, state) do
    # Task completion/DOWN messages land here.
    # NOTE(review): Task.find/2 is deprecated in recent Elixir releases —
    # consider matching on the task ref directly if the project upgrades.
    active_tasks = Map.values(state.active_jobs)

    # Bug fix: bind the updated state OUTSIDE the case — variables bound
    # inside a case clause do not leak out, so the original version silently
    # dropped the update and never removed finished jobs.
    state =
      case Task.find(active_tasks, message) do
        {{key, _result}, _task} ->
          state
          |> remove_finished_job(key)
          |> run_next_job()

        nil ->
          state
      end

    {:noreply, state}
  end

  ### Helpers

  defp add_joiner(state, joiner), do: update_in(state.joiners, &[joiner | &1])

  defp clear_joiners(state), do: put_in(state.joiners, [])

  defp create_key, do: UUID.uuid4()

  defp notify_joiners(state) do
    Enum.each(state.joiners, fn joiner -> GenServer.reply(joiner, :done) end)
    state
  end

  defp remove_finished_job(state, key), do: update_in(state.active_jobs, &Map.delete(&1, key))

  defp run_fun_or_mfa({mod, func, args}), do: apply(mod, func, args)
  defp run_fun_or_mfa(func), do: func.()

  # Starts the job right away when a worker slot is free, otherwise queues
  # it. Always returns the updated state.
  defp run_job(state, func, key) do
    if Map.size(state.active_jobs) < state.workers do
      wrapped_func = fn ->
        try do
          {key, {:ok, run_fun_or_mfa(func)}}
        catch
          class, reason ->
            # NOTE(review): System.stacktrace/0 is deprecated since
            # Elixir 1.7 in favor of __STACKTRACE__; kept for compatibility
            # with this project's (older) Elixir baseline.
            stacktrace = System.stacktrace()
            {key, {:error, class, reason, stacktrace}}
        end
      end

      task = Task.async(wrapped_func)
      # Bug fix: the updated state is now RETURNED from each branch — the
      # original rebound `state` inside if/else (bindings do not escape) and
      # returned the stale argument, so the pool never tracked any job.
      update_in(state.active_jobs, &Map.put(&1, key, task))
    else
      state = update_in(state.queued_jobs, &:queue.in({key, func}, &1))
      update_in(state.queued_keys, &MapSet.put(&1, key))
    end
  end

  # Promotes the next queued job if any; once fully drained, releases
  # everyone blocked in join/2.
  defp run_next_job(state) do
    case :queue.out(state.queued_jobs) do
      {{:value, {key, func}}, new_queued_jobs} ->
        state
        |> unqueue_job(new_queued_jobs, key)
        |> run_job(func, key)

      {:empty, _} ->
        if Map.size(state.active_jobs) == 0 do
          state
          |> notify_joiners()
          |> clear_joiners()
        else
          state
        end
    end
  end

  defp to_address(pid) when is_pid(pid), do: pid
  defp to_address(name), do: {:via, :gproc, {:n, :l, {:seasonal_worker_pool, name}}}

  defp unqueue_job(state, new_queued_jobs, key) do
    state = put_in(state.queued_jobs, new_queued_jobs)
    update_in(state.queued_keys, &MapSet.delete(&1, key))
  end
end
|
lib/seasonal/pool.ex
| 0.734405
| 0.553747
|
pool.ex
|
starcoder
|
defmodule Mix.Tasks.Hydra do
use Mix.Task
@shortdoc "Starts a Hydra server"
@moduledoc @shortdoc
@ascii """
.. .
..:M... I.
.. .. . ..... . NM ..M..
. ........ , ..MZ OMMMMMMM:.~M.
MMMMMMMMMMMNMMMM ... .MMMMMMMMMMMMMMM. .
. MMMMMMMMDMMMMM+... .. MMMMMMMMMMMMMMMMMMM .
.. MMMMMMMMMMMMMM~.. .MMMMMMMM .. ..IMMMMMMMM.
NMM+. MMMMMMM . .7.MMMMMMO. .MMMMN.M...
.... . +MMMMMM.. .MMMMMMD. .MMMMMMMM ...
..OMMMM.:I..MMMMMMM .=MMMMMMMMMM..
..MMMMMM.. MMMMMMMM . .~.. ...IMMMM
.MMMMMM .MMMMMMMMMM.. ..M.. ....ZMM..
7MMMMMI:. .M.MMMMMMMMMM~ ..MMM8,.....
.MMMMMM... ....MMMMMMMMMMMMMMMMMMM, ..
..... ... ,MMMMMM.. ..D....MMMMMMMMMMMMMMMMIMMMMO.
IM. +M.. .MMMMM M. .. .?MMMMMMMMMMMM .MMMMMMMMMMMM.
.MMZMMMMM.. ..MMMMM. . ..MMMMMMMMMMMMMM. .MMMMMMMMMMM.
. MMMMMMMM?~M..MMMMN.. .MMMMMMM...MMMMMM..,... ......
.$MMMMMMMMMMMM .MMMMD. ....MMMMD. .MMMMM . . ...N=.
..M.MMD. .MMMMMM.MMMMM ..MMMMM .MMMMM:. .. :MMMM.
..DMMMM. ..MMMMM.8MMMM.. ZMMMMMM.. .MMMMM ..MMMMMMMMM..
..MMMM ....7MMMMM .MMMMM:. .8MMMMMMM.. :MMMMM...MMMMMMMMMMD.
MMM . .MMMM8M...MMMMMM....M.MMMMMMM.MMMMMM.?MMMM.. MMMMM.
..8M. MMMMM Z....MMMMMMM..Z.,MMMMMMMMMMMMM..MMMM. :MMMMM..
. .. MMMMM . .?MMMMMMMM~..MMMMMMMMMMMM.MMMMMI.. ..MMM.
.MMMMM. ..MMMMMMMMMMMMMMMMMMMMM..=MMMMM? . .M$.
OMMMMN.. ...MMMMMMMMMMMMMMMMMMM...M.MMMM. .
.MMMMMMMM . ...MMMMMMMMMMMMMMMMMM.. ..MMMM..
.MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM,... .MMMMM.
..MMMMMMMMMMMMMMMMMMMMMMMMMMMM$ ,MMMMMMMM.
.... ..MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM .
MMMM...MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM+M:. .
.....,ZMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM=...
....MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM.....
"""
alias Hydra.CLI
def run(args) do
  # Parse CLI flags, fold them into the app config, persist the result and
  # announce where the server is listening — then boot the application.
  opts = CLI.parse_args(args)

  opts
  |> merge_config()
  |> update_config()
  |> print_message()

  Mix.Task.run("run", ["--no-halt"])
end
# Overlay the parsed CLI options on top of the configured :hydra server
# options (CLI values win).
defp merge_config(opts) do
  base = Application.get_env(:hydra, :server)
  Keyword.merge(base, opts)
end
# Center `string` within the 68-column banner by prepending half of the
# remaining width in spaces.
# Bug fix: clamp the padding at zero — for strings wider than 68 characters
# the computed length went negative and String.duplicate/2 raised.
defp padded_string(string) do
  pad = max(div(68 - String.length(string), 2), 0)
  String.duplicate(" ", pad) <> string
end
# Prints the ASCII-art banner followed by a centered "awoken at host:port"
# line. Kept byte-identical: the heredoc (including its blank-line layout)
# is user-visible output.
defp print_message(opts) do
  address = server_location(opts)
  msg = padded_string("Hydra has awoken at #{address}")
  IO.puts """
  #{@ascii}
  #{msg}
  """
end
# Formats the configured IP tuple and port as "a.b.c.d:port".
defp server_location(opts) do
  ip = opts[:ip] |> Tuple.to_list() |> Enum.join(".")
  "#{ip}:#{opts[:port]}"
end
# Persists the merged options into the :hydra application environment (so
# the booted app sees them) and passes them through for the next step.
defp update_config(opts) do
  :ok = Application.put_env(:hydra, :server, opts, persistent: true)
  opts
end
end
|
lib/mix/tasks/hydra.ex
| 0.574634
| 0.651587
|
hydra.ex
|
starcoder
|
defmodule Doorman do
  @moduledoc """
  Provides authentication helpers that take advantage of the options configured
  in your config files.
  """
  @doc """
  Authenticates a user by their email and password. Returns the user if the
  user is found and the password is correct, otherwise nil.
  Requires `user_module`, `secure_with`, and `repo` to be configured via
  `Mix.Config`. See [README.md] for an example.
  ```
  Doorman.authenticate("<EMAIL>", "brandyr00lz")
  ```
  If you want to authenticate other modules, you can pass in the module directly.
  ```
  Doorman.authenticate(Customer, "<EMAIL>", "<PASSWORD>")
  ```
  """
  def authenticate(user_module \\ nil, email, password) do
    user_module = user_module || get_user_module()
    user = repo_module().get_by(user_module, email: email)
    cond do
      user && authenticate_user(user, password) -> user
      user -> nil
      true ->
        # No user found: run a dummy password check anyway so the response
        # time resembles the found-user path (mitigates timing-based user
        # enumeration).
        auth_module().dummy_checkpw()
        nil
    end
  end
  @doc """
  Authenticates a user. Returns true if the user's password and the given
  password match based on the strategy configured, otherwise false.
  Use `authenticate/2` if you would like to authenticate by email and password.
  Requires `user_module`, `secure_with`, and `repo` to be configured via
  `Mix.Config`. See [README.md] for an example.
  ```
  user = Myapp.Repo.get(Myapp.User, 1)
  Doorman.authenticate_user(user, "brandyr00lz")
  ```
  """
  def authenticate_user(user, password) do
    auth_module().authenticate(user, password)
  end
  @doc """
  Returns true if passed in `conn`s `assigns` has a non-nil `:current_user`,
  otherwise returns false.
  Make sure your pipeline uses a login plug to fetch the current user for this
  function to work correctly.
  """
  def logged_in?(conn) do
    conn.assigns[:current_user] != nil
  end
  # Configured Ecto repo module (config key :repo).
  defp repo_module do
    get_module(:repo)
  end
  # Configured user schema module (config key :user_module).
  defp get_user_module do
    get_module(:user_module)
  end
  # Configured hashing-strategy module (config key :secure_with).
  defp auth_module do
    get_module(:secure_with)
  end
  # Reads a module from the :doorman application environment, raising with a
  # helpful example configuration when the key is missing.
  defp get_module(name) do
    case Application.get_env(:doorman, name) do
      nil ->
        raise """
        You must add `#{Atom.to_string(name)}` to `doorman` in your config
        Here is an example configuration:
        config :doorman,
          repo: MyApp.Repo,
          secure_with: Doorman.Auth.Bcrypt,
          user_module: MyApp.User
        """
      module -> module
    end
  end
end
|
lib/doorman.ex
| 0.874131
| 0.736756
|
doorman.ex
|
starcoder
|
defmodule AWS.NetworkManager do
@moduledoc """
Transit Gateway Network Manager (Network Manager) enables you to create a
global network, in which you can monitor your AWS and on-premises networks
that are built around transit gateways.
"""
@doc """
Associates a customer gateway with a device and optionally, with a link. If
you specify a link, it must be associated with the specified device.
You can only associate customer gateways that are connected to a VPN
attachment on a transit gateway. The transit gateway must be registered in
your global network. When you register a transit gateway, customer gateways
that are connected to the transit gateway are automatically included in the
global network. To list customer gateways that are connected to a transit
gateway, use the
[DescribeVpnConnections](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeVpnConnections.html)
EC2 API and filter by `transit-gateway-id`.
You cannot associate a customer gateway with more than one device and link.
"""
def associate_customer_gateway(client, global_network_id, input, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/customer-gateway-associations"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Associates a link to a device. A device can be associated to multiple links
and a link can be associated to multiple devices. The device and link must
be in the same global network and the same site.
"""
def associate_link(client, global_network_id, input, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/link-associations"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a new device in a global network. If you specify both a site ID and
a location, the location of the site is used for visualization in the
Network Manager console.
"""
def create_device(client, global_network_id, input, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/devices"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a new, empty global network.
"""
def create_global_network(client, input, options \\ []) do
path_ = "/global-networks"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a new link for a specified site.
"""
def create_link(client, global_network_id, input, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/links"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a new site in a global network.
"""
def create_site(client, global_network_id, input, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/sites"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Deletes an existing device. You must first disassociate the device from any
links and customer gateways.
"""
def delete_device(client, device_id, global_network_id, input, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/devices/#{URI.encode(device_id)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes an existing global network. You must first delete all global
network objects (devices, links, and sites) and deregister all transit
gateways.
"""
def delete_global_network(client, global_network_id, input, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes an existing link. You must first disassociate the link from any
devices and customer gateways.
"""
def delete_link(client, global_network_id, link_id, input, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/links/#{URI.encode(link_id)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes an existing site. The site cannot be associated with any device or
link.
"""
def delete_site(client, global_network_id, site_id, input, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/sites/#{URI.encode(site_id)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deregisters a transit gateway from your global network. This action does
not delete your transit gateway, or modify any of its attachments. This
action removes any customer gateway associations.
"""
def deregister_transit_gateway(client, global_network_id, transit_gateway_arn, input, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/transit-gateway-registrations/#{URI.encode(transit_gateway_arn)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Describes one or more global networks. By default, all global networks are
described. To describe the objects in your global network, you must use the
appropriate `Get*` action. For example, to list the transit gateways in
your global network, use `GetTransitGatewayRegistrations`.
"""
def describe_global_networks(client, global_network_ids \\ nil, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/global-networks"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
query_ = if !is_nil(global_network_ids) do
[{"globalNetworkIds", global_network_ids} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Disassociates a customer gateway from a device and a link.
"""
def disassociate_customer_gateway(client, customer_gateway_arn, global_network_id, input, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/customer-gateway-associations/#{URI.encode(customer_gateway_arn)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Disassociates an existing device from a link. You must first disassociate
any customer gateways that are associated with the link.
"""
def disassociate_link(client, global_network_id, input, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/link-associations"
headers = []
{query_, input} =
[
{"DeviceId", "deviceId"},
{"LinkId", "linkId"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Gets the association information for customer gateways that are associated
with devices and links in your global network.
"""
def get_customer_gateway_associations(client, global_network_id, customer_gateway_arns \\ nil, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/customer-gateway-associations"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
query_ = if !is_nil(customer_gateway_arns) do
[{"customerGatewayArns", customer_gateway_arns} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets information about one or more of your devices in a global network.
"""
def get_devices(client, global_network_id, device_ids \\ nil, max_results \\ nil, next_token \\ nil, site_id \\ nil, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/devices"
headers = []
query_ = []
query_ = if !is_nil(site_id) do
[{"siteId", site_id} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
query_ = if !is_nil(device_ids) do
[{"deviceIds", device_ids} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets the link associations for a device or a link. Either the device ID or
the link ID must be specified.
"""
def get_link_associations(client, global_network_id, device_id \\ nil, link_id \\ nil, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/link-associations"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
query_ = if !is_nil(link_id) do
[{"linkId", link_id} | query_]
else
query_
end
query_ = if !is_nil(device_id) do
[{"deviceId", device_id} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets information about one or more links in a specified global network.
If you specify the site ID, you cannot specify the type or provider in the
same request. You can specify the type and provider in the same request.
"""
def get_links(client, global_network_id, link_ids \\ nil, max_results \\ nil, next_token \\ nil, provider \\ nil, site_id \\ nil, type \\ nil, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/links"
headers = []
query_ = []
query_ = if !is_nil(type) do
[{"type", type} | query_]
else
query_
end
query_ = if !is_nil(site_id) do
[{"siteId", site_id} | query_]
else
query_
end
query_ = if !is_nil(provider) do
[{"provider", provider} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
query_ = if !is_nil(link_ids) do
[{"linkIds", link_ids} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets information about one or more of your sites in a global network.
"""
def get_sites(client, global_network_id, max_results \\ nil, next_token \\ nil, site_ids \\ nil, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/sites"
headers = []
query_ = []
query_ = if !is_nil(site_ids) do
[{"siteIds", site_ids} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets information about the transit gateway registrations in a specified
global network.
"""
def get_transit_gateway_registrations(client, global_network_id, max_results \\ nil, next_token \\ nil, transit_gateway_arns \\ nil, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/transit-gateway-registrations"
headers = []
query_ = []
query_ = if !is_nil(transit_gateway_arns) do
[{"transitGatewayArns", transit_gateway_arns} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Lists the tags for a specified resource.
"""
def list_tags_for_resource(client, resource_arn, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Registers a transit gateway in your global network. The transit gateway can
be in any AWS Region, but it must be owned by the same AWS account that
owns the global network. You cannot register a transit gateway in more than
one global network.
"""
def register_transit_gateway(client, global_network_id, input, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/transit-gateway-registrations"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Tags a specified resource.
"""
def tag_resource(client, resource_arn, input, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Removes tags from a specified resource.
"""
def untag_resource(client, resource_arn, input, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
{query_, input} =
[
{"TagKeys", "tagKeys"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Updates the details for an existing device. To remove information for any
of the parameters, specify an empty string.
"""
def update_device(client, device_id, global_network_id, input, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/devices/#{URI.encode(device_id)}"
headers = []
query_ = []
request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Updates an existing global network. To remove information for any of the
parameters, specify an empty string.
"""
def update_global_network(client, global_network_id, input, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}"
headers = []
query_ = []
request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Updates the details for an existing link. To remove information for any of
the parameters, specify an empty string.
"""
def update_link(client, global_network_id, link_id, input, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/links/#{URI.encode(link_id)}"
headers = []
query_ = []
request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Updates the information for an existing site. To remove information for any
of the parameters, specify an empty string.
"""
def update_site(client, global_network_id, site_id, input, options \\ []) do
path_ = "/global-networks/#{URI.encode(global_network_id)}/sites/#{URI.encode(site_id)}"
headers = []
query_ = []
request(client, :patch, path_, query_, headers, input, options, nil)
end
# Builds, signs (AWS Signature V4), and performs one HTTP request against the
# AWS Network Manager REST API.
#
# `input` is `nil` for requests without a body (e.g. GET) and
# `success_status_code` is `nil` when any of the conventional REST success
# codes (200/202/204) should be accepted — the previous spec declared
# `method :: binary()`, `input :: map()` and `success_status_code ::
# pos_integer()`, but every caller in this module passes an atom method
# (`:get`, `:post`, ...) and may pass `nil` for the other two.
@spec request(AWS.Client.t(), atom(), binary(), list(), list(), map() | nil, list(), pos_integer() | nil) ::
        {:ok, map() | nil, map()}
        | {:error, term()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
  client = %{client | service: "networkmanager"}
  host = build_host("networkmanager", client)

  url =
    host
    |> build_url(path, client)
    |> add_query(query, client)

  additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
  headers = AWS.Request.add_headers(additional_headers, headers)

  # The payload must be encoded before signing: the signature covers the body.
  payload = encode!(client, input)
  headers = AWS.Request.sign_v4(client, method, url, headers, payload)
  perform_request(client, method, url, payload, headers, options, success_status_code)
end
# Executes the signed request and normalizes the result.
#
# A response is successful when its status code equals the expected
# `success_status_code`, or — when no explicit code was given — when it is
# one of the conventional REST success codes (200/202/204). The two
# consecutive `when` clauses below are alternatives (logical OR).
defp perform_request(client, method, url, payload, headers, options, success_status_code) do
  case AWS.Client.request(client, method, url, payload, headers, options) do
    {:ok, %{status_code: status_code, body: body} = response}
    when is_nil(success_status_code) and status_code in [200, 202, 204]
    when status_code == success_status_code ->
      # An empty body is passed through as `nil` instead of being decoded.
      body = if(body != "", do: decode!(client, body))
      {:ok, body, response}

    {:ok, response} ->
      # HTTP-level success but an unexpected status code.
      {:error, {:unexpected_response, response}}

    error = {:error, _reason} ->
      error
  end
end
# Resolves the hostname for the service endpoint.
#
# Clause order matters: a "local" region with an explicit endpoint wins,
# a bare "local" region falls back to "localhost", and otherwise the
# standard "<service>.<region>.<endpoint>" AWS naming is used.
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
  endpoint
end

defp build_host(_endpoint_prefix, %{region: "local"}) do
  "localhost"
end

defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
  "#{endpoint_prefix}.#{region}.#{endpoint}"
end
# Assembles the absolute URL from the client's transport settings
# (`:proto`/`:port`) plus the resolved host and request path.
defp build_url(host, path, %{proto: scheme, port: port}) do
  "#{scheme}://#{host}:#{port}#{path}"
end
# Appends the encoded query string to `url`; an empty query list leaves the
# URL untouched.
defp add_query(url, [], _client), do: url

defp add_query(url, query, client) do
  url <> "?" <> encode!(client, query, :query)
end
# Encodes `payload` with the client's configured encoder (JSON by default;
# `:query` for query strings).
defp encode!(client, payload, format \\ :json), do: AWS.Client.encode!(client, payload, format)
# Decodes a JSON response body with the client's configured decoder.
defp decode!(client, payload), do: AWS.Client.decode!(client, payload, :json)
end
|
lib/aws/generated/network_manager.ex
| 0.818229
| 0.423279
|
network_manager.ex
|
starcoder
|
defmodule Tensorflow.TensorProto do
  # Generated Protobuf (proto3) definition for TensorFlow's TensorProto
  # message. Field numbers and types must stay in sync with the upstream
  # tensor.proto schema, so edit only comments here.
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          dtype: Tensorflow.DataType.t(),
          tensor_shape: Tensorflow.TensorShapeProto.t() | nil,
          version_number: integer,
          tensor_content: binary,
          half_val: [integer],
          float_val: [float | :infinity | :negative_infinity | :nan],
          double_val: [float | :infinity | :negative_infinity | :nan],
          int_val: [integer],
          string_val: [binary],
          scomplex_val: [float | :infinity | :negative_infinity | :nan],
          int64_val: [integer],
          bool_val: [boolean],
          dcomplex_val: [float | :infinity | :negative_infinity | :nan],
          resource_handle_val: [Tensorflow.ResourceHandleProto.t()],
          variant_val: [Tensorflow.VariantTensorDataProto.t()],
          uint32_val: [non_neg_integer],
          uint64_val: [non_neg_integer]
        }

  defstruct [
    :dtype,
    :tensor_shape,
    :version_number,
    :tensor_content,
    :half_val,
    :float_val,
    :double_val,
    :int_val,
    :string_val,
    :scomplex_val,
    :int64_val,
    :bool_val,
    :dcomplex_val,
    :resource_handle_val,
    :variant_val,
    :uint32_val,
    :uint64_val
  ]

  # Note: field numbers follow the .proto wire schema, not declaration order
  # (e.g. half_val is field 13 even though it is declared before float_val).
  field :dtype, 1, type: Tensorflow.DataType, enum: true
  field :tensor_shape, 2, type: Tensorflow.TensorShapeProto
  field :version_number, 3, type: :int32
  field :tensor_content, 4, type: :bytes
  field :half_val, 13, repeated: true, type: :int32, packed: true
  field :float_val, 5, repeated: true, type: :float, packed: true
  field :double_val, 6, repeated: true, type: :double, packed: true
  field :int_val, 7, repeated: true, type: :int32, packed: true
  field :string_val, 8, repeated: true, type: :bytes
  field :scomplex_val, 9, repeated: true, type: :float, packed: true
  field :int64_val, 10, repeated: true, type: :int64, packed: true
  field :bool_val, 11, repeated: true, type: :bool, packed: true
  field :dcomplex_val, 12, repeated: true, type: :double, packed: true
  field :resource_handle_val, 14, repeated: true, type: Tensorflow.ResourceHandleProto
  field :variant_val, 15, repeated: true, type: Tensorflow.VariantTensorDataProto
  field :uint32_val, 16, repeated: true, type: :uint32, packed: true
  field :uint64_val, 17, repeated: true, type: :uint64, packed: true
end
defmodule Tensorflow.VariantTensorDataProto do
  # Generated Protobuf (proto3) definition for TensorFlow's
  # VariantTensorDataProto message; must stay in sync with the upstream
  # tensor.proto schema, so edit only comments here.
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          type_name: String.t(),
          metadata: binary,
          tensors: [Tensorflow.TensorProto.t()]
        }
  defstruct [:type_name, :metadata, :tensors]

  field :type_name, 1, type: :string
  field :metadata, 2, type: :bytes
  field :tensors, 3, repeated: true, type: Tensorflow.TensorProto
end
|
lib/messages/tensorflow/core/framework/tensor.pb.ex
| 0.79158
| 0.820146
|
tensor.pb.ex
|
starcoder
|
defmodule EctoIPRange.IP4R do
  @moduledoc """
  Struct for PostgreSQL `:ip4r`.
  ## Usage
  When used during a changeset cast the following values are accepted:
  - `:inet.ip4_address()`: an IP4 tuple, e.g. `{127, 0, 0, 1}` (single address only)
  - `binary`
  - `"127.0.0.1"`: single address
  - `"127.0.0.0/24"`: CIDR notation for a range from `127.0.0.0` to `127.0.0.255`
  - `"127.0.0.1-127.0.0.2"`: arbitrary range
  - `EctoIPRange.IP4R.t()`: a pre-casted struct
  ## Fields
  * `range`
  * `first_ip`
  * `last_ip`
  """
  use Ecto.Type

  alias EctoIPRange.Util.CIDR
  alias EctoIPRange.Util.Inet

  @type t :: %__MODULE__{
          range: binary,
          first_ip: :inet.ip4_address(),
          last_ip: :inet.ip4_address()
        }

  defstruct [:range, :first_ip, :last_ip]

  # Maps to the PostgreSQL ip4r extension type.
  @impl Ecto.Type
  def type, do: :ip4r

  # An IP4 tuple is stored as a single-address /32 range. Clause order
  # matters: tuples, then binaries, then pre-cast structs.
  @impl Ecto.Type
  def cast({_, _, _, _} = ip4_address) do
    case Inet.ntoa(ip4_address) do
      address when is_binary(address) ->
        {:ok,
         %__MODULE__{
           range: address <> "/32",
           first_ip: ip4_address,
           last_ip: ip4_address
         }}

      _ ->
        :error
    end
  end

  # Binaries are dispatched on notation: "a-b" range, "a/n" CIDR, otherwise
  # a single address.
  def cast(address) when is_binary(address) do
    cond do
      String.contains?(address, "-") -> cast_range(address)
      String.contains?(address, "/") -> cast_cidr(address)
      true -> cast_binary(address)
    end
  end

  # Pre-cast structs pass through unchanged; anything else is rejected.
  def cast(%__MODULE__{} = address), do: {:ok, address}
  def cast(_), do: :error

  @impl Ecto.Type
  def load(%__MODULE__{} = address), do: {:ok, address}
  def load(_), do: :error

  @impl Ecto.Type
  def dump(%__MODULE__{} = address), do: {:ok, address}
  def dump(_), do: :error

  # Casts a single IPv4 address string to a /32 range.
  defp cast_binary(address) do
    case Inet.parse_ipv4_binary(address) do
      {:ok, ip4_address} ->
        {:ok,
         %__MODULE__{
           range: address <> "/32",
           first_ip: ip4_address,
           last_ip: ip4_address
         }}

      _ ->
        :error
    end
  end

  # Casts CIDR notation ("address/maskbits", maskbits in 0..32) by expanding
  # it to its first and last address via the CIDR utility.
  defp cast_cidr(cidr) do
    with [address, maskstring] <- String.split(cidr, "/", parts: 2),
         {maskbits, ""} when maskbits in 0..32 <- Integer.parse(maskstring),
         {first_ip4_address, last_ip4_address} <- CIDR.parse_ipv4(address, maskbits) do
      {:ok,
       %__MODULE__{
         range: cidr,
         first_ip: first_ip4_address,
         last_ip: last_ip4_address
       }}
    else
      _ -> :error
    end
  end

  # Casts an arbitrary "first-last" range. NOTE(review): the bounds are not
  # ordered-checked here, so "b-a" with first > last is accepted as-is —
  # presumably validated downstream; verify.
  defp cast_range(range) do
    with [first_ip, last_ip] <- String.split(range, "-", parts: 2),
         {:ok, first_ip4_address} <- Inet.parse_ipv4_binary(first_ip),
         {:ok, last_ip4_address} <- Inet.parse_ipv4_binary(last_ip) do
      {:ok,
       %__MODULE__{
         range: range,
         first_ip: first_ip4_address,
         last_ip: last_ip4_address
       }}
    else
      _ -> :error
    end
  end
end
|
lib/ecto_ip_range/ip4r.ex
| 0.900638
| 0.534309
|
ip4r.ex
|
starcoder
|
defmodule ExUnit.DuplicateTestError do
  # Raised when two tests with the same generated name are defined in the
  # same module (see ExUnit.Case.register_test/4).
  defexception [:message]
end
defmodule ExUnit.DuplicateDescribeError do
  # Raised when two describe blocks with the same name are defined in the
  # same module (see ExUnit.Case.__describe__/3).
  defexception [:message]
end
defmodule ExUnit.Case do
@moduledoc """
Helpers for defining test cases.
This module must be used in other modules as a way to configure
and prepare them for testing.
When used, it accepts the following options:
* `:async` - configures tests in this module to run concurrently with
tests in other modules. Tests in the same module never run concurrently.
It should be enabled only if tests do not change any global state.
Defaults to `false`.
This module automatically includes all callbacks defined in
`ExUnit.Callbacks`. See that module for more information on `setup`,
`start_supervised`, `on_exit` and the test process lifecycle.
For grouping tests together, see `describe/2` in this module.
## Examples
defmodule AssertionTest do
# Use the module
use ExUnit.Case, async: true
# The "test" macro is imported by ExUnit.Case
test "always pass" do
assert true
end
end
## Context
All tests receive a context as an argument. The context is particularly
useful for sharing information between callbacks and tests:
defmodule KVTest do
use ExUnit.Case
setup do
{:ok, pid} = KV.start_link()
{:ok, pid: pid}
end
test "stores key-value pairs", context do
assert KV.put(context[:pid], :hello, :world) == :ok
assert KV.get(context[:pid], :hello) == :world
end
end
As the context is a map, it can be pattern matched on to extract
information:
test "stores key-value pairs", %{pid: pid} = _context do
assert KV.put(pid, :hello, :world) == :ok
assert KV.get(pid, :hello) == :world
end
## Tags
The context is used to pass information from the callbacks to
the test. In order to pass information from the test to the
callback, ExUnit provides tags.
By tagging a test, the tag value can be accessed in the context,
allowing the developer to customize the test. Let's see an
example:
defmodule FileTest do
# Changing directory cannot be async
use ExUnit.Case, async: false
setup context do
# Read the :cd tag value
if cd = context[:cd] do
prev_cd = File.cwd!()
File.cd!(cd)
on_exit(fn -> File.cd!(prev_cd) end)
end
:ok
end
@tag cd: "fixtures"
test "reads UTF-8 fixtures" do
File.read("README.md")
end
end
In the example above, we have defined a tag called `:cd` that is
read in the setup callback to configure the working directory the
test is going to run on.
Tags are also very effective when used with case templates
(`ExUnit.CaseTemplate`) allowing callbacks in the case template
to customize the test behaviour.
Note a tag can be set in two different ways:
@tag key: value
@tag :key # equivalent to setting @tag key: true
If a tag is given more than once, the last value wins.
### Module and describe tags
A tag can be set for all tests in a module or describe block by
setting `@moduletag` or `@describetag` inside each context
respectively:
defmodule ApiTest do
use ExUnit.Case
@moduletag :external
describe "makes calls to the right endpoint" do
@describetag :endpoint
# ...
end
end
If you are setting a `@moduletag` or `@describetag` attribute, you must
set them after your call to `use ExUnit.Case` otherwise you will see
compilation errors.
If the same key is set via `@tag`, the `@tag` value has higher
precedence.
### Known tags
The following tags are set automatically by ExUnit and are
therefore reserved:
* `:module` - the module on which the test was defined
* `:file` - the file on which the test was defined
* `:line` - the line on which the test was defined
* `:test` - the test name
* `:async` - if the test case is in async mode
* `:registered` - used for `ExUnit.Case.register_attribute/3` values
* `:describe` - the describe block the test belongs to
The following tags customize how tests behave:
* `:capture_log` - see the "Log Capture" section below
* `:skip` - skips the test with the given reason
* `:timeout` - customizes the test timeout in milliseconds (defaults to 60000).
Accepts `:infinity` as a timeout value.
The `:test_type` tag is automatically set by ExUnit, but is _not_ reserved.
This tag is available for users to customize if they desire.
## Filters
Tags can also be used to identify specific tests, which can then
be included or excluded using filters. The most common functionality
is to exclude some particular tests from running, which can be done
via `ExUnit.configure/1`:
# Exclude all external tests from running
ExUnit.configure(exclude: [external: true])
From now on, ExUnit will not run any test that has the `:external` option
set to `true`. This behaviour can be reversed with the `:include` option
which is usually passed through the command line:
mix test --include external:true
Run `mix help test` for more information on how to run filters via Mix.
Another use case for tags and filters is to exclude all tests that have
a particular tag by default, regardless of its value, and include only
a certain subset:
ExUnit.configure(exclude: :os, include: [os: :unix])
A given include/exclude filter can be given more than once:
ExUnit.configure(exclude: [os: :unix, os: :windows])
Keep in mind that all tests are included by default, so unless they are
excluded first, the `include` option has no effect.
## Log Capture
ExUnit can optionally suppress printing of log messages that are generated
during a test. Log messages generated while running a test are captured and
only if the test fails are they printed to aid with debugging.
You can opt into this behaviour for individual tests by tagging them with
`:capture_log` or enable log capture for all tests in the ExUnit configuration:
ExUnit.start(capture_log: true)
This default can be overridden by `@tag capture_log: false` or
`@moduletag capture_log: false`.
Since `setup_all` blocks don't belong to a specific test, log messages generated
in them (or between tests) are never captured. If you want to suppress these
messages as well, remove the console backend globally by setting:
config :logger, backends: []
"""
@type env :: Module.t() | Macro.Env.t()
@reserved [:module, :file, :line, :test, :async, :registered, :describe]
@doc false
defmacro __using__(opts) do
  # Tests are registered with ExUnit.Server at compile time, so the :ex_unit
  # application must already be running when the module is compiled.
  unless Process.whereis(ExUnit.Server) do
    raise "cannot use ExUnit.Case without starting the ExUnit application, " <>
            "please call ExUnit.start() or explicitly start the :ex_unit app"
  end

  quote do
    async = !!unquote(opts)[:async]

    # Initialize the module only once, even when `use ExUnit.Case` is
    # invoked multiple times (e.g. via a case template).
    unless Module.get_attribute(__MODULE__, :ex_unit_tests) do
      # Tags set before `use ExUnit.Case` would be silently dropped when the
      # attributes are (re)registered below, so fail loudly instead.
      tag_check =
        [:moduletag, :describetag, :tag]
        |> Enum.any?(&Module.get_attribute(__MODULE__, &1))

      if tag_check do
        raise "you must set @tag, @describetag, and @moduletag after the call to \"use ExUnit.Case\""
      end

      attributes = [
        :ex_unit_tests,
        :tag,
        :describetag,
        :moduletag,
        :ex_unit_registered_test_attributes,
        :ex_unit_registered_describe_attributes,
        :ex_unit_registered_module_attributes,
        :ex_unit_used_describes
      ]

      # All bookkeeping attributes accumulate values instead of overwriting.
      Enum.each(attributes, &Module.register_attribute(__MODULE__, &1, accumulate: true))

      @before_compile ExUnit.Case
      @after_compile ExUnit.Case
      @ex_unit_async async
      @ex_unit_describe nil
      use ExUnit.Callbacks
    end

    import ExUnit.Callbacks
    import ExUnit.Assertions
    import ExUnit.Case, only: [describe: 2, test: 1, test: 2, test: 3]
    import ExUnit.DocTest
  end
end
@doc """
Defines a test with a string.
Provides a convenient macro that allows a test to be
defined with a string. This macro automatically inserts
the atom `:ok` as the last line of the test. That said,
a passing test always returns `:ok`, but, more importantly,
it forces Elixir to not tail call optimize the test and
therefore avoids hiding lines from the backtrace.
## Examples
test "true is equal to true" do
assert true == true
end
"""
defmacro test(message, var \\ quote(do: _), contents) do
  # Append `:ok` to the body so the test function never ends in a tail call;
  # this keeps the final line visible in the backtrace on failure.
  contents =
    case contents do
      [do: block] ->
        quote do
          unquote(block)
          :ok
        end

      _ ->
        quote do
          try(unquote(contents))
          :ok
        end
    end

  var = Macro.escape(var)
  contents = Macro.escape(contents, unquote: true)

  # Register the test under a generated name and define it as a 1-arity
  # function that receives the test context (bound to `var`).
  quote bind_quoted: [var: var, contents: contents, message: message] do
    name = ExUnit.Case.register_test(__ENV__, :test, message, [])
    def unquote(name)(unquote(var)), do: unquote(contents)
  end
end
@doc """
Defines a not implemented test with a string.
Provides a convenient macro that allows a test to be defined
with a string, but not yet implemented. The resulting test will
always fail and print a "Not implemented" error message. The
resulting test case is also tagged with `:not_implemented`.
## Examples
test "this will be a test in future"
"""
defmacro test(message) do
  # A test without a body is registered as "not implemented": it is tagged
  # :not_implemented and always flunks when run.
  quote bind_quoted: binding() do
    name = ExUnit.Case.register_test(__ENV__, :test, message, [:not_implemented])
    def unquote(name)(_), do: flunk("Not implemented")
  end
end
@doc """
Describes tests together.
Every describe block receives a name which is used as prefix for
upcoming tests. Inside a block, `ExUnit.Callbacks.setup/1` may be
invoked and it will define a setup callback to run only for the
current block. The describe name is also added as a tag, allowing
developers to run tests for specific blocks.
## Examples
defmodule StringTest do
use ExUnit.Case, async: true
describe "String.capitalize/1" do
test "first grapheme is in uppercase" do
assert String.capitalize("hello") == "Hello"
end
test "converts remaining graphemes to lowercase" do
assert String.capitalize("HELLO") == "Hello"
end
end
end
When using Mix, you can run all tests in a describe block by name:
mix test --only describe:"String.capitalize/1"
or by passing the exact line the describe block starts on:
mix test path/to/file:123
Note describe blocks cannot be nested. Instead of relying on hierarchy
for composition, developers should build on top of named setups. For
example:
defmodule UserManagementTest do
use ExUnit.Case, async: true
describe "when user is logged in and is an admin" do
setup [:log_user_in, :set_type_to_admin]
test ...
end
describe "when user is logged in and is a manager" do
setup [:log_user_in, :set_type_to_manager]
test ...
end
defp log_user_in(context) do
# ...
end
end
By forbidding hierarchies in favor of named setups, it is straightforward
for the developer to glance at each describe block and know exactly the
setup steps involved.
"""
defmacro describe(message, do: block) do
  quote do
    ExUnit.Case.__describe__(__MODULE__, __ENV__.line, unquote(message))

    try do
      unquote(block)
    after
      # Always reset describe-scoped state, even if the block raises: the
      # current describe marker, its tags, and any registered describe
      # attributes are cleared once the block finishes.
      @ex_unit_describe nil
      Module.delete_attribute(__MODULE__, :describetag)

      for attribute <- Module.get_attribute(__MODULE__, :ex_unit_registered_describe_attributes) do
        Module.delete_attribute(__MODULE__, attribute)
      end
    end
  end
end
@doc false
def __describe__(module, line, message) do
  # Nested describe blocks are not supported; named setups are the intended
  # composition mechanism.
  if Module.get_attribute(module, :ex_unit_describe) do
    raise "cannot call \"describe\" inside another \"describe\". See the documentation " <>
            "for ExUnit.Case.describe/2 on named setups and how to handle hierarchies"
  end

  # Validate the describe name: must be a string and unique in this module.
  cond do
    not is_binary(message) ->
      raise ArgumentError, "describe name must be a string, got: #{inspect(message)}"

    message in Module.get_attribute(module, :ex_unit_used_describes) ->
      raise ExUnit.DuplicateDescribeError,
            "describe #{inspect(message)} is already defined in #{inspect(module)}"

    true ->
      :ok
  end

  # @describetag set outside any describe block would leak into the next one.
  if Module.get_attribute(module, :describetag) != [] do
    raise "@describetag must be set inside describe/2 blocks"
  end

  # Record the active describe (line + name) and remember the name so
  # duplicates are caught later.
  Module.put_attribute(module, :ex_unit_describe, {line, message})
  Module.put_attribute(module, :ex_unit_used_describes, message)
  :ok
end
@doc false
defmacro __before_compile__(_) do
  # Expose the accumulated tests to the runner via __ex_unit__/0.
  quote do
    def __ex_unit__ do
      %ExUnit.TestModule{name: __MODULE__, tests: @ex_unit_tests}
    end
  end
end
@doc false
# Enqueues the compiled module with the ExUnit server so its tests run in
# the appropriate (async or sync) queue.
def __after_compile__(%{module: module}, _bytecode) do
  async? = Module.get_attribute(module, :ex_unit_async)

  if !async? do
    ExUnit.Server.add_sync_module(module)
  else
    ExUnit.Server.add_async_module(module)
  end
end
@doc """
Registers a function to run as part of this case.
This is used by third-party projects, like QuickCheck, to
implement macros like `property/3` that works like `test`
but instead defines a property. See `test/3` implementation
for an example of invoking this function.
The test type will be converted to a string and pluralized for
display. You can use `ExUnit.plural_rule/2` to set a custom
pluralization.
"""
def register_test(%{module: mod, file: file, line: line}, test_type, name, tags) do
  moduletag = Module.get_attribute(mod, :moduletag)

  # :moduletag is only registered by __using__/1, so its absence means
  # `use ExUnit.Case` was never invoked in this module.
  unless moduletag do
    raise "cannot define #{test_type}. Please make sure you have invoked " <>
            "\"use ExUnit.Case\" in the current module"
  end

  registered_attribute_keys = [
    :ex_unit_registered_module_attributes,
    :ex_unit_registered_describe_attributes,
    :ex_unit_registered_test_attributes
  ]

  # Snapshot the current values of all registered attributes; they become
  # available to the test through `context.registered`.
  registered =
    for key <- registered_attribute_keys,
        attribute <- Module.get_attribute(mod, key),
        into: %{} do
      {attribute, Module.get_attribute(mod, attribute)}
    end

  # @tag is consumed (deleted) here so it only applies to this one test.
  tag = Module.delete_attribute(mod, :tag)
  async = Module.get_attribute(mod, :ex_unit_async)

  # Inside a describe block the test name is prefixed with the describe
  # name, and the block's tags are picked up.
  {name, describe, describe_line, describetag} =
    case Module.get_attribute(mod, :ex_unit_describe) do
      {line, describe} ->
        description = :"#{test_type} #{describe} #{name}"
        {description, describe, line, Module.get_attribute(mod, :describetag)}

      _ ->
        {:"#{test_type} #{name}", nil, nil, []}
    end

  if Module.defines?(mod, {name, 1}) do
    raise ExUnit.DuplicateTestError, ~s("#{name}" is already defined in #{inspect(mod)})
  end

  # Merge tags by precedence (@tag > @describetag > @moduletag): the list is
  # reversed inside normalize_tags/1, so earlier entries overwrite later ones.
  tags =
    (tags ++ tag ++ describetag ++ moduletag)
    |> normalize_tags
    |> validate_tags
    |> Map.merge(%{
      line: line,
      file: file,
      registered: registered,
      async: async,
      describe: describe,
      describe_line: describe_line,
      test_type: test_type
    })

  test = %ExUnit.Test{name: name, case: mod, tags: tags, module: mod}
  Module.put_attribute(mod, :ex_unit_tests, test)

  # Test-scoped registered attributes are reset after every test.
  for attribute <- Module.get_attribute(mod, :ex_unit_registered_test_attributes) do
    Module.delete_attribute(mod, attribute)
  end

  name
end
@doc """
Registers a new attribute to be used during `ExUnit.Case` tests.
The attribute values will be available through `context.registered`.
Registered values are cleared after each `test/3` similar
to `@tag`.
This function takes the same options as `Module.register_attribute/3`.
## Examples
defmodule MyTest do
use ExUnit.Case
ExUnit.Case.register_attribute(__MODULE__, :fixtures, accumulate: true)
@fixtures :user
@fixtures {:post, insert: false}
test "using custom attribute", context do
assert context.registered.fixtures == [{:post, insert: false}, :user]
end
test "custom attributes are cleared per test", context do
assert context.registered.fixtures == []
end
end
"""
@spec register_attribute(env, atom, keyword) :: :ok
def register_attribute(env, name, opts \\ [])

# Accepts either a Macro.Env (or any map with a :module key) or the module
# atom itself.
def register_attribute(%{module: mod}, name, opts), do: register_attribute(mod, name, opts)

def register_attribute(mod, name, opts) when is_atom(mod) and is_atom(name) and is_list(opts) do
  register_attribute(:ex_unit_registered_test_attributes, mod, name, opts)
end
@doc """
Registers a new describe attribute to be used during `ExUnit.Case` tests.
The attribute values will be available through `context.registered`.
Registered values are cleared after each `describe/2` similar
to `@describetag`.
This function takes the same options as `Module.register_attribute/3`.
## Examples
defmodule MyTest do
use ExUnit.Case
ExUnit.Case.register_describe_attribute(__MODULE__, :describe_fixtures, accumulate: true)
describe "using custom attribute" do
@describe_fixtures :user
@describe_fixtures {:post, insert: false}
test "has attribute", context do
assert context.registered.describe_fixtures == [{:post, insert: false}, :user]
end
end
describe "custom attributes are cleared per describe" do
test "doesn't have attributes", context do
assert context.registered.describe_fixtures == []
end
end
end
"""
@doc since: "1.10.0"
@spec register_describe_attribute(env, atom, keyword) :: :ok
def register_describe_attribute(env, name, opts \\ [])

# Accepts either a Macro.Env (or any map with a :module key) or the module
# atom itself.
def register_describe_attribute(%{module: mod}, name, opts) do
  register_describe_attribute(mod, name, opts)
end

def register_describe_attribute(mod, name, opts)
    when is_atom(mod) and is_atom(name) and is_list(opts) do
  register_attribute(:ex_unit_registered_describe_attributes, mod, name, opts)
end
@doc """
Registers a new module attribute to be used during `ExUnit.Case` tests.
The attribute values will be available through `context.registered`.
This function takes the same options as `Module.register_attribute/3`.
## Examples
defmodule MyTest do
use ExUnit.Case
ExUnit.Case.register_module_attribute(__MODULE__, :module_fixtures, accumulate: true)
@module_fixtures :user
@module_fixtures {:post, insert: false}
test "using custom attribute", context do
assert context.registered.fixtures == [{:post, insert: false}, :user]
end
test "still using custom attribute", context do
assert context.registered.fixtures == [{:post, insert: false}, :user]
end
end
"""
@doc since: "1.10.0"
@spec register_module_attribute(env, atom, keyword) :: :ok
def register_module_attribute(env, name, opts \\ [])

# Accepts either a Macro.Env (or any map with a :module key) or the module
# atom itself.
def register_module_attribute(%{module: mod}, name, opts) do
  register_module_attribute(mod, name, opts)
end

def register_module_attribute(mod, name, opts)
    when is_atom(mod) and is_atom(name) and is_list(opts) do
  register_attribute(:ex_unit_registered_module_attributes, mod, name, opts)
end
# Shared implementation for the three public register_*_attribute functions:
# validate, register the attribute with Module, and remember it under the
# given registry `type`. Validation must run first, before any mutation.
defp register_attribute(type, mod, name, opts) do
  validate_registered_attribute!(type, mod, name)
  Module.register_attribute(mod, name, opts)
  Module.put_attribute(mod, type, name)
end
defp validate_registered_attribute!(type, mod, name) do
  registered_attribute_keys = [
    :ex_unit_registered_module_attributes,
    :ex_unit_registered_describe_attributes,
    :ex_unit_registered_test_attributes
  ]

  # The attribute must not already be registered under a *different*
  # registry (the `type != key` filter skips its own registry).
  for key <- registered_attribute_keys,
      type != key and name in Module.get_attribute(mod, key) do
    raise ArgumentError, "cannot register attribute #{inspect(name)} multiple times"
  end

  # Values assigned before registration would be lost, so reject them.
  if Module.get_attribute(mod, name) do
    raise "you must set @#{name} after it has been registered"
  end
end
defp validate_tags(tags) do
  # Users may not set any of the tags ExUnit reserves for itself (@reserved).
  for tag <- @reserved, Map.has_key?(tags, tag) do
    raise "cannot set tag #{inspect(tag)} because it is reserved by ExUnit"
  end

  # :test_type is user-customizable but must remain an atom.
  unless is_atom(tags[:test_type]) do
    raise("value for tag \":test_type\" must be an atom")
  end

  tags
end
# Flattens a mixed list of tags into a map: a bare atom becomes `tag: true`
# and a keyword list is merged in. The list is reversed first so entries
# that appear EARLIER in the input win on key conflicts.
defp normalize_tags(tags) do
  tags
  |> Enum.reverse()
  |> Enum.reduce(%{}, fn
    atom_tag, acc when is_atom(atom_tag) -> Map.put(acc, atom_tag, true)
    kw_tag, acc when is_list(kw_tag) -> Enum.into(kw_tag, acc)
  end)
end
end
|
lib/ex_unit/lib/ex_unit/case.ex
| 0.865082
| 0.738881
|
case.ex
|
starcoder
|
defmodule VintageNet do
@moduledoc """
`VintageNet` is network configuration library built specifically for [Nerves
Project](https://nerves-project.org) devices. It has the following features:
* Ethernet and WiFi support included. Extendible to other technologies
* Default configurations specified in your Application config
* Runtime updates to configurations are persisted and applied on next boot (can
be disabled)
* Simple subscription to network status change events
* Connect to multiple networks at a time and prioritize which interfaces are
used (Ethernet over WiFi over cellular)
* Internet connection monitoring and failure detection (currently slow and
simplistic)
See
[github.com/nerves-networking/vintage_net](https://github.com/nerves-networking/vintage_net)
for more information.
"""
alias VintageNet.{Info, Interface}
@typedoc """
A name for the network interface
Names depend on the device drivers and any software that may rename them.
Typical names on Nerves are:
* "eth0", "eth1", etc. for wired Ethernet interfaces
* "wlan0", etc. for WiFi interfaces
* "ppp0" for cellular modems
* "usb0" for gadget USB virtual Ethernet interfaces
"""
@type ifname :: String.t()
@typedoc """
IP addresses in VintageNet can be specified as strings or tuples
While VintageNet uses IP addresses in tuple form internally, it can be
cumbersome to always convert to tuple form in practice. The general rule is
that VintageNet is flexible in how it accepts IP addresses, but if you get an
address from a VintageNet API, it will be in tuple form.
"""
@type any_ip_address :: String.t() | :inet.ip_address()
@typedoc """
The number of IP address bits for the subnet
"""
@type prefix_length :: ipv4_prefix_length() | ipv6_prefix_length()
@typedoc """
The number of bits to use for an IPv4 subnet
For example, if you have a subnet mask of 255.255.255.0, then the prefix
length would be 24.
"""
@type ipv4_prefix_length :: 0..32
@typedoc """
The number of bits to use for an IPv6 subnet
"""
@type ipv6_prefix_length :: 0..128
@typedoc """
Interface connection status
* `:disconnected` - The interface doesn't exist or it's not connected
* `:lan` - The interface is connected to the LAN, but may not be able
reach the Internet
* `:internet` - Packets going through the interface should be able to
reach the Internet
"""
@type connection_status :: :lan | :internet | :disconnected
@typedoc """
Interface type
This is a coarse characterization of a network interface that can be useful
for prioritizing interfaces.
* `:ethernet` - Wired-based networking. Generally expected to be fast.
* `:wifi` - Wireless networking. Expected to be not as fast as Ethernet,
* `:mobile` - Cellular-based networking. Expected to be metered and slower
than `:wifi` and `:ethernet`
* `:local` - Interfaces that never route to other hosts
* `:unknown` - Catch-all when the network interface can't be categorized
These are general categories that are helpful for VintageNet's default
routing prioritization. See `VintageNet.Route.DefaultMetric` for more
information on the use.
"""
@type interface_type :: :ethernet | :wifi | :mobile | :local | :unknown
@typedoc """
Valid options for `VintageNet.configure/3`
* `:persist` - Whether or not to save the configuration (defaults to `true`)
"""
@type configure_options :: [persist: boolean]
@typedoc """
Valid options for `VintageNet.info/1`
* `:redact` - Whether to hide passwords and similar information from the output (defaults to `true`)
"""
@type info_options :: {:redact, boolean()}
@typedoc """
A VintageNet property
VintageNet uses lists of strings to name networking configuration and status
items.
"""
@type property :: [String.t()]
@typedoc """
A pattern for matching against VintageNet properties
Patterns are used when subscribing for network property changes or getting a
set of properties and their values.
Since properties are organized hierarchically, the default way of matching patterns is to match on prefixes. It's also
possible to use the `:_` wildcard to match anything at a position.
"""
@type pattern :: [String.t() | :_ | :"$"]
@typedoc """
A property's value
See the `README.md` for documentation on available properties.
"""
@type value :: any()
@doc """
Return a list of all interfaces on the system
"""
@spec all_interfaces() :: [ifname()]
def all_interfaces() do
  # Keep only interfaces whose "present" property is exactly true
  ["interface", :_, "present"]
  |> VintageNet.match()
  |> Enum.flat_map(fn
    {[_interface, ifname, _present], true} -> [ifname]
    _other -> []
  end)
end
@doc """
Return a list of configured interfaces

Interfaces whose type property is `VintageNet.Technology.Null` are treated
as unconfigured and excluded.
"""
@spec configured_interfaces() :: [ifname()]
def configured_interfaces() do
  for {[_interface, ifname, _type], technology} <- VintageNet.match(["interface", :_, "type"]),
      technology != VintageNet.Technology.Null do
    ifname
  end
end
@doc """
Return the maximum number of interfaces controlled by VintageNet
Internal constraints mean that VintageNet can't manage an arbitrary number of
interfaces and knowing the max can reduce some processing. The limit is set
by the application config. Unless you need over 100 network interfaces,
VintageNet's use of the Linux networking API is not likely to be an issue,
though.
"""
@spec max_interface_count() :: 1..100
def max_interface_count() do
# NOTE(review): no default is supplied, so this returns nil (contradicting
# the 1..100 spec) if :max_interface_count is absent from the application
# environment. Presumably vintage_net's own config always sets it — confirm.
Application.get_env(:vintage_net, :max_interface_count)
end
@doc """
Update the configuration of a network interface
Configurations are validated and normalized before being applied. This means
that type errors and missing required fields will be caught and old or
redundant ways of specifying configurations will be fixed. Call
`get_configuration/1` to see what changes, if any, were made as part of
the normalization process.
After validation, the configuration is optionally persisted and applied.
See the `VintageNet` documentation for configuration examples or your
`VintageNet.Technology` provider's docs.
Options:
* `:persist` - set to `false` to avoid persisting this configuration. System
restarts will revert to the previous configuration. Defaults to true.
"""
@spec configure(ifname(), map(), configure_options()) :: :ok | {:error, any()}
def configure(ifname, config, options \\ []) do
# Validation, normalization, persistence, and application all happen in the
# Interface server for this ifname
Interface.configure(ifname, config, options)
end
@doc """
Deconfigure an interface and persist (by default) the change.
Supports same options as `configure/3`
"""
@spec deconfigure(ifname(), configure_options()) :: :ok | {:error, any()}
def deconfigure(ifname, options \\ []) do
Interface.deconfigure(ifname, options)
end
@doc """
Configure an interface to use the defaults

This configures an interface to the defaults found in the application
environment (`config.exs`). If the application environment doesn't have a
default configuration, the interface is deconfigured. On reboot, the
interface will continue to use the defaults and if a new version of firmware
updates the defaults, it will use those.
"""
@spec reset_to_defaults(ifname()) :: :ok | {:error, any()}
def reset_to_defaults(ifname) do
  case configure(ifname, default_config(ifname)) do
    :ok ->
      # Drop the persistence file so that if the defaults change later,
      # the new defaults win over a stale saved copy.
      VintageNet.Persistence.call(:clear, [ifname])

    error ->
      error
  end
end
# Look up the default configuration for `ifname` in the application
# environment; fall back to the null technology when none is defined.
defp default_config(ifname) do
  env = VintageNet.Application.get_config_env()

  case List.keyfind(env, ifname, 0) do
    {^ifname, config} -> config
    _no_default -> %{type: VintageNet.Technology.Null}
  end
end
@doc """
Return the settings for the specified interface
"""
@spec get_configuration(ifname()) :: map()
def get_configuration(ifname) do
  case PropertyTable.get(VintageNet, ["interface", ifname, "config"]) do
    nil -> raise RuntimeError, "No configuration for #{ifname}"
    config -> config
  end
end
@doc """
Check if this is a valid configuration

This runs the validation routines for a settings map, but doesn't try to
apply them.
"""
@spec configuration_valid?(ifname(), map()) :: boolean()
def configuration_valid?(ifname, config) do
  # Valid iff the raw-config conversion succeeds
  match?({:ok, _raw_config}, Interface.to_raw_config(ifname, config))
end
@doc """
Get the current value of a network property
See `get_by_prefix/1` for exact prefix matches (i.e., get all properties for one
interface) and `match/1` to run wildcard matches (i.e., get a specific
property for all interfaces).
"""
@spec get(property(), value()) :: value()
def get(name, default \\ nil) do
# Returns `default` when the property has never been set
PropertyTable.get(VintageNet, name, default)
end
@doc """
Get a list of all properties matching a pattern

Patterns are lists of strings that may specify `:_` at a position in the
list to match any value there.
"""
@spec match(pattern()) :: [{property(), value()}]
def match(pattern) do
  # The trailing :"$" presumably anchors the pattern to exactly this depth
  # (compare get_by_prefix/1, which omits it) — see the `pattern` typedoc.
  VintageNet
  |> PropertyTable.match(pattern ++ [:"$"])
  |> Enum.sort()
end

@doc """
Get a list of all properties matching the specified prefix

To get a list of all known properties and their values, call
`VintageNet.get_by_prefix([])`
"""
@spec get_by_prefix(property()) :: [{property(), value()}]
def get_by_prefix(prefix) do
  Enum.sort(PropertyTable.match(VintageNet, prefix))
end
@doc """
Subscribe to property change messages
Messages have the form:
```
{VintageNet, property_name, old_value, new_value, metadata}
```
Subscriptions are prefix matches. For example, to get notified whenever a property
changes on "wlan0", run this:
```
VintageNet.subscribe(["interface", "wlan0"])
```
It's also possible to match with wildcards using `:_`. For example, to
get notified whenever an IP address in the system changes, do this:
```
VintageNet.subscribe(["interface", :_, "addresses"])
```
"""
@spec subscribe(pattern()) :: :ok
def subscribe(name) do
# Change messages are delivered to the calling process
PropertyTable.subscribe(VintageNet, name)
end
@doc """
Stop subscribing to property change messages
"""
@spec unsubscribe(pattern()) :: :ok
def unsubscribe(name) do
PropertyTable.unsubscribe(VintageNet, name)
end
@doc """
Run a command on a network interface
Commands are mostly network interface-specific. Also see the `VintageNet`
PropertyTable for getting status or registering for status changes.
"""
@spec ioctl(ifname(), atom(), any()) :: :ok | {:ok, any()} | {:error, any()}
def ioctl(ifname, command, args \\ []) do
Interface.ioctl(ifname, command, args)
end
@doc """
Initiate an access point scan on a wireless interface
The scan results are posted asynchronously to the `["interface", ifname, "wifi", "access_points"]`
property as they come in. After waiting a second or two they can be fetched via
`VintageNet.get(["interface", ifname, "wifi", "access_points"])`.
It appears that there's some variation in how scanning is implemented on WiFi adapters. One
strategy that seems to work is to call `scan/1` every 10 seconds or so while prompting a user to
pick a WiFi network.
This is a utility function for calling the `:scan` ioctl.
"""
@spec scan(ifname()) :: :ok | {:ok, any()} | {:error, any()}
def scan(ifname) do
# Convenience wrapper around the :scan ioctl
ioctl(ifname, :scan)
end
@doc """
Print the current network status
Options include:
* `:redact` - Set to `false` to print out passwords
"""
@spec info([info_options()]) :: :ok
defdelegate info(options \\ []), to: Info
@doc """
Check that the system has the required programs installed

NOTE: This isn't completely implemented yet!
"""
@spec verify_system(keyword() | nil) :: :ok | {:error, String.t()}
def verify_system(opts \\ nil) do
  opts = opts || Application.get_all_env(:vintage_net)

  # Run each configured interface's technology check; report the first
  # non-:ok result, or :ok when every check passes.
  configured_interfaces()
  |> Enum.map(fn ifname ->
    technology = get(["interface", ifname, "type"])
    technology.check_system(opts)
  end)
  |> Enum.find(:ok, &(&1 != :ok))
end
end
|
lib/vintage_net.ex
| 0.898023
| 0.624379
|
vintage_net.ex
|
starcoder
|
defmodule Segment.Analytics.Batcher do
  @moduledoc """
  The `Segment.Analytics.Batcher` module is the default service implementation for the library which uses the
  [Segment Batch HTTP API](https://segment.com/docs/sources/server/http/#batch) to put events in a FIFO queue and
  send on a regular basis.

  The `Segment.Analytics.Batcher` can be configured with

  ```elixir
  config :segment,
    max_batch_size: 100,
    batch_every_ms: 5000
  ```

  * `config :segment, :max_batch_size` The maximum batch size of messages that will be sent to Segment at one time. Default value is 100.
  * `config :segment, :batch_every_ms` The time (in ms) between every batch request. Default value is 2000 (2 seconds)

  The Segment Batch API does have limits on the batch size "There is a maximum of 500KB per batch request and 32KB per call.". While
  the library doesn't check the size of the batch, if this becomes a problem you can change `max_batch_size` to a lower number and probably want
  to change `batch_every_ms` to run more frequently. The Segment API asks you to limit calls to under 50 a second, so even if you have no other
  Segment calls going on, don't go under 20ms!
  """
  use GenServer

  alias Segment.Analytics.{Track, Identify, Screen, Alias, Group, Page}

  # GenServer state is a {client, queue} pair: a Segment.Http client and an
  # Erlang :queue of pending events awaiting the next batch send.

  @doc """
  Start the `Segment.Analytics.Batcher` GenServer with a Segment HTTP Source API Write Key
  """
  @spec start_link(String.t()) :: GenServer.on_start()
  def start_link(api_key) do
    client = Segment.Http.client(api_key)

    # NOTE(review): this registers the process under an atom created at
    # runtime from api_key (atoms are never garbage collected — only safe
    # because keys come from trusted config), and the registered name differs
    # from start_link/2, which uses __MODULE__. Confirm this is intended.
    GenServer.start_link(__MODULE__, {client, :queue.new()}, name: String.to_atom(api_key))
  end

  @doc """
  Start the `Segment.Analytics.Batcher` GenServer with a Segment HTTP Source API Write Key and a Tesla Adapter. This is mainly used
  for testing purposes to override the Adapter with a Mock.
  """
  @spec start_link(String.t(), Tesla.adapter()) :: GenServer.on_start()
  def start_link(api_key, adapter) do
    client = Segment.Http.client(api_key, adapter)
    GenServer.start_link(__MODULE__, {client, :queue.new()}, name: __MODULE__)
  end

  # client

  @doc """
  Make a call to Segment with an event. Should be of type `Track, Identify, Screen, Alias, Group or Page`.
  This event will be queued and sent later in a batch.
  """
  @spec call(Segment.segment_event(), GenServer.server()) :: :ok
  def call(%{__struct__: mod} = event, pid \\ __MODULE__)
      when mod in [Track, Identify, Screen, Alias, Group, Page] do
    enqueue(event, pid)
  end

  @doc """
  Force the batcher to flush the queue and send all the events as a big batch (warning could exceed batch size)
  """
  @spec flush(GenServer.server()) :: :ok
  def flush(pid \\ __MODULE__), do: GenServer.call(pid, :flush)

  # GenServer Callbacks

  @impl true
  def init({client, queue}) do
    schedule_batch_send()
    {:ok, {client, queue}}
  end

  @impl true
  def handle_cast({:enqueue, event}, {client, queue}) do
    {:noreply, {client, :queue.in(event, queue)}}
  end

  @impl true
  def handle_call(:flush, _from, {client, queue}) do
    # Send everything at once, ignoring max_batch_size
    items = :queue.to_list(queue)
    if items != [], do: Segment.Http.batch(client, items)
    {:reply, :ok, {client, :queue.new()}}
  end

  @impl true
  def handle_info(:process_batch, {client, queue}) do
    queue_len = :queue.len(queue)
    {items, queue} = extract_batch(queue, queue_len)
    if items != [], do: Segment.Http.batch(client, items)
    schedule_batch_send()
    {:noreply, {client, queue}}
  end

  # Helpers

  # Arrange for the next :process_batch tick.
  defp schedule_batch_send do
    Process.send_after(self(), :process_batch, Segment.Config.batch_every_ms())
  end

  defp enqueue(event, pid), do: GenServer.cast(pid, {:enqueue, event})

  # Split off up to max_batch_size events from the front of the queue.
  # Returns {events_as_list, remaining_queue}.
  defp extract_batch(queue, 0), do: {[], queue}

  defp extract_batch(queue, queue_len) do
    batch_size = min(queue_len, Segment.Config.max_batch_size())
    {batch, rest} = :queue.split(batch_size, queue)
    {:queue.to_list(batch), rest}
  end
end
|
lib/segment/batcher.ex
| 0.912859
| 0.855187
|
batcher.ex
|
starcoder
|
defmodule Crudry.Query do
  @moduledoc """
  Generates Ecto Queries.
  All functions in this module return an `Ecto.Query`.
  Combining the functions in this module can be very powerful. For example, to do pagination with filter and search:

      pagination_params = %{limit: 10, offset: 1, order_by: "id", sorting_order: :desc}
      filter_params = %{username: ["username1", "username2"]}
      search_params = %{text: "search text", fields: [:username]}

      User
      |> Crudry.Query.filter(filter_params)
      |> Crudry.Query.list(pagination_params)
      |> Crudry.Query.search(search_params.text, search_params.fields)
      |> Repo.all()
  """
  import Ecto.Query

  @doc """
  Applies some restrictions to the query.
  Expects `opts` to be a keyword list or a map containing some of these fields:
  * `limit`: defaults to not limiting
  * `offset`: defaults to `0`
  * `sorting_order`: defaults to `:asc` (only works if there is also a `order_by` specified)
  * `order_by`: defaults to not ordering
  * `custom_query`: A function that receives the initial query as argument and returns a custom query. Defaults to `initial_query`
  ## Examples
      Crudry.Query.list(MySchema, [limit: 10])
      Crudry.Query.list(MySchema, [limit: 10, offset: 3, sorting_order: :desc, order_by: :value])
      Crudry.Query.list(MySchema, %{order_by: "value"})
      Crudry.Query.list(MySchema, %{order_by: :value})
      Crudry.Query.list(MySchema, %{order_by: ["age", "username"]})
      Crudry.Query.list(MySchema, %{order_by: [:age, :username]})
      Crudry.Query.list(MySchema, %{order_by: [asc: :age, desc: :username]})
      Crudry.Query.list(MySchema, custom_query: &MySchema.scope_list/1)
  """
  def list(initial_query, opts \\ []) do
    access_module = get_access_module(opts)
    custom_query = access_module.get(opts, :custom_query, nil)
    limit = access_module.get(opts, :limit, nil)
    offset = access_module.get(opts, :offset, 0)
    sorting_order = access_module.get(opts, :sorting_order, :asc)
    order_by = access_module.get(opts, :order_by)
    order = parse_order_by_args(sorting_order, order_by)

    initial_query
    |> get_custom_query(custom_query)
    |> limit(^limit)
    |> offset(^offset)
    |> order_by(^order)
  end

  # Options may be given as a map or a keyword list; pick the matching accessor.
  defp get_access_module(opts) when is_map(opts), do: Map
  defp get_access_module(opts) when is_list(opts), do: Keyword

  @doc """
  Searches for the `search_term` in the given `fields`.
  ## Examples
      Crudry.Query.search(MySchema, "John", [:name])
  """
  def search(initial_query, nil, _fields) do
    initial_query
  end

  def search(initial_query, search_term, fields) do
    # OR together a case-insensitive substring match on every given field.
    Enum.reduce(fields, subquery(initial_query), fn module_field, query_acc ->
      or_where(
        query_acc,
        [m],
        fragment(
          "CAST(? AS varchar) ILIKE ?",
          field(m, ^module_field),
          ^"%#{search_term}%"
        )
      )
    end)
  end

  @doc """
  Filters the query.
  ## Examples
      Crudry.Query.filter(MySchema, %{id: 5, name: "John"})
      Crudry.Query.filter(MySchema, %{name: ["John", "Doe"]})
  """
  def filter(initial_query, filters \\ []) do
    Enum.reduce(filters, initial_query, fn
      # List of values -> SQL `IN`; single value -> equality
      {field_name, values}, query_acc when is_list(values) ->
        where(query_acc, [m], field(m, ^field_name) in ^values)

      {field_name, value}, query_acc ->
        where(query_acc, [m], field(m, ^field_name) == ^value)
    end)
  end

  defp get_custom_query(initial_query, nil), do: initial_query
  defp get_custom_query(initial_query, custom_query), do: custom_query.(initial_query)

  # Normalize order_by into a keyword list of {direction, field} pairs.
  defp parse_order_by_args(_, nil), do: []

  defp parse_order_by_args(sorting_order, orders_by) when is_list(orders_by) do
    Enum.map(orders_by, fn
      {sort, order} -> {to_atom(sort), to_atom(order)}
      order -> {to_atom(sorting_order), to_atom(order)}
    end)
  end

  defp parse_order_by_args(sorting_order, order_by),
    do: parse_order_by_args(sorting_order, List.wrap(order_by))

  defp to_atom(value) when is_atom(value), do: value

  # Use to_existing_atom/1: order_by values name schema fields and sort
  # directions whose atoms already exist, and these strings often come from
  # user-supplied pagination params — to_atom/1 would allow unbounded atom
  # creation (atom-table exhaustion DoS). Unknown names now raise instead of
  # silently minting a new atom and failing later in Ecto.
  defp to_atom(value) when is_binary(value), do: String.to_existing_atom(value)
end
|
lib/crudry_query.ex
| 0.883399
| 0.520801
|
crudry_query.ex
|
starcoder
|
defmodule Whisk do
  @moduledoc """
  The scrambler.
  """

  @typedoc """
  A string representation of a puzzle type.
  See `puzzle_types/0` for supported values.
  """
  @type puzzle_type :: atom()

  @typedoc """
  A scramble is string representing a sequence of moves, separated by spaces.
  Moves are also strings. Different puzzle types have different valid moves.
  """
  @type scramble :: String.t()

  # A moveset is a 3-tuple containing the following:
  # - a list of groups of turns on the same axis (e. g. R and L for cubes)
  # - a list of turn modifiers (e. g. for cubes, ', 2, and nothing)
  # - the default move count in a scramble
  # A puzzle spec is one of:
  # - a moveset
  # - {moveset, moveset, integer}
  # Interpretation:
  # A scramble for a puzzle spec of the second shape takes the form
  # (<moves of one pattern...> <moves of another pattern>) * <integer repetitions>
  @puzzle_spec_2x2 {[~w(R), ~w(F), ~w(U)], ["", "'", "2"], 10}
  @puzzle_spec_3x3 {[~w(R L), ~w(F B), ~w(U D)], ["", "'", "2"], 20}
  @puzzle_spec_4x4 {[~w(R L Rw), ~w(F B Fw), ~w(U D Uw)], ["", "'", "2"], 40}
  @puzzle_spec_5x5 {[~w(R L Rw Lw), ~w(F B Fw Bw), ~w(U D Uw Dw)], ["", "'", "2"], 60}
  @puzzle_spec_6x6 {[~w(R L Rw Lw 3Rw), ~w(F B Fw Bw 3Fw), ~w(U D Uw Dw 3Uw)], ["", "'", "2"], 80}
  @puzzle_spec_7x7 {[~w(R L Rw Lw 3Rw 3Lw), ~w(F B Fw Bw 3Fw 3Bw), ~w(U D Uw Dw 3Uw 3Dw)],
                    ["", "'", "2"], 100}
  @puzzle_spec_skewb {[~w(R), ~w(L), ~w(U), ~w(B)], ["", "'"], 11}
  @puzzle_spec_pyraminx {
    {[~w(U), ~w(L), ~w(R), ~w(B)], ["", "'"], 11},
    {[~w(u), ~w(l), ~w(r), ~w(b)], ["", "'"], 0..1},
    1
  }
  @puzzle_spec_megaminx {
    {[~w(R), ~w(D)], ["++", "--"], 10},
    {[~w(U)], ["", "'"], 1},
    7
  }

  ## API

  @doc """
  Returns a list of supported puzzle types.
  ```
  iex> Whisk.puzzle_types()
  [:"2x2", :"3x3", :"4x4", :"5x5", :"6x6", :"7x7", :Skewb, :Pyraminx, :Megaminx]
  ```
  """
  @spec puzzle_types() :: [atom()]
  def puzzle_types do
    [:"2x2", :"3x3", :"4x4", :"5x5", :"6x6", :"7x7", :Skewb, :Pyraminx, :Megaminx]
  end

  @doc """
  Generate a scramble for a puzzle type. Also accepts the puzzle type as a string.
  Passing an unsupported puzzle type will generate an error.
  ## Options
  - `:length` - the number of moves in the scramble
  - `:reps` - for puzzles with repeating patterns, like Megaminx, specify the
  number of repetitions
  ## Examples
  ```
  iex> Whisk.scramble(:"3x3")
  "L F2 U R D B U2 R F' R F R2 D2 B R' B' D R B U"
  iex> Whisk.scramble(:"Skewb", length: 12)
  "L R B' R U' R' B L' U' B' L R"
  iex> Whisk.scramble(:Megaminx, length: 5, reps: 3)
  "D-- R-- D++ R-- D++ U' D++ R-- D++ R-- D++ U R-- D-- R-- D-- R++ U"
  ```
  """
  @spec scramble(puzzle_type() | String.t(), list()) :: scramble()
  def scramble(puzzle_name, opts \\ [])

  def scramble(puzzle_name, opts) when is_binary(puzzle_name) do
    # to_existing_atom: the supported type atoms are defined in this module,
    # so anything else is an unsupported type (and we never mint new atoms).
    puzzle_type_atom =
      try do
        String.to_existing_atom(puzzle_name)
      rescue
        ArgumentError -> raise "Unsupported puzzle type: #{inspect(puzzle_name)}"
      end

    scramble(puzzle_type_atom, opts)
  end

  def scramble(puzzle_name, opts) when is_atom(puzzle_name) do
    scramble_from_spec(puzzle_spec(puzzle_name), opts)
  end

  defp scramble_from_spec(spec, opts)

  defp scramble_from_spec({
         {axes1, modifiers1, default_length},
         {axes2, modifiers2, length2},
         default_reps
       }, opts) do
    validate_opts(opts)
    length1 = opts[:length] || default_length
    reps = opts[:reps] || default_reps

    # BUG FIX: use an explicit step (`1..reps//1`) so reps == 0 yields an
    # empty range. A bare `1..0` is a DESCENDING range ([1, 0]) and would
    # silently produce two repetitions instead of zero.
    Enum.join(for _ <- 1..reps//1 do
      part1 = generate_scramble(axes1, modifiers1, length1)
      part2 = generate_addon_scramble(axes2, modifiers2, length2)
      String.trim(part1 <> " " <> part2)
    end, " ")
  end

  defp scramble_from_spec({axes, modifiers, default_length}, opts) do
    validate_opts(opts)
    length = opts[:length] || default_length
    generate_scramble(axes, modifiers, length)
  end

  # Raises on negative :length or :reps; zero is allowed (empty scramble).
  defp validate_opts(opts) do
    cond do
      opts[:length] && opts[:length] < 0 -> raise "Invalid length: #{inspect(opts[:length])}"
      opts[:reps] && opts[:reps] < 0 -> raise "Invalid reps: #{inspect(opts[:reps])}"
      true -> nil
    end
  end

  ## Helpers

  defp puzzle_spec(puzzle_name) do
    case puzzle_name do
      :"2x2" -> @puzzle_spec_2x2
      :"3x3" -> @puzzle_spec_3x3
      :"4x4" -> @puzzle_spec_4x4
      :"5x5" -> @puzzle_spec_5x5
      :"6x6" -> @puzzle_spec_6x6
      :"7x7" -> @puzzle_spec_7x7
      :Skewb -> @puzzle_spec_skewb
      :Pyraminx -> @puzzle_spec_pyraminx
      :Megaminx -> @puzzle_spec_megaminx
      _ -> raise "Unsupported puzzle type: #{inspect(puzzle_name)}"
    end
  end

  defp generate_scramble(axes, modifiers, length) when is_number(length) do
    axis_idx = initial_axis_index(axes)
    Enum.join(generate_moves(axes, modifiers, axis_idx, [], length), " ")
  end

  # Builds `remaining` moves, never picking the same axis twice in a row.
  defp generate_moves(axes, modifiers, last_axis_index, acc, remaining) do
    if remaining <= 0 do
      acc
    else
      axis_idx = different_axis_index(axes, last_axis_index)

      generate_moves(
        axes,
        modifiers,
        axis_idx,
        [generate_move(axes, modifiers, axis_idx) | acc],
        remaining - 1
      )
    end
  end

  defp generate_move(axes, modifiers, axis_idx) do
    Enum.random(Enum.at(axes, axis_idx)) <> Enum.random(modifiers)
  end

  # Range form: each axis contributes a random number of moves within `range`.
  defp generate_addon_scramble(axes, modifiers, %Range{} = range) do
    axes
    |> Enum.map(fn axis ->
      reps = Enum.random(range)

      1..reps//1
      |> Enum.map(fn _ -> Enum.random(axis) <> Enum.random(modifiers) end)
      |> Enum.join(" ")
    end)
    |> Enum.filter(fn move -> move != "" end)
    |> Enum.join(" ")
  end

  # Fixed-count form: each axis contributes exactly `length` moves.
  defp generate_addon_scramble(axes, modifiers, length) when is_number(length) do
    Enum.join(for axis <- axes do
      Enum.join(for _ <- 1..length//1 do
        Enum.random(axis) <> Enum.random(modifiers)
      end, " ")
    end, " ")
  end

  defp initial_axis_index(axes) when length(axes) == 1 do
    0
  end

  defp initial_axis_index(axes) do
    :rand.uniform(Enum.count(axes)) - 1
  end

  defp different_axis_index(axes, _index) when length(axes) == 1 do
    0
  end

  # Uniformly picks any index other than `index`.
  defp different_axis_index(axes, index) do
    axis_count = Enum.count(axes)
    rem(index + :rand.uniform(axis_count - 1), axis_count)
  end
end
|
lib/whisk.ex
| 0.858006
| 0.894329
|
whisk.ex
|
starcoder
|
defmodule Rolodex.Config do
  @moduledoc """
  A behaviour for defining Rolodex config and functions to parse config.

  To define your config for Rolodex, `use` Rolodex.Config in a module and
  override the default behaviour functions. Then, tell Rolodex the name of your
  config module in your project's configuration files.

      # Your config definition
      defmodule MyRolodexConfig do
        use Rolodex.Config

        def spec() do
          [
            title: "My API",
            description: "My API's description",
            version: "1.0.0"
          ]
        end
      end

      # In `config.exs`
      config :rolodex, module: MyRolodexConfig

  ## Usage

  Your Rolodex config module exports four functions, each of which has a
  default implementation returning an empty value:

  - `spec/0` - Basic configuration for your Rolodex setup
  - `render_groups_spec/0` - Definitions for render targets for your API docs. A
  render group is combination of: a Rolodex Router, a processor, a writer,
  and options for the writer. You can specify more than one render group to create
  multiple docs outputs for your API. At least one render group specification is
  required.
  - `auth_spec/0` - Definitions for shared auth patterns to be used in routes.
  Auth definitions should follow the OpenAPI pattern, but keys can use snake_case
  and will be converted to camelCase for the OpenAPI target.
  - `pipelines_spec/0` - Sets any shared defaults for your Phoenix Router
  pipelines. See `Rolodex.PipelineConfig` for details about valid options and defaults

  For `spec/0`, the following are valid options:

  - `description` (required) - Description for your documentation output
  - `title` (required) - Title for your documentation output
  - `version` (required) - Your documentation's version
  - `default_content_type` (default: "application/json") - Default content type
  used for request body and response schemas
  - `locale` (default: `"en"`) - Locale key to use when processing descriptions
  - `pipelines` (default: `%{}`) - Map of pipeline configs. Used to set default
  parameter values for all routes in a pipeline. See `Rolodex.PipelineConfig`.
  - `render_groups` (default: `Rolodex.RenderGroupConfig`) - List of render
  groups.
  - `server_urls` (default: []) - List of base url(s) for your API paths

  ## Full Example

      defmodule MyRolodexConfig do
        use Rolodex.Config

        def spec() do
          [
            title: "My API",
            description: "My API's description",
            version: "1.0.0",
            default_content_type: "application/json+api",
            locale: "en",
            server_urls: ["https://myapp.io"]
          ]
        end

        def render_groups_spec() do
          [
            [router: MyRouter, writer_opts: [file_name: "api-public.json"]],
            [router: MyRouter, writer_opts: [file_name: "api-private.json"]]
          ]
        end

        def auth_spec() do
          [
            BearerAuth: [
              type: "http",
              scheme: "bearer"
            ],
            OAuth: [
              type: "oauth2",
              flows: [
                authorization_code: [
                  authorization_url: "https://example.io/oauth2/authorize",
                  token_url: "https://example.io/oauth2/token",
                  scopes: [
                    "user.read",
                    "account.read",
                    "account.write"
                  ]
                ]
              ]
            ]
          ]
        end

        def pipelines_spec() do
          [
            api: [
              headers: ["X-Request-ID": :uuid],
              query_params: [includes: :string]
            ]
          ]
        end
      end
  """

  alias Rolodex.{PipelineConfig, RenderGroupConfig}

  import Rolodex.Utils, only: [to_struct: 2, to_map_deep: 1]

  @enforce_keys [
    :description,
    :locale,
    :render_groups,
    :title,
    :version
  ]

  defstruct [
    :description,
    :pipelines,
    :render_groups,
    :title,
    :version,
    default_content_type: "application/json",
    locale: "en",
    auth: %{},
    server_urls: []
  ]

  @type t :: %__MODULE__{
          default_content_type: binary(),
          description: binary(),
          locale: binary(),
          pipelines: pipeline_configs() | nil,
          render_groups: [RenderGroupConfig.t()],
          auth: map(),
          server_urls: [binary()],
          title: binary(),
          version: binary()
        }

  # Fixed: was `optional(:atom)`, i.e. the literal atom :atom as the only
  # allowed key. Pipeline names are arbitrary atoms, so the key type is atom().
  @type pipeline_configs :: %{
          optional(atom()) => PipelineConfig.t()
        }

  @callback spec() :: keyword() | map()
  @callback pipelines_spec() :: keyword() | map()
  @callback auth_spec() :: keyword() | map()
  @callback render_groups_spec() :: list()

  defmacro __using__(_) do
    quote do
      @behaviour Rolodex.Config

      # Default implementations; override any subset in your config module.
      def spec(), do: %{}
      def pipelines_spec(), do: %{}
      def auth_spec(), do: %{}
      def render_groups_spec(), do: [[]]

      defoverridable spec: 0,
                     pipelines_spec: 0,
                     auth_spec: 0,
                     render_groups_spec: 0
    end
  end

  @doc """
  Builds a `Rolodex.Config` struct by invoking the callbacks on the given
  config module and normalizing their results.
  """
  @spec new(module()) :: t()
  def new(module) do
    module.spec()
    |> Map.new()
    |> set_pipelines_config(module)
    |> set_auth_config(module)
    |> set_render_groups_config(module)
    |> to_struct(__MODULE__)
  end

  # Normalize each pipeline spec entry into a PipelineConfig struct.
  defp set_pipelines_config(opts, module) do
    pipelines =
      module.pipelines_spec()
      |> Map.new(fn {k, v} -> {k, PipelineConfig.new(v)} end)

    Map.put(opts, :pipelines, pipelines)
  end

  defp set_auth_config(opts, module),
    do: Map.put(opts, :auth, module.auth_spec() |> to_map_deep())

  defp set_render_groups_config(opts, module) do
    groups = module.render_groups_spec() |> Enum.map(&RenderGroupConfig.new/1)
    Map.put(opts, :render_groups, groups)
  end
end
defmodule Rolodex.RenderGroupConfig do
  @moduledoc """
  Configuration for a single render group — one serialization target for your
  docs. Provide one or more of these via `Rolodex.Config` to produce one or
  more docs outputs for your API.

  ## Options

  - `router` (required) - A `Rolodex.Router` definition
  - `processor` (default: `Rolodex.Processors.OpenAPI`) - Module implementing
  the `Rolodex.Processor` behaviour
  - `writer` (default: `Rolodex.Writers.FileWriter`) - Module implementing the
  `Rolodex.Writer` behaviour to be used to write out the docs
  - `writer_opts` (default: `[file_name: "api.json"]`) - Options keyword list
  passed into the writer behaviour.
  """

  defstruct [
    :router,
    processor: Rolodex.Processors.OpenAPI,
    writer: Rolodex.Writers.FileWriter,
    writer_opts: [file_name: "api.json"]
  ]

  @type t :: %__MODULE__{
          router: module(),
          processor: module(),
          writer: module(),
          writer_opts: keyword()
        }

  @doc """
  Builds a render group config from a keyword list or map of overrides;
  unspecified fields keep their defaults.
  """
  @spec new(list() | map()) :: t()
  def new(params \\ []) do
    struct(__MODULE__, params)
  end
end
defmodule Rolodex.PipelineConfig do
  @moduledoc """
  Shared parameters to be applied to every route within a Phoenix pipeline.

  ## Options

  - `body` (default: `%{}`)
  - `headers` (default: `%{}`)
  - `path_params` (default: `%{}`)
  - `query_params` (default: `%{}`)
  - `responses` (default: `%{}`)

  ## Example

      %Rolodex.PipelineConfig{
        body: %{id: :uuid, name: :string}
        headers: %{"X-Request-Id" => :uuid},
        query_params: %{account_id: :uuid},
        responses: %{401 => SharedUnauthorizedResponse}
      }
  """

  import Rolodex.Utils, only: [to_struct: 2, to_map_deep: 1]

  defstruct auth: [],
            body: %{},
            headers: %{},
            path_params: %{},
            query_params: %{},
            responses: %{}

  @type t :: %__MODULE__{
          auth: list() | map(),
          body: map(),
          headers: map(),
          path_params: map(),
          query_params: map(),
          responses: map()
        }

  @doc """
  Builds a pipeline config from a keyword list or map, deep-normalizing each
  value into a map before filling the struct.
  """
  @spec new(list() | map()) :: t()
  def new(params \\ []) do
    normalized = Enum.into(params, %{}, fn {key, value} -> {key, to_map_deep(value)} end)
    to_struct(normalized, __MODULE__)
  end
end
|
lib/rolodex/config.ex
| 0.873525
| 0.482856
|
config.ex
|
starcoder
|
defmodule GrpcMock do
@moduledoc """
GrpcMock is library for easy gRPC server mocking to be used with
[grpc-elixir library](https://github.com/tony612/grpc-elixir).
### Concurrency
Unlike `mox`, GrpcMock is not thread-safe and cannot be used in concurrent tests.
## Example
As an example, imagine that your application is using a remote calculator,
with API defined in .proto file like this:
service Calculator {
rpc Add(AddRequest) returns (AddResponse);
rpc Mult(MultRequest) returns (MultResponse);
}
If you want to mock the calculator gRPC calls during tests, the first step
is to define the mock, usually in your `test_helper.exs`:
GrpcMock.defmock(CalcMock, for: Calculator)
Now in your tests, you can define expectations and verify them:
use ExUnit.Case
test "invokes add and mult" do
# Start the gRPC server
Server.start(CalcMock, 50_051)
# Connect to the serrver
{:ok, channel} = GRPC.Stub.connect("localhost:50051")
CalcMock
|> GrpcMock.expect(:add, fn req, _ -> AddResponse.new(sum: req.x + req.y) end)
|> GrpcMock.expect(:mult, fn req, _ -> AddResponse.new(sum: req.x * req.y) end)
request = AddRequest.new(x: 2, y: 3)
assert {:ok, reply} = channel |> Stub.add(request)
assert reply.sum == 5
request = MultRequest.new(x: 2, y: 3)
assert {:ok, reply} = channel |> Stub.mult(request)
assert reply.sum == 6
GrpcMock.verify!(CalcMock)
end
"""
alias GrpcMock.Server
defmodule UnexpectedCallError do
defexception [:message]
end
defmodule VerificationError do
defexception [:message]
end
@doc """
Define mock in runtime based on specificatin on pb.ex file
## Example
GrpcMock.defmock(CalcMock, for: Calculator)
"""
def defmock(name, options) do
service =
case Keyword.fetch(options, :for) do
{:ok, svc} -> svc
:error -> raise ArgumentError, ":for option is required on defmock"
end
body =
service.__rpc_calls__()
|> generate_mocked_funs(name)
Module.create(name, [header(service) | body], Macro.Env.location(__ENV__))
end
defp generate_mocked_funs(rpc_calls, name) do
for {fname_camel_atom, _, _} <- rpc_calls do
fname_snake = camel2snake(fname_camel_atom)
quote do
def unquote(fname_snake)(request, stream) do
GrpcMock.__dispatch__(unquote(name), unquote(fname_snake), [request, stream])
end
end
end
end
@doc """
Expect the `name` operation to be called `n` times.
## Examples
If `code_or_value` is a function, it will be invoked as stub body.
To expect `add` to be called five times:
expect(MyMock, :add, 5, fn request, stream -> ... end)
If `code_or_value` is anything other than a function,
it will be stub return value.
To expect `add` to be called once:
expect(CalcMock, :add, AddResponse.new(sum: 12) end)
`expect/4` can be invoked multiple times for the same `name`,
allowing different behaviours on each invocation.
"""
def expect(mock, name, n \\ 1, code_or_value), do: do_expect(mock, name, n, code_or_value)
@doc """
Simmilar to `expect/4` but there can be only one stubbed function.
Number of expected invocations cannot be defined.
## Example
If `code_or_value` is a function, it will be invoked as stub body.
stub(CalcMock, :add, fn(request, _) -> ... end)
If `code_or_value` is anything other than a function,
it will be stub return value.
stub(CalcMock, :add, AddResponse.new(sum: 12) end)
"""
def stub(mock, name, code_or_value), do: do_stub(mock, name, code_or_value)
@doc """
Verify that all operations for the specified mock are called expected number of times
and remove all expectations for it.
"""
def verify!(mock) do
pending = Server.verify(mock)
messages =
for {fname, total, remaining} <- pending do
mfa = Exception.format_mfa(mock, fname, 2)
called = total - remaining
" * expected #{mfa} to be invoked #{times(total)} but it was invoked #{times(called)}"
end
if messages != [] do
raise VerificationError,
"error while verifying calls for mock #{mock}:\n\n" <> Enum.join(messages, "\n")
end
:ok
end
defp header(service) do
quote do
use GRPC.Server, service: unquote(service)
end
end
defp camel2snake(atom) do
atom |> Atom.to_string() |> Macro.underscore() |> String.to_atom()
end
defp do_expect(mock, name, n, code) when is_function(code) do
calls = List.duplicate(code, n)
:ok = Server.add_expectation(mock, name, {n, calls, nil})
mock
end
defp do_expect(mock, name, n, resp) do
code = fn _request, _stream -> resp end
do_expect(mock, name, n, code)
end
defp do_stub(mock, name, code) when is_function(code) do
:ok = Server.add_expectation(mock, name, {0, [], code})
mock
end
defp do_stub(mock, name, resp) do
code = fn _request, _stream -> resp end
do_stub(mock, name, code)
end
# Called by generated mock modules: looks up the next expectation or stub for
# `fname` and invokes it, raising when no (more) expectations exist.
def __dispatch__(mock, fname, args) do
  case Server.fetch_fun(mock, fname) do
    :no_expectation ->
      mfa = Exception.format_mfa(mock, fname, args)

      raise UnexpectedCallError,
            "no expectation defined for #{mfa}"

    {:out_of_expectations, count} ->
      mfa = Exception.format_mfa(mock, fname, args)

      raise UnexpectedCallError,
            "expected #{mfa} to be called #{times(count)} but it has been " <>
              "called #{times(count + 1)}"

    {:ok, fun} ->
      apply(fun, args)
  end
end
# Human-readable call count for error messages.
defp times(count) do
  if count == 1, do: "once", else: "#{count} times"
end
end
|
lib/grpc_mock.ex
| 0.907271
| 0.50061
|
grpc_mock.ex
|
starcoder
|
defimpl Cog.Eval, for: Piper.Permissions.Ast.BinaryExpr do
  alias Cog.Eval
  alias Cog.Permissions.Context
  alias Piper.Permissions.Ast

  # Evaluates a binary expression: both sides are evaluated (threading the
  # context through) and compared with the function chosen for the operator.
  # Returns `{boolean, context}`.
  def value_of(%Ast.BinaryExpr{op: op, left: lhs, right: rhs}, context) do
    comparator = comparison_type_to_function(op)
    {lhsv, context} = Eval.value_of(lhs, context)
    {rhsv, context} = Eval.value_of(rhs, context)
    compare(lhsv, rhsv, comparator, context)
  end

  # `any`/`all` over command arguments: tag each argument with its positional
  # index so a successful match can be recorded against that position.
  defp compare({{:arg, type}, lhsv}, rhsv, comparator, context) when type in [:any, :all] do
    lhsv = for {arg, index} <- Enum.with_index(lhsv), do: {index, arg}
    cog_and_compare(:arg, type, lhsv, rhsv, comparator, context)
  end

  # Single indexed argument.
  defp compare({{:arg, index}, lhsv}, rhsv, comparator, context) do
    case comparator.(lhsv, rhsv) do
      true ->
        {true, Context.add_match(context, :arg, index)}

      false ->
        {false, context}
    end
  end

  # Single named option (no any/all modifier).
  defp compare({{:option, name, nil}, lhsv}, rhsv, comparator, context) do
    case comparator.(lhsv, rhsv) do
      true ->
        {true, Context.add_match(context, :option, name)}

      false ->
        {false, context}
    end
  end

  # `any`/`all` over a map of options.
  defp compare({{:option, _name, match}, lhsv}, rhsv, comparator, context)
       when match in [:any, :all] do
    cog_and_compare(:option, match, Map.to_list(lhsv), rhsv, comparator, context)
  end

  # Plain value comparison with no match bookkeeping.
  defp compare(lhsv, rhsv, comparator, context) do
    {comparator.(lhsv, rhsv), context}
  end

  # Exhausted the candidates: `any` over nothing is false, `all` over nothing
  # is vacuously true.
  defp cog_and_compare(_kind, :any, [], _rhsv, _comparator, context) do
    {false, context}
  end

  defp cog_and_compare(_kind, :all, [], _rhsv, _comparator, context) do
    {true, context}
  end

  defp cog_and_compare(kind, :all, [{name, value} | t], rhsv, comparator, context) do
    case comparator.(value, rhsv) do
      true ->
        cog_and_compare(kind, :all, t, rhsv, comparator, Context.add_match(context, kind, name))

      false ->
        {false, context}
    end
  end

  defp cog_and_compare(kind, :any, [{name, value} | t], rhsv, comparator, context) do
    case comparator.(value, rhsv) do
      true ->
        # FIX: record the match under the caller-supplied `kind` (:arg or
        # :option). This previously hard-coded `:option`, so `any`-matches over
        # command *arguments* were mis-tagged as option matches — inconsistent
        # with the `:all` clause above.
        {true, Context.add_match(context, kind, name)}

      false ->
        cog_and_compare(kind, :any, t, rhsv, comparator, context)
    end
  end

  # `is` compares against a regex when the right side is one, equality otherwise.
  defp comparison_type_to_function(:is) do
    fn
      lhs, %Regex{} = rhs -> Regex.match?(rhs, lhs)
      lhs, rhs -> lhs == rhs
    end
  end

  defp comparison_type_to_function(:gt), do: &Kernel.>/2
  defp comparison_type_to_function(:gte), do: &Kernel.>=/2
  defp comparison_type_to_function(:lt), do: &Kernel.</2
  defp comparison_type_to_function(:lte), do: &Kernel.<=/2
  defp comparison_type_to_function(:equiv), do: &Kernel.==/2
  defp comparison_type_to_function(:not_equiv), do: &Kernel.!=/2

  # Regex matches treat a nil left side as a non-match rather than crashing.
  defp comparison_type_to_function(:matches) do
    fn
      nil, _ -> false
      value, %Regex{} = regex -> Regex.match?(regex, value)
    end
  end

  defp comparison_type_to_function(:not_matches) do
    fn
      nil, _ -> false
      value, %Regex{} = regex -> Regex.match?(regex, value) == false
    end
  end

  defp comparison_type_to_function(:with), do: &Kernel.and/2
end
|
lib/cog/permissions/eval/binary_expr.ex
| 0.746046
| 0.448487
|
binary_expr.ex
|
starcoder
|
defmodule Brando.Images.Utils do
  @moduledoc """
  General utilities pertaining to the Images module
  """
  @type id :: binary | integer
  @type image_kind :: :image | :image_series | :image_field
  @type image_schema :: Brando.Image.t()
  @type image_series_schema :: Brando.ImageSeries.t()
  @type image_struct :: Brando.Images.Image.t()
  @type user :: Brando.Users.User.t() | :system

  alias Brando.Image
  alias Brando.ImageSeries
  import Brando.Utils
  import Ecto.Query, only: [from: 2]

  @doc """
  Delete all physical images depending on image series `series_id`
  """
  @spec clear_media_for(:image_series, series_id :: integer) :: :ok
  def clear_media_for(:image_series, series_id) do
    images =
      Brando.repo().all(
        from i in Image,
          where: i.image_series_id == ^series_id
      )

    # Removes the original upload plus every generated size from disk.
    for img <- images, do: delete_original_and_sized_images(img, :image)
    :ok
  end

  @doc """
  Goes through `image`, which is a schema with an image_field
  then passing to `delete_media/2` for removal
  ## Example:
      delete_original_and_sized_images(record, :cover)
  """
  @spec delete_original_and_sized_images(schema :: term, key :: atom) :: {:ok, Image.t()}
  def delete_original_and_sized_images(image, key) do
    img = Map.get(image, key)

    # The field may never have been populated; skip file deletion then.
    if img do
      delete_sized_images(img)
      delete_media(Map.get(img, :path))
    end

    {:ok, image}
  end

  @doc """
  Delete sizes associated with `image`, but keep original.
  """
  @spec delete_sized_images(image_struct :: image_struct) :: any
  def delete_sized_images(nil), do: nil

  def delete_sized_images(image) do
    sizes = Map.get(image, :sizes)

    for {_size, file} <- sizes do
      delete_media(file)
    end
  end

  @doc """
  Deletes `file` after joining it with `media_path`
  """
  @spec delete_media(file_name :: binary) :: any
  def delete_media(nil), do: nil
  def delete_media(""), do: nil

  def delete_media(file) do
    file = Path.join([Brando.config(:media_path), file])
    # Best effort: File.rm/1 errors (e.g. file already gone) are ignored.
    File.rm(file)
  end

  @doc """
  Splits `file` with `split_path/1`, adds `size` to the path before
  concatenating it with the filename.
  ## Example
      iex> get_sized_path("test/dir/filename.jpg", :thumb)
      "test/dir/thumb/filename.jpg"
      iex> get_sized_path("test/dir/filename.jpeg", :thumb)
      "test/dir/thumb/filename.jpg"
  """
  @spec get_sized_path(path :: binary, size :: atom | binary, type :: atom | nil) ::
          binary
  def get_sized_path(path, size, type \\ nil)

  # The original is never placed in a size subdirectory.
  def get_sized_path(path, :original, _type) do
    path
  end

  def get_sized_path(path, size, type) when is_binary(size) do
    {dir, filename} = split_path(path)
    filename = ensure_correct_extension(filename, type)
    Path.join([dir, size, filename])
  end

  def get_sized_path(file, size, type) when is_atom(size),
    do: get_sized_path(file, Atom.to_string(size), type)

  @doc """
  Adds `size` to the path before
  ## Example
      iex> get_sized_dir("test/dir/filename.jpg", :thumb)
      "test/dir/thumb"
  """
  @spec get_sized_dir(path :: binary, size :: atom | binary) :: binary
  def get_sized_dir(path, size) when is_binary(size) do
    {dir, _} = split_path(path)
    Path.join([dir, size])
  end

  def get_sized_dir(file, size) when is_atom(size), do: get_sized_dir(file, Atom.to_string(size))

  @doc """
  Returns image type atom.
  """
  @spec image_type(filename :: binary) :: atom | no_return()
  def image_type(filename) do
    filename
    |> Path.extname()
    |> String.downcase()
    |> do_image_type()
  end

  # Maps a lowercase file extension to its type atom; raises on unknown types.
  defp do_image_type(".jpg"), do: :jpg
  defp do_image_type(".jpeg"), do: :jpg
  defp do_image_type(".png"), do: :png
  defp do_image_type(".gif"), do: :gif
  defp do_image_type(".bmp"), do: :bmp
  defp do_image_type(".tif"), do: :tiff
  defp do_image_type(".tiff"), do: :tiff
  defp do_image_type(".psd"), do: :psd
  defp do_image_type(".svg"), do: :svg
  defp do_image_type(".crw"), do: :crw
  defp do_image_type(".webp"), do: :webp
  defp do_image_type(".avif"), do: :avif
  defp do_image_type(ext), do: raise("Unknown image type #{ext}")

  @doc """
  Return joined path of `file` and the :media_path config option
  as set in your app's config.exs.
  """
  @spec media_path() :: binary
  @spec media_path(nil | binary) :: binary
  def media_path, do: Brando.config(:media_path)
  def media_path(nil), do: Brando.config(:media_path)
  def media_path(file), do: Path.join([Brando.config(:media_path), file])

  @doc """
  Soft delete all images depending on image series `series_id`
  """
  @spec delete_images_for(:image_series, series_id :: integer) :: :ok
  def delete_images_for(:image_series, series_id) do
    images =
      Brando.repo().all(
        from i in Image,
          where: i.image_series_id == ^series_id
      )

    for img <- images do
      Brando.repo().soft_delete!(img)
    end

    :ok
  end

  @doc """
  Delete all imageseries dependent on `category_id`
  """
  @spec delete_series_for(:image_category, category_id :: integer) :: [
          image_series_schema | no_return
        ]
  def delete_series_for(:image_category, category_id) do
    image_series =
      Brando.repo().all(
        from m in ImageSeries,
          where: m.image_category_id == ^category_id
      )

    for is <- image_series do
      # Soft-delete the images first, then the series itself.
      delete_images_for(:image_series, is.id)
      Brando.repo().soft_delete!(is)
    end
  end

  @doc """
  Checks that the existing images' path matches the config. These may differ
  when series has been renamed!
  """
  @spec check_image_paths(module, map | image_series_schema, user) :: :unchanged | :changed
  def check_image_paths(schema, image_series, user) do
    upload_path = image_series.cfg.upload_path

    # Accumulates the paths that were moved; only its emptiness matters below.
    {_, paths} =
      Enum.map_reduce(image_series.images, [], fn image, acc ->
        case check_image_path(schema, image, upload_path, user) do
          nil -> {image, acc}
          path -> {image, [path | acc]}
        end
      end)

    case paths do
      [] -> :unchanged
      _ -> :changed
    end
  end

  # Moves a single image into the series' configured upload path if it lives
  # elsewhere, persisting the updated image struct. Returns the updated record,
  # or nil when nothing needed to move.
  @spec check_image_path(module, map, binary, user) :: Ecto.Schema.t() | nil
  defp check_image_path(schema, image, upload_dirname, user) do
    image_path = image.image.path
    image_dirname = Path.dirname(image.image.path)
    image_basename = Path.basename(image.image.path)

    image_struct =
      do_check_image_path(image, image_path, image_dirname, image_basename, upload_dirname)

    if image_struct != nil do
      # store new image
      image
      |> schema.changeset(%{image: image_struct}, user)
      |> Brando.repo().update!
    end
  end

  defp do_check_image_path(_, _, ".", _, _) do
    # something is wrong, just return nil and don't move anything
    nil
  end

  @spec do_check_image_path(Ecto.Schema.t(), binary, binary, binary, binary) ::
          image_struct
  defp do_check_image_path(image, image_path, image_dirname, image_basename, upload_dirname) do
    media_path = Path.expand(Brando.config(:media_path))

    # Returns nil (via unless) when the file is already in the right directory.
    unless image_dirname == upload_dirname do
      source_file = Path.join(media_path, image_path)
      upload_path = Path.join(media_path, upload_dirname)
      dest_file = Path.join(upload_path, image_basename)
      new_image_path = Path.join(upload_dirname, image_basename)
      File.mkdir_p(upload_path)
      # NOTE(review): source file is copied, not moved — the old file remains
      # on disk; confirm this is intentional.
      File.cp(source_file, dest_file)
      Map.put(image.image, :path, new_image_path)
    end
  end

  @doc """
  Gets orphaned image_series.
  """
  @spec get_orphaned_series([Ecto.Schema.t()], [Ecto.Schema.t()], Keyword.t()) ::
          [binary] | []
  def get_orphaned_series(categories, series, opts) do
    starts_with = Keyword.fetch!(opts, :starts_with)
    ignored_paths = Keyword.get(opts, :ignored_paths, [])
    media_path = Path.expand(Brando.config(:media_path))
    series_paths = Enum.map(series, &Path.join(media_path, &1.cfg.upload_path))
    category_paths = Enum.map(categories, &Path.join(media_path, &1.cfg.upload_path))
    upload_paths = series_paths ++ category_paths
    check_upload_paths(upload_paths, media_path, starts_with, ignored_paths)
  end

  # Directories on disk that no known category/series claims are "orphaned".
  defp check_upload_paths(upload_paths, media_path, starts_with, ignored_paths) do
    case upload_paths do
      [] ->
        []

      _ ->
        path_to_check = Path.join(media_path, starts_with)
        full_ignored_paths = Enum.map(ignored_paths, &Path.join(path_to_check, &1))
        existing_category_paths = get_existing_category_paths(path_to_check, full_ignored_paths)
        existing_series_paths = get_existing_series_paths(existing_category_paths)
        existing_paths = existing_series_paths ++ existing_category_paths
        existing_paths -- upload_paths
    end
  end

  defp get_existing_category_paths(path_to_check, full_ignored_paths) do
    path_to_check
    |> Path.join("*")
    |> Path.wildcard()
    |> Enum.filter(&(&1 not in full_ignored_paths))
  end

  defp get_existing_series_paths(existing_category_paths) do
    existing_category_paths
    |> Enum.map(&Path.wildcard(Path.join(&1, "*")))
    |> List.flatten()
  end
end
|
lib/brando/images/utils.ex
| 0.904427
| 0.476701
|
utils.ex
|
starcoder
|
defmodule Elsa.Consumer.WorkerSupervisor do
  @moduledoc """
  Supervisor that starts and manages consumer worker processes based on
  given configuration. Without a specified `:partition`, starts a worker for
  each partition on the configured topic. Otherwise, starts a worker for the
  single, specified partition.
  """
  use Supervisor

  @doc """
  Start the consumer worker supervisor and link it to the current process.
  Registers itself to the Elsa Registry.
  """
  @spec start_link(keyword()) :: GenServer.on_start()
  def start_link(args) do
    registry = Keyword.fetch!(args, :registry)
    topic = Keyword.fetch!(args, :topic)

    Supervisor.start_link(__MODULE__, args,
      name: {:via, Elsa.Registry, {registry, :"topic_consumer_worker_supervisor_#{topic}"}}
    )
  end

  @doc """
  On startup, determines the partitions to subscribe to from given configuration
  and generates a worker child spec for each.
  """
  @impl Supervisor
  def init(args) do
    registry = Keyword.fetch!(args, :registry)
    topic = Keyword.fetch!(args, :topic)
    brod_client = Elsa.Registry.whereis_name({registry, :brod_client})

    # With an explicit :partition this yields a bare integer; otherwise it asks
    # brod for the partition count, yielding {:ok, count} — the two to_child_specs
    # clauses below match these two shapes.
    Keyword.get_lazy(args, :partition, fn ->
      :brod_client.get_partitions_count(brod_client, topic)
    end)
    |> to_child_specs(args)
    |> Supervisor.init(strategy: :one_for_one)
  end

  # No partition configured: one worker per partition 0..count-1.
  defp to_child_specs({:ok, partitions}, args) do
    topic = Keyword.fetch!(args, :topic)

    0..(partitions - 1)
    |> Enum.map(fn partition ->
      name = :"topic_consumer_worker_#{topic}_#{partition}"
      new_args = named_args(name, args) |> Keyword.put(:partition, partition)
      Supervisor.child_spec({Elsa.Consumer.Worker, new_args}, id: name)
    end)
  end

  # Explicit partition configured: a single worker for just that partition.
  defp to_child_specs(partition, args) when is_integer(partition) do
    topic = Keyword.fetch!(args, :topic)
    name = :"topic_consumer_worker_#{topic}_#{partition}"

    {Elsa.Consumer.Worker, named_args(name, args)}
    |> Supervisor.child_spec(id: name)
    |> List.wrap()
  end

  # Registers the worker under a via-tuple name in the Elsa registry.
  defp named_args(name, args) do
    registry = Keyword.fetch!(args, :registry)
    Keyword.put(args, :name, {:via, Elsa.Registry, {registry, name}})
  end
end
|
lib/elsa/consumer/worker_supervisor.ex
| 0.729327
| 0.432003
|
worker_supervisor.ex
|
starcoder
|
defmodule Geonames do
  @moduledoc """
  Geonames-Elixir is a simple wrapper around the API provided
  by geonames.org. All interaction with the API is provided by
  this module via easy to use functions.
  Each API endpoint maps to a single function, all of which
  requiring a map containing the parameters of the request.
  If no arguments are required, then the hash can be omitted
  as it will default to %{}
  ## Examples
  Below you will find a few examples on how to query the
  geonames API.
  ###### General search
      Geonames.search %{ q: "London" }
      Geonames.search %{ q: "London, United Kingdom" }
  ###### Find cities in a bounding box
      Geonames.cities %{ north: 44.1, south: -9.9, east: -22.4, west: 55.2 }
  ###### Find earthquakes in a bounding box
      Geonames.earthquakes %{ north: 44.1, south: -9.9, east: -22.4, west: 55.2, date: "2015-05-30" }
  As you can see, the interface is very simple to use. All
  functions will return a map in the exact format returned
  by the API. Currently, Geonames-Elixir will make no attempt
  to format this response in any way.
  """
  alias Geonames.Endpoints, as: EP
  alias Geonames.Helpers

  # Compile-time list of endpoint modules; one public function is generated
  # per entry in the `for` block below.
  endpoints = [
    EP.Astergdem,
    EP.Children,
    EP.Cities,
    EP.Contains,
    EP.CountryCode,
    EP.CountryInfo,
    EP.CountrySubdivision,
    EP.Earthquakes,
    EP.FindNearby,
    EP.FindNearbyPlaceName,
    EP.FindNearbyPostalCodes,
    EP.FindNearbyStreets,
    EP.FindNearbyStreetsOSM,
    EP.FindNearbyWeather,
    EP.FindNearbyWikipedia,
    EP.FindNearestAddress,
    EP.FindNearestIntersection,
    EP.FindNearestIntersectionOSM,
    EP.FindNearbyPOIsOSM,
    EP.Get,
    EP.GTOPO30,
    EP.Hierarchy,
    EP.Neighbourhood,
    EP.Neighbours,
    EP.Ocean,
    EP.PostalCodeCountryInfo,
    EP.PostalCodeLookup,
    EP.PostalCodeSearch,
    EP.Search,
    EP.Siblings,
    EP.SRTM1,
    EP.SRTM3,
    EP.Timezone,
    EP.Weather,
    EP.WeatherICAO,
    EP.WikipediaBoundingBox,
    EP.WikipediaSearch
  ]

  for endpoint <- endpoints do
    @doc """
    Makes a request to the GeoNames endpoint `/#{endpoint.endpoint}`
    The arguments map may contain the following keys:
    #{Enum.map(endpoint.available_url_parameters, fn(e) -> "- #{e}\n" end)}
    Each request parameter should be supplied in a map. For example,
        Geonames.#{endpoint.function_name}(%{
          #{Enum.join(Enum.map(endpoint.available_url_parameters, fn(e) -> "#{to_string(e)}: \"val\"" end), ",\n      ")}
        })
    """
    # FIX: the previous spec declared `{ Atom.t, map }` — `Atom.t` is not a
    # type, and the function actually returns the decoded response map
    # directly (raising on error), never a tagged tuple.
    @spec unquote(endpoint.function_name)(map) :: map | no_return
    def unquote(endpoint.function_name)(args \\ %{}) do
      url_params = unquote(endpoint).url_arguments(args)

      case Helpers.required_parameters_provided?(unquote(endpoint).required_url_parameters, url_params) do
        true ->
          url = Helpers.build_url_string(unquote(endpoint).endpoint, url_params)

          case perform_geonames_request(url) do
            { :ok, json_response } ->
              json_response

            { :error, error_message } ->
              raise RuntimeError, message: error_message
          end

        false ->
          raise ArgumentError, message: "Not all required parameters were supplied"
      end
    end
  end

  @doc """
  Performs a simple get request to the specified URL.
  This is not specific to GeoNames and could be used
  for any basic GET request, but it assumes the response
  is in JSON.
  This function will return one of two values
      { :ok, %{ ... }}
      { :error, "Reason" }
  A successful request will return a parsed Map of the
  response. If an error occurs, an :error tuple will
  be returned with a string describing the error.
  """
  def perform_geonames_request(url) do
    case HTTPoison.get(url) do
      { :ok, %HTTPoison.Response{ status_code: 200, body: body }} ->
        { :ok, Poison.decode!(body) }

      # FIX: `body` was bound but unused here, producing a compiler warning.
      { :ok, %HTTPoison.Response{ status_code: status_code }} ->
        { :error, "An unexpected #{status_code} response was received"}

      { :error, %HTTPoison.Error{ reason: reason }} ->
        { :error, "Request failed with the following error: #{to_string(reason)}" }
    end
  end
end
|
lib/geonames.ex
| 0.825906
| 0.580144
|
geonames.ex
|
starcoder
|
defmodule Absinthe.Adapter.LanguageConventions do
  use Absinthe.Adapter
  alias Absinthe.Utils

  @moduledoc """
  This defines an adapter that supports GraphQL query documents in their
  conventional (in JS) camelcase notation, while allowing the schema to be
  defined using conventional (in Elixir) underscore (snakecase) notation, and
  transforming the names as needed for lookups, results, and error messages.
  For example, this document:
  ```
  {
    myUser: createUser(userId: 2) {
      firstName
      lastName
    }
  }
  ```
  Would map to an internal schema that used the following names:
  * `create_user` instead of `createUser`
  * `user_id` instead of `userId`
  * `first_name` instead of `firstName`
  * `last_name` instead of `lastName`
  Likewise, the result of executing this (camelcase) query document against our
  (snakecase) schema would have its names transformed back into camelcase on the
  way out:
  ```
  %{
    data: %{
      "myUser" => %{
        "firstName" => "Joe",
        "lastName" => "Black"
      }
    }
  }
  ```
  Note variables are a client-facing concern (they may be provided as
  parameters), so variable names should match the convention of the query
  document (eg, camelCase).
  """

  @doc "Converts a camelCase to snake_case"
  def to_internal_name(nil, _role) do
    nil
  end

  # Clause order matters: introspection names ("__typename" etc.) keep their
  # double-underscore prefix and only the remainder is converted.
  def to_internal_name("__" <> camelized_name, role) do
    "__" <> to_internal_name(camelized_name, role)
  end

  # Operation names are client-chosen and are passed through unchanged.
  def to_internal_name(camelized_name, :operation) do
    camelized_name
  end

  def to_internal_name(camelized_name, _role) do
    camelized_name
    |> Macro.underscore
  end

  @doc "Converts a snake_case name to camelCase"
  def to_external_name(nil, _role) do
    nil
  end

  # Introspection names keep their "__" prefix; only the remainder converts.
  def to_external_name("__" <> underscored_name, role) do
    "__" <> to_external_name(underscored_name, role)
  end

  # Names starting with an uppercase letter get upper camelCase
  # (presumably type/module-style names — default camelize, not lower:).
  def to_external_name(<< c :: utf8, _ :: binary>> = name, _) when c in ?A..?Z do
    name |> Utils.camelize
  end

  def to_external_name(underscored_name, _role) do
    underscored_name
    |> Utils.camelize(lower: true)
  end
end
|
deps/absinthe/lib/absinthe/adapter/language_conventions.ex
| 0.837819
| 0.770594
|
language_conventions.ex
|
starcoder
|
defmodule Asteroid.OAuth2.PKCE do
  @moduledoc false

  alias Asteroid.OAuth2

  @type code_challenge :: String.t()
  @type code_challenge_method :: :plain | :S256

  @typedoc """
  Must be the string representation of `t:code_challenge_method/0`
  """
  @type code_challenge_method_str :: String.t()
  @type code_verifier :: String.t()

  defmodule MalformedCodeChallengeError do
    @moduledoc """
    Exception returned when a code challenge is malformed
    Note that the length is restricted: from 43 to 128 characters.
    """

    defexception [:code_challenge]

    @impl true
    def message(%{code_challenge: code_challenge}) when byte_size(code_challenge) < 43 do
      "Code challenge must be at least 43 characters"
    end

    def message(%{code_challenge: code_challenge}) when byte_size(code_challenge) > 128 do
      "Code challenge must be no more than 128 characters"
    end

    def message(%{code_challenge: code_challenge}) do
      "Invalid character in code challenge `#{code_challenge}`"
    end
  end

  defmodule UnsupportedCodeChallengeMethodError do
    @moduledoc """
    Exception returned when a code challenge method is not supported
    Supported methods are those of `t:code_challenge_method/0` and are activated with the
    #{Asteroid.Config.link_to_option(:oauth2_pkce_allowed_methods)}
    configuration option.
    """

    defexception [:code_challenge_method_str]

    @impl true
    def message(%{code_challenge_method_str: code_challenge_method_str}) do
      # FIX: the message previously read "Invalid code challenge" although the
      # exception is about the *method*.
      "Invalid code challenge method `#{code_challenge_method_str}`"
    end
  end

  @doc """
  Returns `true` if the code challenge is valid (43-128 characters from the
  RFC 7636 unreserved alphabet), `false` otherwise
  """
  @spec code_challenge_valid?(code_challenge()) :: boolean()
  def code_challenge_valid?(code_challenge) do
    # Alphabet per RFC 7636: ALPHA / DIGIT / "." / "_" / "~" / "-"
    Regex.run(~r<^[\x41-\x5A\x61-\x7A\x30-\x39._~-]{43,128}$>, code_challenge) != nil
  end

  @doc """
  Returns `t:code_challenge_method/0` if the parameter is a valid code challenge method,
  `nil` otherwise
  """
  @spec code_challenge_method_from_string(String.t()) :: atom() | nil
  def code_challenge_method_from_string("plain"), do: :plain
  def code_challenge_method_from_string("S256"), do: :S256
  def code_challenge_method_from_string(_), do: nil

  @doc """
  Returns `:ok` if the code verifier is validated against the code challenge,
  `{:error, %Asteroid.OAuth2.InvalidGrantError{}}` otherwise
  """
  @spec verify_code_verifier(code_verifier(), code_challenge(), code_challenge_method()) ::
          :ok
          | {:error, %OAuth2.InvalidGrantError{}}
  def verify_code_verifier(code_verifier, code_challenge, :plain) do
    # :plain — straight equality between verifier and stored challenge.
    if code_verifier == code_challenge do
      :ok
    else
      {:error,
       OAuth2.InvalidGrantError.exception(
         grant: "code_verifier",
         reason: "invalid code verifier",
         # FIX: debug string had unbalanced backticks and a stray `)}`.
         debug_details: "code_verifier: `#{code_verifier}`, code_challenge_method: `:plain`"
       )}
    end
  end

  def verify_code_verifier(code_verifier, code_challenge, :S256) do
    # :S256 — BASE64URL(SHA256(verifier)) must equal the stored challenge.
    if Base.url_encode64(:crypto.hash(:sha256, code_verifier), padding: false) == code_challenge do
      :ok
    else
      {:error,
       OAuth2.InvalidGrantError.exception(
         grant: "code_verifier",
         reason: "invalid code verifier",
         # FIX: debug string had unbalanced backticks and a stray `)}`.
         debug_details: "code_verifier: `#{code_verifier}`, code_challenge_method: `:S256`"
       )}
    end
  end
end
|
lib/asteroid/oauth2/pkce.ex
| 0.897673
| 0.54819
|
pkce.ex
|
starcoder
|
defmodule ExMock do
  @moduledoc """
  Mocking macros for testing purposes. Usually used inside a unit test.
  Please see the README file on github for a tutorial
  ## Example
      defmodule MyTest do
        use ExUnit.Case
        import ExMock
        test "get" do
          with_mock HTTPotion,
            [get: fn("http://example.com", _headers) ->
                    HTTPotion.Response.new(status_code: 200,
                        body: "hello") end] do
            # Code which calls HTTPotion.get
            # Check that the call was made as we expected
            assert called HTTPotion.get("http://example.com", :_)
          end
        end
      end
  """

  @doc """
  Mock up `mock_module` with functions specified as a keyword
  list of function_name:implementation `mocks` for the duration
  of `test`.
  `opts` List of optional arguments passed to meck. `:passthrough` will
  passthrough arguments to the original module.
  ## Example
      with_mock(HTTPotion, [get: fn("http://example.com") ->
           "<html></html>" end] do
        # Tests that make the expected call
        assert called HTTPotion.get("http://example.com")
      end
  """
  defmacro with_mock(mock_module, opts \\ [], mocks, do: test) do
    # Single-module convenience wrapper around with_mocks/1.
    quote do
      unquote(__MODULE__).with_mocks(
        [{unquote(mock_module), unquote(opts), unquote(mocks)}], do: unquote(test))
    end
  end

  @doc """
  Mock up multiple modules for the duration of `test`.
  ## Example
      with_mocks([{HTTPotion, opts, [get: fn("http://example.com") -> "<html></html>" end]}]) do
        # Tests that make the expected call
        assert called HTTPotion.get("http://example.com")
      end
  """
  defmacro with_mocks(mocks, do: test) do
    quote do
      # Set up each module once (:meck.new only on first sight), install the
      # given stubs, and collect the distinct mocked modules for teardown.
      # NOTE: `assert` here resolves in the caller's (ExUnit test) scope.
      mock_modules =
        unquote(mocks)
        |> Enum.reduce([], fn({m, opts, mock_fns}, ms) ->
          unless m in ms do
            :meck.new(m, opts)
          end
          unquote(__MODULE__)._install_mock(m, mock_fns)
          assert :meck.validate(m) == true
          [ m | ms] |> Enum.uniq
        end)
      # Always unload the mocks, even if the test body raises.
      try do
        unquote(test)
      after
        for m <- mock_modules, do: :meck.unload(m)
      end
    end
  end

  @doc """
  Shortcut to avoid multiple blocks when a test requires a single
  mock.
  For full description see `with_mock`.
  ## Example
      test_with_mock "test_name", HTTPotion,
        [get: fn(_url) -> "<html></html>" end] do
        HTTPotion.get("http://example.com")
        assert called HTTPotion.get("http://example.com")
      end
  """
  defmacro test_with_mock(test_name, mock_module, opts \\ [], mocks, test_block) do
    quote do
      test unquote(test_name) do
        unquote(__MODULE__).with_mock(
          unquote(mock_module), unquote(opts), unquote(mocks), unquote(test_block))
      end
    end
  end

  @doc """
  Shortcut to avoid multiple blocks when a test requires a single
  mock. Accepts a context argument enabling information to be shared
  between callbacks and the test.
  For full description see `with_mock`.
  ## Example
      setup do
        doc = "<html></html>"
        {:ok, doc: doc}
      end

      test_with_mock "test_with_mock with context", %{doc: doc}, HTTPotion, [],
        [get: fn(_url) -> doc end] do
        HTTPotion.get("http://example.com")
        assert called HTTPotion.get("http://example.com")
      end
  """
  defmacro test_with_mock(test_name, context, mock_module, opts, mocks, test_block) do
    quote do
      test unquote(test_name), unquote(context) do
        unquote(__MODULE__).with_mock(
          unquote(mock_module), unquote(opts), unquote(mocks), unquote(test_block))
      end
    end
  end

  @doc """
  Use inside a `with_mock` block to determine whether
  a mocked function was called as expected.
  ## Example
      assert called HTTPotion.get("http://example.com")
  """
  defmacro called({ {:., _, [ module , f ]} , _, args }) do
    # Destructures a remote-call AST (Mod.fun(args)) into module/function/args
    # and delegates the history check to :meck.called/3.
    quote do
      :meck.called unquote(module), unquote(f), unquote(args)
    end
  end

  @doc false
  # Installs each {function_name, implementation} pair into meck, recursively.
  def _install_mock(_, []), do: :ok
  def _install_mock(mock_module, [ {fn_name, value} | tail ]) do
    :meck.expect(mock_module, fn_name, value)
    _install_mock(mock_module, tail)
  end
end
|
lib/ex_mock.ex
| 0.869174
| 0.661121
|
ex_mock.ex
|
starcoder
|
defmodule Kuddle.Encoder do
  @moduledoc """
  Encodes a Kuddle document into a KDL blob
  """
  alias Kuddle.Value
  alias Kuddle.Node

  import Kuddle.Utils

  @doc """
  Encodes a kuddle document as a KDL string
  """
  @spec encode(Kuddle.Decoder.document()) ::
          {:ok, String.t()}
          | {:error, term()}
  def encode([]) do
    # An empty document encodes to a single newline.
    {:ok, "\n"}
  end

  def encode(doc) do
    case do_encode(doc, []) do
      {:ok, rows} ->
        {:ok, IO.iodata_to_binary(rows)}
    end
  end

  # Encodes nodes one at a time, accumulating iodata rows in reverse order.
  defp do_encode([], rows) do
    {:ok, Enum.reverse(rows)}
  end

  # Node without a children block: "name attr attr\n"
  defp do_encode([%Node{name: name, attributes: attrs, children: nil} | rest], rows) do
    node_name = encode_node_name(name)
    result = [node_name]

    result =
      case encode_node_attributes(attrs, []) do
        [] ->
          result

        node_attrs ->
          [result, " ", Enum.intersperse(node_attrs, " ")]
      end

    do_encode(rest, [[result, "\n"] | rows])
  end

  # Node with a children block: children are encoded recursively and indented
  # one level inside "{ ... }".
  defp do_encode([%Node{name: name, attributes: attrs, children: children} | rest], rows) do
    node_name = encode_node_name(name)
    result = [node_name]

    result =
      case encode_node_attributes(attrs, []) do
        [] ->
          result

        node_attrs ->
          [result, " ", Enum.intersperse(node_attrs, " ")]
      end

    result = [result, " {\n"]

    result =
      case children do
        [] ->
          result

        children ->
          case do_encode(children, []) do
            {:ok, rows} ->
              [
                result,
                indent(rows, "    "),
                "\n",
              ]
          end
      end

    result = [result, "}\n"]
    do_encode(rest, [result | rows])
  end

  # Attributes are either bare values or key=value pairs.
  defp encode_node_attributes([%Value{} = value | rest], acc) do
    encode_node_attributes(rest, [encode_value(value) | acc])
  end

  defp encode_node_attributes([{%Value{} = key, %Value{} = value} | rest], acc) do
    result = [encode_value(key), "=", encode_value(value)]
    encode_node_attributes(rest, [result | acc])
  end

  defp encode_node_attributes([], acc) do
    Enum.reverse(acc)
  end

  defp encode_value(%Value{value: nil}) do
    "null"
  end

  defp encode_value(%Value{type: :boolean, value: value}) when is_boolean(value) do
    Atom.to_string(value)
  end

  defp encode_value(%Value{type: :string, value: value}) when is_binary(value) do
    encode_string(value)
  end

  # Integers carry their source radix in :format and are re-emitted with the
  # matching prefix (0b/0o/0x, hex lowercased).
  defp encode_value(%Value{type: :integer, value: value, format: format}) when is_integer(value) do
    case format do
      :bin ->
        ["0b", Integer.to_string(value, 2)]

      :oct ->
        ["0o", Integer.to_string(value, 8)]

      :dec ->
        Integer.to_string(value, 10)

      :hex ->
        ["0x", String.downcase(Integer.to_string(value, 16))]
    end
  end

  defp encode_value(%Value{type: :float, value: value}) when is_float(value) do
    String.upcase(Float.to_string(value))
  end

  # Arbitrary-precision floats stored as Decimal structs.
  defp encode_value(%Value{type: :float, value: %Decimal{} = value}) do
    String.upcase(Decimal.to_string(value, :scientific))
  end

  # Identifiers are emitted verbatim.
  defp encode_value(%Value{type: :id, value: value}) when is_binary(value) do
    value
  end

  defp encode_string(str) do
    "\"" <> do_encode_string(str, []) <> "\""
  end

  # Character-by-character escaping of string contents.
  defp do_encode_string(<<>>, acc) do
    IO.iodata_to_binary(Enum.reverse(acc))
  end

  # NOTE(review): "/" is escaped as "\\/" here — unusual for most formats;
  # presumably mirrors the kuddle decoder's escape set. Confirm against the
  # targeted KDL spec version.
  defp do_encode_string(<<"/", rest::binary>>, acc) do
    do_encode_string(rest, ["\\/" | acc])
  end

  defp do_encode_string(<<"\\", rest::binary>>, acc) do
    do_encode_string(rest, ["\\\\" | acc])
  end

  defp do_encode_string(<<"\"", rest::binary>>, acc) do
    do_encode_string(rest, ["\\\"" | acc])
  end

  defp do_encode_string(<<"\b", rest::binary>>, acc) do
    do_encode_string(rest, ["\\b" | acc])
  end

  defp do_encode_string(<<"\f", rest::binary>>, acc) do
    do_encode_string(rest, ["\\f" | acc])
  end

  defp do_encode_string(<<"\r", rest::binary>>, acc) do
    do_encode_string(rest, ["\\r" | acc])
  end

  defp do_encode_string(<<"\n", rest::binary>>, acc) do
    do_encode_string(rest, ["\\n" | acc])
  end

  defp do_encode_string(<<"\t", rest::binary>>, acc) do
    do_encode_string(rest, ["\\t" | acc])
  end

  defp do_encode_string(<<c::utf8, rest::binary>>, acc) do
    do_encode_string(rest, [<<c::utf8>> | acc])
  end

  # Node names are quoted only when they are not plain identifiers.
  defp encode_node_name(name) do
    if valid_identifier?(name) and not need_quote?(name) do
      name
    else
      encode_string(name)
    end
  end

  # Prefixes every line of the (iodata) rows with `spacer` for nesting.
  defp indent(rows, spacer) do
    rows
    |> IO.iodata_to_binary()
    |> String.trim_trailing()
    |> String.split("\n")
    |> Enum.map(fn row ->
      [spacer, row]
    end)
    |> Enum.intersperse("\n")
  end
end
|
lib/kuddle/encoder.ex
| 0.703142
| 0.54952
|
encoder.ex
|
starcoder
|
import TypeClass
defclass Witchcraft.Bifunctor do
  @moduledoc """
  Similar to `Witchcraft.Functor`, but able to map two functions over two
  separate portions of some data structure (some product type).
  Especially helpful when you need different behaviours on different fields.
  ## Type Class
  An instance of `Witchcraft.Bifunctor` must also implement `Witchcraft.Functor`,
  and define `Witchcraft.Apply.ap/2`.
      Functor  [map/2]
         ↓
      Bifunctor [bimap/2]
  """

  extend Witchcraft.Functor

  alias __MODULE__

  use Quark

  @type t :: any()

  defmacro __using__(opts \\ []) do
    quote do
      use Witchcraft.Functor, unquote(opts)
      import unquote(__MODULE__), unquote(opts)
    end
  end

  where do
    @doc """
    `map` separate functions over two fields in a product type.
    The order of fields doesn't always matter in the map.
    The first/second function application is determined by the instance.
    It also does not have to map all fields in a product type.
    ## Diagram
        ┌------------------------------------┐
        ↓                                    |
        %Combo{a: 5, b: :ok, c: "hello"} |> bimap(&(&1 * 100), &String.upcase/1)
                              ↑                                 |
                              └---------------------------------┘
        #=> %Combo{a: 500, b: :ok, c: "HELLO"}
    ## Examples
        iex> {1, "a"} |> bimap(&(&1 * 100), &(&1 <> "!"))
        {100, "a!"}
        iex> {:msg, 42, "number is below 50"}
        ...> |> bimap(&(%{subject: &1}), &String.upcase/1)
        {:msg, %{subject: 42}, "NUMBER IS BELOW 50"}
    """
    @spec bimap(Bifunctor.t(), (any() -> any()), (any() -> any())) :: Bifunctor.t()
    def bimap(data, f, g)
  end

  properties do
    # Law: mapping identity over both fields changes nothing.
    def identity(data) do
      a = generate(data)
      left = Bifunctor.bimap(a, &Quark.id/1, &Quark.id/1)
      equal?(left, a)
    end

    # Law: bimap of composed functions equals composition of bimaps.
    def composition(data) do
      a = generate(data)

      f = &Witchcraft.Semigroup.append(&1, &1)
      g = &inspect/1
      h = &is_number/1
      i = &!/1

      left = Bifunctor.bimap(a, fn x -> f.(g.(x)) end, fn y -> h.(i.(y)) end)
      right = a |> Bifunctor.bimap(g, i) |> Bifunctor.bimap(f, h)

      equal?(left, right)
    end
  end

  @doc """
  The same as `bimap/3`, but with the functions curried
  ## Examples
      iex> {:ok, 2, "hi"}
      ...> |> bilift(&*/2, &<>/2)
      ...> |> bimap(fn f -> f.(9) end, fn g -> g.("?!") end)
      {:ok, 18, "hi?!"}
  """
  @spec bilift(Bifunctor.t(), fun(), fun()) :: Bifunctor.t()
  def bilift(data, f, g), do: bimap(data, curry(f), curry(g))

  @doc """
  `map` a function over the first value only
  ## Examples
      iex> {:ok, 2, "hi"} |> map_first(&(&1 * 100))
      {:ok, 200, "hi"}
  """
  @spec map_first(Bifunctor.t(), (any() -> any())) :: Bifunctor.t()
  def map_first(data, f), do: Bifunctor.bimap(data, f, &Quark.id/1)

  @doc """
  The same as `map_first`, but with a curried function
  ## Examples
      iex> {:ok, 2, "hi"}
      ...> |> lift_first(&*/2)
      ...> |> map_first(fn f -> f.(9) end)
      {:ok, 18, "hi"}
  """
  @spec lift_first(Bifunctor.t(), fun()) :: Bifunctor.t()
  def lift_first(data, f), do: map_first(data, curry(f))

  @doc """
  `map` a function over the second value only
  ## Examples
      iex> {:ok, 2, "hi"} |> map_second(&(&1 <> "!?"))
      {:ok, 2, "hi!?"}
  """
  @spec map_second(Bifunctor.t(), (any() -> any())) :: Bifunctor.t()
  def map_second(data, g), do: Bifunctor.bimap(data, &Quark.id/1, g)

  @doc """
  The same as `map_second`, but with a curried function
  ## Examples
      iex> {:ok, 2, "hi"}
      ...> |> lift_second(&<>/2)
      ...> |> map_second(fn f -> f.("?!") end)
      {:ok, 2, "hi?!"}
  """
  @spec lift_second(Bifunctor.t(), fun()) :: Bifunctor.t()
  def lift_second(data, g), do: map_second(data, curry(g))
end
# Bifunctor instance for tuples: `f` is applied to the second-to-last
# element and `g` to the last; any leading elements pass through untouched.
definst Witchcraft.Bifunctor, for: Tuple do
  # credo:disable-for-lines:6 Credo.Check.Refactor.PipeChainStart
  # Property-test generator: a random tuple of 2..12 arbitrary elements.
  custom_generator(_) do
    fn -> TypeClass.Property.Generator.generate(nil) end
    |> Stream.repeatedly()
    |> Enum.take(Enum.random(2..12))
    |> List.to_tuple()
  end

  def bimap(tuple, f, g) do
    case tuple do
      {a, b} ->
        {f.(a), g.(b)}

      {x, a, b} ->
        {x, f.(a), g.(b)}

      {x, y, a, b} ->
        {x, y, f.(a), g.(b)}

      {x, y, z, a, b} ->
        {x, y, z, f.(a), g.(b)}

      big_tuple when tuple_size(big_tuple) > 5 ->
        # For tuples of arity > 5: apply f to the second-to-last element,
        # then map the whole tuple with g and restore the f-mapped slot.
        # NOTE(review): this assumes the Tuple Functor instance maps only
        # the *last* element — confirm against Witchcraft.Functor's Tuple
        # definst before relying on it.
        index_a = tuple_size(big_tuple) - 2

        mapped_a =
          big_tuple
          |> elem(index_a)
          |> f.()

        big_tuple
        |> Witchcraft.Functor.map(g)
        |> put_elem(index_a, mapped_a)
    end
  end
end
|
lib/witchcraft/bifunctor.ex
| 0.731251
| 0.502258
|
bifunctor.ex
|
starcoder
|
defmodule Riptide.Store.SQL do
  @moduledoc """
  A minimal builder for parameterized SQL statements.

  A query token is created with `select/1`, `update/1` or `delete/1`,
  refined with `columns/2`, `set/2` and `where/2`, and finally rendered by
  `to_sql/1` into `{sql, params}` using positional `$1..$n` placeholders
  (SET values are numbered first, WHERE values after them).
  """

  defstruct [:table, :columns, :where, :mode, :set]

  @doc "Returns an empty query token for `table` with no mode chosen yet."
  def new(table) do
    %__MODULE__{table: table, columns: [], set: [], where: %{}}
  end

  @doc "Starts a SELECT query against `table`."
  def select(table) do
    table
    |> new()
    |> Map.put(:mode, "SELECT")
  end

  @doc "Starts an UPDATE query against `table`."
  def update(table) do
    table
    |> new()
    |> Map.put(:mode, "UPDATE")
  end

  @doc "Starts a DELETE query against `table`."
  def delete(table) do
    table
    |> new()
    |> Map.put(:mode, "DELETE")
  end

  @doc "Appends `columns` to the query's column list."
  def columns(query, columns), do: %{query | columns: query.columns ++ columns}

  @doc "Appends `set` (a keyword list of column/value pairs) to the query."
  def set(query, set), do: %{query | set: query.set ++ set}

  @doc """
  Merges equality `where` clauses into the query.

  Entries keyed by the wildcard atom `:_` are dropped before merging.
  """
  def where(query, where) do
    additions =
      for {column, value} <- where, column != :_, into: %{} do
        {column, value}
      end

    %{query | where: Map.merge(query.where, additions)}
  end

  @doc """
  Renders the token as `{sql, params}`.

  `params` lists SET values first, then WHERE values, matching the
  `$n` placeholder numbering inside the SQL string.
  """
  def to_sql(query) do
    where_list = Enum.into(query.where, [])

    sql =
      [
        "#{query.mode} #{Enum.join(query.columns, ", ")}",
        # UPDATE statements have no FROM keyword.
        if query.mode !== "UPDATE" do
          "FROM"
        end,
        query.table,
        set_fragment(query.set),
        where_fragment(where_list, Enum.count(query.set))
      ]
      |> List.flatten()
      |> Enum.filter(& &1)
      |> Enum.join("\n")

    {sql, Keyword.values(query.set) ++ Keyword.values(where_list)}
  end

  # Renders the SET clause ("SET " plus "col = $n" assignments), or nil
  # when nothing is being set.
  defp set_fragment([]), do: nil

  defp set_fragment(set) do
    assignments =
      set
      |> Keyword.keys()
      |> Enum.with_index(1)
      |> Enum.map_join(",", fn {key, n} -> "#{key} = $#{n}" end)

    ["SET ", assignments]
  end

  # Renders the WHERE clause with placeholders offset past the SET values,
  # or nil when there are no conditions.
  defp where_fragment([], _offset), do: nil

  defp where_fragment(where_list, offset) do
    conditions =
      where_list
      |> Keyword.keys()
      |> Enum.with_index(1)
      |> Enum.map_join(" AND\n", fn {column, n} -> "#{column} = $#{n + offset}" end)

    ["WHERE ", conditions]
  end
end
defmodule Riptide.Store.Next.SQL do
  @moduledoc """
  Successor SQL builder where each WHERE clause carries its own comparator
  (`:eq | :lt | :lte | :gt | :gte`). `to_sql/1` renders a `:select` token
  into `{sql, params}` with positional `$1..$n` placeholders.
  """

  defstruct [
    :table,
    :mode,
    :select,
    :where,
    :set
  ]

  @doc "Returns a blank query token for `table`."
  def new(table) do
    %__MODULE__{table: table, mode: :unknown, select: [], where: [], set: []}
  end

  @doc "Appends `columns` to an existing token's SELECT list."
  def select(%__MODULE__{} = token, columns) do
    %{token | select: token.select ++ columns}
  end

  @doc "Starts a SELECT query on `table` with the given `columns`."
  def select(table, columns) when is_binary(table) do
    token = %{new(table) | mode: :select}
    select(token, columns)
  end

  @doc "Adds equality WHERE clauses; pairs with a `nil` value are ignored."
  def where(%__MODULE__{} = token, clauses) do
    where(token, :eq, clauses)
  end

  @doc "Adds WHERE clauses using `comparator`; `nil`-valued pairs are ignored."
  def where(%__MODULE__{} = token, comparator, clauses) do
    additions =
      for {column, value} <- clauses, value != nil do
        {column, comparator, value}
      end

    %{token | where: token.where ++ additions}
  end

  @doc """
  Renders a `:select` token as `{sql, params}`.

  Parameter order matches the order in which WHERE clauses were added.
  """
  def to_sql(%__MODULE__{mode: :select} = token) do
    where_sql =
      case token.where do
        [] ->
          nil

        clauses ->
          conditions =
            clauses
            |> Enum.with_index(1)
            |> Enum.map(fn {{column, comparator, _value}, n} ->
              [column, operator(comparator), "$#{n}"]
            end)
            |> Enum.intersperse("AND")

          ["WHERE", conditions]
      end

    sql =
      ["SELECT", Enum.join(token.select, ", "), "FROM", token.table, where_sql]
      |> List.flatten()
      |> Enum.filter(& &1)
      |> Enum.join(" ")

    {sql, Enum.map(token.where, fn {_column, _comparator, value} -> value end)}
  end

  # SQL operator string for each supported comparator atom.
  defp operator(:eq), do: "="
  defp operator(:lt), do: "<"
  defp operator(:lte), do: "<="
  defp operator(:gt), do: ">"
  defp operator(:gte), do: ">="
end
|
packages/elixir/lib/riptide/store/sql.ex
| 0.532668
| 0.449936
|
sql.ex
|
starcoder
|
defmodule Utils do
  @moduledoc """
  Helper functions shared by the performance-test modules: process
  monitoring, plot generation, metric persistence and test orchestration.
  """

  # Fraction of messages probed for metrics on the sink: 1/100.
  @numerator_of_probing_factor 1
  @denominator_of_probing_factor 100

  # Metrics for one test try: keys are key-paths into the sink state
  # (e.g. `[:metrics, :passing_time_avg]`), values are the fetched values.
  @type single_run_metrics :: %{list(any()) => any()}

  defmodule TestOptions do
    @moduledoc "Parameters describing a single performance-test run."

    @enforce_keys [
      :mode,
      :number_of_elements,
      :how_many_tries,
      :tick,
      :inital_generator_frequency,
      :should_adjust_generator_frequency?,
      :should_produce_plots?,
      :chosen_metrics,
      :reductions
    ]
    defstruct @enforce_keys

    # NOTE: `inital_generator_frequency` keeps its (misspelled) historical
    # name — it is part of the public struct interface.
    @type t :: %__MODULE__{
            mode: String.t(),
            number_of_elements: integer(),
            how_many_tries: integer(),
            tick: integer(),
            inital_generator_frequency: integer(),
            # Fixed typespec: this flag is used as a boolean, not an integer.
            should_adjust_generator_frequency?: boolean(),
            should_produce_plots?: boolean(),
            chosen_metrics: list(atom()),
            reductions: integer()
          }
  end

  @doc """
  Starts monitoring the process with given `pid` and waits until it terminates and sends `:DOWN` message
  """
  @spec wait_for_complete(pid()) :: nil
  def wait_for_complete(pid) do
    ref = Process.monitor(pid)

    receive do
      {:DOWN, ^ref, :process, ^pid, _msg} -> nil
    end
  end

  @doc """
  Creates an .svg representation of a HowLongWasAMessagePassingThroughThePipeline(time_when_message_was_sent) plot with the use of ContEx library, based on the probe of points, the average time spent by a message in the pipeline, and the standard deviation of that value.

  Args:
    `times` - list of {x, y} tuples, where x is a time the message was sent and y is duration of the time period which elapsed between the message generation and that message arrival on the sink
    `avg` - average time messages spent in the pipeline
    `std` - standard deviation of the time messages spent in the pipeline

  Returns the string containing the .svg file content which can be later saved in the filesystem.
  """
  @spec prepare_plot(list({integer(), integer()}), float(), float()) :: any()
  def prepare_plot(times, avg, std) do
    # Input times are in nanoseconds; convert to milliseconds for the plot.
    times = times |> Enum.map(fn {x, y} -> {x / 1_000_000, y / 1_000_000} end)
    ds = Contex.Dataset.new(times, ["x", "y"])
    point_plot = Contex.PointPlot.new(ds)

    plot =
      Contex.Plot.new(600, 400, point_plot)
      |> Contex.Plot.plot_options(%{legend_setting: :legend_right})
      |> Contex.Plot.titles(
        "AVG: #{:erlang.float_to_binary(avg / 1_000_000, decimals: 3)} ms",
        "STD: #{:erlang.float_to_binary(std / 1_000_000, decimals: 3)} ms"
      )
      |> Contex.Plot.axis_labels("Time of sending[ms]", "Passing time[ms]")

    {:safe, output} = Contex.Plot.to_svg(plot)
    output
  end

  @doc """
  Saves the test results in the filesystem, as a .csv file.

  Args:
    `metrics` - list of metrics gathered during a single test run, a value returned by `Utils.launch_test/1`,
    `metrics_names` - list of atoms describing the names of the metrics which should be saved in the filesystem,
    `path` - path to the file where the result metrics should be stored,
    `should_provide_metrics_header?` - `true` if the first line in the result file should contain the names of metrics, `false` otherwise.

  Returns the result of the `File.write/3` invocation.
  """
  @spec save_metrics(list(single_run_metrics()), list(atom()), String.t(), boolean()) ::
          :ok | {:error, any()}
  def save_metrics(metrics, metrics_names, path, should_provide_metrics_header?) do
    if should_provide_metrics_header? do
      provide_results_file_header(metrics_names, path)
    end

    # Metric names are matched against [:metrics, name] key-paths.
    metrics_to_be_written_to_csv =
      metrics_names |> Enum.map(fn metric_name -> [:metrics, metric_name] end)

    content =
      metrics
      |> Enum.map(fn one_try_metrics ->
        one_try_metrics
        |> Enum.filter(fn {key, _value} -> key in metrics_to_be_written_to_csv end)
        |> Enum.map_join(",", fn {_key, value} -> value end)
      end)
      |> Enum.reject(&(&1 == ""))
      |> Enum.join("\n")

    File.write(
      path,
      content,
      [:append]
    )
  end

  @doc """
  Launches a test parametrized with the Utils.TestOptions structure and returns the metrics gathered during that test.

  Args:
    opts - TestOptions structure describing the parameters of the test.

  Returns: a list of maps, where each map describes the metrics gathered during a single try of the test. The keys in each of these maps are lists of keys pointing to the desired information
  in the internal state of Sink, and the value is the desired information. Exemplary map describing the metrics gathered during a single run:
  ```
  %{
    [:metrics, :generator_frequency] => 4375,
    [:metrics, :passing_time_avg] => 3112845.262290126,
    [:metrics, :passing_time_std] => 625614.153995784,
  }
  ```

  Raises `ArgumentError` when `opts.mode` is not one of "pull", "push" or "autodemand".
  """
  @spec launch_test(TestOptions.t()) :: list(single_run_metrics())
  def launch_test(opts) do
    module =
      case opts.mode do
        "pull" ->
          PullMode

        "push" ->
          PushMode

        "autodemand" ->
          AutoDemand

        value ->
          # Fail fast with a clear error instead of binding a non-module
          # value and crashing later inside Module.concat/2.
          raise ArgumentError, "Unknown mode: #{inspect(value)}"
      end

    chosen_metrics =
      prepare_information_to_be_fetched_from_sink_state(
        opts.chosen_metrics,
        opts.should_produce_plots?
      )

    options = %{
      number_of_elements: opts.number_of_elements,
      source: nil,
      filter: Module.concat(module, Filter).__struct__(reductions: opts.reductions),
      sink:
        Module.concat(module, Sink).__struct__(
          tick: opts.tick,
          how_many_tries: opts.how_many_tries,
          numerator_of_probing_factor: @numerator_of_probing_factor,
          denominator_of_probing_factor: @denominator_of_probing_factor,
          should_produce_plots?: opts.should_produce_plots?,
          supervisor_pid: self(),
          chosen_metrics: chosen_metrics
        )
    }

    # With frequency adjustment enabled the generator searches in the range
    # [0, 2 * initial]; otherwise the frequency is fixed.
    {initial_lower_bound, initial_upper_bound} =
      if opts.should_adjust_generator_frequency? do
        {0, opts.inital_generator_frequency * 2}
      else
        {opts.inital_generator_frequency, opts.inital_generator_frequency}
      end

    options = %{
      options
      | source:
          Module.concat(module, Source).__struct__(
            initial_lower_bound: initial_lower_bound,
            initial_upper_bound: initial_upper_bound
          )
    }

    IO.puts(
      "[Performance test] Starting test. MODE: #{opts.mode} NUMBER OF TRIES: #{opts.how_many_tries} tries+1 warmup try ESTIMATED DURATION: #{div(opts.tick * (opts.how_many_tries + 1), 1000)} s"
    )

    {:ok, pid} = Pipeline.start_link(options)
    Pipeline.play(pid)
    result_metrics = gather_metrics()
    Pipeline.stop_and_terminate(pid, blocking?: true)
    result_metrics
  end

  # Collects {:new_metrics, map} messages sent by the sink until :finished
  # arrives, preserving arrival order.
  defp gather_metrics() do
    receive do
      {:new_metrics, new_metrics} -> [new_metrics | gather_metrics()]
      :finished -> []
    end
  end

  # Expands metric names into [:metrics, name] key-paths and, when plots are
  # requested, adds the extra keys the plotting code needs; deduplicated.
  defp prepare_information_to_be_fetched_from_sink_state(chosen_metrics, should_prepare_plots) do
    chosen_metrics = chosen_metrics |> Enum.map(fn key -> [:metrics, key] end)

    chosen_metrics =
      chosen_metrics ++
        if should_prepare_plots do
          [
            [:single_try_state, :times],
            [:metrics, :passing_time_avg],
            [:metrics, :passing_time_std]
          ]
        else
          []
        end

    MapSet.new(chosen_metrics) |> MapSet.to_list()
  end

  # Appends a CSV header line (comma-joined metric names) to the results file.
  defp provide_results_file_header(metrics_names, path) do
    if metrics_names != [] do
      content = (metrics_names |> Enum.join(",")) <> "\n"

      File.write(
        path,
        content,
        [:append]
      )

      nil
    end
  end
end
|
lib/Utils.ex
| 0.899273
| 0.768255
|
Utils.ex
|
starcoder
|
defmodule Bolt.Sips.BoltKitCase do
  @moduledoc """
  tag your tests with `boltkit`, like this:

      @tag boltkit: %{
        url: "neo4j://127.0.0.1:9001/?name=molly&age=1",
        scripts: [
          {"test/scripts/get_routing_table_with_context.script", 9001},
          {"test/scripts/return_x.bolt", 9002}
        ],
        debug: true
      }

  and then use the prefix returned via the context, for working with the stubbed connection(s)

      test "get_routing_table_with_context.script", %{prefix: prefix} do
        assert ...
      end
  """

  @moduletag :boltkit

  use ExUnit.CaseTemplate
  use Retry

  alias Porcelain.Process, as: Proc

  require Logger

  setup_all do
    # NOTE(review): returns Porcelain.reinit/1's result directly — confirm
    # it is a valid setup_all return value (:ok or a context map/keywords).
    Porcelain.reinit(Porcelain.Driver.Basic)
  end

  # Per-test setup: spawn one `boltstub` server per configured script, open
  # a Bolt.Sips connection to `url`, and hand both back via the context.
  setup %{boltkit: boltkit} do
    prefix = Map.get(boltkit, :prefix, UUID.uuid4())
    url = Map.get(boltkit, :url, "bolt://127.0.0.1")
    porcelains = stub_servers(boltkit)

    pid =
      with {:ok, pid} <- connect(url, prefix) do
        pid
      else
        _ -> raise RuntimeError, "cannot create a Bolt.Sips process"
      end

    on_exit(fn ->
      porcelains
      |> Enum.each(fn
        {:ok, porcelain} ->
          # wait for boltstub to finish
          :timer.sleep(150)

          # Drain the stub's stdout into our stdio; an exception here means
          # the process already closed its output.
          with true <- Proc.alive?(porcelain),
               %Proc{out: out} <- porcelain do
            try do
              Enum.into(out, IO.stream(:stdio, :line))
            rescue
              _ ->
                Logger.debug("BoltStub's out was flushed.")
                :rescued
            end
          else
            _e ->
              Logger.debug("BoltStub ended prematurely.")
          end

        e ->
          Logger.error(inspect(e))
      end)
    end)

    {:ok, porcelains: porcelains, prefix: prefix, sips: pid, url: url}
  end

  # Spawns a `boltstub` OS process per {script, port} pair, waiting until
  # each port accepts TCP connections. Returns {:ok, proc} | {:error, msg}.
  defp stub_servers(%{scripts: scripts} = args) do
    opts =
      if Map.get(args, :debug, false) do
        [out: IO.stream(:stderr, :line)]
      else
        []
      end

    scripts
    |> Enum.map(fn {script, port} ->
      with true <- File.exists?(script) do
        sport = Integer.to_string(port)
        porcelain = Porcelain.spawn("boltstub", [sport, script], opts)
        wait_for_socket('127.0.0.1', port)
        {:ok, porcelain}
      else
        _ -> {:error, script <> ", not found."}
      end
    end)
  end

  @sock_opts [:binary, active: false]

  # Polls (via Retry's `retry` macro) until a TCP connect to address:port
  # succeeds. `address` is a charlist, as required by :gen_tcp.connect/4.
  defp wait_for_socket(address, port) do
    retry with: delay_stream() do
      with {:ok, socket} <- :gen_tcp.connect(address, port, @sock_opts, 100) do
        socket
      end
    end
  end

  # Backoff schedule for wait_for_socket/2: linear growth from `delay`,
  # capped at 1000 ms, at most 3 attempts (helpers come from `use Retry`).
  defp delay_stream(delay \\ 150),
    do:
      delay
      |> lin_backoff(2)
      |> cap(1000)
      |> Stream.take(3)

  # Opens a Bolt.Sips connection with test credentials under `prefix`.
  defp connect(url, prefix) do
    conf = [
      url: url,
      basic_auth: [username: "neo4j", password: "password"],
      retry_linear_backoff: [delay: 150, factor: 1, tries: 1],
      # pool: DBConnection.Ownership,
      pool_size: 1,
      prefix: prefix
      # after_connect_timeout: fn _ -> nil end,
      # queue_timeout: 100,
      # queue_target: 100,
      # queue_interval: 10
    ]

    Logger.debug("creating #{url}, prefix: #{prefix}")
    Bolt.Sips.start_link(conf)
  end
end
|
test/support/boltkit_case.ex
| 0.6137
| 0.44553
|
boltkit_case.ex
|
starcoder
|
# NOTE(review): Protobuf schema module — presumably generated from
# tensorflow's meta_graph.proto (protoc-gen-elixir); avoid hand edits.
# Backing entry type for the `function_aliases` map field (string => string).
defmodule Tensorflow.MetaGraphDef.MetaInfoDef.FunctionAliasesEntry do
  @moduledoc false
  use Protobuf, map: true, syntax: :proto3

  @type t :: %__MODULE__{
          key: String.t(),
          value: String.t()
        }
  defstruct [:key, :value]

  field(:key, 1, type: :string)
  field(:value, 2, type: :string)
end
# NOTE(review): generated Protobuf schema (meta_graph.proto) — avoid hand edits.
# Metadata about a MetaGraph: versions, stripped op list, tags, etc.
defmodule Tensorflow.MetaGraphDef.MetaInfoDef do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          meta_graph_version: String.t(),
          stripped_op_list: Tensorflow.OpList.t() | nil,
          any_info: Google.Protobuf.Any.t() | nil,
          tags: [String.t()],
          tensorflow_version: String.t(),
          tensorflow_git_version: String.t(),
          stripped_default_attrs: boolean,
          function_aliases: %{String.t() => String.t()}
        }
  defstruct [
    :meta_graph_version,
    :stripped_op_list,
    :any_info,
    :tags,
    :tensorflow_version,
    :tensorflow_git_version,
    :stripped_default_attrs,
    :function_aliases
  ]

  field(:meta_graph_version, 1, type: :string)
  field(:stripped_op_list, 2, type: Tensorflow.OpList)
  field(:any_info, 3, type: Google.Protobuf.Any)
  field(:tags, 4, repeated: true, type: :string)
  field(:tensorflow_version, 5, type: :string)
  field(:tensorflow_git_version, 6, type: :string)
  field(:stripped_default_attrs, 7, type: :bool)

  field(:function_aliases, 8,
    repeated: true,
    type: Tensorflow.MetaGraphDef.MetaInfoDef.FunctionAliasesEntry,
    map: true
  )
end
# NOTE(review): generated Protobuf schema (meta_graph.proto) — avoid hand edits.
# Map-entry type for MetaGraphDef.collection_def (string => CollectionDef).
defmodule Tensorflow.MetaGraphDef.CollectionDefEntry do
  @moduledoc false
  use Protobuf, map: true, syntax: :proto3

  @type t :: %__MODULE__{
          key: String.t(),
          value: Tensorflow.CollectionDef.t() | nil
        }
  defstruct [:key, :value]

  field(:key, 1, type: :string)
  field(:value, 2, type: Tensorflow.CollectionDef)
end

# Map-entry type for MetaGraphDef.signature_def (string => SignatureDef).
defmodule Tensorflow.MetaGraphDef.SignatureDefEntry do
  @moduledoc false
  use Protobuf, map: true, syntax: :proto3

  @type t :: %__MODULE__{
          key: String.t(),
          value: Tensorflow.SignatureDef.t() | nil
        }
  defstruct [:key, :value]

  field(:key, 1, type: :string)
  field(:value, 2, type: Tensorflow.SignatureDef)
end
# NOTE(review): generated Protobuf schema (meta_graph.proto) — avoid hand edits.
# Top-level MetaGraphDef message: graph, saver, collections and signatures.
defmodule Tensorflow.MetaGraphDef do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          meta_info_def: Tensorflow.MetaGraphDef.MetaInfoDef.t() | nil,
          graph_def: Tensorflow.GraphDef.t() | nil,
          saver_def: Tensorflow.SaverDef.t() | nil,
          collection_def: %{String.t() => Tensorflow.CollectionDef.t() | nil},
          signature_def: %{String.t() => Tensorflow.SignatureDef.t() | nil},
          asset_file_def: [Tensorflow.AssetFileDef.t()],
          object_graph_def: Tensorflow.SavedObjectGraph.t() | nil
        }
  defstruct [
    :meta_info_def,
    :graph_def,
    :saver_def,
    :collection_def,
    :signature_def,
    :asset_file_def,
    :object_graph_def
  ]

  field(:meta_info_def, 1, type: Tensorflow.MetaGraphDef.MetaInfoDef)
  field(:graph_def, 2, type: Tensorflow.GraphDef)
  field(:saver_def, 3, type: Tensorflow.SaverDef)

  field(:collection_def, 4,
    repeated: true,
    type: Tensorflow.MetaGraphDef.CollectionDefEntry,
    map: true
  )

  field(:signature_def, 5,
    repeated: true,
    type: Tensorflow.MetaGraphDef.SignatureDefEntry,
    map: true
  )

  field(:asset_file_def, 6, repeated: true, type: Tensorflow.AssetFileDef)
  field(:object_graph_def, 7, type: Tensorflow.SavedObjectGraph)
end
# NOTE(review): generated Protobuf schema (meta_graph.proto) — avoid hand edits.
# The CollectionDef.* list variants below carry the per-kind payloads for
# the CollectionDef `kind` oneof at the end of this group.
defmodule Tensorflow.CollectionDef.NodeList do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          value: [String.t()]
        }
  defstruct [:value]

  field(:value, 1, repeated: true, type: :string)
end

defmodule Tensorflow.CollectionDef.BytesList do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          value: [binary]
        }
  defstruct [:value]

  field(:value, 1, repeated: true, type: :bytes)
end

defmodule Tensorflow.CollectionDef.Int64List do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          value: [integer]
        }
  defstruct [:value]

  field(:value, 1, repeated: true, type: :int64, packed: true)
end

defmodule Tensorflow.CollectionDef.FloatList do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          value: [float | :infinity | :negative_infinity | :nan]
        }
  defstruct [:value]

  field(:value, 1, repeated: true, type: :float, packed: true)
end

defmodule Tensorflow.CollectionDef.AnyList do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          value: [Google.Protobuf.Any.t()]
        }
  defstruct [:value]

  field(:value, 1, repeated: true, type: Google.Protobuf.Any)
end

# A named collection: exactly one of the list kinds above is set.
defmodule Tensorflow.CollectionDef do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          kind: {atom, any}
        }
  defstruct [:kind]

  oneof(:kind, 0)
  field(:node_list, 1, type: Tensorflow.CollectionDef.NodeList, oneof: 0)
  field(:bytes_list, 2, type: Tensorflow.CollectionDef.BytesList, oneof: 0)
  field(:int64_list, 3, type: Tensorflow.CollectionDef.Int64List, oneof: 0)
  field(:float_list, 4, type: Tensorflow.CollectionDef.FloatList, oneof: 0)
  field(:any_list, 5, type: Tensorflow.CollectionDef.AnyList, oneof: 0)
end
# NOTE(review): generated Protobuf schema (meta_graph.proto) — avoid hand edits.
# COO-sparse encoding of a tensor: three component tensor names.
defmodule Tensorflow.TensorInfo.CooSparse do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          values_tensor_name: String.t(),
          indices_tensor_name: String.t(),
          dense_shape_tensor_name: String.t()
        }
  defstruct [
    :values_tensor_name,
    :indices_tensor_name,
    :dense_shape_tensor_name
  ]

  field(:values_tensor_name, 1, type: :string)
  field(:indices_tensor_name, 2, type: :string)
  field(:dense_shape_tensor_name, 3, type: :string)
end

# Composite-tensor encoding: a type spec plus component TensorInfos.
defmodule Tensorflow.TensorInfo.CompositeTensor do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          type_spec: Tensorflow.TypeSpecProto.t() | nil,
          components: [Tensorflow.TensorInfo.t()]
        }
  defstruct [:type_spec, :components]

  field(:type_spec, 1, type: Tensorflow.TypeSpecProto)
  field(:components, 2, repeated: true, type: Tensorflow.TensorInfo)
end

# Reference to a tensor: one of name / coo_sparse / composite_tensor,
# plus dtype and shape.
defmodule Tensorflow.TensorInfo do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          encoding: {atom, any},
          dtype: Tensorflow.DataType.t(),
          tensor_shape: Tensorflow.TensorShapeProto.t() | nil
        }
  defstruct [:encoding, :dtype, :tensor_shape]

  oneof(:encoding, 0)
  field(:name, 1, type: :string, oneof: 0)
  field(:coo_sparse, 4, type: Tensorflow.TensorInfo.CooSparse, oneof: 0)

  field(:composite_tensor, 5,
    type: Tensorflow.TensorInfo.CompositeTensor,
    oneof: 0
  )

  field(:dtype, 2, type: Tensorflow.DataType, enum: true)
  field(:tensor_shape, 3, type: Tensorflow.TensorShapeProto)
end
# NOTE(review): generated Protobuf schema (meta_graph.proto) — avoid hand edits.
# Map-entry type for SignatureDef.inputs (string => TensorInfo).
defmodule Tensorflow.SignatureDef.InputsEntry do
  @moduledoc false
  use Protobuf, map: true, syntax: :proto3

  @type t :: %__MODULE__{
          key: String.t(),
          value: Tensorflow.TensorInfo.t() | nil
        }
  defstruct [:key, :value]

  field(:key, 1, type: :string)
  field(:value, 2, type: Tensorflow.TensorInfo)
end

# Map-entry type for SignatureDef.outputs (string => TensorInfo).
defmodule Tensorflow.SignatureDef.OutputsEntry do
  @moduledoc false
  use Protobuf, map: true, syntax: :proto3

  @type t :: %__MODULE__{
          key: String.t(),
          value: Tensorflow.TensorInfo.t() | nil
        }
  defstruct [:key, :value]

  field(:key, 1, type: :string)
  field(:value, 2, type: Tensorflow.TensorInfo)
end

# A computation signature: named input/output tensors and a method name.
defmodule Tensorflow.SignatureDef do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          inputs: %{String.t() => Tensorflow.TensorInfo.t() | nil},
          outputs: %{String.t() => Tensorflow.TensorInfo.t() | nil},
          method_name: String.t()
        }
  defstruct [:inputs, :outputs, :method_name]

  field(:inputs, 1,
    repeated: true,
    type: Tensorflow.SignatureDef.InputsEntry,
    map: true
  )

  field(:outputs, 2,
    repeated: true,
    type: Tensorflow.SignatureDef.OutputsEntry,
    map: true
  )

  field(:method_name, 3, type: :string)
end
# NOTE(review): generated Protobuf schema (meta_graph.proto) — avoid hand edits.
# An external asset file referenced by the graph, with its tensor handle.
defmodule Tensorflow.AssetFileDef do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          tensor_info: Tensorflow.TensorInfo.t() | nil,
          filename: String.t()
        }
  defstruct [:tensor_info, :filename]

  field(:tensor_info, 1, type: Tensorflow.TensorInfo)
  field(:filename, 2, type: :string)
end
|
lib/tensorflow/core/protobuf/meta_graph.pb.ex
| 0.818701
| 0.451871
|
meta_graph.pb.ex
|
starcoder
|
defmodule Tesseract.Geometry.AABB3 do
  @moduledoc """
  Axis-aligned 3D bounding boxes, represented as a `{min_corner, max_corner}`
  pair of `{x, y, z}` tuples.
  """

  alias Tesseract.Math.Vec3

  @type t :: {Vec3.t(), Vec3.t()}

  @doc "Builds a normalized box from two arbitrary opposite corners."
  @spec make(Vec3.t(), Vec3.t()) :: t
  def make(a, b) do
    fix({a, b})
  end

  @doc "Normalizes an existing corner pair (see `fix/1`)."
  @spec make(t) :: t
  def make(a) do
    fix(a)
  end

  @doc """
  Normalizes a box so the first corner holds the per-axis minima and the
  second the per-axis maxima.
  """
  @spec fix(t) :: t
  def fix({{a_x, a_y, a_z}, {b_x, b_y, b_z}}) do
    {
      {min(a_x, b_x), min(a_y, b_y), min(a_z, b_z)},
      {max(a_x, b_x), max(a_y, b_y), max(a_z, b_z)}
    }
  end

  @doc "Returns the center point of the box."
  @spec center(t) :: Vec3.t()
  def center({a, b}) do
    # Midpoint: a + (b - a) / 2.
    diag = b |> Vec3.subtract(a) |> Vec3.scale(0.5)
    a |> Vec3.add(diag)
  end

  @doc "Returns the smallest box containing both input boxes."
  @spec union(t, t) :: t
  def union({{a_x1, a_y1, a_z1}, {a_x2, a_y2, a_z2}}, {{b_x1, b_y1, b_z1}, {b_x2, b_y2, b_z2}}) do
    {
      {min(a_x1, b_x1), min(a_y1, b_y1), min(a_z1, b_z1)},
      {max(a_x2, b_x2), max(a_y2, b_y2), max(a_z2, b_z2)}
    }
  end

  @doc "Returns the smallest box containing every box in the (non-empty) list."
  def union(boxes) when is_list(boxes) do
    boxes |> Enum.reduce(fn container, box -> union(container, box) end)
  end

  @doc """
  Returns the overlapping region of two boxes, or `nil` when they are
  disjoint.
  """
  @spec intersection(t, t) :: t | nil
  def intersection(
        {{a_x1, a_y1, a_z1}, {a_x2, a_y2, a_z2}} = a,
        {{b_x1, b_y1, b_z1}, {b_x2, b_y2, b_z2}} = b
      ) do
    if intersects?(a, b) do
      {
        {max(a_x1, b_x1), max(a_y1, b_y1), max(a_z1, b_z1)},
        {min(a_x2, b_x2), min(a_y2, b_y2), min(a_z2, b_z2)}
      }
    else
      nil
    end
  end

  @doc """
  Whether two boxes overlap (touching faces count as overlapping).

  Works even for non-normalized boxes, since each axis interval is
  re-derived with `min`/`max`.
  """
  @spec intersects?(t, t) :: boolean
  def intersects?(
        {{a_x1, a_y1, a_z1}, {a_x2, a_y2, a_z2}},
        {{b_x1, b_y1, b_z1}, {b_x2, b_y2, b_z2}}
      ) do
    min(a_x1, a_x2) <= max(b_x1, b_x2) && max(a_x1, a_x2) >= min(b_x1, b_x2) &&
      (min(a_y1, a_y2) <= max(b_y1, b_y2) && max(a_y1, a_y2) >= min(b_y1, b_y2)) &&
      (min(a_z1, a_z2) <= max(b_z1, b_z2) && max(a_z1, a_z2) >= min(b_z1, b_z2))
  end

  @doc "Volume of a normalized box (negative if the box is not normalized)."
  @spec volume(t) :: number
  def volume({{x1, y1, z1}, {x2, y2, z2}}) do
    (x2 - x1) * (y2 - y1) * (z2 - z1)
  end

  @doc """
  Volume of the overlap of two boxes; `0` when they are disjoint.
  """
  # Fixed spec: the function returns 0 for disjoint boxes, never nil.
  @spec intersection_volume(t, t) :: number
  def intersection_volume(box_a, box_b) do
    case intersection(box_a, box_b) do
      nil ->
        0

      intersection_box ->
        volume(intersection_box)
    end
  end
end
|
lib/geometry/aabb3.ex
| 0.840292
| 0.544317
|
aabb3.ex
|
starcoder
|
defmodule Zamrazac.FlokiUtil do
  @moduledoc """
  Tools for forcing Floki to do something it really doesn't want to do--transform HTML.
  """

  @doc """
  Function to walk a floki-style dom and patch it up.
  Here patching means discovering referenced images, downloading them, dithering them and encoding a smaller version,
  and finally wrapping the image tag in an anchor to the original image url.
  """
  # NOTE(review): this head returns a bare element tuple unchanged without
  # recursing into its children — presumably a passthrough guard for direct
  # single-node calls; verify that is intended.
  def walk_dom({tag, attributes, children}, _image_storage_path), do: {tag, attributes, children}

  def walk_dom(elements, image_storage_path) when is_list(elements) do
    for element <- elements do
      case element do
        # Text nodes pass through unchanged.
        text when is_binary(text) ->
          text

        # Images: replace src with an inline dithered data URI; remote
        # images are additionally wrapped in an <a> to the original URL.
        {"img", attributes, children} ->
          attrs = attributes_to_keywords(attributes)
          image_src = attrs[:src]
          # IO.inspect("Referenced image #{image_src}", limit: :infinity)

          {dithered_file_encoded, _temp_image_path, is_local} =
            convert_image(image_src, image_storage_path)

          patched_attrs =
            Keyword.put(attrs, :src, dithered_file_encoded) |> keywords_to_attributes()

          if is_local do
            {"img", patched_attrs, walk_dom(children, image_storage_path)}
          else
            {"a", [{"href", image_src}],
             [{"img", patched_attrs, walk_dom(children, image_storage_path)}]}
          end

        # Any other element: recurse into children.
        {tag, attributes, children} ->
          {tag, attributes, walk_dom(children, image_storage_path)}
      end
    end
  end

  @doc """
  Function to convert a keyword-style list made with string keys to having atomic keys.
  """
  # SECURITY NOTE(review): String.to_atom/1 creates atoms from arbitrary
  # HTML attribute names; atoms are never garbage-collected, so hostile
  # input could exhaust the atom table. Consider String.to_existing_atom/1
  # or keeping string keys.
  def attributes_to_keywords(attributes) do
    attributes
    |> Enum.map(fn {key, val} ->
      {String.to_atom(key), val}
    end)
  end

  @doc """
  Function to convert a keyword-style list made with atomic keys to having string keys.
  """
  def keywords_to_attributes(keywords) do
    keywords
    |> Enum.map(fn {key, val} ->
      {Atom.to_string(key), val}
    end)
  end

  @doc """
  Function to do the image download and conversion and dithering given a url and a place to store the artifacts.
  """
  # Returns {data_uri, cached_path, local?}; {"", "", true} when the URL
  # scheme is neither http(s) nor file.
  def convert_image(url, image_storage_path) do
    # Cache file name is derived from the URL hash, so repeat runs reuse it.
    temp_image_name = Zamrazac.Util.shahexhash(url)
    temp_image_path = Path.join(image_storage_path, temp_image_name)
    uri = URI.parse(url)

    cond do
      uri.scheme in ["http", "https"] ->
        temp_dithered_image_path = "#{temp_image_path}_dithered.png"
        ^temp_image_path = maybe_download_image(temp_image_path, url, false)

        {dithered_file_encoded, ^temp_dithered_image_path} =
          maybe_dither_image(temp_dithered_image_path, temp_image_path)

        {dithered_file_encoded, temp_image_path, false}

      uri.scheme == "file" ->
        temp_dithered_image_path = "#{temp_image_path}_dithered.png"

        ^temp_image_path =
          maybe_download_image(temp_image_path, Path.expand(Path.join(uri.host, uri.path)), true)

        {dithered_file_encoded, ^temp_dithered_image_path} =
          maybe_dither_image(temp_dithered_image_path, temp_image_path)

        {dithered_file_encoded, temp_image_path, true}

      true ->
        # IO.inspect("\tFailed to locate image at #{url}...", limit: :infinity)
        {"", "", true}
    end
  end

  @doc """
  Function to download an image from a url and save it somewhere if it isn't already there.
  """
  # Shells out to `cp` (local) or `curl -s -L` (remote); exit status is not
  # checked, so failures surface later when the file is read.
  def maybe_download_image(image_path, url, is_local) do
    case File.exists?(image_path) do
      true ->
        # IO.inspect("\tReusing image #{image_path}...", limit: :infinity)
        image_path

      false ->
        if is_local do
          # IO.inspect("\tCopying local image from #{url} to #{image_path}...", limit: :infinity)
          System.cmd("cp", [url, image_path])
          image_path
        else
          # IO.inspect("\tDownloading image #{image_path}...", limit: :infinity)
          System.cmd("curl", [url, "-s", "-L", "-o", image_path])
          image_path
        end
    end
  end

  @doc """
  Function to convert an image to the dithered form if it doesn't already exist.
  """
  # Uses ImageMagick's `convert` to produce a grayscale ordered-dither PNG,
  # then base64-encodes it as a data URI. Returns {data_uri, image_path}.
  def maybe_dither_image(image_path, source_image_path) do
    case File.exists?(image_path) do
      true ->
        # IO.inspect("\tReusing dithered image #{image_path}...", limit: :infinity)
        nil

      false ->
        # IO.inspect("\tConverting dithered image #{image_path}...", limit: :infinity)
        System.cmd("convert", [
          source_image_path,
          "-quiet",
          "-colorspace",
          "Gray",
          "-ordered-dither",
          "8x8",
          image_path
        ])
    end

    dithered_file_encoded = Zamrazac.Util.get_file_as_data_uri(image_path, "image/png")
    {dithered_file_encoded, image_path}
  end
end
|
lib/floki_util.ex
| 0.659624
| 0.422028
|
floki_util.ex
|
starcoder
|
defmodule Plausible.Timezones do
@moduledoc "https://stackoverflow.com/a/52265733"
@options [
[key: "(GMT-12:00) International Date Line West", value: "Etc/GMT+12", offset: "720"],
[key: "(GMT-11:00) Midway Island, Samoa", value: "Pacific/Midway", offset: "660"],
[key: "(GMT-10:00) Hawaii", value: "Pacific/Honolulu", offset: "600"],
[key: "(GMT-09:00) Alaska", value: "US/Alaska", offset: "540"],
[key: "(GMT-08:00) Pacific Time (US & Canada)", value: "America/Los_Angeles", offset: "480"],
[key: "(GMT-08:00) Tijuana, Baja California", value: "America/Tijuana"],
[key: "(GMT-07:00) Arizona", value: "US/Arizona"],
[key: "(GMT-07:00) Chihuahua, La Paz, Mazatlan", value: "America/Chihuahua"],
[key: "(GMT-07:00) Mountain Time (US & Canada)", value: "US/Mountain", offset: "420"],
[key: "(GMT-06:00) Central America", value: "America/Managua"],
[key: "(GMT-06:00) Central Time (US & Canada)", value: "US/Central", offset: "360"],
[key: "(GMT-06:00) Guadalajara, Mexico City, Monterrey", value: "America/Mexico_City"],
[key: "(GMT-06:00) Saskatchewan", value: "Canada/Saskatchewan"],
[key: "(GMT-05:00) Bogota, Lima, Quito, Rio Branco", value: "America/Bogota"],
[key: "(GMT-05:00) Eastern Time (US & Canada)", value: "US/Eastern", offset: "300"],
[key: "(GMT-05:00) Indiana (East)", value: "US/East-Indiana"],
[key: "(GMT-04:00) Atlantic Time (Canada)", value: "Canada/Atlantic", offset: "240"],
[key: "(GMT-04:00) Caracas, La Paz", value: "America/Caracas"],
[key: "(GMT-04:00) Manaus", value: "America/Manaus"],
[key: "(GMT-04:00) Santiago", value: "America/Santiago"],
[key: "(GMT-03:30) Newfoundland", value: "Canada/Newfoundland"],
[key: "(GMT-03:00) Brasilia", value: "America/Sao_Paulo", offset: "180"],
[key: "(GMT-03:00) Buenos Aires, Georgetown", value: "America/Argentina/Buenos_Aires"],
[key: "(GMT-03:00) Greenland", value: "America/Godthab"],
[key: "(GMT-03:00) Montevideo", value: "America/Montevideo"],
[key: "(GMT-02:00) Mid-Atlantic", value: "America/Noronha", offset: "120"],
[key: "(GMT-01:00) Cape Verde Is.", value: "Atlantic/Cape_Verde", offset: "60"],
[key: "(GMT-01:00) Azores", value: "Atlantic/Azores"],
[key: "(GMT+00:00) Casablanca, Monrovia, Reykjavik", value: "Africa/Casablanca"],
[
key: "(GMT+00:00) Greenwich Mean Time : Dublin, Edinburgh, Lisbon, London",
value: "Etc/Greenwich",
offset: "0"
],
[
key: "(GMT+01:00) Amsterdam, Berlin, Bern, Rome, Stockholm, Vienna",
value: "Europe/Amsterdam",
offset: "-60"
],
[
key: "(GMT+01:00) Belgrade, Bratislava, Budapest, Ljubljana, Prague",
value: "Europe/Belgrade"
],
[key: "(GMT+01:00) Brussels, Copenhagen, Madrid, Paris", value: "Europe/Brussels"],
[key: "(GMT+01:00) Sarajevo, Skopje, Warsaw, Zagreb", value: "Europe/Sarajevo"],
[key: "(GMT+01:00) West Central Africa", value: "Africa/Lagos"],
[key: "(GMT+02:00) Amman", value: "Asia/Amman"],
[key: "(GMT+02:00) Athens, Bucharest, Istanbul", value: "Europe/Athens"],
[key: "(GMT+02:00) Beirut", value: "Asia/Beirut"],
[key: "(GMT+02:00) Cairo", value: "Africa/Cairo"],
[key: "(GMT+02:00) Harare, Pretoria", value: "Africa/Harare"],
[
key: "(GMT+02:00) Helsinki, Kyiv, Riga, Sofia, Tallinn, Vilnius",
value: "Europe/Helsinki",
offset: "-120"
],
[key: "(GMT+02:00) Jerusalem", value: "Asia/Jerusalem"],
[key: "(GMT+02:00) Minsk", value: "Europe/Minsk"],
[key: "(GMT+02:00) Windhoek", value: "Africa/Windhoek"],
[key: "(GMT+03:00) Kuwait, Riyadh, Baghdad", value: "Asia/Kuwait"],
[
key: "(GMT+03:00) Moscow, St. Petersburg, Volgograd",
value: "Europe/Moscow",
offset: "-180"
],
[key: "(GMT+03:00) Nairobi", value: "Africa/Nairobi"],
[key: "(GMT+03:00) Tbilisi", value: "Asia/Tbilisi"],
[key: "(GMT+03:30) Tehran", value: "Asia/Tehran", offset: "-210"],
[key: "(GMT+04:00) A<NAME>, Muscat", value: "Asia/Muscat", offset: "-240"],
[key: "(GMT+04:00) Baku", value: "Asia/Baku"],
[key: "(GMT+04:00) Yerevan", value: "Asia/Yerevan"],
[key: "(GMT+04:30) Kabul", value: "Asia/Kabul", offset: "-270"],
[key: "(GMT+05:00) Yekaterinburg", value: "Asia/Yekaterinburg"],
[key: "(GMT+05:00) Islamabad, Karachi, Tashkent", value: "Asia/Karachi", offset: "-300"],
[
key: "(GMT+05:30) Chennai, Kolkata, Mumbai, New Delhi",
value: "Asia/Calcutta",
offset: "-330"
],
[key: "(GMT+05:30) Sri Jayawardenapura", value: "Asia/Calcutta"],
[key: "(GMT+05:45) Kathmandu", value: "Asia/Katmandu", offset: "-345"],
[key: "(GMT+06:00) Almaty, Novosibirsk", value: "Asia/Almaty", offset: "-360"],
[key: "(GMT+06:00) Astana, Dhaka", value: "Asia/Dhaka"],
[key: "(GMT+06:30) Yangon (Rangoon)", value: "Asia/Rangoon", offset: "-390"],
[key: "(GMT+07:00) Bangkok, Hanoi, Jakarta", value: "Asia/Bangkok", offset: "-420"],
[key: "(GMT+07:00) Krasnoyarsk", value: "Asia/Krasnoyarsk"],
[
key: "(GMT+08:00) Beijing, Chongqing, Hong Kong, Urumqi",
value: "Asia/Hong_Kong",
offset: "-480"
],
[key: "(GMT+08:00) Kuala Lumpur, Singapore", value: "Asia/Kuala_Lumpur"],
[key: "(GMT+08:00) Irkutsk, Ulaan Bataar", value: "Asia/Irkutsk"],
[key: "(GMT+08:00) Perth", value: "Australia/Perth"],
[key: "(GMT+08:00) Taipei", value: "Asia/Taipei"],
[key: "(GMT+09:00) Osaka, Sapporo, Tokyo", value: "Asia/Tokyo", offset: "-540"],
[key: "(GMT+09:00) Seoul", value: "Asia/Seoul"],
[key: "(GMT+09:00) Yakutsk", value: "Asia/Yakutsk"],
[key: "(GMT+09:30) Adelaide", value: "Australia/Adelaide", offset: "-570"],
[key: "(GMT+09:30) Darwin", value: "Australia/Darwin"],
[key: "(GMT+10:00) Brisbane", value: "Australia/Brisbane", offset: "-600"],
[key: "(GMT+10:00) Canberra, Melbourne, Sydney", value: "Australia/Canberra"],
[key: "(GMT+10:00) Hobart", value: "Australia/Hobart"],
[key: "(GMT+10:00) Guam, Port Moresby", value: "Pacific/Guam"],
[key: "(GMT+10:00) Vladivostok", value: "Asia/Vladivostok"],
[
key: "(GMT+11:00) Magadan, Solomon Is., New Caledonia",
value: "Asia/Magadan",
offset: "-660"
],
[key: "(GMT+12:00) Auckland, Wellington", value: "Pacific/Auckland", offset: "-720"],
[key: "(GMT+12:00) Fiji, Kamchatka, Marshall Is.", value: "Pacific/Fiji"],
[key: "(GMT+13:00) Nuku'alofa", value: "Pacific/Tongatapu", offset: "-780"]
]
# Returns the compiled list of timezone choices: keyword lists carrying
# :key (display label), :value (tz database name) and, where present,
# :offset (minutes, as a string).
def options, do: @options
end
|
lib/plausible/timezones.ex
| 0.600305
| 0.713669
|
timezones.ex
|
starcoder
|
defmodule Scanner do
  @moduledoc """
  Aligns beacon reports from multiple scanners (Advent of Code 2021, day 19).

  Each scanner reports beacon coordinates relative to its own position and in
  one of the 24 proper axis-aligned orientations. `reconstruct/1` brings every
  scanner into the frame of the first one by finding, for each unaligned
  scanner, a rotation and translation under which at least 12 of its beacons
  coincide with already-aligned beacons.
  """

  defstruct coords: [],
            offset: [0, 0, 0],
            rotation: [1, 2, 3]

  @type coord :: [integer()]
  @type t :: %__MODULE__{
          coords: [coord()],
          offset: coord(),
          rotation: coord()
        }

  @doc ~S|Parses a comma-separated string such as "1,-2,3" into [1, -2, 3].|
  def split_n_toi(x), do: String.split(x, ",", trim: true) |> Enum.map(&String.to_integer/1)

  @doc "Builds a `Scanner` from its input block: a label line followed by coordinate lines."
  def from_strings([_label | coords]), do: %Scanner{coords: Enum.map(coords, &split_n_toi/1)}

  # The 24 proper rotations of the cube, encoded as signed axis permutations:
  # entry [a, b, c] maps [x1, x2, x3] to [sgn(a)*x|a|, sgn(b)*x|b|, sgn(c)*x|c|].
  # BUGFIX: the original list contained 25 entries; [-3, -1, -2] has
  # determinant -1 (a reflection, not a rotation) and could produce false,
  # mirror-image alignments. It has been removed, leaving exactly the 24
  # proper rotations (identity included).
  defp directions() do
    [
      [1, 2, 3], [-2, 1, 3], [-1, -2, 3], [2, -1, 3],
      [1, -3, 2], [3, 1, 2], [-1, 3, 2], [-3, -1, 2],
      [1, 3, -2], [-3, 1, -2], [-1, -3, -2], [3, -1, -2],
      [1, -2, -3], [2, 1, -3], [-2, 3, -1], [-1, 2, -3],
      [-2, -1, -3], [-3, 2, 1], [-2, -3, 1], [3, -2, 1],
      [2, 3, 1], [3, 2, -1], [-3, -2, -1], [2, -3, -1]
    ]
  end

  @doc "Rotates a scanner (struct clause) or a single coordinate (list clause)."
  def rotate(%Scanner{coords: coords}, rotation),
    do: %Scanner{coords: Enum.map(coords, &rotate(&1, rotation)), rotation: rotation}

  def rotate(coord, [a, b, c]), do: [rotate(coord, a), rotate(coord, b), rotate(coord, c)]
  # Picks the |a|-th component (1-based) and applies the sign of `a`.
  def rotate(coord, a), do: Enum.at(coord, abs(a) - 1) * div(a, abs(a))

  @doc "Returns the scanner in all 24 proper orientations."
  @spec rotations(Scanner.t()) :: [Scanner.t()]
  def rotations(scanner), do: Enum.map(directions(), &rotate(scanner, &1))

  @doc "Component-wise difference `x - y`."
  def dist([x1, x2, x3], [y1, y2, y3]), do: [x1 - y1, x2 - y2, x3 - y3]

  @doc "Component-wise sum `x + y`."
  def mov([x1, x2, x3], [y1, y2, y3]), do: [x1 + y1, x2 + y2, x3 + y3]

  @doc """
  Finds the translation aligning `tf` onto `s`, or nil.

  If at least 12 beacon pairs share the same offset `a - b`, that offset is
  taken as the position of `tf` relative to `s`.
  """
  def find_alignment(s, tf) do
    shifts = for a <- s.coords, b <- tf.coords, do: dist(a, b)
    {most, freq} = Enum.frequencies(shifts) |> Enum.max_by(&elem(&1, 1))
    if freq >= 12, do: most, else: nil
  end

  @doc """
  Struct clause: tries all orientations of `to_fit` against every aligned
  scanner; returns `{distance, rotation}` for the first match, or nil.

  List clause: scans a list of candidates and returns
  `{candidate, distance, rotation}` for the first one that fits.
  """
  def find_fit(aligned, to_fit = %Scanner{}) do
    for tf <- Scanner.rotations(to_fit) do
      for s <- aligned, do: {find_alignment(s, tf), tf.rotation}
    end
    |> List.flatten()
    |> Enum.filter(&(!is_nil(elem(&1, 0))))
    |> List.first()
  end

  def find_fit(aligned, [hd | tl]) do
    fit = find_fit(aligned, hd)

    case fit do
      nil -> find_fit(aligned, tl)
      _ -> {hd, elem(fit, 0), elem(fit, 1)} # fit == {distance, rotation}
    end
  end

  @doc "Re-expresses `fitting` in the aligned frame: rotate, then translate."
  def rotate_and_move(fitting, distance, rotation),
    do: %Scanner{
      offset: distance,
      rotation: rotation,
      coords: Enum.map(fitting.coords, &mov(rotate(&1, rotation), distance))
    }

  @doc "Repeatedly fits one scanner from `to_align` into the aligned set."
  @spec reconstruct([Scanner.t()], [Scanner.t()]) :: [Scanner.t()]
  def reconstruct(aligned, []), do: aligned

  def reconstruct(aligned, to_align) do
    {fitting, distance, rotation} = find_fit(aligned, to_align)
    to_align = to_align -- [fitting]
    aligned = [rotate_and_move(fitting, distance, rotation) | aligned]
    reconstruct(aligned, to_align)
  end

  @doc "Anchors the first scanner and aligns all others to it."
  @spec reconstruct([Scanner.t()]) :: [Scanner.t()]
  def reconstruct([start | rest]), do: reconstruct([start], rest)

  @doc "Reducer that accumulates all beacon coordinates (may contain duplicates)."
  def reduce_scanners(scanner, acc), do: acc ++ scanner.coords

  @doc "Manhattan distance between two coordinates or two scanner offsets."
  def manhattan([a1, a2, a3], [b1, b2, b3]), do: abs(a1 - b1) + abs(a2 - b2) + abs(a3 - b3)
  def manhattan(%Scanner{offset: a}, %Scanner{offset: b}), do: manhattan(a, b)
end
|
lib/scanner.ex
| 0.851968
| 0.776581
|
scanner.ex
|
starcoder
|
defmodule Mix.Tasks.Absinthe.Gen.Type do
  use Mix.Task

  alias Mix.AbsintheGeneratorUtils

  @shortdoc "Generates an absinthe type"

  @moduledoc """
  Generates an Absinthe Type
  ### Options
  #{NimbleOptions.docs(AbsintheGenerator.Type.definitions())}
  ### Specifying Types
  To specify types we can utilize the following syntax
  ```bash
  type_name:enum:VALUE_1:VALUE_2:VALUE_3
  type_name:object:name:string:birthday:date:names:list_of(string)
  ```
  ### Example
  ```bash
  mix absinthe.gen.type
    animal:enum:CAT:DOG
    user:object:name:string:birthday:date:id:id:animal:animal
    --app-name MyApp
    --type-name people
  ```
  """

  @enum_regex ~r/enum:([A-Z]+(?!:)|([A-Z]+:[A-Z]+(?!:))+)/
  @object_regex ~r/object:([a-z]+:[a-z]+(?!:))+$/
  @object_or_enum_regex ~r/[a-z]+:(enum:([A-Z]+(?!:)|([A-Z]+:[A-Z]+(?!:))+)|object:([a-z]+:[a-z]+(?!:))+$)/

  @doc "Entry point for `mix absinthe.gen.type`; parses args and writes the template."
  def run(args) do
    AbsintheGeneratorUtils.ensure_not_in_umbrella!("absinthe.gen.type")

    {args, extra_args} =
      AbsintheGeneratorUtils.parse_path_opts(args, [
        path: :string,
        app_name: :string,
        moduledoc: :string,
        type_name: :string
      ])

    parsed_objects_and_enums =
      extra_args
      |> validate_types_string
      |> parse_object_and_enum_types

    # BUGFIX: Keyword.get/3 evaluates its default argument eagerly, so the
    # fallback path was built (calling Macro.underscore/1, possibly on nil
    # when --app-name/--type-name were omitted) even when --path was given.
    # Keyword.get_lazy/3 only constructs the default when :path is absent.
    path =
      Keyword.get_lazy(args, :path, fn ->
        "./lib/#{Macro.underscore(args[:app_name])}_web/types/#{Macro.underscore(args[:type_name])}.ex"
      end)

    args
    |> Map.new
    |> Map.merge(parsed_objects_and_enums)
    |> serialize_to_type_struct
    |> AbsintheGenerator.Type.run
    |> AbsintheGeneratorUtils.write_template(path)
  end

  # Raises with a helpful message unless every positional argument matches
  # the enum/object grammar; returns the arguments unchanged otherwise.
  defp validate_types_string(type_parts) do
    if type_parts === [] or Enum.all?(type_parts, &Regex.match?(@object_or_enum_regex, &1)) do
      type_parts
    else
      Mix.raise("""
      \n
      Object and Enum format don't match what's expected
      Enums must be formatted according to the following regex
      #{inspect @enum_regex}
      Objects must be formatted according to the following regex
      #{inspect @object_regex}
      Example:
      my_type_name:enum:OPTION_A:OPTION_B
      my_type_name:object:user_name:string:age:integer
      """)
    end
  end

  # Splits each "name:kind:..." spec into enum values or object fields.
  # BUGFIX: the original branched on `type_part =~ "enum"`, a substring test
  # that misclassified object specs whose type name merely contains "enum"
  # (e.g. "enumeration:object:...") and then crashed on the match. We now
  # dispatch on the second segment of the colon-split spec.
  defp parse_object_and_enum_types(type_parts) do
    Enum.reduce(type_parts, %{enums: [], objects: []}, fn type_part, acc_params ->
      case String.split(type_part, ":") do
        [type_name, "enum" | values] ->
          new_value = %AbsintheGenerator.Type.EnumValue{
            name: type_name,
            values: values
          }

          Map.update!(acc_params, :enums, &(&1 ++ [new_value]))

        [type_name, "object" | field_parts] ->
          # Field parts come in name/type pairs, e.g. ["name", "string", ...].
          fields =
            field_parts
            |> Enum.chunk_every(2)
            |> Enum.map(fn [name, type] ->
              %AbsintheGenerator.Type.Object.Field{name: name, type: ":#{type}"}
            end)

          new_value = %AbsintheGenerator.Type.Object{
            name: type_name,
            fields: fields
          }

          Map.update!(acc_params, :objects, &(&1 ++ [new_value]))
      end
    end)
  end

  # Copies parsed params into the generator's input struct.
  defp serialize_to_type_struct(params) do
    %AbsintheGenerator.Type{
      app_name: params[:app_name],
      moduledoc: params[:moduledoc],
      type_name: params[:type_name],
      enums: params[:enums],
      objects: params[:objects]
    }
  end
end
|
lib/mix/tasks/type.ex
| 0.627837
| 0.699639
|
type.ex
|
starcoder
|
defmodule Knigge.Options do
  # Defined before @moduledoc because the moduledoc interpolates these values.
  @defaults [
    delegate_at_runtime?: [only: :test],
    do_not_delegate: [],
    warn: true
  ]

  @moduledoc """
  Specifies valid `Knigge`-options and allows to validate and encapsulate the
  options in a struct.

  `Knigge` differentiates between **required** and _optional_ options:

  ## Required

  `Knigge` requires a way to determine the implementation to delegate to. As
  such it requires one of the following options (but not both):

  - `implementation` directly passes the implementation to delegate to
  - `otp_app` specifies the application for which the implementation has been configured

  If both or neither are given an `ArgumentError` is being raised.

  ## Optional

  These options do not have to be given but control aspects on how `Knigge` does
  delegation:

  ### `behaviour`

  The behaviour for which `Knigge` should generate delegations.

  __Default__: the `use`ing `__MODULE__`.

  ### `config_key`

  The configuration key from which `Knigge` should fetch the implementation.
  Is only used when `otp_app` is passed.

  __Default__: the `use`ing `__MODULE__`

  ### `delegate_at_runtime?`

  A switch to move delegation to runtime, by default `Knigge` does as much work as possible at compile time.

  Accepts:
  - a boolean (`true` always delegate at runtime | `false` always at compile time)
  - one or many environment names (atom or list of atoms) - only delegate at runtime in the given environments
  - `[only: <envs>]` - equivalent to the option above
  - `[except: <envs>]` - only delegates at runtime if the current environment is __not__ contained in the list

  __Default__: `Application.get_env(:knigge, :delegate_at_runtime?, #{
    inspect(@defaults[:delegate_at_runtime?])
  })`

  ### `do_not_delegate`

  A keyword list defining callbacks for which no delegation should happen.

  __Default__: `[]`

  ### `warn`

  Allows to control in which environments `Knigge` should generate warnings, use with care.

  Accepts:
  - a boolean (`true` always warns | `false` never warns)
  - one or many environment names (atom or list of atoms) - only warns in the given environments
  - `[only: <envs>]` - equivalent to the option above
  - `[except: <envs>]` - only warns if the current environment is __not__ contained in the list

  __Default__: `Application.get_env(:knigge, :warn, #{inspect(@defaults[:warn])})`
  """

  import Keyword, only: [has_key?: 2, keyword?: 1]

  @type raw :: [required() | list(optional())]
  @type required :: {:implementation, module()} | {:otp_app, otp_app()}
  @type optional :: [
          behaviour: behaviour(),
          config_key: config_key(),
          delegate_at_runtime?: boolean_or_envs(),
          do_not_delegate: do_not_delegate(),
          warn: boolean_or_envs()
        ]
  @type behaviour :: module()
  @type boolean_or_envs :: boolean() | envs() | [only: envs()] | [except: envs()]
  @type config_key :: atom()
  @type delegate_at :: :compile_time | :runtime
  @type do_not_delegate :: keyword(arity())
  @type envs :: atom() | list(atom())
  @type otp_app :: atom()

  @type t :: %__MODULE__{
          implementation: module() | {:config, otp_app(), config_key()},
          behaviour: behaviour(),
          delegate_at_runtime?: boolean(),
          do_not_delegate: do_not_delegate(),
          warn: boolean()
        }

  defstruct [
    :behaviour,
    :delegate_at_runtime?,
    :do_not_delegate,
    :implementation,
    :warn
  ]

  @doc """
  Checks the validity of the given opts (`validate!/1`), applies defaults and
  puts them into the `#{inspect(__MODULE__)}`-struct.
  """
  @spec new(options :: raw()) :: t()
  def new(opts) do
    # NOTE(review): an explicit `:env` option would later be rejected by
    # `validate_known!/1` as unknown — confirm :env is internal-only.
    env = Keyword.get_lazy(opts, :env, &env/0)

    opts =
      opts
      |> map_deprecated()
      |> validate!()
      |> with_defaults()
      |> transform(with_env: env)

    struct(__MODULE__, opts)
  end

  # Falls back to :prod when Mix is unavailable (e.g. in releases).
  defp env do
    if function_exported?(Mix, :env, 0) do
      Mix.env()
    else
      :prod
    end
  end

  # Rewrites deprecated options to their current equivalents (warning the
  # user) and drops options that are no longer supported.
  defp map_deprecated(opts) when is_list(opts) do
    opts
    |> Enum.map(fn {key, _} = kv ->
      case map_deprecated(kv) do
        ^kv ->
          kv

        {new_key, _} = kv when is_atom(new_key) ->
          IO.warn("Knigge encountered the deprecated option `#{key}`, please use `#{new_key}`.")
          kv

        message when is_binary(message) ->
          IO.warn(
            "Knigge encountered the deprecated option `#{key}`, this option is no longer supported; #{
              message
            }."
          )

          nil
      end
    end)
    |> Enum.reject(&is_nil/1)
  end

  defp map_deprecated({key, _})
       when key in [:check_if_exists, :check_if_exists?],
       do: "please use the mix task `mix knigge.verify`"

  defp map_deprecated({:delegate_at, :compile_time}), do: {:delegate_at_runtime?, false}
  defp map_deprecated({:delegate_at, :runtime}), do: {:delegate_at_runtime?, true}
  defp map_deprecated(other), do: other

  @doc """
  Applies the defaults to the given options:
  #{
    @defaults
    |> Enum.map(fn {key, value} ->
      "  - #{key} = #{inspect(value)}"
    end)
    |> Enum.join("\n")
  }
  """
  @spec with_defaults(raw()) :: raw()
  def with_defaults(opts) do
    @defaults
    |> Keyword.merge(defaults_from_config())
    |> Keyword.merge(opts)
    |> Keyword.put_new_lazy(:implementation, fn ->
      # Resolved lazily at delegation time from the app environment.
      {:config, opts[:otp_app], opts[:config_key]}
    end)
  end

  defp defaults_from_config do
    :knigge
    |> Application.get_all_env()
    |> Keyword.take([:delegate_at_runtime?, :warn])
  end

  # Normalizes env-dependent options (:delegate_at_runtime?, :warn) to plain
  # booleans for the current environment; all other values pass through.
  defp transform(opts, with_env: env) when is_list(opts) do
    for {key, value} <- opts, do: {key, transform(key, value, with_env: env)}
  end

  defp transform(key, envs, with_env: env)
       when key in [:delegate_at_runtime?, :warn],
       do: active_env?(env, envs)

  defp transform(_key, value, with_env: _), do: value

  defp active_env?(_env, boolean) when is_boolean(boolean), do: boolean
  defp active_env?(env, only: envs), do: env in List.wrap(envs)
  defp active_env?(env, except: envs), do: env not in List.wrap(envs)
  defp active_env?(env, envs), do: active_env?(env, only: envs)

  @doc """
  Validates the options passed to `Knigge`. It ensures that the required keys
  are present and that no unknown keys are passed to `Knigge` which might
  indicate a spelling error.

  See the moduledocs for details on required and optional options.

  ## Examples

      iex> Knigge.Options.validate!([1, 2, 3])
      ** (ArgumentError) Knigge expects a keyword list as options, instead received: [1, 2, 3]

      iex> Knigge.Options.validate!([])
      ** (ArgumentError) Knigge expects either the :implementation or the :otp_app option but neither was given.

      iex> Knigge.Options.validate!(implementation: SomeModule)
      [implementation: SomeModule]

      iex> Knigge.Options.validate!(otp_app: :knigge)
      [otp_app: :knigge]

      iex> Knigge.Options.validate!(implementation: SomeModule, otp_app: :knigge)
      ** (ArgumentError) Knigge expects either the :implementation or the :otp_app option but both were given.

      iex> Knigge.Options.validate!(otp_app: :knigge, the_answer_to_everything: 42, another_weird_option: 1337)
      ** (ArgumentError) Knigge received unexpected options: [the_answer_to_everything: 42, another_weird_option: 1337]

      iex> Knigge.Options.validate!(otp_app: "knigge")
      ** (ArgumentError) Knigge received invalid value for `otp_app`. Expected atom but received: "knigge"

      iex> Knigge.Options.validate!(otp_app: :knigge, delegate_at_runtime?: "test")
      ** (ArgumentError) Knigge received invalid value for `delegate_at_runtime?`. Expected boolean or environment (atom or list of atoms) but received: "test"
  """
  # BUGFIX: the spec previously claimed `no_return`, but the function returns
  # the validated opts on success (see the doctests above); it only raises on
  # invalid input.
  @spec validate!(raw()) :: raw() | no_return
  def validate!(opts) do
    validate_keyword!(opts)
    validate_required!(opts)
    validate_known!(opts)
    validate_values!(opts)

    opts
  end

  defp validate_keyword!(opts) do
    unless keyword?(opts) do
      raise ArgumentError,
            "Knigge expects a keyword list as options, instead received: #{inspect(opts)}"
    end

    :ok
  end

  # Exactly one of :implementation / :otp_app must be present.
  defp validate_required!(opts) do
    case {has_key?(opts, :implementation), has_key?(opts, :otp_app)} do
      {false, false} ->
        raise ArgumentError,
              "Knigge expects either the :implementation or the :otp_app option but neither was given."

      {true, true} ->
        raise ArgumentError,
              "Knigge expects either the :implementation or the :otp_app option but both were given."

      _ ->
        :ok
    end
  end

  defp validate_known!(opts) do
    opts
    |> Enum.reject(&known?/1)
    |> case do
      [] ->
        :ok

      unknown ->
        raise ArgumentError, "Knigge received unexpected options: #{inspect(unknown)}"
    end
  end

  defp validate_values!(opts) do
    opts
    |> Enum.reject(&valid_value?/1)
    |> case do
      [] ->
        :ok

      [{name, value} | _] ->
        raise ArgumentError,
              "Knigge received invalid value for `#{name}`. " <>
                "Expected #{expected_value(name)} but received: #{inspect(value)}"
    end
  end

  # Maps each known option to the kind of value it accepts.
  @option_types [
    behaviour: :module,
    delegate_at_runtime?: :envs,
    do_not_delegate: :keyword,
    implementation: :module,
    otp_app: :atom,
    config_key: :atom,
    warn: :envs
  ]
  @option_names Keyword.keys(@option_types)

  defp known?({name, _}), do: name in @option_names

  defp valid_value?({name, value}) do
    @option_types
    |> Keyword.fetch!(name)
    |> valid_value?(value)
  end

  defp valid_value?(:atom, value), do: is_atom(value)
  defp valid_value?(:boolean, value), do: is_boolean(value)
  defp valid_value?(:module, value), do: is_atom(value)
  defp valid_value?(:keyword, value), do: Keyword.keyword?(value)
  defp valid_value?(:envs, only: envs), do: valid_envs?(envs)
  defp valid_value?(:envs, except: envs), do: valid_envs?(envs)
  defp valid_value?(:envs, envs), do: valid_envs?(envs)

  defp valid_envs?(envs) do
    is_boolean(envs) or is_atom(envs) or (is_list(envs) and Enum.all?(envs, &is_atom/1))
  end

  # Human-readable description of the expected value, used in error messages.
  defp expected_value(name) do
    case Keyword.fetch!(@option_types, name) do
      :envs ->
        "boolean or environment (atom or list of atoms)"

      :keyword ->
        "keyword list"

      other ->
        to_string(other)
    end
  end
end
|
lib/knigge/options.ex
| 0.844377
| 0.584894
|
options.ex
|
starcoder
|
defmodule Enki do
  @moduledoc """
  Enki is a simple queue that provides Mnesia persistence across nodes and
  `ttf` (time-to-flight) capability.

  Time-to-flight means that, when dequeuing a message, if the dequeue is not
  ack'd within a given period of time, the message is automatically added
  back to the queue. This ensures that no messages are lost.

  Queues must be created by calling Enki.init and passing a list of model
  module names. Each model must be created as

      defmodule MyApp.MyModel do
        use Enki.Message,
          attributes: [:attr1, :attr2, :attr3]
      end

  This replaces the need to use `defstruct`, as it ensures the correct
  Enki meta is used.

  ## Examples

      Enki.init([MyModel])
      Enki.enq(%MyModel{a: 1, b: 2})
      %MyModel{enki_id: id, a: 1, b: 2}} = Enki.deq(MyModel)
      :ok = Enki.ack(id)
  """
  @moduledoc since: "0.1.0"

  alias Enki.Message

  defmodule Counter do
    @moduledoc false
    use Agent

    def start_link(_opts) do
      Agent.start_link(fn -> 0 end, name: __MODULE__)
    end

    # Returns a fresh, monotonically increasing integer.
    # BUGFIX: the original used `Agent.get(__MODULE__, &(&1 + 1))`, which
    # computes n + 1 without ever storing it — the agent state stayed 0 and
    # every call returned 1. get_and_update both returns and persists the
    # incremented value.
    def next_value() do
      Agent.get_and_update(__MODULE__, fn n -> {n + 1, n + 1} end)
    end
  end

  # Name of the DynamicSupervisor that owns in-flight message watchers.
  @sup Enki.SupervisedClients

  @doc """
  Enki application start method.

  Gets called automatically when included in your `mix`
  `applications` list.
  """
  def start(_type, _args) do
    children = [
      # `use Agent` gives Counter a child_spec/1; the previously used
      # Supervisor.Spec.worker/2 has been deprecated for years.
      {Counter, []},
      {DynamicSupervisor, strategy: :one_for_one, name: @sup}
    ]

    Supervisor.start_link(children,
      strategy: :one_for_one,
      name: Enki.Supervisor,
      max_restarts: 10_000
    )
  end

  @doc """
  Initialises the Queues.

  ## Example:

      Enki.init([MyModel, MyOtherModel])

  ## Parameters

  | name | description |
  | ---- | ----------- |
  | `types` | A list of model instances to use as queue types (required). |
  """
  @spec init(list(atom())) :: :ok | no_return
  def init(types) do
    if file_persist() do
      # Disc copies need a schema; Mnesia must be stopped to create one.
      Memento.stop()
      Memento.Schema.create(nodes())
      Memento.start()
      maybe_create_tables(types, disc_copies: nodes())
    else
      maybe_create_tables(types)
    end

    :ok
  end

  @doc """
  Adds a message to the queue.

  Returns a Message instance containing the `enki_id` of the message
  on the queue and the message itself as a `payload`.

  ## Parameters

  | name | description |
  | ---- | ----------- |
  | `message` | The model instance to queue (required). |
  """
  @spec enq(Message.t()) :: Message.t()
  def enq(message) do
    Memento.transaction!(fn ->
      # Counter prefix keeps ids ordered; UUID suffix keeps them unique
      # across restarts.
      id = Counter.next_value()

      Map.put(message, :enki_id, "#{id}_#{UUID.uuid4(:hex)}")
      |> Memento.Query.write()
    end)
  end

  @doc """
  Dequeues a message from the queue.

  Returns the message in a `Message` model as its
  `payload` parameter. The message is typically the oldest in
  the queue.

  ## Parameters

  | name | description |
  | ---- | ----------- |
  | `queue` | The module type (atom) of the message to dequeue (required). |
  | `ttf` | The time-to-flight for the message. If provided, verrides the message in the config (optional). |
  """
  def deq(queue, ttf \\ nil) do
    # When the queue is empty deq_/1 yields nil, the match fails and the
    # `with` returns nil. Otherwise an Enki.InFlight watcher is started that
    # re-queues the message unless it is ack'd within `ttf` milliseconds.
    with %{enki_id: id} = message <- deq_(queue),
         {:ok, _pid} <-
           DynamicSupervisor.start_child(
             @sup,
             Supervisor.child_spec(
               {Enki.InFlight,
                message: message, ttf: ttf || time_to_flight(), id: child_name(id)},
               id: child_name(id),
               restart: :transient
             )
           ) do
      message
    end
  end

  @doc """
  Acknowledges a dequeued message.

  If the message is in-flight, it will not be re-added to the queue
  after the alotted `ttf`.

  ## Parameters

  | name | description |
  | ---- | ----------- |
  | `id` | The `id` of the message to acknowledge (required). |
  """
  def ack(id) do
    child_exit(id)
  end

  @doc """
  Retrieves a message by `id` without dequeuing.

  Recalling a message directly does NOT put it in flight.

  ## Parameters

  | name | description |
  | ---- | ----------- |
  | `queue` | The module type (atom) of the message to retrieve (required). |
  | `id` | The `id` of the message to retrieve (required). |
  """
  def get(queue, id) do
    Memento.transaction!(fn ->
      Memento.Query.read(queue, id)
    end)
  end

  @doc """
  Deletes a message by `id`.

  Directly deletes a message in the queue.

  ## Parameters

  | name | description |
  | ---- | ----------- |
  | `queue` | The module type (atom) of the message to delete (required). |
  | `id` | The `id` of the message to delete (required). |
  """
  def delete(queue, id) do
    # NOTE(review): get/2 opens its own transaction inside this one — relies
    # on Mnesia's nested-transaction support; confirm Memento permits it.
    Memento.transaction!(fn ->
      case get(queue, id) do
        %{enki_id: id} ->
          # Cancel any in-flight watcher first so the ttf cannot re-queue.
          child_exit(id)
          Memento.Query.delete(queue, id)

        _ ->
          :ok
      end
    end)
  end

  @doc """
  Delete all messages in the queue.

  Any in-flight messages are cancelled, so messages are not
  added back to the queue.

  ## Parameters

  | name | description |
  | ---- | ----------- |
  | `queue` | The module type (atom) of the messages to delete (required). |
  """
  def delete_all(queue) do
    Memento.transaction!(fn ->
      Memento.Query.all(queue)
      |> Enum.each(fn rec ->
        child_exit(rec.enki_id)
        Memento.Query.delete_record(rec)
      end)
    end)
  end

  @doc false
  def child_exists?(id),
    do:
      child_name(id)
      |> Process.whereis()
      |> is_alive?()

  @doc false
  def monitor(id),
    do:
      child_name(id)
      |> Process.whereis()
      |> monitor_()

  # Creates each table only if it does not already exist; Table.info/1 exits
  # for unknown tables, which we treat as "create it".
  defp maybe_create_tables(types, opts \\ []) do
    Enum.each(types, fn t ->
      try do
        Memento.Table.info(t)
      catch
        :exit, _ -> Memento.Table.create!(t, opts)
      end
    end)
  end

  # Pops the first record (oldest by select order) or nil when empty.
  defp deq_(queue) do
    Memento.transaction!(fn ->
      with [%{} = msg] <- Memento.Query.select(queue, [], limit: 1),
           _ <- Memento.Query.delete_record(msg) do
        msg
      else
        [] ->
          nil
      end
    end)
  end

  # Stops the in-flight watcher for `id` if one is running; idempotent.
  defp child_exit(id) do
    if child_name(id) |> Process.whereis() |> is_alive?() do
      GenServer.stop(child_name(id))
    end

    :ok
  end

  # NOTE(review): creates an atom per message id — unbounded atom creation
  # if ids are unbounded; consider Registry-based naming.
  defp child_name(id),
    do: "enki_#{inspect(id)}" |> String.to_atom()

  defp is_alive?(pid) when is_pid(pid),
    do: Process.alive?(pid)

  defp is_alive?(_),
    do: false

  defp monitor_(pid) when is_pid(pid),
    do: {Process.monitor(pid), pid}

  defp monitor_(_),
    do: nil

  defp nodes(),
    do: [node() | Node.list()]

  # Default time-to-flight in milliseconds.
  defp time_to_flight(),
    do: Application.get_env(:enki, :ttf, 5000)

  defp file_persist(),
    do: Application.get_env(:enki, :file_persist, false)
end
|
lib/enki.ex
| 0.887052
| 0.532
|
enki.ex
|
starcoder
|
defmodule Knigge.Code do
  @moduledoc """
  Internal module responsible of injecting the delegations into the calling module.
  Injects the actual delegations to the implementing module. For this it gets
  registered as a `before_compile`-hook from where it fetches all callbacks from
  the behaviour and generates delegating functions for each callback.
  It also defines the `defdefault` macro.
  """

  alias Knigge.Code.{Default, Delegate}
  alias Knigge.Error
  alias Knigge.Warnings, as: Warn

  # Records a default implementation for a callback. The head and body are
  # escaped and stashed in the `@__knigge__` module attribute; `generate/2`
  # later turns them into functions for callbacks without a definition.
  defmacro defdefault({_name, _meta, _args} = definition, do: block) do
    do_defdefault(definition, do: block)
  end

  defp do_defdefault({name, _meta, args}, do: block) when is_list(args) do
    # Keyed by {function_name, arity} so it can be matched against callbacks.
    key = {name, length(args)}
    value = {Macro.escape(args), Macro.escape(block)}

    quote do
      @__knigge__ {:defdefault, {unquote(key), unquote(value)}}
    end
  end

  # The `args` are not a list for definitions like `defdefault my_default, do: :ok`
  # where no parenthesis follow after `my_default`
  defp do_defdefault({name, meta, _args}, do: block) do
    do_defdefault({name, meta, []}, do: block)
  end

  # Runs in the context of the `use`ing module just before compilation,
  # when its definitions and attributes are still inspectable.
  defmacro __before_compile__(%{module: module} = env) do
    generate(module, env)
  end

  # TODO: Refactor this into a separate module and pass in the arguments as a struct to make it more easily testable
  # Builds the quoted delegations: one AST fragment (or :ok) per callback.
  def generate(module, env) do
    behaviour = get_behaviour(module, env)
    callbacks = get_callbacks(behaviour)
    optional_callbacks = get_optional_callbacks(behaviour)
    delegate_at_runtime? = get_option(module, :delegate_at_runtime?)
    do_not_delegate = get_option(module, :do_not_delegate)
    definitions = get_definitions(module)
    defaults = get_defaults(module)

    for callback <- callbacks do
      cond do
        # Explicitly excluded via the :do_not_delegate option.
        callback in do_not_delegate ->
          :ok

        # The module already defines a matching function; warn rather than
        # emit a clashing delegation.
        callback in definitions ->
          Warn.definition_matching_callback(module, callback)

        # A `defdefault` exists — only optional callbacks may carry one.
        has_default?(defaults, callback) ->
          unless callback in optional_callbacks do
            Error.default_for_required_callback!(env)
          end

          for default <- get_defaults(defaults, callback) do
            Default.callback_to_defdefault(callback,
              from: module,
              default: default,
              delegate_at_runtime?: delegate_at_runtime?
            )
          end

        # Plain case: delegate the callback to the implementation.
        true ->
          Delegate.callback_to_defdelegate(callback,
            from: module,
            delegate_at_runtime?: delegate_at_runtime?
          )
      end
    end
  end

  # Resolves the behaviour module from the options and asserts it exists.
  defp get_behaviour(module, env) do
    module
    |> Knigge.options!()
    |> Knigge.Behaviour.fetch!()
    |> Knigge.Module.ensure_exists!(env)
  end

  defp get_callbacks(module) do
    Knigge.Behaviour.callbacks(module)
  end

  defp get_optional_callbacks(module) do
    Knigge.Behaviour.optional_callbacks(module)
  end

  defp get_option(module, key) do
    module
    |> Knigge.options!()
    |> Map.get(key)
  end

  defp get_definitions(module) do
    Module.definitions_in(module)
  end

  # All recorded {key, value} defdefault entries from the module attribute.
  defp get_defaults(module) do
    module
    |> Module.get_attribute(:__knigge__)
    |> Keyword.get_values(:defdefault)
  end

  defp has_default?(defaults, callback) do
    default = Enum.find(defaults, &match?({^callback, _}, &1))

    not is_nil(default)
  end

  # All defdefault clauses for one callback, reversed — presumably the
  # attribute accumulates in reverse registration order (confirm against the
  # @__knigge__ registration).
  defp get_defaults(defaults, callback) do
    defaults
    |> Enum.filter(&match?({^callback, _}, &1))
    |> Enum.map(fn {_, default} -> default end)
    |> Enum.reverse()
  end
end
|
lib/knigge/code.ex
| 0.593491
| 0.449574
|
code.ex
|
starcoder
|
defmodule SPARQL.Functions.Builtins do
require Logger
alias RDF.{IRI, BlankNode, Literal, XSD, NS}
@doc """
Value equality
see
- <https://www.w3.org/TR/sparql11-query/#OperatorMapping>
- <https://www.w3.org/TR/sparql11-query/#func-RDFterm-equal>
"""
def call(:=, [left, right], _) do
  # Wrap the comparison result as an effective-boolean-value literal.
  ebv(RDF.Term.equal_value?(left, right))
end

@doc """
Value inequality
see
- <https://www.w3.org/TR/sparql11-query/#OperatorMapping>
- <https://www.w3.org/TR/sparql11-query/#func-RDFterm-equal>
"""
def call(:!=, [left, right], _) do
  # Negate the value-equality result (fn_not handles the wrapping).
  fn_not(RDF.Term.equal_value?(left, right))
end

@doc """
`sameTerm` equality
see <https://www.w3.org/TR/sparql11-query/#func-sameTerm>
"""
def call(:sameTerm, [left, right], _) do
  # Term identity, not value equality: "1"^^xsd:int != "01"^^xsd:int here.
  ebv(RDF.Term.equal?(left, right))
end
@doc """
Less-than operator.
see
- <https://www.w3.org/TR/sparql11-query/#OperatorMapping>
"""
def call(:<, [%Literal{} = left, %Literal{} = right], _) do
  # Literal.compare/2 yields :lt/:eq/:gt, or nil when the literals are not
  # comparable; the nil is passed through to ebv/1 (defined later in this
  # module) — presumably mapped to :error there; confirm against ebv/1.
  case Literal.compare(left, right) do
    :lt -> true
    nil -> nil
    _ -> false
  end
  |> ebv()
end

# Non-literal operands (including a propagated :error) are a type error.
def call(:<, _, _), do: :error

@doc """
Greater-than operator.
see
- <https://www.w3.org/TR/sparql11-query/#OperatorMapping>
"""
def call(:>, [%Literal{} = left, %Literal{} = right], _) do
  case Literal.compare(left, right) do
    :gt -> true
    nil -> nil
    _ -> false
  end
  |> ebv()
end

def call(:>, _, _), do: :error

@doc """
Greater-or-equal operator.
see
- <https://www.w3.org/TR/sparql11-query/#OperatorMapping>
"""
# Unlike :< / :>, this returns XSD boolean literals directly (no ebv/1);
# incomparable literals (nil from compare/2) hit the `_` clause -> :error.
def call(:>=, [%Literal{} = left, %Literal{} = right], _) do
  case Literal.compare(left, right) do
    :gt -> XSD.true
    :eq -> XSD.true
    :lt -> XSD.false
    _ -> :error
  end
end

def call(:>=, _, _), do: :error

@doc """
Less-or-equal operator.
see
- <https://www.w3.org/TR/sparql11-query/#OperatorMapping>
"""
def call(:<=, [%Literal{} = left, %Literal{} = right], _) do
  case Literal.compare(left, right) do
    :lt -> XSD.true
    :eq -> XSD.true
    :gt -> XSD.false
    _ -> :error
  end
end

def call(:<=, _, _), do: :error
@doc """
Logical `NOT`
Returns `RDF.XSD.true` if the effective boolean value of the given argument is
`RDF.XSD.false`, or `RDF.XSD.false` if it is `RDF.XSD.true`. Otherwise it returns `error`.
see <http://www.w3.org/TR/xpath-functions/#func-not>
"""
def call(:!, [argument], _), do: fn_not(argument)

@doc """
Numeric unary plus.
see <http://www.w3.org/TR/xpath-functions/#func-numeric-unary-plus>
"""
# Identity on numeric literals; anything non-numeric is a type error.
def call(:+, [number], _) do
  if XSD.Numeric.datatype?(number), do: number, else: :error
end

@doc """
Numeric unary minus.
see <http://www.w3.org/TR/xpath-functions/#func-numeric-unary-minus>
"""
# Negation implemented as multiplication by -1.
def call(:-, [number], _) do
  if XSD.Numeric.datatype?(number), do: XSD.Numeric.multiply(number, -1), else: :error
end

@doc """
Numeric addition.
see <http://www.w3.org/TR/xpath-functions/#func-numeric-add>
"""
# XSD.Numeric operations return a falsy value for non-numeric input,
# which is normalized to :error.
def call(:+, [left, right], _), do: XSD.Numeric.add(left, right) || :error

@doc """
Numeric subtraction.
see <http://www.w3.org/TR/xpath-functions/#func-numeric-subtract>
"""
def call(:-, [left, right], _), do: XSD.Numeric.subtract(left, right) || :error

@doc """
Numeric multiplication.
see <http://www.w3.org/TR/xpath-functions/#func-numeric-multiply>
"""
def call(:*, [left, right], _), do: XSD.Numeric.multiply(left, right) || :error

@doc """
Numeric division.
see <http://www.w3.org/TR/xpath-functions/#func-numeric-divide>
"""
def call(:/, [left, right], _), do: XSD.Numeric.divide(left, right) || :error
@doc """
Checks if the given argument is an IRI.
see <https://www.w3.org/TR/sparql11-query/#func-isIRI>
"""
def call(:isIRI, [%IRI{}], _), do: XSD.true
# A propagated evaluation error stays an error.
def call(:isIRI, [:error], _), do: :error
def call(:isIRI, _, _), do: XSD.false

@doc """
Checks if the given argument is an IRI.
see <https://www.w3.org/TR/sparql11-query/#func-isIRI>
"""
# isURI is the SPARQL-mandated alias for isIRI.
def call(:isURI, args, execution), do: call(:isIRI, args, execution)

@doc """
Checks if the given argument is a blank node.
see <https://www.w3.org/TR/sparql11-query/#func-isBlank>
"""
def call(:isBLANK, [%BlankNode{}], _), do: XSD.true
def call(:isBLANK, [:error], _), do: :error
def call(:isBLANK, _, _), do: XSD.false

@doc """
Checks if the given argument is a RDF literal.
see <https://www.w3.org/TR/sparql11-query/#func-isLiteral>
"""
def call(:isLITERAL, [%Literal{}], _), do: XSD.true
def call(:isLITERAL, [:error], _), do: :error
def call(:isLITERAL, _, _), do: XSD.false

@doc """
Checks if the given argument is a RDF literal with a numeric datatype.
see <https://www.w3.org/TR/sparql11-query/#func-isNumeric>
"""
# Both conditions matter: the literal must carry a numeric datatype AND have
# a valid lexical form (e.g. "abc"^^xsd:integer is not numeric).
def call(:isNUMERIC, [%Literal{} = literal], _) do
  if XSD.Numeric.datatype?(literal) and Literal.valid?(literal) do
    XSD.true
  else
    XSD.false
  end
end

def call(:isNUMERIC, [:error], _), do: :error
def call(:isNUMERIC, _, _), do: XSD.false
@doc """
Returns the lexical form of a literal or the codepoint representation of an IRI.
see <https://www.w3.org/TR/sparql11-query/#func-str>
"""
def call(:STR, [%Literal{} = literal], _), do: literal |> to_string() |> XSD.string()
def call(:STR, [%IRI{} = iri], _), do: iri |> to_string() |> XSD.string()
def call(:STR, _, _), do: :error

@doc """
Returns the language tag of language tagged literal.
It returns `~L""` if the given literal has no language tag. Note that the RDF
data model does not include literals with an empty language tag.
see <https://www.w3.org/TR/sparql11-query/#func-lang>
"""
# Literal.language/1 returning nil becomes "" via to_string/1.
def call(:LANG, [%Literal{} = literal], _),
  do: literal |> Literal.language() |> to_string() |> XSD.string()

def call(:LANG, _, _), do: :error

@doc """
Returns the datatype IRI of a literal.
see <https://www.w3.org/TR/sparql11-query/#func-datatype>
"""
def call(:DATATYPE, [%Literal{} = literal], _), do: Literal.datatype_id(literal)
def call(:DATATYPE, _, _), do: :error

@doc """
Constructs a literal with lexical form and type as specified by the arguments.
see <https://www.w3.org/TR/sparql11-query/#func-strdt>
"""
# Only simple (xsd:string) literals may be retyped.
def call(:STRDT, [%Literal{literal: %XSD.String{}} = literal, %IRI{} = datatype], _) do
  literal |> Literal.lexical() |> Literal.new(datatype: datatype)
end

def call(:STRDT, _, _), do: :error

@doc """
Constructs a literal with lexical form and language tag as specified by the arguments.
see <https://www.w3.org/TR/sparql11-query/#func-strlang>
"""
def call(:STRLANG, [%Literal{literal: %XSD.String{}} = lexical_form_literal,
                    %Literal{literal: %XSD.String{}} = language_literal], _) do
  language = language_literal |> to_string() |> String.trim()

  # An empty (or whitespace-only) language tag is not representable in RDF.
  if language != "" do
    RDF.LangString.new(to_string(lexical_form_literal), language: language)
  else
    :error
  end
end

def call(:STRLANG, _, _), do: :error

@doc """
Constructs an IRI from the given string argument.
It constructs an IRI by resolving the string argument (see RFC 3986 and RFC 3987
or any later RFC that supersedes RFC 3986 or RFC 3987). The IRI is resolved
against the base IRI of the query and must result in an absolute IRI.
see <https://www.w3.org/TR/sparql11-query/#func-iri>
"""
# IRI.absolute/2 returns a falsy value when resolution fails -> :error.
def call(:IRI, [%Literal{literal: %XSD.String{}} = literal], execution) do
  literal |> to_string() |> IRI.absolute(Map.get(execution, :base)) || :error
end

# An IRI argument passes through unchanged.
def call(:IRI, [%IRI{} = iri], _), do: iri
def call(:IRI, _, _), do: :error
@doc """
Constructs an IRI from the given string argument.
Alias for `IRI`.
see <https://www.w3.org/TR/sparql11-query/#func-iri>
"""
# NOTE: the original doc here wrongly described the isIRI predicate; URI is
# the constructor alias, as the delegation below shows.
def call(:URI, args, execution), do: call(:IRI, args, execution)

@doc """
Constructs a blank node.
The constructed blank node is distinct from all blank nodes in the dataset
being queried and distinct from all blank nodes created by calls to this
constructor for other query solutions.
If the no argument form is used, every call results in a distinct blank node.
If the form with a simple literal is used, every call results in distinct
blank nodes for different simple literals, and the same blank node for calls
with the same simple literal within expressions for one solution mapping.
see <https://www.w3.org/TR/sparql11-query/#func-bnode>
"""
def call(:BNODE, [], %{bnode_generator: generator}) do
  BlankNode.Generator.generate(generator)
end

# Keyed by {solution_id, label} so the same label within one solution
# mapping yields the same node (per the spec text above).
def call(:BNODE, [%Literal{literal: %XSD.String{}} = literal],
         %{bnode_generator: generator, solution_id: solution_id}) do
  BlankNode.Generator.generate_for(generator, {solution_id, to_string(literal)})
end

def call(:BNODE, _, _), do: :error

@doc """
Return a fresh IRI from the UUID URN scheme.
Each call of UUID() returns a different UUID.
Currently, UUID v4 ids according to RFC 4122 are produced.
see <https://www.w3.org/TR/sparql11-query/#func-uuid>
"""
# uuid/1 is a private helper defined later in this module (out of view here).
def call(:UUID, [], _), do: uuid(:urn) |> IRI.new()
def call(:UUID, _, _), do: :error

@doc """
Return a string literal that is the scheme specific part of UUID.
Currently, UUID v4 ids according to RFC 4122 are produced.
see <https://www.w3.org/TR/sparql11-query/#func-struuid>
"""
def call(:STRUUID, [], _), do: uuid(:default) |> XSD.string()
def call(:STRUUID, _, _), do: :error
@doc """
Returns an `xsd:integer` with the number of characters in the lexical form of a literal.

see:
- <https://www.w3.org/TR/sparql11-query/#func-strlen>
- <http://www.w3.org/TR/xpath-functions/#func-string-length>
"""
def call(:STRLEN, [%Literal{literal: %datatype{}} = literal], _)
    when datatype in [XSD.String, RDF.LangString] do
  literal
  |> to_string()
  |> String.length()
  |> XSD.integer()
end

def call(:STRLEN, _, _), do: :error
@doc """
Returns a portion of a string.

The arguments startingLoc and length may be derived types of `xsd:integer`. The
index of the first character in a strings is 1.

Returns a literal of the same kind (simple literal, literal with language tag,
xsd:string typed literal) as the source input parameter but with a lexical form
formed from the substring of the lexical form of the source.

The substr function corresponds to the XPath `fn:substring` function.

see:
- <https://www.w3.org/TR/sparql11-query/#func-substr>
- <http://www.w3.org/TR/xpath-functions/#func-substring>
"""
# Two-argument form: substring from startingLoc to the end of the string.
def call(:SUBSTR, [%Literal{literal: %source_datatype{}} = source, %Literal{} = starting_loc], _)
    when source_datatype in [XSD.String, RDF.LangString] do
  if XSD.Integer.valid?(starting_loc) do
    # Literal.update/2 preserves the kind of the source literal (simple,
    # language-tagged or xsd:string) while replacing its lexical form.
    Literal.update(source, fn source_string ->
      # SPARQL indexes from 1, String.slice from 0 — hence the -1.
      # NOTE(review): `start..-1` without an explicit step is deprecated in
      # recent Elixir; consider `..-1//1` when upgrading.
      String.slice(source_string, (XSD.Integer.value(starting_loc) - 1) .. -1)
    end)
  else
    :error
  end
end

# Three-argument form: substring of the given length starting at startingLoc.
def call(:SUBSTR, [%Literal{literal: %source_datatype{}} = source,
                   %Literal{} = starting_loc, %Literal{} = length], _)
    when source_datatype in [XSD.String, RDF.LangString] do
  if XSD.Integer.valid?(starting_loc) and XSD.Integer.valid?(length) do
    Literal.update(source, fn source_string ->
      String.slice(source_string, (XSD.Integer.value(starting_loc) - 1), XSD.Integer.value(length))
    end)
  else
    :error
  end
end

def call(:SUBSTR, _, _), do: :error
@doc """
Returns a string literal whose lexical form is the upper case of the lexical form of the argument.

The kind of the argument literal (simple, language-tagged or xsd:string) is preserved.
The UCASE function corresponds to the XPath `fn:upper-case` function.

see:
- <https://www.w3.org/TR/sparql11-query/#func-ucase>
- <http://www.w3.org/TR/xpath-functions/#func-upper-case>
"""
def call(:UCASE, [%Literal{literal: %datatype{}} = str], _)
    when datatype in [XSD.String, RDF.LangString],
    do: Literal.update(str, fn lexical -> String.upcase(lexical) end)

def call(:UCASE, _, _), do: :error

@doc """
Returns a string literal whose lexical form is the lower case of the lexical form of the argument.

The kind of the argument literal (simple, language-tagged or xsd:string) is preserved.
The LCASE function corresponds to the XPath `fn:lower-case` function.

see:
- <https://www.w3.org/TR/sparql11-query/#func-lcase>
- <http://www.w3.org/TR/xpath-functions/#func-lower-case>
"""
def call(:LCASE, [%Literal{literal: %datatype{}} = str], _)
    when datatype in [XSD.String, RDF.LangString],
    do: Literal.update(str, fn lexical -> String.downcase(lexical) end)

def call(:LCASE, _, _), do: :error
@doc """
Returns true if the lexical form of arg1 starts with the lexical form of arg2, otherwise it returns false.

The STRSTARTS function corresponds to the XPath `fn:starts-with` function.
The arguments must be `compatible_arguments?/2` otherwise `:error` is returned.

see:
- <https://www.w3.org/TR/sparql11-query/#func-strstarts>
- <http://www.w3.org/TR/xpath-functions/#func-starts-with>
"""
def call(:STRSTARTS, [arg1, arg2], _) do
  cond do
    not compatible_arguments?(arg1, arg2) -> :error
    arg1 |> to_string() |> String.starts_with?(to_string(arg2)) -> XSD.true
    true -> XSD.false
  end
end

def call(:STRSTARTS, _, _), do: :error
@doc """
Returns true if the lexical form of arg1 ends with the lexical form of arg2, otherwise it returns false.

The STRENDS function corresponds to the XPath `fn:ends-with` function.
The arguments must be `compatible_arguments?/2` otherwise `:error` is returned.

see:
- <https://www.w3.org/TR/sparql11-query/#func-strends>
- <http://www.w3.org/TR/xpath-functions/#func-ends-with>
"""
def call(:STRENDS, [arg1, arg2], _) do
  cond do
    not compatible_arguments?(arg1, arg2) -> :error
    arg1 |> to_string() |> String.ends_with?(to_string(arg2)) -> XSD.true
    true -> XSD.false
  end
end

def call(:STRENDS, _, _), do: :error
@doc """
Returns true if the lexical form of arg1 contains the lexical form of arg2, otherwise it returns false.

The CONTAINS function corresponds to the XPath `fn:contains` function.
The arguments must be `compatible_arguments?/2` otherwise `:error` is returned.

see:
- <https://www.w3.org/TR/sparql11-query/#func-contains>
- <http://www.w3.org/TR/xpath-functions/#func-contains>
"""
def call(:CONTAINS, [arg1, arg2], _) do
  cond do
    not compatible_arguments?(arg1, arg2) -> :error
    arg1 |> to_string() |> String.contains?(to_string(arg2)) -> XSD.true
    true -> XSD.false
  end
end

def call(:CONTAINS, _, _), do: :error
@doc """
Returns the substring of the lexical form of arg1 that precedes the first occurrence of the lexical form of arg2.

The STRBEFORE function corresponds to the XPath `fn:substring-before` function.

The arguments must be `compatible_arguments?/2` otherwise `:error` is returned.

For compatible arguments, if the lexical part of the second argument occurs as
a substring of the lexical part of the first argument, the function returns a
literal of the same kind as the first argument arg1 (simple literal, plain
literal same language tag, xsd:string). The lexical form of the result is the
substring of the lexical form of arg1 that precedes the first occurrence of
the lexical form of arg2. If the lexical form of arg2 is the empty string,
this is considered to be a match and the lexical form of the result is the
empty string.

If there is no such occurrence, an empty simple literal is returned.

see:
- <https://www.w3.org/TR/sparql11-query/#func-strbefore>
- <http://www.w3.org/TR/xpath-functions/#func-substring-before>
"""
def call(:STRBEFORE, [arg1, arg2], _) do
  cond do
    not compatible_arguments?(arg1, arg2) -> :error
    # Empty arg2 counts as a match; the result keeps the kind of arg1.
    Literal.lexical(arg2) == "" -> Literal.update(arg1, fn _ -> "" end)
    true ->
      # parts: 2 splits at the first occurrence only.
      case String.split(Literal.lexical(arg1), Literal.lexical(arg2), parts: 2) do
        [left, _] -> Literal.update(arg1, fn _ -> left end)
        # No occurrence: an empty *simple* literal (kind of arg1 is not kept).
        [_] -> Literal.new("")
      end
  end
end

def call(:STRBEFORE, _, _), do: :error

@doc """
Returns the substring of the lexical form of arg1 that follows the first occurrence of the lexical form of arg2.

The STRAFTER function corresponds to the XPath `fn:substring-after` function.

The arguments must be `compatible_arguments?/2` otherwise `:error` is returned.

For compatible arguments, if the lexical part of the second argument occurs as
a substring of the lexical part of the first argument, the function returns a
literal of the same kind as the first argument arg1 (simple literal, plain
literal same language tag, xsd:string). The lexical form of the result is the
substring of the lexical form of arg1 that follows the first occurrence of
the lexical form of arg2. If the lexical form of arg2 is the empty string,
this is considered to be a match and the lexical form of the result is the
lexical form of arg1.

If there is no such occurrence, an empty simple literal is returned.

see:
- <https://www.w3.org/TR/sparql11-query/#func-strafter>
- <http://www.w3.org/TR/xpath-functions/#func-substring-after>
"""
def call(:STRAFTER, [arg1, arg2], _) do
  cond do
    not compatible_arguments?(arg1, arg2) -> :error
    # Empty arg2 counts as a match: the whole of arg1 follows it.
    Literal.lexical(arg2) == "" -> arg1
    true ->
      case String.split(Literal.lexical(arg1), Literal.lexical(arg2), parts: 2) do
        [_, right] -> Literal.update(arg1, fn _ -> right end)
        # No occurrence: an empty *simple* literal (kind of arg1 is not kept).
        [_] -> Literal.new("")
      end
  end
end

def call(:STRAFTER, _, _), do: :error
@doc """
Returns a simple literal with the lexical form obtained from the lexical form of its input after translating reserved characters according to the fn:encode-for-uri function.

The ENCODE_FOR_URI function corresponds to the XPath `fn:encode-for-uri` function.

see:
- <https://www.w3.org/TR/sparql11-query/#func-encode>
- <http://www.w3.org/TR/xpath-functions/#func-encode-for-uri>
"""
def call(:ENCODE_FOR_URI, [%Literal{literal: %datatype{}} = str], _)
    when datatype in [XSD.String, RDF.LangString] do
  # Percent-encode everything except RFC 3986 unreserved characters.
  encoded = URI.encode(to_string(str), &URI.char_unreserved?/1)
  Literal.new(encoded)
end

def call(:ENCODE_FOR_URI, _, _), do: :error
@doc """
Returns a string literal with the lexical form being obtained by concatenating the lexical forms of its inputs.

If all input literals are typed literals of type `xsd:string`, then the returned
literal is also of type `xsd:string`, if all input literals are plain literals
with identical language tag, then the returned literal is a plain literal with
the same language tag, in all other cases, the returned literal is a simple literal.

The CONCAT function corresponds to the XPath `fn:concat` function.

see:
- <https://www.w3.org/TR/sparql11-query/#func-concat>
- <http://www.w3.org/TR/xpath-functions/#func-concat>
"""
def call(:CONCAT, [], _), do: XSD.string("")

def call(:CONCAT, [%Literal{literal: %datatype{}} = first | rest], _)
    when datatype in [XSD.String, RDF.LangString] do
  # Accumulator: {concatenated string so far, common language tag or nil}.
  # The language stays set only while every literal seen so far shares it;
  # any mismatch (incl. a plain xsd:string, whose language is nil) drops it.
  rest
  |> Enum.reduce_while({to_string(first), Literal.language(first)}, fn
    %Literal{literal: %datatype{}} = str, {acc, language}
    when datatype in [XSD.String, RDF.LangString] ->
      {:cont, {
        acc <> to_string(str),
        if language && language == Literal.language(str) do
          language
        else
          nil
        end
      }
    }
    # Any non-string argument aborts the whole concatenation.
    _, _ ->
      {:halt, :error}
  end)
  |> case do
    {str, nil} -> XSD.string(str)
    {str, language} -> RDF.lang_string(str, language: language)
    _ -> :error
  end
end

def call(:CONCAT, _, _), do: :error
@doc """
Checks if a language tagged string literal or language tag matches a language range.

The check is performed per the basic filtering scheme defined in
[RFC4647](http://www.ietf.org/rfc/rfc4647.txt) section 3.3.1.
A language range is a basic language range per _Matching of Language Tags_ in
RFC4647 section 2.1.
A language range of `"*"` matches any non-empty language-tag string.

see <https://www.w3.org/TR/sparql11-query/#func-langMatches>
"""
def call(:LANGMATCHES, [%Literal{literal: %XSD.String{value: language_tag}},
                        %Literal{literal: %XSD.String{value: language_range}}], _) do
  matches = RDF.LangString.match_language?(language_tag, language_range)
  if matches, do: XSD.true, else: XSD.false
end

def call(:LANGMATCHES, _, _), do: :error
@doc """
Matches text against a regular expression pattern.

The regular expression language is defined in _XQuery 1.0 and XPath 2.0 Functions and Operators_.

see
- <https://www.w3.org/TR/sparql11-query/#func-regex>
- <https://www.w3.org/TR/xpath-functions/#func-matches>
"""
# The two-argument form defaults to an empty flag string.
def call(:REGEX, [text, pattern], _), do: match_regex(text, pattern, XSD.string(""))
def call(:REGEX, [text, pattern, flags], _), do: match_regex(text, pattern, flags)
def call(:REGEX, _, _), do: :error

@doc """
Replaces each non-overlapping occurrence of the regular expression pattern with the replacement string.

Regular expression matching may involve modifier flags. See REGEX.

see
- <https://www.w3.org/TR/sparql11-query/#func-replace>
- <http://www.w3.org/TR/xpath-functions/#func-replace>
"""
# The three-argument form defaults to an empty flag string.
def call(:REPLACE, [text, pattern, replacement], _),
  do: replace_regex(text, pattern, replacement, XSD.string(""))
def call(:REPLACE, [text, pattern, replacement, flags], _),
  do: replace_regex(text, pattern, replacement, flags)
def call(:REPLACE, _, _), do: :error
@doc """
Returns the absolute value of the argument.

If the argument is not a numeric value `:error` is returned.

see
- <https://www.w3.org/TR/sparql11-query/#func-abs>
- <http://www.w3.org/TR/xpath-functions/#func-abs>
"""
def call(:ABS, [%Literal{} = literal], _), do: XSD.Numeric.abs(literal) || :error
def call(:ABS, _, _), do: :error

@doc """
Rounds a value to a specified number of decimal places, rounding upwards if two such values are equally near.

The function returns the nearest (that is, numerically closest) value to the
given literal value that is a multiple of ten to the power of minus `precision`.
If two such values are equally near (for example, if the fractional part in the
literal value is exactly .5), the function returns the one that is closest to
positive infinity.

If the argument is not a numeric value `:error` is returned.

see
- <https://www.w3.org/TR/sparql11-query/#func-round>
- <http://www.w3.org/TR/xpath-functions/#func-round>
"""
def call(:ROUND, [%Literal{} = literal], _), do: XSD.Numeric.round(literal) || :error
def call(:ROUND, _, _), do: :error

@doc """
Rounds a numeric value upwards to a whole number.

If the argument is not a numeric value `:error` is returned.

see
- <https://www.w3.org/TR/sparql11-query/#func-ceil>
- <http://www.w3.org/TR/xpath-functions/#func-ceil>
"""
def call(:CEIL, [%Literal{} = literal], _), do: XSD.Numeric.ceil(literal) || :error
def call(:CEIL, _, _), do: :error

@doc """
Rounds a numeric value downwards to a whole number.

If the argument is not a numeric value `:error` is returned.

see
- <https://www.w3.org/TR/sparql11-query/#func-floor>
- <http://www.w3.org/TR/xpath-functions/#func-floor>
"""
def call(:FLOOR, [%Literal{} = literal], _), do: XSD.Numeric.floor(literal) || :error
def call(:FLOOR, _, _), do: :error
@doc """
Returns a pseudo-random number between 0 (inclusive) and 1.0e0 (exclusive).

see <https://www.w3.org/TR/sparql11-query/#idp2130040>
"""
def call(:RAND, [], _), do: XSD.double(:rand.uniform())
def call(:RAND, _, _), do: :error

@doc """
Returns an XSD dateTime value for the current query execution.

All calls to this function in any one query execution return the same value,
since the time is taken from the execution context rather than the clock.

see <https://www.w3.org/TR/sparql11-query/#func-now>
"""
def call(:NOW, [], %{time: time}), do: XSD.date_time(time)
def call(:NOW, _, _), do: :error
@doc """
Returns the year part of the given datetime as an integer.

see
- <https://www.w3.org/TR/sparql11-query/#func-year>
- <https://www.w3.org/TR/xpath-functions/#func-year-from-dateTime>
"""
def call(:YEAR, [%Literal{literal: %XSD.DateTime{} = literal}], _),
  do: naive_datetime_part(literal, :year)

def call(:YEAR, _, _), do: :error

@doc """
Returns the month part of the given datetime as an integer.

see
- <https://www.w3.org/TR/sparql11-query/#func-month>
- <https://www.w3.org/TR/xpath-functions/#func-month-from-dateTime>
"""
def call(:MONTH, [%Literal{literal: %XSD.DateTime{} = literal}], _),
  do: naive_datetime_part(literal, :month)

def call(:MONTH, _, _), do: :error

@doc """
Returns the day part of the given datetime as an integer.

see
- <https://www.w3.org/TR/sparql11-query/#func-day>
- <https://www.w3.org/TR/xpath-functions/#func-day-from-dateTime>
"""
def call(:DAY, [%Literal{literal: %XSD.DateTime{} = literal}], _),
  do: naive_datetime_part(literal, :day)

def call(:DAY, _, _), do: :error

@doc """
Returns the hours part of the given datetime as an integer.

see
- <https://www.w3.org/TR/sparql11-query/#func-hours>
- <https://www.w3.org/TR/xpath-functions/#func-hours-from-dateTime>
"""
def call(:HOURS, [%Literal{literal: %XSD.DateTime{} = literal}], _),
  do: naive_datetime_part(literal, :hour)

def call(:HOURS, _, _), do: :error

@doc """
Returns the minutes part of the given datetime as an integer.

see
- <https://www.w3.org/TR/sparql11-query/#func-minutes>
- <https://www.w3.org/TR/xpath-functions/#func-minutes-from-dateTime>
"""
def call(:MINUTES, [%Literal{literal: %XSD.DateTime{} = literal}], _),
  do: naive_datetime_part(literal, :minute)

def call(:MINUTES, _, _), do: :error
@doc """
Returns the seconds part of the given datetime as a decimal.

see
- <https://www.w3.org/TR/sparql11-query/#func-seconds>
- <https://www.w3.org/TR/xpath-functions/#func-seconds-from-dateTime>
"""
def call(:SECONDS, [%Literal{literal: %XSD.DateTime{} = literal}], _) do
  if XSD.DateTime.valid?(literal) do
    case literal.value.microsecond do
      # No sub-second precision: return the whole seconds as a decimal.
      {_, 0} ->
        literal.value.second
        |> to_string() # This is needed to get the lexical integer form; required for the SPARQL 1.1 test suite
        |> XSD.decimal()
      # Sub-second precision: microseconds become the fractional part
      # (coefficient with exponent -6) added to the whole seconds.
      {microsecond, _} ->
        %Decimal{coef: microsecond, exp: -6}
        |> Decimal.add(literal.value.second)
        |> XSD.decimal()
      _ ->
        :error
    end
  else
    :error
  end
end

def call(:SECONDS, _, _), do: :error

@doc """
Returns the timezone part of the given datetime as an `xsd:dayTimeDuration` literal.

Returns `:error` if there is no timezone.

see
- <https://www.w3.org/TR/sparql11-query/#func-timezone>
- <http://www.w3.org/TR/xpath-functions/#func-timezone-from-dateTime>
"""
def call(:TIMEZONE, [%Literal{literal: %XSD.DateTime{} = literal}], _) do
  # tz_duration/1 returns nil for a missing timezone (empty tz string), which
  # `|| :error` (applying to the whole pipeline) converts to the error marker.
  literal
  |> XSD.DateTime.tz()
  |> tz_duration()
  || :error
end

def call(:TIMEZONE, _, _), do: :error

@doc """
Returns the timezone part of a given datetime as a simple literal.

Returns the empty string if there is no timezone.

see <https://www.w3.org/TR/sparql11-query/#func-tz>
"""
def call(:TZ, [%Literal{literal: %XSD.DateTime{} = literal}], _) do
  # An empty tz string ("" — no timezone) is truthy in Elixir, so it is
  # returned as XSD.string(""); the :error branch is only hit when tz/1
  # yields nil/false.
  if tz = XSD.DateTime.tz(literal) do
    XSD.string(tz)
  else
    :error
  end
end

def call(:TZ, _, _), do: :error
# All hash builtins delegate to hash/2, which hex-encodes the digest of the
# literal's value in lowercase.

@doc """
Returns the MD5 checksum, as a hex digit string.

see <https://www.w3.org/TR/sparql11-query/#func-md5>
"""
def call(:MD5, [%Literal{literal: %XSD.String{}} = literal], _) do
  hash(:md5, Literal.value(literal))
end

def call(:MD5, _, _), do: :error

@doc """
Returns the SHA1 checksum, as a hex digit string.

see <https://www.w3.org/TR/sparql11-query/#func-sha1>
"""
def call(:SHA1, [%Literal{literal: %XSD.String{}} = literal], _) do
  hash(:sha, Literal.value(literal))
end

def call(:SHA1, _, _), do: :error

@doc """
Returns the SHA256 checksum, as a hex digit string.

see <https://www.w3.org/TR/sparql11-query/#func-sha256>
"""
def call(:SHA256, [%Literal{literal: %XSD.String{}} = literal], _) do
  hash(:sha256, Literal.value(literal))
end

def call(:SHA256, _, _), do: :error

@doc """
Returns the SHA384 checksum, as a hex digit string.

see <https://www.w3.org/TR/sparql11-query/#func-sha384>
"""
def call(:SHA384, [%Literal{literal: %XSD.String{}} = literal], _) do
  hash(:sha384, Literal.value(literal))
end

def call(:SHA384, _, _), do: :error

@doc """
Returns the SHA512 checksum, as a hex digit string.

see <https://www.w3.org/TR/sparql11-query/#func-sha512>
"""
def call(:SHA512, [%Literal{literal: %XSD.String{}} = literal], _) do
  hash(:sha512, Literal.value(literal))
end

def call(:SHA512, _, _), do: :error
# Computes the digest of `value` with the given :crypto hash `type` and wraps
# the lowercase hex encoding in an xsd:string literal.
defp hash(type, value) do
  type
  |> :crypto.hash(value)
  |> Base.encode16(case: :lower)
  |> XSD.string()
end
# Shared implementation of the REGEX builtin.
# The text may be an xsd:string or language-tagged string; pattern and flags
# must be xsd:string. Any exception raised while matching (e.g. an invalid
# pattern) is converted to :error by the rescue clause.
defp match_regex(%Literal{literal: %datatype{}} = text,
                 %Literal{literal: %XSD.String{}} = pattern,
                 %Literal{literal: %XSD.String{}} = flags)
     when datatype in [XSD.String, RDF.LangString] do
  text
  |> Literal.matches?(pattern, flags)
  |> ebv()
rescue
  _error -> :error
end

defp match_regex(_, _, _), do: :error

# Shared implementation of the REPLACE builtin.
# XSD.Utils.Regex.xpath_pattern/2 classifies the pattern/flags combination:
#   {:regex, regex} — compiled regex; $N variables in the replacement are
#                     rewritten to Erlang-style \N backreferences
#   {:q, pattern}   — q flag set: the pattern is used as a plain string
#   {:qi, _}        — q combined with i: not supported, logged and rejected
defp replace_regex(%Literal{literal: %datatype{}} = text,
                   %Literal{literal: %XSD.String{} = pattern},
                   %Literal{literal: %XSD.String{} = replacement},
                   %Literal{literal: %XSD.String{} = flags})
     when datatype in [XSD.String, RDF.LangString] do
  case XSD.Utils.Regex.xpath_pattern(pattern.value, flags.value) do
    {:regex, regex} ->
      Literal.update(text, fn text_value ->
        String.replace(text_value, regex, xpath_to_erlang_regex_variables(replacement.value))
      end)
    {:q, pattern} ->
      Literal.update(text, fn text_value ->
        String.replace(text_value, pattern, replacement.value)
      end)
    {:qi, _pattern} ->
      Logger.error "The combination of the q and the i flag is currently not supported in REPLACE"
      :error
    _ ->
      :error
  end
end

defp replace_regex(_, _, _), do: :error
# Rewrites XPath-style `$N` capture references in a replacement string to the
# Erlang/Elixir `\N` style; a `$` already escaped with a backslash is left as-is.
defp xpath_to_erlang_regex_variables(text) do
  Regex.replace(~r/(?<!\\)\$/, text, "\\")
end
# Extracts a single field (:year, :month, :day, :hour, :minute) from an
# XSD.DateTime literal as an xsd:integer.

# Timezoned DateTime in canonical form (no uncanonical lexical): the field can
# be read straight off the DateTime struct.
defp naive_datetime_part(%XSD.DateTime{value: %DateTime{} = datetime,
                                       uncanonical_lexical: nil}, field) do
  datetime
  |> Map.get(field)
  |> XSD.integer()
end

# Datetime without a timezone: read the field directly.
defp naive_datetime_part(%XSD.DateTime{value: %NaiveDateTime{} = datetime}, field) do
  datetime
  |> Map.get(field)
  |> XSD.integer()
end

# Fallback (e.g. uncanonical lexical forms): re-parse the lexical form so the
# extracted field reflects the datetime as written; :error on parse failure.
defp naive_datetime_part(literal, field) do
  with {:ok, datetime} <-
         literal
         |> XSD.DateTime.lexical()
         |> NaiveDateTime.from_iso8601()
  do
    datetime
    |> Map.get(field)
    |> XSD.integer()
  else
    _ -> :error
  end
end
# Builds an xsd:dayTimeDuration literal from a timezone string as produced by
# XSD.DateTime.tz/1 ("", "Z" or "±HH:MM").
#
# Returns nil for the empty string (no timezone); callers map that to :error.
defp tz_duration(""), do: nil
defp tz_duration("Z"), do: day_time_duration("PT0S")

defp tz_duration(tz) do
  [_, sign, hours, minutes] = Regex.run(~r/\A(?:([\+\-])(\d{2}):(\d{2}))\Z/, tz)
  # XSD durations mark negative values with a leading "-"; "+" is implicit.
  sign = if sign == "-", do: "-", else: ""
  # Strip leading zeros but keep a single "0": previously "+00:30" produced
  # the invalid lexical "PTH30M" because trim_leading("00", "0") yields "".
  hours =
    case String.trim_leading(hours, "0") do
      "" -> "0H"
      h -> h <> "H"
    end
  # Zero minutes are omitted, e.g. "+02:00" -> "PT2H".
  minutes = if minutes != "00", do: (minutes <> "M"), else: ""
  day_time_duration(sign <> "PT" <> hours <> minutes)
end

# TODO: This is just a preliminary implementation until we have a proper XSD.Duration datatype
defp day_time_duration(value) do
  Literal.new(value, datatype: NS.XSD.dayTimeDuration)
end
@doc """
Argument Compatibility Rules

see <https://www.w3.org/TR/sparql11-query/#func-arg-compatibility>
"""
def compatible_arguments?(arg1, arg2)

# The arguments are simple literals or literals typed as xsd:string
def compatible_arguments?(%Literal{literal: %XSD.String{}},
                          %Literal{literal: %XSD.String{}}), do: true
# The first argument is a plain literal with language tag and the second argument is a simple literal or literal typed as xsd:string
# (the reverse order is NOT compatible and falls through to the catch-all)
def compatible_arguments?(%Literal{literal: %RDF.LangString{}},
                          %Literal{literal: %XSD.String{}}), do: true
# The arguments are plain literals with identical language tags (enforced by
# binding the same `language` variable in both patterns)
def compatible_arguments?(%Literal{literal: %RDF.LangString{language: language}},
                          %Literal{literal: %RDF.LangString{language: language}}), do: true
def compatible_arguments?(_, _), do: false

# Effective boolean value; nil (no EBV defined) becomes :error.
defp ebv(value), do: XSD.Boolean.ebv(value) || :error
# Logical negation with the same nil-to-:error convention.
defp fn_not(value), do: XSD.Boolean.fn_not(value) || :error
# UUID v4; format is :urn or :default (see UUID.uuid4/1).
defp uuid(format), do: UUID.uuid4(format)
end
|
lib/sparql/functions/builtins.ex
| 0.849971
| 0.711312
|
builtins.ex
|
starcoder
|
defmodule Membrane.RTP.Serializer do
  @moduledoc """
  Serializes RTP payload to RTP packets by adding the RTP header to each of them.

  Accepts the following metadata under `:rtp` key: `:marker`, `:csrcs`, `:extension`.
  See `Membrane.RTP.Header` for their meaning and specifications.
  """
  use Membrane.Filter

  alias Membrane.{Buffer, RTP, RemoteStream, Payload, Time}

  # RTP sequence numbers are 16 bit and timestamps 32 bit wide; both wrap.
  @max_seq_num 65535
  @max_timestamp 0xFFFFFFFF

  def_input_pad :input, caps: RTP, demand_unit: :buffers
  def_output_pad :output, caps: {RemoteStream, type: :packetized, content_format: RTP}

  def_options ssrc: [spec: RTP.ssrc_t()],
              payload_type: [spec: RTP.payload_type_t()],
              clock_rate: [spec: RTP.clock_rate_t()],
              alignment: [
                default: 1,
                spec: pos_integer(),
                description: """
                Number of bytes that each packet should be aligned to.
                Alignment is achieved by adding RTP padding.
                """
              ]

  defmodule State do
    @moduledoc false
    use Bunch.Access

    # stats_acc accumulates sender statistics reported on :send_stats.
    defstruct sequence_number: 0,
              init_timestamp: 0,
              any_buffer_sent?: false,
              stats_acc: %{
                clock_rate: 0,
                timestamp: 0,
                rtp_timestamp: 0,
                sender_packet_count: 0,
                sender_octet_count: 0
              }

    @type t :: %__MODULE__{
            sequence_number: non_neg_integer(),
            init_timestamp: non_neg_integer(),
            any_buffer_sent?: boolean(),
            stats_acc: %{}
          }
  end

  @impl true
  def handle_init(options) do
    # Sequence number and timestamp start at random values within their
    # wrap-around ranges.
    state = %State{
      sequence_number: Enum.random(0..@max_seq_num),
      init_timestamp: Enum.random(0..@max_timestamp)
    }

    state = state |> put_in([:stats_acc, :clock_rate], options.clock_rate)
    # The resulting state map holds the option fields merged with the State
    # struct's fields, so both are reachable as state.<key> below.
    {:ok, Map.merge(Map.from_struct(options), state)}
  end

  @impl true
  def handle_caps(:input, _caps, _ctx, state) do
    caps = %RemoteStream{type: :packetized, content_format: RTP}
    {{:ok, caps: {:output, caps}}, state}
  end

  @impl true
  def handle_demand(:output, size, :buffers, _ctx, state) do
    # One input buffer yields one output packet, so demand is forwarded 1:1.
    {{:ok, demand: {:input, size}}, state}
  end

  @impl true
  def handle_process(:input, %Buffer{payload: payload, metadata: metadata} = buffer, _ctx, state) do
    state = update_counters(buffer, state)
    # Extract RTP-specific metadata; the remaining metadata is forwarded as-is.
    {rtp_metadata, metadata} = Map.pop(metadata, :rtp, %{})
    %{timestamp: timestamp} = metadata
    # Convert the buffer timestamp into clock-rate units and wrap to 32 bit.
    rtp_offset = timestamp |> Ratio.mult(state.clock_rate) |> Membrane.Time.to_seconds()
    rtp_timestamp = rem(state.init_timestamp + rtp_offset, @max_timestamp + 1)

    header = %RTP.Header{
      ssrc: state.ssrc,
      marker: Map.get(rtp_metadata, :marker, false),
      payload_type: state.payload_type,
      timestamp: rtp_timestamp,
      sequence_number: state.sequence_number,
      csrcs: Map.get(rtp_metadata, :csrcs, []),
      extension: Map.get(rtp_metadata, :extension)
    }

    packet = %RTP.Packet{header: header, payload: payload}
    payload = RTP.Packet.serialize(packet, align_to: state.alignment)
    buffer = %Buffer{payload: payload, metadata: metadata}
    # Sequence number increments per packet, wrapping at 2^16.
    state = Map.update!(state, :sequence_number, &rem(&1 + 1, @max_seq_num + 1))

    state = %{
      state
      | any_buffer_sent?: true,
        stats_acc: %{state.stats_acc | timestamp: Time.vm_time(), rtp_timestamp: rtp_timestamp}
    }

    {{:ok, buffer: {:output, buffer}}, state}
  end

  @impl true
  def handle_other(:send_stats, _ctx, state) do
    stats = get_stats(state)
    # Reset the sent flag so the next report is :no_stats unless new buffers
    # were produced in the meantime.
    state = %{state | any_buffer_sent?: false}
    {{:ok, notify: {:serializer_stats, stats}}, state}
  end

  @spec get_stats(State.t()) :: %{} | :no_stats
  defp get_stats(%State{any_buffer_sent?: false}), do: :no_stats
  defp get_stats(%State{stats_acc: stats}), do: stats

  # Accumulates per-packet byte and packet counters for the stats report.
  defp update_counters(%Buffer{payload: payload}, state) do
    state
    |> update_in(
      [:stats_acc, :sender_octet_count],
      &(&1 + Payload.size(payload))
    )
    |> update_in([:stats_acc, :sender_packet_count], &(&1 + 1))
  end
end
|
lib/membrane/rtp/serializer.ex
| 0.874647
| 0.505737
|
serializer.ex
|
starcoder
|
defmodule Expression.Callbacks do
@moduledoc """
The function callbacks for the standard function set available
in FLOIP expressions.
This should be relatively swappable with another implementation.
The only requirement is the `handle/3` function.
FLOIP functions are case insensitive. All functions in this callback
module are implemented as lowercase names.
Some functions accept a variable amount of arguments. Elixir doesn't
support variable arguments in functions.
If a function accepts a variable number of arguments the convention
is to call the `<function_name>_vargs/2` callback where the context
is given as the first argument and the argument list as a second
argument.
Reserved names such as `and`, `if`, and `or` are suffixed with an
underscore.
"""
@reserved_words ~w[and if or]
@punctuation_pattern ~r/\s*[,:;!?.-]\s*|\s/
@doc """
Converts a string function name into the atom used to dispatch to its handler.

Reserved words such as `and`, `if`, and `or` are automatically suffixed
with an `_` underscore so they do not clash with Elixir keywords.
"""
def atom_function_name(function_name) when function_name in @reserved_words do
  atom_function_name(function_name <> "_")
end

# NOTE(review): String.to_atom/1 creates atoms at runtime; if function names
# can come from untrusted expressions this risks atom-table exhaustion —
# consider String.to_existing_atom/1. TODO confirm input is trusted.
def atom_function_name(function_name) do
  String.to_atom(function_name)
end
@doc """
Handle a function call while evaluating the AST.

Handlers in this module are either:

1. The function name as is
2. The function name with an underscore suffix if the function name is a reserved word
3. The function name suffixed with `_vargs` if the takes a variable set of arguments
"""
@spec handle(function_name :: binary, arguments :: [any], context :: map) ::
        {:ok, any} | {:error, :not_implemented}
def handle(function_name, arguments, context) do
  exact_function_name = atom_function_name(function_name)
  vargs_function_name = atom_function_name("#{function_name}_vargs")

  cond do
    # Check if the exact function signature has been implemented
    # (+1 because the context is prepended as the first argument)
    function_exported?(__MODULE__, exact_function_name, length(arguments) + 1) ->
      {:ok, apply(__MODULE__, exact_function_name, [context] ++ arguments)}

    # Check if it's been implemented to accept a variable amount of arguments
    # (always arity 2: context plus the full argument list)
    function_exported?(__MODULE__, vargs_function_name, 2) ->
      {:ok, apply(__MODULE__, vargs_function_name, [context, arguments])}

    # Otherwise fail
    true ->
      {:error, "#{function_name} is not implemented."}
  end
end
@doc """
Defines a new date value.

```
This is a date @DATE(2012, 12, 25)
```

# Example

    iex> to_string(Expression.Callbacks.date(%{}, 2012, 12, 25))
    "2012-12-25 00:00:00Z"

"""
def date(_ctx, year, month, day) do
  # Everything apart from the actual date is fixed: midnight UTC.
  defaults = [
    calendar: Calendar.ISO,
    hour: 0,
    minute: 0,
    second: 0,
    time_zone: "Etc/UTC",
    zone_abbr: "UTC",
    utc_offset: 0,
    std_offset: 0
  ]

  struct(DateTime, Keyword.merge(defaults, year: year, month: month, day: day))
end

@doc """
Converts date stored in text to an actual date, using `strftime` formatting.

It will fallback to "%Y-%m-%d %H:%M:%S" if no formatting is supplied.

```
You joined on @DATEVALUE(contact.joined_date, "%Y-%m%-d")
```

# Example

    iex> date = Expression.Callbacks.date(%{}, 2020, 12, 20)
    iex> Expression.Callbacks.datevalue(%{}, date)
    "2020-12-20 00:00:00"
    iex> Expression.Callbacks.datevalue(%{}, date, "%Y-%m-%d")
    "2020-12-20"
"""
def datevalue(_ctx, date, format \\ "%Y-%m-%d %H:%M:%S") do
  Timex.format!(date, format, :strftime)
end
@doc """
Returns only the day of the month of a date (1 to 31).

```
The current day is @DAY(contact.joined_date)
```

# Example

    iex> now = DateTime.utc_now()
    iex> day = Expression.Callbacks.day(%{}, now)
    iex> day == now.day
    true

"""
def day(_ctx, %{day: day}), do: day

@doc """
Moves a date by the given number of months.

```
Next month's meeting will be on @EDATE(date.today, 1)
```

# Example

    iex> now = DateTime.utc_now()
    iex> future = Timex.shift(now, months: 1)
    iex> date = Expression.Callbacks.edate(%{}, now, 1)
    iex> future == date
    true

"""
def edate(_ctx, date, months), do: Timex.shift(date, months: months)

@doc """
Returns only the hour of a datetime (0 to 23).

```
The current hour is @HOUR(NOW())
```

# Example

    iex> now = DateTime.utc_now()
    iex> hour = Expression.Callbacks.hour(%{}, now)
    iex> now.hour == hour
    true

"""
def hour(_ctx, %{hour: hour}), do: hour

@doc """
Returns only the minute of a datetime (0 to 59).

```
The current minute is @MINUTE(NOW())
```

# Example

    iex> now = DateTime.utc_now()
    iex> minute = Expression.Callbacks.minute(%{}, now)
    iex> now.minute == minute
    true

"""
def minute(_ctx, %{minute: minute}), do: minute

@doc """
Returns only the month of a date (1 to 12).

```
The current month is @MONTH(NOW())
```

# Example

    iex> now = DateTime.utc_now()
    iex> month = Expression.Callbacks.month(%{}, now)
    iex> now.month == month
    true

"""
def month(_ctx, %{month: month}), do: month
@doc """
Returns the current date time as UTC.

```
It is currently @NOW()
```

# Example

    iex> DateTime.utc_now() == Expression.Callbacks.now(%{})

"""
def now(_ctx), do: DateTime.utc_now()

@doc """
Returns only the second of a datetime (0 to 59).

```
The current second is @SECOND(NOW())
```

# Example

    iex> now = DateTime.utc_now()
    iex> second = Expression.Callbacks.second(%{}, now)
    iex> now.second == second
    true

"""
def second(_ctx, %{second: second}), do: second

@doc """
Defines a time value which can be used for time arithmetic.

```
2 hours and 30 minutes from now is @(date.now + TIME(2, 30, 0))
```

# Example

    iex> Expression.Callbacks.time(%{}, 12, 13, 14)
    %Time{hour: 12, minute: 13, second: 14}

"""
def time(_ctx, hours, minutes, seconds) do
  struct(Time, hour: hours, minute: minutes, second: seconds)
end

@doc """
Converts time stored in text to an actual time.

```
Your appointment is at @(date.today + TIME("2:30"))
```

# Example

    iex> Expression.Callbacks.timevalue(%{}, "2:30")
    %Time{hour: 2, minute: 30, second: 0}

    iex> Expression.Callbacks.timevalue(%{}, "2:30:55")
    %Time{hour: 2, minute: 30, second: 55}

"""
def timevalue(_ctx, expression) do
  # Accepts "H", "H:M" or "H:M:S"; missing components default to 0.
  [hour | rest] =
    expression
    |> String.split(":")
    |> Enum.map(&String.to_integer/1)

  {minute, second} =
    case rest do
      [] -> {0, 0}
      [minute] -> {minute, 0}
      [minute, second | _] -> {minute, second}
    end

  %Time{hour: hour, minute: minute, second: second}
end
@doc """
Returns the current date
```
Today's date is @TODAY()
```
# Example
iex> today = Date.utc_today()
iex> today == Expression.Callbacks.today(%{})
true
"""
def today(_ctx) do
Date.utc_today()
end
@doc """
Returns the day of the week of a date (1 for Sunday to 7 for Saturday)

```
Today is day no. @WEEKDAY(TODAY()) in the week
```

NOTE(review): `Timex.weekday/1` returns the ISO weekday (1 = Monday .. 7 = Sunday),
which conflicts with the "1 for Sunday" contract stated above — confirm which
numbering callers expect before relying on this.

# Example

iex> today = DateTime.utc_now()
iex> expected = Timex.weekday(today)
iex> weekday = Expression.Callbacks.weekday(%{}, today)
iex> weekday == expected
true
"""
def weekday(_ctx, date) do
  Timex.weekday(date)
end
@doc """
Returns only the year of a date

```
The current year is @YEAR(NOW())
```

# Example

iex> %{year: year} = now = DateTime.utc_now()
iex> year == Expression.Callbacks.year(%{}, now)
true
"""
def year(_ctx, %{year: value} = _date), do: value
@doc """
Returns TRUE if and only if all its arguments evaluate to TRUE

```
@AND(contact.gender = "F", contact.age >= 18)
```

# Example

iex> Expression.Callbacks.and_vargs(%{}, [true, true])
true
iex> Expression.Callbacks.and_vargs(%{}, [true, false])
false
iex> Expression.Callbacks.and_vargs(%{}, [false, false])
false
"""
def and_vargs(_ctx, arguments) do
  # Only the literal boolean `true` counts; truthy values do not.
  Enum.all?(arguments, &(&1 == true))
end
@doc """
Returns one value if the condition evaluates to TRUE, and another value if it evaluates to FALSE

```
Dear @IF(contact.gender = "M", "Sir", "Madam")
```

# Example

iex> Expression.Callbacks.if_(%{}, true, "Yes", "No")
"Yes"
iex> Expression.Callbacks.if_(%{}, false, "Yes", "No")
"No"
"""
def if_(_ctx, condition, yes, no) do
  # Same truthiness rules as Kernel.if/2: nil and false select `no`.
  case condition do
    falsy when falsy in [nil, false] -> no
    _truthy -> yes
  end
end
@doc """
Returns TRUE if any argument is TRUE

```
@OR(contact.state = "GA", contact.state = "WA", contact.state = "IN")
```

# Example

iex> Expression.Callbacks.or_vargs(%{}, [true, false])
true
iex> Expression.Callbacks.or_vargs(%{}, [true, true])
true
iex> Expression.Callbacks.or_vargs(%{}, [false, false])
false
"""
def or_vargs(_ctx, arguments) do
  # Only the literal boolean `true` counts; truthy values do not.
  Enum.member?(arguments, true)
end
@doc """
Returns the absolute value of a number

```
The absolute value of -1 is @ABS(-1)
```

# Example

iex> Expression.Callbacks.abs(%{}, -1)
1
"""
# Qualify the call so it is unambiguous that this delegates to Kernel.abs/1
# and is not a recursive call into this two-arity callback.
def abs(_ctx, number), do: Kernel.abs(number)
@doc """
Returns the maximum value of all arguments

```
Please complete at most @MAX(flow.questions, 10) questions
```

# Example

iex> Expression.Callbacks.max_vargs(%{}, [1, 2, 3])
3
"""
def max_vargs(_ctx, arguments) do
  # Fold with Kernel.max/2; raises Enum.EmptyError on [] just like Enum.max/1.
  Enum.reduce(arguments, &Kernel.max/2)
end
@doc """
Returns the minimum value of all arguments

```
Please complete at least @MIN(flow.questions, 10) questions
```

# Example

iex> Expression.Callbacks.min_vargs(%{}, [1, 2, 3])
1
"""
def min_vargs(_ctx, arguments) do
  # Fold with Kernel.min/2; raises Enum.EmptyError on [] just like Enum.min/1.
  Enum.reduce(arguments, &Kernel.min/2)
end
@doc """
Returns the result of a number raised to a power - equivalent to the ^ operator

Always returns a float (delegates to Erlang's `:math.pow/2`).

```
2 to the power of 3 is @POWER(2, 3)
```

# Example

iex> Expression.Callbacks.power(%{}, 2, 3)
8.0
"""
def power(_ctx, a, b) do
  :math.pow(a, b)
end
@doc """
Returns the sum of all arguments, equivalent to the + operator

```
You have @SUM(contact.reports, contact.forms) reports and forms
```

# Example

iex> Expression.Callbacks.sum_vargs(%{}, [1, 2, 3])
6
"""
def sum_vargs(_ctx, arguments) do
  Enum.reduce(arguments, 0, &+/2)
end
@doc """
Returns the character specified by a number

```
As easy as @CHAR(65), @CHAR(66), @CHAR(67)
```

# Example

iex> Expression.Callbacks.char(%{}, 65)
"A"
"""
# Emit the code point as a single 8-bit byte (values above 255 truncate).
def char(_ctx, code), do: <<code::integer-size(8)>>
@doc """
Removes all non-printable characters from a text string

```
You entered @CLEAN(step.value)
```

# Example

iex> Expression.Callbacks.clean(%{}, <<65, 0, 66, 0, 67>>)
"ABC"
"""
def clean(_ctx, binary) do
  # Keep only the graphemes Elixir considers printable, rebuilt into a binary.
  for grapheme <- String.graphemes(binary), String.printable?(grapheme), into: "" do
    grapheme
  end
end
@doc """
Returns a numeric code for the first character in a text string

```
The numeric code of A is @CODE("A")
```

# Example

iex> Expression.Callbacks.code(%{}, "A")
65
iex> Expression.Callbacks.code(%{}, "ABC")
65
"""
# Previously the pattern was `<<code>>`, which only matched strings that were
# exactly one byte long. The docs promise the code of the *first* character,
# so allow (and ignore) any trailing bytes.
def code(_ctx, <<code, _rest::binary>>) do
  code
end
@doc """
Joins text strings into one text string

```
Your name is @CONCATENATE(contact.first_name, " ", contact.last_name)
```

# Example

iex> Expression.Callbacks.concatenate_vargs(%{}, ["name", " ", "surname"])
"name surname"
"""
# Enum.join/1 defaults to the empty joiner.
def concatenate_vargs(_ctx, arguments), do: Enum.join(arguments)
@doc """
Formats the given number in decimal format using a period and commas

NOTE(review): per the doctests, passing `true` as the third argument produces
comma-delimited output ("4,000.4242") — the parameter name `no_commas` appears
inverted relative to the observed behaviour. Confirm against the flowspec
before changing either the name or the branches.

```
You have @FIXED(contact.balance, 2) in your account
```

# Example

iex> Expression.Callbacks.fixed(%{}, 4.209922, 2, false)
"4.21"
iex> Expression.Callbacks.fixed(%{}, 4000.424242, 4, true)
"4,000.4242"
iex> Expression.Callbacks.fixed(%{}, 3.7979, 2, false)
"3.80"
iex> Expression.Callbacks.fixed(%{}, 3.7979, 2)
"3.80"
"""
# Bodiless head declaring the default for the optional flag.
def fixed(_ctx, number, precision, no_commas \\ false)
# Explicitly request comma thousand-delimiters and a period decimal separator.
def fixed(_ctx, number, precision, true) do
  Number.Delimit.number_to_delimited(number,
    precision: precision,
    delimiter: ",",
    separator: "."
  )
end
# Fall back to the Number library's default delimiter/separator settings.
def fixed(_ctx, number, precision, false) do
  Number.Delimit.number_to_delimited(number, precision: precision)
end
@doc """
Returns the first characters in a text string

```
You entered PIN @LEFT(step.value, 4)
```

# Example

iex> Expression.Callbacks.left(%{}, "foobar", 4)
"foob"
iex> Expression.Callbacks.left(%{}, "foo", 10)
"foo"
"""
def left(_ctx, binary, size) do
  # String.slice/3 counts graphemes and clamps when `size` exceeds the string
  # length. The previous binary_part/3 counted raw bytes (splitting UTF-8
  # characters) and raised ArgumentError when size > byte_size(binary).
  String.slice(binary, 0, size)
end
@doc """
Returns the number of characters in a text string

```
You entered @LEN(step.value) characters
```

# Example

iex> Expression.Callbacks.len(%{}, "foo")
3
iex> Expression.Callbacks.len(%{}, "zoë")
3
"""
def len(_ctx, binary) do
  # Grapheme count, identical to String.length/1.
  binary
  |> String.graphemes()
  |> length()
end
@doc """
Converts a text string to lowercase

```
Welcome @LOWER(contact)
```

# Example

iex> Expression.Callbacks.lower(%{}, "Foo Bar")
"foo bar"
"""
def lower(_ctx, binary), do: String.downcase(binary)
@doc """
Capitalizes the first letter of every word in a text string

```
Your name is @PROPER(contact)
```

# Example

iex> Expression.Callbacks.proper(%{}, "foo bar")
"Foo Bar"
"""
def proper(_ctx, binary) do
  # Split on single spaces, capitalize each word, and re-join.
  Enum.map_join(String.split(binary, " "), " ", &String.capitalize/1)
end
@doc """
Repeats text a given number of times

```
Stars! @REPT("*", 10)
```

# Example

iex> Expression.Callbacks.rept(%{}, "*", 10)
"**********"
"""
# :binary.copy/2 is what String.duplicate/2 delegates to.
def rept(_ctx, value, amount), do: :binary.copy(value, amount)
@doc """
Returns the last characters in a text string

```
Your input ended with ...@RIGHT(step.value, 3)
```

# Example

iex> Expression.Callbacks.right(%{}, "testing", 3)
"ing"
iex> Expression.Callbacks.right(%{}, "ab", 5)
"ab"
"""
def right(_ctx, binary, size) do
  # Clamp the start index at 0 so that asking for more characters than the
  # string holds returns the whole string. The previous
  # String.slice(binary, -size, size) returned "" when size > length.
  start = max(String.length(binary) - size, 0)
  String.slice(binary, start, size)
end
@doc """
Substitutes new_text for old_text in a text string. If instance_num is given, then only that instance will be substituted

```
@SUBSTITUTE(step.value, "can't", "can")
```

# Example

iex> Expression.Callbacks.substitute(%{}, "I can't", "can't", "can do")
"I can do"
"""
# First argument renamed from the `%{}` map pattern to `_ctx` for consistency
# with every other callback; the old pattern also needlessly restricted the
# context argument to maps.
def substitute(_ctx, subject, pattern, replacement) do
  String.replace(subject, pattern, replacement)
end
@doc """
Returns the unicode character specified by a number

```
As easy as @UNICHAR(65), @UNICHAR(66) , @UNICHAR(67)
```

# Example

iex> Expression.Callbacks.unichar(%{}, 65)
"A"
iex> Expression.Callbacks.unichar(%{}, 233)
"é"
"""
# Encode the code point as UTF-8.
def unichar(_ctx, code), do: <<code::utf8>>
@doc """
Returns a numeric code for the first character in a text string

```
The numeric code of A is @UNICODE("A")
```

# Example

iex> Expression.Callbacks.unicode(%{}, "A")
65
iex> Expression.Callbacks.unicode(%{}, "é")
233
iex> Expression.Callbacks.unicode(%{}, "ABC")
65
"""
# Previously the pattern was `<<code::utf8>>`, which only matched strings of
# exactly one code point. The docs promise the code of the *first* character,
# so allow (and ignore) any trailing characters.
def unicode(_ctx, <<code::utf8, _rest::binary>>) do
  code
end
@doc """
Converts a text string to uppercase

```
WELCOME @UPPER(contact)!!
```

# Example

iex> Expression.Callbacks.upper(%{}, "foo")
"FOO"
"""
def upper(_ctx, binary), do: String.upcase(binary)
@doc """
Returns the first word in the given text - equivalent to WORD(text, 1)

```
The first word you entered was @FIRST_WORD(step.value)
```

# Example

iex> Expression.Callbacks.first_word(%{}, "foo bar baz")
"foo"
"""
def first_word(_ctx, binary) do
  binary
  |> String.split(" ")
  |> hd()
end
@doc """
Formats a number as a percentage

```
You've completed @PERCENT(contact.reports_done / 10) reports
```

# Example

iex> Expression.Callbacks.percent(%{}, 2/10)
"20%"
iex> Expression.Callbacks.percent(%{}, "0.2")
"20%"
iex> Expression.Callbacks.percent(%{}, Decimal.new("0.2"))
"20%"
"""
@spec percent(Expression.Context.t(), float) :: binary
# Floats are converted losslessly via Decimal.from_float/1 and re-dispatched.
def percent(ctx, float) when is_float(float) do
  percent(ctx, Decimal.from_float(float))
end
@spec percent(Expression.Context.t(), binary) :: binary
# Binaries are parsed into a Decimal (raises Decimal.Error on bad input).
def percent(ctx, binary) when is_binary(binary) do
  percent(ctx, Decimal.new(binary))
end
@spec percent(Expression.Context.t(), Decimal.t()) :: binary
# Terminal clause: multiply by 100 and render with no decimal places.
def percent(_ctx, decimal) do
  Number.Percentage.number_to_percentage(Decimal.mult(decimal, 100), precision: 0)
end
@doc """
Formats digits in text for reading in TTS

```
Your number is @READ_DIGITS(contact.tel_e164)
```

# Example

iex> Expression.Callbacks.read_digits(%{}, "+271")
"plus two seven one"
"""
def read_digits(_ctx, binary) do
  words = %{
    "+" => "plus",
    "0" => "zero",
    "1" => "one",
    "2" => "two",
    "3" => "three",
    "4" => "four",
    "5" => "five",
    "6" => "six",
    "7" => "seven",
    "8" => "eight",
    "9" => "nine"
  }

  # Keep only characters we know how to speak; everything else is dropped.
  binary
  |> String.graphemes()
  |> Enum.flat_map(fn grapheme ->
    case Map.fetch(words, grapheme) do
      {:ok, word} -> [word]
      :error -> []
    end
  end)
  |> Enum.join(" ")
end
@doc """
Removes the first word from the given text. The remaining text will be unchanged

```
You entered @REMOVE_FIRST_WORD(step.value)
```

# Example

iex> Expression.Callbacks.remove_first_word(%{}, "foo bar")
"bar"
iex> Expression.Callbacks.remove_first_word(%{}, "foo-bar", "-")
"bar"
"""
def remove_first_word(_ctx, binary, separator \\ " ")

def remove_first_word(_ctx, binary, separator) do
  binary
  |> String.split(separator)
  |> tl()
  |> Enum.join(separator)
end
@doc """
Extracts the nth word from the given text string. If stop is a negative number,
then it is treated as count backwards from the end of the text. If by_spaces is
specified and is TRUE then the function splits the text into words only by spaces.
Otherwise the text is split by punctuation characters as well

Returns `nil` when the requested word index is out of range.

# Example

iex> Expression.Callbacks.word(%{}, "hello cow-boy", 2)
"cow"
iex> Expression.Callbacks.word(%{}, "hello cow-boy", 2, true)
"cow-boy"
iex> Expression.Callbacks.word(%{}, "hello cow-boy", -1)
"boy"
"""
def word(ctx, binary, n, by_spaces \\ false)

def word(_ctx, binary, n, by_spaces) do
  splitter = if(by_spaces, do: " ", else: @punctuation_pattern)
  parts = String.split(binary, splitter)

  # `n` is 1-indexed from the front; negative `n` counts from the back.
  # Enum.at/2 replaces the previous `[part] = Enum.slice(...)` match (which
  # the inline comment already flagged as suspect): it raised a MatchError for
  # out-of-range indices, whereas we now return nil.
  index = if n < 0, do: n, else: n - 1
  Enum.at(parts, index)
end
@doc """
Returns the number of words in the given text string. If by_spaces is specified and is TRUE then the function splits the text into words only by spaces. Otherwise the text is split by punctuation characters as well

```
You entered @WORD_COUNT(step.value) words
```

# Example

iex> Expression.Callbacks.word_count(%{}, "hello cow-boy")
3
iex> Expression.Callbacks.word_count(%{}, "hello cow-boy", true)
2
"""
def word_count(ctx, binary, by_spaces \\ false)

def word_count(_ctx, binary, by_spaces) do
  separator = if by_spaces, do: " ", else: @punctuation_pattern
  length(String.split(binary, separator))
end
@doc """
Extracts a substring of the words beginning at start, and up to but not-including stop.
If stop is omitted then the substring will be all words from start until the end of the text.
If stop is a negative number, then it is treated as count backwards from the end of the text.
If by_spaces is specified and is TRUE then the function splits the text into words only by spaces.
Otherwise the text is split by punctuation characters as well

NOTE(review): unlike `word_slice/5`, the 3-arity clauses always split on a
single space, and there is no clause for `start == 0`, which raises a
FunctionClauseError — confirm whether that is intended.

# Example

iex> Expression.Callbacks.word_slice(%{}, "RapidPro expressions are fun", 2, 4)
"expressions are"
iex> Expression.Callbacks.word_slice(%{}, "RapidPro expressions are fun", 2)
"expressions are fun"
iex> Expression.Callbacks.word_slice(%{}, "RapidPro expressions are fun", 1, -2)
"RapidPro expressions"
iex> Expression.Callbacks.word_slice(%{}, "RapidPro expressions are fun", -1)
"fun"
"""
# Positive start: 1-indexed; take everything from `start` to the end.
def word_slice(_ctx, binary, start) when start > 0 do
  parts =
    binary
    |> String.split(" ")
  parts
  |> Enum.slice(start - 1, length(parts))
  |> Enum.join(" ")
end
# Negative start: counts backwards from the end of the word list.
def word_slice(_ctx, binary, start) when start < 0 do
  parts =
    binary
    |> String.split(" ")
  parts
  |> Enum.slice(start..length(parts))
  |> Enum.join(" ")
end
# 5-arity variant of word_slice/3: `stop` is exclusive, and `by_spaces`
# selects space-only splitting instead of the punctuation pattern.
def word_slice(_ctx, binary, start, stop, by_spaces \\ false)
# Positive stop: slice the 1-indexed, stop-exclusive window [start, stop).
def word_slice(_ctx, binary, start, stop, by_spaces) when stop > 0 do
  splitter = if(by_spaces, do: " ", else: @punctuation_pattern)
  binary
  |> String.split(splitter)
  |> Enum.slice((start - 1)..(stop - 2))
  |> Enum.join(" ")
end
# Negative stop: counts backwards from the end of the word list.
def word_slice(_ctx, binary, start, stop, by_spaces) when stop < 0 do
  splitter = if(by_spaces, do: " ", else: @punctuation_pattern)
  binary
  |> String.split(splitter)
  |> Enum.slice((start - 1)..(stop - 1))
  |> Enum.join(" ")
end
@doc """
Returns TRUE if the argument is a number.

```
@ISNUMBER(contact.age) will return TRUE if the contact's age is a number.
```

# Example

iex> Expression.Callbacks.isnumber(%{}, 1)
true
iex> Expression.Callbacks.isnumber(%{}, 1.0)
true
iex> Expression.Callbacks.isnumber(%{}, Decimal.new("1.0"))
true
iex> Expression.Callbacks.isnumber(%{}, "1.0")
true
iex> Expression.Callbacks.isnumber(%{}, "a")
false
"""
def isnumber(_ctx, var) when is_float(var) or is_integer(var), do: true
def isnumber(_ctx, %{__struct__: Decimal}), do: true

def isnumber(_ctx, var) when is_binary(var) do
  # Use Decimal.parse/1 instead of rescuing Decimal.Error from Decimal.new/1:
  # exceptions are not control flow, and this mirrors parse_decimal/1 below.
  # Require the remainder to be empty so "1.0abc" is not treated as a number.
  case Decimal.parse(var) do
    {%Decimal{}, ""} -> true
    _ -> false
  end
end

def isnumber(_ctx, _var), do: false
@doc """
Returns TRUE if the argument is a boolean.

```
@ISBOOL(block.value) will return TRUE if the block returned a boolean value.
```

# Example

iex> Expression.Callbacks.isbool(%{}, true)
true
iex> Expression.Callbacks.isbool(%{}, false)
true
iex> Expression.Callbacks.isbool(%{}, 1)
false
iex> Expression.Callbacks.isbool(%{}, 0)
false
iex> Expression.Callbacks.isbool(%{}, "true")
false
iex> Expression.Callbacks.isbool(%{}, "false")
false
"""
def isbool(_ctx, var), do: is_boolean(var)
@doc """
Returns TRUE if the argument is a string.

```
@ISSTRING(contact.name) will return TRUE if the contact's name is a string.
```

# Example

iex> Expression.Callbacks.isstring(%{}, "hello")
true
iex> Expression.Callbacks.isstring(%{}, false)
false
iex> Expression.Callbacks.isstring(%{}, 1)
false
iex> Expression.Callbacks.isstring(%{}, Decimal.new("1.0"))
false
"""
def isstring(_ctx, var) when is_binary(var), do: true
def isstring(_ctx, _var), do: false
# Compiles each space-separated word in `words` into a case-insensitive,
# escaped regex and runs each one against `haystack`. Returns
# {patterns, results} where `results` contains the matched substrings (one
# per pattern that matched, in pattern order).
defp search_words(haystack, words) do
  patterns =
    for word <- String.split(words, " ") do
      word
      |> Regex.escape()
      |> Regex.compile!("i")
    end

  results =
    Enum.flat_map(patterns, fn pattern ->
      case Regex.run(pattern, haystack) do
        [match] -> [match]
        nil -> []
      end
    end)

  {patterns, results}
end
@doc """
Tests whether all the words are contained in text

The words can be in any order and may appear more than once.

```
@(has_all_words("the quick brown FOX", "the fox")) → true
@(has_all_words("the quick brown fox", "red fox")) → false
```

NOTE: the flowspec supports `.match` which isn't support here yet.

```
@(has_all_words("the quick brown FOX", "the fox").match) → the FOX
```

# Example

iex> Expression.Callbacks.has_all_words(%{}, "the quick brown FOX", "the fox")
{:ok, true}
iex> Expression.Callbacks.has_all_words(%{}, "the quick brown FOX", "red fox")
{:ok, false}
"""
def has_all_words(_ctx, haystack, words) do
  {patterns, results} = search_words(haystack, words)
  # Every pattern produced a match iff the two lists have the same length.
  # future match result: Enum.join(results, " ")
  {:ok, length(patterns) == length(results)}
end
@doc """
Tests whether any of the words are contained in the text

Only one of the words needs to match and it may appear more than once.

```
@(has_any_word("The Quick Brown Fox", "fox quick")) → true
```

Unsupported:

```
@(has_any_word("The Quick Brown Fox", "fox quick").match) → Quick Fox
@(has_any_word("The Quick Brown Fox", "red fox").match) → Fox
```

# Example

iex> Expression.Callbacks.has_any_word(%{}, "The Quick Brown Fox", "fox quick")
{:ok, true}
iex> Expression.Callbacks.has_any_word(%{}, "The Quick Brown Fox", "yellow")
{:ok, false}
"""
def has_any_word(_ctx, haystack, words) do
  {_patterns, results} = search_words(haystack, words)
  # At least one pattern matched iff the result list is non-empty.
  # future match result: Enum.join(results, " ")
  {:ok, results != []}
end
@doc """
Tests whether text starts with beginning

Both text values are trimmed of surrounding whitespace, but otherwise matching is
strict without any tokenization.

Supported:

```
@(has_beginning("The Quick Brown", "the quick")) → true
@(has_beginning("The Quick Brown", "the   quick")) → false
@(has_beginning("The Quick Brown", "quick brown")) → false
```

Unsupported

```
@(has_beginning("The Quick Brown", "the quick").match) → The Quick
```

# Example

iex> Expression.Callbacks.has_beginning(%{}, "The Quick Brown", "the quick")
{:ok, true}
iex> Expression.Callbacks.has_beginning(%{}, "  The Quick Brown  ", "the quick")
{:ok, true}
iex> Expression.Callbacks.has_beginning(%{}, "The Quick Brown", "quick brown")
{:ok, false}
"""
def has_beginning(_ctx, text, beginning) do
  # The docs promise both values are trimmed of surrounding whitespace; the
  # previous implementation skipped this step. (The old docstring also
  # contained a duplicated, contradictory example — corrected above.)
  text = String.trim(text)
  beginning = String.trim(beginning)

  # Case-insensitive, literal match anchored at the start of the text.
  case Regex.run(~r/^#{Regex.escape(beginning)}/i, text) do
    # future match result: first
    [_first | _remainder] -> {:ok, true}
    nil -> {:ok, false}
  end
end
# Strips alphabetic characters from `expression` and tries to parse what is
# left as a date. Returns the parsed date or nil.
defp extract_dateish(expression) do
  # Strip letters case-insensitively: the previous ~r/[a-z]/u left uppercase
  # letters behind (e.g. "The date is 15/01/2017"), which could confuse the
  # date parser. This also mirrors the letter-stripping in has_phone/3.
  expression = Regex.replace(~r/[a-z]/iu, expression, "")

  case DateTimeParser.parse_date(expression) do
    {:ok, date} -> date
    {:error, _} -> nil
  end
end
@doc """
Tests whether `expression` contains a date formatted according to our environment

This is very naively implemented with a regular expression.

Supported:

```
@(has_date("the date is 15/01/2017")) → true
@(has_date("there is no date here, just a year 2017")) → false
```

Unsupported:

```
@(has_date("the date is 15/01/2017").match) → 2017-01-15T13:24:30.123456-05:00
```

# Example

iex> Expression.Callbacks.has_date(%{}, "the date is 15/01/2017")
{:ok, true}
iex> Expression.Callbacks.has_date(%{}, "there is no date here, just a year 2017")
{:ok, false}
"""
def has_date(_, expression) do
  # future match result: the extracted date
  {:ok, extract_dateish(expression) != nil}
end
@doc """
Tests whether `expression` is a date equal to `date_string`

Supported:

```
@(has_date_eq("the date is 15/01/2017", "2017-01-15")) → true
@(has_date_eq("there is no date here, just a year 2017", "2017-06-01")) → false
@(has_date_eq("there is no date here, just a year 2017", "not date")) → ERROR
```

Not supported:

```
@(has_date_eq("the date is 15/01/2017", "2017-01-15").match) → 2017-01-15T13:24:30.123456-05:00
@(has_date_eq("the date is 15/01/2017 15:00", "2017-01-15").match) → 2017-01-15T15:00:00.000000-05:00
```

# Examples

iex> Expression.Callbacks.has_date_eq(%{}, "the date is 15/01/2017", "2017-01-15")
{:ok, true}
iex> Expression.Callbacks.has_date_eq(%{}, "there is no date here, just a year 2017", "2017-01-15")
{:ok, false}
"""
def has_date_eq(_ctx, expression, date_string) do
  # Future match result: the date found in `expression`.
  # NB: when neither side parses, both are nil and therefore compare equal —
  # identical to the original behaviour.
  {:ok, extract_dateish(expression) == extract_dateish(date_string)}
end
@doc """
Tests whether `expression` is a date after the date `date_string`

```
@(has_date_gt("the date is 15/01/2017", "2017-01-01")) → true
@(has_date_gt("the date is 15/01/2017", "2017-03-15")) → false
@(has_date_gt("there is no date here, just a year 2017", "2017-06-01")) → false
@(has_date_gt("there is no date here, just a year 2017", "not date")) → ERROR
```

Not supported:

```
@(has_date_gt("the date is 15/01/2017", "2017-01-01").match) → 2017-01-15T13:24:30.123456-05:00
```

# Example

iex> Expression.Callbacks.has_date_gt(%{}, "the date is 15/01/2017", "2017-01-01")
{:ok, true}
iex> Expression.Callbacks.has_date_gt(%{}, "the date is 15/01/2017", "2017-03-15")
{:ok, false}
"""
def has_date_gt(_ctx, expression, date_string) do
  found_date = extract_dateish(expression)
  test_date = extract_dateish(date_string)
  # future match result: found_date
  # NOTE: Date.compare/2 raises when either side failed to parse (nil), which
  # lines up with the documented ERROR behaviour for non-dates.
  {:ok, Date.compare(found_date, test_date) == :gt}
end
@doc """
Tests whether `expression` contains a date before the date `date_string`

```
@(has_date_lt("the date is 15/01/2017", "2017-06-01")) → true
@(has_date_lt("there is no date here, just a year 2017", "2017-06-01")) → false
@(has_date_lt("there is no date here, just a year 2017", "not date")) → ERROR
```

Not supported:

```
@(has_date_lt("the date is 15/01/2017", "2017-06-01").match) → 2017-01-15T13:24:30.123456-05:00
```

# Example

iex> Expression.Callbacks.has_date_lt(%{}, "the date is 15/01/2017", "2017-06-01")
{:ok, true}
iex> Expression.Callbacks.has_date_lt(%{}, "the date is 15/01/2021", "2017-03-15")
{:ok, false}
"""
def has_date_lt(_ctx, expression, date_string) do
  found_date = extract_dateish(expression)
  test_date = extract_dateish(date_string)
  # future match result: found_date
  # NOTE: Date.compare/2 raises when either side failed to parse (nil), which
  # lines up with the documented ERROR behaviour for non-dates.
  {:ok, Date.compare(found_date, test_date) == :lt}
end
@doc """
Tests whether an email is contained in text

```
@(has_email("my email is <EMAIL>, please respond")) → true
@(has_email("i'm not sharing my email")) → false
```

Not supported:

```
@(has_email("my email is <EMAIL>, please respond").match) → <EMAIL>
@(has_email("my email is <<EMAIL>>").match) → <EMAIL>
```

# Example:

iex> Expression.Callbacks.has_email(%{}, "my email is <EMAIL>, please respond")
{:ok, true}
iex> Expression.Callbacks.has_email(%{}, "i'm not sharing my email")
{:ok, false}
"""
def has_email(_ctx, expression) do
  # future match result: the matched address
  email_pattern = ~r/([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)/
  {:ok, Regex.match?(email_pattern, expression)}
end
@doc """
Returns whether the contact is part of group with the passed in UUID

```
@(has_group(array(), "97fe7029-3a15-4005-b0c7-277b884fc1d5")) → false
```

Not supported:

```
@(has_group(contact.groups, "b7cf0d83-f1c9-411c-96fd-c511a4cfa86d").match) → {name: Testers, uuid: b7cf0d83-f1c9-411c-96fd-c511a4cfa86d}
```

# Example:

iex> contact = %{
...>   "groups" => [%{
...>     "uuid" => "b7cf0d83-f1c9-411c-96fd-c511a4cfa86d"
...>   }]
...> }
iex> Expression.Callbacks.has_group(%{}, contact["groups"], "b7cf0d83-f1c9-411c-96fd-c511a4cfa86d")
{:ok, true}
iex> Expression.Callbacks.has_group(%{}, contact["groups"], "00000000-0000-0000-0000-000000000000")
{:ok, false}
"""
def has_group(_ctx, groups, uuid) do
  # future match result: the matching group map
  matching_group = Enum.find(groups, fn group -> group["uuid"] == uuid end)
  {:ok, matching_group != nil}
end
# Extracts the first number-looking substring from `expression` as a Decimal,
# or nil when nothing parses. Arabic-Indic digits are normalised to Western
# digits first.
defp extract_numberish(expression) do
  # `[0-9]+(?:\.[0-9]+)?` also accepts single-digit numbers; the previous
  # pattern `[0-9]+\.?[0-9]+` required at least two digits, so "5" was never
  # recognised as a number.
  with [match] <-
         Regex.run(~r/([0-9]+(?:\.[0-9]+)?)/u, replace_arabic_numerals(expression),
           capture: :first
         ),
       {decimal, ""} <- Decimal.parse(match) do
    decimal
  else
    # Regex can return nil
    nil -> nil
    # Decimal parsing can return :error
    :error -> nil
  end
end
# Normalises Arabic-Indic digits (U+0660..U+0669) in `expression` to their
# Western (0-9) equivalents so downstream numeric regexes can match them.
defp replace_arabic_numerals(expression) do
  replace_numerals(expression, %{
    "٠" => "0",
    "١" => "1",
    "٢" => "2",
    "٣" => "3",
    "٤" => "4",
    "٥" => "5",
    "٦" => "6",
    "٧" => "7",
    "٨" => "8",
    "٩" => "9"
  })
end
# Applies every {rune, replacement} pair in `mapping` to `expression`.
defp replace_numerals(expression, mapping) do
  Enum.reduce(mapping, expression, fn {rune, replacement}, acc ->
    String.replace(acc, rune, replacement)
  end)
end
# Coerces a float, integer or binary into a Decimal; returns :error for a
# binary that does not parse completely.
defp parse_decimal(float) when is_float(float), do: Decimal.from_float(float)
defp parse_decimal(int) when is_integer(int), do: Decimal.new(int)

defp parse_decimal(binary) when is_binary(binary) do
  case Decimal.parse(binary) do
    {decimal, ""} -> decimal
    :error -> :error
  end
end
@doc """
Tests whether `expression` contains a number

```
@(has_number("the number is 42")) → true
@(has_number("the number is forty two")) → false
```

Not supported:

```
@(has_number("the number is 42").match) → 42
@(has_number("العدد ٤٢").match) → 42
```

# Example

iex> {:ok, true} = Expression.Callbacks.has_number(%{}, "the number is 42 and 5")
iex> {:ok, true} = Expression.Callbacks.has_number(%{}, "العدد ٤٢")
iex> {:ok, true} = Expression.Callbacks.has_number(%{}, "٠.٥")
iex> {:ok, true} = Expression.Callbacks.has_number(%{}, "0.6")
"""
def has_number(_ctx, expression) do
  # future match result: the extracted number
  {:ok, extract_numberish(expression) != nil}
end
@doc """
Tests whether `expression` contains a number equal to the value

```
@(has_number_eq("the number is 42", 42)) → true
@(has_number_eq("the number is 42", 40)) → false
@(has_number_eq("the number is not there", 40)) → false
@(has_number_eq("the number is not there", "foo")) → ERROR
```

Not supported:

```
@(has_number_eq("the number is 42", 42).match) → 42
```

# Example

iex> {:ok, true} = Expression.Callbacks.has_number_eq(%{}, "the number is 42", 42)
iex> {:ok, true} = Expression.Callbacks.has_number_eq(%{}, "the number is 42", 42.0)
iex> {:ok, true} = Expression.Callbacks.has_number_eq(%{}, "the number is 42", "42")
iex> {:ok, true} = Expression.Callbacks.has_number_eq(%{}, "the number is 42.0", "42")
iex> {:ok, false} = Expression.Callbacks.has_number_eq(%{}, "the number is 40", "42")
iex> {:ok, false} = Expression.Callbacks.has_number_eq(%{}, "the number is 40", "foo")
iex> {:ok, false} = Expression.Callbacks.has_number_eq(%{}, "four hundred", "foo")
"""
def has_number_eq(_ctx, expression, decimal) do
  # Future match result: the extracted number.
  # Any failure to extract or parse on either side yields {:ok, false}.
  case {extract_numberish(expression), parse_decimal(decimal)} do
    {%Decimal{} = found, %Decimal{} = wanted} -> {:ok, Decimal.eq?(found, wanted)}
    _anything_else -> {:ok, false}
  end
end
@doc """
Tests whether `expression` contains a number greater than min

```
@(has_number_gt("the number is 42", 40)) → true
@(has_number_gt("the number is 42", 42)) → false
@(has_number_gt("the number is not there", 40)) → false
@(has_number_gt("the number is not there", "foo")) → ERROR
```

Not supported:

```
@(has_number_gt("the number is 42", 40).match) → 42
```

# Example

iex> {:ok, true} = Expression.Callbacks.has_number_gt(%{}, "the number is 42", 40)
iex> {:ok, true} = Expression.Callbacks.has_number_gt(%{}, "the number is 42", 40.0)
iex> {:ok, true} = Expression.Callbacks.has_number_gt(%{}, "the number is 42", "40")
iex> {:ok, true} = Expression.Callbacks.has_number_gt(%{}, "the number is 42.0", "40")
iex> {:ok, false} = Expression.Callbacks.has_number_gt(%{}, "the number is 40", "40")
iex> {:ok, false} = Expression.Callbacks.has_number_gt(%{}, "the number is 40", "foo")
iex> {:ok, false} = Expression.Callbacks.has_number_gt(%{}, "four hundred", "foo")
"""
def has_number_gt(_ctx, expression, decimal) do
  # Future match result: the extracted number.
  # Any failure to extract or parse on either side yields {:ok, false}.
  case {extract_numberish(expression), parse_decimal(decimal)} do
    {%Decimal{} = found, %Decimal{} = minimum} -> {:ok, Decimal.gt?(found, minimum)}
    _anything_else -> {:ok, false}
  end
end
@doc """
Tests whether `expression` contains a number greater than or equal to min

```
@(has_number_gte("the number is 42", 42)) → true
@(has_number_gte("the number is 42", 45)) → false
@(has_number_gte("the number is not there", 40)) → false
@(has_number_gte("the number is not there", "foo")) → ERROR
```

Not supported:

```
@(has_number_gte("the number is 42", 42).match) → 42
```

# Example

iex> {:ok, true} = Expression.Callbacks.has_number_gte(%{}, "the number is 42", 42)
iex> {:ok, true} = Expression.Callbacks.has_number_gte(%{}, "the number is 42", 42.0)
iex> {:ok, true} = Expression.Callbacks.has_number_gte(%{}, "the number is 42", "42")
iex> {:ok, false} = Expression.Callbacks.has_number_gte(%{}, "the number is 42.0", "45")
iex> {:ok, false} = Expression.Callbacks.has_number_gte(%{}, "the number is 40", "45")
iex> {:ok, false} = Expression.Callbacks.has_number_gte(%{}, "the number is 40", "foo")
iex> {:ok, false} = Expression.Callbacks.has_number_gte(%{}, "four hundred", "foo")
"""
def has_number_gte(_ctx, expression, decimal) do
  # Future match result: the extracted number.
  # Any failure to extract or parse on either side yields {:ok, false}.
  case {extract_numberish(expression), parse_decimal(decimal)} do
    {%Decimal{} = found, %Decimal{} = minimum} ->
      {:ok, Decimal.gt?(found, minimum) || Decimal.eq?(found, minimum)}

    _anything_else ->
      {:ok, false}
  end
end
@doc """
Tests whether `expression` contains a number less than max

```
@(has_number_lt("the number is 42", 44)) → true
@(has_number_lt("the number is 42", 40)) → false
@(has_number_lt("the number is not there", 40)) → false
@(has_number_lt("the number is not there", "foo")) → ERROR
```

Not supported:

```
@(has_number_lt("the number is 42", 44).match) → 42
```

# Example

iex> {:ok, true} = Expression.Callbacks.has_number_lt(%{}, "the number is 42", 44)
iex> {:ok, true} = Expression.Callbacks.has_number_lt(%{}, "the number is 42", 44.0)
iex> {:ok, false} = Expression.Callbacks.has_number_lt(%{}, "the number is 42", "40")
iex> {:ok, false} = Expression.Callbacks.has_number_lt(%{}, "the number is 42.0", "40")
iex> {:ok, false} = Expression.Callbacks.has_number_lt(%{}, "the number is 40", "40")
iex> {:ok, false} = Expression.Callbacks.has_number_lt(%{}, "the number is 40", "foo")
iex> {:ok, false} = Expression.Callbacks.has_number_lt(%{}, "four hundred", "foo")
"""
def has_number_lt(_ctx, expression, decimal) do
  # Future match result: the extracted number.
  # Any failure to extract or parse on either side yields {:ok, false}.
  case {extract_numberish(expression), parse_decimal(decimal)} do
    {%Decimal{} = found, %Decimal{} = maximum} -> {:ok, Decimal.lt?(found, maximum)}
    _anything_else -> {:ok, false}
  end
end
@doc """
Tests whether `expression` contains a number less than or equal to max

```
@(has_number_lte("the number is 42", 42)) → true
@(has_number_lte("the number is 42", 40)) → false
@(has_number_lte("the number is not there", 40)) → false
@(has_number_lte("the number is not there", "foo")) → ERROR
```

Not supported:

```
@(has_number_lte("the number is 42", 42).match) → 42
```

# Example

iex> {:ok, true} = Expression.Callbacks.has_number_lte(%{}, "the number is 42", 42)
iex> {:ok, true} = Expression.Callbacks.has_number_lte(%{}, "the number is 42", 42.0)
iex> {:ok, true} = Expression.Callbacks.has_number_lte(%{}, "the number is 42", "42")
iex> {:ok, false} = Expression.Callbacks.has_number_lte(%{}, "the number is 42.0", "40")
iex> {:ok, false} = Expression.Callbacks.has_number_lte(%{}, "the number is 40", "foo")
iex> {:ok, false} = Expression.Callbacks.has_number_lte(%{}, "four hundred", "foo")
"""
def has_number_lte(_ctx, expression, decimal) do
  # Future match result: the extracted number.
  # Any failure to extract or parse on either side yields {:ok, false}.
  case {extract_numberish(expression), parse_decimal(decimal)} do
    {%Decimal{} = found, %Decimal{} = maximum} ->
      {:ok, Decimal.lt?(found, maximum) || Decimal.eq?(found, maximum)}

    _anything_else ->
      {:ok, false}
  end
end
@doc """
Tests whether the text contains only phrase

The phrase must be the only text in the text to match

```
@(has_only_phrase("Quick Brown", "quick brown")) → true
@(has_only_phrase("The Quick Brown Fox", "quick brown")) → false
@(has_only_phrase("the Quick Brown fox", "")) → false
@(has_only_phrase("", "").match) →
@(has_only_phrase("The Quick Brown Fox", "red fox")) → false
```

Not supported:

```
@(has_only_phrase("Quick Brown", "quick brown").match) → Quick Brown
```

# Example

iex> Expression.Callbacks.has_only_phrase(%{}, "Quick Brown", "quick brown")
{:ok, true}
iex> Expression.Callbacks.has_only_phrase(%{}, "", "")
{:ok, true}
iex> Expression.Callbacks.has_only_phrase(%{}, "The Quick Brown Fox", "quick brown")
{:ok, false}
"""
def has_only_phrase(_ctx, expression, phrase) do
  # Future match result: expression
  # Case-insensitive comparison of the full strings.
  {:ok, String.downcase(expression) == String.downcase(phrase)}
end
@doc """
Returns whether two text values are equal (case sensitive). In the case that they are, it will return the text as the match.

```
@(has_only_text("foo", "foo")) → true
@(has_only_text("foo", "FOO")) → false
@(has_only_text("foo", "bar")) → false
@(has_only_text("foo", " foo ")) → false
@(has_only_text(results.webhook.category, "Failure")) → false
```

Not supported:

```
@(has_only_text("foo", "foo").match) → foo
@(has_only_text(run.status, "completed").match) → completed
@(has_only_text(results.webhook.category, "Success").match) → Success
```

# Example

iex> Expression.Callbacks.has_only_text(%{}, "foo", "foo")
{:ok, true}
iex> Expression.Callbacks.has_only_text(%{}, "", "")
{:ok, true}
iex> Expression.Callbacks.has_only_text(%{}, "foo", "FOO")
{:ok, false}
"""
# Future match result: expression
# True only when the first value is a string and both values are identical.
def has_only_text(_ctx, expression, text) do
  {:ok, is_binary(expression) and expression == text}
end
@doc """
Tests whether `expression` matches the regex pattern

Both text values are trimmed of surrounding whitespace and matching is case-insensitive.

```
@(has_pattern("Buy cheese please", "buy (\w+)")) → true
@(has_pattern("Sell cheese please", "buy (\w+)")) → false
```

Not supported:

```
@(has_pattern("Buy cheese please", "buy (\w+)").match) → Buy cheese
@(has_pattern("Buy cheese please", "buy (\w+)").extra) → {0: Buy cheese, 1: cheese}
```

# Examples

iex> Expression.Callbacks.has_pattern(%{}, "Buy cheese please", "buy (\\\\w+)")
{:ok, true}
iex> Expression.Callbacks.has_pattern(%{}, "Sell cheese please", "buy (\\\\w+)")
{:ok, false}
"""
def has_pattern(_ctx, expression, pattern) do
  case Regex.compile(String.trim(pattern), "i") do
    {:ok, regex} ->
      # NB: requires exactly one scan result, as in the original `with` form.
      case Regex.scan(regex, String.trim(expression), capture: :all) do
        # Future match result: first
        [[_first | _remainder]] -> {:ok, true}
        _ -> {:ok, false}
      end

    _invalid_pattern ->
      {:ok, false}
  end
end
@doc """
Tests whether `expresssion` contains a phone number.
The optional country_code argument specifies the country to use for parsing.
```
@(has_phone("my number is +12067799294 thanks")) → true
@(has_phone("my number is none of your business", "US")) → false
```
Not supported:
```
@(has_phone("my number is +12067799294").match) → +12067799294
@(has_phone("my number is 2067799294", "US").match) → +12067799294
@(has_phone("my number is 206 779 9294", "US").match) → +12067799294
```
# Example
iex> Expression.Callbacks.has_phone(%{}, "my number is +12067799294 thanks")
{:ok, true}
iex> Expression.Callbacks.has_phone(%{}, "my number is 2067799294 thanks", "US")
{:ok, true}
iex> Expression.Callbacks.has_phone(%{}, "my number is 206 779 9294 thanks", "US")
{:ok, true}
iex> Expression.Callbacks.has_phone(%{}, "my number is none of your business", "US")
{:ok, false}
"""
# Fix: the head previously pattern-matched `%{}` for the context argument,
# which (unlike every sibling `has_*` predicate that uses `_ctx`) raised a
# FunctionClauseError for any non-map context. Accept any context instead.
def has_phone(_ctx, expression, country_code \\ "") do
  # Strip alphabetic characters so surrounding words ("my number is ...")
  # leave only digits/punctuation for the phone-number parser.
  letters_removed = Regex.replace(~r/[a-z]/i, expression, "")

  # Future match result: ExPhoneNumber.format(pn, :e164)
  case ExPhoneNumber.parse(letters_removed, country_code) do
    {:ok, _pn} -> {:ok, true}
    _ -> {:ok, false}
  end
end
@doc """
Tests whether phrase is contained in `expression`
The words in the test phrase must appear in the same order with no other words in between.
```
@(has_phrase("the quick brown fox", "brown fox")) → true
@(has_phrase("the Quick Brown fox", "quick fox")) → false
@(has_phrase("the Quick Brown fox", "").match) →
```
Not supported:
```
@(has_phrase("the quick brown fox", "brown fox").match) → brown fox
```
# Examples
iex> Expression.Callbacks.has_phrase(%{}, "the quick brown fox", "brown fox")
{:ok, true}
iex> Expression.Callbacks.has_phrase(%{}, "the quick brown fox", "quick fox")
{:ok, false}
iex> Expression.Callbacks.has_phrase(%{}, "the quick brown fox", "")
{:ok, true}
"""
def has_phrase(_ctx, expression, phrase) do
  # Case-insensitive substring containment; the empty phrase is always
  # contained, mirroring String.contains?/2.
  # Future match result: phrase
  contained? =
    expression
    |> String.downcase()
    |> String.contains?(String.downcase(phrase))

  {:ok, contained?}
end
@doc """
Tests whether there the `expression` has any characters in it
```
@(has_text("quick brown")) → true
@(has_text("")) → false
@(has_text(" \n")) → false
@(has_text(contact.fields.not_set)) → false
```
Not supported:
```
@(has_text("quick brown").match) → quick brown
@(has_text(123).match) → 123
```
# Examples
iex> Expression.Callbacks.has_text(%{}, "quick brown")
{:ok, true}
iex> Expression.Callbacks.has_text(%{}, "")
{:ok, false}
iex> Expression.Callbacks.has_text(%{}, " \\n")
{:ok, false}
iex> Expression.Callbacks.has_text(%{}, 123)
{:ok, true}
iex> Expression.Callbacks.has_text(%{}, nil)
{:ok, false}
"""
# Non-binary input (integers, nil, ...) is coerced to a string first;
# to_string(nil) == "" so nil reports false.
def has_text(ctx, expression) when not is_binary(expression),
  do: has_text(ctx, to_string(expression))

def has_text(_ctx, expression) when is_binary(expression) do
  # True exactly when something other than whitespace remains after trimming.
  # Future match result: the trimmed, non-empty binary
  {:ok, String.trim(expression) != ""}
end
@doc """
Tests whether `expression` contains a time.
```
@(has_time("the time is 10:30")) → true
@(has_time("the time is 10:30:45").match) → 10:30:45.000000
@(has_time("there is no time here, just the number 25")) → false
```
Not supported:
```
@(has_time("the time is 10:30").match) → 10:30:00.000000
@(has_time("the time is 10 PM").match) → 22:00:00.000000
```
# Examples
iex> Expression.Callbacks.has_time(%{}, "the time is 10:30")
{:ok, true}
iex> Expression.Callbacks.has_time(%{}, "the time is 10:00 pm")
{:ok, true}
iex> Expression.Callbacks.has_time(%{}, "the time is 10:30:45")
{:ok, true}
iex> Expression.Callbacks.has_time(%{}, "there is no time here, just the number 25")
{:ok, false}
"""
def has_time(_ctx, expression) do
  # Delegates parsing to DateTimeParser; any successful parse counts.
  # Future match result: time
  {:ok, match?({:ok, _time}, DateTimeParser.parse_time(expression))}
end
end
|
lib/expression/callbacks.ex
| 0.927215
| 0.854095
|
callbacks.ex
|
starcoder
|
defmodule Poison do
  alias Poison.{Encoder, EncodeError}
  alias Poison.{Parser, ParseError}
  alias Poison.{Decode, Decoder, DecodeError}

  @doc """
  Encode a value to JSON.

      iex> Poison.encode([1, 2, 3])
      {:ok, "[1,2,3]"}
  """
  @spec encode(Encoder.t, keyword | Encoder.options) :: {:ok, iodata}
    | {:error, EncodeError.t}
  def encode(value, options \\ %{}) do
    {:ok, encode!(value, options)}
  rescue
    exception in [EncodeError] ->
      {:error, exception}
  end

  @doc """
  Encode a value to JSON as iodata, returning `{:ok, iodata}` or
  `{:error, exception}`.
  """
  @spec encode_to_iodata(Encoder.t, keyword | Encoder.options) :: {:ok, iodata}
    | {:error, EncodeError.t}
  def encode_to_iodata(value, options \\ %{}) do
    {:ok, encode_to_iodata!(value, options)}
  rescue
    exception in [EncodeError] ->
      {:error, exception}
  end

  @doc """
  Encode a value to JSON, raises an exception on error.

      iex> Poison.encode!([1, 2, 3])
      "[1,2,3]"
  """
  @spec encode!(Encoder.t, keyword | Encoder.options) :: iodata | no_return
  def encode!(value, options \\ %{})

  def encode!(value, options) when is_list(options) do
    # Normalize keyword options to a map so every clause below sees a map.
    encode!(value, Map.new(options))
  end

  def encode!(value, options) do
    iodata = Encoder.encode(value, options)

    # Callers may opt into raw iodata; the default is a flat binary.
    if options[:iodata] do
      iodata
    else
      IO.iodata_to_binary(iodata)
    end
  end

  @doc """
  Encode a value to JSON as iodata, raises an exception on error.
  """
  @spec encode_to_iodata!(Encoder.t, keyword | Encoder.options) :: iodata | no_return
  # Fix: the default argument was previously declared on the *second* clause
  # of this multi-clause function, which modern Elixir rejects ("definitions
  # with multiple clauses and default values require a function head").
  # Declare it once on a bodiless head instead.
  def encode_to_iodata!(value, options \\ %{})

  def encode_to_iodata!(value, options) when is_list(options) do
    encode_to_iodata!(value, Map.new(options))
  end

  def encode_to_iodata!(value, options) do
    Encoder.encode(value, options)
  end

  @doc """
  Decode JSON to a value.

      iex> Poison.decode("[1,2,3]")
      {:ok, [1, 2, 3]}
  """
  @spec decode(iodata) :: {:ok, Parser.t}
    | {:error, ParseError.t}
  @spec decode(iodata, keyword | Decoder.options) :: {:ok, any}
    | {:error, ParseError.t | DecodeError.t}
  def decode(iodata, options \\ %{}) do
    {:ok, decode!(iodata, options)}
  rescue
    exception in [ParseError, DecodeError] ->
      {:error, exception}
  end

  @doc """
  Decode JSON to a value, raises an exception on error.

      iex> Poison.decode!("[1,2,3]")
      [1, 2, 3]
  """
  @spec decode!(iodata) :: Parser.t | no_return
  def decode!(value) do
    Parser.parse!(value, %{})
  end

  @spec decode!(iodata, keyword | Decoder.options) :: Decoder.t | no_return
  def decode!(value, options) when is_list(options) do
    decode!(value, Map.new(options))
  end

  # With an `:as` target, run the decode/transform pipeline into that struct.
  def decode!(value, %{as: as} = options) when as != nil do
    value
    |> Parser.parse!(options)
    |> Decode.transform(options)
    |> Decoder.decode(options)
  end

  def decode!(value, options) do
    Parser.parse!(value, options)
  end
end
|
lib/poison.ex
| 0.775987
| 0.519765
|
poison.ex
|
starcoder
|
defmodule CyberSourceSDK do
  @moduledoc """
  This CyberSource module communicates with the Simple Order API
  service (SOAP) of CyberSource.
  """

  use Application

  alias CyberSourceSDK.Client
  alias CyberSourceSDK.Helper

  @doc false
  def start(_type, _args) do
    # Explicit child-spec map replaces the deprecated `Supervisor.Spec.worker/2`
    # while preserving its exact semantics: a `:permanent` worker started via
    # `CyberSourceSDK.Client.start_link/0` (no arguments).
    children = [
      %{
        id: CyberSourceSDK.Client,
        start: {CyberSourceSDK.Client, :start_link, []},
        type: :worker,
        restart: :permanent
      }
    ]

    opts = [strategy: :one_for_one, name: CyberSourceSDK.Supervisor]
    Supervisor.start_link(children, opts)
  end

  @doc """
  Send an authorization request, making sure that the user have the necessary
  amount of money in his/her account.
  ## Parameters
  - price: Float that represents the price to be charged to the user.
  - merchant_reference_code: String that represents the order. Normally you should pass an unique identifier like `order_id`.
  - card_type: String with the name of card type, like VISA, MASTERCARD, etc.
  - encrypted_payment: String that must be in Base64 received by Apple/Android payment system.
  - bill_to: Structure generated by `CyberSourceSDK.bill_to()`. (Optional)
  - worker: Atom with name of the structure in configurations to be used. (Optional)
  """
  @spec authorize(float(), String.t(), String.t(), String.t(), list(String.t()), atom()) ::
          {:ok} | {:error, atom()} | {:error, String.t()}
  defdelegate authorize(
                price,
                merchant_reference_code,
                card_type,
                encrypted_payment,
                bill_to \\ [],
                worker \\ :merchant
              ),
              to: Client

  @doc """
  Send a capture request to charge the user account.
  """
  @spec capture(String.t(), String.t(), list(), atom()) ::
          {:ok} | {:error, atom()} | {:error, String.t()}
  defdelegate capture(order_id, request_id, items \\ [], worker \\ :merchant), to: Client

  @doc """
  Send a refund request o remove the hold on user money.
  """
  @spec refund(String.t(), String.t(), list(), atom()) ::
          {:ok} | {:error, atom()} | {:error, String.t()}
  defdelegate refund(order_id, request_id, items \\ [], worker \\ :merchant), to: Client

  @doc """
  Pay with Android Pay request
  """
  @spec pay_with_android_pay(
          float(),
          String.t(),
          String.t(),
          String.t(),
          list(String.t()),
          atom()
        ) :: {:ok} | {:error, atom()} | {:error, String.t()}
  defdelegate pay_with_android_pay(
                price,
                merchant_reference_code,
                card_type,
                encrypted_payment,
                bill_to \\ [],
                worker \\ :merchant
              ),
              to: Client

  @doc """
  Pay with Apple Pay request
  """
  @spec pay_with_apple_pay(float(), String.t(), String.t(), String.t(), list(String.t()), atom()) ::
          {:ok} | {:error, atom()} | {:error, String.t()}
  defdelegate pay_with_apple_pay(
                price,
                merchant_reference_code,
                card_type,
                encrypted_payment,
                bill_to \\ [],
                worker \\ :merchant
              ),
              to: Client

  @doc """
  Create a credit card token
  ## Example
  ```
  bill_to = CyberSourceSDK.bill_to("John", "Doe", "Marylane Street", "34", "New York", "12345", "NY", "USA", "<EMAIL>")
  credit_card = CyberSourceSDK.credit_card("4111111111111111", "12", "2020")
  create_credit_card_token("1<PASSWORD>", credit_card, bill_to)
  ```
  """
  @spec create_credit_card_token(
          String.t(),
          keyword() | nil,
          keyword() | nil,
          atom()
        ) :: {:ok, map()} | {:error, atom()} | {:error, String.t()}
  defdelegate create_credit_card_token(merchant_reference_code, credit_card, bill_to, worker \\ :merchant),
    to: Client

  @doc """
  Update a credit card
  ## Example
  ```
  bill_to = CyberSourceSDK.bill_to(nil, nil, nil, nil, nil, nil, nil, nil, "<EMAIL>") # can also be nil
  credit_card = CyberSourceSDK.credit_card(nil, "12", "2024", nil) # can also be nil
  update_credit_card("1234", "XXXXXXXX", credit_card, bill_to)
  ```
  """
  @spec update_credit_card(
          String.t(),
          String.t(),
          keyword(),
          keyword(),
          atom()
        ) :: {:ok, map()} | {:error, atom()} | {:error, String.t()}
  defdelegate update_credit_card(
                merchant_reference_code,
                token,
                credit_card,
                bill_to,
                worker \\ :merchant
              ),
              to: Client

  @doc """
  Retrieve a credit card by token + reference
  ## Example
  ```
  retrieve_credit_card("1234", "XXXXXXXXXXXXXXXX")
  ```
  """
  @spec retrieve_credit_card(
          String.t(),
          String.t(),
          atom()
        ) :: {:ok, map()} | {:error, atom()} | {:error, String.t()}
  defdelegate retrieve_credit_card(merchant_reference_code, token, worker \\ :merchant), to: Client

  @doc """
  Delete a credit card by token + reference
  ## Example
  ```
  delete_credit_card("1234", "XXXXXXXXXXXXXXXX")
  ```
  """
  @spec delete_credit_card(
          String.t(),
          String.t(),
          atom()
        ) :: {:ok, map()} | {:error, atom()} | {:error, String.t()}
  defdelegate delete_credit_card(merchant_reference_code, token, worker \\ :merchant), to: Client

  @doc """
  Charge a credit card by token
  ## Example
  ```
  charge_credit_card(10.00, "1234", "XXXXXXXXXXXXXXXX")
  ```
  """
  @spec charge_credit_card(
          float(),
          String.t(),
          String.t(),
          atom()
        ) :: {:ok, map()} | {:error, atom()} | {:error, String.t()}
  defdelegate charge_credit_card(price, merchant_reference_code, token, worker \\ :merchant),
    to: Client

  @doc """
  Authorise a credit card by token
  ## Example
  ```
  auth_credit_card(10.00, "1234", "XXXXXXXXXXXXXXXX")
  ```
  """
  @spec auth_credit_card(
          float(),
          String.t(),
          String.t(),
          atom()
        ) :: {:ok, map()} | {:error, atom()} | {:error, String.t()}
  defdelegate auth_credit_card(price, merchant_reference_code, token, worker \\ :merchant),
    to: Client

  @doc """
  Generate BillTo object to replace parameters in request XML
  ## Examples
  iex> CyberSourceSDK.bill_to("John", "Doe", "Main Street", "2 Left", "New York", "12345", "NY", "USA", "<EMAIL>")
  [first_name: "John", last_name: "Doe", street1: "Main Street", street2: "2 Left", city: "New York", post_code: "12345", state: "NY", country: "USA", email: "<EMAIL>"]
  """
  @spec bill_to(
          String.t() | nil,
          String.t() | nil,
          String.t() | nil,
          String.t() | nil,
          String.t() | nil,
          String.t() | nil,
          String.t() | nil,
          String.t() | nil,
          String.t() | nil
        ) :: list(String.t())
  def bill_to(first_name, last_name, street1, street2, city, post_code, state, country, email) do
    # Field order matters: it must match the placeholder order in the request XML.
    [
      first_name: first_name,
      last_name: last_name,
      street1: street1,
      street2: street2,
      city: city,
      post_code: post_code,
      state: state,
      country: country,
      email: email
    ]
  end

  @doc """
  Generate creditCard object to replace parameters in request XML
  ## Examples
  iex> CyberSourceSDK.credit_card("4111111111111111", "12", "2020")
  [card_number: "4111111111111111", expiration_month: "12", expiration_year: "2020", card_type: "001"]
  """
  @spec credit_card(
          String.t() | nil,
          String.t() | nil,
          String.t() | nil
        ) :: list(String.t())
  def credit_card(card_number, expiration_month, expiration_year) do
    [
      card_number: card_number,
      expiration_month: expiration_month,
      expiration_year: expiration_year,
      # The CyberSource card-type code is derived from the number; a nil
      # number (e.g. partial updates) yields a nil card type.
      card_type:
        if(is_nil(card_number),
          do: nil,
          else: Client.get_card_type(Helper.card_type_from_number(card_number))
        )
    ]
  end
end
|
lib/cybersource-sdk/cybersource_sdk.ex
| 0.865153
| 0.436802
|
cybersource_sdk.ex
|
starcoder
|
defmodule DateTimeParser.Parser.Serial do
  @moduledoc """
  Parses a spreadsheet Serial timestamp. This is gated by the number of present digits. It must
  contain 1 through 5 digits that represent days, with an optional precision of up to 10 digits that
  represents time. Negative serial timestamps are supported.
  Microsoft Excel has, since its earliest versions, incorrectly considered 1900 to be a leap year,
  and therefore that February 29, 1900 comes between February 28 and March 1 of that year. The bug
  originated from Lotus 1-2-3 and was purposely implemented in Excel for the purpose of backward
  compatibility. Microsoft has written an article about this bug, explaining the reasons for
  treating 1900 as a leap year. This bug has been promoted into a requirement in the Ecma Office
  Open XML (OOXML) specification.
  Microsoft Excel on Macintosh defaults to using the 1904 date system. By default, this parser will
  assume the 1900. If you want to opt-into the 1904 date system, see the `t:use_1904_date_system`
  option.
  See more at https://en.wikipedia.org/wiki/Leap_year_bug
  """
  @behaviour DateTimeParser.Parser

  # "days": 1-5 digits, optionally negative. "time": optional fractional part,
  # up to 10 digits of precision.
  @serial_regex ~r|\A(?<days>-?\d{1,5})(?:\.(?<time>\d{1,10}))?\z|

  @impl DateTimeParser.Parser
  def preflight(%{string: string} = parser) do
    case Regex.named_captures(@serial_regex, string) do
      nil -> {:error, :not_compatible}
      results -> {:ok, %{parser | preflight: results}}
    end
  end

  @impl DateTimeParser.Parser
  # Whole-day serial (no fractional time part), parsed as an integer so the
  # result is a Date rather than a NaiveDateTime.
  #
  # Fix: the previous clause matched `%{"time" => nil, "day" => day}`, but the
  # regex capture is named "days" (not "day") and `Regex.named_captures/2`
  # returns `""` (not `nil`) for an unmatched optional group — so the clause
  # could never match and integer serials always took the float path below.
  def parse(%{preflight: %{"time" => "", "days" => days}} = parser) do
    case Integer.parse(days) do
      {num, ""} -> from_tokens(parser, num)
      _ -> {:error, :failed_to_parse_integer}
    end
  end

  # Serial with a fractional time component, parsed as a float.
  def parse(%{string: string} = parser) do
    case Float.parse(string) do
      {num, ""} -> from_tokens(parser, num)
      _ -> {:error, :failed_to_parse_float}
    end
  end

  # Applies the date-system offset, converts the serial to a Date or
  # NaiveDateTime, and coerces it to the requested context.
  defp from_tokens(%{context: context, opts: opts}, serial) do
    with serial <- set_date_system(serial, opts),
         {:ok, date_or_datetime} <- from_serial(serial) do
      for_context(context, date_or_datetime, opts[:assume_time])
    end
  end

  # Coercions between the parsed value and the caller's requested context.
  # A bare Date only becomes a :datetime when an assume_time is provided.
  defp for_context(:datetime, %NaiveDateTime{} = ndt, _), do: {:ok, ndt}
  defp for_context(:datetime, %Date{} = date, true), do: assume_time(date, ~T[00:00:00])
  defp for_context(:datetime, %Date{} = date, %Time{} = time), do: assume_time(date, time)
  defp for_context(:date, %Date{} = date, _), do: {:ok, date}
  defp for_context(:date, %NaiveDateTime{} = ndt, _), do: {:ok, NaiveDateTime.to_date(ndt)}
  defp for_context(:time, %NaiveDateTime{} = ndt, _), do: {:ok, NaiveDateTime.to_time(ndt)}

  defp for_context(context, result, _opts) do
    {:error, "cannot convert #{inspect(result)} to context #{context}"}
  end

  # Float serial: whole part is days, fractional part is the fraction of a day.
  defp from_serial(float) when is_float(float) do
    {serial_date, serial_time} = split_float(float)
    erl_time = time_from_serial(serial_time)
    erl_date = date_from_serial(serial_date)
    NaiveDateTime.from_erl({erl_date, erl_time})
  end

  # Integer serial: days only, yielding a Date.
  defp from_serial(integer) when is_integer(integer) do
    erl_date = date_from_serial(integer)
    Date.from_erl(erl_date)
  end

  defp assume_time(%Date{} = date, %Time{} = time) do
    NaiveDateTime.new(
      date.year,
      date.month,
      date.day,
      time.hour,
      time.minute,
      time.second,
      time.microsecond
    )
  end

  defp time_from_serial(0.0), do: {0, 0, 0}

  # Successively peel hours, minutes and seconds out of the day fraction;
  # sub-second precision is discarded.
  defp time_from_serial(serial_time) do
    {hours, min_fraction} = split_float(serial_time * 24)
    {minutes, sec_fraction} = split_float(min_fraction * 60)
    {seconds, _microseconds} = split_float(sec_fraction * 60)
    {hours, minutes, seconds}
  end

  # Serial day 0 corresponds to 1899-12-31 (the 1900 date system).
  defp date_from_serial(serial_date) do
    {1899, 12, 31}
    |> :calendar.date_to_gregorian_days()
    |> Kernel.+(serial_date)
    |> adjust_for_lotus_bug
    |> :calendar.gregorian_days_to_date()
  end

  defp split_float(float) when float >= 0 do
    whole = float |> Float.floor() |> round()
    {whole, float - whole}
  end

  # For negative serials the fraction still runs forward within the day,
  # hence the 1 - fraction complement.
  defp split_float(float) when float < 0 do
    whole = abs(float) |> Float.floor() |> round()
    fraction = 1 - (abs(float) - whole)
    fraction = if fraction == 1.0, do: 0.0, else: fraction
    {whole * -1, fraction}
  end

  # Skip the phantom 1900-02-29 that Lotus/Excel insert (serial 60).
  defp adjust_for_lotus_bug(day) when day > 59, do: day - 1
  defp adjust_for_lotus_bug(day), do: day

  # The 1904 date system starts 1462 days after the 1900 system.
  defp set_date_system(serial, opts) do
    if Keyword.get(opts, :use_1904_date_system, false), do: serial + 1462, else: serial
  end
end
|
lib/parser/serial.ex
| 0.881971
| 0.471041
|
serial.ex
|
starcoder
|
defmodule Meeple.Territory.One do
  @moduledoc """
  Static definition of territory "one": the field template, the pools of
  field data per group, starting pawns, inventory and fog of war.
  """

  alias Sim.Grid
  alias Meeple.Pawn

  @doc """
  Returns the static description of the territory: the field template,
  the ordered group names, and the concrete field pools for the named groups.
  """
  def all() do
    # Fix: `group/1` is only defined for atom keys, so the previous
    # `group(0)` / `group(1)` calls always raised FunctionClauseError.
    # Group index 0 is :headquarter; index 1 (:mountains_south) is populated
    # from the :back_mountains pool in create_ground/2 — TODO confirm naming.
    %{
      fields: fields(),
      group_names: group_names() |> Enum.map(fn {_n, key} -> key end),
      groups: %{headquarter: group(:headquarter), mountains_south: group(:back_mountains)}
    }
  end

  # Starting position of the headquarter on the 15x7 grid.
  def headquarter(), do: {7, 1}

  def pawns() do
    [%Pawn{id: 1, x: 7, y: 1}]
  end

  def inventory() do
    [flintstone: 1, berry: 3]
  end

  def xp_pool() do
    %{red: 0, yellow: 0, green: 0, blue: 0, purple: 0}
  end

  # Visibility radius per coordinate: only the headquarter and two far
  # vantage points start revealed; everything else is fogged.
  def create_fog(7, 1), do: 5
  def create_fog(4, 5), do: 1
  def create_fog(11, 5), do: 1
  def create_fog(_x, _y), do: 0

  @doc """
  Builds the ground grid: randomizes which field pool lands on which template
  group, shuffles each pool, then distributes the fields over the template.
  """
  def create_ground(width, height) do
    [x, y, z] = Enum.shuffle([:home_hills, :home_mountains, :home_woods])
    [a, b, c, d] = Enum.shuffle([:woods, :planes, :hills, :lake])
    [m, n, o, p] = Enum.shuffle([:mountains, :mountains, :enemy_hills, :swamp])

    Grid.create(width, height, fields())
    |> populate(%{
      0 => group(:headquarter) |> Enum.shuffle(),
      1 => group(:back_mountains) |> Enum.shuffle(),
      2 => group(x) |> Enum.shuffle(),
      3 => group(y) |> Enum.shuffle(),
      4 => group(z) |> Enum.shuffle(),
      5 => group(a) |> Enum.shuffle(),
      6 => group(b) |> Enum.shuffle(),
      7 => group(c) |> Enum.shuffle(),
      8 => group(d) |> Enum.shuffle(),
      9 => group(m) |> Enum.shuffle(),
      10 => group(n) |> Enum.shuffle(),
      11 => group(o) |> Enum.shuffle(),
      12 => group(p) |> Enum.shuffle(),
      13 => group(a) |> Enum.shuffle()
    })
  end

  @doc """
  Folds `func` over every coordinate of a `width` x `height` grid, threading
  an accumulator, and returns the resulting grid.
  """
  def reduce_grid(width, height, acc, func) do
    grid = Grid.create(width, height)

    # Fix: the ranges were hard-coded to 0..14 / 0..6, silently ignoring the
    # width/height parameters; derive them from the arguments instead
    # (identical behavior for the existing 15x7 caller).
    {_, new_grid} =
      Enum.reduce(0..(width - 1), {acc, grid}, fn x, {acc, grid} ->
        Enum.reduce(0..(height - 1), {acc, grid}, fn y, {acc, grid} ->
          {field, new_acc} = func.(x, y, acc)
          new_grid = Grid.put(grid, x, y, field)
          {new_acc, new_grid}
        end)
      end)

    new_grid
  end

  @doc """
  Distributes the shuffled field pools over the template grid: each template
  cell names a group index, and each visit pops the next field off that pool.
  """
  def populate(template, groups) do
    reduce_grid(15, 7, groups, fn x, y, groups ->
      key = Grid.get(template, x, y)

      case Map.get(groups, key) do
        nil -> {key, groups}
        [field | rest] -> {field, Map.put(groups, key, rest)}
      end
    end)
  end

  # 15 * 7 fields
  # Template of group indices (see group_names/0 for the index -> name map).
  def fields() do
    [
      [11, 11, 11, 8, 8, 8, 8, 13, 7, 7, 7, 7, 12, 12, 12],
      [11, 11, 5, 8, 8, 8, 8, 4, 7, 7, 7, 7, 6, 12, 12],
      [11, 5, 5, 5, 8, 8, 4, 4, 4, 7, 7, 6, 6, 6, 12],
      [5, 5, 5, 5, 5, 3, 4, 4, 4, 2, 6, 6, 6, 6, 6],
      [9, 5, 5, 5, 3, 3, 3, 4, 2, 2, 2, 6, 6, 6, 10],
      [9, 9, 5, 3, 3, 3, 3, 0, 2, 2, 2, 2, 6, 10, 10],
      [9, 9, 9, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10, 10, 10]
    ]
  end

  # Index -> name mapping for the template above; the trailing comments give
  # the number of cells each group occupies in the template.
  def group_names() do
    [
      {0, :headquarter},
      # 9
      {1, :mountains_south},
      # 8
      {2, :home_east},
      # 8
      {3, :home_west},
      # 8
      {4, :home_north},
      # 13
      {5, :west},
      # 13
      {6, :east},
      # 12
      {7, :north_east},
      # 12
      {8, :north_west},
      # 6 / 13
      {9, :mountains_south_west},
      # 6 / 13
      {10, :mountains_south_east},
      # 6 / 13
      {11, :mountains_west},
      # 6 / 13
      {12, :mountains_east},
      # 1, 13
      {13, :north}
    ]
  end

  def group(:headquarter) do
    [%{vegetation: :mountains, building: :headquarter}]
  end

  def group(:back_mountains) do
    [
      %{vegetation: :high_mountains, flora: [:berry, 2]},
      %{vegetation: :high_mountains, flora: [:berry, 2], danger: [:snake, 3]},
      %{vegetation: :high_mountains, danger: [:rockfall, 3]},
      %{vegetation: :high_mountains, danger: [:rockfall, 2]},
      %{vegetation: :high_mountains, danger: [:rockfall, 2]},
      %{vegetation: :high_mountains},
      %{vegetation: :high_mountains},
      %{vegetation: :high_mountains},
      %{vegetation: :high_mountains}
    ]
  end

  def group(:home_woods) do
    [
      %{vegetation: :woods, predator: [:fox, 3]},
      %{vegetation: :woods, herbivore: [:rabbit, 5]},
      %{vegetation: :woods, flora: [:berry, 4], herbivore: [:rabbit, 4]},
      %{vegetation: :woods, flora: [:berry, 4], herbivore: [:partridge, 3]},
      %{vegetation: :woods, flora: [:berry, 4], herbivore: [:partridge, 3]},
      %{vegetation: :woods, flora: [:berry, 4], herbivore: [:boar, 8]},
      %{vegetation: :woods, flora: [:berry, 4]},
      %{vegetation: :woods, danger: [:snake, 1]}
    ]
  end

  def group(:home_mountains) do
    [
      %{vegetation: :mountains, predator: [:lynx, 3]},
      %{vegetation: :mountains, herbivore: [:rabbit, 5]},
      %{vegetation: :mountains, herbivore: [:goat, 4]},
      %{vegetation: :mountains, flora: [:berry, 4], herbivore: [:goat, 3]},
      %{vegetation: :mountains, flora: [:berry, 4]},
      %{vegetation: :mountains, flora: [:berry, 4], herbivore: [:partridge, 3]},
      %{vegetation: :mountains, danger: [:rockfall, 2]},
      %{vegetation: :mountains, danger: [:snake, 1]}
    ]
  end

  def group(:home_hills) do
    [
      %{vegetation: :hills, predator: [:fox, 3]},
      %{vegetation: :hills, herbivore: [:partridge, 5]},
      %{vegetation: :hills, flora: [:berry, 4], herbivore: [:partridge, 4]},
      %{vegetation: :hills, flora: [:berry, 4], herbivore: [:partridge, 3]},
      %{vegetation: :hills, flora: [:berry, 4], herbivore: [:rabbit, 3]},
      %{vegetation: :hills, flora: [:berry, 4]},
      %{vegetation: :hills},
      %{vegetation: :hills, danger: [:snake, 1]}
    ]
  end

  def group(:hills) do
    [
      %{vegetation: :hills, flora: [:berry, 2], predator: [:wolf, 3]},
      %{vegetation: :hills, flora: [:berry, 2], predator: [:bear, 3]},
      %{vegetation: :hills, flora: [:berry, 2], predator: [:fox, 3]},
      %{vegetation: :hills, flora: [:berry, 3], herbivore: [:rabbit, 5]},
      %{vegetation: :hills, flora: [:berry, 2], herbivore: [:rabbit, 4]},
      %{vegetation: :hills, flora: [:root, 4], herbivore: [:rabbit, 3]},
      %{vegetation: :hills, flora: [:root, 4], herbivore: [:partridge, 3]},
      %{vegetation: :hills, flora: [:root, 2], herbivore: [:partridge, 3]},
      %{vegetation: :hills, flora: [:mushroom, 4], herbivore: [:partridge, 3]},
      %{vegetation: :hills, flora: [:mushroom, 3], herbivore: [:partridge, 3]},
      %{vegetation: :hills, flora: [:berry, 2], herbivore: [:deer, 3]},
      %{vegetation: :hills, flora: [:berry, 2], herbivore: [:deer, 3]},
      %{vegetation: :hills, flora: [:berry, 2], herbivore: [:aurochs, 3]}
    ]
  end

  def group(:planes) do
    [
      %{vegetation: :planes, flora: [:berry, 2], predator: [:cave_lion, 3]},
      %{vegetation: :planes, flora: [:berry, 2], predator: [:wolf, 3]},
      %{vegetation: :planes, flora: [:berry, 2], predator: [:fox, 3]},
      %{vegetation: :planes, flora: [:berry, 3], herbivore: [:rabbit, 5]},
      %{vegetation: :planes, flora: [:berry, 2], herbivore: [:rabbit, 4]},
      %{vegetation: :planes, flora: [:root, 4], herbivore: [:rabbit, 3]},
      %{vegetation: :planes, flora: [:root, 4], herbivore: [:partridge, 3]},
      %{vegetation: :planes, flora: [:root, 2], herbivore: [:partridge, 3]},
      %{vegetation: :planes, flora: [:mushroom, 4], herbivore: [:partridge, 3]},
      %{vegetation: :planes, flora: [:mushroom, 3], herbivore: [:partridge, 3]},
      %{vegetation: :planes, flora: [:berry, 2], herbivore: [:aurochs, 3]},
      %{vegetation: :planes, flora: [:berry, 2], herbivore: [:aurochs, 3]},
      %{vegetation: :planes, flora: [:berry, 2], herbivore: [:aurochs, 3]}
    ]
  end

  def group(:woods) do
    [
      %{vegetation: :woods, flora: [:berry, 2], predator: [:bear, 3]},
      %{vegetation: :woods, flora: [:berry, 2], predator: [:wolf, 3]},
      %{vegetation: :woods, flora: [:berry, 2], predator: [:fox, 3]},
      %{vegetation: :woods, flora: [:berry, 3], herbivore: [:rabbit, 5]},
      %{vegetation: :woods, flora: [:berry, 2], herbivore: [:rabbit, 4]},
      %{vegetation: :woods, flora: [:root, 4], herbivore: [:rabbit, 3]},
      %{vegetation: :woods, flora: [:root, 4], herbivore: [:partridge, 3]},
      %{vegetation: :woods, flora: [:root, 2], herbivore: [:partridge, 3]},
      %{vegetation: :woods, flora: [:mushroom, 4], herbivore: [:partridge, 3]},
      %{vegetation: :woods, flora: [:mushroom, 3], herbivore: [:partridge, 3]},
      %{vegetation: :woods, flora: [:berry, 2], herbivore: [:deer, 3]},
      %{vegetation: :woods, flora: [:berry, 2], herbivore: [:deer, 3]},
      %{vegetation: :woods, flora: [:berry, 2], herbivore: [:deer, 3]}
    ]
  end

  def group(:lake) do
    [
      %{vegetation: :woods, flora: [:berry, 2], predator: [:bear, 3]},
      %{vegetation: :woods, flora: [:berry, 2], predator: [:wolf, 3]},
      %{vegetation: :woods, flora: [:berry, 2], predator: [:fox, 3]},
      %{vegetation: :woods, flora: [:berry, 3], herbivore: [:rabbit, 5]},
      %{vegetation: :woods, flora: [:berry, 2], herbivore: [:rabbit, 4]},
      %{vegetation: :woods, flora: [:root, 4], herbivore: [:rabbit, 3]},
      %{vegetation: :planes, flora: [:root, 4], herbivore: [:partridge, 3]},
      %{vegetation: :planes, flora: [:root, 2], herbivore: [:partridge, 3]},
      %{vegetation: :planes, flora: [:mushroom, 4], herbivore: [:partridge, 3]},
      %{vegetation: :planes, flora: [:mushroom, 3], herbivore: [:partridge, 3]},
      %{vegetation: :hills, flora: [:berry, 2], herbivore: [:deer, 3]},
      %{vegetation: :hills, flora: [:berry, 2], herbivore: [:deer, 3]},
      %{vegetation: :lake, herbivore: [:fish, 5]}
    ]
  end

  def group(:mountains) do
    [
      %{vegetation: :mountains, flora: [:berry, 2], predator: [:bear, 3]},
      %{vegetation: :mountains, flora: [:berry, 2], predator: [:lynx, 3]},
      %{vegetation: :mountains, flora: [:berry, 2], predator: [:cave_lion, 3]},
      %{vegetation: :mountains, flora: [:berry, 3], herbivore: [:rabbit, 5]},
      %{vegetation: :mountains, flora: [:berry, 2], herbivore: [:rabbit, 4]},
      %{vegetation: :mountains, flora: [:root, 4], herbivore: [:rabbit, 3]},
      %{vegetation: :mountains, flora: [:root, 4], herbivore: [:goat, 3]},
      %{vegetation: :mountains, flora: [:root, 2], herbivore: [:goat, 3]},
      %{vegetation: :mountains, flora: [:mushroom, 4], herbivore: [:partridge, 3]},
      %{vegetation: :mountains, flora: [:mushroom, 3], herbivore: [:partridge, 3]},
      %{vegetation: :mountains, flora: [:berry, 2], herbivore: [:goat, 3]},
      %{vegetation: :mountains, flora: [:berry, 2], herbivore: [:goat, 3]},
      %{vegetation: :mountains, flora: [:herbs, 2], herbivore: [:goat, 3]}
    ]
  end

  def group(:swamp) do
    [
      %{vegetation: :swamps, flora: [:berry, 2], predator: [:bear, 3]},
      %{vegetation: :swamps, flora: [:berry, 2], predator: [:wolf, 3]},
      %{vegetation: :swamps, flora: [:berry, 2], predator: [:fox, 3]},
      %{vegetation: :swamps, flora: [:berry, 3], herbivore: [:rabbit, 5]},
      %{vegetation: :swamps, flora: [:berry, 2], herbivore: [:rabbit, 4]},
      %{vegetation: :swamps, flora: [:root, 4], herbivore: [:rabbit, 3]},
      %{vegetation: :swamps, flora: [:root, 4], herbivore: [:boar, 3]},
      %{vegetation: :swamps, flora: [:root, 2], herbivore: [:boar, 3]},
      %{vegetation: :swamps, flora: [:mushroom, 4], herbivore: [:partridge, 3]}
    ]
  end

  def group(:enemy_hills) do
    [
      %{vegetation: :hills, flora: [:berry, 2], predator: [:bear, 3]},
      %{vegetation: :hills, flora: [:berry, 2], predator: [:wolf, 3]},
      %{vegetation: :hills, flora: [:berry, 2], predator: [:fox, 3]},
      %{vegetation: :hills, flora: [:berry, 3], herbivore: [:rabbit, 5]},
      %{vegetation: :hills, flora: [:berry, 2], herbivore: [:rabbit, 4]},
      %{vegetation: :hills, flora: [:root, 4], herbivore: [:rabbit, 3]},
      %{vegetation: :hills, flora: [:root, 4], herbivore: [:boar, 3]},
      %{vegetation: :hills, flora: [:root, 2], herbivore: [:boar, 3]},
      %{vegetation: :hills, flora: [:mushroom, 4], herbivore: [:partridge, 3]}
    ]
  end
end
|
apps/meeple/lib/meeple/territory/one.ex
| 0.515864
| 0.463687
|
one.ex
|
starcoder
|
defmodule Beamchmark.Suite.CPU.CpuTask do
@moduledoc """
This module contains the CPU benchmarking task.
Measurements are performed using [`:cpu_sup.util/1`](https://www.erlang.org/doc/man/cpu_sup.html)
Currently (according to docs), as busy processor states we identify:
- user
- nice_user (low priority use mode)
- kernel
Run example:
```
CpuTask.start_link()
```
"""
use Task
alias Beamchmark.Suite.Measurements.CpuInfo
alias Beamchmark.Utils
@interfere_timeout 100
@doc """
"""
@spec start_link(cpu_interval :: pos_integer(), duration :: pos_integer()) :: Task.t()
def start_link(cpu_interval, duration) do
Task.async(fn ->
run_poll(cpu_interval, duration)
end)
end
@spec run_poll(number(), number()) :: {:ok, CpuInfo.t()}
defp run_poll(cpu_interval, duration) do
do_run_poll(Utils.get_os_name(), cpu_interval, duration)
end
@spec do_run_poll(atom(), number(), number()) :: {:ok, CpuInfo.t()}
defp do_run_poll(:Windows, cpu_interval, duration) do
iterations_number = trunc(duration / cpu_interval)
pid = self()
spawn_snapshot = fn _it_num ->
spawn(fn -> cpu_snapshot_windows(pid) end)
Process.sleep(cpu_interval)
end
Task.async(fn ->
Enum.each(0..(iterations_number - 1), spawn_snapshot)
end)
cpu_snapshots = receive_snapshots(iterations_number)
{:ok, CpuInfo.from_cpu_snapshots(cpu_snapshots)}
end
defp do_run_poll(_os, cpu_interval, duration) do
iterations_number = trunc(duration / cpu_interval)
:cpu_sup.start()
# First run returns garbage acc to docs
:cpu_sup.util([:per_cpu])
# And the fact of measurement is polluting the results,
# So we need to wait for @interfere_timeout
Process.sleep(@interfere_timeout)
if cpu_interval < @interfere_timeout do
raise "cpu_interval (#{cpu_interval}) can't be less than #{@interfere_timeout}"
end
cpu_snapshots =
Enum.reduce(0..(iterations_number - 1), [], fn _x, cpu_snapshots ->
cpu_snapshots = [cpu_snapshot() | cpu_snapshots]
Process.sleep(cpu_interval)
cpu_snapshots
end)
{:ok, CpuInfo.from_cpu_snapshots(cpu_snapshots)}
end
defp receive_snapshots(snapshots_no, cpu_snapshots \\ []) do
case snapshots_no do
0 ->
cpu_snapshots
_snapshots_no ->
cpu_snapshots =
receive do
{:cpu_snapshot, snapshot} ->
[snapshot | cpu_snapshots]
end
receive_snapshots(snapshots_no - 1, cpu_snapshots)
end
end
@spec cpu_snapshot_windows(pid()) :: nil
defp cpu_snapshot_windows(pid) do
{cpu_util_result, 0} = System.cmd("wmic", ["cpu", "get", "loadpercentage"])
average_all_cores =
try do
cpu_util_result
|> String.split("\r\r\n")
|> Enum.at(1)
|> String.trim()
|> Float.parse()
|> elem(0)
rescue
ArgumentError -> 0.0
end
send(
pid,
{:cpu_snapshot,
%{
cpu_usage: %{},
average_all_cores: average_all_cores
}}
)
end
# Takes a single utilization sample from :cpu_sup (os_mon) and reduces it
# to a per-core usage map plus the arithmetic mean across all cores.
@spec cpu_snapshot() :: CpuInfo.cpu_snapshot_t()
defp cpu_snapshot() do
  per_core_usage =
    [:per_cpu]
    |> :cpu_sup.util()
    |> Map.new(fn {core_id, usage, _idle, _mix} -> {core_id, usage} end)

  mean_usage =
    per_core_usage
    |> Map.values()
    |> Enum.sum()
    |> Kernel./(map_size(per_core_usage))

  %{
    cpu_usage: per_core_usage,
    average_all_cores: mean_usage
  }
end
end
|
lib/beamchmark/suite/cpu/cpu_task.ex
| 0.831143
| 0.789437
|
cpu_task.ex
|
starcoder
|
defmodule Rediscl.Query.Pipe do
  @moduledoc """
  Pipe query builder.

  Builds raw Redis command lists (via `Rediscl.Query.Api`) out of a keyword
  list of `{command, arguments}` pairs, validating argument counts and types
  before delegating to the corresponding API function. Invalid arguments
  raise `ArgumentError`.
  """

  defstruct [
    :set,
    :get,
    :mset,
    :mget,
    :del,
    :lpush,
    :rpush,
    :lrange,
    :lrem,
    :lset,
    :append,
    :exists,
    :setex,
    :setnx,
    :setrange,
    :psetex,
    :getrange,
    :getset,
    :strlen,
    :incr,
    :incrby,
    :incrbyfloat,
    :msetnx,
    :decr,
    :decrby,
    :sadd,
    :scard,
    :sdiff,
    :sdiffstore,
    :sinter,
    :sinterstore,
    :sismember,
    :smembers,
    :smove,
    :spop,
    :srandmember,
    :srem,
    :sscan,
    :sunion,
    :sunionstore,
    :zadd,
    :zcard,
    :zcount,
    :zincrby,
    :zinter,
    :zinterstore,
    :zlexcount,
    :zrange,
    :zrangebylex,
    :zrangebyscore,
    :zrank,
    :zrem,
    :zremrangebylex,
    :zremrangebyrank,
    :zremrangebyscore,
    :zrevrange,
    :zrevrangebylex,
    :zrevrangebyscore,
    :zrevrank,
    :zscore,
    :zunion,
    :zunionstore,
    :zscan
  ]

  # Commands accepted by the `build({type, expr})` clause head.
  # NOTE(review): many of these (the set/sorted-set commands) have no
  # matching `build/2` clause below, so building them raises
  # FunctionClauseError — confirm whether those clauses are still pending.
  @pipes [
    :set,
    :get,
    :mset,
    :mget,
    :del,
    :lpush,
    :rpush,
    :lrange,
    :lrem,
    :lset,
    :append,
    :exists,
    :setex,
    :setnx,
    :setrange,
    :psetex,
    :getrange,
    :getset,
    :strlen,
    :incr,
    :incrby,
    :incrbyfloat,
    :msetnx,
    :decr,
    :decrby,
    :sadd,
    :scard,
    :sdiff,
    :sdiffstore,
    :sinter,
    :sinterstore,
    :sismember,
    :smembers,
    :smove,
    :spop,
    :srandmember,
    :srem,
    :sscan,
    :sunion,
    :sunionstore,
    :zadd,
    :zcard,
    :zcount,
    :zincrby,
    :zinter,
    :zinterstore,
    :zlexcount,
    :zrange,
    :zrangebylex,
    :zrangebyscore,
    :zrank,
    :zrem,
    :zremrangebylex,
    :zremrangebyrank,
    :zremrangebyscore,
    :zrevrange,
    :zrevrangebylex,
    :zrevrangebyscore,
    :zrevrank,
    :zscore,
    :zunion,
    :zunionstore,
    :zscan
  ]

  import Rediscl.Query.Api

  @doc """
  Builds a pipe query at compile time from the given keyword list of
  `{command, arguments}` pairs.
  """
  defmacro begin(pipes \\ []) when is_list(pipes) do
    build(pipes)
  end

  @doc """
  Builds every `{command, arguments}` pair in `pipes` into a raw command
  list. Also accepts a single `{command, arguments}` tuple.
  """
  def build(pipes) when is_list(pipes) do
    Enum.into(pipes, %{})
    |> Enum.map(&build(&1))
  end

  def build({type, expr}) when type in @pipes do
    build(type, expr)
  end

  @doc """
  Builds a single command from its arguments, validating arity and types.
  Raises `ArgumentError` when the arguments do not fit the command.
  """
  def build(type, expr) when type == :exists and is_binary(expr) do
    exists(expr)
  end

  def build(type, expr) when type == :append and is_list(expr) do
    unless Enum.count(expr) == 2 and Enum.any?(expr, &typeof!(&1)),
      do: raise(ArgumentError, "append given parameters not valid")

    # Bug fix: the value argument used to be `Enum.at(expr, 0)` (the key),
    # so APPEND always appended the key to itself.
    append(Enum.at(expr, 0), Enum.at(expr, 1))
  end

  def build(type, expr) when type == :set and is_list(expr) do
    unless Enum.count(expr) == 2 and Enum.any?(expr, &typeof!(&1)),
      do: raise(ArgumentError, "set given parameters not valid")

    set(Enum.at(expr, 0), Enum.at(expr, 1))
  end

  def build(type, expr) when type == :setex and is_list(expr) do
    unless Enum.count(expr) == 3 and typeof!(Enum.at(expr, 0)) and
             is_integer(Enum.at(expr, 1)) and
             (typeof!(Enum.at(expr, 2)) or is_integer(Enum.at(expr, 2))),
           do: raise(ArgumentError, "setex given parameters not valid")

    set_ex(Enum.at(expr, 0), Enum.at(expr, 1), Enum.at(expr, 2))
  end

  def build(type, expr) when type == :setnx and is_list(expr) do
    unless Enum.count(expr) == 2 and typeof!(Enum.at(expr, 0)) and
             (is_integer(Enum.at(expr, 1)) or typeof!(Enum.at(expr, 1))),
           do: raise(ArgumentError, "setnx given parameters not valid")

    set_nx(Enum.at(expr, 0), Enum.at(expr, 1))
  end

  def build(type, expr) when type == :setrange and is_list(expr) do
    unless Enum.count(expr) == 3 and typeof!(Enum.at(expr, 0)) and
             is_integer(Enum.at(expr, 1)) and typeof!(Enum.at(expr, 2)),
           do: raise(ArgumentError, "setrange given parameters not valid")

    set_range(Enum.at(expr, 0), Enum.at(expr, 1), Enum.at(expr, 2))
  end

  def build(type, expr) when type == :psetex and is_list(expr) do
    unless Enum.count(expr) == 3 and typeof!(Enum.at(expr, 0)) and
             is_integer(Enum.at(expr, 1)) and
             (typeof!(Enum.at(expr, 2)) or is_integer(Enum.at(expr, 2))),
           do: raise(ArgumentError, "psetex given parameters not valid")

    pset_ex(Enum.at(expr, 0), Enum.at(expr, 1), Enum.at(expr, 2))
  end

  def build(type, expr) when type == :get and is_binary(expr) do
    get(expr)
  end

  def build(type, expr) when type == :getrange and is_list(expr) do
    unless Enum.count(expr) == 3 and typeof!(Enum.at(expr, 0)) and
             Enum.any?(expr, &is_integer/1),
           do: raise(ArgumentError, "getrange given parameters not valid")

    get_range(Enum.at(expr, 0), Enum.at(expr, 1), Enum.at(expr, 2))
  end

  def build(type, expr) when type == :getset and is_list(expr) do
    unless Enum.count(expr) == 2 and Enum.any?(expr, &typeof!/1),
      do: raise(ArgumentError, "getset given parameters not valid")

    get_set(Enum.at(expr, 0), Enum.at(expr, 1))
  end

  def build(type, expr) when type == :strlen and is_binary(expr) do
    strlen(expr)
  end

  def build(type, expr) when type == :incr and is_binary(expr) do
    incr(expr)
  end

  def build(type, expr) when type == :incrby and is_list(expr) do
    unless Enum.count(expr) == 2 and typeof!(Enum.at(expr, 0)) and
             is_integer(Enum.at(expr, 1)),
           do: raise(ArgumentError, "incrby given parameters not valid")

    incr_by(Enum.at(expr, 0), Enum.at(expr, 1))
  end

  def build(type, expr) when type == :incrbyfloat and is_list(expr) do
    unless Enum.count(expr) == 2 and Enum.any?(expr, &typeof!/1),
      do: raise(ArgumentError, "incrbyfloat given parameters not valid")

    incr_by_float(Enum.at(expr, 0), Enum.at(expr, 1))
  end

  def build(type, expr) when type == :decr and is_binary(expr) do
    decr(expr)
  end

  def build(type, expr) when type == :decrby and is_list(expr) do
    unless Enum.count(expr) == 2 and typeof!(Enum.at(expr, 0)) and
             is_integer(Enum.at(expr, 1)),
           do: raise(ArgumentError, "decrby given parameters not valid")

    decr_by(Enum.at(expr, 0), Enum.at(expr, 1))
  end

  def build(type, expr) when type == :mset and is_list(expr) do
    # NOTE(review): `Enum.any?` only requires *one* valid element; if the
    # intent was "all elements valid", this should be `Enum.all?` — kept
    # as-is to preserve existing behavior.
    unless Enum.count(expr) >= 2 and Enum.any?(expr, &typeof!(&1)),
      do:
        raise(
          ArgumentError,
          # Message fixed to match the `>= 2` check above.
          "mset given parameters must be greater than or equal to 2 " <>
            "or given parameters not valid"
        )

    mset(expr)
  end

  def build(type, expr) when type == :msetnx and is_list(expr) do
    mset_nx(expr)
  end

  def build(type, expr) when type == :mget do
    unless Enum.count(expr) >= 2 and Enum.any?(expr, &typeof!(&1)),
      do:
        raise(
          ArgumentError,
          # Message fixed to match the `>= 2` check above.
          "mget given parameters must be greater than or equal to 2" <>
            " or given parameters not valid"
        )

    mget(expr)
  end

  # Guard fixed: it used to read `(type == :del and is_list(expr)) or
  # is_binary(expr)`, which let *any* command type with a binary argument
  # fall into this clause (and then crash in `Enum.any?/2` on the binary).
  def build(type, expr) when type == :del and (is_list(expr) or is_binary(expr)) do
    valid? = if is_list(expr), do: Enum.any?(expr, &typeof!/1), else: typeof!(expr)

    unless valid?,
      do: raise(ArgumentError, "del given parameters not valid")

    del(expr)
  end

  def build(type, expr) when type == :lpush and is_list(expr) do
    unless Enum.count(expr) == 2 and typeof!(Enum.at(expr, 0)) and
             is_list(Enum.at(expr, 1)) and
             Enum.any?(Enum.at(expr, 1), &typeof!(&1)),
           do:
             raise(
               ArgumentError,
               "lpush given parameters must be greater than 2 or " <>
                 "values not list or values not valid type"
             )

    lpush(Enum.at(expr, 0), Enum.at(expr, 1))
  end

  def build(type, expr) when type == :rpush and is_list(expr) do
    unless Enum.count(expr) == 2 and typeof!(Enum.at(expr, 0)) and
             is_list(Enum.at(expr, 1)) and
             Enum.any?(Enum.at(expr, 1), &typeof!(&1)),
           do:
             raise(
               ArgumentError,
               "rpush given parameters must be greater than 2 or " <>
                 "values not list or values not valid type"
             )

    rpush(Enum.at(expr, 0), Enum.at(expr, 1))
  end

  def build(type, expr) when type == :lset do
    unless Enum.count(expr) === 3 and typeof!(Enum.at(expr, 0)) and
             is_integer(Enum.at(expr, 1)) and typeof!(Enum.at(expr, 2)),
           do:
             raise(
               ArgumentError,
               "lset given parameters count equal to 3 or " <>
                 "values not valid type"
             )

    lset(Enum.at(expr, 0), Enum.at(expr, 1), Enum.at(expr, 2))
  end

  def build(type, expr) when type == :lrange do
    # Bug fix: the stop index was validated with `String.to_integer/1`
    # unconditionally, which raised for integer arguments. Integers are now
    # accepted directly; numeric strings still pass as before.
    unless Enum.count(expr) == 3 and typeof!(Enum.at(expr, 0)) and
             is_integer(Enum.at(expr, 1)) and
             (is_integer(Enum.at(expr, 2)) or
                is_integer(String.to_integer(Enum.at(expr, 2)))),
           do:
             raise(
               ArgumentError,
               "lrange given parameters count equal to 3 or " <>
                 " values not valid type"
             )

    lrange(Enum.at(expr, 0), Enum.at(expr, 1), Enum.at(expr, 2))
  end

  def build(type, expr) when type == :lrem do
    unless Enum.count(expr) == 3 and typeof!(Enum.at(expr, 0)) and
             is_integer(Enum.at(expr, 1)) and typeof!(Enum.at(expr, 2)),
           do:
             raise(
               ArgumentError,
               # Message fixed: the expected count ("3") was missing.
               "lrem given parameters count equal to 3 or " <>
                 "values not valid type"
             )

    lrem(Enum.at(expr, 0), Enum.at(expr, 1), Enum.at(expr, 2))
  end

  # A parameter is considered "valid" when Rediscl.Typeable reports it as a
  # string. (Was `@doc false` on a defp, which the compiler discards.)
  defp typeof!(v) do
    Rediscl.Typeable.typeof(v) == "string"
  end
end
|
lib/rediscl/query/pipe.ex
| 0.595257
| 0.408454
|
pipe.ex
|
starcoder
|
defmodule Roadtrip.Garage.Vehicle do
  @moduledoc """
  Describes a vehicle registered in the system.

  Vehicles are the base unit of record-keeping in Roadtrip: they own, and
  give context to, a series of odometer, fuel, and maintenance measurements.
  """
  use Roadtrip.Schema
  import Ecto.Changeset
  alias Roadtrip.Garage.Measurement

  schema "vehicles" do
    field :make, :string
    field :model, :string
    field :name, :string
    field :vin, :string
    field :year, :integer

    has_many :measurements, Measurement

    timestamps()
  end

  @doc false
  def changeset(vehicle, attrs) do
    vehicle
    |> cast(attrs, [:name, :make, :model, :year, :vin])
    # Names are optional; everything else must be present.
    |> validate_required([:make, :model, :year, :vin])
    |> validate_year()
    |> validate_byte_limits()
    |> validate_vin()
  end

  # Constrains :year to the range 1954 (creation of the North American VIN)
  # through the current year.
  defp validate_year(changeset) do
    changeset
    |> validate_number(:year,
      greater_than_or_equal_to: 1954,
      message: "Vehicles older than the North American VIN are currently unsupported"
    )
    |> validate_number(:year,
      less_than_or_equal_to: DateTime.utc_now().year,
      message: "Your vehicle cannot be from the future"
    )
  end

  # Name, Make, and Model are all limited to 32 bytes in the database.
  defp validate_byte_limits(changeset) do
    Enum.reduce([:name, :make, :model], changeset, fn field, acc ->
      validate_length(acc, field,
        max: 32,
        message: "This is currently required to be no more than 32 bytes"
      )
    end)
  end

  # Uppercases the VIN, validates its length/alphabet/checksum, and enforces
  # system-wide uniqueness.
  defp validate_vin(changeset) do
    changeset
    |> update_change(:vin, &String.upcase/1)
    |> validate_change(:vin, fn :vin, vin ->
      case Roadtrip.Garage.Vin.na_checksum(vin) do
        {:ok, _} -> []
        # Enforce North American checksumming for now
        {:warn, warn} -> [vin: warn]
        {:error, err} -> [vin: err]
      end
    end)
    # VINs are unique in the system and, ideally, the real world.
    |> unique_constraint(:vin, message: "This VIN has already been registered in the system")
  end

  @doc """
  Renders a display name for a vehicle: its `:name` when set and non-empty,
  otherwise the make/model/year summary produced by `describe/1`.
  """
  @spec show_name(%__MODULE__{}) :: String.t()
  def show_name(%__MODULE__{name: name} = vehicle) do
    if name in [nil, ""] do
      describe(vehicle)
    else
      to_string(name)
    end
  end

  @doc """
  Describes a vehicle by make, model, and year.
  """
  @spec describe(%__MODULE__{}) :: String.t()
  def describe(%__MODULE__{make: make, model: model, year: year}),
    do: "#{make} #{model} (#{year})"
end
|
apps/roadtrip/lib/roadtrip/garage/vehicle.ex
| 0.798933
| 0.555556
|
vehicle.ex
|
starcoder
|
defmodule ChangesFollower do
  @moduledoc """
  A behaviour module for following live changes in a CouchDB.

  It is basically an extended `GenServer` and also inherits all of `GenServer`'s
  callbacks and semantics. The main difference is the expected return value of
  `init/1` and the new `handle_change/2` callback.

  ## Resilience

  `ChangesFollower` tries to be as resilient as possible, automatically
  restarting closed connections and resuming on last known sequence numbers for
  certain kinds of errors. It also checks if the server actually sends
  heartbeats within the expected timeframe and resets the connection on failures.

  ## Callbacks

  There are 7 callbacks required to be implemented in a `ChangesFollower`. By
  adding `use ChangesFollower` to your module, Elixir will automatically define
  all 7 callbacks for you, leaving it up to you to implement the ones you want
  to customize.

  ## Implementation Details

  Internally, an ibrowse worker is spawned and monitored. Therefore a
  `ChangesFollower` is not part of a load balancing pool.

  ## Additional Note

  When setting the `doc_ids` option, any given `filter` option will be ignored.
  """
  require Logger

  @type on_start :: {:ok, pid} | :ignore | {:error, {:already_started, pid} | term}
  @type name :: atom | {:global, term} | {:via, module, term}
  @type gen_option :: {:debug, debug} | {:name, name} | {:timeout, timeout} |
                      {:spawn_opt, Process.spawn_opt}
  @type debug :: [:trace | :log | :statistics | {:log_to_file, Path.t}]
  @type changes_follower :: pid | name | {atom, node}
  @type from :: {pid, tag :: term}
  @type option :: ICouch.open_changes_option |
                  {:longpoll, boolean} | {:heartbeat, integer} | {:timeout, integer}

  @behaviour GenServer

  @callback init(args :: term) ::
    {:ok, db :: ICouch.DB.t, state} |
    {:ok, db :: ICouch.DB.t, opts :: [option], state} |
    {:ok, db :: ICouch.DB.t, opts :: [option], state, timeout | :hibernate} |
    :ignore |
    {:stop, reason :: any} when state: term

  @callback handle_change(change :: %{}, state :: term) ::
    {:ok, new_state} |
    {:ok, new_state, timeout | :hibernate} |
    {:stop, reason :: term, new_state} when new_state: term

  @callback handle_call(request :: term, from, state :: term) ::
    {:reply, reply, new_state} |
    {:reply, reply, new_state, timeout | :hibernate} |
    {:noreply, new_state} |
    {:noreply, new_state, timeout | :hibernate} |
    {:stop, reason, reply, new_state} |
    {:stop, reason, new_state} when reply: term, new_state: term, reason: term

  @callback handle_cast(request :: term, state :: term) ::
    {:noreply, new_state} |
    {:noreply, new_state, timeout | :hibernate} |
    {:stop, reason :: term, new_state} when new_state: term

  @callback handle_info(msg :: :timeout | term, state :: term) ::
    {:noreply, new_state} |
    {:noreply, new_state, timeout | :hibernate} |
    {:stop, reason :: term, new_state} when new_state: term

  @callback handle_error(error :: term, state :: term) ::
    {:retry, new_state} |
    {:retry, wait_time :: integer | :infinity, new_state} |
    {:stop, reason :: term, new_state} when new_state: term

  @callback terminate(reason, state :: term) ::
    term when reason: :normal | :shutdown | {:shutdown, term} | term

  @callback code_change(old_vsn, state :: term, extra :: term) ::
    {:ok, new_state :: term} |
    {:error, reason :: term} when old_vsn: term | {:down, term}

  # Internal server state.
  defstruct [
    :module,      # user callback module
    :mstate,      # user callback module's state
    :infoid,      # unique ref tagging this follower's internal timer messages
    :db,          # ICouch.DB handle; db.server.direct holds the ibrowse worker pid
    :query,       # current _changes query parameters
    :lkg_seq, # Last known good sequence number
    :res_id,      # ibrowse response id of the active streaming request
    :res_discard, # response id whose remaining messages are silently dropped
    :tref,        # pending :timer reference (stream restart or heartbeat watchdog)
    :buffer       # partial-line buffer for the continuous feed
  ]

  # Injects default (overridable) implementations of all 7 callbacks.
  defmacro __using__(_) do
    quote location: :keep do
      @behaviour ChangesFollower

      def init(args) do
        {:ok, args, nil}
      end

      def handle_change(_row, state) do
        {:ok, state}
      end

      def handle_call(_request, _from, state) do
        {:noreply, state}
      end

      def handle_cast(_request, state) do
        {:noreply, state}
      end

      def handle_info(_msg, state) do
        {:noreply, state}
      end

      def handle_error(_error, state) do
        {:retry, 60_000, state}
      end

      def terminate(_reason, _state) do
        :ok
      end

      def code_change(_old, state, _extra) do
        {:ok, state}
      end

      defoverridable [init: 1, handle_change: 2, handle_call: 3, handle_cast: 2,
                      handle_info: 2, handle_error: 2, terminate: 2, code_change: 3]
    end
  end

  @doc """
  Starts a `ChangesFollower` process linked to the current process.
  """
  @spec start_link(module, any, options :: [gen_option]) :: on_start
  def start_link(module, args, options \\ []) when is_atom(module) and is_list(options) do
    GenServer.start_link(__MODULE__, {module, args}, options)
  end

  @doc """
  Starts a `ChangesFollower` process without links (outside a supervision tree).
  """
  @spec start(module, any, options :: [gen_option]) :: on_start
  def start(module, args, options \\ []) when is_atom(module) and is_list(options) do
    GenServer.start(__MODULE__, {module, args}, options)
  end

  @doc """
  Synchronously stops the follower with the given `reason`.
  """
  @spec stop(changes_follower, reason :: term, timeout) :: :ok
  def stop(changes_follower, reason \\ :normal, timeout \\ :infinity) do
    GenServer.stop(changes_follower, reason, timeout)
  end

  @doc """
  Makes a synchronous call to the follower; forwarded to the callback
  module's `c:handle_call/3`.
  """
  @spec call(changes_follower, term, timeout) :: term
  def call(changes_follower, request, timeout \\ 5000) do
    GenServer.call(changes_follower, request, timeout)
  end

  @doc """
  Sends an asynchronous request; forwarded to `c:handle_cast/2`.
  """
  @spec cast(changes_follower, term) :: :ok
  def cast(changes_follower, request) do
    GenServer.cast(changes_follower, request)
  end

  @doc """
  Replies to a client; see `GenServer.reply/2`.
  """
  @spec reply(from, term) :: :ok
  def reply(client, reply) do
    GenServer.reply(client, reply)
  end

  @doc """
  Returns the pid (or `{atom, node}`) of a follower, or `nil`.
  """
  @spec whereis(changes_follower) :: pid | {atom, node} | nil
  def whereis(changes_follower) do
    GenServer.whereis(changes_follower)
  end

  @doc """
  Forces an immediate reconnect attempt, skipping any pending retry timer.
  Only has an effect while the follower has no open connection.
  """
  @spec retry_now(changes_follower) :: :ok
  def retry_now(changes_follower) do
    send(changes_follower, :'$changes_follower_retry_now')
    :ok
  end

  # -- Callbacks --

  def code_change(old_vsn, %__MODULE__{module: module, mstate: mstate} = state, extra) do
    case module.code_change(old_vsn, mstate, extra) do
      {:ok, new_state} -> {:ok, %{state | mstate: new_state}}
      other -> other
    end
  end

  # Delegates to the callback module, re-wrapping its state.
  def handle_call(request, from, %__MODULE__{module: module, mstate: mstate} = state) do
    case module.handle_call(request, from, mstate) do
      {:reply, reply, new_state} -> {:reply, reply, %{state | mstate: new_state}}
      {:reply, reply, new_state, timeout} -> {:reply, reply, %{state | mstate: new_state}, timeout}
      {:noreply, new_state} -> {:noreply, %{state | mstate: new_state}}
      {:noreply, new_state, timeout} -> {:noreply, %{state | mstate: new_state}, timeout}
      {:stop, reason, reply, new_state} -> {:stop, reason, reply, %{state | mstate: new_state}}
      {:stop, reason, new_state} -> {:stop, reason, %{state | mstate: new_state}}
    end
  end

  def handle_cast(request, %__MODULE__{module: module, mstate: mstate} = state) do
    case module.handle_cast(request, mstate) do
      {:noreply, new_state} -> {:noreply, %{state | mstate: new_state}}
      {:noreply, new_state, timeout} -> {:noreply, %{state | mstate: new_state}, timeout}
      {:stop, reason, new_state} -> {:stop, reason, %{state | mstate: new_state}}
    end
  end

  # retry_now/1: reconnect immediately, but only when no worker is open.
  def handle_info(:'$changes_follower_retry_now', %__MODULE__{db: %{server: %{direct: nil}}} = state) do
    state |> cancel_timer() |> start_stream()
  end

  def handle_info(:'$changes_follower_retry_now', state) do
    {:noreply, state}
  end

  # Scheduled restart timer (set by start_stream_later/2) fired.
  def handle_info({infoid, :start_stream}, %__MODULE__{infoid: infoid} = state),
    do: start_stream(%{state | tref: nil})

  # Heartbeat watchdog fired: the server went silent, reset the connection.
  def handle_info({infoid, :heartbeat_missing}, %__MODULE__{module: module, infoid: infoid} = state) do
    Logger.warn "Heartbeat missing", via: module
    restart_stream(%{state | tref: nil})
  end

  # HTTP status line of the streaming _changes request arrived.
  # '400' with a known-good sequence number retries from that sequence once;
  # any other non-200 code is routed through the user's handle_error/2.
  def handle_info({:ibrowse_async_headers, res_id, code, _headers}, %__MODULE__{module: module, res_id: res_id, query: query, lkg_seq: lkg_seq} = state) do
    case code do
      '200' ->
        case query[:since] do
          nil -> {:noreply, reset_heartbeat(state)}
          seq -> {:noreply, reset_heartbeat(%{state | lkg_seq: seq})}
        end
      '400' ->
        if lkg_seq != nil do
          Logger.warn "Bad request detected, trying last known good sequence number; failed seq was: #{inspect query[:since]}", via: module
          %{state | query: Map.put(query, :since, lkg_seq), lkg_seq: nil, res_id: nil, res_discard: res_id} |> stop_stream() |> start_stream()
        else
          Logger.error "Bad request, cannot continue", via: module
          {:stop, :bad_request, state}
        end
      _ ->
        Logger.error "Request returned error code: #{code}", via: module
        reason = case ICouch.RequestError.parse_status_code(code) do
          {:error, reason} -> reason
          :ok -> {:status_code, List.to_integer(code)}
        end
        handle_error(reason, %{state | res_id: nil, res_discard: res_id} |> stop_stream())
    end
  end

  # Data for a response we already abandoned — drop it silently.
  def handle_info({:ibrowse_async_response, res_id, _}, %__MODULE__{res_discard: res_id} = state) do
    {:noreply, state}
  end

  # A bare newline is the CouchDB heartbeat; just re-arm the watchdog.
  def handle_info({:ibrowse_async_response, res_id, "\n"}, %__MODULE__{module: _module, res_id: res_id} = state) do
    {:noreply, reset_heartbeat(state)}
  end

  # Data chunk from the active stream. Continuous feeds are line-delimited
  # JSON and may end mid-line (the remainder is kept in `buffer`); longpoll
  # feeds deliver one complete JSON document per request.
  def handle_info({:ibrowse_async_response, res_id, chunk}, %__MODULE__{module: module, mstate: mstate, query: query, lkg_seq: lkg_seq, res_id: res_id, buffer: buffer} = state) when is_binary(chunk) do
    if query[:feed] == :continuous do
      # The last (possibly incomplete) line becomes the new buffer; all
      # preceding complete lines are decoded as individual changes.
      [buffer | blocks] = buffer <> chunk
      |> String.split("\n") # always yields a list of at least one entry
      |> Enum.reverse()
      blocks
      |> Enum.filter(&String.length(&1) > 0)
      |> Enum.reverse()
      |> Enum.map(&Poison.decode!/1)
      |> case do
        [] -> {:empty, buffer}
        [%{"error" => error} = change | _] -> {:error, error, change["reason"]}
        [%{"id" => _, "seq" => last_seq} = change] -> {:ok, [change], last_seq || lkg_seq, buffer}
        [%{"last_seq" => last_seq}] -> {:ok, [], last_seq || lkg_seq, buffer}
        changes -> {:ok, Enum.filter(changes, fn %{"id" => _} -> true; _ -> false end), get_last_seq(changes, lkg_seq), buffer}
      end
    else
      chunk
      |> Poison.decode!()
      |> case do
        %{"error" => error} = change ->
          {:error, error, change["reason"]}
        %{"results" => changes, "last_seq" => last_seq} ->
          {:ok, changes, last_seq, ""}
      end
    end
    |>
    case do
      {:empty, buffer} ->
        {:noreply, %{state | buffer: buffer}}
      {:error, error, reason} ->
        Logger.error "Received error: #{error} - #{reason}", via: module
        {:noreply, %{state | buffer: ""}}
      {:ok, changes, last_seq, buffer} ->
        # Inflate embedded documents when include_docs was requested.
        changes = if query[:include_docs] do
          Enum.map(changes, fn
            %{"doc" => doc} = row ->
              %{row | "doc" => ICouch.Document.from_api!(doc)}
            other ->
              other
          end)
        else
          changes
        end
        Logger.debug "Received changes for: #{inspect Enum.map(changes, &(&1["id"]))}", via: module
        # Advance the resume point before dispatching to the callback.
        query = Map.put(query, :since, last_seq)
        case handle_changes(changes, module, mstate) do
          {:ok, new_state} -> {:noreply, %{state | mstate: new_state, query: query, buffer: buffer} |> reset_heartbeat}
          {:ok, new_state, timeout} -> {:noreply, %{state | mstate: new_state, query: query, buffer: buffer} |> reset_heartbeat, timeout}
          {:stop, reason, new_state} -> {:stop, reason, %{state | mstate: new_state, query: query, buffer: buffer} |> reset_heartbeat}
        end
    end
  end

  # Transport-level error on the active response.
  def handle_info({:ibrowse_async_response, res_id, {:error, reason}}, %__MODULE__{module: module, res_id: res_id} = state) do
    Logger.error "Error: #{inspect(reason)}", via: module
    handle_error(reason, state |> cancel_timer())
  end

  def handle_info({:ibrowse_async_response_timeout, res_id}, %__MODULE__{res_discard: res_id} = state) do
    {:noreply, state}
  end

  # The long-running request hit its timeout — expected; reconnect.
  def handle_info({:ibrowse_async_response_timeout, res_id}, %__MODULE__{module: module, res_id: res_id} = state) do
    Logger.debug "Request timed out (usually as expected), will restart", via: module
    restart_stream(state)
  end

  def handle_info({:ibrowse_async_response_end, res_id}, %__MODULE__{res_discard: res_id} = state) do
    {:noreply, %{state | res_discard: nil}}
  end

  def handle_info({:ibrowse_async_response_end, res_id}, %__MODULE__{module: module, res_id: res_id} = state) do
    Logger.debug "Response ended (usually as expected), will restart", via: module
    restart_stream(%{state | res_id: nil} |> cancel_timer())
  end

  # The monitored ibrowse worker died; forget it and reconnect after a
  # small random delay (jitter avoids thundering-herd reconnects).
  def handle_info({:DOWN, _, :process, ibworker, reason}, %__MODULE__{module: module, db: %{server: %{direct: ibworker} = server} = db, res_id: res_id} = state) do
    Logger.error "Connection process died, will restart: #{inspect reason}", via: module
    state = cancel_timer(%{state | db: %{db | server: %{server | direct: nil}}, res_id: nil, res_discard: res_id})
    :timer.sleep(:rand.uniform(500))
    start_stream(state)
  end

  # Everything else is forwarded to the callback module.
  def handle_info(msg, %__MODULE__{module: module, mstate: mstate} = state) do
    case module.handle_info(msg, mstate) do
      {:noreply, new_state} -> {:noreply, %{state | mstate: new_state}}
      {:noreply, new_state, timeout} -> {:noreply, %{state | mstate: new_state}, timeout}
      {:stop, reason, new_state} -> {:stop, reason, %{state | mstate: new_state}}
    end
  end

  # NOTE(review): the three success branches below differ only in the opts
  # list and the optional timeout; they are kept separate to mirror the
  # three accepted c:init/1 return shapes.
  def init({module, args}) do
    Process.flag :trap_exit, true
    case module.init(args) do
      {:ok, %ICouch.DB{} = db, mstate} ->
        %__MODULE__{module: module, mstate: mstate, infoid: make_ref(), db: db, buffer: ""}
        |> parse_options([])
        |> start_stream()
        |> case do
          {:noreply, state} -> {:ok, state}
          {:stop, reason, _} -> {:stop, reason}
          other -> other
        end
      {:ok, %ICouch.DB{} = db, opts, mstate} ->
        %__MODULE__{module: module, mstate: mstate, infoid: make_ref(), db: db, buffer: ""}
        |> parse_options(opts)
        |> start_stream()
        |> case do
          {:noreply, state} -> {:ok, state}
          {:stop, reason, _} -> {:stop, reason}
          other -> other
        end
      {:ok, %ICouch.DB{} = db, opts, mstate, timeout} ->
        %__MODULE__{module: module, mstate: mstate, infoid: make_ref(), db: db, buffer: ""}
        |> parse_options(opts)
        |> start_stream()
        |> case do
          {:noreply, state} -> {:ok, state, timeout}
          {:stop, reason, _} -> {:stop, reason}
          other -> other
        end
      :ignore ->
        :ignore
      {:stop, reason} ->
        {:stop, reason}
    end
  end

  def terminate(reason, %__MODULE__{module: module, mstate: mstate, db: %{server: %{direct: ibworker}}}) do
    if ibworker != nil do
      :ibrowse.stop_worker_process(ibworker)
    end
    module.terminate(reason, mstate)
  end

  # -- Private --

  # Normalizes user options into the _changes query map; translates the
  # `longpoll: true` flag into `feed: :longpoll` (default is :continuous)
  # and derives the ibrowse receive timeout from the server-side timeout.
  defp parse_options(%__MODULE__{db: %{server: server} = db} = state, opts) do
    query = Map.merge(%{heartbeat: 60_000, timeout: 7_200_000}, Map.new(opts))
      |> Map.put(:feed, :continuous)
      |> Map.pop(:longpoll)
      |> case do
        {true, query} -> %{query | feed: :longpoll}
        {_, query} -> query
      end
    r_timeout = case query[:timeout] do
      nil -> nil
      t -> t + 5_000
    end
    %{state | db: %{db | server: %{server | timeout: r_timeout}}, query: query}
  end

  # No worker yet: spawn and monitor a dedicated ibrowse worker first.
  defp start_stream(%__MODULE__{db: %{server: %{direct: nil, uri: uri} = server} = db} = state) do
    {:ok, ibworker} = URI.to_string(uri) |> to_charlist() |> :ibrowse.spawn_worker_process()
    Process.monitor(ibworker)
    start_stream(%{state | db: %{db | server: %{server | direct: ibworker}}})
  end

  # Issues the streaming _changes request. With `doc_ids`, CouchDB requires
  # a POST with the ids in the body and the _doc_ids filter.
  defp start_stream(%__MODULE__{module: module, db: db, query: query, res_id: old_res_id} = state) do
    ib_options = [stream_to: self(), stream_chunk_size: :infinity] ++ (if query[:feed] == :continuous, do: [stream_full_chunks: true], else: [])
    {query, method, body, headers} = case query do
      %{doc_ids: doc_ids} ->
        {Map.delete(query, :doc_ids) |> Map.put(:filter, "_doc_ids"), :post, Poison.encode!(%{"doc_ids" => doc_ids}), [{"Content-Type", "application/json"}, {"Accept", "application/json"}]}
      _ ->
        {query, :get, nil, [{"Accept", "application/json"}]}
    end
    case ICouch.DB.send_raw_req(db, {"_changes", query}, method, body, headers, ib_options) do
      {:ibrowse_req_id, res_id} ->
        if old_res_id == nil do
          Logger.info "Started stream", via: module
        else
          Logger.info "Restarted stream", via: module
        end
        {:noreply, %{state | res_id: res_id} |> reset_heartbeat()}
      {:error, {:conn_failed, _} = reason} ->
        Logger.warn "Connection failed", via: module
        handle_error(reason, state |> stop_stream())
      {:error, :sel_conn_closed} ->
        restart_stream(state)
    end
  end

  # Schedules a reconnect after `wait_time` (+ up to 500ms of jitter).
  defp start_stream_later(%__MODULE__{infoid: infoid} = state, wait_time) do
    {:ok, tref} = :timer.send_after(wait_time + :rand.uniform(500), {infoid, :start_stream})
    %{state | tref: tref}
  end

  defp stop_stream(%__MODULE__{db: %{server: %{direct: nil}}} = state),
    do: cancel_timer(state)

  # Stops the ibrowse worker and briefly drains the expected shutdown
  # messages so they don't leak into later handle_info clauses.
  defp stop_stream(%__MODULE__{db: %{server: %{direct: ibworker} = server} = db, res_id: res_id} = state) do
    state = cancel_timer(state)
    :ibrowse.stop_worker_process(ibworker)
    if res_id != nil do
      receive do
        {:ibrowse_async_response, ^res_id, {:error, :closing_on_request}} -> nil
      after
        10 -> nil
      end
    end
    receive do
      {:DOWN, _, :process, ^ibworker, _} -> nil
    after
      10 -> nil
    end
    %{state | db: %{db | server: %{server | direct: nil}}}
  end

  defp restart_stream(state) do
    state = stop_stream(state)
    :timer.sleep(:rand.uniform(500))
    start_stream(state)
  end

  defp cancel_timer(%__MODULE__{tref: nil} = state),
    do: state

  defp cancel_timer(%__MODULE__{tref: tref} = state) do
    :timer.cancel(tref)
    %{state | tref: nil}
  end

  # Re-arms the heartbeat watchdog: if no data or heartbeat arrives within
  # twice the configured heartbeat interval, the stream is restarted.
  # Longpoll feeds have no heartbeat, so no timer is set for them.
  defp reset_heartbeat(%__MODULE__{query: query, infoid: infoid} = state) do
    state = cancel_timer(state)
    case query do
      %{feed: :continuous, heartbeat: heartbeat} ->
        {:ok, tref} = :timer.send_after(heartbeat * 2, {infoid, :heartbeat_missing})
        %{state | tref: tref}
      _ ->
        state
    end
  end

  # NOTE(review): this reduce keeps the *last* seq found in the list (later
  # entries override earlier ones), falling back to `last_seq`.
  defp get_last_seq(changes, last_seq),
    do: Enum.reduce(changes, last_seq, fn change, acc -> change["seq"] || change["last_seq"] || acc end)

  # Dispatches each change to the callback module in order; any timeout
  # returned for a non-final change is discarded, only the last change's
  # full return value propagates.
  defp handle_changes([], _, mstate),
    do: {:ok, mstate}

  defp handle_changes([last_change], module, mstate),
    do: module.handle_change(last_change, mstate)

  defp handle_changes([change | tchanges], module, mstate) do
    case module.handle_change(change, mstate) do
      {:ok, new_state} -> handle_changes(tchanges, module, new_state)
      {:ok, new_state, _} -> handle_changes(tchanges, module, new_state)
      other -> other
    end
  end

  # Routes an error through the user's handle_error/2 and translates its
  # verdict into retry-now / retry-later / never-retry / stop.
  defp handle_error(reason, %__MODULE__{module: module, mstate: mstate} = state) do
    case module.handle_error(reason, mstate) do
      {:retry, new_state} ->
        :timer.sleep(:rand.uniform(500))
        %{state | mstate: new_state}
        |> start_stream()
      {:retry, :infinity, new_state} ->
        {:noreply, %{state | mstate: new_state}}
      {:retry, wait_time, new_state} when is_integer(wait_time) ->
        {:noreply, %{state | mstate: new_state} |> start_stream_later(wait_time)}
      {:stop, reason, new_state} ->
        {:stop, reason, %{state | mstate: new_state}}
    end
  end
end
|
lib/changes_follower.ex
| 0.858467
| 0.511412
|
changes_follower.ex
|
starcoder
|
defmodule Perpetual.Server do
  @moduledoc false
  use GenServer

  # A zero-length timeout is attached to every callback result, so the
  # server receives a `:timeout` message immediately after going idle and
  # keeps advancing its value in a perpetual loop.
  @timeout 0

  @doc false
  def init(opts \\ []) do
    init_fun = Keyword.fetch!(opts, :init_fun)
    next_fun = Keyword.fetch!(opts, :next_fun)
    _ = initial_call(init_fun, next_fun)

    {:ok, %{next_fun: next_fun, value: run(init_fun, [])}, @timeout}
  end

  # The idle timeout fired: advance the value once.
  def handle_info(:timeout, %{next_fun: next_fun} = state) do
    {:noreply, apply_to_value(state, next_fun), @timeout}
  end

  # Read-only access: the reply is `fun.(value)`; the value is unchanged.
  def handle_call({:get, fun}, _from, %{value: value} = state) do
    {:reply, run(fun, [value]), state, @timeout}
  end

  # `fun` must return `{reply, next_value}`; any other shape stops the
  # server with `{:bad_return_value, other}`.
  def handle_call({:get_and_update, fun}, _from, %{value: value} = state) do
    case run(fun, [value]) do
      {reply, next_value} ->
        {:reply, reply, %{state | value: next_value}, @timeout}

      other ->
        {:stop, {:bad_return_value, other}, state}
    end
  end

  def handle_call({:update, fun}, _from, state) do
    {:reply, :ok, apply_to_value(state, fun), @timeout}
  end

  # Swarm handoff: hand the full state to the receiving node...
  def handle_call({:swarm, :begin_handoff}, _from, state) do
    {:reply, {:resume, state}, state, @timeout}
  end

  # ...and adopt the state received from the departing node.
  def handle_call({:swarm, :end_handoff, incoming_state}, _from, _state) do
    {:noreply, incoming_state, @timeout}
  end

  def handle_cast({:cast, fun}, state) do
    {:noreply, apply_to_value(state, fun), @timeout}
  end

  # `extra` carries the migration function for hot code upgrades.
  def code_change(_old, state, fun) do
    {:ok, apply_to_value(state, fun)}
  end

  # Records the user-supplied functions in the process dictionary so crash
  # reports and observer show a meaningful "initial call".
  defp initial_call(init_fun, next_fun) do
    _ = Process.put(:"$initial_call", get_initial_call(init_fun))
    _ = Process.put(:"$next_call", get_initial_call(next_fun))
    :ok
  end

  defp get_initial_call(fun) when is_function(fun, 0), do: fun_mfa(fun, 0)
  defp get_initial_call(fun) when is_function(fun, 1), do: fun_mfa(fun, 1)
  defp get_initial_call({mod, fun, args}), do: {mod, fun, length(args)}

  # Resolves a fun to its {module, name, arity} triple.
  defp fun_mfa(fun, arity) do
    {:module, module} = Function.info(fun, :module)
    {:name, name} = Function.info(fun, :name)
    {module, name, arity}
  end

  # Applies `fun` to the current value and stores the result.
  defp apply_to_value(%{value: value} = state, fun) do
    %{state | value: run(fun, [value])}
  end

  defp run({mod, fun, args}, extra), do: apply(mod, fun, extra ++ args)
  defp run(fun, extra), do: apply(fun, extra)
end
|
lib/perpetual/server.ex
| 0.631594
| 0.421492
|
server.ex
|
starcoder
|
defmodule Flex.System do
@moduledoc """
An interface to create a Fuzzy Logic Control System (FLS).
The Fuzzy controllers are very simple conceptually. They consist of an input stage (fuzzification), a processing stage (inference), and an output stage (defuzzification).
"""
use GenServer
require Logger
alias Flex.EngineAdapter
alias Flex.EngineAdapter.{ANFIS, Mamdani, TakagiSugeno}
defmodule State do
  @moduledoc false
  # Internal GenServer state of the fuzzy logic system; the fields mirror
  # the public `t:Flex.System.t/0` type and its typedoc.
  defstruct rules: nil,
            antecedent: nil,
            consequent: nil,
            engine_type: Mamdani,
            engine_output: %EngineAdapter.State{},
            sets_in_rules: [],
            learning_rate: 0.05,
            initial_gamma: 1000
end
@typedoc """
Fuzzy Logic System state.
- `:rules` - (list) A list of rules that defines the behavior of the Fuzzy logic systems.
- `:consequent` - Output variable.
- `:antecedent` - a list of the input variables.
- `:engine_type` - defines the inference engine behavior (default: Mamdani).
- `:sets_in_rules` - list of sets involve in the rules (optional, required by ANFIS).
- `:learning_rate` - is the speed at which the system parameters are adjusted (ANFIS only).
- `:initial_gamma` - is the initial gamma value used by the least-squares estimator (LSE, ANFIS only).
"""
@type t :: %Flex.System.State{
rules: [Flex.Rule.t(), ...],
antecedent: [Flex.Variable.t(), ...],
consequent: Flex.Variable.t(),
engine_type: Mamdani | TakagiSugeno | ANFIS,
engine_output: EngineAdapter.engine_state(),
sets_in_rules: list(),
learning_rate: number(),
initial_gamma: number()
}
@doc """
Spawns a Fuzzy Logic System.
The following options are require:
- `:rules` - Defines the behavior of the system based on a list of rules.
- `:antecedent` - (list) Defines the input variables.
- `:consequent` - Defines the output variable.
"""
def start_link(params, opt \\ []) do
GenServer.start_link(__MODULE__, params, opt)
end
def stop(pid) do
GenServer.stop(pid)
end
@doc """
Computes the Fuzzy Logic System output for a given input vector.
"""
@spec compute(atom | pid | {atom, any} | {:via, atom, any}, list) :: any
def compute(pid, input_vector) when is_list(input_vector) do
GenServer.call(pid, {:compute, input_vector})
end
@doc """
Adjust the consequent free parameters of the FIS (only avaliable with ANFIS engine), using the following methods:
- Learning method: Steepest gradient Backpropagation.
- Energy function: 0.5 * (target - output)^2
"""
@spec forward_pass(atom | pid | {atom, any} | {:via, atom, any}, number()) ::
{:ok, number()} | {:error, :einval}
def forward_pass(pid, desired_output) when is_number(desired_output) do
GenServer.call(pid, {:forward_pass, desired_output})
end
@doc """
Adjust the premise free parameters of the FIS (only avaliable with ANFIS engine), using the following methods:
- Learning method: Steepest gradient Backpropagation.
- Energy function: 0.5 * (target - output)^2
"""
@spec backward_pass(atom | pid | {atom, any} | {:via, atom, any}, number()) ::
{:ok, number()} | {:error, :einval}
def backward_pass(pid, desired_output) when is_number(desired_output) do
GenServer.call(pid, {:backward_pass, desired_output})
end
@doc """
Adjust the free parameters of the FIS (only avaliable with ANFIS engine), using the following methods:
- Learning method: Steepest gradient Backpropagation.
- Energy function: 0.5 * (target - output)^2
Note: this functions fires both forward and backward passes.
"""
@spec hybrid_online_learning(atom | pid | {atom, any} | {:via, atom, any}, number()) ::
{:ok, number()} | {:error, :einval}
def hybrid_online_learning(pid, desired_output) when is_number(desired_output) do
GenServer.call(pid, {:hybrid_online_learning, desired_output})
end
@doc """
Adjust the free parameters of the FIS (only avaliable with ANFIS engine), using the following methods:
- Forward method: Least Square Estimate.
- Learning method: Steepest gradient Backpropagation.
- Energy function: 0.5 * (target - output)^2
Note: this functions fires both forward and backward passes with a batch of data.
"""
@spec hybrid_offline_learning(
atom | pid | {atom, any} | {:via, atom, any},
list(),
list(),
number()
) ::
{:ok, number()} | {:error, :einval}
def hybrid_offline_learning(pid, inputs, targets, epochs)
when is_list(inputs) and is_list(targets) and is_number(epochs) do
GenServer.call(pid, {:hybrid_offline_learning, inputs, targets, epochs}, :infinity)
end
@doc """
Sets the Inference Engine type.
"""
@spec set_engine_type(atom | pid | {atom, any} | {:via, atom, any}, atom) ::
:ok | {:error, :einval}
def set_engine_type(pid, type) when type in [Mamdani, TakagiSugeno, ANFIS] do
GenServer.call(pid, {:set_engine_type, type})
end
def set_engine_type(_pid, _type), do: {:error, :einval}
@doc """
Sets the Learning rate (etha).
"""
@spec set_learning_rate(atom | pid | {atom, any} | {:via, atom, any}, number()) ::
:ok | {:error, :einval}
def set_learning_rate(pid, learning_rate) when is_number(learning_rate) do
GenServer.call(pid, {:set_learning_rate, learning_rate})
end
@doc """
Gets the current system state.
"""
@spec get_state(atom | pid | {atom, any} | {:via, atom, any}) :: Flex.System.t()
def get_state(pid) do
GenServer.call(pid, :get_state)
end
def init(params) do
rules = Keyword.fetch!(params, :rules)
antecedent = Keyword.fetch!(params, :antecedent)
consequent = Keyword.fetch!(params, :consequent)
engine_type = Keyword.get(params, :engine_type, Mamdani)
learning_rate = Keyword.get(params, :learning_rate, 0.05)
initial_gamma = Keyword.get(params, :initial_gamma, 1000)
sets_in_rules = Keyword.get(params, :sets_in_rules, [])
{:ok,
%State{
rules: rules,
antecedent: antecedent,
consequent: consequent,
engine_type: engine_type,
learning_rate: learning_rate,
sets_in_rules: sets_in_rules,
initial_gamma: initial_gamma
}}
end
def handle_call({:compute, input_vector}, _from, state) do
output = compute_fis(input_vector, state)
{:reply, output.crisp_output, %{state | engine_output: output}}
end
def handle_call(
{:forward_pass, target},
_from,
%{engine_type: engine_type, engine_output: engine_output} = state
)
when engine_type == ANFIS do
de_do5 = -(target - engine_output.crisp_output)
consequent = ANFIS.forward_pass(de_do5, state.learning_rate, engine_output)
{:reply, {:ok, de_do5}, %{state | consequent: consequent}}
end
def handle_call(
{:backward_pass, target},
_from,
%{engine_type: engine_type, engine_output: engine_output} = state
)
when engine_type == ANFIS do
de_do5 = -(target - engine_output.crisp_output)
antecedent = ANFIS.backward_pass(de_do5, state, engine_output)
{:reply, {:ok, de_do5}, %{state | antecedent: antecedent}}
end
def handle_call(
{:hybrid_online_learning, target},
_from,
%{engine_type: engine_type, engine_output: engine_output} = state
)
when engine_type == ANFIS do
de_do5 = -(target - engine_output.crisp_output)
consequent = ANFIS.forward_pass(de_do5, state.learning_rate, engine_output)
antecedent = ANFIS.backward_pass(de_do5, state, engine_output)
{:reply, {:ok, de_do5}, %{state | consequent: consequent, antecedent: antecedent}}
end
def handle_call(
{:hybrid_offline_learning, inputs, b_matrix, epochs},
_from,
%{
engine_type: engine_type,
initial_gamma: initial_gamma,
antecedent: antecedent,
consequent: consequent
} = state
)
when engine_type == ANFIS do
{antecedent, consequent} =
for _epoch <- 1..epochs, reduce: {antecedent, consequent} do
{antecedent, consequent} ->
a_matrix =
build_matrix_a(inputs, %{state | antecedent: antecedent, consequent: consequent})
consequent = ANFIS.least_square_estimate(a_matrix, b_matrix, initial_gamma, state)
antecedent =
for {input_vector, target} <- Enum.zip(inputs, b_matrix), reduce: antecedent do
antecedent ->
back_learning_state = %{state | antecedent: antecedent, consequent: consequent}
prediction = compute_fis(input_vector, back_learning_state)
de_do5 = -(target - prediction.crisp_output)
ANFIS.backward_pass(de_do5, back_learning_state, prediction)
end
{antecedent, consequent}
end
{:reply, :ok, %{state | antecedent: antecedent, consequent: consequent}}
end
def handle_call({:set_engine_type, type}, _from, state) do
{:reply, :ok, %{state | engine_type: type}}
end
def handle_call({:set_learning_rate, learning_rate}, _from, %{engine_type: engine_type} = state)
when engine_type == ANFIS do
{:reply, :ok, %{state | learning_rate: learning_rate}}
end
def handle_call({:set_learning_rate, _learning_rate}, _from, state),
do: {:reply, {:error, :einval}, state}
def handle_call(:get_state, _from, state),
do: {:reply, {:ok, state}, state}
# Catch invalid calls
def handle_call({_call, _target}, _from, state),
do: {:reply, {:error, :einval}, state}
defp compute_fis(input_vector, %{engine_type: engine_type} = state) do
%EngineAdapter.State{input_vector: input_vector, type: engine_type}
|> EngineAdapter.validation(state.antecedent, state.rules, state.consequent)
|> EngineAdapter.fuzzification(state.antecedent)
|> EngineAdapter.inference(state.rules, state.consequent)
|> EngineAdapter.defuzzification()
end
defp build_matrix_a(inputs, state) do
inputs
|> Enum.map(fn input_vector ->
output = compute_fis(input_vector, state)
w_n = get_wn(output.fuzzy_consequent)
build_vector_at(w_n, input_vector)
end)
end
defp get_wn(fuzzy_consequent) do
w =
fuzzy_consequent.fuzzy_sets
|> Enum.reduce([], fn output_fuzzy_set, acc ->
acc ++ [fuzzy_consequent.mf_values[output_fuzzy_set.tag]]
end)
|> List.flatten()
ws = Enum.sum(w)
Enum.map(w, fn w_i -> w_i / ws end)
end
defp build_vector_at(w_n, input_vector) do
Enum.reduce(w_n, [], fn w_n_i, acc ->
acc ++ Enum.map(input_vector ++ [1], fn x_i -> x_i * w_n_i end)
end)
end
end
|
lib/system.ex
| 0.913802
| 0.697171
|
system.ex
|
starcoder
|
defmodule Elixpath.Parser do
  @moduledoc """
  Parses Elixpath expressions.
  """
  import NimbleParsec

  @type option :: {:unsafe_atom, boolean} | {:prefer_keys, :string | :atom}

  defmodule ParseError do
    @moduledoc """
    Syntax error while parsing an Elixpath string.
    """
    defexception [:message]
  end

  defparsecp(:parse_path, __MODULE__.Grammar.path())

  @doc """
  Parses an Elixpath expression.

  See [this page](readme.html) for syntax.

  Warning: when `unsafe_atom: true` is specified, this function creates new atoms
  using `String.to_atom/1`. Do not specify `unsafe_atom: true` for untrusted input.
  See `String.to_atom/1` for details.

  ## Options

  - `:unsafe_atom` - if `true`, allows to create non-existing atoms, defaults to `false`.
  - `:prefer_keys` - unquoted keys are converted to string (`:string`) or atom (`:atom`). Defaults to `:string`.
  """
  @spec parse(String.t() | Elixpath.t(), [option]) ::
          {:ok, Elixpath.t()} | {:error, reason :: term}
  def parse(str_or_path, opts \\ [])

  # Already-parsed paths pass through unchanged.
  def parse(%Elixpath{} = path, _opts) do
    {:ok, path}
  end

  def parse(str, opts) when is_binary(str) do
    case parse_path(str, context: %{opts: opts}) do
      {:ok, result, "", _context, _line, _column} ->
        {:ok, %Elixpath{path: result}}

      {:ok, result, rest, _context, _line, _column} ->
        # We must not get here because Grammar.path ends with eos().
        {:error, "did not reach the end of string. result: #{inspect(result)}, rest: #{rest}"}

      {:error, reason, _rest, _context, _line, _column} ->
        {:error, reason}
    end
  end

  def parse(other, _opts) do
    {:error, "unexpected input type: #{inspect(other)}"}
  end

  @doc """
  Parses an Elixpath expression.
  Raises on error.
  See `parse/2` for available options.
  """
  @spec parse!(String.t() | Elixpath.t(), [option]) :: Elixpath.t() | no_return
  def parse!(str_or_path, opts \\ []) do
    case parse(str_or_path, opts) do
      {:ok, result} ->
        result

      {:error, reason} when is_binary(reason) ->
        raise ParseError, message: "error parsing path #{inspect(str_or_path)}: #{reason}"

      {:error, reason} ->
        # Robustness fix: a non-binary error reason previously fell through
        # every clause and crashed with CaseClauseError instead of ParseError.
        raise ParseError,
          message: "error parsing path #{inspect(str_or_path)}: #{inspect(reason)}"
    end
  end
end
|
lib/elixpath/parser.ex
| 0.853562
| 0.45538
|
parser.ex
|
starcoder
|
defmodule Asteroid.Utils do
  @doc """
  Returns the current UNIX timestamp.
  """
  @spec now() :: non_neg_integer()
  def now(), do: System.system_time(:second)

  @doc """
  Returns a secure random base 64 string of `bytes` bytes of randomness.
  """
  @spec secure_random_b64(non_neg_integer()) :: String.t()
  def secure_random_b64(bytes \\ 32) do
    bytes
    |> :crypto.strong_rand_bytes()
    |> Base.url_encode64(padding: false)
  end

  @doc """
  Returns the configuration option value.

  In tests, checks first the process dictionary for the value and falls back to the standard
  configuration, so that one can set configuration at the testing process level using:
  ```elixir
  Process.put(:configuration_option, value)
  ```
  """
  # Spec fixed to cover the real arity (a default argument defines astrenv/1 and astrenv/2).
  @spec astrenv(atom(), any()) :: any()
  if Mix.env() == :test do
    def astrenv(key, default_value \\ nil) do
      # A sentinel distinguishes "key absent" from "key stored with a nil value"
      # and avoids scanning the whole process dictionary on every lookup.
      case Process.get(key, :__astrenv_not_set__) do
        :__astrenv_not_set__ -> Application.get_env(:asteroid, key, default_value)
        value -> value
      end
    end
  else
    def astrenv(key, default_value \\ nil) do
      Application.get_env(:asteroid, key, default_value)
    end
  end

  @doc """
  Puts `value` under `key` in `map`, unless the value is `nil`.
  """
  @spec put_if_not_nil(map(), Map.key(), Map.value()) :: map()
  def put_if_not_nil(map, _, nil), do: map
  def put_if_not_nil(map, key, value), do: Map.put(map, key, value)

  @doc """
  Puts `value` under `key` in `map`, unless the value is the empty string.
  """
  @spec put_if_not_empty_string(map(), Map.key(), String.t()) :: map()
  def put_if_not_empty_string(map, _, ""), do: map
  def put_if_not_empty_string(map, key, value), do: Map.put(map, key, value)

  @doc """
  Puts `list` under `key` in `map`, unless the list is empty.
  """
  @spec put_if_not_empty(map(), Map.key(), [any()]) :: map()
  def put_if_not_empty(map, _, []), do: map
  def put_if_not_empty(map, key, list), do: Map.put(map, key, list)

  @doc """
  Returns the parameter unchanged.
  """
  @spec id(any()) :: any()
  def id(param), do: param

  @doc """
  Returns the first parameter unchanged.
  """
  @spec id_first_param(any(), any()) :: any()
  def id_first_param(param, _), do: param

  @doc """
  Always returns nil.
  """
  @spec always_nil(any(), any()) :: nil
  def always_nil(_, _ \\ nil), do: nil

  @doc """
  Returns `true` if the connection is not authenticated, `false` otherwise.
  """
  @spec conn_not_authenticated?(Plug.Conn.t()) :: boolean()
  def conn_not_authenticated?(conn), do: not APIac.authenticated?(conn)

  @doc """
  Returns `true` if a list of headers contains the given content type, `false` otherwise.

  ## Example
  ```elixir
  iex> headers
  [
    {"Date", "Sun, 28 Jul 2019 21:07:14 GMT"},
    {"Content-Type", "text/html;charset=utf-8"},
    {"Transfer-Encoding", "chunked"},
    {"Server", "Apache"},
    {"X-Powered-By", "PHP/5.6"},
    {"Vary", "Accept-Encoding"},
    {"Set-Cookie", "SERVERID100401=1520152|XT4Oh|XT4Oh; path=/"},
    {"Cache-control", "private"},
    {"X-IPLB-Instance", "28305"}
  ]
  iex> Asteroid.Utils.headers_contain_content_type?(headers, "text", "html")
  true
  iex> Asteroid.Utils.headers_contain_content_type?(headers, "application", "xml")
  false
  ```
  """
  @spec headers_contain_content_type?(list(), String.t(), String.t()) :: boolean()
  def headers_contain_content_type?(headers, type, subtype) do
    # Header names are compared case-insensitively, per HTTP.
    content_type =
      Enum.find_value(headers, fn {header, value} ->
        if String.downcase(header) == "content-type", do: value, else: false
      end)

    case content_type do
      nil ->
        false

      media_type ->
        # Pinned `^type`/`^subtype` so only an exact type/subtype pair matches.
        case ContentType.content_type(media_type) do
          {:ok, ^type, ^subtype, _} -> true
          _ -> false
        end
    end
  end
end
|
lib/asteroid/utils.ex
| 0.871181
| 0.795817
|
utils.ex
|
starcoder
|
defmodule IslandsEngine.Rules do
  @moduledoc """
  Handles which actions can be applied to a game based on its state
  """

  defstruct state: :initialized, player1: :island_not_set, player2: :island_not_set

  @typedoc """
  A struct that holds the state of a game
  """
  @type t :: %__MODULE__{
          state: atom(),
          player1: atom(),
          player2: atom()
        }

  @typedoc """
  All possible actions to be used with `check/2` function
  """
  # Fixed: the implementation matches {:position_islands, _} (plural); the
  # type previously advertised {:position_island, _}.
  @type actions ::
          :add_player
          | {:position_islands, :player1 | :player2}
          | {:set_islands, :player1 | :player2}
          | {:guess_coordinate, :player1 | :player2}
          | {:win_check, :win | :no_win}

  @doc """
  Creates a new and empty rules struct
  """
  @spec new :: t()
  def new, do: %__MODULE__{}

  @doc """
  Tries to perform an action based on the current state of the game. If the action is valid, an
  `{:ok, %Rules{}}` value is returned with the updated rule state. If the action is invalid this
  function will return `:error`
  """
  @spec check(rules :: t(), action :: actions()) :: {:ok, t()} | :error
  def check(rules, action)

  # State change: initialized -> players_set
  def check(%__MODULE__{state: :initialized} = rules, :add_player) do
    {:ok, %__MODULE__{rules | state: :players_set}}
  end

  # Positioning is only allowed while the player has not committed their islands.
  def check(%__MODULE__{state: :players_set} = rules, {:position_islands, player}) do
    case Map.fetch!(rules, player) do
      # Bug fix: `set_islands` stores :islands_set (plural). The previous
      # `:island_set` atom never matched, so positioning after committing
      # crashed with a CaseClauseError instead of returning :error.
      :islands_set -> :error
      :island_not_set -> {:ok, rules}
    end
  end

  # State change: players_set -> player1_turn (once both players committed)
  def check(%__MODULE__{state: :players_set} = rules, {:set_islands, player}) do
    rules = Map.put(rules, player, :islands_set)

    case both_players_islands_set?(rules) do
      true -> {:ok, %__MODULE__{rules | state: :player1_turn}}
      false -> {:ok, rules}
    end
  end

  # State change: player1_turn -> player2_turn
  def check(%__MODULE__{state: :player1_turn} = rules, {:guess_coordinate, :player1}) do
    {:ok, %__MODULE__{rules | state: :player2_turn}}
  end

  # State change: player1_turn -> game_over
  def check(%__MODULE__{state: :player1_turn} = rules, {:win_check, win_or_not}) do
    case win_or_not do
      :win -> {:ok, %__MODULE__{rules | state: :game_over}}
      :no_win -> {:ok, rules}
    end
  end

  # State change: player2_turn -> player1_turn
  def check(%__MODULE__{state: :player2_turn} = rules, {:guess_coordinate, :player2}) do
    {:ok, %__MODULE__{rules | state: :player1_turn}}
  end

  # State change: player2_turn -> game_over
  def check(%__MODULE__{state: :player2_turn} = rules, {:win_check, win_or_not}) do
    case win_or_not do
      :win -> {:ok, %__MODULE__{rules | state: :game_over}}
      :no_win -> {:ok, rules}
    end
  end

  # Any other state/action combination is invalid.
  def check(_state, _action), do: :error

  defp both_players_islands_set?(rules) do
    rules.player1 == :islands_set && rules.player2 == :islands_set
  end
end
|
lib/islands_engine/rules.ex
| 0.889174
| 0.697725
|
rules.ex
|
starcoder
|
defmodule Cafex.Protocol.OffsetFetch do
  @moduledoc """
  This api reads back a consumer position previously written using the OffsetCommit api.

  The offset fetch request supports version 0, 1 and 2.
  To read more details, visit the [A Guide to The Kafka Protocol](https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-OffsetFetchRequest).
  """
  use Cafex.Protocol, api: :offset_fetch

  # Request/response shapes are declared via the defrequest/defresponse macros
  # provided by Cafex.Protocol (they generate the Request/Response structs).
  defrequest do
    field :api_version, [default: 0], api_version
    field :consumer_group, binary
    field :topics, [topic]
    # NOTE(review): the moduledoc mentions versions 0, 1 and 2, but this type
    # only admits 0 | 1 — confirm which versions are actually supported.
    @type api_version :: 0 | 1
    @type topic :: {topic_name :: String.t, partitions :: [partition]}
    @type partition :: integer
  end

  defresponse do
    field :topics, [topic]
    @type topic :: {partition :: integer,
                    offset :: integer,
                    metadata :: String.t,
                    error :: Cafex.Protocol.error}
  end

  def api_version(%{api_version: api_version}), do: api_version

  # Encodes the request body: length-prefixed consumer group string followed
  # by the array of topics.
  def encode(%{consumer_group: consumer_group, topics: topics}) do
    [encode_string(consumer_group),
     encode_array(topics, &encode_topic/1)]
    |> IO.iodata_to_binary
  end

  defp encode_topic({topic, partitions}) do
    [encode_string(topic),
     encode_array(partitions, &encode_partition/1)]
  end

  # Partition ids are 32-bit signed big-endian integers on the wire.
  defp encode_partition(partition), do: << partition :: 32-signed >>

  @spec decode(binary) :: Response.t
  def decode(data) when is_binary(data) do
    {topics, _} = decode_array(data, &decode_topic/1)
    %Response{topics: topics}
  end

  # Topic entry: 16-bit length-prefixed topic name, then its partition array.
  defp decode_topic(<< size :: 16-signed, topic :: size(size)-binary, rest :: binary >>) do
    {partitions, rest} = decode_array(rest, &decode_partition/1)
    {{topic, partitions}, rest}
  end

  # A metadata length of -1 is Kafka's encoding for a null string; this clause
  # normalizes it to the empty string.
  defp decode_partition(<< partition :: 32-signed, offset :: 64-signed,
                           -1 :: 16-signed, error_code :: 16-signed, rest :: binary >>) do
    {{partition, offset, "", decode_error(error_code)}, rest}
  end

  defp decode_partition(<< partition :: 32-signed, offset :: 64-signed,
                           size :: 16-signed, metadata :: size(size)-binary,
                           error_code :: 16-signed, rest :: binary >>) do
    {{partition, offset, metadata, decode_error(error_code)}, rest}
  end
end
|
lib/cafex/protocol/offset_fetch.ex
| 0.77552
| 0.550909
|
offset_fetch.ex
|
starcoder
|
defmodule DgraphEx.Expr.Uid do
  @moduledoc """
  https://docs.dgraph.io/query-language/#uid

  Syntax examples:

      q(func: uid(<uid>))
      predicate @filter(uid(<uid1>, ..., <uidn>))
      predicate @filter(uid(a)) for variable a
      q(func: uid(a,b)) for variables a and b
  """
  alias DgraphEx.Expr.Uid
  alias DgraphEx.Util

  defstruct [:value, :type]

  defmacro __using__(_) do
    quote do
      def uid(value) do
        DgraphEx.Expr.Uid.new(value)
      end
    end
  end

  @types [
    :literal,
    :expression,
  ]

  # Binaries are uid literals; atoms are query variables (expressions).
  def new(value) when is_binary(value), do: new(value, :literal)
  def new(value) when is_atom(value), do: new(value, :expression)

  # Lists of uid literals or variables are rendered inside a `uid(...)` call
  # (as in @filter), therefore any list is a uid expression.
  def new(uids) when is_list(uids), do: new(uids, :expression)

  def new(value, type)
      when (is_atom(value) or is_binary(value) or is_list(value)) and type in @types do
    %Uid{value: value, type: type}
  end

  @doc """
  Used by Func to ensure that a uid string ("0x9") is rendered as an
  expression literal `uid(0x9)` instead of an actual literal `<0x9>`.
  """
  def as_expression(%Uid{} = u), do: %{u | type: :expression}

  def as_literal(%Uid{} = u), do: %{u | type: :literal}

  def as_naked(%Uid{} = u), do: %{u | type: :naked}

  # Atom values are always query variables, regardless of the stored type.
  def render(%Uid{value: value}) when is_atom(value), do: render_expression([value])

  def render(%Uid{value: value, type: :literal}) when is_binary(value) do
    {:ok, uid_literal} = Util.as_literal(value, :uid)
    uid_literal
  end

  def render(%Uid{value: value, type: :naked}) when is_binary(value), do: value

  def render(%Uid{value: value, type: :expression}) when is_atom(value) or is_binary(value) do
    render_expression([value])
  end

  def render(%Uid{value: values, type: :expression}) when is_list(values) do
    render_expression(values)
  end

  # Renders `uid(a, b, ...)` from a list of variables/literals.
  defp render_expression(uids) when is_list(uids) do
    "uid(" <> Enum.map_join(uids, ", ", &to_string/1) <> ")"
  end
end
defimpl String.Chars, for: DgraphEx.Expr.Uid do
  # Delegate to the module's own renderer so `to_string/1` and string
  # interpolation produce valid Dgraph query syntax.
  def to_string(uid), do: DgraphEx.Expr.Uid.render(uid)
end
|
lib/dgraph_ex/expr/uid.ex
| 0.687735
| 0.518668
|
uid.ex
|
starcoder
|
defmodule AWS.DatabaseMigration do
@moduledoc """
AWS Database Migration Service
AWS Database Migration Service (AWS DMS) can migrate your data to and from the
most widely used commercial and open-source databases such as Oracle,
PostgreSQL, Microsoft SQL Server, Amazon Redshift, MariaDB, Amazon Aurora,
MySQL, and SAP Adaptive Server Enterprise (ASE).
The service supports homogeneous migrations such as Oracle to Oracle, as well as
heterogeneous migrations between different database platforms, such as Oracle to
MySQL or SQL Server to PostgreSQL.
For more information about AWS DMS, see [What Is AWS Database Migration Service?](https://docs.aws.amazon.com/dms/latest/userguide/Welcome.html) in the
*AWS Database Migration User Guide.*
"""
@doc """
Adds metadata tags to an AWS DMS resource, including replication instance,
endpoint, security group, and migration task.
These tags can also be used with cost allocation reporting to track cost
associated with DMS resources, or used in a Condition statement in an IAM policy
for DMS. For more information, see [ `Tag`
](https://docs.aws.amazon.com/dms/latest/APIReference/API_Tag.html) data type
description.
"""
def add_tags_to_resource(client, input, options \\ []),
  do: request(client, "AddTagsToResource", input, options)
@doc """
Applies a pending maintenance action to a resource (for example, to a
replication instance).
"""
def apply_pending_maintenance_action(client, input, options \\ []),
  do: request(client, "ApplyPendingMaintenanceAction", input, options)
@doc """
Cancels a single premigration assessment run.
This operation prevents any individual assessments from running if they haven't
started running. It also attempts to cancel any individual assessments that are
currently running.
"""
def cancel_replication_task_assessment_run(client, input, options \\ []),
  do: request(client, "CancelReplicationTaskAssessmentRun", input, options)
@doc """
Creates an endpoint using the provided settings.
"""
def create_endpoint(client, input, options \\ []),
  do: request(client, "CreateEndpoint", input, options)
@doc """
Creates an AWS DMS event notification subscription.
You can specify the type of source (`SourceType`) you want to be notified of,
provide a list of AWS DMS source IDs (`SourceIds`) that triggers the events, and
provide a list of event categories (`EventCategories`) for events you want to be
notified of. If you specify both the `SourceType` and `SourceIds`, such as
`SourceType = replication-instance` and `SourceIdentifier = my-replinstance`,
you will be notified of all the replication instance events for the specified
source. If you specify a `SourceType` but don't specify a `SourceIdentifier`,
you receive notice of the events for that source type for all your AWS DMS
sources. If you don't specify either `SourceType` nor `SourceIdentifier`, you
will be notified of events generated from all AWS DMS sources belonging to your
customer account.
For more information about AWS DMS events, see [Working with Events and Notifications](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Events.html)
in the *AWS Database Migration Service User Guide.*
"""
def create_event_subscription(client, input, options \\ []),
  do: request(client, "CreateEventSubscription", input, options)
@doc """
Creates the replication instance using the specified parameters.
AWS DMS requires that your account have certain roles with appropriate
permissions before you can create a replication instance. For information on the
required roles, see [Creating the IAM Roles to Use With the AWS CLI and AWS DMS API](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Security.html#CHAP_Security.APIRole).
For information on the required permissions, see [IAM Permissions Needed to Use AWS
DMS](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Security.html#CHAP_Security.IAMPermissions).
"""
def create_replication_instance(client, input, options \\ []),
  do: request(client, "CreateReplicationInstance", input, options)
@doc """
Creates a replication subnet group given a list of the subnet IDs in a VPC.
"""
def create_replication_subnet_group(client, input, options \\ []),
  do: request(client, "CreateReplicationSubnetGroup", input, options)
@doc """
Creates a replication task using the specified parameters.
"""
def create_replication_task(client, input, options \\ []),
  do: request(client, "CreateReplicationTask", input, options)
@doc """
Deletes the specified certificate.
"""
def delete_certificate(client, input, options \\ []),
  do: request(client, "DeleteCertificate", input, options)
@doc """
Deletes the connection between a replication instance and an endpoint.
"""
def delete_connection(client, input, options \\ []),
  do: request(client, "DeleteConnection", input, options)
@doc """
Deletes the specified endpoint.
All tasks associated with the endpoint must be deleted before you can delete the
endpoint.
"""
def delete_endpoint(client, input, options \\ []),
  do: request(client, "DeleteEndpoint", input, options)
@doc """
Deletes an AWS DMS event subscription.
"""
def delete_event_subscription(client, input, options \\ []),
  do: request(client, "DeleteEventSubscription", input, options)
@doc """
Deletes the specified replication instance.
You must delete any migration tasks that are associated with the replication
instance before you can delete it.
"""
def delete_replication_instance(client, input, options \\ []),
  do: request(client, "DeleteReplicationInstance", input, options)
@doc """
Deletes a subnet group.
"""
def delete_replication_subnet_group(client, input, options \\ []),
  do: request(client, "DeleteReplicationSubnetGroup", input, options)
@doc """
Deletes the specified replication task.
"""
def delete_replication_task(client, input, options \\ []),
  do: request(client, "DeleteReplicationTask", input, options)
@doc """
Deletes the record of a single premigration assessment run.
This operation removes all metadata that AWS DMS maintains about this assessment
run. However, the operation leaves untouched all information about this
assessment run that is stored in your Amazon S3 bucket.
"""
def delete_replication_task_assessment_run(client, input, options \\ []),
  do: request(client, "DeleteReplicationTaskAssessmentRun", input, options)
@doc """
Lists all of the AWS DMS attributes for a customer account.
These attributes include AWS DMS quotas for the account and a unique account
identifier in a particular DMS region. DMS quotas include a list of resource
quotas supported by the account, such as the number of replication instances
allowed. The description for each resource quota, includes the quota name,
current usage toward that quota, and the quota's maximum value. DMS uses the
unique account identifier to name each artifact used by DMS in the given region.
This command does not take any parameters.
"""
def describe_account_attributes(client, input, options \\ []),
  do: request(client, "DescribeAccountAttributes", input, options)
@doc """
Provides a list of individual assessments that you can specify for a new
premigration assessment run, given one or more parameters.
If you specify an existing migration task, this operation provides the default
individual assessments you can specify for that task. Otherwise, the specified
parameters model elements of a possible migration task on which to base a
premigration assessment run.
To use these migration task modeling parameters, you must specify an existing
replication instance, a source database engine, a target database engine, and a
migration type. This combination of parameters potentially limits the default
individual assessments available for an assessment run created for a
corresponding migration task.
If you specify no parameters, this operation provides a list of all possible
individual assessments that you can specify for an assessment run. If you
specify any one of the task modeling parameters, you must specify all of them or
the operation cannot provide a list of individual assessments. The only
parameter that you can specify alone is for an existing migration task. The
specified task definition then determines the default list of individual
assessments that you can specify in an assessment run for the task.
"""
def describe_applicable_individual_assessments(client, input, options \\ []),
  do: request(client, "DescribeApplicableIndividualAssessments", input, options)
@doc """
Provides a description of the certificate.
"""
def describe_certificates(client, input, options \\ []),
  do: request(client, "DescribeCertificates", input, options)
@doc """
Describes the status of the connections that have been made between the
replication instance and an endpoint.
Connections are created when you test an endpoint.
"""
def describe_connections(client, input, options \\ []),
  do: request(client, "DescribeConnections", input, options)
@doc """
Returns information about the type of endpoints available.
"""
def describe_endpoint_types(client, input, options \\ []),
  do: request(client, "DescribeEndpointTypes", input, options)
@doc """
Returns information about the endpoints for your account in the current region.
"""
def describe_endpoints(client, input, options \\ []),
  do: request(client, "DescribeEndpoints", input, options)
@doc """
Lists categories for all event source types, or, if specified, for a specified
source type.
You can see a list of the event categories and source types in [Working with Events and
Notifications](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Events.html)
in the *AWS Database Migration Service User Guide.*
"""
def describe_event_categories(client, input, options \\ []),
  do: request(client, "DescribeEventCategories", input, options)
@doc """
Lists all the event subscriptions for a customer account.
The description of a subscription includes `SubscriptionName`, `SNSTopicARN`,
`CustomerID`, `SourceType`, `SourceID`, `CreationTime`, and `Status`.
If you specify `SubscriptionName`, this action lists the description for that
subscription.
"""
def describe_event_subscriptions(client, input, options \\ []),
  do: request(client, "DescribeEventSubscriptions", input, options)
@doc """
Lists events for a given source identifier and source type.
You can also specify a start and end time. For more information on AWS DMS
events, see [Working with Events and Notifications](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Events.html)
in the *AWS Database Migration User Guide.*
"""
def describe_events(client, input, options \\ []),
  do: request(client, "DescribeEvents", input, options)
@doc """
Returns information about the replication instance types that can be created in
the specified region.
"""
def describe_orderable_replication_instances(client, input, options \\ []),
  do: request(client, "DescribeOrderableReplicationInstances", input, options)
@doc """
For internal use only
"""
def describe_pending_maintenance_actions(client, input, options \\ []),
  do: request(client, "DescribePendingMaintenanceActions", input, options)
@doc """
Returns the status of the RefreshSchemas operation.
"""
def describe_refresh_schemas_status(client, input, options \\ []),
  do: request(client, "DescribeRefreshSchemasStatus", input, options)
@doc """
Returns information about the task logs for the specified task.
"""
def describe_replication_instance_task_logs(client, input, options \\ []),
  do: request(client, "DescribeReplicationInstanceTaskLogs", input, options)
@doc """
Returns information about replication instances for your account in the current
region.
"""
def describe_replication_instances(client, input, options \\ []) do
request(client, "DescribeReplicationInstances", input, options)
end
@doc """
Returns information about the replication subnet groups.
"""
def describe_replication_subnet_groups(client, input, options \\ []) do
request(client, "DescribeReplicationSubnetGroups", input, options)
end
@doc """
Returns the task assessment results from Amazon S3.
This action always returns the latest results.
"""
def describe_replication_task_assessment_results(client, input, options \\ []) do
request(client, "DescribeReplicationTaskAssessmentResults", input, options)
end
@doc """
Returns a paginated list of premigration assessment runs based on filter
settings.
These filter settings can specify a combination of premigration assessment runs,
migration tasks, replication instances, and assessment run status values.
This operation doesn't return information about individual assessments. For this
information, see the `DescribeReplicationTaskIndividualAssessments` operation.
"""
def describe_replication_task_assessment_runs(client, input, options \\ []) do
request(client, "DescribeReplicationTaskAssessmentRuns", input, options)
end
@doc """
Returns a paginated list of individual assessments based on filter settings.
These filter settings can specify a combination of premigration assessment runs,
migration tasks, and assessment status values.
"""
def describe_replication_task_individual_assessments(client, input, options \\ []) do
request(client, "DescribeReplicationTaskIndividualAssessments", input, options)
end
@doc """
Returns information about replication tasks for your account in the current
region.
"""
def describe_replication_tasks(client, input, options \\ []) do
request(client, "DescribeReplicationTasks", input, options)
end
@doc """
Returns information about the schema for the specified endpoint.
"""
def describe_schemas(client, input, options \\ []) do
request(client, "DescribeSchemas", input, options)
end
@doc """
Returns table statistics on the database migration task, including table name,
rows inserted, rows updated, and rows deleted.
Note that the "last updated" column the DMS console only indicates the time that
AWS DMS last updated the table statistics record for a table. It does not
indicate the time of the last update to the table.
"""
def describe_table_statistics(client, input, options \\ []) do
request(client, "DescribeTableStatistics", input, options)
end
@doc """
Uploads the specified certificate.
"""
def import_certificate(client, input, options \\ []) do
request(client, "ImportCertificate", input, options)
end
@doc """
Lists all metadata tags attached to an AWS DMS resource, including replication
instance, endpoint, security group, and migration task.
For more information, see [ `Tag`
](https://docs.aws.amazon.com/dms/latest/APIReference/API_Tag.html) data type
description.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Modifies the specified endpoint.
"""
def modify_endpoint(client, input, options \\ []) do
request(client, "ModifyEndpoint", input, options)
end
@doc """
Modifies an existing AWS DMS event notification subscription.
"""
def modify_event_subscription(client, input, options \\ []) do
request(client, "ModifyEventSubscription", input, options)
end
@doc """
Modifies the replication instance to apply new settings.
You can change one or more parameters by specifying these parameters and the new
values in the request.
Some settings are applied during the maintenance window.
"""
def modify_replication_instance(client, input, options \\ []) do
request(client, "ModifyReplicationInstance", input, options)
end
@doc """
Modifies the settings for the specified replication subnet group.
"""
def modify_replication_subnet_group(client, input, options \\ []) do
request(client, "ModifyReplicationSubnetGroup", input, options)
end
@doc """
Modifies the specified replication task.
You can't modify the task endpoints. The task must be stopped before you can
modify it.
For more information about AWS DMS tasks, see [Working with Migration Tasks](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Tasks.html) in the
*AWS Database Migration Service User Guide*.
"""
def modify_replication_task(client, input, options \\ []) do
request(client, "ModifyReplicationTask", input, options)
end
@doc """
Reboots a replication instance.
Rebooting results in a momentary outage, until the replication instance becomes
available again.
"""
def reboot_replication_instance(client, input, options \\ []) do
request(client, "RebootReplicationInstance", input, options)
end
@doc """
Populates the schema for the specified endpoint.
This is an asynchronous operation and can take several minutes. You can check
the status of this operation by calling the DescribeRefreshSchemasStatus
operation.
"""
def refresh_schemas(client, input, options \\ []) do
request(client, "RefreshSchemas", input, options)
end
@doc """
Reloads the target database table with the source data.
"""
def reload_tables(client, input, options \\ []) do
request(client, "ReloadTables", input, options)
end
@doc """
Removes metadata tags from an AWS DMS resource, including replication instance,
endpoint, security group, and migration task.
For more information, see [ `Tag`
](https://docs.aws.amazon.com/dms/latest/APIReference/API_Tag.html) data type
description.
"""
def remove_tags_from_resource(client, input, options \\ []) do
request(client, "RemoveTagsFromResource", input, options)
end
@doc """
Starts the replication task.
For more information about AWS DMS tasks, see [Working with Migration Tasks
](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Tasks.html) in the *AWS
Database Migration Service User Guide.*
"""
def start_replication_task(client, input, options \\ []) do
request(client, "StartReplicationTask", input, options)
end
@doc """
Starts the replication task assessment for unsupported data types in the source
database.
"""
def start_replication_task_assessment(client, input, options \\ []) do
request(client, "StartReplicationTaskAssessment", input, options)
end
@doc """
Starts a new premigration assessment run for one or more individual assessments
of a migration task.
The assessments that you can specify depend on the source and target database
engine and the migration type defined for the given task. To run this operation,
your migration task must already be created. After you run this operation, you
can review the status of each individual assessment. You can also run the
migration task manually after the assessment run and its individual assessments
complete.
"""
def start_replication_task_assessment_run(client, input, options \\ []) do
request(client, "StartReplicationTaskAssessmentRun", input, options)
end
@doc """
Stops the replication task.
"""
def stop_replication_task(client, input, options \\ []) do
request(client, "StopReplicationTask", input, options)
end
@doc """
Tests the connection between the replication instance and the endpoint.
"""
def test_connection(client, input, options \\ []) do
request(client, "TestConnection", input, options)
end
# Shared dispatcher for every DMS operation in this module.
#
# Sets the service id on the client, builds the signed request and posts the
# JSON payload. `action` is the AWS API operation name (e.g. "DescribeEvents").
@spec request(AWS.Client.t(), binary(), map(), list()) ::
        {:ok, map() | nil, map()}
        | {:error, term()}
defp request(client, action, input, options) do
  client = %{client | service: "dms"}
  host = build_host("dms", client)
  url = build_url(host, client)

  # The X-Amz-Target header selects the operation on the shared endpoint;
  # the payload must be encoded before signing so the signature covers it.
  headers = [
    {"Host", host},
    {"Content-Type", "application/x-amz-json-1.1"},
    {"X-Amz-Target", "AmazonDMSv20160101.#{action}"}
  ]

  payload = encode!(client, input)
  headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
  post(client, url, payload, headers, options)
end
# Executes the signed POST and normalizes the response:
#   * 200 with a body  -> {:ok, decoded_map, raw_response}
#   * 200 empty body   -> {:ok, nil, raw_response}
#   * any other status -> {:error, {:unexpected_response, response}}
#   * transport error  -> {:error, reason} passed through unchanged
defp post(client, url, payload, headers, options) do
  case AWS.Client.request(client, :post, url, payload, headers, options) do
    {:ok, %{status_code: 200, body: body} = response} ->
      # `if` without `else` yields nil for an empty body — matches the
      # `map() | nil` in request/4's @spec.
      body = if body != "", do: decode!(client, body)
      {:ok, body, response}

    {:ok, response} ->
      {:error, {:unexpected_response, response}}

    error = {:error, _reason} ->
      error
  end
end
# Resolves the request hostname.
#
# The "local" region is a development escape hatch: it targets an explicitly
# configured endpoint, or plain "localhost" when none is set. Otherwise the
# host takes the conventional "<prefix>.<region>.<endpoint>" AWS form.
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
  endpoint
end

defp build_host(_endpoint_prefix, %{region: "local"}) do
  "localhost"
end

defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
  "#{endpoint_prefix}.#{region}.#{endpoint}"
end
# Builds the full request URL from the resolved host plus the client's
# protocol and port (both are required keys on the client map).
defp build_url(host, %{:proto => proto, :port => port}) do
  "#{proto}://#{host}:#{port}/"
end
# JSON-encode the request payload via the client's configured codec.
defp encode!(client, payload) do
  AWS.Client.encode!(client, payload, :json)
end

# JSON-decode a response body via the client's configured codec.
defp decode!(client, payload) do
  AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/database_migration.ex
| 0.874212
| 0.490297
|
database_migration.ex
|
starcoder
|
if Code.ensure_loaded?(Plug) do
  defmodule Plug.Session.MNEMONIX do
    @moduledoc """
    Stores the session in a Mnemonix store.

    This store does not create the Mnemonix store; it expects that a reference
    to an existing store server is passed in as an argument.

    We recommend carefully choosing the store type for production usage.
    Consider: persistence, cleanup, and cross-node availability (or lack thereof).

    The session id is used as a key to reference the session within the store;
    the session itself is encoded as the two-tuple:

        {timestamp :: :erlang.timestamp, session :: map}

    The timestamp is updated whenever there is a read or write to the table,
    and may be used to detect if a session is still active.

    ## Options

    * `:mnemonix` - `t:GenServer.name/0` reference (required)

    ## Examples

        # Start a named store when the application starts
        Mnemonix.Stores.Map.start_link(name: My.Plug.Session)

        # Use the session plug with the store name
        plug Plug.Session, store: :mnemonix, key: "_my_app_session", mnemonix: My.Plug.Session
    """

    @behaviour Plug.Session.Store

    alias Plug.Session.Store

    # Store-level error for this session plug. NOTE(review): not raised by any
    # of the callbacks below — kept as-is since it is part of the module's
    # public surface.
    defmodule Exception do
      defexception [:message]

      def exception(opts) do
        %__MODULE__{message: Keyword.get(opts, :message, "error in Mnemonix session plug")}
      end
    end

    # Number of random bytes per session id (Base64-encoded before use).
    @sid_bytes 96

    # Returns the store reference; raises at startup when `:mnemonix` is
    # missing from the plug options.
    @spec init(Plug.opts()) :: GenServer.name() | no_return
    def init(opts) do
      Keyword.fetch!(opts, :mnemonix)
    end

    # Looks up the session for `cookie`. On a hit the entry is immediately
    # re-written via put/4, refreshing its timestamp (the activity marker
    # described in the moduledoc). On a miss, returns an empty session with a
    # nil sid so Plug will mint a new one on write.
    @spec get(Plug.Conn.t(), Store.cookie(), GenServer.name()) :: {Store.sid(), Store.session()}
    def get(conn, cookie, store) do
      with {:ok, {_ts, data}} <- Mnemonix.fetch(store, cookie) do
        {put(conn, cookie, data, store), data}
      else
        :error ->
          {nil, %{}}
      end
    end

    @spec put(Plug.Conn.t(), Store.sid(), any, GenServer.name()) :: Store.cookie()
    def put(conn, sid, data, store)

    # No sid yet: mint a fresh random one and store under it.
    def put(conn, nil, data, store) do
      put(conn, make_sid(), data, store)
    end

    # Mnemonix.put/3 returns the store reference on success; matching on
    # ^store asserts the write succeeded, then the sid doubles as the cookie.
    def put(_conn, sid, data, store) when is_map(data) do
      with ^store <- Mnemonix.put(store, sid, {timestamp(), data}) do
        sid
      end
    end

    # Coerce non-map session data (e.g. a keyword list) into a map first.
    def put(conn, sid, data, store) do
      put(conn, sid, Enum.into(data, %{}), store)
    end

    @spec delete(Plug.Conn.t(), Store.sid(), GenServer.name()) :: :ok
    def delete(conn, sid, store)

    # Mnemonix.delete/2 also returns the store reference on success.
    def delete(_conn, sid, store) do
      with ^store <- Mnemonix.delete(store, sid) do
        :ok
      end
    end

    # Random session id: @sid_bytes of entropy, Base64-encoded.
    defp make_sid do
      @sid_bytes |> :crypto.strong_rand_bytes() |> Base.encode64()
    end

    # Erlang timestamp used as the session activity marker.
    defp timestamp() do
      :os.timestamp()
    end
  end
end
|
lib/plug/session/mnemonix.ex
| 0.841077
| 0.479565
|
mnemonix.ex
|
starcoder
|
defmodule Sim.Laboratory.Registry do
  @moduledoc """
  Pure state-transformation helpers for a registry of laboratory entries.

  The registry state is a plain map of `id => entry`, where each entry is
  `%{id: id, pid: pid, ref: monitor_ref, timestamp: DateTime.t()}`. Functions
  return the updated state (usually paired with a result) so a wrapping
  process can hold the state between calls.
  """

  alias Sim.Laboratory.{InVitro, InVitroSupervisor}

  # Entry lifetime: 1 hour in seconds.
  @expires_in 60 * 60

  @doc """
  Starts a new `InVitro` process under the supervisor, monitors it, and
  records it in the state under a freshly generated token id.

  Returns `{{:ok, entry}, new_state}`.
  """
  def create(state, pub_sub) do
    id = generate_token()
    {:ok, pid} = create_entry(id, pub_sub)
    ref = Process.monitor(pid)
    entry = %{id: id, pid: pid, ref: ref, timestamp: DateTime.utc_now()}
    {{:ok, entry}, update_state(state, entry)}
  end

  @doc """
  Fetches the entry for `id`, or `{:error, :not_found}`.
  """
  def get(state, id) do
    case Map.get(state, id) do
      nil -> {:error, :not_found}
      entry -> entry
    end
  end

  @doc """
  Finds the `{id, entry}` pair whose monitor reference matches `ref`,
  or `nil` when no entry carries that reference.
  """
  def find_by_ref(state, ref) do
    Enum.find(state, fn {_key, value} -> value.ref == ref end)
  end

  @doc """
  Applies `func` to the object held by the entry's process.

  Returns `{reply, state}`. Previously the success branch returned the bare
  `GenServer.call/2` reply while the error branch returned
  `{{:error, :not_found}, state}`; both branches now share the `{result, state}`
  shape used throughout this module.
  """
  def update_object(state, id, func) when is_function(func) do
    case Map.get(state, id) do
      nil ->
        {{:error, :not_found}, state}

      entry ->
        {GenServer.call(entry.pid, {:update_object, func}), state}
    end
  end

  @doc """
  Replaces `key` in the entry with `value`.

  Returns `{new_entry, new_state}` or `{{:error, :not_found}, state}`.
  """
  def update(state, id, key, value) do
    case Map.get(state, id) do
      nil ->
        {{:error, :not_found}, state}

      entry ->
        new_entry = Map.put(entry, key, value)
        {new_entry, update_state(state, new_entry)}
    end
  end

  @doc """
  Terminates the entry's process and removes it from the state.

  Returns `{:ok, new_state}` or `{{:error, :not_found}, state}`.
  """
  def delete(state, id) do
    case Map.get(state, id) do
      nil ->
        {{:error, :not_found}, state}

      entry ->
        :ok = DynamicSupervisor.terminate_child(InVitroSupervisor, entry.pid)
        {:ok, Map.delete(state, id)}
    end
  end

  @doc """
  Deletes every entry older than the configured expiry (#{@expires_in}s) and
  returns the pruned state.
  """
  def prune(state) do
    DateTime.utc_now()
    |> DateTime.add(-@expires_in)
    |> terminate_expired(state)
  end

  # Removes all entries whose timestamp is strictly before `expire_date`.
  defp terminate_expired(expire_date, state) do
    Enum.reduce(state, state, fn {_key, item}, state ->
      case DateTime.compare(item.timestamp, expire_date) do
        :lt ->
          {:ok, state} = delete(state, item.id)
          state

        _ ->
          state
      end
    end)
  end

  # Starts an InVitro child under the dynamic supervisor.
  defp create_entry(id, pub_sub) do
    DynamicSupervisor.start_child(InVitroSupervisor, {InVitro, entry_id: id, pub_sub: pub_sub})
  end

  defp update_state(state, entry) do
    Map.put(state, entry.id, entry)
  end

  # 128-bit random token rendered as a decimal string.
  defp generate_token do
    :crypto.strong_rand_bytes(16) |> :crypto.bytes_to_integer() |> Integer.to_string()
  end
end
|
apps/sim/lib/sim/laboratory/registry.ex
| 0.538255
| 0.430686
|
registry.ex
|
starcoder
|
defmodule BSV.TxOut do
  @moduledoc """
  A TxOut is a data structure representing a single output in a `t:BSV.Tx.t/0`.

  A TxOut consists of the number of satoshis being locked in the output, and a
  `t:BSV.Script.t/0`, otherwise known as the locking script. The output can
  later be spent by creating an input in a new transaction with a corresponding
  unlocking script.

  The index of the output within it's containing `t:BSV.Tx.t/0`, denotes it's
  `t:BSV.TxOut.vout/0`.
  """
  alias BSV.{Script, Serializable, VarInt}
  import BSV.Util, only: [decode: 2, encode: 2]

  defstruct satoshis: 0, script: %Script{}

  @typedoc "TxOut struct"
  @type t() :: %__MODULE__{
          satoshis: non_neg_integer(),
          script: Script.t()
        }

  @typedoc """
  Vout - Vector of an output in a Bitcoin transaction

  An integer representing the index of a TxOut.
  """
  @type vout() :: non_neg_integer()

  @doc """
  Parses the given binary into a `t:BSV.TxOut.t/0`.

  Returns the result in an `:ok` / `:error` tuple pair.

  ## Options

  The accepted options are:

  * `:encoding` - Optionally decode the binary with either the `:base64` or `:hex` encoding scheme.
  """
  @spec from_binary(binary(), keyword()) :: {:ok, t()} | {:error, term()}
  def from_binary(data, opts \\ []) when is_binary(data) do
    encoding = Keyword.get(opts, :encoding)

    with {:ok, data} <- decode(data, encoding),
         {:ok, txout, _rest} <- Serializable.parse(%__MODULE__{}, data) do
      {:ok, txout}
    end
  end

  @doc """
  Parses the given binary into a `t:BSV.TxOut.t/0`.

  As `from_binary/2` but returns the result or raises an exception.
  """
  @spec from_binary!(binary(), keyword()) :: t()
  def from_binary!(data, opts \\ []) when is_binary(data) do
    case from_binary(data, opts) do
      {:ok, txout} ->
        txout

      {:error, error} ->
        raise BSV.DecodeError, error
    end
  end

  @doc """
  Returns the number of bytes of the given `t:BSV.TxOut.t/0`.
  """
  @spec get_size(t()) :: non_neg_integer()
  def get_size(%__MODULE__{} = txout),
    do: to_binary(txout) |> byte_size()

  @doc """
  Serialises the given `t:BSV.TxOut.t/0` into a binary.

  ## Options

  The accepted options are:

  * `:encoding` - Optionally encode the binary with either the `:base64` or `:hex` encoding scheme.
  """
  # Spec fix: the function is to_binary/2 (opts with a default), but the
  # previous spec declared arity 1, which Dialyzer rejects.
  @spec to_binary(t(), keyword()) :: binary()
  def to_binary(%__MODULE__{} = txout, opts \\ []) do
    encoding = Keyword.get(opts, :encoding)

    txout
    |> Serializable.serialize()
    |> encode(encoding)
  end

  defimpl Serializable do
    @impl true
    def parse(txout, data) do
      # Wire format: 8-byte little-endian satoshi amount, then a
      # VarInt-prefixed locking script.
      with <<satoshis::little-64, data::binary>> <- data,
           {:ok, script, rest} <- VarInt.parse_data(data),
           {:ok, script} <- Script.from_binary(script) do
        {:ok,
         struct(txout,
           satoshis: satoshis,
           script: script
         ), rest}
      end
    end

    @impl true
    def serialize(%{satoshis: satoshis, script: script}) do
      script_data =
        script
        |> Script.to_binary()
        |> VarInt.encode_binary()

      <<
        satoshis::little-64,
        script_data::binary
      >>
    end
  end
end
|
lib/bsv/tx_out.ex
| 0.896925
| 0.797714
|
tx_out.ex
|
starcoder
|
defmodule Militerm.Parsers.VerbSyntax do
@moduledoc """
Provides a parser to take a simple string description of a verb form and parse it into
something that can be matched against player input.
"""
@doc """
Parses the string into a set of directives and texts
## Examples
iex> VerbSyntax.parse("[<adverb>] at <direct:object'thing> {through|with} <instrument:object:me>")
%{
pattern: [
{:optional, "adverb", "adverb"}, {:word_list, ["at"], nil},
{:direct, :object, :singular, [:me, :near]},
{:word_list, ["through", "with"], nil},
{:instrument, :object, :singular, [:me]}
],
short: "[<adverb>] at <thing> {through|with} <object>",
weight: 39
}
iex> VerbSyntax.parse("<string'message>")
%{
pattern: [
{:string, "message"}
],
short: "<message>",
weight: 5
}
iex> VerbSyntax.parse("<direct:object'thing>")
%{
pattern: [
{:direct, :object, :singular, [:me, :near]}
],
short: "<thing>",
weight: 7
}
iex> VerbSyntax.parse("<number> <direct:objects'things> with <indirect:object:me>")
%{
pattern: [
{:number, nil},
{:direct, :object, :plural, [:me, :near]},
{:word_list, ["with"], nil},
{:indirect, :object, :singular, [:me]}
],
short: "<number> <things> with <object>",
weight: 34
}
iex> VerbSyntax.parse("<direct:object:me'something> with <string:quoted'phrase>")
%{
pattern: [
{:direct, :object, :singular, [:me]},
{:word_list, ["with"], nil},
{:quoted_string, "phrase"}
],
short: "<something> with \\"<phrase>\\"",
weight: 25
}
"""
# {pattern, short, weight}
def parse(string) when is_binary(string),
do: parse(%{source: string, pattern: [], short: [], weight: 0})
def parse(error) when is_tuple(error), do: error
def parse(%{source: "", pattern: pattern, short: short, weight: weight} = state) do
%{
pattern: Enum.reverse(pattern),
short: String.trim(to_string(Enum.reverse(short))),
weight: weight
}
end
def parse(state) do
state
|> trim_leading_space()
|> try_string()
|> trim_leading_space()
|> try_number()
|> trim_leading_space()
|> try_fraction()
|> trim_leading_space()
|> try_slot()
|> trim_leading_space()
|> try_word_list()
|> trim_leading_space()
|> try_optional()
|> trim_leading_space()
|> expect_word()
|> parse()
end
def trim_leading_space(%{source: source} = state) do
%{state | source: String.trim_leading(source)}
end
def trim_leading_space(state), do: state
def try_string(%{
source: <<"<string:", rest::binary>>,
pattern: pattern,
short: short,
weight: weight
}) do
# allow arbitrary string input in a command
case parse_string_expectation(rest) do
{:error, _} = error ->
error
{type, name, raw_name, heft, remaining} ->
%{
source: remaining,
pattern: [{type, raw_name} | pattern],
short: [[" ", name] | short],
weight: weight + heft
}
end
end
def try_string(%{
source: <<"<string>", rest::binary>>,
pattern: pattern,
short: short,
weight: weight
}) do
# allow arbitrary string input in a command
%{
source: rest,
pattern: [{:string, nil} | pattern],
short: [" string" | short],
weight: weight + 5
}
end
def try_string(%{
source: <<"<string'", rest::binary>>,
pattern: pattern,
short: short,
weight: weight
}) do
# allow arbitrary string input in a command
case String.split(rest, ">", parts: 2) do
[_] ->
{:error, "String slot not terminated"}
[name, remaining] ->
%{
source: remaining,
pattern: [{:string, name} | pattern],
short: [[" <", name, ">"] | short],
weight: weight + 5
}
end
end
def try_string(state), do: state
def try_number(%{
source: <<"<number>", rest::binary>>,
pattern: pattern,
short: short,
weight: weight
}) do
%{
source: rest,
pattern: [{:number, nil} | pattern],
short: [[" <number>"] | short],
weight: weight + 10
}
end
# Named number slot: `<number'name>`.
#
# Bug fix: the parsed `name` and `remainder` were bound but never used — the
# clause kept `source: rest` (so the slot text was never consumed, re-parsing
# it forever) and recorded `{:number, nil}` instead of the captured name.
# Mirrors the named `<fraction'...>` clause below.
def try_number(%{
      source: <<"<number'", rest::binary>>,
      pattern: pattern,
      short: short,
      weight: weight
    }) do
  case String.split(rest, ">", parts: 2) do
    [_] ->
      {:error, "Number slot not terminated"}

    [name, remainder] ->
      %{
        source: remainder,
        pattern: [{:number, name} | pattern],
        short: [[" <", name, ">"] | short],
        weight: weight + 10
      }
  end
end
def try_number(state), do: state
def try_fraction(%{
source: <<"<fraction>", rest::binary>>,
pattern: pattern,
short: short,
weight: weight
}) do
%{
source: rest,
pattern: [{:fraction, nil} | pattern],
short: [[" <fraction>"] | short],
weight: weight + 10
}
end
# Named fraction slot: `<fraction'name>`.
#
# Bug fix: the success branch kept `source: rest` instead of the `remainder`
# after the closing ">", so the consumed slot text stayed in the source and
# would be parsed again on the next pass.
def try_fraction(%{
      source: <<"<fraction'", rest::binary>>,
      pattern: pattern,
      short: short,
      weight: weight
    }) do
  case String.split(rest, ">", parts: 2) do
    [_] ->
      {:error, "Fraction slot not terminated"}

    [name, remainder] ->
      %{
        source: remainder,
        pattern: [{:fraction, name} | pattern],
        short: [[" <", name, ">"] | short],
        weight: weight + 10
      }
  end
end
def try_fraction(state), do: state
def try_slot(%{source: <<"<", rest::binary>>, pattern: pattern, short: short, weight: weight}) do
# parse slot description
case parse_slot(rest) do
{:error, _} = error ->
error
{:ok, slot, short_bit, heft, remaining} ->
%{
source: remaining,
pattern: [slot | pattern],
short: [[" ", short_bit] | short],
weight: weight + heft
}
end
end
def try_slot(%{source: <<"[<", rest::binary>>, pattern: pattern, short: short, weight: weight}) do
# parse slot description
case parse_optional_slot(rest) do
{:error, _} = error ->
error
{:ok, slot, short_bit, heft, remaining} ->
%{
source: remaining,
pattern: [slot | pattern],
short: [[" ", short_bit] | short],
weight: weight + heft
}
end
end
def try_slot(state), do: state
def try_word_list(%{
source: <<"{", rest::binary>>,
pattern: pattern,
short: short,
weight: weight
}) do
# parse word option list
case parse_options(rest) do
{:error, _} = error ->
error
{words, short_bit, heft, remaining} ->
%{
source: remaining,
pattern: [words | pattern],
short: [[" ", short_bit] | short],
weight: weight + heft
}
end
end
def try_word_list(%{
source: <<"[{", rest::binary>>,
pattern: pattern,
short: short,
weight: weight
}) do
# parse word option list
case parse_options(rest) do
{:error, _} = error ->
error
{words, short_bit, heft, remaining} ->
%{
source: remaining,
pattern: [words | pattern],
short: [[" ", short_bit] | short],
weight: weight + heft
}
end
end
def try_word_list(state), do: state
# Optional literal word sequence: `[word]` or `[two words]`.
#
# A single word becomes `:optional`, multiple words `:optional_spaces`, and
# an empty bracket pair is an error. Fix: the catch-all case variable was
# named `otherwise` without being used, producing a compiler warning.
def try_optional(%{
      source: <<"[", rest::binary>>,
      pattern: pattern,
      short: short,
      weight: weight
    }) do
  case String.split(rest, "]", parts: 2) do
    [words, remaining] ->
      bits = String.split(words, ~r{\s+}, trim: true)

      type =
        case bits do
          [_] -> :optional
          [_, _ | _] -> :optional_spaces
          _ -> nil
        end

      if type do
        %{
          source: remaining,
          pattern: [{type, bits, nil} | pattern],
          short: [[" [", Enum.intersperse(bits, " "), "]"] | short],
          weight: weight + 5
        }
      else
        {:error, "empty []"}
      end

    _otherwise ->
      {:error, "missing closing ]"}
  end
end

def try_optional(state), do: state
def expect_word(%{source: ""} = state), do: state
def expect_word(%{source: string, pattern: pattern, short: short, weight: weight} = state) do
# find the first word (to a space or end of sentence) and add it to the pattern as a word literal
if String.match?(string, ~r{^[A-Za-z0-9]}) do
case String.split(string, " ", parts: 2) do
[word, remaining] ->
%{
source: remaining,
pattern: [{:word_list, [word], nil} | pattern],
short: [[" ", word] | short],
weight: weight + 10
}
[word] ->
%{
source: "",
pattern: [{:word_list, [word], nil} | pattern],
short: [[" ", word] | short],
weight: weight + 10
}
end
else
state
end
end
def parse_slot(string) do
[description, remaining] = String.split(string, ">", parts: 2)
[bits, maybe_name] =
case String.split(description, "'", parts: 2) do
[_, _] = result -> result
[result] -> [result, nil]
end
bits = String.split(bits, ":")
{pattern, name, heft} =
case bits do
[word] ->
# pre-defined word list
{
{:word_list, word, if(is_nil(maybe_name), do: word, else: maybe_name)},
if(is_nil(maybe_name), do: word, else: maybe_name),
10
}
[slot_name, object_type | env_bits] ->
slot = String.to_atom(slot_name)
with {:ok, type, number} <- interpret_objective_type(object_type),
{:ok, env} <- interpret_objective_env(env_bits) do
# part of speech, etc.
{
{slot, type, number, env},
if(is_nil(maybe_name), do: object_type, else: maybe_name),
7
}
else
error -> error
end
end
{:ok, pattern, ["<", name, ">"], heft, remaining}
end
def parse_optional_slot(string) do
[description, remaining] = String.split(string, ">]", parts: 2)
[list_name, maybe_name] =
case String.split(description, "'", parts: 2) do
[_, _] = result -> result
[result] -> [result, result]
end
{:ok, {:optional, list_name, maybe_name},
["[<", if(is_nil(maybe_name), do: list_name, else: maybe_name), ">]"], 5, remaining}
end
def parse_options(string) do
[words, remaining] = String.split(string, "}", parts: 2)
bits = String.split(words, "|", trim: true)
type =
if Enum.any?(bits, &String.contains?(&1, " ")) do
:word_list_spaces
else
:word_list
end
{{type, bits, nil}, ["{", Enum.join(bits, "|"), "}"], 10, remaining}
end
def parse_string_expectation(string) do
case String.split(string, ">", parts: 2) do
[definition, rest] ->
case String.split(definition, "'", parts: 2) do
[type, raw_name] ->
name = if type == "quoted", do: ["\"<", raw_name, ">\""], else: ["<", raw_name, ">"]
case type do
"quoted" -> {:quoted_string, name, raw_name, 8, rest}
"small" -> {:short_string, name, raw_name, 6, rest}
"long" -> {:long_string, name, raw_name, 5, rest}
_ -> {:string, name, raw_name, 5, rest}
end
[type] ->
name = if type == "quoted", do: "\"<string>\"", else: "<string>"
case type do
"quoted" -> {:quoted_string, name, nil, 8, rest}
"small" -> {:short_string, name, nil, 6, rest}
"long" -> {:long_string, name, nil, 5, rest}
_ -> {:string, name, nil, 5, rest}
end
end
_ ->
{:error, "Missing > in <string:...>"}
end
end
def interpret_objective_type(type) do
case type do
"living" ->
{:ok, :living, :singular}
"livings" ->
{:ok, :living, :plural}
"object" ->
{:ok, :object, :singular}
"objects" ->
{:ok, :object, :plural}
"player" ->
{:ok, :player, :singular}
"players" ->
{:ok, :player, :plural}
_ = unknown ->
{:error, "Unknown type of direct (#{unknown})."}
end
end
def interpret_objective_env(envs, acc \\ [])
def interpret_objective_env([], []), do: {:ok, [:me, :near]}
def interpret_objective_env([], acc), do: {:ok, Enum.reverse(acc)}
def interpret_objective_env(["here" | rest], acc),
do: interpret_objective_env(rest, [:here | acc])
def interpret_objective_env(["me" | rest], acc), do: interpret_objective_env(rest, [:me | acc])
def interpret_objective_env(["direct" | rest], acc),
do: interpret_objective_env(rest, [:direct | acc])
def interpret_objective_env(["indirect" | rest], acc),
do: interpret_objective_env(rest, [:indirect | acc])
def interpret_objective_env(["close" | rest], acc),
do: interpret_objective_env(rest, [:close | acc])
def interpret_objective_env(["near" | rest], acc),
do: interpret_objective_env(rest, [:near | acc])
def interpret_objective_env([env | _], _), do: {:error, "Unknown environment (#{env})"}
end
|
lib/militerm/parsers/verb_syntax.ex
| 0.87153
| 0.585309
|
verb_syntax.ex
|
starcoder
|
defmodule ExMicrosoftBot.TokenValidation do
@moduledoc """
This module provides functions to validate the authorization token recived by the bot service
from the Microsoft Bot Framework
"""
require Logger
alias ExMicrosoftBot.SigningKeysManager
@doc """
Helper function to validate the authentication information for the bot
"""
@spec validate_bot_credentials?(Keyword.t()) :: boolean
def validate_bot_credentials?(headers) do
Logger.debug(
"ExMicrosoftBot.TokenValidation.validate_bot_credentials?: Going to validate the bot credentials"
)
validate_bot_credentials?(
headers,
Application.get_env(:ex_microsoftbot, :disable_token_validation)
)
end
@spec validate_bot_credentials?(Keyword.t(), boolean) :: boolean
# Token validation disabled (e.g. when running against the bot emulator):
# accept unconditionally.
defp validate_bot_credentials?(_headers, true) do
  Logger.debug(
    "ExMicrosoftBot.TokenValidation.validate_bot_credentials?: Going to skip token validation as its an emulator"
  )

  # In case bot emulator is used ignore the credentials
  true
end

defp validate_bot_credentials?(headers, _) do
  # Headers arrive as a list of {name, value} tuples. Map.new/1 replaces the
  # manual reduce-and-put; like the original, later duplicates win.
  auth_header =
    headers
    |> Map.new()
    |> Map.get("authorization")

  case auth_header do
    "Bearer " <> auth_token ->
      validate_auth_token(auth_token)

    _ ->
      Logger.debug("ExMicrosoftBot.TokenValidation.validate_bot_credentials? Failed")
      false
  end
end
defp validate_auth_token(token) do
with {:ok, jwt, _jws} <- get_jwt_from_string(token),
true <- contains_valid_issuer?(jwt),
true <- contains_valid_audience?(jwt),
true <- contains_valid_app_id_claim?(jwt),
true <- token_not_expired?(jwt),
true <- has_valid_cryptographic_sig?(token) do
Logger.debug("ExMicrosoftBot.TokenValidation.validate_auth_token: Passed")
true
else
failure_value ->
Logger.debug(
"ExMicrosoftBot.TokenValidation.validate_auth_token: Failed #{inspect(failure_value)}"
)
false
end
end
defp get_jwt_from_string(token) do
Logger.debug("ExMicrosoftBot.TokenValidation.get_jwt_from_string: Going to parse token")
{:ok, JOSE.JWT.peek_payload(token), JOSE.JWT.peek_protected(token)}
rescue
_e ->
Logger.debug("ExMicrosoftBot.TokenValidation.get_jwt_from_string: Failed")
{:error, "Unable to parse the token"}
end
defp expected_issuer_claim, do: Application.get_env(:ex_microsoftbot, :issuer_claim)
defp contains_valid_issuer?(%JOSE.JWT{} = jwt),
do: contains_valid_issuer?(expected_issuer_claim(), jwt)
defp contains_valid_issuer?(issuer_claim, %JOSE.JWT{fields: %{"iss" => issuer_claim}}), do: true
defp contains_valid_issuer?(_issuer_claim, %JOSE.JWT{}) do
Logger.debug("ExMicrosoftBot.TokenValidation.contains_valid_issuer? Failed")
false
end
defp expected_audience_claim, do: Application.get_env(:ex_microsoftbot, :app_id)
defp contains_valid_audience?(%JOSE.JWT{} = jwt),
do: contains_valid_audience?(expected_audience_claim(), jwt)
defp contains_valid_audience?(audience_claim, %JOSE.JWT{fields: %{"aud" => audience_claim}}),
do: true
defp contains_valid_audience?(_audience_claim, %JOSE.JWT{}) do
Logger.debug("ExMicrosoftBot.TokenValidation.contains_valid_audience? Failed")
false
end
defp contains_valid_app_id_claim?(%JOSE.JWT{} = jwt) do
contains_valid_app_id_claim?(jwt, Application.get_env(:ex_microsoftbot, :using_bot_emulator))
end
defp contains_valid_app_id_claim?(%JOSE.JWT{fields: %{"appid" => app_id}}, true),
do: app_id == Application.get_env(:ex_microsoftbot, :app_id)
defp contains_valid_app_id_claim?(token, true) do
# In case extra bot validation is required and app id isn't in claim then fail
Logger.debug(
"ExMicrosoftBot.TokenValidation.contains_valid_app_id_claim? Failed. Token received #{
inspect(token)
}"
)
false
end
# This will occur for prod
defp contains_valid_app_id_claim?(_, _), do: true
defp token_not_expired?(%JOSE.JWT{fields: %{"exp" => expiry}}) do
with expiry_time <- Timex.from_unix(expiry),
time_to_compare_with <- Timex.now() |> Timex.shift(minutes: 5),
true <- Timex.before?(time_to_compare_with, expiry_time) do
true
else
_ ->
Logger.debug("ExMicrosoftBot.TokenValidation.token_not_expired? Failed")
false
end
end
# Verifies the token signature against the published signing keys.
#
# Enum.any?/2 short-circuits on the first key that validates; the previous
# version verified against every key, mapped, filtered and counted successes
# — same boolean result, strictly more work.
defp has_valid_cryptographic_sig?(token) do
  case SigningKeysManager.get_keys() do
    {:ok, keys} ->
      Enum.any?(keys, fn key ->
        match?({true, _, _}, JOSE.JWT.verify(key, token))
      end)

    {:error, _} ->
      false
  end
end
end
|
lib/token_validation.ex
| 0.751466
| 0.40928
|
token_validation.ex
|
starcoder
|
if Code.ensure_loaded?(Ecto.Type) do
  defmodule Massex.Ecto.Type do
    @moduledoc """
    Provides a type for Ecto to store masses with their units. The underlying type
    should be a map, JSONB would be perfect in a PostgreSQL database.

    ## Migration Example

        create table(:foo) do
          add :mass, :jsonb
        end

    ## Schema Example

        schema "foo" do
          field :mass, Massex.Ecto.Type
        end
    """

    # Ecto >= 3.2 exposes `use Ecto.Type`; fall back to the plain behaviour
    # for older Ecto versions.
    if macro_exported?(Ecto.Type, :__using__, 1) do
      use Ecto.Type
    else
      @behaviour Ecto.Type
    end

    # Stored as a map (JSONB-friendly).
    @spec type() :: :map
    def type, do: :map

    # Embedded schemas persist the dumped (map) representation.
    def embed_as(_), do: :dump

    # Accepts a Massex struct, an {amount, unit} tuple, or a map with
    # string/atom keys; amounts may be integers, binaries or Decimals.
    @spec cast(Massex.t() | {integer(), String.t()} | map() | any()) :: :error | {:ok, Massex.t()}
    def cast(%Massex{} = mass) do
      {:ok, mass}
    end

    def cast({amount, unit})
        when (is_integer(amount) or is_binary(amount)) and (is_binary(unit) or is_atom(unit)) do
      {:ok, Massex.new(amount, unit)}
    end

    # Decimal amounts need their own clause: %Decimal{} is neither an
    # integer nor a binary, so the guards above reject it.
    def cast({%Decimal{} = amount, unit})
        when is_binary(unit) or is_atom(unit) do
      {:ok, Massex.new(amount, unit)}
    end

    def cast(%{"amount" => amount, "unit" => unit})
        when (is_integer(amount) or is_binary(amount)) and (is_binary(unit) or is_atom(unit)) do
      {:ok, Massex.new(amount, unit)}
    end

    def cast(%{"amount" => %Decimal{} = amount, "unit" => unit})
        when is_binary(unit) or is_atom(unit) do
      {:ok, Massex.new(amount, unit)}
    end

    def cast(%{amount: amount, unit: unit})
        when (is_integer(amount) or is_binary(amount)) and
               (is_binary(unit) or is_atom(unit)) do
      {:ok, Massex.new(amount, unit)}
    end

    def cast(%{amount: %Decimal{} = amount, unit: unit})
        when is_binary(unit) or is_atom(unit) do
      {:ok, Massex.new(amount, unit)}
    end

    def cast(_), do: :error

    # Serializes to a plain map with string keys for storage.
    @spec dump(any()) :: :error | {:ok, {integer(), String.t()}}
    def dump(%Massex{} = mass) do
      {:ok, %{"amount" => mass.amount, "unit" => to_string(mass.unit)}}
    end

    def dump(_), do: :error

    # NOTE(review): `String.to_atom/1` creates atoms from stored data; atoms
    # are never garbage collected, so this is only safe while the set of unit
    # strings is small and trusted. Consider `String.to_existing_atom/1` —
    # verify Massex declares all unit atoms before loading.
    @spec load(map()) :: {:ok, Massex.t()}
    def load(%{"amount" => amount, "unit" => unit})
        when is_integer(amount) or is_binary(amount) do
      {:ok, Massex.new(amount, String.to_atom(unit))}
    end
  end
end
|
lib/library_support/ecto.ex
| 0.837421
| 0.467332
|
ecto.ex
|
starcoder
|
defmodule FlowMonitor do
  @moduledoc """
  Measure progress of each step in a `Flow` pipeline.
  """
  alias FlowMonitor.{Collector, Inspector}
  @doc """
  Runs the metrics collector on a given `Flow` pipeline.
  Results are store in a directory `{graph_name}-{timestamp}` in a given path.
  See `FlowMonitor.Config` for configurable options which can be passed as keyword list `opts`.
  ## Examples:
  #### Specify path for collected metrics, name and title
  opts = [
  path: "./metrics",
  graph_name: "collected-metrics",
  graph_title: "Metrics collected from a Flow execution"
  ]
  FlowMonitor.run(
  1..100_000
  |> Flow.from_enumerable()
  |> Flow.map(&(&1 * &1)),
  opts
  )
  #### Specify other graph parameters
  opts = [
  font_name: "Verdana",
  font_size: 12,
  graph_size: {800, 600},
  graph_range: {1000, 15000}
  ]
  FlowMonitor.run(
  1..100_000
  |> Flow.from_enumerable()
  |> Flow.map(&(&1 * &1)),
  opts
  )
  """
  @spec run(any(), keyword()) :: any()
  defmacro run(pipeline, opts \\ []) do
    # Stage names are extracted from the pipeline AST at macro-expansion
    # (compile) time, so the generated code only handles plain atoms.
    # NOTE(review): String.to_atom/1 here operates on names taken from the
    # caller's source code, so the atom set is bounded by the codebase.
    names =
      pipeline
      |> Inspector.extract_names()
      |> Enum.map(&String.to_atom/1)
    quote do
      {flow_pid, flow_ref, collector_pid} =
        FlowMonitor.start_flow(unquote(pipeline), unquote(names), unquote(opts))
      # Block until the monitored flow terminates normally, then stop the
      # collector so the gathered metrics are flushed.
      receive do
        {:DOWN, ^flow_ref, :process, ^flow_pid, :normal} ->
          Collector.stop(collector_pid)
      end
    end
  end
  @doc false
  # Starts the collector and the (monitor-injected) flow; returns the flow
  # pid, the monitor ref on it, and the collector pid for the caller to stop.
  @spec start_flow(Flow.t(), [String.t()], keyword()) :: {pid(), reference(), pid()}
  def start_flow(%Flow{} = flow, names, opts) do
    # Producers get their own scopes in addition to the extracted stage names.
    scopes = Inspector.extract_producer_names(flow) ++ names
    {:ok, collector_pid} = Collector.start_link(opts |> Keyword.put(:scopes, scopes))
    {:ok, flow_pid} =
      flow
      |> FlowMonitor.inject(collector_pid, names)
      |> Flow.start_link()
    flow_ref = Process.monitor(flow_pid)
    {flow_pid, flow_ref, collector_pid}
  end
  @doc false
  # Rewrites the flow so each named operation (and, for enumerable producers,
  # each producer) reports progress to the collector process `pid`.
  @spec inject(Flow.t(), pid(), [String.t()]) :: Flow.t()
  def inject(
        %Flow{
          operations: operations,
          producers: producers
        } = flow,
        pid,
        names
      ) do
    flow = %Flow{
      flow
      | operations:
          Inspector.inject_monitors(
            pid,
            operations,
            names
          )
    }
    # Only enumerable-backed producers can be wrapped with monitors; other
    # producer kinds are left untouched.
    case producers do
      {:enumerables, enumerables} ->
        %Flow{
          flow
          | producers: {:enumerables, Inspector.inject_enumerable_monitors(pid, enumerables)}
        }
      _ ->
        flow
    end
  end
end
|
lib/flow_monitor.ex
| 0.917958
| 0.66186
|
flow_monitor.ex
|
starcoder
|
defmodule Solarex.Sun do
  import Solarex.Math
  @moduledoc """
  Solarex.Sun is a module for calculating sunrise, sunset and solar noon for a particular
  date, latitude and longitude
  """
  @doc """
  Returns sunrise for passed Date, latitude, longitude.
  iex> Solarex.Sun.rise(~D[2017-01-01], 50.0598054, 14.3251989)
  {:ok, ~N[2017-01-01 07:01:40.231]}
  """
  # NOTE(review): the specs say DateTime.t() but do_rise/do_set build a
  # NaiveDateTime (DateTime.to_naive/1) — consider fixing the specs.
  @spec rise(Date.t(), number(), number()) :: {:ok, DateTime.t()} | {:error, term()}
  def rise(%Date{} = date, latitude, longitude) do
    rise(timestamp(date), latitude, longitude)
  end
  @doc """
  Returns sunrise for passed timestamp in milliseconds, latitude and longitude.
  iex> Solarex.Sun.rise(1483228800000, 50.0598054, 14.3251989)
  {:ok, ~N[2017-01-01 07:01:40.231]}
  """
  @spec rise(integer(), number(), number()) :: {:ok, DateTime.t()} | {:error, term()}
  def rise(timestamp, latitude, longitude) do
    noon = noon(timestamp, longitude)
    rise_hour_angle = rise_hour_angle(noon, latitude)
    do_rise(noon, rise_hour_angle)
  end
  # Sunrise = solar noon offset by the (negative) hour angle; one degree of
  # hour angle corresponds to 4 minutes of time (hence * 4 * 1000 * 60 ms).
  defp do_rise(noon, {:ok, rise_hour_angle}) do
    rise =
      (noon + rise_hour_angle * 4 * 1000 * 60)
      |> round()
      |> DateTime.from_unix!(:millisecond)
      |> DateTime.to_naive()
    {:ok, rise}
  end
  defp do_rise(_noon, {:error, reason}) do
    {:error, reason}
  end
  @doc """
  Returns sunset for passed Date, latitude, longitude.
  iex> Solarex.Sun.set(~D[2017-01-01], 50.0598054, 14.3251989)
  {:ok, ~N[2017-01-01 15:11:28.135]}
  """
  @spec set(Date.t(), number(), number()) :: {:ok, DateTime.t()} | {:error, term()}
  def set(%Date{} = date, latitude, longitude) do
    set(timestamp(date), latitude, longitude)
  end
  @doc """
  Returns sunset for passed timestamp in milliseconds, latitude and longitude.
  iex> Solarex.Sun.set(1483228800000, 50.0598054, 14.3251989)
  {:ok, ~N[2017-01-01 15:11:28.135]}
  """
  @spec set(integer(), number(), number()) :: {:ok, DateTime.t()} | {:error, term()}
  def set(timestamp, latitude, longitude) do
    noon = noon(timestamp, longitude)
    rise_hour_angle = rise_hour_angle(noon, latitude)
    do_set(noon, rise_hour_angle)
  end
  # Sunset mirrors sunrise on the other side of solar noon (note the minus).
  defp do_set(noon, {:ok, rise_hour_angle}) do
    set =
      (noon - rise_hour_angle * 4 * 1000 * 60)
      |> round()
      |> DateTime.from_unix!(:millisecond)
      |> DateTime.to_naive()
    {:ok, set}
  end
  defp do_set(_noon, {:error, reason}) do
    {:error, reason}
  end
  @doc """
  Returns Timex.Duration of daylight for given Date, latitude and longitude.
  iex> Solarex.Sun.hours(~D[2017-06-13], 50.0598054, 14.3251989) |> Timex.Duration.to_hours
  16.333333333333332
  """
  @spec hours(Date.t(), number(), number()) :: Timex.Duration.t()
  def hours(%Date{} = date, latitude, _longitude) do
    timestamp = timestamp(date)
    rise_hour_angle(timestamp, latitude)
    |> do_hours(timestamp, latitude)
  end
  # Daylight = 2 * |hour angle| degrees * 4 min/degree = 8 * -delta minutes
  # (the rise hour angle is negative).
  defp do_hours({:ok, delta}, _timestamp, _latitude) do
    (8 * -delta)
    |> round()
    |> Timex.Duration.from_minutes()
  end
  # No valid hour angle (acos out of range): polar day or polar night.
  # Decide which by looking at the solar declination for that date.
  defp do_hours({:error, _}, timestamp, latitude) do
    timestamp
    |> century()
    |> declination()
    |> do_hours(latitude)
  end
  # NOTE(review): 0.833 presumably relates to the standard -0.833° solar
  # altitude used for rise/set (refraction + solar radius) — confirm the
  # sign conventions for each hemisphere here.
  defp do_hours(delta, latitude) when latitude < 0 do
    case delta do
      delta when delta < 0.833 -> Timex.Duration.from_hours(24)
      _ -> Timex.Duration.from_hours(0)
    end
  end
  defp do_hours(delta, latitude) when latitude >= 0 do
    case delta do
      delta when delta > -0.833 -> Timex.Duration.from_hours(24)
      _ -> Timex.Duration.from_hours(0)
    end
  end
  @doc """
  Returns the time of the solar noon for Date, latitude and longitude.
  [https://en.wikipedia.org/wiki/Noon#Solar_noon](https://en.wikipedia.org/wiki/Noon#Solar_noon)
  iex> noon = Solarex.Sun.noon(~D[2017-01-01], 50.0598054, 14.3251989)
  ...> Timex.format!(noon, "{ISO:Extended}")
  "2017-01-01T11:06:34.183+00:00"
  """
  @spec noon(Date.t(), number(), number()) :: DateTime.t()
  def noon(%Date{} = date, _latitude, longitude) do
    noon(timestamp(date), longitude)
    |> round()
    |> DateTime.from_unix!(:millisecond)
  end
  @doc """
  Returns the unix timestamp in milliseconds of the solar noon for passed timestamp in milliseconds and longitude.
  [https://en.wikipedia.org/wiki/Noon#Solar_noon](https://en.wikipedia.org/wiki/Noon#Solar_noon)
  iex> Solarex.Sun.noon(1483228800000, 14.3251989)
  1483268794183
  """
  @spec noon(integer(), number()) :: integer()
  def noon(timestamp, longitude) do
    # First approximation
    t = century(timestamp + (12 - longitude * 24 / 360) * 3_600_000)
    # First correction
    o1 = 720 - longitude * 4 - equation_of_time(t - longitude / (360 * 36525))
    # Second correction
    o2 = 720 - longitude * 4 - equation_of_time(t + o1 / (1440 * 36525))
    (timestamp + o2 * 1000 * 60)
    |> round()
  end
  @doc """
  Returns the fraction number of centures since the J2000.0 epoch, 2000-01-01T12:00:00Z for passed unix timestamp in milliseconds.
  iex> Solarex.Sun.century(1483228800000.0)
  0.17000684462696783
  """
  @spec century(number()) :: float()
  def century(timestamp) when is_float(timestamp) do
    timestamp
    |> round()
    |> century()
  end
  @doc """
  Returns the fraction number of centures since the J2000.0 epoch, 2000-01-01T12:00:00Z for passed unix timestamp in milliseconds.
  iex> Solarex.Sun.century(1483228800000)
  0.17000684462696783
  """
  # number of miliseconds from 2000-01-01T12:00:00Z Etc/UTC
  @epoch 946_728_000_000
  # 3_155_760_000_000 ms = one Julian century (36525 days).
  def century(timestamp) when is_integer(timestamp) do
    (timestamp - @epoch) / 3_155_760_000_000
  end
  @doc """
  Returns hour angle of sunrise for the given unix timestamp and latitude in degrees.
  [https://en.wikipedia.org/wiki/Hour_angle](https://en.wikipedia.org/wiki/Hour_angle)
  iex> Solarex.Sun.rise_hour_angle(1497026562000, 37.7749)
  {:ok, -110.40483214814614}
  """
  @spec rise_hour_angle(integer(), number()) :: {:ok, float()} | {:error, term()}
  def rise_hour_angle(timestamp, latitude) do
    phi = radians(latitude)
    theta =
      timestamp
      |> century()
      |> declination()
      |> radians()
    # 90.833° zenith accounts for atmospheric refraction and the solar disc
    # radius. When the ratio leaves [-1, 1], acos/1 is undefined: the sun
    # never rises or never sets at this latitude/date.
    case cos(radians(90.833)) / (cos(phi) * cos(theta)) - tan(phi) * tan(theta) do
      ratio when ratio > -1 and ratio < 1 -> {:ok, -degrees(acos(ratio))}
      ratio -> {:error, ":math.acos not defined for #{ratio}"}
    end
  end
  @doc """
  Returns the equation of time the for given t in J2000.0 centuries.
  [https://en.wikipedia.org/wiki/Equation_of_time](https://en.wikipedia.org/wiki/Equation_of_time)
  iex> Solarex.Sun.equation_of_time(0.17437909156589854)
  0.6590584715529293
  """
  @spec equation_of_time(number()) :: number()
  def equation_of_time(t) do
    epsilon = obliquity_of_ecliptic(t)
    l0 = mean_longitude(t)
    e = orbit_eccentricity(t)
    m = mean_anomaly(t)
    y = pow(tan(radians(epsilon) / 2), 2)
    sin2l0 = sin(2 * radians(l0))
    sinm = sin(radians(m))
    cos2l0 = cos(2 * radians(l0))
    sin4l0 = sin(4 * radians(l0))
    sin2m = sin(2 * radians(m))
    etime =
      y * sin2l0 - 2 * e * sinm + 4 * e * y * sinm * cos2l0 - 0.5 * y * y * sin4l0 -
        1.25 * e * e * sin2m
    # Result is converted from degrees to minutes of time (4 min per degree).
    degrees(etime) * 4
  end
  @doc """
  Returns the sun's equation of center for given t in J2000.0 centuries.
  [https://en.wikipedia.org/wiki/Equation_of_the_center](https://en.wikipedia.org/wiki/Equation_of_the_center)
  iex> Solarex.Sun.equation_of_center(0.17437909156589854)
  0.7934457966327464
  """
  @spec equation_of_center(number()) :: number()
  def equation_of_center(t) do
    m = radians(mean_anomaly(t))
    sinm = sin(m)
    sin2m = sin(m * 2)
    sin3m = sin(m * 3)
    sinm * (1.914602 - t * (0.004817 + 0.000014 * t)) + sin2m * (0.019993 - 0.000101 * t) +
      sin3m * 0.000289
  end
  @doc """
  Returns the solar declination in degrees for given t in J2000.0 centuries.
  [https://en.wikipedia.org/wiki/Position_of_the_Sun#Declination_of_the_Sun_as_seen_from_Earth](https://en.wikipedia.org/wiki/Position_of_the_Sun#Declination_of_the_Sun_as_seen_from_Earth)
  iex> Solarex.Sun.declination(0.17437909156589854)
  22.982073772785167
  """
  @spec declination(number()) :: number()
  def declination(t) do
    degrees(asin(sin(radians(obliquity_of_ecliptic(t))) * sin(radians(apparent_longitude(t)))))
  end
  @doc """
  Returns the obliquity of the Earth’s ecliptic in degrees for given t in J2000.0 centuries.
  [https://en.wikipedia.org/wiki/Ecliptic#Obliquity_of_the_ecliptic](https://en.wikipedia.org/wiki/Ecliptic#Obliquity_of_the_ecliptic)
  iex> Solarex.Sun.obliquity_of_ecliptic(0.17437909156589854)
  23.43485798269169
  """
  @spec obliquity_of_ecliptic(number()) :: number()
  def obliquity_of_ecliptic(t) do
    e0 = 23 + (26 + (21.448 - t * (46.815 + t * (0.00059 - t * 0.001813))) / 60) / 60
    omega = 125.04 - 1934.136 * t
    e0 + 0.00256 * cos(radians(omega))
  end
  @doc """
  Returns the sun's mean longitude in degrees for given t in J2000.0 centuries.
  [https://en.wikipedia.org/wiki/Mean_longitude](https://en.wikipedia.org/wiki/Mean_longitude)
  iex> Solarex.Sun.mean_longitude(0.17437909156589854)
  78.24800784813306
  """
  @spec mean_longitude(number()) :: number()
  def mean_longitude(t) do
    l = modulo(280.46646 + t * (36000.76983 + t * 0.0003032), 360)
    get_mean_longitude(l)
  end
  # Normalize a possibly-negative modulo result into [0, 360).
  defp get_mean_longitude(l) when l < 0, do: l + 360
  defp get_mean_longitude(l), do: l
  @doc """
  Returns the sun's true longitude in degrees for given t in J2000.0 centuries.
  [https://en.wikipedia.org/wiki/True_longitude](https://en.wikipedia.org/wiki/True_longitude)
  iex> Solarex.Sun.true_longitude(0.17437909156589854)
  79.04145364476581
  """
  @spec true_longitude(number()) :: number()
  def true_longitude(t) do
    mean_longitude(t) + equation_of_center(t)
  end
  @doc """
  Returns the sun's apparent longitude in degrees for given t in J2000.0 centuries.
  [https://en.wikipedia.org/wiki/Apparent_longitude](https://en.wikipedia.org/wiki/Apparent_longitude)
  iex> Solarex.Sun.apparent_longitude(0.17437909156589854)
  79.0332141755133
  """
  @spec apparent_longitude(number()) :: number()
  def apparent_longitude(t) do
    true_longitude(t) - 0.00569 - 0.00478 * sin(radians(125.04 - 1934.136 * t))
  end
  @doc """
  Returns the sun's mean anomaly in degrees for given t in J2000.0 centuries.
  [https://en.wikipedia.org/wiki/Mean_anomaly](https://en.wikipedia.org/wiki/Mean_anomaly)
  iex> Solarex.Sun.mean_anomaly(0.17437909156589854)
  6635.010792131577
  """
  @spec mean_anomaly(number()) :: number()
  def mean_anomaly(t) do
    357.52911 + t * (35999.05029 - 0.0001537 * t)
  end
  @doc """
  Returns eccentricity for given t in J2000.0 centuries.
  [https://en.wikipedia.org/wiki/Orbital_eccentricity](https://en.wikipedia.org/wiki/Orbital_eccentricity)
  iex> Solarex.Sun.orbit_eccentricity(0.17437909156589854)
  0.016701299773425684
  """
  @spec orbit_eccentricity(number()) :: number()
  def orbit_eccentricity(t) do
    0.016708634 - t * (0.000042037 + 0.0000001267 * t)
  end
  # Midnight UTC of `date` as a unix timestamp in milliseconds.
  defp timestamp(%Date{} = date) do
    Timex.to_unix(date) * 1000
  end
end
|
lib/solarex/sun.ex
| 0.918018
| 0.704147
|
sun.ex
|
starcoder
|
defmodule Identicon do
  @moduledoc """
  Generates an identicon based on a string
  """
  # Full pipeline: hash the input, derive a color and a 5x5 grid, keep the
  # even-valued cells, map them to pixel rectangles, render and save as PNG.
  def main(input) do
    input
    |> hash_string
    |> pick_color
    |> build_grid
    |> filter_odd_squares
    |> build_pixel_map
    |> draw_image
    |> save_image(input)
  end
  @doc """
  Returns an Identicon.Image struct, containing hex as a list of integers representing hash.
  The `input` is a string to calculate hash.
  ## Example
  iex> Identicon.hash_string("identicon")
  %Identicon.Image{
  color: {},
  hex: [173, 43, 65, 97, 60, 135, 2, 181, 55, 43, 189, 201, 168, 16, 112, 64]
  }
  """
  def hash_string(input) do
    # MD5 yields 16 bytes; each byte becomes one integer in the hex list.
    hex =
      :crypto.hash(:md5, input)
      |> :binary.bin_to_list
    %Identicon.Image{hex: hex}
  end
  @doc """
  Returns an Identicon.Image struct, containing a RGB color as a list of integers.
  The `image` is a Identicon.Image struct to calculate RGB color.
  ## Example
  iex> image = Identicon.hash_string("identicon")
  iex> Identicon.pick_color(image)
  %Identicon.Image{
  color: {173, 43, 65},
  grid: [],
  hex: [173, 43, 65, 97, 60, 135, 2, 181, 55, 43, 189, 201, 168, 16, 112, 64],
  painted_cells: [],
  pixel_map: {}
  }
  """
  # The first three hash bytes become the R, G and B channels.
  def pick_color(%Identicon.Image{hex: [r, g, b | _rest_of_hex]} = image) do
    %Identicon.Image{image | color: {r, g, b}}
  end
  @doc """
  Returns an Identicon.Image struct, containing a list of integers lists representing a grid.
  The `image` is a Identicon.Image struct to generate a grid.
  ## Example
  iex> image = Identicon.hash_string("identicon")
  iex> Identicon.build_grid(image)
  %Identicon.Image{
  color: {},
  grid: [
  {43, 0}, {65, 1}, {97, 2}, {65, 3}, {43, 4},
  {60, 5}, {135, 6}, {2, 7}, {135, 8}, {60, 9},
  {181, 10}, {55, 11}, {43, 12}, {55, 13}, {181, 14},
  {189, 15}, {201, 16}, {168, 17}, {201, 18}, {189, 19},
  {16, 20}, {112, 21}, {64, 22}, {112, 23}, {16, 24}
  ],
  hex: [173, 43, 65, 97, 60, 135, 2, 181, 55, 43, 189, 201, 168, 16, 112, 64],
  painted_cells: [],
  pixel_map: {}
  }
  """
  # Drops the first byte (tl), chunks the remaining 15 bytes into rows of 3,
  # mirrors each row to 5 cells, and indexes every cell 0..24.
  def build_grid(%Identicon.Image{hex: hex} = image) do
    grid =
      hex
      |> tl
      |> Enum.chunk_every(3, 3, :discard)
      |> Enum.flat_map(&mirror_row/1)
      |> Enum.with_index
    %Identicon.Image{image | grid: grid}
  end
  @doc """
  Returns a row mirrored by the last element.
  The `row` is any list of data.
  ## Example
  iex> Identicon.mirror_row([1, 2, 3])
  [1, 2, 3, 2, 1]
  """
  def mirror_row(row) do
    # Reverse and drop the pivot element so it is not duplicated.
    right_side =
      row
      |> :lists.reverse
      |> tl
    row ++ right_side
  end
  @doc """
  Returns an Identicon.Image struct, containing a filtered (by even) list of integers lists representing a grid.
  The `image` is a Identicon.Image struct to filter even integers from a grid data.
  ## Example
  iex> image = Identicon.hash_string("identicon")
  iex> image = Identicon.build_grid(image)
  iex> Identicon.filter_odd_squares(image)
  %Identicon.Image{
  color: {},
  grid: [
  {43, 0}, {65, 1}, {97, 2}, {65, 3}, {43, 4},
  {60, 5}, {135, 6}, {2, 7}, {135, 8}, {60, 9},
  {181, 10}, {55, 11}, {43, 12}, {55, 13}, {181, 14},
  {189, 15}, {201, 16}, {168, 17}, {201, 18}, {189, 19},
  {16, 20}, {112, 21}, {64, 22}, {112, 23}, {16, 24}
  ],
  hex: [173, 43, 65, 97, 60, 135, 2, 181, 55, 43, 189, 201, 168, 16, 112, 64],
  painted_cells: [
  {60, 5},
  {2, 7},
  {60, 9},
  {168, 17},
  {16, 20},
  {112, 21},
  {64, 22},
  {112, 23},
  {16, 24}
  ],
  pixel_map: {}
  }
  """
  # NOTE(review): despite the name, this KEEPS even-valued cells (it filters
  # the odd ones out) — the kept cells become `painted_cells`.
  def filter_odd_squares(%Identicon.Image{grid: grid} = image) do
    painted_cells = Enum.filter grid, fn {value, _index} = _cell ->
      rem(value, 2) == 0
    end
    %Identicon.Image{image | painted_cells: painted_cells}
  end
  @doc """
  Returns an Identicon.Image struct, containing a pixel map representing squares to be colored.
  The `image` is a Identicon.Image struct to transform `painted_cells` in `pixel_map`.
  ## Example
  iex> image = Identicon.hash_string("identicon")
  iex> image = Identicon.build_grid(image)
  iex> image = Identicon.filter_odd_squares(image)
  iex> Identicon.build_pixel_map(image)
  %Identicon.Image{
  color: {},
  grid: [
  {43, 0}, {65, 1}, {97, 2}, {65, 3}, {43, 4},
  {60, 5}, {135, 6}, {2, 7}, {135, 8}, {60, 9},
  {181, 10}, {55, 11}, {43, 12}, {55, 13}, {181, 14},
  {189, 15}, {201, 16}, {168, 17}, {201, 18}, {189, 19},
  {16, 20}, {112, 21}, {64, 22}, {112, 23}, {16, 24}
  ],
  hex: [173, 43, 65, 97, 60, 135, 2, 181, 55, 43, 189, 201, 168, 16, 112, 64],
  painted_cells: [
  {60, 5}, {2, 7}, {60, 9},
  {168, 17}, {16, 20}, {112, 21},
  {64, 22}, {112, 23}, {16, 24}
  ],
  pixel_map: [
  {{2, 52}, {48, 98}},
  {{102, 52}, {148, 98}},
  {{202, 52}, {248, 98}},
  {{102, 152}, {148, 198}},
  {{2, 202}, {48, 248}},
  {{52, 202}, {98, 248}},
  {{102, 202}, {148, 248}},
  {{152, 202}, {198, 248}},
  {{202, 202}, {248, 248}}
  ]
  }
  """
  # Grid side length is sqrt(cell count) (5 for a 25-cell grid); each painted
  # index is converted into a rectangle's corner coordinates.
  def build_pixel_map(%Identicon.Image{grid: grid, painted_cells: painted_cells} = image) do
    pixel_map = Enum.map painted_cells, fn {_value, index} = _painted_cell ->
      length(grid)
      |> :math.sqrt
      |> round
      |> get_coordinates(index)
    end
    %Identicon.Image{image | pixel_map: pixel_map}
  end
  @doc """
  Returns a tuple of tuples, representing two coordinates (x, y) in space.
  The `size` means "how many cells has in this space/plane".
  The `index` means the cell position in this space.
  ## Example
  Get coordinate of X in this plane:
  ._._._.
  |_|_|_|
  |_|_|X|
  |_|_|_|
  iex> Identicon.get_coordinates(3, 5)
  {{102, 52}, {148, 98}}
  """
  # Cells are 50px apart with a 2px margin and a 46px painted square.
  def get_coordinates(size, index) do
    top_left_x = rem(index, size) * 50 + 2
    top_left_y = div(index, size) * 50 + 2
    bot_right_x = top_left_x + 46
    bot_right_y = top_left_y + 46
    {{top_left_x, top_left_y}, {bot_right_x, bot_right_y}}
  end
  @doc """
  Returns a raw image (generated by Erlang's :egd).
  The `image` is a Identicon.Image struct to generate a raw image.
  """
  def draw_image(%Identicon.Image{color: color, pixel_map: pixel_map} = _image) do
    # 250x250 canvas: 5 cells * 50px per cell.
    raw_image = :egd.create(250, 250)
    fill = :egd.color(color)
    Enum.each pixel_map, fn {top_left, bot_right} = _coordinates ->
      :egd.filledRectangle(raw_image, top_left, bot_right, fill)
    end
    :egd.render(raw_image)
  end
  @doc """
  Returns a tuple {status, result}.
  Status could be both: `:ok` or `:error`.
  Result could be both: `path` or `Something went wrong`.
  The `image` is a raw image (generated by Erlang's :egd).
  The `path` is a path (without extension) to where the image will be saved.
  """
  def save_image(image, path) do
    case File.write("#{path}.png", image) do
      :ok -> {:ok, path}
      {:error, _reason} -> {:error, "Something went wrong" }
    end
  end
end
|
lib/identicon.ex
| 0.952397
| 0.437824
|
identicon.ex
|
starcoder
|
defmodule Chunker do
  @moduledoc """
  Provides functions to interact with chunked files.
  """
  alias Chunker.ChunkedFile
  @type t :: ChunkedFile.t()
  @type reason :: any
  @type success_tuple :: {:ok, t}
  @type error_tuple :: {:error, reason}
  @type result :: success_tuple | error_tuple
  @doc """
  Appends `data` to the given `chunked_file`.
  """
  # Implemented as an insert at index == current length.
  @spec append_chunk(t, bitstring) :: result
  def append_chunk(chunked_file, data) do
    case __MODULE__.length(chunked_file) do
      {:ok, length} -> insert_chunk(chunked_file, data, length)
      err -> err
    end
  end
  @doc """
  Inserts `data` to the given `chunked_file` at the position specified
  by `index`.
  """
  @spec insert_chunk(t, bitstring, integer) :: result
  defdelegate insert_chunk(chunked_file, data, index), to: ChunkedFile
  @doc """
  Removes the chunk with the corresponding `index` from the given
  `chunked_file`.
  """
  @spec remove_chunk(t, integer) :: result
  defdelegate remove_chunk(chunked_file, index), to: ChunkedFile
  @doc """
  Prepends `data` to the given `chunked_file`.
  """
  @spec prepend_chunk(t, bitstring) :: result
  def prepend_chunk(chunked_file, data) do
    insert_chunk(chunked_file, data, 0)
  end
  @doc """
  Replaces the chunk with the corresponding `index` with the given `data`.
  """
  # Remove-then-insert; if the removal fails the error is returned untouched.
  @spec replace_chunk(t, bitstring, integer) :: result
  def replace_chunk(chunked_file, data, index) do
    case remove_chunk(chunked_file, index) do
      {:ok, chunked_file} -> insert_chunk(chunked_file, data, index)
      err -> err
    end
  end
  @doc """
  Returns the data of the chunk with `index` from the given
  `chunked_file`.
  """
  @spec get_chunk(t, integer) :: {:ok, bitstring} | error_tuple
  defdelegate get_chunk(chunked_file, index), to: ChunkedFile
  @doc """
  Returns the number of individual chunks the given `chunked_file`
  consists of.
  """
  @spec length(t) :: {:ok, integer} | error_tuple
  defdelegate length(chunked_file), to: ChunkedFile
  @doc """
  Commits the given `chunked_file`.
  After the file has been committed, chunks can no longer be added or
  removed.
  """
  @spec commit(t) :: result
  defdelegate commit(chunked_file), to: ChunkedFile
  @doc """
  Returns `true` if chunks can be added or removed from the given
  `chunked_file`.
  """
  @spec writeable?(t) :: boolean
  defdelegate writeable?(chunked_file), to: ChunkedFile
  @doc """
  Removes the given `chunked_file`.
  """
  @spec remove(t) :: :ok | error_tuple
  defdelegate remove(chunked_file), to: ChunkedFile
  @doc """
  Closes the given `chunked_file`.
  After the file has been closed, it is not possible to read from it
  nor write to it.
  """
  @spec close(t) :: :ok | error_tuple
  defdelegate close(chunked_file), to: ChunkedFile
  @doc """
  Returns `true` if the given `chunked_file` has already been closed.
  """
  @spec closed?(t) :: boolean
  defdelegate closed?(chunked_file), to: ChunkedFile
end
|
lib/chunker.ex
| 0.830078
| 0.713806
|
chunker.ex
|
starcoder
|
defmodule CodeReloader.Server do
  @moduledoc """
  Recompiles modified files in the current mix project by invoking configured reloadable compilers.
  Specify the compilers that should be run for reloading when starting the server, e.g.:
  ```
  children = [{CodeReloader.Server, [:elixir, :erlang]}]
  Supervisor.start_link(children, [strategy: :one_for_one])
  ```
  Code can then be reloaded by calling:
  ```
  CodeReloader.Server.reload!(mod)
  ```
  where `mod` will normally be a `Plug.Router` module containing the `CodeReloader.Plug`
  used to instigate a code reload on every web-server call (it could potentially
  be any another module being used to kick-off the reload).
  The `mod` argument is used for two purposes:
  * To avoid race conditions from multiple calls: all code reloads from the same
  module are funneled through a sequential call operation.
  * To back-up the module's `.beam` file so if compilation of the module itself fails,
  it can be restored to working order, otherwise code reload through that
  module would no-longer be available, which would kill an endpoint.
  We also keep track of the last time that we compiled the code, so that if the code changes
  outside of the VM, e.g. an external tool recompiles the code, we notice that the manifest
  is newer than when we compiled, and explicitly reload all modified modules (see `:code.modified_modules/0`)
  since compiling will potentially be a no-op.
  This code is based on that in the [Pheonix Project](https://github.com/phoenixframework/phoenix),
  without the Phoenix dependencies, and modified to deal with the edge-case of projects recompiled
  outside of the `CodeReloader.Server` (the original only copes with modified source code).
  """
  use GenServer
  require Logger
  alias CodeReloader.Proxy
  def start_link(reloadable_compilers) do
    GenServer.start_link(__MODULE__, reloadable_compilers, name: __MODULE__)
  end
  # One-time check that the build environment allows symlinks (warns if not).
  def check_symlinks do
    GenServer.call(__MODULE__, :check_symlinks, :infinity)
  end
  # Triggers a recompile; blocks (with no timeout) until compilation is done.
  def reload!(endpoint) do
    GenServer.call(__MODULE__, {:reload!, endpoint}, :infinity)
  end
  ## Callbacks
  # State: {symlinks_checked?, reloadable_compilers, last_compile_time (os seconds)}
  def init(reloadable_compilers) do
    {:ok, {false, reloadable_compilers, System.os_time(:seconds)}}
  end
  def handle_call(:check_symlinks, _from, {checked?, reloadable_compilers, last_compile_time}) do
    if not checked? and Code.ensure_loaded?(Mix.Project) do
      build_path = Mix.Project.build_path()
      symlink = Path.join(Path.dirname(build_path), "#{__MODULE__}")
      # Probe symlink support by creating (and immediately removing) one.
      case File.ln_s(build_path, symlink) do
        :ok ->
          File.rm(symlink)
        {:error, :eexist} ->
          File.rm(symlink)
        {:error, _} ->
          Logger.warn(
            "App is unable to create symlinks. CodeReloader will run " <>
              "considerably faster if symlinks are allowed." <> os_symlink(:os.type())
          )
      end
    end
    {:reply, :ok, {true, reloadable_compilers, last_compile_time}}
  end
  def handle_call({:reload!, endpoint}, from, {checked?, compilers, last_compile_time}) do
    # Back up the endpoint's .beam so a failed compile can be rolled back.
    backup = load_backup(endpoint)
    # Collapse any queued reload requests for the same endpoint into this one.
    froms = all_waiting([from], endpoint)
    {res, out} =
      proxy_io(fn ->
        try do
          mix_compile(Code.ensure_loaded(Mix.Task), compilers, last_compile_time)
        catch
          :exit, {:shutdown, 1} ->
            :error
          kind, reason ->
            IO.puts(Exception.format(kind, reason, System.stacktrace()))
            :error
        end
      end)
    reply =
      case res do
        :ok ->
          {:ok, out}
        :error ->
          write_backup(backup)
          {:error, out}
      end
    # Reply to every caller that was waiting on this compile.
    Enum.each(froms, &GenServer.reply(&1, reply))
    {:noreply, {checked?, compilers, System.os_time(:seconds)}}
  end
  def handle_info(_, state) do
    {:noreply, state}
  end
  defp os_symlink({:win32, _}),
    do: " On Windows, such can be done by starting the shell with \"Run as Administrator\"."
  defp os_symlink(_), do: ""
  defp load_backup(mod) do
    mod
    |> :code.which()
    |> read_backup()
  end
  # :code.which/1 returns a charlist path for loaded modules; anything else
  # (e.g. :non_existing) means there is nothing to back up.
  defp read_backup(path) when is_list(path) do
    case File.read(path) do
      {:ok, binary} -> {:ok, path, binary}
      _ -> :error
    end
  end
  defp read_backup(_path), do: :error
  defp write_backup({:ok, path, file}), do: File.write!(path, file)
  defp write_backup(:error), do: :ok
  # Drains already-queued {:reload!, endpoint} calls from the mailbox so a
  # single compile can answer all of them.
  defp all_waiting(acc, endpoint) do
    receive do
      {:"$gen_call", from, {:reload!, ^endpoint}} -> all_waiting([from | acc], endpoint)
    after
      0 -> acc
    end
  end
  # TODO: Remove the function_exported call after 1.3 support is removed
  # and just use loaded. apply/3 is used to prevent a compilation
  # warning.
  defp mix_compile({:module, Mix.Task}, compilers, last_compile_time) do
    if Mix.Project.umbrella?() do
      deps =
        if function_exported?(Mix.Dep.Umbrella, :cached, 0) do
          apply(Mix.Dep.Umbrella, :cached, [])
        else
          Mix.Dep.Umbrella.loaded()
        end
      Enum.each(deps, fn dep ->
        Mix.Dep.in_dependency(dep, fn _ ->
          mix_compile_unless_stale_config(compilers, last_compile_time)
        end)
      end)
    else
      mix_compile_unless_stale_config(compilers, last_compile_time)
      :ok
    end
  end
  defp mix_compile({:error, _reason}, _, _) do
    raise "the Code Reloader is enabled but Mix is not available. If you want to " <>
            "use the Code Reloader in production or inside an escript, you must add " <>
            ":mix to your applications list. Otherwise, you must disable code reloading " <>
            "in such environments"
  end
  defp mix_compile_unless_stale_config(compilers, last_compile_time) do
    manifests = Mix.Tasks.Compile.Elixir.manifests()
    configs = Mix.Project.config_files()
    # did the manifest change outside of us compiling the project?
    manifests_last_updated =
      Enum.map(manifests, &File.stat!(&1, time: :posix).mtime) |> Enum.max()
    out_of_date? = manifests_last_updated > last_compile_time
    case Mix.Utils.extract_stale(configs, manifests) do
      [] ->
        do_mix_compile(compilers, out_of_date?)
      files ->
        raise """
        could not compile application: #{Mix.Project.config()[:app]}.
        You must restart your server after changing the following config or lib files:
        * #{Enum.map_join(files, "\n * ", &Path.relative_to_cwd/1)}
        """
    end
  end
  defp do_mix_compile(compilers, out_of_date?) do
    all = Mix.Project.config()[:compilers] || Mix.compilers()
    # Only re-enable/run the requested compilers that the project actually uses.
    compilers =
      for compiler <- compilers, compiler in all do
        Mix.Task.reenable("compile.#{compiler}")
        compiler
      end
    # We call build_structure mostly for Windows so new
    # assets in priv are copied to the build directory.
    Mix.Project.build_structure()
    res = Enum.map(compilers, &Mix.Task.run("compile.#{&1}", []))
    if :ok in res && consolidate_protocols?() do
      Mix.Task.reenable("compile.protocols")
      Mix.Task.run("compile.protocols", [])
    end
    if(out_of_date?, do: reload_modules())
    res
  end
  defp consolidate_protocols? do
    Mix.Project.config()[:consolidate_protocols]
  end
  # Reloads every module whose .beam changed on disk. Tries a soft purge
  # first; if old code is still running, waits briefly then hard-purges.
  defp reload_modules() do
    :code.modified_modules()
    |> Enum.each(fn mod ->
      IO.puts("Reloading #{inspect(mod)}\n")
      case :code.soft_purge(mod) do
        true ->
          :code.load_file(mod)
        false ->
          Process.sleep(500)
          :code.purge(mod)
          :code.load_file(mod)
      end
    end)
  end
  # Runs `fun` with a proxy group leader so all IO produced during the
  # compile is captured and returned alongside the result.
  defp proxy_io(fun) do
    original_gl = Process.group_leader()
    {:ok, proxy_gl} = Proxy.start()
    Process.group_leader(self(), proxy_gl)
    try do
      {fun.(), Proxy.stop(proxy_gl)}
    after
      Process.group_leader(self(), original_gl)
      Process.exit(proxy_gl, :kill)
    end
  end
end
|
lib/code_reloader/server.ex
| 0.745584
| 0.78838
|
server.ex
|
starcoder
|
defmodule Pandora do
require QueueWrapper, as: Queue
require Pandora.Data, as: Data
require Pandora.Parse, as: Parse
def create_document(declaration, doctype, nodes) do
Data.document(
declaration: declaration,
doctype: doctype,
nodes: Queue.from_list(nodes)
)
end
def create_element(tag_name, attributes \\ %{}, children \\ [])
when is_binary(tag_name) and is_map(attributes) and is_list(children) do
Data.element(
name: tag_name,
namespace: nil,
attributes: attributes,
children: Queue.from_list(children)
)
end
def create_ns_element(ns, tn, attributes \\ %{}, children \\ [])
when is_binary(tn) and is_binary(ns) and is_map(attributes) and is_list(children) do
Data.element(
name: tn,
namespace: ns,
attributes: attributes,
children: Queue.from_list(children)
)
end
def create_text(body) when is_binary(body) do
Data.text(value: body)
end
def create_comment(body) when is_binary(body) do
Data.comment(body: body)
end
def create_cdata(body, encoded \\ true) when is_binary(body) do
Data.cdata(value: body, encoded: encoded)
end
def prepend(root, path, new_node) when not Data.is_node(new_node) do
{:error, {:xml, {:bad_prepend, root, path, new_node}}}
end
def prepend(root, path, new_node) do
prepend_impl(root, path, new_node, [])
end
defp prepend_impl(root, [], new_node, _prev) when Data.is_element(root) do
children = Queue.in_front(new_node, Data.element(root, :children))
{:ok, Data.element(root, children: children)}
end
defp prepend_impl(root, [x | xs], new_node, prev) when Data.is_element(root) do
with {:ok, {index, child}} <- get_child_with_replace(root, x),
{:ok, child} <- prepend_impl(child, xs, new_node, [x | prev]) do
children = Data.element(root, :children)
children = Queue.replace_at(children, index, child)
{:ok, Data.element(root, children: children)}
end
end
defp get_child_with_replace(root, match) do
init = {:not_found, 0}
children = Data.element(root, :children)
result =
Queue.reduce_while(children, init, fn
element, {:not_found, index} ->
if match_element(element, match),
do: {:halt, {:found, index, element}},
else: {:cont, {:not_found, index + 1}}
end)
case result do
{:not_found, _} ->
{:error, {:get_child_with_replace, {:failed_to_find, root, :tag_name}}}
{:found, index, element} ->
{:ok, {index, element}}
end
end
defp match_element(node, {:el, tag_name})
when Data.is_element(node)
when tag_name == Data.element(node, :name) do
true
end
defp match_element(_, _) do
false
end
@spec from_string(string :: String.t()) :: {:ok, Data.document()} | {:error, any}
def from_string(string) do
Parse.from_string(string)
end
# Serializes a whole document: XML declaration first, then doctype, then every
# top-level node in order. `with_state` is the accumulator function threaded
# through all serialization helpers; here it is plain string concatenation.
# NOTE(review): this defines a local `to_string/1`, shadowing the auto-imported
# `Kernel.to_string/1` inside this module — presumably intentional; confirm.
def to_string({:document, declaration, doctype, nodes}) do
with_state = fn state, next -> "#{state}#{next}" end
state = ""
state = to_string_xml_declaration(declaration, state, with_state)
state = to_string_doctype(doctype, state, with_state)
Queue.reduce(nodes, state, fn node, state ->
to_string_impl(node, state, with_state)
end)
end
# Serializes a single node (element, text, comment or cdata) to a string.
def to_string(node) do
with_state = fn state, next -> "#{state}#{next}" end
to_string_impl(node, "", with_state)
end
# No declaration: emit nothing, pass the accumulator through unchanged.
defp to_string_xml_declaration(nil, state, _), do: state
# Emits `<?xml version=".." encoding=".." standalone=".."?>`, skipping any
# attribute whose value is nil.
defp to_string_xml_declaration(declaration, state, with_state) do
{:declaration, version, encoding, standalone} = declaration
state = with_state.(state, "<?xml")
state =
if version != nil,
do: with_state.(state, " version=\"#{version}\""),
else: state
# Only nil and :utf8 are handled; any other encoding value raises
# CaseClauseError (there is no fallback clause).
state =
case encoding do
nil -> state
:utf8 -> with_state.(state, " encoding=\"UTF-8\"")
end
state =
case standalone do
nil -> state
true -> with_state.(state, " standalone=\"yes\"")
false -> with_state.(state, " standalone=\"no\"")
end
with_state.(state, "?>")
end
# No doctype: emit nothing, pass the accumulator through unchanged.
defp to_string_doctype(nil, state, _), do: state
# Emits `<!DOCTYPE root ...>` with each DTD reference appended in order.
# Supported DTD forms: {:public, location, url}, {:system, url} and
# {:inlined, content}; any other shape raises CaseClauseError.
defp to_string_doctype(doctype, state, with_state) do
{:doctype, root_node, dtds} = doctype
state = with_state.(state, "<!DOCTYPE #{root_node}")
state =
Enum.reduce(dtds, state, fn dtd, state ->
case dtd do
{:public, location, url} ->
with_state.(state, " PUBLIC \"#{location}\" \"#{url}\"")
{:system, url} ->
with_state.(state, " SYSTEM \"#{url}\"")
{:inlined, content} ->
with_state.(state, " [#{content}]")
end
end)
with_state.(state, ">")
end
# Serializes one node into the accumulator via `with_state`. Elements with
# children produce `<tag ...>children</tag>`; childless elements self-close
# as `<tag .../>`.
# NOTE(review): no escaping is applied to text or attribute values —
# presumably inputs are pre-escaped; verify against callers.
defp to_string_impl(node, state, with_state) do
# "ns:name" when a namespace is present, bare "name" otherwise.
tag_name_to_string = fn
nil, name -> name
namespace, name -> "#{namespace}:#{name}"
end
# Attribute keys may be {namespace, name} tuples or plain names.
attributes_to_string = fn state, attributes ->
Enum.reduce(attributes, state, fn
{{nk, ak}, v}, state -> with_state.(state, " #{nk}:#{ak}=\"#{v}\"")
{k, v}, state -> with_state.(state, " #{k}=\"#{v}\"")
end)
end
case node do
{:text, value} ->
with_state.(state, value)
# NOTE(review): only encoded cdata ({:cdata, _, true}) is handled here;
# a node built with `create_cdata(body, false)` raises CaseClauseError —
# confirm whether that is intentional.
{:cdata, value, true} ->
with_state.(state, "<![CDATA[#{value}]]>")
{:comment, body} ->
with_state.(state, "<!--#{body}-->")
{:element, name, ns, attributes, children} ->
tag_name_str = tag_name_to_string.(ns, name)
if Queue.length(children) > 0 do
state = with_state.(state, "<#{tag_name_str}")
state = attributes_to_string.(state, attributes)
state = with_state.(state, ">")
state =
Queue.reduce(children, state, fn child, state ->
to_string_impl(child, state, with_state)
end)
with_state.(state, "</#{tag_name_str}>")
else
state = with_state.(state, "<#{tag_name_str}")
state = attributes_to_string.(state, attributes)
with_state.(state, "/>")
end
end
end
# Structural equality for XML nodes. Child queues are compared element-wise
# via `Queue.equal/3`, recursing with this same function. Mismatched node
# kinds fall through to the final clause and compare as unequal.
def equal({:text, a}, {:text, b}), do: a == b
def equal({:comment, a}, {:comment, b}), do: a == b

def equal({:cdata, left_value, left_encoded}, {:cdata, right_value, right_encoded}) do
  left_value == right_value and left_encoded == right_encoded
end

def equal({:element, name_a, ns_a, attrs_a, kids_a}, {:element, name_b, ns_b, attrs_b, kids_b}) do
  name_a == name_b and ns_a == ns_b and attrs_a == attrs_b and
    Queue.equal(kids_a, kids_b, &equal/2)
end

def equal({:document, decl_a, doctype_a, nodes_a}, {:document, decl_b, doctype_b, nodes_b}) do
  decl_a == decl_b and doctype_a == doctype_b and Queue.equal(nodes_a, nodes_b, &equal/2)
end

def equal(_left, _right), do: false
end
|
lib/pandora.ex
| 0.558568
| 0.472623
|
pandora.ex
|
starcoder
|
defmodule ElixirRigidPhysics.Geometry.LineSegment do
  @moduledoc """
  Line segment geometry module.

  Line segments go from a start point `a` to an end point `b`.
  """

  alias Graphmath.Vec3

  require Record
  # A segment is a record of two 3D points; both default to the origin.
  Record.defrecord(:line_segment, a: {0.0, 0.0, 0.0}, b: {0.0, 0.0, 0.0})
  @type line_segment :: record(:line_segment, a: Vec3.vec3(), b: Vec3.vec3())

  @doc """
  Creates a line segment.

  ## Examples

      iex> # IO.puts("Test line segment creation.")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> LineSegment.create( {0.0, 1.0, 0.0}, {0.0, 1.0, 1.0} )
      {:line_segment, {0.0, 1.0, 0.0}, {0.0, 1.0, 1.0} }

  """
  @spec create(Vec3.vec3(), Vec3.vec3()) :: line_segment
  def create(a, b), do: line_segment(a: a, b: b)

  @doc """
  Projects a query point `q` onto the same line as line segment `ab`. **Note that the point may not be on the segment itself.**

  ## Examples

      iex> #IO.puts("Check segment coincident with a")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {0.0, 0.0, 0.0}
      iex> LineSegment.project( segment, q)
      {0.0, 0.0, 0.0}

      iex> #IO.puts("Check segment coincident with b")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {1.0, 0.0, 0.0}
      iex> LineSegment.project( segment, q)
      {1.0, 0.0, 0.0}

      iex> #IO.puts("Check segment interior of segment")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {0.5, 0.0, 0.0}
      iex> LineSegment.project( segment, q)
      {0.5, 0.0, 0.0}

      iex> #IO.puts("Check segment in voronoi region of a")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {-0.5, 0.0, 0.0}
      iex> LineSegment.project( segment, q)
      {-0.5, 0.0, 0.0}

      iex> #IO.puts("Check segment in voronoi region of b")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {1.5, 0.0, 0.0}
      iex> LineSegment.project( segment, q)
      {1.5, 0.0, 0.0}

      iex> #IO.puts("Check offset segment in voronoi region of a")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {-0.5, 3.0, 4.0}
      iex> LineSegment.project( segment, q)
      {-0.5, 0.0, 0.0}

      iex> #IO.puts("Check offset segment in voronoi region of b")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {1.5, 3.0, 4.0}
      iex> LineSegment.project( segment, q)
      {1.5, 0.0, 0.0}

      iex> #IO.puts("Check offset segment in voronoi region of segment")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {0.45, 3.0, 4.0}
      iex> LineSegment.project( segment, q)
      {0.45, 0.0, 0.0}

  """
  @spec project(line_segment, Vec3.vec3()) :: Vec3.vec3()
  def project(line_segment(a: a, b: b), q) do
    dir =
      b
      |> Vec3.subtract(a)
      |> Vec3.normalize()

    # Bug fix: project the *offset* of q from a onto the direction vector.
    # The original computed Vec3.dot(q, dir), which is only correct when `a`
    # is the origin — which all the doctests happen to use, masking the bug.
    offset = Vec3.subtract(q, a)
    Vec3.add(a, Vec3.scale(dir, Vec3.dot(offset, dir)))
  end

  @doc """
  Gets the [Barycentric coordinates](https://en.wikipedia.org/wiki/Barycentric_coordinate_system) of a query point projected on the segment.

  See the [2010 Catto Erin GJK presentation](https://code.google.com/archive/p/box2d/downloads) for details.

  Note that this is safe to use with points not collinear with the line segment--projection is built in.

  ## Examples

      iex> #IO.puts("Check segment coincident with a")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {0.0, 0.0, 0.0}
      iex> LineSegment.to_barycentric( segment, q)
      {1.0, 0.0}

      iex> #IO.puts("Check segment coincident with b")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {1.0, 0.0, 0.0}
      iex> LineSegment.to_barycentric( segment, q)
      {0.0, 1.0}

      iex> #IO.puts("Check segment interior of segment")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {0.5, 0.0, 0.0}
      iex> LineSegment.to_barycentric( segment, q)
      {0.5, 0.5}

      iex> #IO.puts("Check segment in voronoi region of a")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {-0.5, 0.0, 0.0}
      iex> LineSegment.to_barycentric( segment, q)
      {1.5, -0.5}

      iex> #IO.puts("Check segment in voronoi region of b")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {1.5, 0.0, 0.0}
      iex> LineSegment.to_barycentric( segment, q)
      {-0.5, 1.5}

      iex> #IO.puts("Check offset segment in voronoi region of a")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {-0.5, 3.0, 4.0}
      iex> LineSegment.to_barycentric( segment, q)
      {1.5, -0.5}

      iex> #IO.puts("Check offset segment in voronoi region of b")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {1.5, 3.0, 4.0}
      iex> LineSegment.to_barycentric( segment, q)
      {-0.5, 1.5}

      iex> #IO.puts("Check offset segment in voronoi region of segment")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {0.5, 3.0, 4.0}
      iex> LineSegment.to_barycentric( segment, q)
      {0.5, 0.5}

  """
  @spec to_barycentric(line_segment, Vec3.vec3()) :: {float, float}
  def to_barycentric(line_segment(a: a, b: b), q) do
    # NOTE(review): a zero-length segment (a == b) causes division by zero
    # here — confirm callers guarantee non-degenerate segments.
    segment_vec = Vec3.subtract(b, a)
    segment_vec_length = Vec3.length(segment_vec)
    segment_vec_unit = Vec3.scale(segment_vec, 1.0 / segment_vec_length)

    qa = Vec3.subtract(q, a)
    bq = Vec3.subtract(b, q)

    # u weights endpoint a, v weights endpoint b; u + v == 1 for any q.
    u = Vec3.dot(bq, segment_vec_unit) / segment_vec_length
    v = Vec3.dot(qa, segment_vec_unit) / segment_vec_length

    {u, v}
  end

  @doc """
  Gets a point collinear (and possibly coincident) with a line segment given barycentric coordinates.

  Note that the sum of the coordinates must equal 1 for this to return a useful value.

  ## Examples

      iex> #IO.puts("Check coordinate at start of line")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {-1.0, -1.0, -1.0}, {1.0, 1.0, 1.0}}
      iex> q = {1.0, 0.0}
      iex> LineSegment.from_barycentric( segment, q)
      {-1.0, -1.0, -1.0}

      iex> #IO.puts("Check coordinate at end of line")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {-1.0, -1.0, -1.0}, {1.0, 1.0, 1.0}}
      iex> q = {0.0, 1.0}
      iex> LineSegment.from_barycentric( segment, q)
      {1.0, 1.0, 1.0}

      iex> #IO.puts("Check coordinate at middle of line")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {-1.0, -1.0, -1.0}, {1.0, 1.0, 1.0}}
      iex> q = {0.5, 0.5}
      iex> LineSegment.from_barycentric( segment, q)
      {0.0, 0.0, 0.0}

      iex> #IO.puts("Check coordinate in voronoi region of a")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {-1.0, -1.0, -1.0}, {1.0, 1.0, 1.0}}
      iex> q = {1.25, -0.25}
      iex> LineSegment.from_barycentric( segment, q)
      {-1.5, -1.5, -1.5}

      iex> #IO.puts("Check coordinate in voronoi region of b")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {-1.0, -1.0, -1.0}, {1.0, 1.0, 1.0}}
      iex> q = {-0.25, 1.25}
      iex> LineSegment.from_barycentric( segment, q)
      {1.5, 1.5, 1.5}

  """
  @spec from_barycentric(line_segment, {number, number}) :: Vec3.vec3()
  def from_barycentric(line_segment(a: a, b: b), {u, v}) do
    Vec3.weighted_sum(u, a, v, b)
  end

  @doc """
  Projects a query point `q` onto the nearest point on line segment `ab`, giving the nearest point.

  ## Examples

      iex> #IO.puts("Check segment coincident with a")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {0.0, 0.0, 0.0}
      iex> LineSegment.nearest_point( segment, q)
      {0.0, 0.0, 0.0}

      iex> #IO.puts("Check segment coincident with b")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {1.0, 0.0, 0.0}
      iex> LineSegment.nearest_point( segment, q)
      {1.0, 0.0, 0.0}

      iex> #IO.puts("Check segment interior of segment")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {0.5, 0.0, 0.0}
      iex> LineSegment.nearest_point( segment, q)
      {0.5, 0.0, 0.0}

      iex> #IO.puts("Check segment in voronoi region of a")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {-0.5, 0.0, 0.0}
      iex> LineSegment.nearest_point( segment, q)
      {0.0, 0.0, 0.0}

      iex> #IO.puts("Check segment in voronoi region of b")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {1.5, 0.0, 0.0}
      iex> LineSegment.nearest_point( segment, q)
      {1.0, 0.0, 0.0}

      iex> #IO.puts("Check offset segment in voronoi region of a")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {-0.5, 3.0, 4.0}
      iex> LineSegment.nearest_point( segment, q)
      {0.0, 0.0, 0.0}

      iex> #IO.puts("Check offset segment in voronoi region of b")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {1.5, 3.0, 4.0}
      iex> LineSegment.nearest_point( segment, q)
      {1.0, 0.0, 0.0}

      iex> #IO.puts("Check offset segment in voronoi region of segment")
      iex> require ElixirRigidPhysics.Geometry.LineSegment, as: LineSegment
      iex> segment = {:line_segment, {0.0, 0.0, 0.0}, {1.0, 0.0, 0.0}}
      iex> q = {0.45, 3.0, 4.0}
      iex> LineSegment.nearest_point( segment, q)
      {0.45, 0.0, 0.0}

  """
  @spec nearest_point(line_segment, Vec3.vec3()) :: Vec3.vec3()
  def nearest_point(line_segment(a: a, b: b), q) do
    # NOTE(review): a zero-length segment causes division by zero — confirm
    # callers guarantee a != b.
    segment_vec = Vec3.subtract(b, a)
    segment_vec_length = Vec3.length(segment_vec)
    segment_vec_unit = Vec3.scale(segment_vec, 1.0 / segment_vec_length)

    qa = Vec3.subtract(q, a)
    bq = Vec3.subtract(b, q)

    u = Vec3.dot(bq, segment_vec_unit) / segment_vec_length
    v = Vec3.dot(qa, segment_vec_unit) / segment_vec_length

    # Clamp to the segment: negative v means q lies beyond a, negative u
    # means q lies beyond b; otherwise interpolate along the segment.
    cond do
      v < 0 -> a
      u < 0 -> b
      true -> Vec3.weighted_sum(u, a, v, b)
    end
  end
end
|
lib/geometry/line_segment.ex
| 0.918745
| 0.678686
|
line_segment.ex
|
starcoder
|
defmodule ExRabbitMQ.Producer do
@moduledoc """
A behaviour module that abstracts away the handling of RabbitMQ connections and channels.
It also provides hooks to allow the programmer to publish a message without having to directly
access the AMQP interfaces.
For a connection configuration example see `ExRabbitMQ.Config.Connection`.
#### Example usage for a producer implementing a `GenServer`
```elixir
defmodule MyExRabbitMQProducer do
@module __MODULE__
use GenServer
use ExRabbitMQ.Producer
def start_link do
GenServer.start_link(@module, :ok)
end
def init(state) do
new_state =
xrmq_init(:my_connection_config, state)
|> xrmq_extract_state()
{:ok, new_state}
end
def handle_cast({:publish, something}, state) do
xrmq_basic_publish(something, "", "my_queue")
{:noreply, state}
end
# optional override when there is a need to set up the channel right after the connection has been established.
def xrmq_channel_setup(channel, state) do
# any other channel setup goes here...
{:ok, state}
end
end
```
"""
alias ExRabbitMQ.AST.Common, as: C
alias ExRabbitMQ.Config.Session
require ExRabbitMQ.AST.Common
require ExRabbitMQ.AST.Producer.GenServer
@doc """
Setup the process for producing messages on RabbitMQ.
Initiates a connection or reuses an existing one.
When a connection is established then a new channel is opened.
Next, `c:xrmq_channel_setup/2` is called to do any extra work on the opened channel.
The function accepts the following arguments:
* `connection` - The configuration information for the RabbitMQ connection.
It can either be a `ExRabbitMQ.Config.Connection` struct or an atom that will be used as the `key` for reading
the `:exrabbitmq` configuration part from the environment.
For more information on how to configure the connection, check `ExRabbitMQ.Config.Connection`.
* `state` - The wrapper process's state is passed in to allow the callback to mutate it if overridden.
"""
@callback xrmq_init(C.connection(), atom | Session.t(), term) :: C.result()
@doc """
Returns a part of the `:exrabbitmq` configuration section, specified with the
`key` argument.
For the configuration format see the top section of `ExRabbitMQ.Producer`.
**Deprecated:** Use `ExRabbitMQ.Config.Connection.from_env/2` instead.
"""
@callback xrmq_get_env_config(atom) :: keyword
@doc """
Returns the connection configuration as it was passed to `c:xrmq_init/2`.
This configuration is set in the wrapper process's dictionary.
For the configuration format see the top section of `ExRabbitMQ.Producer`.
**Deprecated:** Use `ExRabbitMQ.State.get_connection_config/0` instead.
"""
@callback xrmq_get_connection_config :: term
@doc """
This hook is called when a connection has been established and a new channel has been opened.
The wrapper process's state is passed in to allow the callback to mutate it if overridden.
"""
@callback xrmq_channel_setup(AMQP.Channel.t(), term) :: C.result()
@doc """
This hook is called when a connection has been established and a new channel has been opened,
right after `c:xrmq_channel_setup/2`.
The wrapper process's state is passed in to allow the callback to mutate it if overridden.
"""
@callback xrmq_channel_open(AMQP.Channel.t(), term) :: C.result()
@doc """
This overridable function publishes the **binary** `payload` to the `exchange` using the provided `routing_key`.
The wrapper process's state is passed in to allow the callback to mutate it if overridden.
"""
@callback xrmq_basic_publish(String.t(), String.t(), String.t(), [term]) ::
C.basic_publish_result()
@doc """
Helper function that extracts the `state` argument from the passed in tuple.
"""
@callback xrmq_extract_state({:ok, term} | {:error, term, term}) :: state :: term
# Injects the producer plumbing into the using module: common helpers, the
# GenServer-specific AST, and the two functions defined below.
defmacro __using__(_) do
common_ast = ExRabbitMQ.AST.Common.ast()
inner_ast = ExRabbitMQ.AST.Producer.GenServer.ast()
quote location: :keep do
require Logger
alias ExRabbitMQ.Config.Connection, as: XRMQConnectionConfig
alias ExRabbitMQ.Config.Session, as: XRMQSessionConfig
unquote(inner_ast)
# Resolves the connection/session configs, opens (or reuses) the
# connection, then opens and sets up a channel. On connection failure the
# caller's state is returned inside the error tuple.
def xrmq_init(connection_config, session_config \\ nil, state) do
connection_config = XRMQConnectionConfig.get(connection_config)
session_config = XRMQSessionConfig.get(session_config)
case xrmq_connection_setup(connection_config) do
:ok ->
XRMQState.set_session_config(session_config)
xrmq_open_channel_setup(state)
{:error, reason} ->
{:error, reason, state}
end
end
# Opens a channel and runs the session setup on it, normalizing every
# failure shape to `{:error, reason, state}`.
def xrmq_open_channel_setup(state) do
case xrmq_open_channel(state) do
{:ok, state} ->
{channel, _} = XRMQState.get_channel_info()
session_config = XRMQState.get_session_config()
xrmq_session_setup(channel, session_config, state)
{:error, _reason, _state} = error ->
error
{:error, reason} ->
{:error, reason, state}
error ->
{:error, error, state}
end
end
unquote(common_ast)
end
end
end
|
lib/ex_rabbit_m_q/producer.ex
| 0.881526
| 0.82963
|
producer.ex
|
starcoder
|
defmodule Grapevine.Gossip.Rumor do
@moduledoc false
use Grapevine.Gossip
alias Grapevine.Gossip.Rumor.State
alias Grapevine.Gossip.Message
# Periodic garbage collection: reschedules itself and drops updates whose
# metadata has expired past the ttl threshold.
def handle_info(
:gc,
%{self: pid, updates: updates, meta: meta, gc: timeout, ttl: ttl} = state
) do
gc(pid, timeout)
{:noreply, Map.merge(state, clean(updates, meta, ttl))}
end
# One-shot announcement that this node has joined; stored as a regular update
# keyed by the hash of its payload.
def handle_info(:initialized, %{membership_module: msm} = state) do
value = %{node: msm.self(), action: :initialized}
{:noreply, Map.merge(state, add_new_update(Message.hash!(value), value, state))}
end
# Feedback from a peer listing updates it already knew; decrements their
# remaining gossip rounds.
def handle_info({:feedback, known_updates}, state) do
{:noreply, %{state | meta: do_feedback(known_updates, state)}}
end
# Gossip behaviour init hook: schedules GC (unless disabled via opts),
# queues the :initialized announcement, and seeds rumor-specific defaults.
def do_init(%{self: self} = state, opts) do
if Keyword.get(opts, :gc, true), do: gc(self)
initialized(self)
state =
Map.merge(
state,
%{
level: Keyword.get(opts, :level, 1),
rounds: Keyword.get(opts, :rounds, 3),
gc: Keyword.get(opts, :gc, 1000),
ttl: Keyword.get(opts, :ttl, 30000),
meta: Map.new()
}
)
{:ok, state}
end
# Returns only the updates still in the "infected" (actively gossiped) state.
def filter(%{updates: updates, meta: meta}) do
Enum.filter(updates, fn {k, _v} ->
Map.get(meta, k) |> State.infected?()
end)
|> Map.new()
end
# Handles a batch of updates pushed by peer `from`. The user-supplied
# handler decides whether to accept, replace (:reset) or ignore them; in all
# accepting cases the sender is told which keys we already knew.
def push(
new_updates,
from,
%{
handler: handler,
updates: updates,
meta: meta,
rounds: rounds
}
) do
new_updates =
Enum.reduce(new_updates, Map.new(), fn {k, v}, acc ->
Map.put(acc, k, %{value: v})
end)
case apply(handler, :push, [new_updates, updates]) do
:ok ->
send_feedback(updates, new_updates, from)
%{updates: Map.merge(updates, new_updates), meta: add_new_meta(new_updates, meta, rounds)}
{:ok, new_updates} ->
send_feedback(updates, new_updates, from)
%{updates: Map.merge(updates, new_updates), meta: add_new_meta(new_updates, meta, rounds)}
{:reset, new_updates} ->
send_feedback(updates, new_updates, from)
%{updates: new_updates, meta: add_new_meta(new_updates, meta, rounds)}
:ignore ->
%{updates: updates}
end
end
# Picks up to `level` random peers to gossip with (all peers when level < 1).
def neighbours(%{membership_module: msm, level: level}), do: msm.list() |> random(level)
def merge(id, value, state), do: add_new_update(id, value, state)
# Adds fresh round-state for incoming keys; existing metadata wins the merge.
defp add_new_meta(updates, meta, rounds),
do:
Enum.reduce(updates, meta, fn {k, _v}, acc -> Map.merge(%{k => State.new(rounds)}, acc) end)
defp add_new_update(id, value, %{updates: updates, meta: meta, rounds: rounds}) do
%{
updates: Map.put(updates, id, %{value: value}),
meta: Map.put(meta, id, State.new(rounds))
}
end
defp send_feedback(updates, new_updates, from),
do: send(from, {:feedback, known(updates, new_updates)})
defp do_feedback(known_updates, %{meta: meta}), do: dec(meta, known_updates)
defp random(nodes, n) when n < 1, do: nodes
defp random(nodes, n), do: Enum.shuffle(nodes) |> Enum.take(n)
defp gc(pid, timeout \\ 0), do: Process.send_after(pid, :gc, timeout)
defp initialized(pid, timeout \\ 0), do: Process.send_after(pid, :initialized, timeout)
# Drops every update whose meta-state has expired past `threshold`.
# NOTE(review): the trailing Map.new/1 looks redundant (the reduce already
# yields a map) — presumably harmless; confirm before removing.
defp clean(updates, meta, threshold) do
meta
|> Enum.reduce(%{updates: updates, meta: meta}, fn {k, v}, acc ->
case State.expired?(v, threshold) do
false ->
acc
true ->
%{updates: updates, meta: meta} = acc
%{updates: Map.drop(updates, [k]), meta: Map.drop(meta, [k])}
end
end)
|> Map.new()
end
# Keys from `new_updates` that we already had in `updates`.
defp known(updates, new_updates) do
new_keys = Map.keys(new_updates)
Map.take(updates, new_keys) |> Map.keys()
end
# Decrements the remaining rounds for each key; a key reaching 0 rounds is
# transitioned to the removed state.
defp dec(updates, keys) do
Enum.reduce(keys, updates, fn k, acc ->
case Map.get(updates, k) |> State.dec() do
%{rounds: 0} = message -> Map.merge(acc, %{k => State.remove(message)})
message -> Map.merge(acc, %{k => message})
end
end)
end
end
|
lib/grapevine/gossip/rumor.ex
| 0.606265
| 0.418994
|
rumor.ex
|
starcoder
|
defmodule Xandra.Protocol do
  @moduledoc false

  import Bitwise

  # The eight valid single-bit flag masks: 1, 2, 4, ..., 128.
  @valid_flag_bits for shift <- 0..7, do: 1 <<< shift
  @flag_mask_range 0x00..0xFF

  @type flag_mask() :: 0x00..0xFF

  # Union of the valid single-bit flag values (1 | 2 | 4 | ... | 128).
  # Bug fix: this line was corrupted in the source (`"e(do: ...`): the
  # `&quote` capture that folds the bit list into a union AST had been
  # entity-mangled. Restored the reduce that builds the type union.
  @type flag_bit() ::
          unquote(Enum.reduce(@valid_flag_bits, &quote(do: unquote(&1) | unquote(&2))))

  # Raises unless `ast` is a bare variable. The decode_* macros below rebind
  # the buffer variable they are given, so the right-hand side of `<-` must
  # be a plain variable.
  defp assert_not_a_variable(ast) do
    if not match?({var, _context, nil} when is_atom(var), ast) do
      raise ArgumentError,
            "the right-hand side of <- must be a variable, got: #{Macro.to_string(ast)}"
    end
  end

  # Takes a protocol module and returns the protocol "format", that is, whether
  # frames (= envelopes) should be wrapped inside the v5+ frame wrapper or not.
  @spec frame_protocol_format(module()) :: :v4_or_less | :v5_or_more
  def frame_protocol_format(protocol_module)
  def frame_protocol_format(Xandra.Protocol.V3), do: :v4_or_less
  def frame_protocol_format(Xandra.Protocol.V4), do: :v4_or_less
  def frame_protocol_format(Xandra.Protocol.V5), do: :v5_or_more

  # Decodes a "string" as per
  # https://github.com/apache/cassandra/blob/dcf3d58c4b22b8b69e8505b170829172ea3c4f5c/doc/native_protocol_v5.spec#L361
  # > "A [short] n, followed by n bytes representing an UTF-8 string."
  # Expands to a binary match that binds `value` and rebinds `buffer` to the rest.
  defmacro decode_string({:<-, _, [value, buffer]}) do
    assert_not_a_variable(buffer)

    quote do
      <<size::16, unquote(value)::size(size)-bytes, unquote(buffer)::bits>> = unquote(buffer)
    end
  end

  # Decodes a length-prefixed [value]: a signed 32-bit size followed by that
  # many bytes. A negative size means null, binding `value` to nil; the given
  # `block` runs in both branches with `value` and `buffer` bound.
  defmacro decode_value({:<-, _, [value, buffer]}, type, do: block) do
    assert_not_a_variable(buffer)

    quote do
      <<size::32-signed, unquote(buffer)::bits>> = unquote(buffer)

      if size < 0 do
        unquote(value) = nil
        unquote(block)
      else
        <<data::size(size)-bytes, unquote(buffer)::bits>> = unquote(buffer)
        unquote(value) = decode_value(data, unquote(type))
        unquote(block)
      end
    end
  end

  # Decodes a "uuid" (a fixed 16-byte value).
  defmacro decode_uuid({:<-, _, [value, buffer]}) do
    assert_not_a_variable(buffer)

    quote do
      <<unquote(value)::16-bytes, unquote(buffer)::bits>> = unquote(buffer)
    end
  end

  # A [short] n, followed by n [string].
  # https://github.com/apache/cassandra/blob/ce4ae43a310a809fb0c82a7f48001a0f8206e156/doc/native_protocol_v5.spec#L383
  @spec decode_string_list(bitstring()) :: {[String.t()], bitstring()}
  def decode_string_list(<<count::16, buffer::bits>>) do
    decode_string_list(buffer, count, [])
  end

  defp decode_string_list(<<buffer::bits>>, 0, acc) do
    {Enum.reverse(acc), buffer}
  end

  defp decode_string_list(<<buffer::bits>>, count, acc) do
    decode_string(item <- buffer)
    decode_string_list(buffer, count - 1, [item | acc])
  end

  # Only used in native protocol v4+.
  @spec decode_warnings(bitstring(), boolean()) :: {[String.t()], bitstring()}
  def decode_warnings(body, _warning? = false), do: {[], body}
  def decode_warnings(body, _warning? = true), do: decode_string_list(body)

  # CQL "date" is days since the Unix epoch.
  @spec date_from_unix_days(integer()) :: Calendar.date()
  def date_from_unix_days(days) when is_integer(days) do
    Date.add(~D[1970-01-01], days)
  end

  @spec date_to_unix_days(Calendar.date()) :: integer()
  def date_to_unix_days(date) do
    Date.diff(date, ~D[1970-01-01])
  end

  # CQL "time" is nanoseconds since midnight.
  @spec time_from_nanoseconds(integer()) :: Calendar.time()
  def time_from_nanoseconds(nanoseconds) when is_integer(nanoseconds) do
    Time.add(~T[00:00:00], nanoseconds, :nanosecond)
  end

  @spec time_to_nanoseconds(Calendar.time()) :: integer()
  def time_to_nanoseconds(time) do
    Time.diff(time, ~T[00:00:00.000000], :nanosecond)
  end

  # ORs `flag_bit` into `bitmask` when `value_present` is truthy; both inputs
  # are guarded to be within the valid flag-byte domain.
  @spec set_flag(flag_mask(), pos_integer(), term()) :: flag_mask()
  def set_flag(bitmask, flag_bit, value_present)
      when is_integer(bitmask) and bitmask in @flag_mask_range and is_integer(flag_bit) and
             flag_bit in @valid_flag_bits do
    if value_present do
      bitmask ||| flag_bit
    else
      bitmask
    end
  end
end
|
lib/xandra/protocol/protocol.ex
| 0.784319
| 0.449272
|
protocol.ex
|
starcoder
|
defmodule ElixirLS.LanguageServer.Providers.ExecuteCommand.ManipulatePipes.AST do
@moduledoc """
AST manipulation helpers for the `ElixirLS.LanguageServer.Providers.ExecuteCommand.ManipulatePipes`\
command.
"""
@doc "Parses a string and converts the first function call, pre-order depth-first, into a pipe."
def to_pipe(code_string) do
# The accumulator flag ensures only the FIRST eligible call is converted.
{piped_ast, _} =
code_string
|> Code.string_to_quoted!()
|> Macro.prewalk(%{has_piped: false}, &do_to_pipe/2)
Macro.to_string(piped_ast)
end
@doc "Parses a string and converts the first pipe call, post-order depth-first, into a function call."
def from_pipe(code_string) do
{unpiped_ast, _} =
code_string
|> Code.string_to_quoted!()
|> Macro.postwalk(%{has_unpiped: false}, fn
# Pipe into an anonymous-function call: `h |> f.(args)` -> `f.(h, args)`.
# NOTE(review): `line` is bound three times in this pattern, which requires
# all three metadata values to be equal — presumably intended, but confirm
# against upstream; differing metadata would skip this clause.
{:|>, line, [h, {{:., line, [{_, _, nil}]} = anonymous_function_node, line, t}]},
%{has_unpiped: false} = acc ->
{{anonymous_function_node, line, [h | t]}, Map.put(acc, :has_unpiped, true)}
# General pipe: `left |> f(args)` -> `f(left, args)`.
{:|>, line, [left, {function, _, args}]}, %{has_unpiped: false} = acc ->
{{function, line, [left | args]}, Map.put(acc, :has_unpiped, true)}
node, acc ->
{node, acc}
end)
Macro.to_string(unpiped_ast)
end
# Already a pipe: recurse into the left side so the innermost call of an
# existing chain is the one converted.
defp do_to_pipe({:|>, line, [left, right]}, %{has_piped: false} = acc) do
{{:|>, line, [left |> do_to_pipe(acc) |> elem(0), right]}, Map.put(acc, :has_piped, true)}
end
# Anonymous-function call `f.(h, t...)` -> `h |> f.(t...)`.
defp do_to_pipe(
{{:., line, [{_, _, nil}]} = anonymous_function_node, _meta, [h | t]},
%{has_piped: false} = acc
) do
{{:|>, line, [h, {anonymous_function_node, line, t}]}, Map.put(acc, :has_piped, true)}
end
# Qualified call `Mod.f(a, rest...)` -> `a |> Mod.f(rest...)` (only with args).
defp do_to_pipe({{:., line, _args} = function, _meta, args}, %{has_piped: false} = acc)
when args != [] do
{{:|>, line, [hd(args), {function, line, tl(args)}]}, Map.put(acc, :has_piped, true)}
end
# Local call `f(h, t...)` -> `h |> f(t...)`, skipping special forms and
# unary/binary operators (piping into `+`, `not`, etc. would be invalid).
defp do_to_pipe({function, line, [h | t]} = node, %{has_piped: false} = acc)
when is_atom(function) and function not in [:., :__aliases__, :"::", :{}, :|>] and t != [] do
with :error <- Code.Identifier.binary_op(function),
:error <- Code.Identifier.unary_op(function) do
{{:|>, line, [h, {function, line, t}]}, Map.put(acc, :has_piped, true)}
else
_ ->
{node, acc}
end
end
# Anything else (or after the first conversion): pass through unchanged.
defp do_to_pipe(node, acc) do
{node, acc}
end
end
|
apps/language_server/lib/language_server/providers/execute_command/manipulate_pipes/ast.ex
| 0.756537
| 0.420748
|
ast.ex
|
starcoder
|
defmodule RTypes.Generator.StreamData do
@moduledoc """
The module contains functions to derive generators to be used with StreamData library.
"""
import StreamData
@behaviour RTypes.Generator
@doc """
Derive a StreamData generator for the specified type AST.
"""
@spec derive(RTypes.Extractor.type()) :: StreamData.t(v)
when v: term()
@impl RTypes.Generator
def derive({:type, _line, :any, _args}), do: term()
def derive({:type, _line, :atom, _args}), do: atom(:alphanumeric)
def derive({:type, _line, :integer, _args}), do: integer()
def derive({:type, _line, :float, _args}), do: float()
## literals
def derive({:atom, _line, term}), do: constant(term)
def derive({:integer, _line, term}), do: constant(term)
## ranges
def derive({:type, _, :range, [{:integer, _, l}, {:integer, _, u}]}) do
integer(l..u)
end
## binary
def derive({:type, _line, :binary, []}), do: binary()
## bitstrings
def derive({:type, _line, :binary, [{:integer, _, 0}, {:integer, _, 0}]}) do
bitstring(length: 0)
end
def derive({:type, _line, :binary, [{:integer, _, 0}, {:integer, _, units}]}) do
bind(one_of([constant(0), positive_integer()]), fn count ->
bitstring(length: units * count)
end)
end
def derive({:type, _line, :binary, [{:integer, _, size}, _]}) do
bitstring(length: size)
end
## empty list
def derive({:type, _line, nil, _args}), do: constant([])
## composite types
## lists
def derive({:type, _line, :list, []}), do: list_of(term())
def derive({:type, _line, :list, [typ]}) do
list_of(derive(typ))
end
def derive({:type, _line, :nonempty_list, []}) do
nonempty(list_of(term()))
end
def derive({:type, _line, :nonempty_list, [typ]}) do
nonempty(list_of(derive(typ)))
end
def derive({:type, _line, :maybe_improper_list, []}) do
maybe_improper_list_of(term(), term())
end
def derive({:type, _line, :maybe_improper_list, [typ1, typ2]}) do
  maybe_improper_list_of(derive(typ1), derive(typ2))
end
# NOTE(review): for the nonempty *maybe*-improper list types we only generate
# improper lists. Every improper list is a valid maybe-improper list, so the
# generator is sound, just not exhaustive over the type — TODO confirm intent.
def derive({:type, _line, :nonempty_maybe_improper_list, []}) do
  nonempty_improper_list_of(term(), term())
end
def derive({:type, _line, :nonempty_maybe_improper_list, [typ1, typ2]}) do
  nonempty_improper_list_of(derive(typ1), derive(typ2))
end
## maps
# `map()` — any map with arbitrary keys and values.
def derive({:type, _line, :map, :any}), do: map_of(term(), term())
# A map type with explicit field specs: derive a generator per field and
# merge each generated partial map into a single map.
def derive({:type, _line, :map, typs}) do
  typs
  |> Enum.map(&derive_map_field/1)
  |> Enum.reduce(constant(%{}), fn gen, acc_gen ->
    bind({gen, acc_gen}, fn {m, acc} ->
      constant(Map.merge(acc, m))
    end)
  end)
end
## tuples
# `tuple()` — a tuple of arbitrary size with arbitrary elements.
def derive({:type, _line, :tuple, :any}) do
  bind(one_of([constant(0), positive_integer()]), fn count ->
    # BUG FIX: `bind/2` requires its callback to return a *generator*.
    # The original returned a bare tuple of generators, which would fail at
    # generation time; wrapping with `tuple/1` yields a generator of tuples.
    Stream.repeatedly(&term/0)
    |> Enum.take(count)
    |> List.to_tuple()
    |> tuple()
  end)
end
# Fixed-shape tuple: derive a generator per element type.
def derive({:type, _line, :tuple, typs}) do
  typs
  |> Enum.map(&derive/1)
  |> List.to_tuple()
  |> tuple()
end
## integers and miscellaneous built-in types
def derive({:type, _line, :neg_integer, []}) do
  bind(positive_integer(), fn x -> constant(-1 * x) end)
end
def derive({:type, _line, :non_neg_integer, []}) do
  one_of([constant(0), positive_integer()])
end
def derive({:type, _line, :pos_integer, []}), do: positive_integer()
def derive({:type, _line, :timeout, []}) do
  one_of([constant(:infinity), constant(0), positive_integer()])
end
# Erlang `string()` is a list of Unicode code points.
def derive({:type, _line, :string, []}) do
  list_of(integer(0..0x10FFFF))
end
def derive({:type, _line, :nonempty_string, []}) do
  nonempty(list_of(integer(0..0x10FFFF)))
end
def derive({:type, _line, :number, []}), do: one_of([float(), integer()])
def derive({:type, _line, :module, []}), do: atom(:alphanumeric)
def derive({:type, _line, :iolist, []}), do: iolist()
def derive({:type, _line, :iodata, []}), do: iodata()
def derive({:type, _line, :byte, []}), do: integer(0..255)
def derive({:type, _line, :char, []}), do: integer(0..0x10FFFF)
def derive({:type, _line, :boolean, []}), do: boolean()
def derive({:type, _line, :bitstring, []}), do: bitstring()
def derive({:type, _line, :arity, []}), do: integer(0..255)
def derive({:type, _line, :term, []}), do: term()
def derive({:type, _, :union, types}), do: one_of(Enum.map(types, &derive/1))
# Fallback: any remaining built-in type is unsupported.
def derive({:type, _line, typ, _args}) do
  raise "can not derive a generator for type #{typ}"
end
# Derives a generator for a single field spec of a map type. Each clause
# yields a generator of a (possibly empty) partial map that the caller
# merges into the final map.
#
# Required field whose key is a literal atom, e.g. `%{required(:name) => t}`.
defp derive_map_field({:type, _, :map_field_exact, [{:atom, _, key}, value_type]}) do
  fixed_map(%{key => derive(value_type)})
end
# Required field with an arbitrary key type: exactly one key/value pair.
defp derive_map_field({:type, _, :map_field_exact, [key_type, value_type]}) do
  map_of(derive(key_type), derive(value_type), length: 1)
end
# Optional field: the generated key may or may not appear in the map.
defp derive_map_field({:type, _, :map_field_assoc, [key_type, value_type]}) do
  bind(derive(key_type), fn key -> optional_map(%{key => derive(value_type)}) end)
end
end
|
lib/rtypes/generator/stream_data.ex
| 0.795181
| 0.501404
|
stream_data.ex
|
starcoder
|
defmodule Process do
  @moduledoc """
  This module provides convenience functions around processes and
  the process dictionary. In Erlang, most of these functions are
  auto-imported, but in Elixir they are grouped in a module for
  convenience. Notice that these functions, different from Erlang's,
  always return nil instead of undefined. You can use their Erlang
  version if you want the undefined value.
  """

  @doc """
  Returns true if the process exists and is alive, that is,
  is not exiting and has not exited. Otherwise, returns false.
  `pid` must refer to a process at the local node.
  """
  @spec alive?(pid) :: boolean
  def alive?(pid) do
    :erlang.is_process_alive(pid)
  end

  @doc """
  Returns all key-values in the dictionary.
  """
  @spec get :: [{term, term}]
  def get do
    :erlang.get()
  end

  @doc """
  Returns the value for the given key.
  """
  @spec get(term) :: term
  @spec get(term, default :: term) :: term
  # FIX: default arguments use `\\`; the former `//` syntax does not compile.
  def get(key, default \\ nil) do
    case :erlang.get(key) do
      :undefined ->
        default
      value ->
        value
    end
  end

  @doc """
  Returns all keys that have the given `value`.
  """
  @spec get_keys(term) :: [term]
  def get_keys(value) do
    :erlang.get_keys(value)
  end

  @doc """
  Stores the given key-value in the process dictionary.
  """
  @spec put(term, term) :: term | nil
  def put(key, value) do
    nillify :erlang.put(key, value)
  end

  @doc """
  Deletes all items in the dictionary.
  """
  @spec delete :: [{term, term}]
  def delete() do
    :erlang.erase()
  end

  @doc """
  Deletes the given key from the dictionary.
  """
  @spec delete(term) :: term | nil
  def delete(key) do
    nillify :erlang.erase(key)
  end

  @doc """
  Sends an exit signal with the given reason to the pid.
  The following behavior applies if reason is any term except `:normal` or `:kill`:
  1) If pid is not trapping exits, pid will exit with the given reason;
  2) If pid is trapping exits, the exit signal is transformed into a message
     {'EXIT', from, reason} and delivered to the message queue of pid;
  3) If reason is the atom `:normal`, pid will not exit. If it is trapping exits,
     the exit signal is transformed into a message {'EXIT', from, :normal} and
     delivered to its message queue;
  4) If reason is the atom `:kill`, that is if `exit(pid, :kill)` is called, an
     untrappable exit signal is sent to pid which will unconditionally exit with
     exit reason `:killed`.
  ## Examples
      Process.exit(pid, :kill)
  """
  @spec exit(pid, term) :: true
  def exit(pid, reason) do
    :erlang.exit(pid, reason)
  end

  @doc """
  Returns the pid (process identifier) of the calling process.
  """
  @spec self() :: pid
  def self() do
    :erlang.self()
  end

  @doc """
  Returns the pid of a new process started by the application of `fun`.
  It behaves exactly the same as `Kernel.spawn/1`.
  """
  @spec spawn((() -> any)) :: pid
  def spawn(fun) do
    :erlang.spawn(fun)
  end

  @type spawn_opt :: :link | :monitor | {:priority, :low | :normal | :high} |
                     {:fullsweep_after, non_neg_integer} |
                     {:min_heap_size, non_neg_integer} |
                     {:min_bin_vheap_size, non_neg_integer}
  @type spawn_opts :: [spawn_opt]

  @doc """
  Returns the pid of a new process started by the application of `fun`.
  It also accepts extra options, for the list of available options
  check http://www.erlang.org/doc/man/erlang.html#spawn_opt-2
  """
  @spec spawn((() -> any), spawn_opts) :: pid | {pid, reference}
  def spawn(fun, opts) do
    :erlang.spawn_opt(fun, opts)
  end

  @doc """
  Returns the pid of a new process started by the application of
  `module.function(args)`. The new process created will be placed in the system
  scheduler queue and be run some time later.
  It behaves exactly the same as the `Kernel.spawn/3` function.
  """
  @spec spawn(module, atom, [any]) :: pid
  def spawn(mod, fun, args) do
    :erlang.spawn(mod, fun, args)
  end

  @doc """
  Returns the pid of a new process started by the application of
  `module.function(args)`. The new process created will be placed in the system
  scheduler queue and be run some time later.
  It also accepts extra options, for the list of available options
  check http://www.erlang.org/doc/man/erlang.html#spawn_opt-4
  """
  @spec spawn(module, atom, [any], spawn_opts) :: pid | {pid, reference}
  def spawn(mod, fun, args, opts) do
    :erlang.spawn_opt(mod, fun, args, opts)
  end

  @doc """
  Returns the pid of a new process started by the application of `fun`.
  A link is created between the calling process and the new
  process, atomically.
  """
  @spec spawn_link((() -> any)) :: pid
  def spawn_link(fun) do
    :erlang.spawn_link(fun)
  end

  @doc """
  Returns the pid of a new process started by the application of
  `module.function(args)`. A link is created between the calling process
  and the new process, atomically. Otherwise works like spawn/3.
  """
  @spec spawn_link(module, atom, [any]) :: pid
  def spawn_link(mod, fun, args) do
    :erlang.spawn_link(mod, fun, args)
  end

  @doc """
  Returns the pid of a new process started by the application of `fun`
  and reference for a monitor created to the new process.
  """
  @spec spawn_monitor((() -> any)) :: {pid, reference}
  def spawn_monitor(fun) do
    :erlang.spawn_monitor(fun)
  end

  @doc """
  A new process is started by the application of `module.function(args)`
  and the process is monitored at the same time. Returns the pid and a
  reference for the monitor. Otherwise works like spawn/3.
  """
  @spec spawn_monitor(module, atom, [any]) :: {pid, reference}
  def spawn_monitor(mod, fun, args) do
    :erlang.spawn_monitor(mod, fun, args)
  end

  @doc """
  The calling process starts monitoring the item given.
  It returns the monitor reference.
  See http://www.erlang.org/doc/man/erlang.html#monitor-2 for more info.
  """
  @spec monitor(pid | {reg_name :: atom, node :: atom} | reg_name :: atom) :: reference
  def monitor(item) do
    :erlang.monitor(:process, item)
  end

  @doc """
  If monitor_ref is a reference which the calling process
  obtained by calling monitor/1, this monitoring is turned off.
  If the monitoring is already turned off, nothing happens.
  See http://www.erlang.org/doc/man/erlang.html#demonitor-2 for more info.
  """
  @spec demonitor(reference) :: true
  @spec demonitor(reference, options :: [:flush | :info]) :: boolean
  # FIX: default arguments use `\\`; the former `//` syntax does not compile.
  def demonitor(monitor_ref, options \\ []) do
    :erlang.demonitor(monitor_ref, options)
  end

  @doc """
  Returns a list of process identifiers corresponding to all the
  processes currently existing on the local node.
  Note that a process that is exiting, exists but is not alive, i.e.,
  alive?/1 will return false for a process that is exiting,
  but its process identifier will be part of the result returned.
  See http://www.erlang.org/doc/man/erlang.html#processes-0 for more info.
  """
  @spec list :: [pid]
  def list do
    :erlang.processes()
  end

  @doc """
  Creates a link between the calling process and another process
  (or port) `pid`, if there is not such a link already.
  See http://www.erlang.org/doc/man/erlang.html#link-1 for more info.
  """
  @spec link(pid | port) :: true
  def link(pid) do
    :erlang.link(pid)
  end

  @doc """
  Removes the link, if there is one, between the calling process and
  the process or port referred to by `pid`. Returns true and does not
  fail, even if there is no link or `id` does not exist
  See http://www.erlang.org/doc/man/erlang.html#unlink-1 for more info.
  """
  @spec unlink(pid | port) :: true
  def unlink(pid) do
    :erlang.unlink(pid)
  end

  @doc """
  Associates the name with a pid or a port identifier. name, which must
  be an atom, can be used instead of the pid / port identifier in the
  send operator (name <- message).
  See http://www.erlang.org/doc/man/erlang.html#register-2 for more info.
  """
  @spec register(pid | port, atom) :: true
  def register(pid, name) do
    # Note the argument order: Elixir takes (pid, name) while the
    # underlying BIF takes (name, pid).
    :erlang.register(name, pid)
  end

  @doc """
  Removes the registered name, associated with a pid or a port identifier.
  See http://www.erlang.org/doc/man/erlang.html#unregister-1 for more info.
  """
  @spec unregister(atom) :: true
  def unregister(name) do
    :erlang.unregister(name)
  end

  @doc """
  Returns the pid or port identifier with the registered name.
  Returns nil if the name is not registered.
  See http://www.erlang.org/doc/man/erlang.html#whereis-1 for more info.
  """
  @spec whereis(atom) :: pid | port | nil
  def whereis(name) do
    nillify :erlang.whereis(name)
  end

  @doc """
  Returns the pid of the group leader for the process which evaluates the function.
  """
  @spec group_leader :: pid
  def group_leader do
    :erlang.group_leader
  end

  @doc """
  Sets the group leader of Pid to GroupLeader. Typically, this is used when a processes
  started from a certain shell should have another group leader than `:init`.
  """
  @spec group_leader(leader :: pid, pid) :: true
  def group_leader(leader, pid) do
    :erlang.group_leader(leader, pid)
  end

  @doc """
  Returns a list of names which have been registered using register/2.
  """
  @spec registered :: [atom]
  def registered do
    :erlang.registered()
  end

  @typep process_flag :: :trap_exit | :error_handler | :min_heap_size |
                         :min_bin_vheap_size | :priority | :save_calls |
                         :sensitive

  @doc """
  Sets certain flags for the process which calls this function.
  Returns the old value of the flag.
  See http://www.erlang.org/doc/man/erlang.html#process_flag-2 for more info.
  """
  @spec flag(process_flag, term) :: term
  def flag(flag, value) do
    :erlang.process_flag(flag, value)
  end

  @doc """
  Sets certain flags for the process Pid, in the same manner as flag/2.
  Returns the old value of the flag. The allowed values for Flag are
  only a subset of those allowed in flag/2, namely: save_calls.
  See http://www.erlang.org/doc/man/erlang.html#process_flag-3 for more info.
  """
  @spec flag(pid, process_flag, term) :: term
  def flag(pid, flag, value) do
    :erlang.process_flag(pid, flag, value)
  end

  @doc """
  Returns information about the process identified by pid or nil if the process
  is not alive.
  Use this only for debugging information.
  See http://www.erlang.org/doc/man/erlang.html#process_info-1 for more info.
  """
  @spec info(pid) :: Keyword.t
  def info(pid) do
    nillify :erlang.process_info(pid)
  end

  @doc """
  Returns information about the process identified by pid
  or nil if the process is not alive.
  See http://www.erlang.org/doc/man/erlang.html#process_info-2 for more info.
  """
  @spec info(pid, atom) :: {atom, term}
  def info(pid, spec) do
    nillify :erlang.process_info(pid, spec)
  end

  # Translates Erlang's `:undefined` sentinel to Elixir's `nil`.
  @compile { :inline, nillify: 1 }
  defp nillify(:undefined), do: nil
  defp nillify(other), do: other
end
|
lib/elixir/lib/process.ex
| 0.817975
| 0.49823
|
process.ex
|
starcoder
|
defmodule CreateFunEndpoint.Digester do
  @moduledoc """
  The contents of this file are mostly copied from Phoenix.Digester.
  We work with multiple phoenix application so we needed to adapt the digester slightly.
  Unfortunately most of the necessary functions are private, hence the copying.
  View https://github.com/phoenixframework/phoenix/blob/master/lib/phoenix/digester.ex for the original file.
  Digests and compresses static files.
  For each file under the given input path, Phoenix will generate a digest
  and also compress in `.gz` format. The filename and its digest will be
  used to generate the cache manifest file. It also avoids duplication, checking
  for already digested files.
  For stylesheet files found under the given path, Phoenix will replace
  asset references with the digested paths, as long as the asset exists
  in the generated cache manifest.
  """

  @digested_file_regex ~r/(-[a-fA-F\d]{32})/
  @manifest_version 1
  @empty_manifest %{
    "version" => 1,
    "digests" => %{},
    "latest" => %{}
  }

  # Current UTC time as gregorian seconds; used for digest mtimes and
  # age-based cleanup.
  defp now() do
    :calendar.datetime_to_gregorian_seconds(:calendar.universal_time)
  end

  # True when every path in `paths` exists on disk.
  # (Replaces a reduce_while that only ever carried `true` as accumulator.)
  defp files_exists?(paths) when is_list(paths) do
    Enum.all?(paths, &File.exists?/1)
  end

  @doc """
  Digests and compresses the static files and saves them in the given output path.
  * `input_paths` - A list of paths where the assets are located
  * `output_path` - The path where the compiled/compressed files will be saved
  """
  @spec compile(List.t, String.t) :: :ok | {:error, :invalid_path}
  def compile(input_paths_arg, output_path) when is_list(input_paths_arg) do
    # Each entry may be "input_path:output_append"; the part before the first
    # ":" is the on-disk input directory.
    input_paths =
      Enum.map(input_paths_arg, fn path ->
        [h | _] = String.split(path, ":", parts: 2, trim: true)
        h
      end)

    if files_exists?(input_paths) do
      unless File.exists?(output_path), do: File.mkdir_p!(output_path)

      # Pair every digested file with the input path it came from, so
      # write_to_disk/3 can reconstruct the per-application output location.
      digested_files_with_output_path =
        Enum.flat_map(input_paths_arg, fn path ->
          [input_path | tail] = String.split(path, ":", parts: 2, trim: true)
          output_append = List.first(tail)

          input_path
          |> filter_files(output_append)
          |> Enum.map(&{digest(&1), input_path})
        end)

      digested_files = Enum.map(digested_files_with_output_path, fn {file, _} -> file end)
      digests = load_compile_digests(output_path)
      manifest = generate_manifest(digested_files, digests, output_path)
      Enum.each(digested_files_with_output_path, &write_to_disk(&1, manifest, output_path))
    else
      {:error, :invalid_path}
    end
  end

  # All regular, not-yet-digested files below `input_path`, mapped to the
  # internal file representation.
  defp filter_files(input_path, output_append \\ nil) do
    input_path
    |> Path.join("**")
    |> Path.wildcard()
    # FIX: this filter was mojibake-garbled in the source ("&not;(...)");
    # rewritten as the equivalent reject of directories and compiled files.
    |> Enum.reject(&(File.dir?(&1) or compiled_file?(&1)))
    |> Enum.map(&map_file(&1, input_path, output_append))
  end

  # All uncompressed, already-digested files below `output_path` (used when
  # migrating a pre-v1 manifest).
  defp filter_digested_files(output_path) do
    output_path
    |> Path.join("**")
    |> Path.wildcard()
    |> Enum.filter(&uncompressed_digested_file?/1)
    |> Enum.map(&map_digested_file(&1, output_path))
  end

  defp load_compile_digests(output_path) do
    manifest = load_manifest(output_path)
    manifest["digests"]
  end

  # Reads cache_manifest.json, migrating old formats; falls back to an
  # empty manifest when none exists yet.
  defp load_manifest(output_path) do
    manifest_path = Path.join(output_path, "cache_manifest.json")

    if File.exists?(manifest_path) do
      manifest_path
      |> File.read!()
      |> Poison.decode!()
      |> migrate_manifest(output_path)
    else
      @empty_manifest
    end
  end

  defp migrate_manifest(%{"version" => 1} = manifest, _output_path), do: manifest

  # Pre-v1 manifests were just the "latest" map; rebuild the digests map by
  # scanning the already-digested files on disk.
  defp migrate_manifest(latest, output_path) do
    digests =
      output_path
      |> filter_digested_files()
      |> generate_new_digests()

    @empty_manifest
    |> Map.put("digests", digests)
    |> Map.put("latest", latest)
  end

  # Builds and persists the manifest; returns the "latest" map used to
  # rewrite asset references.
  defp generate_manifest(files, old_digests, output_path) do
    latest =
      Map.new(files, &{
        manifest_join(&1.relative_path, &1.filename),
        manifest_join(&1.relative_path, &1.digested_filename)
      })

    old_digests_that_still_exist =
      old_digests
      |> Enum.filter(fn {file, _} -> File.exists?(Path.join(output_path, file)) end)
      |> Map.new()

    new_digests = generate_new_digests(files)
    digests = Map.merge(old_digests_that_still_exist, new_digests)

    save_manifest(
      %{"latest" => latest, "version" => @manifest_version, "digests" => digests},
      output_path
    )

    latest
  end

  defp save_manifest(%{"latest" => _, "version" => _, "digests" => _} = manifest, output_path) do
    manifest_content = Poison.encode!(manifest)
    File.write!(Path.join(output_path, "cache_manifest.json"), manifest_content)
  end

  defp generate_new_digests(files) do
    Map.new(files, &{
      manifest_join(&1.relative_path, &1.digested_filename),
      build_digest(&1)
    })
  end

  defp build_digest(file) do
    %{logical_path: manifest_join(file.relative_path, file.filename),
      mtime: now(),
      size: file.size,
      digest: file.digest}
  end

  defp manifest_join(".", filename), do: filename
  defp manifest_join(path, filename), do: Path.join(path, filename)

  # Files that must not be digested again: already-digested names, gzipped
  # variants, and the manifest itself.
  defp compiled_file?(file_path) do
    Regex.match?(@digested_file_regex, Path.basename(file_path)) ||
      Path.extname(file_path) == ".gz" ||
      Path.basename(file_path) == "cache_manifest.json"
  end

  # A file that already carries a digest in its name AND is not the gzipped
  # variant.
  # BUG FIX: the original `Regex.match?(...) || !Path.extname(f) == ".gz"`
  # was always equivalent to the regex match alone (`!` binds tighter than
  # `==`, so the right operand was always false) and wrongly included ".gz"
  # files in manifest migration.
  defp uncompressed_digested_file?(file_path) do
    Regex.match?(@digested_file_regex, Path.basename(file_path)) and
      Path.extname(file_path) != ".gz"
  end

  # No output_append: the relative output path is just the path relative to
  # the input directory. (An unused map_file/2 wrapper was removed.)
  defp map_file(file_path, input_path, nil) do
    relative_input_path = Path.relative_to(file_path, input_path) |> Path.dirname()
    map_file_with_relative_path(file_path, relative_input_path, relative_input_path)
  end

  # With output_append: the manifest path gains the append prefix, while the
  # on-disk write location stays relative to the input directory.
  defp map_file(file_path, input_path, output_append) do
    relative_path = Path.join(output_append, Path.relative_to(file_path, input_path) |> Path.dirname())
    relative_no_append = Path.relative_to(file_path, input_path) |> Path.dirname()
    map_file_with_relative_path(file_path, relative_path, relative_no_append)
  end

  defp map_file_with_relative_path(file_path, relative_path, relative_no_append) do
    {:ok, stats} = File.stat(file_path)

    %{absolute_path: file_path,
      relative_path: relative_path,
      relative_path_no_append: relative_no_append,
      filename: Path.basename(file_path),
      size: stats.size,
      content: File.read!(file_path)}
  end

  # Rebuilds the internal file representation from an already-digested file
  # on disk (used for manifest migration).
  defp map_digested_file(file_path, output_path) do
    {:ok, stats} = File.stat(file_path)
    digested_filename = Path.basename(file_path)
    [digest, _] = Regex.run(@digested_file_regex, digested_filename)
    digest = String.trim_leading(digest, "-")

    %{absolute_path: file_path,
      relative_path: Path.relative_to(file_path, output_path) |> Path.dirname(),
      digested_filename: digested_filename,
      filename: String.replace(digested_filename, @digested_file_regex, ""),
      digest: digest,
      size: stats.size,
      content: File.read!(file_path)}
  end

  # Computes the MD5 digest of the file content and derives the digested
  # filename ("name-<md5>.ext").
  defp digest(file) do
    name = Path.rootname(file.filename)
    extension = Path.extname(file.filename)
    digest = Base.encode16(:erlang.md5(file.content), case: :lower)

    Map.merge(file, %{
      digested_filename: "#{name}-#{digest}#{extension}",
      digest: digest,
    })
  end

  # Writes the digested and plain variants (plus gzipped versions, when the
  # extension is gzippable) into the per-application output directory.
  defp write_to_disk({file, application_output_path}, manifest, output_path) do
    path = Path.join(application_output_path, file.relative_path_no_append)
    File.mkdir_p!(path)
    digested_file_contents = digested_contents(file, manifest)

    # compressed files
    if compress_file?(file) do
      File.write!(Path.join(path, file.digested_filename <> ".gz"), :zlib.gzip(digested_file_contents))
      File.write!(Path.join(path, file.filename <> ".gz"), :zlib.gzip(file.content))
    end

    # uncompressed files
    File.write!(Path.join(path, file.digested_filename), digested_file_contents)
    File.write!(Path.join(path, file.filename), file.content)
    file
  end

  defp compress_file?(file) do
    Path.extname(file.filename) in Application.get_env(:phoenix, :gzippable_exts)
  end

  # Rewrites asset references inside the file to their digested counterparts,
  # depending on the file type.
  defp digested_contents(file, manifest) do
    case Path.extname(file.filename) do
      ".css" -> digest_stylesheet_asset_references(file, manifest)
      ".js" -> digest_javascript_asset_references(file, manifest)
      ".map" -> digest_javascript_map_asset_references(file, manifest)
      _ -> file.content
    end
  end

  @stylesheet_url_regex ~r{(url\(\s*)(\S+?)(\s*\))}
  @quoted_text_regex ~r{\A(['"])(.+)\1\z}

  # Rewrites `url(...)` references in stylesheets, preserving quoting.
  defp digest_stylesheet_asset_references(file, manifest) do
    Regex.replace(@stylesheet_url_regex, file.content, fn _, open, url, close ->
      case Regex.run(@quoted_text_regex, url) do
        [_, quote_symbol, url] ->
          open <> quote_symbol <> digested_url(url, file, manifest, true) <> quote_symbol <> close
        nil ->
          open <> digested_url(url, file, manifest, true) <> close
      end
    end)
  end

  @javascript_source_map_regex ~r{(//#\s*sourceMappingURL=\s*)(\S+)}

  # Rewrites `sourceMappingURL=` comments in JavaScript files.
  defp digest_javascript_asset_references(file, manifest) do
    Regex.replace(@javascript_source_map_regex, file.content, fn _, source_map_text, url ->
      source_map_text <> digested_url(url, file, manifest, false)
    end)
  end

  @javascript_map_file_regex ~r{(['"]file['"]:['"])([^,"']+)(['"])}

  # Rewrites the "file" entry inside JavaScript source maps.
  defp digest_javascript_map_asset_references(file, manifest) do
    Regex.replace(@javascript_map_file_regex, file.content, fn _, open_text, url, close_text ->
      open_text <> digested_url(url, file, manifest, false) <> close_text
    end)
  end

  # Absolute (root-relative) reference: look it up in the manifest directly.
  defp digested_url("/" <> relative_path, _file, manifest, with_vsn?) do
    case Map.fetch(manifest, relative_path) do
      {:ok, digested_path} -> relative_digested_path(digested_path, with_vsn?)
      :error -> "/" <> relative_path
    end
  end

  # Relative reference: resolve against the referencing file's directory,
  # leaving external (scheme/host) URLs untouched.
  defp digested_url(url, file, manifest, with_vsn?) do
    case URI.parse(url) do
      %URI{scheme: nil, host: nil} ->
        manifest_path =
          file.relative_path
          |> Path.join(url)
          |> Path.expand()
          |> Path.relative_to_cwd()

        case Map.fetch(manifest, manifest_path) do
          {:ok, digested_path} ->
            absolute_digested_url(url, digested_path, with_vsn?)
          :error -> url
        end
      _ -> url
    end
  end

  defp relative_digested_path(digested_path, true), do: relative_digested_path(digested_path) <> "?vsn=d"
  defp relative_digested_path(digested_path, false), do: relative_digested_path(digested_path)
  defp relative_digested_path(digested_path), do: "/" <> digested_path

  defp absolute_digested_url(url, digested_path, true) do
    absolute_digested_url(url, digested_path) <> "?vsn=d"
  end
  defp absolute_digested_url(url, digested_path, false) do
    absolute_digested_url(url, digested_path)
  end
  defp absolute_digested_url(url, digested_path) do
    url
    |> Path.dirname()
    |> Path.join(Path.basename(digested_path))
  end

  @doc """
  Delete compiled/compressed asset files that are no longer in use based on
  specified criteria.
  * `output_path` - The path where the compiled/compressed files will be saved
  * `age` - The max age of assets to keep in seconds
  * `keep` - The number of old versions to keep
  """
  @spec clean(String.t, integer, integer, integer) :: :ok | {:error, :invalid_path}
  def clean(output_path, age, keep, now \\ now()) do
    if File.exists?(output_path) do
      manifest = load_manifest(output_path)
      files = files_to_clean(manifest, now - age, keep)
      remove_files(files, output_path)
      remove_files_from_manifest(manifest, files, output_path)
      :ok
    else
      {:error, :invalid_path}
    end
  end

  # Candidate files for deletion: every digested version that is neither
  # the latest nor within the keep/age policy.
  defp files_to_clean(manifest, max_age, keep) do
    latest = Map.values(manifest["latest"])
    digests = Map.drop(manifest["digests"], latest)

    for {_, versions} <- group_by_logical_path(digests),
        file <- versions_to_clean(versions, max_age, keep),
        do: file
  end

  # Sort versions newest-first and drop those older than max_age or beyond
  # the number of versions to keep.
  defp versions_to_clean(versions, max_age, keep) do
    versions
    |> Enum.map(fn {path, attrs} -> Map.put(attrs, "path", path) end)
    |> Enum.sort_by(&(&1["mtime"]), &>/2)
    |> Enum.with_index(1)
    |> Enum.filter(fn {version, index} ->
      max_age > version["mtime"] || index > keep
    end)
    |> Enum.map(fn {version, _index} -> version["path"] end)
  end

  defp group_by_logical_path(digests) do
    Enum.group_by(digests, fn {_, attrs} -> attrs["logical_path"] end)
  end

  # Best-effort removal of a file and its gzipped variant (File.rm/1 return
  # values are intentionally ignored).
  defp remove_files(files, output_path) do
    for file <- files do
      output_path
      |> Path.join(file)
      |> File.rm()

      output_path
      |> Path.join("#{file}.gz")
      |> File.rm()
    end
  end

  defp remove_files_from_manifest(manifest, files, output_path) do
    manifest
    |> Map.update!("digests", &Map.drop(&1, files))
    |> save_manifest(output_path)
  end
end
|
create_fun_umbrella/apps/create_fun_endpoint/lib/create_fun_endpoint/digester.ex
| 0.664105
| 0.438605
|
digester.ex
|
starcoder
|
defmodule Scenic.Primitive.Style.Theme do
  @moduledoc """
  The theme style is a way to bundle up default colors that are intended to be used by dynamic components invoked by a scene.
  There is a set of pre-defined themes.
  You can also pass in a map of theme values.
  Unlike other styles, these are a guide to the components.
  Each component gets to pick, choose, or ignore any colors in a given style.
  ## Main Predefined Themes
  * `:dark` - This is the default and most common. Use when the background is dark.
  * `:light` - Use when the background is light colored.
  ## Specialty Themes
  The remaining themes are designed to color the standard components and don't really
  make much sense when applied to the root of a graph. You could, but it would be...
  interesting.
  The most obvious place to use them is with [`Button`](Scenic.Component.Button.html)
  components.
  * `:primary` - Blue background. This is the primary button type indicator.
  * `:secondary` - Grey background. Not primary type indicator.
  * `:success` - Green background.
  * `:danger` - Red background. Use for irreversible or dangerous actions.
  * `:warning` - Orange background.
  * `:info` - Lightish blue background.
  * `:text` - Transparent background.
  """
  use Scenic.Primitive.Style

  alias Scenic.Primitive.Style.Paint.Color

  # Base palettes for light and dark backgrounds.
  @theme_light %{
    text: :black,
    background: :white,
    border: :dark_grey,
    active: {215, 215, 215},
    thumb: :cornflower_blue,
    focus: :blue
  }
  @theme_dark %{
    text: :white,
    background: :black,
    border: :light_grey,
    active: {40, 40, 40},
    thumb: :cornflower_blue,
    focus: :cornflower_blue
  }

  # specialty themes — each derives from a base palette with an overridden
  # background/active (and, for :text, foreground) color
  @primary Map.merge(@theme_dark, %{background: {72, 122, 252}, active: {58, 94, 201}})
  @secondary Map.merge(@theme_dark, %{background: {111, 117, 125}, active: {86, 90, 95}})
  @success Map.merge(@theme_dark, %{background: {99, 163, 74}, active: {74, 123, 56}})
  @danger Map.merge(@theme_dark, %{background: {191, 72, 71}, active: {164, 54, 51}})
  @warning Map.merge(@theme_light, %{background: {239, 196, 42}, active: {197, 160, 31}})
  @info Map.merge(@theme_dark, %{background: {94, 159, 183}, active: {70, 119, 138}})
  @text Map.merge(@theme_dark, %{text: {72, 122, 252}, background: :clear, active: :clear})

  @themes %{
    light: @theme_light,
    dark: @theme_dark,
    primary: @primary,
    secondary: @secondary,
    success: @success,
    danger: @danger,
    warning: @warning,
    info: @info,
    text: @text
  }

  # ============================================================================
  # data verification and serialization

  # --------------------------------------------------------
  @doc false
  def info(data),
    do: """
    #{IO.ANSI.red()}#{__MODULE__} data must be either a preset theme or a map of named colors
    #{IO.ANSI.yellow()}Received: #{inspect(data)}
    The predefined themes are:
    :dark, :light, :primary, :secondary, :success, :danger, :warning, :info, :text
    If you pass in a map of colors, the common ones used in the controls are:
    :text, :background, :border, :active, :thumb, :focus
    #{IO.ANSI.default_color()}
    """
  # FIX above: the error message read "data must either a preset theme";
  # corrected to "must be either a".

  # --------------------------------------------------------
  @doc false
  # An atom is valid only when it names a predefined theme; a map is valid
  # when every value is a recognizable color.
  def verify(name) when is_atom(name), do: Map.has_key?(@themes, name)

  def verify(custom) when is_map(custom) do
    Enum.all?(custom, fn {_, color} -> Color.verify(color) end)
  end

  def verify(_), do: false

  # --------------------------------------------------------
  @doc false
  # Resolves a preset name to its color map; custom maps pass through.
  # Note: an unknown atom normalizes to nil (verify/1 guards against this).
  def normalize(theme) when is_atom(theme), do: Map.get(@themes, theme)
  def normalize(theme) when is_map(theme), do: theme

  # --------------------------------------------------------
  @doc false
  def preset(theme), do: Map.get(@themes, theme)
end
|
lib/scenic/primitive/style/theme.ex
| 0.908191
| 0.500916
|
theme.ex
|
starcoder
|
defmodule Spex.Macros do
  @moduledoc """
  defines the following macros:
  - describe (xdescribe)
  - context (xcontext)
  - before
  - let
  - it (xit)
  it examples like:
      describe Calculator do
        let(:a), do: 1
        describe "#add" do
          let(:b), do: 1 + 1
          it "adds correctly" do
            assert a + b == 3
          end
        end
      end
  will become:
      test "Calculator #add adds correctly" do
        a = (fn ->
          1
        end).()
        b = (fn ->
          1 + 1
        end).()
        assert a + b == 3
      end
  """
  use Spex.Structure

  # Installs the DSL into the using module and registers an empty spec
  # structure for it in the bookkeeping agent.
  defmacro __using__(_opts) do
    quote do
      use Spex.Structure
      import Spex.Macros
      Spex.Macros.Agent.put_structure(__MODULE__, %Spex.Structure.Spec{})
    end
  end

  # Pushes `message` onto the describe stack, records the describe node in
  # the spec structure, evaluates the nested body in the caller's module,
  # then pops the stack. All bookkeeping happens at compile time via the
  # agent keyed on the caller's module.
  defmacro describe(message, do: body) do
    quote do
      stack = Spex.Macros.Agent.push_stack(__MODULE__, unquote(message))
      structure = Spex.Macros.Agent.get_structure(__MODULE__)
      structure_with_describe = add_describe(structure, Enum.reverse(stack))
      Spex.Macros.Agent.put_structure(__MODULE__, structure_with_describe)
      Module.eval_quoted(__MODULE__, unquote(body))
      Spex.Macros.Agent.pop_stack(__MODULE__)
    end
  end

  # Records a named `let` binding for the current describe scope. The body is
  # stored as its *source string* (Macro.to_string) rather than as an AST.
  defmacro let(name, do: body) do
    code = Macro.to_string(body)
    quote do
      reversed_stack = Enum.reverse(Spex.Macros.Agent.get_stack(__MODULE__))
      structure = Spex.Macros.Agent.get_structure(__MODULE__)
      structure_with_let = add_let(structure, reversed_stack, unquote(name), unquote(code))
      Spex.Macros.Agent.put_structure(__MODULE__, structure_with_let)
    end
  end

  # Demo/scratch macro: expands to one assignment per atom in `variables`,
  # binding each variable name to the length of its own name.
  defmacro initialize_to_char_count(variables) do
    Enum.map variables, fn(name) ->
      var = Macro.var(name, nil)
      length = name |> Atom.to_string |> String.length
      quote do
        unquote(var) = unquote(length)
      end
    end
  end

  # Demo for initialize_to_char_count/1 — returns [3, 5, 6].
  def run do
    initialize_to_char_count [:red, :green, :yellow]
    [red, green, yellow]
  end

  # Expands to `name = value` in the caller's scope.
  defmacro deflet(name, value) do
    var = Macro.var(name, nil)
    quote do
      unquote(var) = unquote(value)
    end
  end

  # Expands a keyword list into a sequence of deflet assignments.
  # NOTE(review): the IO.puts below is leftover debug output at macro
  # expansion time.
  defmacro deflets(lets) do
    Enum.map lets, fn({name, value}) ->
      IO.puts "wtf #{inspect name}, #{inspect value}"
      quote do
        deflet unquote(name), unquote(value)
      end
    end
  end

  # Demo for deflets/1 — returns [1, 2].
  def run2 do
    deflets [red: 1, blue: 2]
    # deflet :red, 1
    # deflet :blue, 2
    [red, blue]
  end

  # Generates an ExUnit `test` whose name is the joined describe stack plus
  # `message`. The lets for the current scope are collected but not yet
  # injected (see the commented-out deflets call below).
  # NOTE(review): the IO.puts calls are leftover debug output — the first
  # fires at macro expansion, the others at caller-compile and test run time.
  defmacro it(message, do: body) do
    IO.puts "COMPILE TIME"
    quote do
      reversed_stack = Enum.reverse(Spex.Macros.Agent.get_stack(__MODULE__))
      structure = Spex.Macros.Agent.get_structure(__MODULE__)
      lets = get_lets(structure, reversed_stack)
      lets_keywords = Enum.reduce lets, [], fn({_key, let}, list) ->
        list ++ [{let.name, let.body}]
      end
      IO.puts "WTF #{inspect lets_keywords}"
      full_message = Enum.join(reversed_stack ++ [unquote(message)], " ")
      test full_message do
        IO.puts "RUN TIME it #{__MODULE__}"
        # deflets unquote(lets_keywords)
        unquote(body)
      end
    end
  end

  # Like it/2 but tags the generated test as :pending so it is skipped.
  defmacro xit(message, do: body) do
    quote do
      @tag :pending
      it unquote(message), do: unquote(body)
    end
  end
end
|
lib/spex/macros.ex
| 0.662578
| 0.612759
|
macros.ex
|
starcoder
|
defmodule AWS.ECR do
@moduledoc """
Amazon Elastic Container Registry (Amazon ECR) is a managed Docker registry
service. Customers can use the familiar Docker CLI to push, pull, and
manage images. Amazon ECR provides a secure, scalable, and reliable
registry. Amazon ECR supports private Docker repositories with
resource-based permissions using IAM so that specific users or Amazon EC2
instances can access repositories and images. Developers can use the Docker
CLI to author and manage images.
"""
@doc """
Check the availability of multiple image layers in a specified registry and
repository.
<note> This operation is used by the Amazon ECR proxy, and it is not
intended for general use by customers for pulling and pushing images. In
most cases, you should use the `docker` CLI to pull, tag, and push images.
</note>
"""
def batch_check_layer_availability(client, input, options \\ []) do
request(client, "BatchCheckLayerAvailability", input, options)
end
@doc """
Deletes a list of specified images within a specified repository. Images
are specified with either `imageTag` or `imageDigest`.
You can remove a tag from an image by specifying the image's tag in your
request. When you remove the last tag from an image, the image is deleted
from your repository.
You can completely delete an image (and all of its tags) by specifying the
image's digest in your request.
"""
def batch_delete_image(client, input, options \\ []) do
request(client, "BatchDeleteImage", input, options)
end
@doc """
Gets detailed information for specified images within a specified
repository. Images are specified with either `imageTag` or `imageDigest`.
"""
def batch_get_image(client, input, options \\ []) do
request(client, "BatchGetImage", input, options)
end
@doc """
Informs Amazon ECR that the image layer upload has completed for a
specified registry, repository name, and upload ID. You can optionally
provide a `sha256` digest of the image layer for data validation purposes.
<note> This operation is used by the Amazon ECR proxy, and it is not
intended for general use by customers for pulling and pushing images. In
most cases, you should use the `docker` CLI to pull, tag, and push images.
</note>
"""
def complete_layer_upload(client, input, options \\ []) do
request(client, "CompleteLayerUpload", input, options)
end
@doc """
Creates an image repository.
"""
def create_repository(client, input, options \\ []) do
request(client, "CreateRepository", input, options)
end
@doc """
Deletes the specified lifecycle policy.
"""
def delete_lifecycle_policy(client, input, options \\ []) do
request(client, "DeleteLifecyclePolicy", input, options)
end
@doc """
Deletes an existing image repository. If a repository contains images, you
must use the `force` option to delete it.
"""
def delete_repository(client, input, options \\ []) do
request(client, "DeleteRepository", input, options)
end
@doc """
Removes the repository policy from the given repository.
"""
def delete_repository_policy(client, input, options \\ []),
  do: request(client, "DeleteRepositoryPolicy", input, options)
@doc """
Returns metadata about the images in a repository, such as image size,
image tags, and creation date.

<note> Beginning with Docker 1.9 the Docker client compresses image layers
before pushing them to a V2 Docker registry. `docker images` reports
uncompressed sizes, so it may show larger image sizes than those returned
by `DescribeImages`. </note>
"""
def describe_images(client, input, options \\ []),
  do: request(client, "DescribeImages", input, options)
@doc """
Returns details about the image repositories in a registry.
"""
def describe_repositories(client, input, options \\ []),
  do: request(client, "DescribeRepositories", input, options)
@doc """
Retrieves a token that is valid for 12 hours against the specified
registry (or the default registry when none is given), enabling the
`docker` CLI to push and pull images with Amazon ECR.

Each `authorizationToken` returned is a base64-encoded string that can be
decoded and used in a `docker login` command to authenticate to the
registry; the AWS CLI's `aws ecr get-login` command simplifies this flow.
"""
def get_authorization_token(client, input, options \\ []),
  do: request(client, "GetAuthorizationToken", input, options)
@doc """
Fetches the pre-signed Amazon S3 download URL for an image layer. URLs can
only be obtained for layers that are referenced in an image.

<note> This operation is used by the Amazon ECR proxy and is not intended
for general customer use; in most cases the `docker` CLI should be used to
pull, tag, and push images. </note>
"""
def get_download_url_for_layer(client, input, options \\ []),
  do: request(client, "GetDownloadUrlForLayer", input, options)
@doc """
Fetches the specified lifecycle policy.
"""
def get_lifecycle_policy(client, input, options \\ []),
  do: request(client, "GetLifecyclePolicy", input, options)
@doc """
Fetches the results of the specified lifecycle policy preview request.
"""
def get_lifecycle_policy_preview(client, input, options \\ []),
  do: request(client, "GetLifecyclePolicyPreview", input, options)
@doc """
Fetches the repository policy of the given repository.
"""
def get_repository_policy(client, input, options \\ []),
  do: request(client, "GetRepositoryPolicy", input, options)
@doc """
Notifies Amazon ECR of the intent to upload an image layer.

<note> This operation is used by the Amazon ECR proxy and is not intended
for general customer use; in most cases the `docker` CLI should be used to
pull, tag, and push images. </note>
"""
def initiate_layer_upload(client, input, options \\ []),
  do: request(client, "InitiateLayerUpload", input, options)
@doc """
Lists every image ID in a given repository.

Results can be filtered by tag state through the `tagStatus` parameter
(`TAGGED` or `UNTAGGED`). For example, you can request only `UNTAGGED`
images and pipe that result into a `BatchDeleteImage` operation to delete
them, or request only `TAGGED` images to enumerate all of the tags in the
repository.
"""
def list_images(client, input, options \\ []),
  do: request(client, "ListImages", input, options)
@doc """
Returns the tags attached to an Amazon ECR resource.
"""
def list_tags_for_resource(client, input, options \\ []),
  do: request(client, "ListTagsForResource", input, options)
@doc """
Creates or updates the image manifest and tags associated with an image.

<note> This operation is used by the Amazon ECR proxy and is not intended
for general customer use; in most cases the `docker` CLI should be used to
pull, tag, and push images. </note>
"""
def put_image(client, input, options \\ []),
  do: request(client, "PutImage", input, options)
@doc """
Creates or updates a lifecycle policy. For the policy syntax, see
[Lifecycle Policy
Template](http://docs.aws.amazon.com/AmazonECR/latest/userguide/LifecyclePolicies.html).
"""
def put_lifecycle_policy(client, input, options \\ []),
  do: request(client, "PutLifecyclePolicy", input, options)
@doc """
Applies a repository policy to the given repository to control access
permissions.
"""
def set_repository_policy(client, input, options \\ []),
  do: request(client, "SetRepositoryPolicy", input, options)
@doc """
Begins a dry-run preview of the specified lifecycle policy, so its effects
can be inspected before the policy is actually created.
"""
def start_lifecycle_policy_preview(client, input, options \\ []),
  do: request(client, "StartLifecyclePolicyPreview", input, options)
@doc """
Attaches the given tags to the resource identified by the specified ARN.
Tags already present on the resource but absent from the request
parameters are left unchanged.
"""
def tag_resource(client, input, options \\ []),
  do: request(client, "TagResource", input, options)
@doc """
Removes the given tags from a resource.
"""
def untag_resource(client, input, options \\ []),
  do: request(client, "UntagResource", input, options)
@doc """
Uploads a single part of an image layer to Amazon ECR.

<note> This operation is used by the Amazon ECR proxy and is not intended
for general customer use; in most cases the `docker` CLI should be used to
pull, tag, and push images. </note>
"""
def upload_layer_part(client, input, options \\ []),
  do: request(client, "UploadLayerPart", input, options)
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, Poison.Response.t} |
{:error, Poison.Parser.t} |
{:error, HTTPoison.Error.t}
# Signs and POSTs an AWS JSON-1.1 request for the given ECR `action`.
#
# `client` is the AWS client map (region, endpoint, credentials); `input`
# is JSON-encoded as the request payload; `options` is forwarded to
# `HTTPoison.post/4` as HTTP options.
#
# Returns:
#   * `{:ok, nil, response}` — HTTP 200 with an empty body
#   * `{:ok, parsed_body, response}` — HTTP 200 with a JSON body
#   * `{:error, {exception, message}}` — non-200 AWS error, decoded from
#     the JSON body's "__type" / "message" fields
#   * `{:error, %HTTPoison.Error{}}` — transport-level failure
defp request(client, action, input, options) do
# Stamp the service name used for SigV4 credential scoping.
client = %{client | service: "api.ecr"}
host = get_host("api.ecr", client)
url = get_url(host, client)
# The X-Amz-Target header selects the ECR API operation to invoke.
headers = [{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "AmazonEC2ContainerRegistry_V20150921.#{action}"}]
payload = Poison.Encoder.encode(input, [])
# Signing must happen after the payload is final: the signature covers it.
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
# Any non-200 status: AWS encodes the error type/message in the body.
error = Poison.Parser.parse!(body)
exception = error["__type"]
message = error["message"]
{:error, {exception, message}}
{:error, %HTTPoison.Error{reason: reason}} ->
# NOTE(review): rebuilds the error struct with only `reason`, so any
# other fields (e.g. `id`) are dropped — presumably intentional.
{:error, %HTTPoison.Error{reason: reason}}
end
end
# Resolves the hostname for the service endpoint. A client region of
# "local" maps to localhost (presumably for testing against a local
# stack); otherwise the standard `prefix.region.endpoint` form is built.
defp get_host(endpoint_prefix, client) do
  case client.region do
    "local" -> "localhost"
    region -> "#{endpoint_prefix}.#{region}.#{client.endpoint}"
  end
end
# Builds the base request URL from the client's protocol and port.
defp get_url(host, %{proto: scheme, port: port}), do: "#{scheme}://#{host}:#{port}/"
end
|
lib/aws/ecr.ex
| 0.886494
| 0.57517
|
ecr.ex
|
starcoder
|
defmodule Kitt do
@moduledoc """
Provides encoding and decoding functionality
for UPER-encoded DSRC message payloads defined
by the J2735 standard published by the Society for
Automotive Engineers.
Kitt uses Erlang's native ASN1 compiler to parse
the raw binary of the DSRC messages and then converts
them to Elixir structs for language-native enforcement
of required message fields, interactions with message
data frames/elements, and more flexible conversion to
common interchange formats such as JSON, Protobuf, Avro, etc.
"""
@doc """
Encodes a message struct wrapped in a message frame, tagged with
the message type identifying integer as the second byte of the message.
Defaults the format of the encoded message to a hexadecimal string. Optionally
specify the desired output format with the option `format: :hex | :binary`.
Returns the encoded message as `{:ok, encoded_message}` or `{:error, reason}`
## Example
iex> Kitt.encode(%Kitt.Message.BSM{
...> coreData: %Kitt.Message.BSM.CoreData{
...> accelSet: %{lat: 2001, long: 2001, vert: -127, yaw: 0},
...> accuracy: %{orientation: 65535, semiMajor: 255, semiMinor: 255},
...> angle: 127,
...> brakes: %{
...> abs: :unavailable,
...> auxBrakes: :unavailable,
...> brakeBoost: :unavailable,
...> scs: :unavailable,
...> traction: :unavailable,
...> wheelBrakes: [:unavailable]
...> },
...> elev: 1260,
...> heading: 4800,
...> id: <<0, 0, 2, 89>>,
...> lat: 374_230_638,
...> long: -1_221_420_467,
...> msgCnt: 1,
...> secMark: 43476,
...> size: %{length: 1200, width: 300},
...> speed: 486,
...> transmission: :unavailable
...> },
...> partII: nil,
...> regional: nil
...> })
{:ok,
"00142500400000966A7525F99BB7113E36260A767FFFFFFFF0F312C0FDFA1FA1007FFF8000962580"}
"""
defdelegate encode(message, opts \\ []), to: Kitt.Encoder, as: :encode_frame
@doc """
The same as `encode/2` but returns the encoded message directly,
else raises a `DSRCEncodeError`
## Example
iex> Kitt.encode!(%Kitt.Message.BSM{
...> coreData: %Kitt.Message.BSM.CoreData{
...> accelSet: %{lat: 2001, long: 2001, vert: -127, yaw: 0},
...> accuracy: %{orientation: 65535, semiMajor: 255, semiMinor: 255},
...> angle: 127,
...> brakes: %{
...> abs: :unavailable,
...> auxBrakes: :unavailable,
...> brakeBoost: :unavailable,
...> scs: :unavailable,
...> traction: :unavailable,
...> wheelBrakes: [:unavailable]
...> },
...> elev: 1260,
...> heading: 4800,
...> id: <<0, 0, 2, 89>>,
...> lat: 374_230_638,
...> long: -1_221_420_467,
...> msgCnt: 1,
...> secMark: 43476,
...> size: %{length: 1200, width: 300},
...> speed: 486,
...> transmission: :unavailable
...> },
...> partII: nil,
...> regional: nil
...> }, format: :binary)
<<0, 20, 37, 0, 64, 0, 0, 150, 106, 117, 37, 249, 155, 183, 17, 62, 54, 38, 10,
118, 127, 255, 255, 255, 240, 243, 18, 192, 253, 250, 31, 161, 0, 127, 255,
128, 0, 150, 37, 128>>
"""
defdelegate encode!(message, opts \\ []), to: Kitt.Encoder, as: :encode_frame!
@doc """
Decodes a message wrapped in a message frame, tagged with the message type
identifying integer as the second byte of the message, to a Kitt Elixir
struct representing the message content.
Defaults the expected input format of the message to a hexadecimal string.
Optionally specify the input format with the option `format: :hex | :binary`.
Returns the decoded message as `{:ok, %Struct{}}` or `{:error, reason}`
## Example
iex> Kitt.decode(<<0, 29, 45, 115, 204, 33, 222, 6, 2, 3, 148, 0, 0, 15, 172, 20, 128, 128, 60,
...> 243, 8, 136, 219, 128, 250, 12, 0, 0, 0, 18, 200, 72, 25, 169, 126, 102, 237,
...> 196, 79, 141, 137, 130, 157, 132, 176, 56, 121, 128>>, format: :binary)
{:ok,
%Kitt.Message.SRM{
regional: nil,
requestor: %{
id: {:entityID, 601},
position: %{
heading: 4800,
position: %{elevation: 1260, lat: 374230638, long: -1221420467},
speed: %{speed: 486, transmisson: :unavailable}
},
type: %{hpmsType: :bus, role: :transit}
},
requests: [
%{
duration: 2000,
minute: 497732,
request: %{
id: %{id: 1003, region: 0},
inBoundLane: {:lane, 8},
outBoundLane: {:lane, 30},
requestID: 5,
requestType: :priorityRequest
},
second: 18140
}
],
second: 48140,
sequenceNumber: 2,
timeStamp: 497731
}}
"""
defdelegate decode(binary, opts \\ []), to: Kitt.Encoder, as: :decode_frame
@doc """
The same as `decode/2` but returns the decoded message directly,
else raises a `DSRCDecodeError`
## Example
iex> Kitt.decode!("001D2D73CC21DE0602039400000FAC1480803CF30888DB80FA0C00000012C84819A97E66EDC44F8D89829D84B0387980")
%Kitt.Message.SRM{
regional: nil,
requestor: %{
id: {:entityID, 601},
position: %{
heading: 4800,
position: %{elevation: 1260, lat: 374230638, long: -1221420467},
speed: %{speed: 486, transmisson: :unavailable}
},
type: %{hpmsType: :bus, role: :transit}
},
requests: [
%{
duration: 2000,
minute: 497732,
request: %{
id: %{id: 1003, region: 0},
inBoundLane: {:lane, 8},
outBoundLane: {:lane, 30},
requestID: 5,
requestType: :priorityRequest
},
second: 18140
}
],
second: 48140,
sequenceNumber: 2,
timeStamp: 497731
}
"""
defdelegate decode!(binary, opts \\ []), to: Kitt.Encoder, as: :decode_frame!
# Raised by `encode!/2` when encoding fails.
defmodule(DSRCEncodeError, do: defexception([:message]))
# Raised by `decode!/2` when decoding fails.
defmodule(DSRCDecodeError, do: defexception([:message]))
end
|
lib/kitt.ex
| 0.91012
| 0.444806
|
kitt.ex
|
starcoder
|
defmodule RigInboundGateway.RateLimit do
@moduledoc """
Allow only a certain amount of requests per second per endpoint (per IP).
For synchronizing the corresponding state between the short-lived request
processes, an ETS table is used for optimal performance.
"""
use Rig.Config,
[:table_name, :enabled?, :per_ip?, :avg_rate_per_sec, :burst_size, :sweep_interval_ms]
import Ex2ms
import RigInboundGateway.RateLimit.Common, only: [now_unix: 0, ensure_table: 1]
@doc """
Request passage to a specific endpoint, from a given IP.
Depending on the granularity, the endpoint might be the hostname of the
target host, or the target socket, e.g., hostname <> ":" <> port.
Calling this function always causes a _side effect_: internally, the request
is recorded. Depending on previous calls, the function returns either :ok
or :passage_denied. The latter means that the rate limit for the given
endpoint (and source IP if per_ip? is true) has been reached, which means
that the request in question should be blocked.
"""
@spec request_passage(String.t, String.t | nil, %{} | []) :: :ok | :passage_denied
def request_passage(endpoint, ip \\ nil, opts \\ []) do
# `opts` overrides the application config; merged into a single map.
do_request_passage(endpoint, ip, Enum.into(opts, config()))
end
# Rate limiting disabled: every request passes.
defp do_request_passage(_endpoint, _ip, %{enabled?: false}) do
:ok
end
# Per-IP tracking disabled: one shared bucket per endpoint.
defp do_request_passage(endpoint, _ip, %{per_ip?: false} = opts) do
make_request(_key = endpoint, opts)
end
# No IP supplied: fall back to the per-endpoint bucket.
defp do_request_passage(endpoint, nil, opts) do
make_request(_key = endpoint, opts)
end
# Per-IP tracking: one bucket per endpoint+IP combination.
defp do_request_passage(endpoint, ip, opts) do
make_request(_key = endpoint <> "_" <> ip, opts)
end
# Token-bucket check for `key`: tokens refill at `avg_rate_per_sec`, capped
# at `burst_size`. Consumes one token if available and returns :ok,
# otherwise :passage_denied.
defp make_request(key, %{avg_rate_per_sec: avg_rate_per_sec,
burst_size: burst_size, table_name: tab} = opts) do
# :current_unix_time may be injected via opts (presumably for tests).
now = Map.get(opts, :current_unix_time, now_unix())
# Make sure the ets table exists:
ensure_table(tab)
# Make sure a record for this key is present:
:ets.insert_new tab, {key, _tokens = burst_size, _last_used = now}
# Update record and check if successful:
# The match spec only fires when the refilled token count is >= 1, in
# which case it atomically deducts one token and stamps the time.
consume_token_matchspec =
fun do {^key, n_tokens, last_used}
when n_tokens + (^now - last_used) * ^avg_rate_per_sec >= 1
->
{^key, n_tokens + (^now - last_used) * ^avg_rate_per_sec - 1, ^now}
end
# select_replace returns the number of replaced rows: 1 = token consumed.
result = case :ets.select_replace(tab, consume_token_matchspec) do
0 -> :passage_denied
1 -> :ok
end
# It's not possible to use `min` in a match_spec, so we're cleaning up now:
cap_to_burst_size_matchspec =
fun do {^key, n_tokens, last_used} when n_tokens > ^burst_size ->
{^key, ^burst_size, last_used}
end
:ets.select_replace(tab, cap_to_burst_size_matchspec)
# Return :ok | :passage_denied
result
end
end
|
apps/rig_inbound_gateway/lib/rig_inbound_gateway/rate_limit.ex
| 0.747984
| 0.563768
|
rate_limit.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.