code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule Membrane.VideoCutAndMerge do
  @moduledoc """
  Membrane Bin that cuts and merges multiple raw videos into one.

  The bin expects each frame to be received in a separate buffer, so the parser
  (`Membrane.Element.RawVideo.Parser`) may be required in a pipeline before
  the merger bin (e.g. when input is read from `Membrane.File.Source`).

  The element expects to receive frames in order from each input.

  The bin consists of a single `Membrane.VideoMerger` and multiple
  `Membrane.VideoCutter`. Number of elements is constant: cutters are
  created at initialization, one for each stream.
  """
  use Membrane.Bin

  alias __MODULE__.Stream
  alias Membrane.Caps.Video.Raw
  alias Membrane.{Pad, ParentSpec, VideoCutter, VideoMerger}

  # Dynamic input pad: one instance is requested per stream; each pad carries
  # a `:stream` option describing which intervals to keep.
  def_input_pad :input,
    caps: {Raw, aligned: true},
    demand_unit: :buffers,
    availability: :on_request,
    options: [
      stream: [
        spec: Stream.t(),
        description: "A stream to cut and merge"
      ]
    ]

  def_output_pad :output,
    caps: {Raw, aligned: true},
    demand_unit: :buffers

  defmodule Stream do
    @moduledoc """
    Structure describing a video stream to merge by `Membrane.VideoCutAndMerge`.

    ## Fields
    - `:intervals` - List of intervals of timestamps that are supposed to be
      cut and kept from the stream.
    - `:offset` - Offset applied to all frames' presentation timestamp values.
    """
    alias Membrane.{Pad, Time}

    @enforce_keys [:intervals]
    defstruct @enforce_keys ++ [offset: 0]

    @type t :: %__MODULE__{
            intervals: [{Time.t(), Time.t() | :infinity}],
            offset: Time.t()
          }
  end

  @impl true
  def handle_init(_opts) do
    # Only the shared merger exists up-front; cutters are created lazily, one
    # per requested input pad (see handle_pad_added/3).
    children = [{:merger, VideoMerger}]
    links = [link(:merger) |> to_bin_output]
    spec = %ParentSpec{children: children, links: links}
    {{:ok, spec: spec}, nil}
  end

  @impl true
  def handle_pad_added({_pad, :input, id} = pad_ref, ctx, state) do
    # Spawn a VideoCutter configured from the pad's `:stream` option and wire:
    # bin input -> cutter -> merger (merger pad keyed by the same id).
    %Pad.Data{options: %{stream: stream}} = ctx.pads[pad_ref]
    cutter = {id, %VideoCutter{intervals: stream.intervals, offset: stream.offset}}

    link =
      link_bin_input(Pad.ref(:input, id))
      |> to(id)
      |> via_in(Pad.ref(:input, id))
      |> to(:merger)

    {{:ok, spec: %ParentSpec{children: [cutter], links: [link]}}, state}
  end
end
|
lib/video_cut_and_merge.ex
| 0.885223
| 0.600569
|
video_cut_and_merge.ex
|
starcoder
|
defmodule NaturalOrder do
  @moduledoc """
  A utility to compare strings in [natural sort order](https://en.wikipedia.org/wiki/Natural_sort_order).

  Natural sort order compares embedded digit runs by numeric value rather
  than character by character, which matches what humans expect.

  ## Examples of comparing two strings

      iex> NaturalOrder.compare("String2", "String11")
      :lt

      iex> NaturalOrder.compare("String11", "String2")
      :gt

      iex> NaturalOrder.compare("string", "STRING")
      :gt

      iex> NaturalOrder.compare("string", "string")
      :eq

  ## Examples with sorting

      iex> Enum.sort(["String2", "String11", "String3"], NaturalOrder)
      ["String2", "String3", "String11"]

      iex> Enum.sort(["String2", "String11", "String3"], {:asc, NaturalOrder})
      ["String2", "String3", "String11"]

      iex> Enum.sort(["String2", "String11", "String3"], {:desc, NaturalOrder})
      ["String11", "String3", "String2"]
  """

  @doc """
  Compares two strings in natural sort order, returning `:lt`, `:eq` or `:gt`.

  ## Examples

      iex> NaturalOrder.compare("String2", "String11")
      :lt

      iex> NaturalOrder.compare("String11", "String2")
      :gt

      iex> NaturalOrder.compare("string", "STRING")
      :gt

      iex> NaturalOrder.compare("string", "string")
      :eq
  """
  @spec compare(String.t(), String.t()) :: :lt | :eq | :gt
  def compare(string1, string2) when is_binary(string1) and is_binary(string2) do
    compare_formatted(format(string1), format(string2))
  end

  # Walk both tokenised strings in lockstep; the first differing token decides.
  # A shorter string that is a prefix of the other sorts first.
  defp compare_formatted([], []), do: :eq
  defp compare_formatted([], [_ | _]), do: :lt
  defp compare_formatted([_ | _], []), do: :gt

  defp compare_formatted([token1 | rest1], [token2 | rest2])
       when is_tuple(token1) and is_tuple(token2) do
    case compare_formatted_tuple(token1, token2) do
      :eq -> compare_formatted(rest1, rest2)
      result -> result
    end
  end

  # Relies on Erlang term ordering: integers sort before binaries, so numeric
  # tokens come before alphabetic ones; the second tuple element (the raw
  # token) breaks ties between case-insensitively equal tokens.
  defp compare_formatted_tuple(token, token), do: :eq
  defp compare_formatted_tuple(token1, token2) when token1 <= token2, do: :lt
  defp compare_formatted_tuple(_token1, _token2), do: :gt

  # Tokenise a string into comparable {sort_key, original_token} tuples.
  defp format(string) do
    string |> split() |> normalize()
  end

  # Split into runs of digits, runs of letters, and everything in between.
  defp split(string) do
    Regex.split(~r/([0-9]+)|(\p{L}+)/u, string, include_captures: true, trim: true)
  end

  defp normalize(tokens) when is_list(tokens) do
    Enum.map(tokens, &normalize_string/1)
  end

  # Digit runs become {integer, original}; all other tokens become
  # {downcased-ascii, original}.
  defp normalize_string(token) do
    case to_integer(token) do
      {:integer_converted, integer} -> {integer, token}
      {:kept, kept} -> {kept |> to_ascii() |> downcase(), kept}
    end
  end

  defp to_integer(<<char, _rest::binary>> = string) when char in ?0..?9,
    do: {:integer_converted, String.to_integer(string)}

  defp to_integer(string), do: {:kept, string}

  defp downcase(string) when is_binary(string), do: String.downcase(string)
  defp downcase(string), do: string

  # Strip diacritics: canonically decompose, then drop non-word characters
  # (which removes the combining marks).
  defp to_ascii(string) do
    String.normalize(string, :nfd) |> String.replace(~r/\W/u, "")
  end
end
|
lib/natural_order.ex
| 0.840341
| 0.621842
|
natural_order.ex
|
starcoder
|
defmodule Plaid.Institutions do
  @moduledoc """
  Functions for Plaid `institutions` endpoint.
  """

  import Plaid, only: [make_request_with_cred: 4, validate_cred: 1, validate_public_key: 1]

  alias Plaid.Utils

  @derive Jason.Encoder
  # Top-level response for paginated institution listings.
  defstruct institutions: [], request_id: nil, total: nil

  @type t :: %__MODULE__{
          institutions: [Plaid.Institutions.Institution.t()],
          request_id: String.t(),
          total: integer
        }
  @type params :: %{required(atom) => integer | String.t() | list | map}
  @type config :: %{required(atom) => String.t()}

  # Path segment shared by all requests in this module ("institutions/...").
  @endpoint :institutions

  defmodule Institution do
    @moduledoc """
    Plaid Institution data structure.
    """
    @derive Jason.Encoder
    defstruct country_codes: [],
              credentials: [],
              has_mfa: nil,
              input_spec: nil,
              institution_id: nil,
              logo: nil,
              mfa: [],
              mfa_code_type: nil,
              name: nil,
              oauth: nil,
              primary_color: nil,
              products: [],
              request_id: nil,
              routing_numbers: [],
              status: nil,
              url: nil

    @type t :: %__MODULE__{
            country_codes: [String.t()],
            credentials: [Plaid.Institutions.Institution.Credentials.t()],
            has_mfa: false | true,
            input_spec: String.t(),
            institution_id: String.t(),
            logo: String.t(),
            mfa: [String.t()],
            mfa_code_type: String.t(),
            name: String.t(),
            oauth: boolean(),
            primary_color: String.t(),
            products: [String.t()],
            request_id: String.t(),
            routing_numbers: [String.t()],
            status: Plaid.Institutions.Institution.Status.t(),
            url: String.t()
          }

    defmodule Credentials do
      @moduledoc """
      Plaid Institution Credentials data structure.
      """
      @derive Jason.Encoder
      defstruct label: nil, name: nil, type: nil
      @type t :: %__MODULE__{label: String.t(), name: String.t(), type: String.t()}
    end

    defmodule Status do
      @moduledoc """
      Plaid Institution Status data structure.
      """
      @derive Jason.Encoder
      # One sub-struct per health metric; each shares the same
      # status/last_status_change/breakdown shape.
      defstruct item_logins: nil,
                transactions_updates: nil,
                auth: nil,
                balance: nil,
                identity: nil

      @type t :: %__MODULE__{
              item_logins: Plaid.Institutions.Institution.Status.ItemLogins.t(),
              transactions_updates: Plaid.Institutions.Institution.Status.TransactionsUpdates.t(),
              auth: Plaid.Institutions.Institution.Status.Auth.t(),
              balance: Plaid.Institutions.Institution.Status.Balance.t(),
              identity: Plaid.Institutions.Institution.Status.Identity.t()
            }

      defmodule ItemLogins do
        @moduledoc """
        Plaid Institution Item Logins Status data structure.
        """
        @derive Jason.Encoder
        defstruct status: nil, last_status_change: nil, breakdown: nil

        @type t :: %__MODULE__{
                status: String.t(),
                last_status_change: String.t(),
                breakdown: Plaid.Institutions.Institution.Status.ItemLogins.Breakdown.t()
              }

        defmodule Breakdown do
          @moduledoc """
          Plaid Institution Item Logins Breakdown Status data structure.
          """
          @derive Jason.Encoder
          defstruct success: nil, error_plaid: nil, error_institution: nil

          @type t :: %__MODULE__{
                  success: number(),
                  error_plaid: number(),
                  error_institution: number()
                }
        end
      end

      defmodule TransactionsUpdates do
        @moduledoc """
        Plaid Institution Transactions Updates Status data structure.
        """
        @derive Jason.Encoder
        defstruct status: nil, last_status_change: nil, breakdown: nil

        @type t :: %__MODULE__{
                status: String.t(),
                last_status_change: String.t(),
                breakdown: Plaid.Institutions.Institution.Status.TransactionsUpdates.Breakdown.t()
              }

        defmodule Breakdown do
          @moduledoc """
          Plaid Institution Transaction Updates Breakdown Status data structure.
          """
          @derive Jason.Encoder
          defstruct refresh_interval: nil,
                    success: nil,
                    error_plaid: nil,
                    error_institution: nil

          @type t :: %__MODULE__{
                  refresh_interval: String.t(),
                  success: number(),
                  error_plaid: number(),
                  error_institution: number()
                }
        end
      end

      defmodule Auth do
        @moduledoc """
        Plaid Institution Auth Status data structure.
        """
        @derive Jason.Encoder
        defstruct status: nil, last_status_change: nil, breakdown: nil

        @type t :: %__MODULE__{
                status: String.t(),
                last_status_change: String.t(),
                breakdown: Plaid.Institutions.Institution.Status.Auth.Breakdown.t()
              }

        defmodule Breakdown do
          @moduledoc """
          Plaid Institution Auth Breakdown Status data structure.
          """
          @derive Jason.Encoder
          defstruct success: nil, error_plaid: nil, error_institution: nil

          @type t :: %__MODULE__{
                  success: number(),
                  error_plaid: number(),
                  error_institution: number()
                }
        end
      end

      defmodule Balance do
        @moduledoc """
        Plaid Institution Balance Status data structure.
        """
        @derive Jason.Encoder
        defstruct status: nil, last_status_change: nil, breakdown: nil

        @type t :: %__MODULE__{
                status: String.t(),
                last_status_change: String.t(),
                breakdown: Plaid.Institutions.Institution.Status.Balance.Breakdown.t()
              }

        defmodule Breakdown do
          @moduledoc """
          Plaid Institution Balance Breakdown Status data structure.
          """
          @derive Jason.Encoder
          defstruct success: nil, error_plaid: nil, error_institution: nil

          @type t :: %__MODULE__{
                  success: number(),
                  error_plaid: number(),
                  error_institution: number()
                }
        end
      end

      defmodule Identity do
        @moduledoc """
        Plaid Institution Identity Status data structure.
        """
        @derive Jason.Encoder
        defstruct status: nil, last_status_change: nil, breakdown: nil

        @type t :: %__MODULE__{
                status: String.t(),
                last_status_change: String.t(),
                breakdown: Plaid.Institutions.Institution.Status.Identity.Breakdown.t()
              }

        defmodule Breakdown do
          @moduledoc """
          Plaid Institution Identity Breakdown Status data structure.
          """
          @derive Jason.Encoder
          defstruct success: nil, error_plaid: nil, error_institution: nil

          @type t :: %__MODULE__{
                  success: number(),
                  error_plaid: number(),
                  error_institution: number()
                }
        end
      end
    end
  end

  @doc """
  Gets all institutions. Results paginated.

  Parameters
  ```
  %{count: 50, offset: 0}
  ```
  """
  @spec get(params, config | nil) :: {:ok, Plaid.Institutions.t()} | {:error, Plaid.Error.t()}
  def get(params, config \\ %{}) do
    # Requires full client credentials (client_id/secret).
    config = validate_cred(config)
    endpoint = "#{@endpoint}/get"

    make_request_with_cred(:post, endpoint, config, params)
    |> Utils.handle_resp(@endpoint)
  end

  @doc """
  Gets an institution by id.

  Parameters
  ```
  "ins_109512"
  OR
  %{institution_id: "ins_109512", options: %{include_optional_metadata: true, include_status: false}}
  ```
  """
  @spec get_by_id(String.t() | params, config | nil) ::
          {:ok, Plaid.Institutions.Institution.t()} | {:error, Plaid.Error.t()}
  def get_by_id(params, config \\ %{}) do
    # Authenticated with the public key (see validate_public_key/1), unlike get/2.
    config = validate_public_key(config)
    # Accept either a bare institution id or a full params map.
    params = if is_binary(params), do: %{institution_id: params}, else: params
    endpoint = "#{@endpoint}/get_by_id"

    make_request_with_cred(:post, endpoint, config, params)
    |> Utils.handle_resp(:institution)
  end

  @doc """
  Searches institutions by name and product.

  Parameters
  ```
  %{query: "Wells", products: ["transactions"], options: %{limit: 40, include_display_data: true}}
  ```
  """
  @spec search(params, config | nil) :: {:ok, Plaid.Institutions.t()} | {:error, Plaid.Error.t()}
  def search(params, config \\ %{}) do
    config = validate_public_key(config)
    endpoint = "#{@endpoint}/search"

    make_request_with_cred(:post, endpoint, config, params)
    |> Utils.handle_resp(@endpoint)
  end
end
|
lib/plaid/institutions.ex
| 0.816004
| 0.60964
|
institutions.ex
|
starcoder
|
defmodule ReWeb.Types.Image do
  @moduledoc """
  GraphQL types for images
  """
  use Absinthe.Schema.Notation

  alias ReWeb.Resolvers

  # Public representation of a stored image.
  object :image do
    field :id, :id
    field :filename, :string
    field :position, :integer
    field :is_active, :boolean
    field :description, :string
    field :category, :string
  end

  # Images may hang off either a listing or a development.
  enum :image_parent_type, values: ~w(listing development)a

  input_object :image_insert_input do
    # Either parent_uuid+parent_type or listing_id identifies the parent;
    # only :filename is mandatory.
    field :parent_uuid, :uuid
    field :parent_type, :image_parent_type
    field :listing_id, :id
    field :filename, non_null(:string)
    field :is_active, :boolean
    field :description, :string
    field :category, :string
  end

  input_object :image_update_input do
    field :id, non_null(:id)
    field :position, :integer
    field :description, :string
    field :category, :string
  end

  input_object :image_deactivate_input do
    field :image_ids, non_null(list_of(non_null(:id)))
  end

  # Single-image mutation result, including the parent for cache updates.
  object :image_output do
    field :image, :image
    field :parent_listing, :listing
    field :parent, :image_parent
  end

  # Batch mutation result.
  object :images_output do
    field :images, list_of(:image)
    field :parent_listing, :listing
    field :parent, :image_parent
  end

  # Resolves the concrete parent type from the struct returned by resolvers.
  union :image_parent do
    types([:development, :listing])

    resolve_type(fn
      %Re.Development{}, _ -> :development
      %Re.Listing{}, _ -> :listing
    end)
  end

  object :image_mutations do
    @desc "Insert image"
    field :insert_image, type: :image_output do
      arg :input, non_null(:image_insert_input)
      resolve &Resolvers.Images.insert_image/2
    end

    @desc "Update images"
    field :update_images, type: :images_output do
      arg :input, non_null(list_of(non_null(:image_update_input)))
      resolve &Resolvers.Images.update_images/2
    end

    @desc "Deactivate images"
    field :images_deactivate, type: :images_output do
      arg :input, non_null(:image_deactivate_input)
      resolve &Resolvers.Images.deactivate_images/2
    end
  end

  object :image_subscriptions do
    @desc "Subscribe to image deactivation"
    field :images_deactivated, :images_output do
      arg :listing_id, non_null(:id)
      config &Resolvers.Images.images_deactivated_config/2
      # Re-fires whenever the corresponding mutation completes.
      trigger :images_deactivate, topic: &Resolvers.Images.images_deactivate_trigger/1
    end

    @desc "Subscribe to image update"
    field :images_updated, :images_output do
      arg :listing_id, non_null(:id)
      config &Resolvers.Images.images_updated_config/2
      trigger :update_images, topic: &Resolvers.Images.update_images_trigger/1
    end

    @desc "Subscribe to image insertion"
    field :image_inserted, :image_output do
      # Either a listing id or a development uuid scopes the subscription.
      arg :listing_id, :id
      arg :development_uuid, :uuid
      config &Resolvers.Images.image_inserted_config/2
      trigger :insert_image, topic: &Resolvers.Images.insert_image_trigger/1
    end
  end
end
|
apps/re_web/lib/graphql/types/image.ex
| 0.622804
| 0.465387
|
image.ex
|
starcoder
|
defmodule Esquew.Subscription do
  use GenServer

  @moduledoc """
  GenServer holding the state of a single subscription: the topic it belongs
  to, the subscription name, and the queue of pending messages.

  Unacknowledged messages handed out by `read/3` are parked in a public ETS
  table and re-published after a 20s timeout unless `ack/3` removes them.
  """

  @registry Esquew.Registry

  defmodule SubscriptionState do
    @moduledoc """
    Struct representing Subscription state
    """
    @enforce_keys [:topic, :subscription]
    defstruct topic: "", subscription: "", messages: []

    @type t :: %__MODULE__{
            topic: String.t(),
            subscription: String.t(),
            messages: [String.t()]
          }
  end

  ## api

  # FIX: spec previously claimed `:ok`; GenServer.start_link/3 returns
  # `{:ok, pid} | {:error, _} | :ignore` (GenServer.on_start).
  @spec start_link({String.t(), String.t()}) :: GenServer.on_start()
  def start_link({topic, subscription}) do
    # NOTE(review): registering under a dynamically built atom leaks atoms if
    # topic/subscription names come from unbounded external input — confirm
    # they are operator-controlled.
    GenServer.start_link(__MODULE__, {topic, subscription},
      name: build_name_atom(topic, subscription)
    )
  end

  @doc """
  Reads up to `num` pending messages as `{ref, message}` pairs; each ref must
  later be passed to `ack/3` or `nack/3`.
  """
  # FIX: spec previously omitted the {:ok, _} wrapper and the error branch,
  # and used the bare alias `Integer` as a type.
  @spec read(String.t(), String.t(), pos_integer()) ::
          {:ok, [{String.t(), String.t()}]} | {:error, String.t()}
  def read(topic, name, num \\ 1) do
    case lookup_subscription(topic, name) do
      {:ok, pid} ->
        {:ok, GenServer.call(pid, {:read, num})}

      resp ->
        resp
    end
  end

  # FIX: refs are Base64-encoded strings (see handle_call {:read, _}), not
  # Erlang reference()s, and the lookup error branch is now reflected.
  @spec ack(String.t(), String.t(), String.t()) :: :ok | {:error, String.t()}
  def ack(topic, name, ref) do
    case lookup_subscription(topic, name) do
      {:ok, pid} ->
        GenServer.cast(pid, {:ack, ref})

      resp ->
        resp
    end
  end

  @spec nack(String.t(), String.t(), String.t()) :: :ok | {:error, String.t()}
  def nack(topic, name, ref) do
    case lookup_subscription(topic, name) do
      {:ok, pid} ->
        GenServer.cast(pid, {:nack, ref})

      resp ->
        resp
    end
  end

  @spec publish(String.t(), String.t(), String.t()) :: :ok | {:error, String.t()}
  def publish(topic, name, message) do
    case lookup_subscription(topic, name) do
      {:ok, pid} ->
        GenServer.cast(pid, {:publish, message})

      resp ->
        resp
    end
  end

  ## private

  @spec lookup_subscription(String.t(), String.t()) :: {:ok, pid()} | {:error, String.t()}
  defp lookup_subscription(topic, subscription) do
    case Registry.match(@registry, topic, subscription) do
      [{pid, _}] ->
        {:ok, pid}

      _ ->
        {:error, "Subscription \"#{build_name(topic, subscription)}\" could not be found"}
    end
  end

  @spec build_name(String.t(), String.t()) :: String.t()
  defp build_name(topic, subscription),
    do: "sub-" <> topic <> "@" <> subscription

  @spec build_name_atom(String.t(), String.t()) :: atom()
  defp build_name_atom(topic, subscription),
    do: String.to_atom(build_name(topic, subscription))

  # Removes `ref` from the pending ETS table; optionally sleeps first (the
  # redelivery timeout) and optionally re-publishes the message (nack / timeout).
  # FIX: spec previously declared a list `[:ok | nil]`; the function returns a
  # plain `:ok` (re-published) or `nil`.
  @spec remove_from_pool(String.t(), String.t(), String.t(), boolean(), boolean()) :: :ok | nil
  defp remove_from_pool(topic, subscription, ref, delay \\ false, send_again \\ false) do
    if delay do
      Process.sleep(20_000)
    end

    subscription_full_name = build_name_atom(topic, subscription)

    case :ets.lookup(subscription_full_name, ref) do
      [{^ref, msg}] ->
        deleted = :ets.delete(subscription_full_name, ref)

        if deleted && send_again do
          {:ok, pid} = lookup_subscription(topic, subscription)
          GenServer.cast(pid, {:publish, msg})
        end

      [] ->
        nil
    end
  end

  ## callbacks

  @impl true
  # FIX: spec referenced the module atom `SubscriptionState` instead of the
  # struct type.
  @spec init({String.t(), String.t()}) :: {:ok, SubscriptionState.t()}
  def init({topic, subscription}) do
    # Public ETS table so the Task spawned in handle_call can touch it.
    :ets.new(build_name_atom(topic, subscription), [:named_table, :public, read_concurrency: true])
    Registry.register(@registry, topic, subscription)
    {:ok, %SubscriptionState{topic: topic, subscription: subscription}}
  end

  @impl true
  def handle_call({:read, count}, _from, state) do
    reply =
      Enum.take(state.messages, count)
      |> Enum.map(fn msg ->
        # Tag each delivered message with a random ref and park it in ETS;
        # a background task re-publishes it in 20s unless it is acked first.
        ref = :crypto.strong_rand_bytes(8) |> Base.encode64()
        out = {ref, msg}
        :ets.insert(build_name_atom(state.topic, state.subscription), out)
        Task.start(fn -> remove_from_pool(state.topic, state.subscription, ref, true, true) end)
        out
      end)

    {:reply, reply, Map.put(state, :messages, Enum.drop(state.messages, count))}
  end

  @impl true
  def handle_cast({:publish, msg}, state),
    do: {:noreply, Map.put(state, :messages, state.messages ++ [msg])}

  @impl true
  def handle_cast({:ack, ref}, state) do
    remove_from_pool(state.topic, state.subscription, ref)
    {:noreply, state}
  end

  @impl true
  def handle_cast({:nack, ref}, state) do
    # Immediate redelivery: delete from ETS and re-publish.
    remove_from_pool(state.topic, state.subscription, ref, false, true)
    {:noreply, state}
  end

  @impl true
  def handle_info(_msg, state) do
    {:noreply, state}
  end
end
|
lib/esquew/subscription/subscription.ex
| 0.775265
| 0.401043
|
subscription.ex
|
starcoder
|
defmodule Tus.Storage.S3 do
  @moduledoc """
  S3 (or compatible) storage backend for the [Tus server](https://hex.pm/packages/tus)

  ## Installation

  The package can be installed by adding `tus_storage_s3` to your list of dependencies in `mix.exs`:

  ```elixir
  def deps do
    [
      {:tus, "~> 0.1.1"},
      {:tus_storage_s3, "~> 0.1.0"},
    ]
  end
  ```

  ## Configuration

  - `storage`: Set it as `Tus.Storage.S3`
  - `s3_bucket`: The name of your bucket
  - `s3_host`: Optional. "s3.amazonaws.com" by default
  - `s3_prefix`: Optional. Prefix added to all files. Empty by default
  - `s3_min_part_size`: The minimum size of a single part (except the last).
    In Amazon S3 this is 5MB. For other, compatible services, you might want/need to
    change this restriction.

  In order to allow this backend to function properly, the user accessing the bucket must have at least the
  following AWS IAM policy permissions for the bucket and all of its subresources:

  ```
  s3:AbortMultipartUpload
  s3:DeleteObject
  s3:GetObject
  s3:ListMultipartUploadParts
  s3:PutObject
  ```

  Furthermore, this uses the ExAWS package, so you'll need to add valid AWS keys to its config.

  ```elixir
  config :ex_aws,
    access_key_id: [{:system, "AWS_ACCESS_KEY_ID"}, :instance_role],
    secret_access_key: [{:system, "AWS_SECRET_ACCESS_KEY"}, :instance_role]
  ```

  This means it will try to resolve credentials in this order

  - a. Look for the AWS standard AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment variables
  - b. Resolve credentials with IAM

  Consult the (ExAWS documentation)[https://hexdocs.pm/ex_aws/ExAws.html#module-aws-key-configuration] for more details.
  """
  alias ExAws.S3

  @default_host "s3.amazonaws.com"
  @default_min_part_size 5 * 1024 * 1024

  # Object key for a file: "<s3_prefix>/<uid>", with redundant slashes trimmed.
  defp file_path(config, file) do
    Enum.join(
      [
        config
        |> Map.get(:s3_prefix, "")
        |> String.trim("/"),
        file.uid
      ],
      "/"
    )
    |> String.trim("/")
  end

  defp host(config) do
    config |> Map.get(:s3_host, @default_host)
  end

  defp min_part_size(config) do
    config |> Map.get(:s3_min_part_size, @default_min_part_size)
  end

  # True when this chunk reaches (or passes) the end of the upload.
  defp last_part?(file, part_size) do
    file.offset + part_size >= file.size
  end

  # A non-final part below the minimum size is rejected when it cannot be
  # padded out to a min-size part within the remaining file.
  defp part_too_small?(file, config, part_size) do
    if last_part?(file, part_size) do
      false
    else
      min_size = min_part_size(config)
      part_size < min_size && file.offset + min_size > file.size
    end
  end

  @doc """
  Start a [Multipart Upload](http://docs.aws.amazon.com/AmazonS3/latest/dev/uploadobjusingmpu.html)
  and store its `upload_id`.
  """
  def create(file, config) do
    host = host(config)
    file_path = file_path(config, file)

    # NOTE(review): the success branch returns a bare %Tus.File{} while the
    # failure branch returns {:error, err} (possibly double-wrapping an
    # {:error, _} from ExAws) — confirm against the Tus storage contract.
    %{bucket: config.s3_bucket, path: file_path, opts: [], upload_id: nil}
    |> S3.Upload.initialize(host: host)
    |> case do
      {:ok, rs} ->
        %Tus.File{file | upload_id: rs.upload_id, path: file_path}

      err ->
        {:error, err}
    end
  end

  @doc """
  Add data to an already started [Multipart Upload](http://docs.aws.amazon.com/AmazonS3/latest/dev/uploadobjusingmpu.html)
  (identified by `file.upload_id`).

  Amazon restrict the minimum size of a single part (except the last one) to
  at least 5MB. If the data is smaller than that, this function returns `:too_small`.
  That limit can be customized with the config option `s3_min_part_size`.
  """
  def append(file, config, body) do
    part_size = byte_size(body)

    if part_too_small?(file, config, part_size) do
      :too_small
    else
      append_data(file, config, body, part_size)
    end
  end

  defp append_data(file, config, body, part_size) do
    # Part numbers are 1-based. NOTE(review): this assumes every previous part
    # was exactly `min_part_size` bytes — confirm callers always buffer to
    # that size before appending.
    part_id = div(file.offset, min_part_size(config)) + 1

    config.s3_bucket
    |> S3.upload_part(file.path, file.upload_id, part_id, body, "Content-Length": part_size)
    |> ExAws.request(host: host(config))
    |> case do
      {:ok, %{headers: headers}} ->
        # Header names are matched case-insensitively; the ETag is required by
        # complete_multipart_upload.
        {_, etag} = Enum.find(headers, fn {k, _v} -> String.downcase(k) == "etag" end)
        file = %Tus.File{file | parts: file.parts ++ [{part_id, etag}]}
        {:ok, file}

      error ->
        {:error, error}
    end
  end

  @doc """
  Finish a Multipart Upload
  """
  def complete_upload(file, config) do
    config.s3_bucket
    |> S3.complete_multipart_upload(file.path, file.upload_id, file.parts)
    |> ExAws.request(host: host(config))
  end

  @doc """
  Delete an uploaded object
  """
  def delete(file, config) do
    # FIX: the bucket was previously hard-coded as "" — the delete request
    # never targeted the configured bucket.
    config.s3_bucket
    |> S3.delete_object(file_path(config, file))
    |> ExAws.request(host: host(config))
  end
end
|
lib/tus_storage_s3.ex
| 0.818519
| 0.8398
|
tus_storage_s3.ex
|
starcoder
|
defmodule Unicode.Set.Search do
  @moduledoc """
  Membership testing for a reduced `Unicode.Set`.

  Codepoint ranges are packed into a binary tree of `{start, finish}` tuples;
  string ranges are expanded into patterns kept in a flat list.
  """

  # binary_tree: nested {left, right} tuples whose leaves are {start, finish}
  # codepoint ranges; string_ranges: expanded string patterns, each prefixed
  # with its length; operation: :in or :not_in.
  defstruct [:binary_tree, :string_ranges, :operation]

  # Builds the search struct from a reduced set: separates string ranges from
  # codepoint ranges and packs the latter into a tree.
  def build_search_tree(%Unicode.Set{parsed: {operation, tuple_list}, state: :reduced}) do
    {ranges, string_ranges} = extract_and_expand_string_ranges(tuple_list)
    search_tree = build_search_tree(ranges)
    search_struct = [binary_tree: search_tree, string_ranges: string_ranges, operation: operation]
    struct(__MODULE__, search_struct)
  end

  # Empty tree; member?/2 returns false for it.
  def build_search_tree([]) do
    {}
  end

  def build_search_tree([tuple]) when is_tuple(tuple) do
    tuple
  end

  def build_search_tree([left, right]) when is_tuple(left) and is_tuple(right) do
    {left, right}
  end

  # Split the list in half and recurse, yielding a roughly balanced tree.
  def build_search_tree(tuple_list) when is_list(tuple_list) do
    count = Enum.count(tuple_list)
    {left, right} = Enum.split(tuple_list, div(count, 2))
    {build_search_tree(left), build_search_tree(right)}
  end

  # Partitions tuples: {from, to} with list endpoints are string ranges;
  # everything else is a codepoint range. String ranges are then expanded
  # and tagged with their length.
  def extract_and_expand_string_ranges(tuples) do
    Enum.reduce(tuples, {[], []}, fn
      {from, to} = tuple, {ranges, string_ranges} when is_list(from) and is_list(to) ->
        {ranges, [tuple | string_ranges]}

      tuple, {ranges, string_ranges} ->
        {[tuple | ranges], string_ranges}
    end)
    |> Unicode.Set.expand_string_ranges()
    |> tag_string_ranges
  end

  # Prefix each string-range pattern with the length of its first entry so
  # string_member?/2 can scope its match.
  defp tag_string_ranges({ranges, string_ranges}) do
    string_patterns =
      Enum.map(string_ranges, fn [hd | _rest] = range ->
        [String.length(hd) | range]
      end)

    {ranges, string_patterns}
  end

  # Integer codepoint: walk the range tree; :not_in negates the result.
  def member?(codepoint, %__MODULE__{binary_tree: tree, operation: :in})
      when is_integer(codepoint) do
    member?(codepoint, tree)
  end

  def member?(codepoint, %__MODULE__{binary_tree: tree, operation: :not_in})
      when is_integer(codepoint) do
    !member?(codepoint, tree)
  end

  # AST template matching a non-empty UTF-8 string, binding both the whole
  # `string` and its first `codepoint`; unquoted into the heads below so a
  # single clause head can destructure both.
  string_match =
    quote do
      <<var!(codepoint)::utf8, _rest::binary>> = var!(string)
    end

  # String argument: a match on either the leading codepoint or any of the
  # string patterns counts as membership.
  def member?(unquote(string_match), %__MODULE__{operation: :in} = search_tree) do
    %__MODULE__{binary_tree: tree, string_ranges: strings} = search_tree
    member?(codepoint, tree) || string_member?(string, strings)
  end

  def member?(unquote(string_match), %__MODULE__{operation: :not_in} = search_tree) do
    %__MODULE__{binary_tree: tree, string_ranges: strings} = search_tree
    not (member?(codepoint, tree) || string_member?(string, strings))
  end

  # Empty tree built from an empty range list.
  def member?(_codepoint, {}) do
    false
  end

  # Leaf hit: codepoint falls inside this range.
  def member?(codepoint, {start, finish})
      when is_integer(codepoint) and codepoint in start..finish do
    true
  end

  # Leaf miss: both elements are integers, so this is a range, not a subtree.
  def member?(codepoint, {start, finish})
      when is_integer(codepoint) and is_integer(start) and is_integer(finish) do
    false
  end

  # Fast paths when one child of an interior node is itself a leaf range.
  def member?(codepoint, {_left, {right_start, right_finish}})
      when is_integer(codepoint) and codepoint in right_start..right_finish do
    true
  end

  def member?(codepoint, {{left_start, left_finish}, _right})
      when is_integer(codepoint) and codepoint in left_start..left_finish do
    true
  end

  # This is not at all optimal. Currently the implementation
  # Can't tell whether to take the left or the right branch
  # since its just nested tuples.
  def member?(codepoint, {left, right}) when is_integer(codepoint) do
    member?(codepoint, left) || member?(codepoint, right)
  end

  # True if any pattern matches within the first `len` units of `string`.
  # NOTE(review): `len` was computed with String.length/1 (graphemes) while
  # :binary.match scopes in bytes — confirm this is intended for multi-byte
  # strings.
  def string_member?(string, strings) do
    Enum.reduce_while(strings, false, fn [len | pattern], acc ->
      pattern = :binary.compile_pattern(pattern)

      if :binary.match(string, pattern, scope: {0, len}) == :nomatch do
        {:cont, acc}
      else
        {:halt, true}
      end
    end)
  end
end
|
lib/set/search.ex
| 0.61832
| 0.661913
|
search.ex
|
starcoder
|
defmodule Day13 do
@moduledoc """
You need to cross a vast firewall. The firewall consists of several layers, each with a security scanner that moves
back and forth across the layer. To succeed, you must not be detected by a scanner.
By studying the firewall briefly, you are able to record (in your puzzle input) the depth of each layer and the range
of the scanning area for the scanner within it, written as depth: range. Each layer has a thickness of exactly 1.
A layer at depth 0 begins immediately inside the firewall; a layer at depth 1 would start immediately after that.
For example, suppose you've recorded the following:
0: 3
1: 2
4: 4
6: 4
This means that there is a layer immediately inside the firewall (with range 3), a second layer immediately after that
(with range 2), a third layer which begins at depth 4 (with range 4), and a fourth layer which begins at depth 6
(also with range 4). Visually, it might look like this:
0 1 2 3 4 5 6
[ ] [ ] ... ... [ ] ... [ ]
[ ] [ ] [ ] [ ]
[ ] [ ] [ ]
[ ] [ ]
Within each layer, a security scanner moves back and forth within its range. Each security scanner starts at the top
and moves down until it reaches the bottom, then moves up until it reaches the top, and repeats. A security scanner
takes one picosecond to move one step. Drawing scanners as S, the first few picoseconds look like this:
Picosecond 0:
0 1 2 3 4 5 6
[S] [S] ... ... [S] ... [S]
[ ] [ ] [ ] [ ]
[ ] [ ] [ ]
[ ] [ ]
Picosecond 1:
0 1 2 3 4 5 6
[ ] [ ] ... ... [ ] ... [ ]
[S] [S] [S] [S]
[ ] [ ] [ ]
[ ] [ ]
Picosecond 2:
0 1 2 3 4 5 6
[ ] [S] ... ... [ ] ... [ ]
[ ] [ ] [ ] [ ]
[S] [S] [S]
[ ] [ ]
Picosecond 3:
0 1 2 3 4 5 6
[ ] [ ] ... ... [ ] ... [ ]
[S] [S] [ ] [ ]
[ ] [ ] [ ]
[S] [S]
Your plan is to hitch a ride on a packet about to move through the firewall. The packet will travel along the top of
each layer, and it moves at one layer per picosecond. Each picosecond, the packet moves one layer forward (its first
move takes it into layer 0), and then the scanners move one step. If there is a scanner at the top of the layer as
your packet enters it, you are caught. (If a scanner moves into the top of its layer while you are there, you are not
caught: it doesn't have time to notice you before you leave.) If you were to do this in the configuration above,
marking your current position with parentheses, your passage through the firewall would look like this:
Initial state:
0 1 2 3 4 5 6
[S] [S] ... ... [S] ... [S]
[ ] [ ] [ ] [ ]
[ ] [ ] [ ]
[ ] [ ]
Picosecond 0:
0 1 2 3 4 5 6
(S) [S] ... ... [S] ... [S]
[ ] [ ] [ ] [ ]
[ ] [ ] [ ]
[ ] [ ]
0 1 2 3 4 5 6
( ) [ ] ... ... [ ] ... [ ]
[S] [S] [S] [S]
[ ] [ ] [ ]
[ ] [ ]
Picosecond 1:
0 1 2 3 4 5 6
[ ] ( ) ... ... [ ] ... [ ]
[S] [S] [S] [S]
[ ] [ ] [ ]
[ ] [ ]
0 1 2 3 4 5 6
[ ] (S) ... ... [ ] ... [ ]
[ ] [ ] [ ] [ ]
[S] [S] [S]
[ ] [ ]
Picosecond 2:
0 1 2 3 4 5 6
[ ] [S] (.) ... [ ] ... [ ]
[ ] [ ] [ ] [ ]
[S] [S] [S]
[ ] [ ]
0 1 2 3 4 5 6
[ ] [ ] (.) ... [ ] ... [ ]
[S] [S] [ ] [ ]
[ ] [ ] [ ]
[S] [S]
Picosecond 3:
0 1 2 3 4 5 6
[ ] [ ] ... (.) [ ] ... [ ]
[S] [S] [ ] [ ]
[ ] [ ] [ ]
[S] [S]
0 1 2 3 4 5 6
[S] [S] ... (.) [ ] ... [ ]
[ ] [ ] [ ] [ ]
[ ] [S] [S]
[ ] [ ]
Picosecond 4:
0 1 2 3 4 5 6
[S] [S] ... ... ( ) ... [ ]
[ ] [ ] [ ] [ ]
[ ] [S] [S]
[ ] [ ]
0 1 2 3 4 5 6
[ ] [ ] ... ... ( ) ... [ ]
[S] [S] [S] [S]
[ ] [ ] [ ]
[ ] [ ]
Picosecond 5:
0 1 2 3 4 5 6
[ ] [ ] ... ... [ ] (.) [ ]
[S] [S] [S] [S]
[ ] [ ] [ ]
[ ] [ ]
0 1 2 3 4 5 6
[ ] [S] ... ... [S] (.) [S]
[ ] [ ] [ ] [ ]
[S] [ ] [ ]
[ ] [ ]
Picosecond 6:
0 1 2 3 4 5 6
[ ] [S] ... ... [S] ... (S)
[ ] [ ] [ ] [ ]
[S] [ ] [ ]
[ ] [ ]
0 1 2 3 4 5 6
[ ] [ ] ... ... [ ] ... ( )
[S] [S] [S] [S]
[ ] [ ] [ ]
[ ] [ ]
In this situation, you are caught in layers 0 and 6, because your packet entered the layer when its scanner was at
the top when you entered it. You are not caught in layer 1, since the scanner moved into the top of the layer once
you were already there.
The severity of getting caught on a layer is equal to its depth multiplied by its range. (Ignore layers in which you
do not get caught.) The severity of the whole trip is the sum of these values. In the example above, the
trip severity is 0*3 + 6*4 = 24.
Given the details of the firewall you've recorded, if you leave immediately, what is the severity of your whole trip?
--- Part Two ---
Now, you need to pass through the firewall without being caught - easier said than done.
You can't control the speed of the packet, but you can delay it any number of picoseconds. For each picosecond you
delay the packet before beginning your trip, all security scanners move one step. You're not in the firewall during
this time; you don't enter layer 0 until you stop delaying the packet.
In the example above, if you delay 10 picoseconds (picoseconds 0 - 9), you won't get caught:
State after delaying:
0 1 2 3 4 5 6
[ ] [S] ... ... [ ] ... [ ]
[ ] [ ] [ ] [ ]
[S] [S] [S]
[ ] [ ]
Picosecond 10:
0 1 2 3 4 5 6
( ) [S] ... ... [ ] ... [ ]
[ ] [ ] [ ] [ ]
[S] [S] [S]
[ ] [ ]
0 1 2 3 4 5 6
( ) [ ] ... ... [ ] ... [ ]
[S] [S] [S] [S]
[ ] [ ] [ ]
[ ] [ ]
Picosecond 11:
0 1 2 3 4 5 6
[ ] ( ) ... ... [ ] ... [ ]
[S] [S] [S] [S]
[ ] [ ] [ ]
[ ] [ ]
0 1 2 3 4 5 6
[S] (S) ... ... [S] ... [S]
[ ] [ ] [ ] [ ]
[ ] [ ] [ ]
[ ] [ ]
Picosecond 12:
0 1 2 3 4 5 6
[S] [S] (.) ... [S] ... [S]
[ ] [ ] [ ] [ ]
[ ] [ ] [ ]
[ ] [ ]
0 1 2 3 4 5 6
[ ] [ ] (.) ... [ ] ... [ ]
[S] [S] [S] [S]
[ ] [ ] [ ]
[ ] [ ]
Picosecond 13:
0 1 2 3 4 5 6
[ ] [ ] ... (.) [ ] ... [ ]
[S] [S] [S] [S]
[ ] [ ] [ ]
[ ] [ ]
0 1 2 3 4 5 6
[ ] [S] ... (.) [ ] ... [ ]
[ ] [ ] [ ] [ ]
[S] [S] [S]
[ ] [ ]
Picosecond 14:
0 1 2 3 4 5 6
[ ] [S] ... ... ( ) ... [ ]
[ ] [ ] [ ] [ ]
[S] [S] [S]
[ ] [ ]
0 1 2 3 4 5 6
[ ] [ ] ... ... ( ) ... [ ]
[S] [S] [ ] [ ]
[ ] [ ] [ ]
[S] [S]
Picosecond 15:
0 1 2 3 4 5 6
[ ] [ ] ... ... [ ] (.) [ ]
[S] [S] [ ] [ ]
[ ] [ ] [ ]
[S] [S]
0 1 2 3 4 5 6
[S] [S] ... ... [ ] (.) [ ]
[ ] [ ] [ ] [ ]
[ ] [S] [S]
[ ] [ ]
Picosecond 16:
0 1 2 3 4 5 6
[S] [S] ... ... [ ] ... ( )
[ ] [ ] [ ] [ ]
[ ] [S] [S]
[ ] [ ]
0 1 2 3 4 5 6
[ ] [ ] ... ... [ ] ... ( )
[S] [S] [S] [S]
[ ] [ ] [ ]
[ ] [ ]
Because all smaller delays would get you caught, the fewest number of picoseconds you would need to delay to get
through safely is 10.
What is the fewest number of picoseconds that you need to delay the packet to pass through the firewall without being
caught?
"""
def test_a do
  solve("res/day13_test.input", :severity)
end
def part_a do
  solve("res/day13.input", :severity)
end
def test_b do
  solve("res/day13_test.input", :caught)
end
def part_b do
  solve("res/day13.input", :caught)
end
# Shared driver for all four entry points (the original repeated this body
# verbatim): parse the firewall file, find the deepest layer index, and run
# the delay search with the requested termination mode
# (:severity for part A, :caught for part B).
defp solve(file, type) do
  firewall = parse_file(file)
  {max_size, _} = Enum.max(firewall)
  find_min_delay(0, {firewall, max_size + 1}, {0, 0}, type)
end
# Termination for part A (:severity): stop as soon as a pass produced a
# non-zero severity. Note the value returned is the severity computed for the
# *previous* delay, because the accumulator carries the last completed pass —
# part A asks for the severity of the delay-0 trip, which this yields.
defp find_min_delay(_, _, {_caught,severity}, :severity) when severity != 0 do
severity
end
# Termination for part B (:caught): the previous pass (delay - 1) had zero
# collisions, i.e. the packet crossed untouched. `delay != 0` skips the
# initial {0, 0} seed accumulator, which would otherwise match immediately.
defp find_min_delay(delay, _, {caught,_}, :caught) when caught === 0 and delay != 0 do
delay - 1
end
# Recursive step: simulate one full traversal starting after `delay`
# picoseconds, then retry with delay + 1 carrying this pass's results.
defp find_min_delay(delay, {firewall, max_size}, _result, type) do
{_newfirewall, total_collisions, severity} = move_scaners(firewall, delay, [], {0,0})
# Progress indicator for long searches (external Help module).
Help.print_dot_every(delay+1000, 200000)
find_min_delay(delay+1, {firewall, max_size}, {total_collisions, severity}, type)
end
# Parses a firewall description file into a list of {depth, {range, 1}}
# tuples, one per "depth: range" input line.
#
# Fix: `trim: true` on the line split drops the empty string produced by a
# trailing newline, which previously crashed the `[depth, range]` match with
# a MatchError on any file ending in "\n".
defp parse_file(file) do
  file
  |> File.read!()
  |> String.split("\n", trim: true)
  |> Enum.map(fn line ->
    [depth, range] = String.split(line, [":", " "], trim: true)
    {String.to_integer(depth), {String.to_integer(range), 1}}
  end)
end
# Base case: every layer processed. Returns the rebuilt firewall (in reversed
# order, which callers ignore), the collision count, and the total severity
# accumulated during this pass.
defp move_scaners([], _start_step, acc, {total_collisions, severity}) do
{acc, total_collisions, severity}
end
# For the layer at depth `loc` with scanner range `max_depth`, compute where
# the scanner is when the packet arrives (time = start_step + loc). A scanner
# bounces top-to-bottom-to-top over a cycle of (max_depth - 1) * 2 steps, so
# a remainder of 0 (here offset to 1) means it is at the top — a collision.
defp move_scaners([{loc, {max_depth, _current_depth}}|t], start_step, acc, {total_collisions, severity}) do
calculate_depth = rem(start_step+loc, ((max_depth - 1) * 2)) + 1
case calculate_depth === 1 do #collision
true ->
# Caught: severity grows by depth * range, per the puzzle rules.
move_scaners(t, start_step,
[{loc, {max_depth, calculate_depth}}|acc], {total_collisions+1, severity + (loc * max_depth)})
false ->
move_scaners(t, start_step,
[{loc, {max_depth, calculate_depth}}|acc], {total_collisions, severity})
end
end
end
|
lib/day13.ex
| 0.645343
| 0.703406
|
day13.ex
|
starcoder
|
defmodule AOC.Day6.OrbitChecksum do
  @moduledoc false
  alias AOC.Day6.Node
  alias AOC.Day6.Stack

  @doc """
  Part 1: total number of direct and indirect orbits in the puzzle input.
  """
  def part1(path) do
    read_puzzle_input(path)
    |> process_input()
    |> child_to_parent_map()
    |> checksum()
  end

  @doc """
  Part 2: minimum number of orbital transfers to move from the object "YOU"
  orbits to the object "SAN" orbits.
  """
  def part2(path) do
    read_puzzle_input(path)
    |> process_input()
    |> neighbor_map()
    |> depth_first_search("YOU", "SAN")
    |> node_to_path()
    # Remove "YOU", "SAN", and the starting object
    |> (&(length(&1) - 3)).()
  end

  @doc "Reads the raw puzzle input file."
  def read_puzzle_input(path) do
    File.read!(path)
  end

  @doc ~S|Splits the raw input into a list of "PARENT)CHILD" lines.|
  def process_input(input) do
    input
    |> String.trim()
    |> String.split("\n")
  end

  @doc """
  Builds a map from each child object to the single object it directly
  orbits (its parent).
  """
  def child_to_parent_map(input) do
    # Map.new/2 replaces the original reduce-with-Map.put pattern.
    Map.new(input, fn val ->
      [parent, child] = String.split(val, ")")
      {child, parent}
    end)
  end

  @doc """
  Sums the orbit-chain length of every object: each object contributes one
  count per ancestor between it and the root.
  """
  @spec checksum(map()) :: integer
  def checksum(objects) do
    Enum.reduce(objects, 0, fn {key, _val}, acc ->
      walk(objects, key, acc)
    end)
  end

  @doc "Adds the number of ancestors of `start` in `objects` to `acc`."
  @spec walk(map(), String.t(), integer) :: integer
  def walk(objects, start, acc \\ 0) do
    case Map.get(objects, start) do
      nil -> acc
      parent -> walk(objects, parent, acc + 1)
    end
  end

  @doc """
  Builds an undirected adjacency map: every parent lists its children and
  every child lists its parent.
  """
  @spec neighbor_map(list(String.t())) :: map
  def neighbor_map(input) do
    Enum.reduce(input, %{}, fn val, objects ->
      [parent, child] = String.split(val, ")")

      # Map.update/4 collapses the original nil-check-then-put branches.
      objects
      |> Map.update(parent, [child], &[child | &1])
      |> Map.update(child, [parent], &[parent | &1])
    end)
  end

  @doc """
  Depth-first search from `initial` to `goal` over the adjacency map.
  Returns the goal `Node.t()` (whose parent chain encodes the path), or
  `nil` when the goal is unreachable.
  """
  @spec depth_first_search(
          a,
          b,
          b
        ) :: Node.t()
        when a: any, b: any
  def depth_first_search(objects, initial, goal) do
    frontier =
      Stack.new()
      |> Stack.push(Node.new(initial, nil))

    explored =
      MapSet.new()
      |> MapSet.put(initial)

    goal_fn = fn location -> goal == location end
    successors_fn = fn objects, value -> Map.get(objects, value) end
    dfs(objects, frontier, explored, goal_fn, successors_fn)
  end

  @spec dfs(
          a,
          Stack.t(),
          MapSet.t(),
          (b -> boolean),
          (a, b -> list(b))
        ) :: Node.t()
        when a: any, b: any
  defp dfs(objects, frontier, explored, goal_fn, successors_fn) do
    if Stack.empty?(frontier) == false do
      {current_node, frontier} = Stack.pop(frontier)
      current_state = current_node.state

      if goal_fn.(current_state) do
        current_node
      else
        {frontier, explored} =
          Enum.reduce(
            successors_fn.(objects, current_state),
            {frontier, explored},
            fn child, {frontier, explored} ->
              # MapSet.member?/2 makes the O(1) membership test explicit
              # (the original used Enum.member?/2 == true).
              if MapSet.member?(explored, child) do
                {frontier, explored}
              else
                frontier = Stack.push(frontier, Node.new(child, current_node))
                explored = MapSet.put(explored, child)
                {frontier, explored}
              end
            end
          )

        dfs(objects, frontier, explored, goal_fn, successors_fn)
      end
    end
  end

  @doc """
  Converts a search node's parent chain into the list of states from the
  start of the search to the node itself. Returns `[]` for `nil`.
  """
  @spec node_to_path(Node.t()) :: list(Node.t())
  def node_to_path(nil), do: []
  def node_to_path(n), do: node_to_path(n, [n.state])

  defp node_to_path(%{parent: nil}, path), do: path

  defp node_to_path(%{parent: parent}, path) do
    node_to_path(parent, [parent.state | path])
  end
end
|
aoc-2019/lib/aoc/day6/orbit_checksum.ex
| 0.746046
| 0.514339
|
orbit_checksum.ex
|
starcoder
|
defmodule StaffNotes.Support.Helpers do
@moduledoc """
Function helpers for tests.
There are a few common types of helper functions:
* Functions that are intended to be used in assertions end in `?`
* `fixture` functions that create records in the database and return the data object
* `setup` functions that are intended to be called from `ExUnit.Callbacks.setup/1` to add items
to the test context
## Examples
```
setup [:setup_regular_user]
test "a test that needs a regular user", context do
user = context.regular_user
assert user.name == "user-name"
end
```
"""
alias Plug.Conn.Status
alias StaffNotes.Accounts
alias StaffNotes.Accounts.User
alias StaffNotes.Accounts.Organization
alias StaffNotes.Markdown
alias StaffNotes.Notes
alias StaffNotes.Notes.Member
alias StaffNotes.Notes.Note
alias StaffNotesWeb.ErrorView
import Phoenix.Controller, only: [view_module: 1, view_template: 1]
@doc """
Determines whether the appropriate module and template was rendered for the given error.
Accepts either a status atom (e.g. `:not_found`) or an integer status code.
"""
@spec error_rendered?(Plug.Conn.t(), atom | integer) :: boolean
def error_rendered?(conn, error)
def error_rendered?(conn, atom) when is_atom(atom), do: error_rendered?(conn, Status.code(atom))
def error_rendered?(conn, integer) when is_integer(integer) do
rendered?(conn, ErrorView, "#{integer}.html")
end
@doc """
Replaces characters in the string with their HTML-escaped versions.
"""
def escape(text) do
{:safe, text} = Phoenix.HTML.html_escape(text)
text
end
@doc """
Creates `StaffNotes.Markdown` struct from a string.
Pass `rendered: false` to skip rendering and leave `html` as `nil`.
"""
def markdown(text, options \\ [])
def markdown(text, []) do
%Markdown{text: text, html: Markdown.to_html(text)}
end
def markdown(text, rendered: false) do
%Markdown{text: text, html: nil}
end
@doc """
Inserts a new member into the database and returns it.
"""
def member_fixture(attrs \\ %{}, %Organization{} = org) do
{:ok, member} =
attrs
|> Enum.into(member_attrs())
|> Notes.create_member(org)
member
end
@doc """
Inserts a new note into the database and returns it.
"""
def note_fixture(attrs \\ %{}, %User{} = author, %Member{} = member, %Organization{} = org) do
{:ok, note} =
attrs
|> Enum.into(note_attrs())
|> Notes.create_note(author, member, org)
%Note{note | text: %Markdown{text: note.text.text, html: Markdown.to_html(note.text.text)}}
end
@doc """
Inserts a new organization into the database and returns it.
"""
def org_fixture(attrs \\ %{}, %User{} = user) do
{:ok, %{org: org}} =
attrs
|> Enum.into(org_attrs())
|> Accounts.create_org(user)
org
end
@doc """
Determines whether the given view module and template were rendered.
When `module` is `nil`, only the template name is checked.
"""
@spec rendered?(Plug.Conn.t(), module | nil, String.t()) :: boolean
def rendered?(conn, module \\ nil, template)
def rendered?(conn, nil, template), do: view_template(conn) == template
def rendered?(conn, module, template) do
view_module(conn) == module && rendered?(conn, template)
end
@doc """
Creates a standard organization and adds it to the test context as `:regular_org`
Reuses `:regular_user` from the context when present; otherwise creates one.
"""
def setup_regular_org(%{regular_user: user}) do
{:ok, regular_org: org_fixture(user)}
end
def setup_regular_org(_context) do
user = user_fixture()
{:ok, regular_org: org_fixture(user), regular_user: user}
end
@doc """
Creates a standard user and adds it to the test context as `:regular_user`.
"""
def setup_regular_user(_context) do
{:ok, regular_user: user_fixture()}
end
@doc """
Inserts a new team belonging to the given org into the database and returns it.
"""
def team_fixture(attrs \\ %{}, org) do
{:ok, team} =
attrs
|> Enum.into(team_attrs())
|> Accounts.create_team(org)
team
end
@doc """
Inserts a new user into the database and returns it.
"""
def user_fixture(attrs \\ %{}) do
{:ok, user} =
attrs
|> Enum.into(user_attrs())
|> Accounts.create_user()
user
end
# Default attributes merged under caller-supplied attrs by the fixtures above.
defp member_attrs, do: %{name: "member-name"}
defp note_attrs, do: %{text: "some text"}
defp org_attrs, do: %{name: "org-name"}
defp team_attrs, do: %{name: "team-name", permission: :write, original: false}
defp user_attrs do
%{avatar_url: "some avatar_url", id: 42, name: "user-name", site_admin: false}
end
end
|
test/support/helpers.ex
| 0.888263
| 0.824533
|
helpers.ex
|
starcoder
|
defmodule SiteWeb.ScheduleView.TripList do
  @moduledoc """
  View functions for handling lists of trips from schedules.
  """
  alias Site.Components.Icons.SvgIcon
  import Phoenix.HTML, only: [raw: 1]
  import Phoenix.HTML.Tag, only: [content_tag: 2, content_tag: 3]
  import Phoenix.HTML.Link, only: [link: 2]
  alias SiteWeb.ViewHelpers
  alias Routes.Route

  @doc """
  Returns Trip Alerts by the trip id and time from the given predicted_schedule, route and direction_id
  If no schedule is available, the prediction is used to match against alerts
  Does not return alerts for Bus routes
  """
  @spec trip_alerts(PredictedSchedule.t() | nil, [Alerts.Alert.t()], Route.t(), String.t()) :: [
          Alerts.Alert.t()
        ]
  def trip_alerts(_predicted_schedule, _alerts, %Route{type: 3}, _direction_id), do: []

  def trip_alerts(predicted_schedule, alerts, route, direction_id) do
    PredictedSchedule.map_optional(predicted_schedule, [:schedule, :prediction], [], fn x ->
      Alerts.Trip.match(
        alerts,
        x.trip.id,
        time: x.time,
        route: route.id,
        direction_id: direction_id
      )
    end)
  end

  @doc """
  Matches the given alerts with the stop id and time from the given predicted_schedule, route and direction_id
  If no schedule is available, the prediction is used to match against alerts
  """
  @spec stop_alerts(PredictedSchedule.t() | nil, [Alerts.Alert.t()], String.t(), String.t()) :: [
          Alerts.Alert.t()
        ]
  def stop_alerts(predicted_schedule, alerts, route_id, direction_id) do
    PredictedSchedule.map_optional(predicted_schedule, [:schedule, :prediction], [], fn x ->
      Alerts.Stop.match(
        alerts,
        x.stop.id,
        time: x.time,
        route: route_id,
        direction_id: direction_id
      )
    end)
  end

  @doc "If alerts are given, display alert icon"
  @spec display_alerts([Alerts.Alert.t()]) :: Phoenix.HTML.Safe.t()
  def display_alerts([]), do: raw("")

  def display_alerts(_alerts),
    do: SiteWeb.PageView.svg_icon(%SvgIcon{icon: :alert, class: "icon-small-inline"})

  @doc """
  Returns vehicle frequency for the frequency table, either "Every X minutes" or "No service between these hours".
  """
  @spec frequency_times(Schedules.Frequency.t()) :: Phoenix.HTML.Safe.t()
  def frequency_times(frequency) do
    if Schedules.Frequency.has_service?(frequency) do
      content_tag :span do
        [
          "Every ",
          TimeGroup.display_frequency_range(frequency),
          # Screen readers get the full word; sighted users see the short form.
          content_tag(:span, " minutes", class: "sr-only"),
          content_tag(:span, " mins", aria_hidden: true)
        ]
      end
    else
      content_tag(:span, "No service between these hours")
    end
  end

  @doc """
  Returns the display name of a frequency time block. The first and last
  blocks of the day use the actual first/last departure times.
  """
  @spec frequency_block_name(Schedules.Frequency.t(), Schedules.Departures.t() | :no_service) ::
          String.t()
  def frequency_block_name(
        %Schedules.Frequency{time_block: :early_morning},
        %Schedules.Departures{} = departure
      ) do
    "#{ViewHelpers.format_schedule_time(departure.first_departure)} - 6:30 AM"
  end

  def frequency_block_name(%Schedules.Frequency{time_block: :am_rush}, _), do: "6:30 AM - 9:30 AM"
  def frequency_block_name(%Schedules.Frequency{time_block: :midday}, _), do: "9:30 AM - 3:30 PM"
  def frequency_block_name(%Schedules.Frequency{time_block: :pm_rush}, _), do: "3:30 PM - 6:30 PM"
  def frequency_block_name(%Schedules.Frequency{time_block: :evening}, _), do: "6:30 PM - 9:00 PM"
  def frequency_block_name(%Schedules.Frequency{time_block: :night}, _), do: "9:00 PM - 12:00 AM"

  def frequency_block_name(
        %Schedules.Frequency{time_block: :late_night},
        %Schedules.Departures{} = departure
      ) do
    "12:00 AM - #{ViewHelpers.format_schedule_time(departure.last_departure)}"
  end

  @doc """
  Renders a link to `url` for the given stop name, appending an alert icon
  when any alerts are present.
  """
  @spec stop_name_link_with_alerts(String.t(), String.t(), [Alerts.Alert.t()]) ::
          Phoenix.HTML.Safe.t()
  def stop_name_link_with_alerts(name, url, []) do
    link to: url do
      name
      |> ViewHelpers.break_text_at_slash()
    end
  end

  def stop_name_link_with_alerts(name, url, alerts) do
    link to: url do
      name
      |> ViewHelpers.break_text_at_slash()
      |> add_icon_to_stop_name(alerts)
    end
  end

  # Wraps the stop name so the final word and the alert icon stay together.
  defp add_icon_to_stop_name(stop_name, alerts) do
    content_tag :span, class: "name-with-icon" do
      stop_name
      |> String.split(" ")
      |> add_icon_to_string(alerts)
    end
  end

  # Last word of the name: group it with the icon in an inline-block span so
  # they never wrap apart. (Idiom fix: `[word]` instead of `[word | []]`.)
  defp add_icon_to_string([word], alerts) do
    content_tag :span, class: "inline-block" do
      [word, display_alerts(alerts)]
    end
  end

  defp add_icon_to_string([word | rest], alerts) do
    [word, " ", add_icon_to_string(rest, alerts)]
  end

  @doc """
  Returns a link to expand or collapse the trip list. No link is shown
  if there are no additional trips
  """
  @spec trip_expansion_link(:none | :collapsed | :expanded, Date.t(), Plug.Conn.t()) ::
          Phoenix.HTML.safe() | nil
  def trip_expansion_link(:none, _date, _conn) do
    nil
  end

  def trip_expansion_link(:collapsed, date, conn) do
    date_string = date |> ViewHelpers.pretty_date() |> String.downcase()

    link to: UrlHelpers.update_url(conn, show_all_trips: true) <> "#trip-list",
         class: "trip-list-row trip-list-footer" do
      "Show all trips for #{date_string}"
    end
  end

  def trip_expansion_link(:expanded, _date, conn) do
    link to: UrlHelpers.update_url(conn, show_all_trips: false) <> "#trip-list",
         class: "trip-list-row trip-list-footer" do
      "Show upcoming trips only"
    end
  end
end
|
apps/site/lib/site_web/views/schedule/trip_list.ex
| 0.796965
| 0.438424
|
trip_list.ex
|
starcoder
|
defmodule Fluid.Field.NaiveDateTime do
@moduledoc """
Fixed-width field type that packs a `NaiveDateTime` into `size` bits as the
integer offset (in `time_unit`) from a configurable `epoch`.
"""
alias Fluid.Helper
# Field configuration: bit width, time resolution, and the zero point.
defstruct size: nil, time_unit: :millisecond, epoch: ~N[2015-01-01 00:00:00]
# Returns the configured bit width of the encoded field.
# NOTE(review): this shadows the auto-imported Kernel.bit_size/1 guard —
# confirm compilation relies on qualified calls only, or add
# `import Kernel, except: [bit_size: 1]`.
def bit_size(%{size: size}), do: size
# Injects the `__fluid__/2,3` encode/decode functions for `field_name` into
# the calling module, validating `opts` at compile time.
defmacro def_field_functions(field_name, opts) do
quote bind_quoted: [opts: opts, field_name: field_name] do
Fluid.Field.NaiveDateTime.verify_opts(opts)
@size Fluid.Field.NaiveDateTime.bit_size(opts)
# NOTE(review): `Helper` here resolves in the *caller's* environment, not
# this module's alias — works only if the caller also aliases Fluid.Helper;
# consider using the fully-qualified Fluid.Helper.max_int/1.
@max_encoded_value Helper.max_int(@size)
@time_unit opts.time_unit
# Truncate the epoch so diffs/adds are exact at the configured resolution.
@epoch NaiveDateTime.truncate(opts.epoch, @time_unit)
def __fluid__(:bit_size, unquote(field_name)), do: @size
def __fluid__(:max, unquote(field_name)),
do: NaiveDateTime.add(@epoch, @max_encoded_value, @time_unit)
def __fluid__(:min, unquote(field_name)),
do: @epoch
# Cast an in-range non-negative integer offset back to a NaiveDateTime.
def __fluid__(:cast, unquote(field_name), value)
when is_integer(value) and value >= 0 and value <= @max_encoded_value,
do: NaiveDateTime.add(@epoch, value, @time_unit)
def __fluid__(:cast, unquote(field_name), _value), do: :error
# Load: unpack a @size-bit binary into a NaiveDateTime.
def __fluid__(:load, unquote(field_name), <<value::@size>>),
do: {:ok, NaiveDateTime.add(@epoch, value, @time_unit)}
def __fluid__(:load, unquote(field_name), _value), do: :error
# Dump: pack a NaiveDateTime into @size bits; :error when out of range.
def __fluid__(:dump, unquote(field_name), %{ __struct__: NaiveDateTime } = value) do
ret = NaiveDateTime.diff(value, @epoch, @time_unit)
if ret >= 0 and ret <= @max_encoded_value, do: {:ok, <<ret::@size>>}, else: :error
end
def __fluid__(:dump, unquote(field_name), _value), do: :error
end
end
defguardp is_time_unit(tu) when tu in [:second, :millisecond, :microsecond]
defguardp is_size(size) when is_integer(size) and size > 0
# Validates a field configuration; raises ArgumentError on the first bad
# option (size, then time_unit, then epoch — clause order matters).
def verify_opts(%__MODULE__{size: size, epoch: %NaiveDateTime{}, time_unit: tu})
when is_size(size) and is_time_unit(tu),
do: :ok
def verify_opts(%{size: size}) when not is_size(size),
do:
raise(ArgumentError, """
Invalid size: #{inspect(size)}
size should be an integer > 0
""")
def verify_opts(%__MODULE__{time_unit: tu})
when not is_time_unit(tu),
do:
raise(ArgumentError, """
Invalid type for time_unit: #{inspect(tu)}
time_unit should be :second, :millisecond or :microsecond
""")
def verify_opts(%{epoch: epoch}),
do:
raise(ArgumentError, """
Invalid type for epoch: #{inspect(epoch)}
epoch should be NaiveDateTime
""")
# Runtime variant of the dump logic above: returns the raw integer offset
# (not a bitstring) or :error when the value does not fit in `size` bits.
def dump(
%__MODULE__{size: size, time_unit: time_unit, epoch: epoch},
%{
__struct__: NaiveDateTime
} = value
) do
ret = NaiveDateTime.diff(value, NaiveDateTime.truncate(epoch, time_unit), time_unit)
if ret < 0 or ret > Helper.max_int(size), do: :error, else: ret
end
def dump(%__MODULE__{}, _value), do: :error
end
|
lib/fluid/field/naive_date_time.ex
| 0.794185
| 0.434941
|
naive_date_time.ex
|
starcoder
|
defmodule Ockam.Wire do
  @moduledoc """
  Encodes and decodes messages that can be transported on the wire.
  """

  alias Ockam.Message
  alias Ockam.Wire.DecodeError
  alias Ockam.Wire.EncodeError

  require DecodeError
  require EncodeError

  @doc """
  Encodes a message into a binary.
  Returns `{:ok, iodata}`, if it succeeds.
  Returns `{:error, error}`, if it fails.
  """
  @callback encode(message :: Message.t()) ::
              {:ok, encoded :: iodata} | {:error, error :: EncodeError.t()}

  @doc """
  Decodes a message from a binary.
  Returns `{:ok, message}`, if it succeeds.
  Returns `{:error, error}`, if it fails.
  """
  @callback decode(encoded :: binary()) ::
              {:ok, message :: Message.t()} | {:error, error :: DecodeError.t()}

  @doc """
  Formats an error returned by `Ockam.Wire.encode/1` or `Ockam.Wire.decode/1`.
  Returns a string.
  """
  @callback format_error(error :: EncodeError.t() | DecodeError.t()) ::
              formatted_error_message :: String.t()

  @doc """
  Encode a message to a binary using the provided encoder.
  Falls back to the configured default encoder when `encoder` is `nil`.
  """
  @spec encode(encoder :: atom, message :: Message.t()) ::
          {:ok, encoded :: iodata} | {:error, error :: EncodeError.t()}
  def encode(encoder \\ nil, message)

  def encode(nil, message) do
    case default_implementation() do
      nil -> {:error, EncodeError.new(:encoder_is_nil_and_no_default_encoder)}
      encoder -> encode(encoder, message)
    end
  end

  def encode(encoder, message) when is_atom(encoder) do
    with :ok <- ensure_loaded(:encoder, encoder),
         :ok <- ensure_exported(encoder, :encode, 1) do
      encoder.encode(message)
    else
      {:error, reason} -> {:error, EncodeError.new(reason)}
    end
  end

  def encode(encoder, _message) when not is_atom(encoder) do
    {:error, EncodeError.new({:encoder_is_not_a_module, encoder})}
  end

  @doc """
  Decode a message from binary using the provided decoder.
  Falls back to the configured default decoder when `decoder` is `nil`.
  """
  @spec decode(decoder :: atom, encoded :: binary) ::
          {:ok, message :: Message.t()} | {:error, error :: DecodeError.t()}
  def decode(decoder \\ nil, encoded)

  def decode(nil, encoded) when is_binary(encoded) do
    case default_implementation() do
      nil ->
        {:error, DecodeError.new(:decoder_is_nil_and_no_default_decoder)}

      decoder ->
        # BUG FIX: this previously called decode!/2, which raised on failure
        # instead of returning {:error, reason} like every other decode/2
        # clause.
        decode(decoder, encoded)
    end
  end

  def decode(decoder, encoded) when is_atom(decoder) and is_binary(encoded) do
    with :ok <- ensure_loaded(:decoder, decoder),
         :ok <- ensure_exported(decoder, :decode, 1),
         {:ok, message} <- decoder.decode(encoded) do
      {:ok, message}
    else
      {:error, reason} -> {:error, DecodeError.new(reason)}
    end
  end

  def decode(decoder, _encoded) when not is_atom(decoder) do
    {:error, DecodeError.new({:decoder_is_not_a_module, decoder})}
  end

  def decode(_decoder, encoded) when not is_binary(encoded) do
    {:error, DecodeError.new({:encoded_input_is_not_binary, encoded})}
  end

  @doc """
  Same as `decode/2`, but raises the decode error instead of returning it.
  On success still returns `{:ok, message}` for backward compatibility.
  """
  @spec decode!(decoder :: atom, encoded :: binary) :: {:ok, message :: Message.t()}
  def decode!(decoder, encoded) do
    # Moved after decode/2: previously decode!/2 was defined between decode's
    # @doc/@spec and its clauses, so that documentation attached to the wrong
    # function.
    case decode(decoder, encoded) do
      {:ok, message} -> {:ok, message}
      {:error, reason} -> raise reason
    end
  end

  # returns :ok if module is loaded, {:error, reason} otherwise
  defp ensure_loaded(type, module) do
    case Code.ensure_loaded?(module) do
      true -> :ok
      false -> {:error, {:module_not_loaded, {type, module}}}
    end
  end

  # returns :ok if a module exports the given function, {:error, reason} otherwise
  defp ensure_exported(module, function, arity) do
    case function_exported?(module, function, arity) do
      true -> :ok
      false -> {:error, {:module_does_not_export, {module, function, arity}}}
    end
  end

  # Reads the default encoder/decoder from the :ockam application env.
  defp default_implementation do
    module_config = Application.get_env(:ockam, __MODULE__, [])
    Keyword.get(module_config, :default)
  end

  def format_error(%DecodeError{reason: :decoder_is_nil_and_no_default_decoder}),
    do: "Decoder argument is nil and there is no default decoder configured."

  def format_error(%DecodeError{reason: {:decoder_is_not_a_module, decoder}}),
    do: "Decoder argument is not a module: #{inspect(decoder)}"

  def format_error(%DecodeError{reason: {:encoded_input_is_not_binary, encoded}}),
    do: "Encoded input cannot be decoded as it is not a binary: #{inspect(encoded)}"

  def format_error(%DecodeError{reason: {:module_not_loaded, {:decoder, module}}}),
    do: "Decoder module is not loaded: #{inspect(module)}"

  def format_error(%DecodeError{reason: {:module_does_not_export, {module, :decode, 1}}}),
    do: "Decoder module does not export: #{inspect(module)}.decode/1"

  def format_error(%EncodeError{reason: :encoder_is_nil_and_no_default_encoder}),
    do: "Encoder argument is nil and there is no default encoder configured."

  def format_error(%EncodeError{reason: {:encoder_is_not_a_module, encoder}}),
    do: "Encoder argument is not a module: #{inspect(encoder)}"

  def format_error(%EncodeError{reason: {:module_not_loaded, {:encoder, module}}}),
    do: "Encoder module is not loaded: #{inspect(module)}"

  def format_error(%EncodeError{reason: {:module_does_not_export, {module, :encode, 1}}}),
    do: "Encoder module does not export: #{inspect(module)}.encode/1"
end
|
implementations/elixir/ockam/ockam/lib/ockam/wire.ex
| 0.93011
| 0.418875
|
wire.ex
|
starcoder
|
defmodule Scenic.Cache.Term do
@moduledoc """
Helpers for loading file based Erlang terms directly into the cache.
Sometimes you want to pre-compile a big erlang term, such as a dictionary/map and
distribute it to multiple applications. In this case you build your term, then use
[`:erlang.term_to_binary/2`](http://erlang.org/doc/man/erlang.html#term_to_binary-2)
to change it into binary data, which you write out to a file. Later you read the file
and load the term.
This will be used in Scenic to store pre-compiled font metric data, such as character
widths, kerning, etc. It is much better to compute that once and store it than to
run the C code every time your program is run.
The `Scenic.Cache.Term` is very similar to [`Scenic.Cache.File`](Scenic.Cache.File.html)
module, except that after the file has been loaded into memory, it also calls
[`:erlang.binary_to_term/2`](http://erlang.org/doc/man/erlang.html#binary_to_term-2).
## Where to store your static file assets
You can store your assets anywhere in your app's `priv/` directory. This directory is
special in the sense that the Elixir build system knows to copy its contents into the
correct final build location. How you organize your assets inside of `priv/` is up to you.
my_app/
priv/
static/
terms/
asset.term
At compile time you need to build the actual path of your asset by combining
the build directory with the partial path inside of `priv/`
Example
path = :code.priv_dir(:my_app)
|> Path.join("/static/terms/asset.term")
You can do this at either compile time or runtime.
## Security
A lesson learned the hard way is that static assets (fonts, images, etc) that your app
loads out of storage can easily become attack vectors.
These formats are complicated! There is no guarantee (on any system) that a malformed
asset will not cause an error in the C code that interprets it. Again - these are complicated
and the renderers need to be fast...
The solution is to compute a SHA hash of these files during build-time of your application
and to store the result in your application's code itself. Then during run time, you
compare the pre-computed hash against the run-time hash of the asset being loaded.
This scheme is much stronger when the application code itself is also signed and
verified, but that is an exercise for the packaging tools.
When assets are loaded this way, the `@asset_hash` term is also used as the key in
the cache. This has the additional benefit of allowing you to pre-compute
the graph itself, using the correct keys for the correct assets.
Note that the hash is of the binary data in the file.
## Full example
defmodule MyApp.MyScene do
use Scenic.Scene
# build the path to the static asset file (compile time)
@asset_path :code.priv_dir(:my_app) |> Path.join("/static/terms/asset.term")
# pre-compute the hash (compile time)
@asset_hash Scenic.Cache.Hash.file!( @asset_path, :sha )
def init( _, _ ) do
# load the asset into the cache (run time)
Scenic.Cache.File.load(@asset_path, @asset_hash)
...
end
end
"""
alias Scenic.Cache
alias Scenic.Cache.Hash
# --------------------------------------------------------
@doc """
Load a file-based term directly into the cache.
Parameters:
* `path` - the path to the term file
* `hash` - the pre-computed hash of the file
* `opts` - a list of options. See below.
Options:
* `hash` - format of the hash. Valid formats include `:sha, :sha224, :sha256, :sha384, :sha512, :ripemd160`. If the hash option is not set, it will use `:sha` by default.
* `scope` - Explicitly set the scope of the term in the cache.
* `safe` - prevents the creation of new atoms. [See erlang docs](http://erlang.org/doc/man/erlang.html#binary_to_term-2).
On success, returns
`{:ok, cache_key}`
The key in the cache will be the hash of the file.
"""
def load(path, hash, opts \\ [])
# insecure loading. Loads the file blindly even if it has been altered.
# Not recommended in production - better to embed the expected hashes.
# Also slower, because it must load the file and compute the hash to use
# as a key even if the term is already loaded into the cache.
def load(path, :insecure, opts) do
with {:ok, data} <- Cache.File.read(path, :insecure, opts),
{:ok, hash} <- Hash.binary(data, opts[:hash] || :sha) do
case Cache.claim(hash, opts[:scope]) do
true ->
{:ok, hash}
false ->
# not cached yet: decode the term and insert it under its hash
case do_read_term(data, opts) do
{:ok, term} -> Cache.put(hash, term, opts[:scope])
err -> err
end
end
else
err -> err
end
end
# preferred, more secure load. Expected hash signature is supplied.
# Also faster if the item is already loaded, as the file read is skipped.
def load(path, hash, opts) do
case Cache.claim(hash, opts[:scope]) do
true ->
{:ok, hash}
false ->
# need to read and verify the file; read/3 returns the decoded term
case read(path, hash, opts) do
{:ok, data} -> Cache.put(hash, data, opts[:scope])
err -> err
end
end
end
# --------------------------------------------------------
@doc """
Read a file-based term into memory.
The reason you would use this instead of File.read is to verify the data against
a pre-computed hash.
Parameters:
* `path` - the path to the term file
* `hash` - the pre-computed hash of the file
* `opts` - a list of options. See below.
Options:
* `hash` - format of the hash. Valid formats include `:sha, :sha224, :sha256, :sha384, :sha512, :ripemd160`. If the hash option is not set, it will use `:sha` by default.
* `safe` - prevents the creation of new atoms. [See erlang docs](http://erlang.org/doc/man/erlang.html#binary_to_term-2).
On success, returns
`{:ok, term}`
"""
def read(path, hash, opts \\ [])
# insecure read: skips hash verification entirely.
# Not recommended in production - better to embed the expected hashes.
def read(path, :insecure, opts) do
with {:ok, data} <- File.read(path) do
do_read_term(data, opts)
else
err -> err
end
end
# secure read: verify the file contents against the supplied hash before
# decoding the term.
def read(path, hash, opts) do
with {:ok, data} <- File.read(path),
{:ok, data} <- Hash.verify(data, hash, opts[:hash] || :sha) do
do_read_term(data, opts)
else
err -> err
end
end
# --------------------------------------------------------
# Decode a term from binary data. Passes [:safe] to binary_to_term/2 unless
# the caller explicitly opted out with `safe: false`, so untrusted data
# cannot create new atoms. (The original comment here wrongly described an
# unzip option copied from another module.)
defp do_read_term(data, opts) do
opts =
case opts[:safe] do
false -> []
_ -> [:safe]
end
try do
{:ok, :erlang.binary_to_term(data, opts)}
rescue
_ -> {:error, :invalid_term}
end
end
end
|
lib/scenic/cache/term.ex
| 0.882225
| 0.521167
|
term.ex
|
starcoder
|
defmodule Accent.Scopes.Translation do
import Ecto.Query
@doc """
## Examples
iex> Accent.Scopes.Translation.not_id(Accent.Translation, "test")
#Ecto.Query<from t in Accent.Translation, where: t.id != ^"test">
"""
@spec not_id(Ecto.Queryable.t(), String.t()) :: Ecto.Queryable.t()
def not_id(query, id) do
from(t in query, where: t.id != ^id)
end
@doc """
Default ordering is by ascending key
## Examples
iex> Accent.Scopes.Translation.parse_order(Accent.Translation, nil)
#Ecto.Query<from t in Accent.Translation, order_by: [asc: t.key]>
iex> Accent.Scopes.Translation.parse_order(Accent.Translation, "key")
#Ecto.Query<from t in Accent.Translation, order_by: [asc: t.key]>
iex> Accent.Scopes.Translation.parse_order(Accent.Translation, "-key")
#Ecto.Query<from t in Accent.Translation, order_by: [desc: t.key]>
iex> Accent.Scopes.Translation.parse_order(Accent.Translation, "updated")
#Ecto.Query<from t in Accent.Translation, order_by: [asc: t.updated_at]>
iex> Accent.Scopes.Translation.parse_order(Accent.Translation, "-updated")
#Ecto.Query<from t in Accent.Translation, order_by: [desc: t.updated_at]>
iex> Accent.Scopes.Translation.parse_order(Accent.Translation, "index")
#Ecto.Query<from t in Accent.Translation, order_by: [asc: t.file_index]>
iex> Accent.Scopes.Translation.parse_order(Accent.Translation, "-index")
#Ecto.Query<from t in Accent.Translation, order_by: [desc: t.file_index]>
"""
@spec parse_order(Ecto.Queryable.t(), any()) :: Ecto.Queryable.t()
def parse_order(query, "index"), do: from(t in query, order_by: [asc: :file_index])
def parse_order(query, "-index"), do: from(t in query, order_by: [desc: :file_index])
def parse_order(query, "key"), do: from(t in query, order_by: [asc: :key])
def parse_order(query, "-key"), do: from(t in query, order_by: [desc: :key])
def parse_order(query, "updated"), do: from(t in query, order_by: [asc: :updated_at])
def parse_order(query, "-updated"), do: from(t in query, order_by: [desc: :updated_at])
def parse_order(query, _), do: from(t in query, order_by: [asc: :key])
@doc """
## Examples
iex> Accent.Scopes.Translation.active(Accent.Translation)
#Ecto.Query<from t in Accent.Translation, where: t.removed == false>
"""
@spec active(Ecto.Queryable.t()) :: Ecto.Queryable.t()
def active(query), do: from(t in query, where: [removed: false])
@doc """
## Examples
iex> Accent.Scopes.Translation.parse_conflicted(Accent.Translation, nil)
Accent.Translation
iex> Accent.Scopes.Translation.parse_conflicted(Accent.Translation, false)
#Ecto.Query<from t in Accent.Translation, where: t.conflicted == false>
iex> Accent.Scopes.Translation.parse_conflicted(Accent.Translation, true)
#Ecto.Query<from t in Accent.Translation, where: t.conflicted == true>
"""
@spec parse_conflicted(Ecto.Queryable.t(), nil | boolean()) :: Ecto.Queryable.t()
def parse_conflicted(query, nil), do: query
def parse_conflicted(query, false), do: not_conflicted(query)
def parse_conflicted(query, true), do: conflicted(query)
@doc """
## Examples
iex> Accent.Scopes.Translation.conflicted(Accent.Translation)
#Ecto.Query<from t in Accent.Translation, where: t.conflicted == true>
"""
@spec conflicted(Ecto.Queryable.t()) :: Ecto.Queryable.t()
def conflicted(query), do: from(t in query, where: [conflicted: true])
@doc """
## Examples
iex> Accent.Scopes.Translation.not_conflicted(Accent.Translation)
#Ecto.Query<from t in Accent.Translation, where: t.conflicted == false>
"""
@spec not_conflicted(Ecto.Queryable.t()) :: Ecto.Queryable.t()
def not_conflicted(query), do: from(t in query, where: [conflicted: false])
@doc """
## Examples
iex> Accent.Scopes.Translation.no_version(Accent.Translation)
#Ecto.Query<from t in Accent.Translation, where: is_nil(t.version_id)>
"""
@spec no_version(Ecto.Queryable.t()) :: Ecto.Queryable.t()
def no_version(query), do: from_version(query, nil)
@doc """
Scopes translations to a version; `nil` matches translations without one.

## Examples

    iex> Accent.Scopes.Translation.from_version(Accent.Translation, nil)
    #Ecto.Query<from t in Accent.Translation, where: is_nil(t.version_id)>
    iex> Accent.Scopes.Translation.from_version(Accent.Translation, "test")
    #Ecto.Query<from t in Accent.Translation, where: t.version_id == ^"test">
"""
@spec from_version(Ecto.Queryable.t(), any()) :: Ecto.Queryable.t()
def from_version(query, nil), do: from(t in query, where: is_nil(t.version_id))
def from_version(query, version_id), do: from(t in query, where: t.version_id == ^version_id)
@doc """
Scopes translations to a single revision.

## Examples

    iex> Accent.Scopes.Translation.from_revision(Accent.Translation, "test")
    #Ecto.Query<from t in Accent.Translation, where: t.revision_id == ^"test">
"""
@spec from_revision(Ecto.Queryable.t(), String.t()) :: Ecto.Queryable.t()
def from_revision(query, revision_id), do: from(t in query, where: t.revision_id == ^revision_id)
@doc """
Scopes translations to any of the given revisions.

## Examples

    iex> Accent.Scopes.Translation.from_revisions(Accent.Translation, ["test"])
    #Ecto.Query<from t in Accent.Translation, where: t.revision_id in ^["test"]>
"""
# Fix: the spec was previously written as `from_revision/2`, attaching it to
# the wrong (single-revision) function and leaving this one unspecced.
@spec from_revisions(Ecto.Queryable.t(), list(String.t())) :: Ecto.Queryable.t()
def from_revisions(query, revision_ids), do: from(t in query, where: t.revision_id in ^revision_ids)
@doc """
Scopes translations to a project via the revision's project association.

## Examples

    iex> Accent.Scopes.Translation.from_project(Accent.Translation, "test")
    #Ecto.Query<from t in Accent.Translation, left_join: p in assoc(t, :project), where: p.id == ^"test">
"""
@spec from_project(Ecto.Queryable.t(), String.t()) :: Ecto.Queryable.t()
def from_project(query, project_id) do
  from(t in query,
    left_join: p in assoc(t, :project),
    where: p.id == ^project_id
  )
end
@doc """
Scopes translations to a document; `nil` matches document-less translations
and `:all` leaves the query untouched.

## Examples

    iex> Accent.Scopes.Translation.from_document(Accent.Translation, nil)
    #Ecto.Query<from t in Accent.Translation, where: is_nil(t.document_id)>
    iex> Accent.Scopes.Translation.from_document(Accent.Translation, :all)
    Accent.Translation
    iex> Accent.Scopes.Translation.from_document(Accent.Translation, "test")
    #Ecto.Query<from t in Accent.Translation, where: t.document_id == ^"test">
"""
@spec from_document(Ecto.Queryable.t(), any()) :: Ecto.Queryable.t()
def from_document(query, nil), do: from(t in query, where: is_nil(t.document_id))
def from_document(query, :all), do: query
def from_document(query, document_id), do: from(t in query, where: t.document_id == ^document_id)
@doc """
Scopes translations to any of the given documents.

## Examples

    iex> Accent.Scopes.Translation.from_documents(Accent.Translation, ["test"])
    #Ecto.Query<from t in Accent.Translation, where: t.document_id in ^["test"]>
"""
@spec from_documents(Ecto.Queryable.t(), list(String.t())) :: Ecto.Queryable.t()
def from_documents(query, document_ids) do
  from(t in query, where: t.document_id in ^document_ids)
end
@doc """
Scopes translations to an exact key.

## Examples

    iex> Accent.Scopes.Translation.from_key(Accent.Translation, "test")
    #Ecto.Query<from t in Accent.Translation, where: t.key == ^"test">
"""
@spec from_key(Ecto.Queryable.t(), String.t()) :: Ecto.Queryable.t()
def from_key(query, key), do: from(t in query, where: t.key == ^key)
@doc """
Scopes translations to any of the given keys.

## Examples

    iex> Accent.Scopes.Translation.from_keys(Accent.Translation, ["test"])
    #Ecto.Query<from t in Accent.Translation, where: t.key in ^["test"]>
"""
@spec from_keys(Ecto.Queryable.t(), list(String.t())) :: Ecto.Queryable.t()
def from_keys(query, key_ids) do
  from(t in query, where: t.key in ^key_ids)
end
@doc """
Full-text-ish search: case-insensitive substring match on key and corrected
text; when the term is a UUID, also matches the translation id. Blank or
non-binary terms leave the query untouched.

## Examples

    iex> Accent.Scopes.Translation.from_search(Accent.Translation, "")
    Accent.Translation
    iex> Accent.Scopes.Translation.from_search(Accent.Translation, nil)
    Accent.Translation
    iex> Accent.Scopes.Translation.from_search(Accent.Translation, 1234)
    Accent.Translation
    iex> Accent.Scopes.Translation.from_search(Accent.Translation, "test")
    #Ecto.Query<from t in Accent.Translation, where: ilike(t.key, ^"%test%") or ilike(t.corrected_text, ^"%test%")>
    iex> Accent.Scopes.Translation.from_search(Accent.Translation, "030519c4-1d47-42bb-95ee-205880be01d9")
    #Ecto.Query<from t in Accent.Translation, where: ilike(t.key, ^"%030519c4-1d47-42bb-95ee-205880be01d9%") or ilike(t.corrected_text, ^"%030519c4-1d47-42bb-95ee-205880be01d9%"), or_where: t.id == ^"030519c4-1d47-42bb-95ee-205880be01d9">
"""
@spec from_search(Ecto.Queryable.t(), any()) :: Ecto.Queryable.t()
def from_search(query, term) when not is_binary(term) or term == "", do: query

def from_search(query, search_term) do
  pattern = "%#{search_term}%"

  searchable =
    from(t in query, where: ilike(t.key, ^pattern) or ilike(t.corrected_text, ^pattern))

  from_search_id(searchable, search_term)
end
# When the search term is a valid UUID, additionally match it against the
# primary key (OR-ed with the ilike conditions added by the caller).
defp from_search_id(query, term) do
  case Ecto.UUID.cast(term) do
    {:ok, uuid} -> from(t in query, or_where: t.id == ^uuid)
    _other -> query
  end
end
@doc """
Narrows the select to the lightweight key/text projection.

## Examples

    iex> Accent.Scopes.Translation.select_key_text(Accent.Translation)
    #Ecto.Query<from t in Accent.Translation, select: %{id: t.id, key: t.key, updated_at: t.updated_at, corrected_text: t.corrected_text}>
"""
@spec select_key_text(Ecto.Queryable.t()) :: Ecto.Queryable.t()
def select_key_text(query) do
  from(t in query,
    select: %{
      id: t.id,
      key: t.key,
      updated_at: t.updated_at,
      corrected_text: t.corrected_text
    }
  )
end
end
|
lib/accent/scopes/translation.ex
| 0.720467
| 0.436562
|
translation.ex
|
starcoder
|
defmodule Wavex.Chunk.Format do
  @moduledoc """
  A format chunk.
  """

  alias Wavex.FourCC

  @enforce_keys [
    :channels,
    :sample_rate,
    :byte_rate,
    :block_align,
    :bits_per_sample
  ]

  defstruct [
    :channels,
    :sample_rate,
    :byte_rate,
    :block_align,
    :bits_per_sample
  ]

  @type t :: %__MODULE__{
          channels: pos_integer,
          sample_rate: pos_integer,
          byte_rate: pos_integer,
          block_align: pos_integer,
          bits_per_sample: pos_integer
        }

  # Note the trailing space: RIFF four-character codes are always 4 bytes.
  @four_cc "fmt "

  @doc """
  The ID that identifies a format chunk.
  """
  @spec four_cc :: FourCC.t()
  def four_cc, do: @four_cc

  # The chunk body must be exactly 16 (0x10) bytes — the fixed PCM layout.
  @spec verify_size(non_neg_integer) :: :ok | {:error, {:unexpected_format_size, non_neg_integer}}
  defp verify_size(0x00000010), do: :ok
  defp verify_size(actual), do: {:error, {:unexpected_format_size, actual}}

  # Only format tag 0x0001 (uncompressed PCM) is supported.
  @spec verify_format(non_neg_integer) :: :ok | {:error, {:unsupported_format, non_neg_integer}}
  defp verify_format(0x0001), do: :ok
  defp verify_format(actual), do: {:error, {:unsupported_format, actual}}

  # Only 8-, 16- and 24-bit samples are accepted.
  @spec verify_bits_per_sample(non_neg_integer) ::
          :ok | {:error, {:unsupported_bits_per_sample, non_neg_integer}}
  defp verify_bits_per_sample(actual) when actual in [0x0008, 0x0010, 0x0018], do: :ok
  defp verify_bits_per_sample(actual), do: {:error, {:unsupported_bits_per_sample, actual}}

  @spec verify_channels(non_neg_integer) :: :ok | {:error, :zero_channels}
  defp verify_channels(0x0000), do: {:error, :zero_channels}
  defp verify_channels(_), do: :ok

  # The first clause matches when both arguments are equal (the repeated
  # variable name acts as an equality pattern).
  @spec verify_block_align(non_neg_integer, non_neg_integer) ::
          :ok
          | {:error,
             {:unexpected_block_align, %{expected: non_neg_integer, actual: non_neg_integer}}}
  defp verify_block_align(expected, expected), do: :ok

  defp verify_block_align(expected, actual) do
    {:error, {:unexpected_block_align, %{expected: expected, actual: actual}}}
  end

  # Same repeated-variable trick as verify_block_align/2.
  @spec verify_byte_rate(non_neg_integer, non_neg_integer) ::
          :ok
          | {:error,
             {:unexpected_byte_rate, %{expected: non_neg_integer, actual: non_neg_integer}}}
  defp verify_byte_rate(expected, expected), do: :ok

  defp verify_byte_rate(expected, actual) do
    {:error, {:unexpected_byte_rate, %{expected: expected, actual: actual}}}
  end

  @doc ~S"""
  Read a format chunk.
  """
  @spec read(binary) ::
          {:ok, t, binary}
          | {:error,
             :unexpected_eof
             | :zero_channels
             | {:unexpected_block_align, %{expected: non_neg_integer, actual: non_neg_integer}}
             | {:unexpected_byte_rate, %{expected: non_neg_integer, actual: non_neg_integer}}
             | {:unexpected_format_size, non_neg_integer}
             | {:unexpected_four_cc, %{actual: FourCC.t(), expected: FourCC.t()}}
             | {:unsupported_bits_per_sample, non_neg_integer}
             | {:unsupported_format, non_neg_integer}}
  def read(binary) do
    # All multi-byte fields are little-endian, per the RIFF/WAVE spec.
    with <<
           # 0 - 3
           fmt_id::binary-size(4),
           # 4 - 7
           size::32-little,
           # 8 - 9
           format::16-little,
           # 10 - 11
           channels::16-little,
           # 12 - 15
           sample_rate::32-little,
           # 16 - 19
           byte_rate::32-little,
           # 20 - 21
           block_align::16-little,
           # 22 - 23
           bits_per_sample::16-little,
           etc::binary
         >> <- binary,
         :ok <- FourCC.verify(fmt_id, @four_cc),
         :ok <- verify_size(size),
         :ok <- verify_format(format),
         :ok <- verify_channels(channels),
         :ok <- verify_bits_per_sample(bits_per_sample),
         # Cross-field invariants: block_align = channels * bytes per sample,
         # byte_rate = sample_rate * block_align.
         :ok <- verify_block_align(channels * div(bits_per_sample, 0x08), block_align),
         :ok <- verify_byte_rate(sample_rate * block_align, byte_rate) do
      {:ok,
       %__MODULE__{
         bits_per_sample: bits_per_sample,
         block_align: block_align,
         byte_rate: byte_rate,
         channels: channels,
         sample_rate: sample_rate
       }, etc}
    else
      # A bare binary means the header pattern failed to match — i.e. fewer
      # than 24 bytes were available.
      binary when is_binary(binary) -> {:error, :unexpected_eof}
      error -> error
    end
  end
end
|
lib/wavex/chunk/format.ex
| 0.88029
| 0.697854
|
format.ex
|
starcoder
|
defmodule Record do
  @moduledoc """
  Module to work, define and import records.

  Records are simply tuples where the first element is an atom:

      iex> Record.is_record {User, "john", 27}
      true

  This module provides conveniences for working with records at
  compilation time, where compile-time field names are used to
  manipulate the tuples, providing fast operations on top of
  the tuples compact structure.

  In Elixir, records are used mostly in two situations:

    1. to work with short, internal data
    2. to interface with Erlang records

  The macros `defrecord/3` and `defrecordp/3` can be used to create
  records while `extract/2` can be used to extract records from Erlang
  files.

  ## Types

  Types can be defined for tuples with the `record/2` macro (only available
  in typespecs). Like with the generated record macros it will expand to
  a tuple.

      defmodule MyModule do
        require Record
        Record.defrecord :user, name: "john", age: 25

        @type user :: record(:user, name: String.t, age: integer)
        # expands to: `@type user :: {:user, String.t, integer}`
      end

  """

  @doc """
  Extracts record information from an Erlang file.

  Returns a quoted expression containing the fields as a list
  of tuples. It expects the record name to be an atom and the
  library path to be a string at expansion time.

  ## Examples

      iex> Record.extract(:file_info, from_lib: "kernel/include/file.hrl")
      [size: :undefined, type: :undefined, access: :undefined, atime: :undefined,
       mtime: :undefined, ctime: :undefined, mode: :undefined, links: :undefined,
       major_device: :undefined, minor_device: :undefined, inode: :undefined,
       uid: :undefined, gid: :undefined]

  """
  def extract(name, opts) when is_atom(name) and is_list(opts) do
    Record.Extractor.extract(name, opts)
  end

  @doc """
  Extracts all records information from an Erlang file.

  Returns a keyword list containing extracted record names as keys, and
  lists of tuples describing the fields as values. It expects a named
  argument :from or :from_lib, which correspond to *include* or
  *include_lib* attribute from Erlang modules, respectively.
  """
  def extract_all(opts) when is_list(opts) do
    Record.Extractor.extract_all(opts)
  end

  @doc """
  Checks if the given `data` is a record of `kind`.

  This is implemented as a macro so it can be used in guard clauses.

  ## Examples

      iex> record = {User, "john", 27}
      iex> Record.is_record(record, User)
      true

  """
  defmacro is_record(data, kind) do
    # In guards only guard-safe BIFs may appear and `data` may not be bound
    # to a temporary; outside guards we bind once to avoid re-evaluating
    # `data` three times.
    case Macro.Env.in_guard?(__CALLER__) do
      true ->
        quote do
          is_tuple(unquote(data)) and tuple_size(unquote(data)) > 0
            and :erlang.element(1, unquote(data)) == unquote(kind)
        end
      false ->
        quote do
          result = unquote(data)
          is_tuple(result) and tuple_size(result) > 0
            and :erlang.element(1, result) == unquote(kind)
        end
    end
  end

  @doc """
  Checks if the given `data` is a record.

  This is implemented as a macro so it can be used in guard clauses.

  ## Examples

      iex> record = {User, "john", 27}
      iex> Record.is_record(record)
      true
      iex> tuple = {}
      iex> Record.is_record(tuple)
      false

  """
  defmacro is_record(data) do
    # Same guard/non-guard split as is_record/2, but only checks that the
    # first element is an atom (any tag).
    case Macro.Env.in_guard?(__CALLER__) do
      true ->
        quote do
          is_tuple(unquote(data)) and tuple_size(unquote(data)) > 0
            and is_atom(:erlang.element(1, unquote(data)))
        end
      false ->
        quote do
          result = unquote(data)
          is_tuple(result) and tuple_size(result) > 0
            and is_atom(:erlang.element(1, result))
        end
    end
  end

  @doc """
  Defines a set of macros to create and access a record.

  The macros are going to have `name`, a tag (which defaults)
  to the name if none is given, and a set of fields given by
  `kv`.

  ## Examples

      defmodule User do
        require Record
        Record.defrecord :user, [name: "meg", age: "25"]
      end

  In the example above, a set of macros named `user` but with different
  arities will be defined to manipulate the underlying record:

      # To create records
      record = user()        #=> {:user, "meg", 25}
      record = user(age: 26) #=> {:user, "meg", 26}

      # To get a field from the record
      user(record, :name) #=> "meg"

      # To update the record
      user(record, age: 26) #=> {:user, "meg", 26}

      # Convert a record to a keyword list
      user(record) #=> [name: "meg", age: 26]

  The generated macros can also be used in order to pattern match on records and
  to bind variables during the match:

      record = user() #=> {:user, "meg", 25}

      user(name: name) = record
      name #=> "meg"

  By default, Elixir uses the record name as the first element of
  the tuple (the tag). But it can be changed to something else:

      defmodule User do
        require Record
        Record.defrecord :user, User, name: nil
      end

      require User
      User.user() #=> {User, nil}

  ## Defining extracted records with anonymous functions

  If a record defines an anonymous function, an ArgumentError
  will occur if you attempt to create a record with it.
  This can occur unintentionally when defining a record after extracting
  it from an Erlang library that uses anonymous functions for defaults.

      Record.defrecord :my_rec, Record.extract(...)
      #=> ** (ArgumentError) invalid value for record field fun_field,
          cannot escape #Function<12.90072148/2 in :erl_eval.expr/5>.

  To work around this error, redefine the field with your own &M.f/a function,
  like so:

      defmodule MyRec do
        require Record
        Record.defrecord :my_rec, Record.extract(...) |> Keyword.merge(fun_field: &__MODULE__.foo/2)
        def foo(bar, baz), do: IO.inspect({bar, baz})
      end

  """
  defmacro defrecord(name, tag \\ nil, kv) do
    # bind_quoted evaluates the arguments once in the caller's context; the
    # generated `defmacro`s are what users actually invoke.
    quote bind_quoted: [name: name, tag: tag, kv: kv] do
      tag = tag || name
      fields = Record.__fields__(:defrecord, kv)

      defmacro(unquote(name)(args \\ [])) do
        Record.__access__(unquote(tag), unquote(fields), args, __CALLER__)
      end

      defmacro(unquote(name)(record, args)) do
        Record.__access__(unquote(tag), unquote(fields), record, args, __CALLER__)
      end
    end
  end

  @doc """
  Same as `defrecord/3` but generates private macros.
  """
  defmacro defrecordp(name, tag \\ nil, kv) do
    quote bind_quoted: [name: name, tag: tag, kv: kv] do
      tag = tag || name
      fields = Record.__fields__(:defrecordp, kv)

      defmacrop(unquote(name)(args \\ [])) do
        Record.__access__(unquote(tag), unquote(fields), args, __CALLER__)
      end

      defmacrop(unquote(name)(record, args)) do
        Record.__access__(unquote(tag), unquote(fields), record, args, __CALLER__)
      end
    end
  end

  # Normalizes of record fields to have default values.
  # Bare atoms get a nil default; values are escaped so they can be embedded
  # in the generated macros (unescapable values, e.g. anonymous functions,
  # raise ArgumentError — see the defrecord/3 doc).
  @doc false
  def __fields__(type, fields) do
    :lists.map(fn
      {key, val} when is_atom(key) ->
        try do
          Macro.escape(val)
        rescue
          e in [ArgumentError] ->
            raise ArgumentError, "invalid value for record field #{key}, " <> Exception.message(e)
        else
          val -> {key, val}
        end
      key when is_atom(key) ->
        {key, nil}
      other ->
        raise ArgumentError, "#{type} fields must be atoms, got: #{inspect other}"
    end, fields)
  end

  # Callback invoked from record/0 and record/1 macros.
  # Dispatches on the macro argument: an atom asks for a field index, a
  # keyword list creates/matches a record, anything else is expected to
  # expand to a record tuple and is converted to a keyword list.
  @doc false
  def __access__(atom, fields, args, caller) do
    cond do
      is_atom(args) ->
        index(atom, fields, args)
      Keyword.keyword?(args) ->
        create(atom, fields, args, caller)
      true ->
        case Macro.expand(args, caller) do
          {:{}, _, [^atom|list]} when length(list) == length(fields) ->
            record = List.to_tuple([atom|list])
            Macro.escape(Record.__keyword__(atom, fields, record))
          {^atom, arg} when length(fields) == 1 ->
            Macro.escape(Record.__keyword__(atom, fields, {atom, arg}))
          _ ->
            quote do: Record.__keyword__(unquote(atom), unquote(fields), unquote(args))
        end
    end
  end

  # Callback invoked from the record/2 macro.
  # An atom argument reads a single field; a keyword list updates fields.
  @doc false
  def __access__(atom, fields, record, args, caller) do
    cond do
      is_atom(args) ->
        get(atom, fields, record, args)
      Keyword.keyword?(args) ->
        update(atom, fields, record, args, caller)
      true ->
        msg = "expected arguments to be a compile time atom or keywords, got: #{Macro.to_string args}"
        raise ArgumentError, msg
    end
  end

  # Gets the index of field.
  defp index(atom, fields, field) do
    if index = find_index(fields, field, 0) do
      index - 1 # Convert to Elixir index
    else
      raise ArgumentError, "record #{inspect atom} does not have the key: #{inspect field}"
    end
  end

  # Creates a new record with the given default fields and keyword values.
  # In a match context, unspecified fields become `_` wildcards; otherwise
  # they take their declared defaults. Leftover keys are unknown fields.
  defp create(atom, fields, keyword, caller) do
    in_match = Macro.Env.in_match?(caller)
    {match, remaining} =
      Enum.map_reduce(fields, keyword, fn({field, default}, each_keyword) ->
        new_fields =
          case Keyword.has_key?(each_keyword, field) do
            true -> Keyword.get(each_keyword, field)
            false ->
              case in_match do
                true -> {:_, [], nil}
                false -> Macro.escape(default)
              end
          end
        {new_fields, Keyword.delete(each_keyword, field)}
      end)
    case remaining do
      [] ->
        {:{}, [], [atom|match]}
      _ ->
        keys = for {key, _} <- remaining, do: key
        raise ArgumentError, "record #{inspect atom} does not have the key: #{inspect hd(keys)}"
    end
  end

  # Updates a record given by var with the given keyword.
  # Compiles to nested :erlang.setelement/3 calls, one per updated field.
  defp update(atom, fields, var, keyword, caller) do
    if Macro.Env.in_match?(caller) do
      raise ArgumentError, "cannot invoke update style macro inside match"
    end
    Enum.reduce keyword, var, fn({key, value}, acc) ->
      index = find_index(fields, key, 0)
      if index do
        quote do
          :erlang.setelement(unquote(index), unquote(acc), unquote(value))
        end
      else
        raise ArgumentError, "record #{inspect atom} does not have the key: #{inspect key}"
      end
    end
  end

  # Gets a record key from the given var.
  defp get(atom, fields, var, key) do
    index = find_index(fields, key, 0)
    if index do
      quote do
        :erlang.element(unquote(index), unquote(var))
      end
    else
      raise ArgumentError, "record #{inspect atom} does not have the key: #{inspect key}"
    end
  end

  # Returns the Erlang (1-based) tuple index of field k: the `+ 2` skips the
  # tag element and converts to 1-based indexing. nil when the key is absent.
  defp find_index([{k, _}|_], k, i), do: i + 2
  defp find_index([{_, _}|t], k, i), do: find_index(t, k, i + 1)
  defp find_index([], _k, _i), do: nil

  # Returns a keyword list of the record
  @doc false
  def __keyword__(atom, fields, record) do
    if is_record(record, atom) do
      [_tag|values] = Tuple.to_list(record)
      join_keyword(fields, values, [])
    else
      msg = "expected argument to be a literal atom, literal keyword or a #{inspect atom} record, got runtime: #{inspect record}"
      raise ArgumentError, msg
    end
  end

  # Zips field names with tuple values, preserving field order.
  defp join_keyword([{field, _default}|fields], [value|values], acc),
    do: join_keyword(fields, values, [{field, value}| acc])
  defp join_keyword([], [], acc),
    do: :lists.reverse(acc)
end
|
lib/elixir/lib/record.ex
| 0.844585
| 0.737064
|
record.ex
|
starcoder
|
defmodule Ecto.Adapters.Postgres do
  @moduledoc """
  Adapter module for PostgreSQL.

  It uses `postgrex` for communicating to the database
  and manages a connection pool with `poolboy`.

  ## Features

    * Full query support (including joins, preloads and associations)
    * Support for transactions
    * Support for data migrations
    * Support for ecto.create and ecto.drop operations
    * Support for transactional tests via `Ecto.Adapters.SQL`

  ## Options

  Postgres options split in different categories described
  below. All options should be given via the repository
  configuration.

  ### Compile time options

  Those options should be set in the config file and require
  recompilation in order to make an effect.

    * `:adapter` - The adapter name, in this case, `Ecto.Adapters.Postgres`
    * `:timeout` - The default timeout to use on queries, defaults to `5000`
    * `:log_level` - The level to use when logging queries (default: `:debug`)

  ### Connection options

    * `:hostname` - Server hostname
    * `:port` - Server port (default: 5432)
    * `:username` - Username
    * `:password` - Password
    * `:parameters` - Keyword list of connection parameters
    * `:ssl` - Set to true if ssl should be used (default: false)
    * `:ssl_opts` - A list of ssl options, see Erlang's `ssl` docs
    * `:connect_timeout` - The timeout for establishing new connections (default: 5000)
    * `:extensions` - Specify extensions to the postgres adapter

  ### Pool options

    * `:size` - The number of connections to keep in the pool (default: 10)
    * `:lazy` - When true, connections to the repo are lazily started (default: true)
    * `:max_overflow` - The maximum overflow of connections (default: 0) (see poolboy docs)

  ### Storage options

    * `:encoding` - the database encoding (default: "UTF8")
    * `:template` - the template to create the database from
    * `:lc_collate` - the collation order
    * `:lc_ctype` - the character classification
  """

  # Inherit all behaviour from Ecto.Adapters.SQL
  use Ecto.Adapters.SQL, :postgrex

  # And provide a custom storage implementation
  @behaviour Ecto.Adapter.Storage

  ## Storage API

  # Creates the configured database via psql.
  # Returns :ok, {:error, :already_up}, or {:error, output}.
  @doc false
  def storage_up(opts) do
    database = Keyword.fetch!(opts, :database)
    encoding = Keyword.get(opts, :encoding, "UTF8")

    # Fix: the previous version rebound `extra` *inside* `if` blocks
    # (`if t = ... do extra = extra <> ... end`). In Elixir, rebindings
    # inside an `if` do not escape the block, so the :template,
    # :lc_collate and :lc_ctype options were silently dropped. Thread the
    # accumulator explicitly instead.
    extra =
      ""
      |> append_option(opts, :template, &" TEMPLATE=#{&1}")
      |> append_option(opts, :lc_collate, &" LC_COLLATE='#{&1}'")
      |> append_option(opts, :lc_ctype, &" LC_CTYPE='#{&1}'")

    {output, status} =
      run_with_psql(opts, "CREATE DATABASE " <> database <>
                          " ENCODING='#{encoding}'" <> extra)

    cond do
      status == 0 -> :ok
      String.contains?(output, "already exists") -> {:error, :already_up}
      true -> {:error, output}
    end
  end

  # Appends `render.(value)` to `extra` when `key` is present in `opts`.
  defp append_option(extra, opts, key, render) do
    case Keyword.get(opts, key) do
      nil -> extra
      value -> extra <> render.(value)
    end
  end

  # Drops the configured database via psql.
  # Returns :ok, {:error, :already_down}, or {:error, output}.
  @doc false
  def storage_down(opts) do
    {output, status} = run_with_psql(opts, "DROP DATABASE #{opts[:database]}")

    cond do
      status == 0 -> :ok
      String.contains?(output, "does not exist") -> {:error, :already_down}
      true -> {:error, output}
    end
  end

  # Runs the given SQL command through the `psql` executable against the
  # template1 database, returning {output, exit_status}.
  defp run_with_psql(database, sql_command) do
    unless System.find_executable("psql") do
      raise "could not find executable `psql` in path, " <>
            "please guarantee it is available before running ecto commands"
    end

    env =
      if password = database[:password] do
        [{"PGPASSWORD", password}]
      else
        []
      end

    # Fix: as in storage_up/1, `args = [... | args]` inside `if` blocks never
    # escaped the block, so the -U/-p flags were dropped. Rebind at the outer
    # scope. Order is preserved: ["-p", port, "-U", username, ...].
    args = []
    args = if username = database[:username], do: ["-U", username | args], else: args
    args = if port = database[:port], do: ["-p", to_string(port) | args], else: args

    host = database[:hostname] || System.get_env("PGHOST") || "localhost"
    args = args ++ ["--quiet", "--host", host, "-d", "template1", "-c", sql_command]
    System.cmd("psql", args, env: env, stderr_to_stdout: true)
  end

  # DDL statements (migrations) can run inside a transaction on Postgres.
  @doc false
  def supports_ddl_transaction? do
    true
  end
end
|
lib/ecto/adapters/postgres.ex
| 0.766992
| 0.566139
|
postgres.ex
|
starcoder
|
defmodule Ash.Query.Operator.IsNil do
  @moduledoc """
  left is_nil true/false

  This predicate matches if the left is nil when the right is `true` or if the
  left is not nil when the right is `false`
  """

  # Fix: the `%Ref{}` patterns in compare/2 below reference `Ref` without any
  # alias in this module, which cannot compile as written. Alias it explicitly.
  # (If `use Ash.Query.Operator` already injects this alias, the duplicate is
  # harmless — TODO confirm against Ash.Query.Operator.__using__/1.)
  alias Ash.Query.Ref

  use Ash.Query.Operator,
    operator: :is_nil,
    predicate?: true,
    types: [[:any, :boolean]]

  # Evaluates the predicate for concrete values: `left is_nil true` holds when
  # left is nil; `left is_nil false` holds when it is not.
  def evaluate(%{left: left, right: is_nil?}) do
    {:known, is_nil(left) == is_nil?}
  end

  # Renders the operator for inspection, e.g. `attr is nil`.
  def to_string(%{left: left, right: right}, opts) do
    import Inspect.Algebra

    text =
      if right do
        " is nil"
      else
        " is not nil"
      end

    concat([
      to_doc(left, opts),
      text
    ])
  end

  # NOTE: in each clause below, the repeated `same_ref` binding requires both
  # sides to refer to the *same* attribute reference — Elixir patterns with a
  # repeated variable only match when the occurrences are equal.

  # `ref is_nil true` and `ref == nil` say the same thing.
  def compare(%__MODULE__{left: %Ref{} = same_ref, right: true}, %Ash.Query.Operator.Eq{
        left: %Ref{} = same_ref,
        right: nil
      }) do
    :mutually_inclusive
  end

  # `ref is_nil false` and `ref == nil` cannot both hold, and one must hold.
  def compare(%__MODULE__{left: %Ref{} = same_ref, right: false}, %Ash.Query.Operator.Eq{
        left: %Ref{} = same_ref,
        right: nil
      }) do
    :mutually_exclusive_and_collectively_exhaustive
  end

  # Comparing against another ref: nothing can be concluded statically.
  def compare(%__MODULE__{left: %Ref{} = same_ref, right: false}, %Ash.Query.Operator.Eq{
        left: %Ref{} = same_ref,
        right: %Ref{}
      }) do
    :unknown
  end

  # `ref == <non-nil literal>` implies `ref is_nil false`.
  def compare(%__MODULE__{left: %Ref{} = same_ref, right: false}, %Ash.Query.Operator.Eq{
        left: %Ref{} = same_ref
      }) do
    :right_includes_left
  end

  # `is_nil true` vs `is_nil false` on the same ref: exact opposites.
  def compare(%__MODULE__{left: %Ref{} = same_ref, right: true}, %__MODULE__{
        left: %Ref{} = same_ref,
        right: false
      }) do
    :mutually_exclusive_and_collectively_exhaustive
  end

  def compare(%__MODULE__{left: %Ref{} = same_ref, right: false}, %__MODULE__{
        left: %Ref{} = same_ref,
        right: true
      }) do
    :mutually_exclusive_and_collectively_exhaustive
  end

  # Identical predicates on the same ref.
  def compare(%__MODULE__{left: %Ref{} = same_ref, right: right}, %__MODULE__{
        left: %Ref{} = same_ref,
        right: right
      })
      when is_boolean(right) do
    :mutually_inclusive
  end

  def compare(_left, _right) do
    :unknown
  end
end
|
lib/ash/query/operator/is_nil.ex
| 0.847653
| 0.522202
|
is_nil.ex
|
starcoder
|
defmodule ThinNotionApi.Blocks do
  @moduledoc """
  This module contains functions to interact and modify Notion blocks.

  A block object represents content within Notion. Blocks can be text, lists, media, and more.
  """

  import ThinNotionApi.Base

  alias ThinNotionApi.Types

  @doc """
  Retrieves a Block object using the ID specified.

  ## Examples:
      iex> ThinNotionApi.Blocks.retrieve_block(block_id)
      {:ok, %{...}}
  """
  @spec retrieve_block(String.t()) :: Types.Response.t()
  def retrieve_block(block_id), do: get("blocks/#{block_id}")

  @doc """
  Returns a paginated array of child block objects contained in the block using the ID specified. In order to receive a complete representation of a block, you may need to recursively retrieve the block children of child blocks.

  🚧 Returns only the first level of children for the specified block. See block objects for more detail on determining if that block has nested children.

  The response may contain fewer than page_size of results.

  See Pagination for details about how to use a cursor to iterate through the list.

  ## Examples:
      iex> ThinNotionApi.Blocks.retrieve_block_children("9b4a624d5a18482ab2187e54166edda7")
      {:ok, %{...}}
  """
  @spec retrieve_block_children(String.t(), %{ start_cursor: String.t(), page_size: integer() } | %{}) :: Types.Response.t()
  def retrieve_block_children(block_id, params \\ %{}) do
    get("blocks/#{block_id}/children", params)
  end

  @doc """
  Updates the content for the specified block_id based on the block type. Supported fields based on the block object type (see Block object for available fields and the expected input for each field).

  Note: The update replaces the entire value for a given field. If a field is omitted (ex: omitting checked when updating a to_do block), the value will not be changed.

  ## Examples:
      iex> ThinNotionApi.Blocks.update_block("c4c027f4ea7c41c5908d63a7f5a9c32c", %{
        paragraph: %{
          text: [%{
            type: "text",
            text: %{
              content: "Hello DOGE!",
            }
          }],
        }
      })
      {:ok, %{...}}
  """
  @spec update_block(String.t(), %{ archived: boolean()} | map()) :: Types.Response.t()
  def update_block(block_id, body_params) do
    patch("blocks/#{block_id}", body_params)
  end

  @doc """
  Creates and appends new children blocks to the parent block_id specified.

  Returns a paginated list of newly created first level children block objects.

  ## Examples:
      iex> ThinNotionApi.Blocks.append_block_children("c4c027f4ea7c41c5908d63a7f5a9c32c", [%{
        object: "block",
        type: "paragraph",
        paragraph: %{
          text: [%{
            type: "text",
            text: %{
              content: "Testing for append_block_children",
            }
          }]
        }
      }])
      {:ok, %{...}}
  """
  @spec append_block_children(String.t(), list(map())) :: Types.Response.t()
  def append_block_children(block_id, children) do
    patch("blocks/#{block_id}/children", %{children: children})
  end

  @doc """
  Sets a Block object, including page blocks, to archived: true using the ID specified. Note: in the Notion UI application, this moves the block to the "Trash" where it can still be accessed and restored.

  To restore the block with the API, use the Update a block or Update page respectively.

  ## Examples:
      iex> ThinNotionApi.Blocks.delete_block("9b4a624d5a18482ab2187e54166edda7")
      {:ok, %{...}}
  """
  @spec delete_block(String.t()) :: Types.Response.t()
  def delete_block(block_id), do: delete("blocks/#{block_id}")
end
|
lib/thin_notion_api/blocks.ex
| 0.88823
| 0.469155
|
blocks.ex
|
starcoder
|
defmodule TelemetryRegistry do
@moduledoc """
TelemetryRegistry provides tools for the discovery and documentation of [telemetry](https://github.com/beam-telemetry/telemetry)
events within your applications.
## Telemetry Event Definitions and Declaration
Users want to know what telemetry events are available in your library, what they mean, as well as what
the measurements and metadata maps contain. TelemetryRegistry creates an official standard and mechanism
for telemetry event declaration and definition.
### Who Should Document Events?
Library authors in particular should provide event declarations for documentation and to simplify tracing.
Of course, everyone should document their modules!
### Where Should I Declare Events?
Events should only be declared once, usually in the module from which it originates. Event names should _always_
be namespaced to your application or library. For example, if your application is an http client, your events
should start with the name of your application, not a generic name.
**Do:** `[:tesla, :request, :stop]`
**Don't:** `[:http_client, :request, :stop]`
### Event Definition Format
Events are declared using the `telemetry_event` module attribute. The attribute accepts an event definition
which are used for producing documentation and event discovery. All definition keys are required.
```elixir
%{
event: [:my_app, :event, :stop],
description: "A description of what the event is and when it is emitted",
measurements: "A string containing a pseudo or typespec - see examples",
metadata: "A string containing a pseudo or real typespec - see examples"
}
```
```erlang
\#{
event => [my_app, event, stop],
description => <<"A description of what the event is and when it is emitted">>,
measurements => <<"A string containing a pseudo or typespec - see examples">>,
metadata => <<"A string containing a pseudo or real typespec - see examples">>
}
```
#### Elixir
Elixir does not allow for declaring a custom attribute multiple times by default. We have included macros
to help with this and to provide a way to include event documentation.
```elixir
defmodule TestElixirApp do
use TelemetryRegistry
telemetry_event %{
event: [:test_elixir_app, :single, :event],
description: "emitted when this event happens",
measurements: "%{duration: non_neg_integer()}",
metadata: "%{status: status(), name: String.t()}"
}
@moduledoc \"""
Module documentation...
## Telemetry
\#{telemetry_docs()}
\"""
end
```
Add `use TelemetryRegistry` at the top of your module to prep your module for defining events. This
handles setting up everything needed to declare events and the very helpful `telemetry_event/1`
macro.
### Event Discovery
Events can be discovered by invoking `discover_all`, usually during application startup. The registry
will walk through the application tree and aggregate all events. The events are cached, so this should
only be invoked once at startup. You can view all declared events using `list_events/0`. It is also possible
to limit event discovery to a particular application tree by passing an application name to `discover_all/1`.
## Distributed Tracing
Event discovery is critical for supporting distributed tracing of black-box libraries used
in your application. Library authors are encouraged to use telemetry events in their libraries to provide
insight of internal operations to users in a vendor-agnostic manner.
TelemetryRegistry provides a mechanism through `spannable_events/0` for distributed tracing library authors
to discover events which can be used to start and stop child spans by registering telemetry event handlers
automatically at runtime with no user intervention. Library authors can then provide additional mechanisms
for users to enhance spans with attributes created from telemetry event measurements and metadata.
"""
@typedoc """
An application to discover events from.
"""
@type application() :: :telemetry_registry.application()

@typedoc """
A tuple containing the telemetry event, the module in which it was declared, and event definition meta:
`{:telemetry.event_name(), module(), event_meta()}`
"""
@type event() :: :telemetry_registry.event()

@typedoc """
An event definition is composed of an event, description, measurements description, and metadata description.
"""
@type event_definition() :: :telemetry_registry.event_definition()

@typedoc """
A description of what the event represents and when it is emitted.
"""
@type event_description() :: :telemetry_registry.event_description()

@typedoc """
A string representation of the measurements emitted with the event. This should resemble a typespec but is
not limited to the typespec format, i.e. you can include a comment on which unit a value is in. The objective
is to inform users.
"""
@type event_measurements() :: :telemetry_registry.event_measurements()

@typedoc """
A string representation of the metadata emitted with the event. This should resemble a typespec but is
not limited to the typespec format, i.e. you can include comments or example values. The objective
is to inform users what is available.
"""
@type event_metadata() :: :telemetry_registry.event_metadata()

@typedoc """
A map of event definition meta for an event containing the event, measurements, and metadata descriptions
if the event was declared with an event definition. Otherwise, this value will be an empty map.
"""
@type event_meta() :: :telemetry_registry.event_meta()

@typedoc """
A list of spannable events known to the registry in the format of `{event_prefix, event_suffixes}`. For
example, given events `[:my_app, :request, :start], [:my_app, :request, :stop], [:my_app, :request, :exception]`
a span can be created from the `:start` -> `:stop` or the `:start` -> `:exception` events. These are aggregated
as a spannable event `{[:my_app, :request], [:start, :stop, :exception]}`.
"""
@type spannable_event() :: :telemetry_registry.spannable_event()
# Imports the `telemetry_docs/0` and `telemetry_event/1` macros into the caller
# and registers the `:telemetry_event` module attribute.
defmacro __using__(_opts) do
  quote do
    import unquote(__MODULE__), only: [telemetry_docs: 0, telemetry_event: 1]

    # `accumulate: true` lets a module declare many events; `persist: true`
    # stores them in the compiled module's attribute chunk so they remain
    # readable at runtime (via `__info__(:attributes)`) after compilation.
    Module.register_attribute(__MODULE__, :telemetry_event,
      accumulate: true,
      persist: true
    )
  end
end
@doc """
Declares a telemetry event. Accepts a telemetry event definition `t:event_definition/0`.
"""
defmacro telemetry_event(event) do
  # Appends to the accumulating `:telemetry_event` attribute set up in
  # `__using__/1`.
  quote do
    @telemetry_event unquote(event)
  end
end
@doc """
Generates telemetry event documentation formatted in Markdown for use in your documentation.
"""
defmacro telemetry_docs do
  # Expands to a call against the *caller's* module so the docs reflect the
  # events declared where the macro is used.
  quote do
    TelemetryRegistry.docs_for(__MODULE__)
  end
end
@doc """
Generate telemetry event documentation formatted in Markdown for a given module.
"""
@spec docs_for(module()) :: String.t()
def docs_for(module) do
  # Start the pipeline with the bare value (idiomatic pipe style); each event
  # is formatted as an iodata fragment and flattened into one binary at the end.
  module
  |> get_events()
  |> Enum.map(&format_event/1)
  |> IO.iodata_to_binary()
end
# Formats one declared event as a Markdown bullet. The map clause handles
# events declared with a full event definition (description/measurements/
# metadata); the list clause handles bare event names.
defp format_event(event) when is_map(event) do
  """
  * `#{inspect(event[:event])}`
  * Description: #{event[:description]}
  * Measurements: `#{event[:measurements]}`
  * Metadata: `#{event[:metadata]}`
  """
end

defp format_event(event) when is_list(event) do
  """
  * `#{inspect(event)}`
  """
end
# Fetches the events declared on `module`.
#
# `Module.get_attribute/3` only works while the module is still being
# compiled; for an already-compiled module it raises, so we fall back to
# reading the persisted attribute chunk via `__info__(:attributes)`.
defp get_events(module) do
  try do
    Module.get_attribute(module, :telemetry_event, [])
  rescue
    _ ->
      module.__info__(:attributes)
      |> Keyword.get_values(:telemetry_event)
      |> List.flatten()
      |> Enum.map(fn
        # Persisted map definitions come back wrapped in a one-element list;
        # unwrap so both code paths return the same shape.
        [event] when is_map(event) -> event
        event -> event
      end)
  end
end
@doc """
Discover all declared telemetry events in the application it is invoked from and all child applications.
This would normally be invoked during application startup.
"""
@spec discover_all() :: :ok
defdelegate discover_all(), to: :telemetry_registry

@doc """
Discover all declared telemetry events in the given application and its child applications. This is
typically used in libraries leveraging `telemetry_registry` where it would be necessary for the user
to define what the root application is, e.g. in tracing bridge libraries.
"""
@spec discover_all(application()) :: :ok
defdelegate discover_all(application), to: :telemetry_registry

@doc """
Returns a list of all registered events.

Example

```
iex> TelemetryRegistry.list_events()
[{[:my_app, :request], MyApp, %{description: "Event description", measurements: "Measurements description", metadata: "Metadata description"}}]
```
"""
@spec list_events() :: [event()]
defdelegate list_events(), to: :telemetry_registry

@doc """
Returns a list of spannable events.

Example

```
iex> TelemetryRegistry.spannable_events()
[{[:my_app, :request], [:start, :stop, :exception]}]
```
"""
@spec spannable_events() :: [spannable_event()]
defdelegate spannable_events(), to: :telemetry_registry
end
|
lib/telemetry_registry.ex
| 0.888985
| 0.812793
|
telemetry_registry.ex
|
starcoder
|
defmodule Google.Rpc.RetryInfo do
  @moduledoc false
  # Generated code for the `google.rpc.RetryInfo` proto3 message.

  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          retry_delay: Google.Protobuf.Duration.t() | nil
        }
  defstruct [:retry_delay]

  field :retry_delay, 1, type: Google.Protobuf.Duration
end
defmodule Google.Rpc.DebugInfo do
  @moduledoc false
  # Generated code for the `google.rpc.DebugInfo` proto3 message.

  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          stack_entries: [String.t()],
          detail: String.t()
        }
  defstruct [:stack_entries, :detail]

  field :stack_entries, 1, repeated: true, type: :string
  field :detail, 2, type: :string
end
defmodule Google.Rpc.QuotaFailure.Violation do
  @moduledoc false
  # Generated code for the nested `google.rpc.QuotaFailure.Violation` message.

  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          subject: String.t(),
          description: String.t()
        }
  defstruct [:subject, :description]

  field :subject, 1, type: :string
  field :description, 2, type: :string
end
defmodule Google.Rpc.QuotaFailure do
  @moduledoc false
  # Generated code for the `google.rpc.QuotaFailure` proto3 message.

  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          violations: [Google.Rpc.QuotaFailure.Violation.t()]
        }
  defstruct [:violations]

  field :violations, 1, repeated: true, type: Google.Rpc.QuotaFailure.Violation
end
defmodule Google.Rpc.ErrorInfo.MetadataEntry do
  @moduledoc false
  # Generated map-entry message backing `Google.Rpc.ErrorInfo`'s `metadata`
  # map field (`map: true`).

  use Protobuf, map: true, syntax: :proto3

  @type t :: %__MODULE__{
          key: String.t(),
          value: String.t()
        }
  defstruct [:key, :value]

  field :key, 1, type: :string
  field :value, 2, type: :string
end
defmodule Google.Rpc.ErrorInfo do
  @moduledoc false
  # Generated code for the `google.rpc.ErrorInfo` proto3 message.

  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          reason: String.t(),
          domain: String.t(),
          metadata: %{String.t() => String.t()}
        }
  defstruct [:reason, :domain, :metadata]

  field :reason, 1, type: :string
  field :domain, 2, type: :string
  field :metadata, 3, repeated: true, type: Google.Rpc.ErrorInfo.MetadataEntry, map: true
end
defmodule Google.Rpc.PreconditionFailure.Violation do
  @moduledoc false
  # Generated code for the nested `google.rpc.PreconditionFailure.Violation` message.

  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          type: String.t(),
          subject: String.t(),
          description: String.t()
        }
  defstruct [:type, :subject, :description]

  field :type, 1, type: :string
  field :subject, 2, type: :string
  field :description, 3, type: :string
end
defmodule Google.Rpc.PreconditionFailure do
  @moduledoc false
  # Generated code for the `google.rpc.PreconditionFailure` proto3 message.

  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          violations: [Google.Rpc.PreconditionFailure.Violation.t()]
        }
  defstruct [:violations]

  field :violations, 1, repeated: true, type: Google.Rpc.PreconditionFailure.Violation
end
defmodule Google.Rpc.BadRequest.FieldViolation do
  @moduledoc false
  # Generated code for the nested `google.rpc.BadRequest.FieldViolation` message.

  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          field: String.t(),
          description: String.t()
        }
  defstruct [:field, :description]

  field :field, 1, type: :string
  field :description, 2, type: :string
end
defmodule Google.Rpc.BadRequest do
  @moduledoc false
  # Generated code for the `google.rpc.BadRequest` proto3 message.

  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          field_violations: [Google.Rpc.BadRequest.FieldViolation.t()]
        }
  defstruct [:field_violations]

  field :field_violations, 1, repeated: true, type: Google.Rpc.BadRequest.FieldViolation
end
defmodule Google.Rpc.RequestInfo do
  @moduledoc false
  # Generated code for the `google.rpc.RequestInfo` proto3 message.

  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          request_id: String.t(),
          serving_data: String.t()
        }
  defstruct [:request_id, :serving_data]

  field :request_id, 1, type: :string
  field :serving_data, 2, type: :string
end
defmodule Google.Rpc.ResourceInfo do
  @moduledoc false
  # Generated code for the `google.rpc.ResourceInfo` proto3 message.

  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          resource_type: String.t(),
          resource_name: String.t(),
          owner: String.t(),
          description: String.t()
        }
  defstruct [:resource_type, :resource_name, :owner, :description]

  field :resource_type, 1, type: :string
  field :resource_name, 2, type: :string
  field :owner, 3, type: :string
  field :description, 4, type: :string
end
defmodule Google.Rpc.Help.Link do
  @moduledoc false
  # Generated code for the nested `google.rpc.Help.Link` message.

  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          description: String.t(),
          url: String.t()
        }
  defstruct [:description, :url]

  field :description, 1, type: :string
  field :url, 2, type: :string
end
defmodule Google.Rpc.Help do
  @moduledoc false
  # Generated code for the `google.rpc.Help` proto3 message.

  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          links: [Google.Rpc.Help.Link.t()]
        }
  defstruct [:links]

  field :links, 1, repeated: true, type: Google.Rpc.Help.Link
end
defmodule Google.Rpc.LocalizedMessage do
  @moduledoc false
  # Generated code for the `google.rpc.LocalizedMessage` proto3 message.

  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          locale: String.t(),
          message: String.t()
        }
  defstruct [:locale, :message]

  field :locale, 1, type: :string
  field :message, 2, type: :string
end
|
lib/google/rpc/error_details.pb.ex
| 0.79158
| 0.426949
|
error_details.pb.ex
|
starcoder
|
defmodule GGity.Element.Text do
  @moduledoc """
  Defines the data and functions used to style non-geom text elements.

  CSS presentation attributes:

  * `:family` - string: sets value of CSS `font-family`
  * `:face` - string or integer: sets value of CSS `font-weight`

    Valid values:
    * `"normal"`
    * `"bold"`
    * `"bolder"`
    * `"lighter"`
    * `"initial"`
    * `"inherit"`
    * A multiple of 100 between 100 and 900

  * `:color` - string: sets value of CSS `fill`

    Values must be valid CSS color names or hex values.

  * `:size` - number: sets value of CSS `font-size` in pixels

  Other attributes:

  * `:angle` - number (between 0 and 90): sets the value passed to
    `transform: rotate()` for the purpose of rotating x axis tick
    labels (has no effect when set for other theme elements)
  """
  import GGity.Color, only: [valid_color?: 1]
  alias GGity.{Element, HTML}

  # All accepted `font-weight` values: the CSS keywords plus multiples of 100
  # (100..900), accepted both as integers and as strings.
  @valid_font_weights List.flatten([
                        "normal",
                        "bold",
                        "bolder",
                        "lighter",
                        "initial",
                        "inherit",
                        Enum.map(1..9, fn number -> [number * 100, to_string(number * 100)] end)
                      ])

  @derive [Element]
  defstruct [
    :family,
    :face,
    :color,
    :size,
    :angle
    # :hjust,
    # :vjust,
    # :lineheight,
    # :margin
  ]

  @type t() :: %__MODULE__{}

  @doc """
  Constructor for a Text element.

  Setting the value of an attribute to `nil` will remove that property
  from the generated stylesheet altogether.

  Calling `element_text(attributes)` is equivalent to `struct(GGity.Element.Text, attributes)`.
  """
  @spec element_text(keyword()) :: Element.Text.t()
  def element_text(attributes) do
    struct(Element.Text, attributes)
  end

  # Renders the element's set attributes as an iolist of CSS declarations;
  # unset (`nil`) or unrecognized attributes contribute an empty list.
  @doc false
  @spec attributes_for(Element.Text.t()) :: iolist()
  def attributes_for(element) do
    element
    |> Map.from_struct()
    |> Enum.map(&attribute_for/1)
  end

  defp attribute_for({_attribute, nil}), do: []

  defp attribute_for({:family, value}) do
    ["font-family: ", HTML.escape_to_iodata(value), ";"]
  end

  defp attribute_for({:face, value}) when value in @valid_font_weights do
    # BUGFIX: `value` may be an integer (e.g. 400); bare integers above 255 are
    # not valid iodata, so convert to a string before embedding.
    ["font-weight: ", to_string(value), ";"]
  end

  defp attribute_for({:color, value}) when is_binary(value) do
    # BUGFIX: previously an invalid color returned `nil` (the `if` had no
    # `else`), corrupting the iolist. Omit the declaration instead.
    if valid_color?(value), do: ["fill: ", value, ";"], else: []
  end

  defp attribute_for({:size, value}) when is_number(value) do
    ["font-size: ", to_string(value), "px;"]
  end

  defp attribute_for(_element), do: []
end
|
lib/ggity/element/text.ex
| 0.860662
| 0.691211
|
text.ex
|
starcoder
|
defmodule WebPushEncryption.Encrypt do
  @moduledoc """
  Module to encrypt notification payloads.
  See the following links for details about the encryption process.
  https://developers.google.com/web/updates/2016/03/web-push-encryption?hl=en
  """
  alias WebPushEncryption.Crypto

  # Maximum combined size of message + padding, enforced by the guard below.
  @max_payload_length 4078
  # Single 0x01 octet appended in the final HKDF expand step.
  @one_buffer <<1>>
  # Info string used when deriving the PRK from the client auth secret.
  @auth_info "Content-Encoding: auth" <> <<0>>

  # OTP major version, resolved at compile time; selects between the legacy
  # (`:crypto.hmac_*` / `block_encrypt`) and modern (`:crypto.mac_*` /
  # `crypto_one_time_aead`) crypto APIs below.
  @otp_version :erlang.system_info(:otp_release)
               |> String.Chars.to_string()
               |> String.to_integer()

  @doc """
  Encrypts a web push notification body.
  ## Arguments
  * `message` the body to encrypt
  * `subscription`: See `WebPushEncryption.Push.send_web_push/3`
  * `padding_length`: An optional padding length
  ## Return value
  Returns the encrypted body as well as the necessary information in the following form:
  ```elixir
  %{ciphertext: ciphertext, # the encrypted payload
  salt: salt, # the generated salt used during the encryption
  server_public_key: server_public_key} # the generated public key used during encryption
  ```
  """
  @spec encrypt(message :: binary, subscription :: map, padding_length :: non_neg_integer) :: map
  def encrypt(message, subscription, padding_length \\ 0)

  def encrypt(message, _subscription, padding_length)
      when byte_size(message) + padding_length > @max_payload_length do
    raise ArgumentError,
          "Payload is too large. The current length is #{byte_size(message)} bytes plus" <>
            " #{padding_length} bytes of padding but the max length is #{@max_payload_length} bytes"
  end

  def encrypt(message, subscription, padding_length) do
    # Padding is prepended so payload length does not leak message length.
    padding = make_padding(padding_length)
    plaintext = padding <> message
    :ok = validate_subscription(subscription)
    # Subscription keys are transmitted base64url-encoded without padding.
    client_public_key = Base.url_decode64!(subscription.keys.p256dh, padding: false)
    client_auth_token = Base.url_decode64!(subscription.keys.auth, padding: false)
    :ok = validate_length(client_auth_token, 16, "Subscription's Auth token is not 16 bytes.")
    :ok = validate_length(client_public_key, 65, "Subscription's client key (p256dh) is invalid.")
    salt = Crypto.strong_rand_bytes(16)
    # Ephemeral server key pair on the P-256 curve; ECDH with the client's
    # public key yields the shared secret the content keys are derived from.
    {server_public_key, server_private_key} = Crypto.generate_key(:ecdh, :prime256v1)
    shared_secret = :crypto.compute_key(:ecdh, client_public_key, server_private_key, :prime256v1)
    prk = hkdf(client_auth_token, shared_secret, @auth_info, 32)
    context = create_context(client_public_key, server_public_key)
    content_encryption_key_info = create_info("aesgcm", context)
    # 16-byte AES-128 key and 12-byte GCM nonce, both derived via HKDF.
    content_encryption_key = hkdf(salt, prk, content_encryption_key_info, 16)
    nonce_info = create_info("nonce", context)
    nonce = hkdf(salt, prk, nonce_info, 12)
    ciphertext = encrypt_payload(plaintext, content_encryption_key, nonce)
    %{ciphertext: ciphertext, salt: salt, server_public_key: server_public_key}
  end

  # Suppress undefined-function warnings: these legacy functions only exist on
  # OTP < 24, and the branch using them is dead code on newer OTP.
  @compile {:no_warn_undefined, {:crypto, :hmac_init, 2}}
  @compile {:no_warn_undefined, {:crypto, :hmac_update, 2}}
  @compile {:no_warn_undefined, {:crypto, :hmac_final, 1}}

  # HKDF-style extract-and-expand (single expand block) using HMAC-SHA256,
  # truncated to `length` bytes. Both branches compute the same result; they
  # differ only in which OTP crypto API is available.
  defp hkdf(salt, ikm, info, length) do
    if @otp_version < 24 do
      prk_hmac = :crypto.hmac_init(:sha256, salt)
      prk_hmac = :crypto.hmac_update(prk_hmac, ikm)
      prk = :crypto.hmac_final(prk_hmac)
      info_hmac = :crypto.hmac_init(:sha256, prk)
      info_hmac = :crypto.hmac_update(info_hmac, info)
      info_hmac = :crypto.hmac_update(info_hmac, @one_buffer)
      :crypto.hmac_final(info_hmac) |> :binary.part(0, length)
    else
      prk =
        :crypto.mac_init(:hmac, :sha256, salt)
        |> :crypto.mac_update(ikm)
        |> :crypto.mac_final()

      :crypto.mac_init(:hmac, :sha256, prk)
      |> :crypto.mac_update(info)
      |> :crypto.mac_update(@one_buffer)
      |> :crypto.mac_final()
      |> :binary.part(0, length)
    end
  end

  # Builds the key-derivation context: 0x00 byte, then each public key prefixed
  # by its 2-byte big-endian length. Both keys must be 65-byte uncompressed
  # P-256 points.
  defp create_context(client_public_key, _server_public_key)
       when byte_size(client_public_key) != 65,
       do: raise(ArgumentError, "invalid client public key length")

  defp create_context(_client_public_key, server_public_key)
       when byte_size(server_public_key) != 65,
       do: raise(ArgumentError, "invalid server public key length")

  defp create_context(client_public_key, server_public_key) do
    <<0, byte_size(client_public_key)::unsigned-big-integer-size(16)>> <>
      client_public_key <>
      <<byte_size(server_public_key)::unsigned-big-integer-size(16)>> <> server_public_key
  end

  # Context is always 1 + 2 + 65 + 2 + 65 = 135 bytes.
  defp create_info(_type, context) when byte_size(context) != 135,
    do: raise(ArgumentError, "Context argument has invalid size")

  defp create_info(type, context) do
    "Content-Encoding: " <> type <> <<0>> <> "P-256" <> context
  end

  @compile {:no_warn_undefined, {:crypto, :block_encrypt, 4}}

  # AES-128-GCM encryption; the 16-byte GCM tag is appended to the ciphertext.
  defp encrypt_payload(plaintext, content_encryption_key, nonce) do
    {cipher_text, cipher_tag} =
      if @otp_version < 24,
        do: :crypto.block_encrypt(:aes_gcm, content_encryption_key, nonce, {"", plaintext}),
        else:
          :crypto.crypto_one_time_aead(
            :aes_128_gcm,
            content_encryption_key,
            nonce,
            plaintext,
            "",
            true
          )

    cipher_text <> cipher_tag
  end

  # Both encryption keys must be present on the subscription.
  defp validate_subscription(%{keys: %{p256dh: p256dh, auth: auth}})
       when not is_nil(p256dh) and not is_nil(auth) do
    :ok
  end

  defp validate_subscription(_subscription) do
    raise ArgumentError, "Subscription is missing some encryption details."
  end

  defp validate_length(bytes, expected_size, _message) when byte_size(bytes) == expected_size,
    do: :ok

  defp validate_length(_bytes, _expected_size, message) do
    raise ArgumentError, message
  end

  # Padding block: 2-byte big-endian length followed by that many zero bytes.
  defp make_padding(padding_length) do
    binary_length = <<padding_length::unsigned-big-integer-size(16)>>
    binary_length <> :binary.copy(<<0>>, padding_length)
  end
end
|
lib/web_push_encryption/encrypt.ex
| 0.866585
| 0.605916
|
encrypt.ex
|
starcoder
|
defmodule Bitwise do
  @moduledoc """
  This module provide macros and operators for bitwise operators.
  These macros can be used in guards.
  The easiest way to use is to simply import them into
  your module:
  use Bitwise
  bnot 1 #=> -2
  1 &&& 1 #=> 1
  You can select to include only or skip operators by passing options:
  use Bitwise, only_operators: true
  1 &&& 1 #=> 1
  """

  # NOTE(review): every macro below expands to `__op__`, an Elixir
  # compiler-internal construct that maps directly onto the corresponding
  # Erlang bitwise BIF — this is legacy core-library code, not a pattern to
  # copy in application modules.

  @doc """
  Allow a developer to use this module in their programs with
  the following options:
  * `:only_operators` - Include only operators;
  * `:skip_operators` - Skip operators;
  """
  defmacro __using__(options) do
    # Build the `except:` list for the import: excluding the named functions
    # keeps only operators, and vice versa.
    except = cond do
      Keyword.get(options, :only_operators) ->
        [bnot: 1, band: 2, bor: 2, bxor: 2, bsl: 2, bsr: 2]
      Keyword.get(options, :skip_operators) ->
        [~~~: 1, &&&: 2, |||: 2, ^^^: 2, <<<: 2, >>>: 2]
      true -> []
    end
    quote do
      import Bitwise, except: unquote(except)
    end
  end

  @doc """
  Bitwise not.
  """
  defmacro bnot(expr) do
    quote do: __op__ :bnot, unquote(expr)
  end

  @doc """
  Bitwise not as operator.
  """
  defmacro ~~~expr do
    quote do: __op__ :bnot, unquote(expr)
  end

  @doc """
  Bitwise and.
  """
  defmacro band(left, right) do
    quote do: __op__ :band, unquote(left), unquote(right)
  end

  @doc """
  Bitwise and as operator.
  """
  defmacro left &&& right do
    quote do: __op__ :band, unquote(left), unquote(right)
  end

  @doc """
  Bitwise or.
  """
  defmacro bor(left, right) do
    quote do: __op__ :bor, unquote(left), unquote(right)
  end

  @doc """
  Bitwise or as operator.
  """
  defmacro left ||| right do
    quote do: __op__ :bor, unquote(left), unquote(right)
  end

  @doc """
  Bitwise xor.
  """
  defmacro bxor(left, right) do
    quote do: __op__ :bxor, unquote(left), unquote(right)
  end

  @doc """
  Bitwise xor as operator.
  """
  defmacro left ^^^ right do
    quote do: __op__ :bxor, unquote(left), unquote(right)
  end

  @doc """
  Arithmetic bitshift left.
  """
  defmacro bsl(left, right) do
    quote do: __op__ :bsl, unquote(left), unquote(right)
  end

  @doc """
  Arithmetic bitshift left as operator.
  """
  defmacro left <<< right do
    quote do: __op__ :bsl, unquote(left), unquote(right)
  end

  @doc """
  Arithmetic bitshift right.
  """
  defmacro bsr(left, right) do
    quote do: __op__ :bsr, unquote(left), unquote(right)
  end

  @doc """
  Arithmetic bitshift right as operator.
  """
  defmacro left >>> right do
    quote do: __op__ :bsr, unquote(left), unquote(right)
  end
end
|
lib/elixir/lib/bitwise.ex
| 0.746509
| 0.642173
|
bitwise.ex
|
starcoder
|
defmodule Code.Fragment do
@moduledoc """
This module provides conveniences for analyzing fragments of
textual code and extracting available information whenever possible.
Most of the functions in this module provide a best-effort
and may not be accurate under all circumstances. Read each
documentation for more information.
This module should be considered experimental.
"""
@type position :: {line :: pos_integer(), column :: pos_integer()}
@doc """
Receives a string and returns the cursor context.
This function receives a string with an Elixir code fragment,
representing a cursor position, and based on the string, it
provides contextual information about said position. The
return of this function can then be used to provide tips,
suggestions, and autocompletion functionality.
This function provides a best-effort detection and may not be
accurate under all circumstances. See the "Limitations"
section below.
Consider adding a catch-all clause when handling the return
type of this function as new cursor information may be added
in future releases.
## Examples
iex> Code.Fragment.cursor_context("")
:expr
iex> Code.Fragment.cursor_context("hello_wor")
{:local_or_var, 'hello_wor'}
## Return values
* `{:alias, charlist}` - the context is an alias, potentially
a nested one, such as `Hello.Wor` or `HelloWor`
* `{:dot, inside_dot, charlist}` - the context is a dot
where `inside_dot` is either a `{:var, charlist}`, `{:alias, charlist}`,
`{:module_attribute, charlist}`, `{:unquoted_atom, charlist}` or a `dot`
itself. If a var is given, this may either be a remote call or a map
field access. Examples are `Hello.wor`, `:hello.wor`, `hello.wor`,
`Hello.nested.wor`, `hello.nested.wor`, and `@hello.world`
* `{:dot_arity, inside_dot, charlist}` - the context is a dot arity
where `inside_dot` is either a `{:var, charlist}`, `{:alias, charlist}`,
`{:module_attribute, charlist}`, `{:unquoted_atom, charlist}` or a `dot`
itself. If a var is given, it must be a remote arity. Examples are
`Hello.world/`, `:hello.world/`, `hello.world/2`, and `@hello.world/2`
* `{:dot_call, inside_dot, charlist}` - the context is a dot
call. This means parentheses or space have been added after the expression.
where `inside_dot` is either a `{:var, charlist}`, `{:alias, charlist}`,
`{:module_attribute, charlist}`, `{:unquoted_atom, charlist}` or a `dot`
itself. If a var is given, it must be a remote call. Examples are
`Hello.world(`, `:hello.world(`, `Hello.world `, `hello.world(`, `hello.world `,
and `@hello.world(`
* `:expr` - may be any expression. Autocompletion may suggest an alias,
local or var
* `{:local_or_var, charlist}` - the context is a variable or a local
(import or local) call, such as `hello_wor`
* `{:local_arity, charlist}` - the context is a local (import or local)
arity, such as `hello_world/`
* `{:local_call, charlist}` - the context is a local (import or local)
call, such as `hello_world(` and `hello_world `
* `{:module_attribute, charlist}` - the context is a module attribute,
such as `@hello_wor`
* `{:operator, charlist}` - the context is an operator, such as `+` or
`==`. Note textual operators, such as `when` do not appear as operators
but rather as `:local_or_var`. `@` is never an `:operator` and always a
`:module_attribute`
* `{:operator_arity, charlist}` - the context is an operator arity, which
is an operator followed by /, such as `+/`, `not/` or `when/`
* `{:operator_call, charlist}` - the context is an operator call, which is
an operator followed by space, such as `left + `, `not ` or `x when `
* `:none` - no context possible
* `{:sigil, charlist}` - the context is a sigil. It may be either the beginning
of a sigil, such as `~` or `~s`, or an operator starting with `~`, such as
`~>` and `~>>`
* `{:struct, charlist}` - the context is a struct, such as `%`, `%UR` or `%URI`
* `{:unquoted_atom, charlist}` - the context is an unquoted atom. This
can be any atom or an atom representing a module
## Limitations
The current algorithm only considers the last line of the input. This means
it will also show suggestions inside strings, heredocs, etc, which is
intentional as it helps with doctests, references, and more.
"""
@doc since: "1.13.0"
@spec cursor_context(List.Chars.t(), keyword()) ::
{:alias, charlist}
| {:dot, inside_dot, charlist}
| {:dot_arity, inside_dot, charlist}
| {:dot_call, inside_dot, charlist}
| :expr
| {:local_or_var, charlist}
| {:local_arity, charlist}
| {:local_call, charlist}
| {:module_attribute, charlist}
| {:operator, charlist}
| {:operator_arity, charlist}
| {:operator_call, charlist}
| :none
| {:sigil, charlist}
| {:struct, charlist}
| {:unquoted_atom, charlist}
when inside_dot:
{:alias, charlist}
| {:dot, inside_dot, charlist}
| {:module_attribute, charlist}
| {:unquoted_atom, charlist}
| {:var, charlist}
def cursor_context(fragment, opts \\ [])

def cursor_context(binary, opts) when is_binary(binary) and is_list(opts) do
  # Only the last line matters (see "Limitations"): drop everything up to and
  # including the final newline.
  binary =
    case :binary.matches(binary, "\n") do
      [] ->
        binary
      matches ->
        {position, _} = List.last(matches)
        binary_part(binary, position + 1, byte_size(binary) - position - 1)
    end

  # The classifier scans the line right-to-left from the cursor, hence the
  # charlist reversal.
  binary
  |> String.to_charlist()
  |> :lists.reverse()
  |> codepoint_cursor_context(opts)
  |> elem(0)
end

def cursor_context(charlist, opts) when is_list(charlist) and is_list(opts) do
  # Same last-line extraction, for charlist input.
  charlist =
    case charlist |> Enum.chunk_by(&(&1 == ?\n)) |> List.last([]) do
      [?\n | _] -> []
      rest -> rest
    end

  charlist
  |> :lists.reverse()
  |> codepoint_cursor_context(opts)
  |> elem(0)
end

# Anything else chars-convertible is normalized to a charlist first.
def cursor_context(other, opts) when is_list(opts) do
  cursor_context(to_charlist(other), opts)
end
# Character classes used by the reverse-direction scanner below.
@operators '\\<>+-*/:=|&~^%!'
@starter_punctuation ',([{;'
@non_starter_punctuation ')]}"\'.$'
@space '\t\s'
@trailing_identifier '?!'
@tilde_op_prefix '<=~'

# Any character in these classes terminates an identifier when scanning.
@non_identifier @trailing_identifier ++
                  @operators ++ @starter_punctuation ++ @non_starter_punctuation ++ @space

# Word-like operators that would otherwise scan as plain identifiers.
@textual_operators ~w(when not and or in)c
# Classifies the reversed last line; returns `{context, consumed_count}`.
defp codepoint_cursor_context(reverse, _opts) do
  {stripped, spaces} = strip_spaces(reverse, 0)

  case stripped do
    # It is empty
    [] -> {:expr, 0}
    # Structs
    [?%, ?:, ?: | _] -> {{:struct, ''}, 1}
    [?%, ?: | _] -> {{:unquoted_atom, '%'}, 2}
    [?% | _] -> {{:struct, ''}, 1}
    # Token/AST only operators
    [?>, ?= | rest] when rest == [] or hd(rest) != ?: -> {:expr, 0}
    [?>, ?- | rest] when rest == [] or hd(rest) != ?: -> {:expr, 0}
    # Two-digit containers
    [?<, ?< | rest] when rest == [] or hd(rest) != ?< -> {:expr, 0}
    # Ambiguity around :
    [?: | rest] when rest == [] or hd(rest) != ?: -> unquoted_atom_or_expr(spaces)
    # Dots
    [?.] -> {:none, 0}
    [?. | rest] when hd(rest) not in '.:' -> dot(rest, spaces + 1, '')
    # It is a local or remote call with parens
    [?( | rest] -> call_to_cursor_context(strip_spaces(rest, spaces + 1))
    # A local arity definition
    [?/ | rest] -> arity_to_cursor_context(strip_spaces(rest, spaces + 1))
    # Starting a new expression
    [h | _] when h in @starter_punctuation -> {:expr, 0}
    # It is a local or remote call without parens
    rest when spaces > 0 -> call_to_cursor_context({rest, spaces})
    # It is an identifier
    _ -> identifier_to_cursor_context(reverse, 0, false)
  end
end

# Consumes leading whitespace, counting how many characters were stripped.
defp strip_spaces([h | rest], count) when h in @space, do: strip_spaces(rest, count + 1)
defp strip_spaces(rest, count), do: {rest, count}

# A `:` immediately at the cursor begins an unquoted atom; with whitespace in
# between it is just an expression position.
defp unquoted_atom_or_expr(0), do: {{:unquoted_atom, ''}, 1}
defp unquoted_atom_or_expr(_), do: {:expr, 0}
# Re-tags an identifier context as an arity context (`foo/`, `Mod.foo/`, `+/`).
defp arity_to_cursor_context({reverse, spaces}) do
  case identifier_to_cursor_context(reverse, spaces, true) do
    {{:local_or_var, acc}, count} -> {{:local_arity, acc}, count}
    {{:dot, base, acc}, count} -> {{:dot_arity, base, acc}, count}
    {{:operator, acc}, count} -> {{:operator_arity, acc}, count}
    {_, _} -> {:none, 0}
  end
end

# Re-tags an identifier context as a call context (`foo(`, `foo `, etc).
defp call_to_cursor_context({reverse, spaces}) do
  case identifier_to_cursor_context(reverse, spaces, true) do
    {{:local_or_var, acc}, count} -> {{:local_call, acc}, count}
    {{:dot, base, acc}, count} -> {{:dot_call, base, acc}, count}
    {{:operator, acc}, count} -> {{:operator_call, acc}, count}
    {_, _} -> {:none, 0}
  end
end
# Scans a (reversed) identifier and resolves it to a cursor context. The
# leading clauses special-case `..`-based forms that the general scanner
# would misread.
defp identifier_to_cursor_context([?., ?., ?: | _], n, _), do: {{:unquoted_atom, '..'}, n + 3}
defp identifier_to_cursor_context([?., ?., ?. | _], n, _), do: {{:local_or_var, '...'}, n + 3}
defp identifier_to_cursor_context([?., ?: | _], n, _), do: {{:unquoted_atom, '.'}, n + 2}
defp identifier_to_cursor_context([?., ?. | _], n, _), do: {{:operator, '..'}, n + 2}

defp identifier_to_cursor_context(reverse, count, call_op?) do
  case identifier(reverse, count) do
    :none ->
      {:none, 0}

    :operator ->
      operator(reverse, count, [], call_op?)

    {:module_attribute, acc, count} ->
      {{:module_attribute, acc}, count}

    {:sigil, acc, count} ->
      {{:sigil, acc}, count}

    {:unquoted_atom, acc, count} ->
      {{:unquoted_atom, acc}, count}

    {:alias, rest, acc, count} ->
      # An alias may extend left through a dot (`Foo.Bar`) or be a struct
      # (`%Foo`).
      case strip_spaces(rest, count) do
        {'.' ++ rest, count} when rest == [] or hd(rest) != ?. ->
          nested_alias(rest, count + 1, acc)

        {'%' ++ _, count} ->
          {{:struct, acc}, count + 1}

        _ ->
          {{:alias, acc}, count}
      end

    # Textual operators (`when`, `and`, ...) only count as operators in call
    # position.
    {:identifier, _, acc, count} when call_op? and acc in @textual_operators ->
      {{:operator, acc}, count}

    {:identifier, rest, acc, count} ->
      case strip_spaces(rest, count) do
        {'.' ++ rest, count} when rest == [] or hd(rest) != ?. ->
          dot(rest, count + 1, acc)

        _ ->
          {{:local_or_var, acc}, count}
      end
  end
end
# Scans one identifier from the reversed input. `?`/`!` are only legal as the
# trailing character of an identifier, so (reversed) they may appear first.
defp identifier([?? | rest], count), do: check_identifier(rest, count + 1, [??])
defp identifier([?! | rest], count), do: check_identifier(rest, count + 1, [?!])
defp identifier(rest, count), do: check_identifier(rest, count, [])

# At least one identifier character must follow; otherwise treat the input as
# an operator.
defp check_identifier([h | t], count, acc) when h not in @non_identifier,
  do: rest_identifier(t, count + 1, [h | acc])

defp check_identifier(_, _, _), do: :operator

defp rest_identifier([h | rest], count, acc) when h not in @non_identifier do
  rest_identifier(rest, count + 1, [h | acc])
end

# `@name` — a module attribute (or a bare `@`).
defp rest_identifier(rest, count, [?@ | acc]) do
  case tokenize_identifier(rest, count, acc) do
    {:identifier, _rest, acc, count} -> {:module_attribute, acc, count}
    :none when acc == [] -> {:module_attribute, '', count}
    _ -> :none
  end
end

# `~s`-style sigil start: a single letter preceded by `~` that is not part of
# a `~`-prefixed operator such as `<~` or `=~`.
defp rest_identifier([?~ | rest], count, [letter])
     when (letter in ?A..?Z or letter in ?a..?z) and
            (rest == [] or hd(rest) not in @tilde_op_prefix) do
  {:sigil, [letter], count + 1}
end

# `:name` — an unquoted atom, provided the scanned chars tokenize cleanly.
defp rest_identifier([?: | rest], count, acc) when rest == [] or hd(rest) != ?: do
  case String.Tokenizer.tokenize(acc) do
    {_, _, [], _, _, _} -> {:unquoted_atom, acc, count + 1}
    _ -> :none
  end
end

# A `?` to the left of identifier characters (e.g. char literals) — give up.
defp rest_identifier([?? | _], _count, _acc) do
  :none
end

defp rest_identifier(rest, count, acc) do
  tokenize_identifier(rest, count, acc)
end

# Validates the scanned characters with the real tokenizer and returns its
# classification (`:identifier` or `:alias`).
defp tokenize_identifier(rest, count, acc) do
  case String.Tokenizer.tokenize(acc) do
    # Not actually an atom cause rest is not a :
    {:atom, _, _, _, _, _} ->
      :none

    # Aliases must be ascii only
    {:alias, _, _, _, false, _} ->
      :none

    {kind, _, [], _, _, extra} ->
      if ?@ in extra do
        :none
      else
        {kind, rest, acc, count}
      end

    _ ->
      :none
  end
end
# Extends an alias leftwards through a dot: `Foo.Bar` accumulates into a
# single alias (or struct) context.
defp nested_alias(rest, count, acc) do
  {rest, count} = strip_spaces(rest, count)

  case identifier_to_cursor_context(rest, count, true) do
    {{:struct, prev}, count} -> {{:struct, prev ++ '.' ++ acc}, count}
    {{:alias, prev}, count} -> {{:alias, prev ++ '.' ++ acc}, count}
    _ -> {:none, 0}
  end
end

# Resolves what sits to the left of a dot (`x.`, `Mod.`, `:atom.`, `@attr.`,
# chained dots) and wraps it in a `:dot` context.
defp dot(rest, count, acc) do
  {rest, count} = strip_spaces(rest, count)

  case identifier_to_cursor_context(rest, count, true) do
    {{:local_or_var, var}, count} -> {{:dot, {:var, var}, acc}, count}
    {{:unquoted_atom, _} = prev, count} -> {{:dot, prev, acc}, count}
    {{:alias, _} = prev, count} -> {{:dot, prev, acc}, count}
    {{:dot, _, _} = prev, count} -> {{:dot, prev, acc}, count}
    {{:module_attribute, _} = prev, count} -> {{:dot, prev, acc}, count}
    {{:struct, acc}, count} -> {{:struct, acc ++ '.'}, count}
    {_, _} -> {:none, 0}
  end
end
# Accumulates operator characters and classifies the result via the real
# Elixir tokenizer.
defp operator([h | rest], count, acc, call_op?) when h in @operators do
  operator(rest, count + 1, [h | acc], call_op?)
end

# If we are opening a sigil, ignore the operator.
defp operator([letter, ?~ | rest], _count, [op], _call_op?)
     when op in '<|/' and (letter in ?A..?Z or letter in ?a..?z) and
            (rest == [] or hd(rest) not in @tilde_op_prefix) do
  {:none, 0}
end

# A lone `~` begins a sigil — except in call position or after a dot.
defp operator(rest, count, '~', call_op?) do
  {rest, _} = strip_spaces(rest, count)

  if call_op? or match?([?. | rest] when rest == [] or hd(rest) != ?., rest) do
    {:none, 0}
  else
    {{:sigil, ''}, count}
  end
end

defp operator(rest, count, acc, _call_op?) do
  case :elixir_tokenizer.tokenize(acc, 1, 1, []) do
    # e.g. `:+` — an operator-named unquoted atom; drop the leading `:`.
    {:ok, _, _, _, [{:atom, _, _}]} ->
      {{:unquoted_atom, tl(acc)}, count}

    {:ok, _, _, _, [{_, _, op}]} ->
      {rest, dot_count} = strip_spaces(rest, count)

      cond do
        # Only genuine unary/binary operators qualify.
        Code.Identifier.unary_op(op) == :error and Code.Identifier.binary_op(op) == :error ->
          :none

        # Operator after a dot, e.g. `Kernel.+`.
        match?([?. | rest] when rest == [] or hd(rest) != ?., rest) ->
          dot(tl(rest), dot_count + 1, acc)

        true ->
          {{:operator, acc}, count}
      end

    _ ->
      {:none, 0}
  end
end
@doc """
Receives a string and returns the surround context.
This function receives a string with an Elixir code fragment
and a `position`. It returns a map containing the beginning
and ending of the identifier alongside its context, or `:none`
if there is nothing with a known context.
The difference between `cursor_context/2` and `surround_context/3`
is that the former assumes the expression in the code fragment
is incomplete. For example, `do` in `cursor_context/2` may be
a keyword or a variable or a local call, while `surround_context/3`
assumes the expression in the code fragment is complete, therefore
`do` would always be a keyword.
The `position` contains both the `line` and `column`, both starting
with the index of 1. The column must precede the surrounding expression.
For example, the expression `foo`, will return something for the columns
1, 2, and 3, but not 4:
foo
^ column 1
foo
^ column 2
foo
^ column 3
foo
^ column 4
The returned map contains the column the expression starts and the
first column after the expression ends.
Similar to `cursor_context/2`, this function also provides a best-effort
detection and may not be accurate under all circumstances. See the
"Return values" and "Limitations" section under `cursor_context/2` for
more information.
## Examples
iex> Code.Fragment.surround_context("foo", {1, 1})
%{begin: {1, 1}, context: {:local_or_var, 'foo'}, end: {1, 4}}
## Differences to `cursor_context/2`
Because `surround_context/3` deals with complete code, it has some
difference to `cursor_context/2`:
* `dot_call`/`dot_arity` and `operator_call`/`operator_arity`
are collapsed into `dot` and `operator` contexts respectively
as there aren't any meaningful distinctions between them
* On the other hand, this function still makes a distinction between
`local_call`/`local_arity` and `local_or_var`, since the latter can
be a local or variable
* `@` when not followed by any identifier is returned as `{:operator, '@'}`
(in contrast to `{:module_attribute, ''}` in `cursor_context/2`
* This function never returns empty sigils `{:sigil, ''}` or empty structs
`{:struct, ''}` as context
"""
@doc since: "1.13.0"
@spec surround_context(List.Chars.t(), position(), keyword()) ::
%{begin: position, end: position, context: context} | :none
when context:
{:alias, charlist}
| {:dot, inside_dot, charlist}
| {:local_or_var, charlist}
| {:local_arity, charlist}
| {:local_call, charlist}
| {:module_attribute, charlist}
| {:operator, charlist}
| {:unquoted_atom, charlist},
inside_dot:
{:alias, charlist}
| {:dot, inside_dot, charlist}
| {:module_attribute, charlist}
| {:unquoted_atom, charlist}
| {:var, charlist}
def surround_context(fragment, position, options \\ [])
# Binary input: operate only on the line that contains `position`.
def surround_context(binary, {line, column}, opts) when is_binary(binary) do
binary
|> String.split("\n")
|> Enum.at(line - 1, '')
# NOTE(review): when `line` is out of range the default `''` is a charlist,
# yet `String.to_charlist/1` expects a binary — TODO confirm intended.
|> String.to_charlist()
|> position_surround_context(line, column, opts)
end
# Charlist input: same strategy, splitting on the newline codepoint.
def surround_context(charlist, {line, column}, opts) when is_list(charlist) do
charlist
|> :string.split('\n', :all)
|> Enum.at(line - 1, '')
|> position_surround_context(line, column, opts)
end
# Anything else chars-convertible (e.g. atoms) is normalized to a charlist.
def surround_context(other, {_, _} = position, opts) do
surround_context(to_charlist(other), position, opts)
end
# Resolves the surround context at {line, column}: splits the line at the
# cursor, expands rightwards to the end of the identifier/alias under the
# cursor, then classifies the combined (reversed) span.
defp position_surround_context(charlist, line, column, opts)
when is_integer(line) and line >= 1 and is_integer(column) and column >= 1 do
{reversed_pre, post} = string_reverse_at(charlist, column - 1, [])
{reversed_pre, post} = adjust_position(reversed_pre, post)
case take_identifier(post, []) do
# Nothing identifier-like under the cursor; try operators instead.
{_, [], _} ->
maybe_operator(reversed_pre, post, line, opts)
{:identifier, reversed_post, rest} ->
{rest, _} = strip_spaces(rest, 0)
reversed = reversed_post ++ reversed_pre
case codepoint_cursor_context(reversed, opts) do
{{:struct, acc}, offset} ->
build_surround({:struct, acc}, reversed, line, offset)
{{:alias, acc}, offset} ->
build_surround({:alias, acc}, reversed, line, offset)
{{:dot, _, [_ | _]} = dot, offset} ->
build_surround(dot, reversed, line, offset)
# "(" right after the identifier makes it a call; "/" an arity ref.
{{:local_or_var, acc}, offset} when hd(rest) == ?( ->
build_surround({:local_call, acc}, reversed, line, offset)
{{:local_or_var, acc}, offset} when hd(rest) == ?/ ->
build_surround({:local_arity, acc}, reversed, line, offset)
# Textual operators (and/or/not/...) are reported as operators.
{{:local_or_var, acc}, offset} when acc in @textual_operators ->
build_surround({:operator, acc}, reversed, line, offset)
# Block keywords never count as a local_or_var context here.
{{:local_or_var, acc}, offset} when acc not in ~w(do end after else catch rescue)c ->
build_surround({:local_or_var, acc}, reversed, line, offset)
# "@" with no identifier after it surfaces as the @ operator.
{{:module_attribute, ''}, offset} ->
build_surround({:operator, '@'}, reversed, line, offset)
{{:module_attribute, acc}, offset} ->
build_surround({:module_attribute, acc}, reversed, line, offset)
{{:sigil, acc}, offset} ->
build_surround({:sigil, acc}, reversed, line, offset)
{{:unquoted_atom, acc}, offset} ->
build_surround({:unquoted_atom, acc}, reversed, line, offset)
_ ->
maybe_operator(reversed_pre, post, line, opts)
end
{:alias, reversed_post, _rest} ->
reversed = reversed_post ++ reversed_pre
case codepoint_cursor_context(reversed, opts) do
{{:alias, acc}, offset} ->
build_surround({:alias, acc}, reversed, line, offset)
{{:struct, acc}, offset} ->
build_surround({:struct, acc}, reversed, line, offset)
_ ->
:none
end
end
end
# Fallback when no identifier sits under the cursor: try to read an
# operator (or a sigil opener such as `~r`) from the cursor position.
defp maybe_operator(reversed_pre, post, line, opts) do
case take_operator(post, []) do
{[], _rest} ->
:none
{reversed_post, rest} ->
reversed = reversed_post ++ reversed_pre
case codepoint_cursor_context(reversed, opts) do
{{:operator, acc}, offset} ->
build_surround({:operator, acc}, reversed, line, offset)
# "~" followed by a letter is a sigil; include the letter in the span.
{{:sigil, ''}, offset} when hd(rest) in ?A..?Z or hd(rest) in ?a..?z ->
build_surround({:sigil, [hd(rest)]}, [hd(rest) | reversed], line, offset + 1)
{{:dot, _, [_ | _]} = dot, offset} ->
build_surround(dot, reversed, line, offset)
_ ->
:none
end
end
end
# Builds the surround result map for `context`, deriving 1-based begin/end
# columns from the reversed prefix and the grapheme length of the match.
defp build_surround(context, reversed, line, offset) do
  {post, reversed_pre} = enum_reverse_at(reversed, offset, [])
  pre = :lists.reverse(reversed_pre)

  begin_col = :string.length(pre) + 1
  end_col = begin_col + :string.length(post)

  %{context: context, begin: {line, begin_col}, end: {line, end_col}}
end
# Consumes identifier characters moving right from the cursor. A trailing
# character in @trailing_identifier (e.g. ?/!) terminates the identifier;
# when the identifier is followed by "." and an uppercase letter, it is
# instead treated as the start of an alias chain.
defp take_identifier([h | t], acc) when h in @trailing_identifier,
do: {:identifier, [h | acc], t}
defp take_identifier([h | t], acc) when h not in @non_identifier,
do: take_identifier(t, [h | acc])
defp take_identifier(rest, acc) do
with {[?. | t], _} <- strip_spaces(rest, 0),
{[h | _], _} when h in ?A..?Z <- strip_spaces(t, 0) do
take_alias(rest, acc)
else
_ -> {:identifier, acc, rest}
end
end
# Consumes a dotted alias chain (`Foo.Bar.Baz`) moving rightwards,
# accumulating each segment (and the joining dots) into `acc`.
defp take_alias([h | t], acc) when h not in @non_identifier,
do: take_alias(t, [h | acc])
defp take_alias(rest, acc) do
with {[?. | t], acc} <- move_spaces(rest, acc),
{[h | t], acc} when h in ?A..?Z <- move_spaces(t, [?. | acc]) do
take_alias(t, [h | acc])
else
_ -> {:alias, acc, rest}
end
end
# Accumulates operator characters (and dots, which join dot calls) while
# scanning rightwards; returns the reversed span plus the remaining input.
defp take_operator([h | t], acc) when h in @operators or h == ?.,
  do: take_operator(t, [h | acc])

defp take_operator(rest, acc), do: {acc, rest}
# Unquoted atom handling
# If the cursor sits right before a ":" that begins an unquoted atom,
# shift the colon into the prefix so the atom is classified as a whole.
defp adjust_position(reversed_pre, [?: | post])
when hd(post) != ?: and (reversed_pre == [] or hd(reversed_pre) != ?:) do
{[?: | reversed_pre], post}
end
# A "%" right after the cursor begins a struct; shift it into the prefix.
defp adjust_position(reversed_pre, [?% | post]) do
adjust_position([?% | reversed_pre], post)
end
# Dot/struct handling
defp adjust_position(reversed_pre, post) do
case move_spaces(post, reversed_pre) do
# If we are between spaces and a dot, move past the dot
{[?. | post], reversed_pre} when hd(post) != ?. and hd(reversed_pre) != ?. ->
{post, reversed_pre} = move_spaces(post, [?. | reversed_pre])
{reversed_pre, post}
_ ->
case strip_spaces(reversed_pre, 0) do
# If there is a dot to our left, make sure to move to the first character
{[?. | rest], _} when rest == [] or hd(rest) not in '.:' ->
{post, reversed_pre} = move_spaces(post, reversed_pre)
{reversed_pre, post}
# If there is a % to our left, make sure to move to the first character
{[?% | _], _} ->
# NOTE(review): both branches below return the same tuple; the match
# only differs in whether `post` was rebound by move_spaces — confirm
# whether the uppercase check was meant to change the result.
case move_spaces(post, reversed_pre) do
{[h | _] = post, reversed_pre} when h in ?A..?Z ->
{reversed_pre, post}
_ ->
{reversed_pre, post}
end
_ ->
{reversed_pre, post}
end
end
end
# Moves any leading whitespace from the input onto the accumulator,
# preserving it, and returns {remaining_input, accumulator}.
defp move_spaces(chars, acc) do
  case chars do
    [h | t] when h in @space -> move_spaces(t, [h | acc])
    _ -> {chars, acc}
  end
end
# Reverses the first `n` grapheme clusters of `charlist` onto `acc` and
# returns {reversed_prefix, remainder}. Grapheme-aware so that combining
# sequences survive the reversal intact.
defp string_reverse_at(charlist, 0, acc), do: {acc, charlist}

defp string_reverse_at(charlist, n, acc) do
  case :unicode_util.gc(charlist) do
    [gc | remaining] when is_integer(gc) ->
      string_reverse_at(remaining, n - 1, [gc | acc])

    [gc | remaining] when is_list(gc) ->
      string_reverse_at(remaining, n - 1, :lists.reverse(gc, acc))

    [] ->
      {acc, []}
  end
end
# Pops up to `n` leading elements onto `acc` (reversing them) and returns
# {reversed_taken, remainder}.
defp enum_reverse_at(list, n, acc) do
  case list do
    [h | t] when n > 0 -> enum_reverse_at(t, n - 1, [h | acc])
    _ -> {acc, list}
  end
end
@doc """
Receives a code fragment and returns a quoted expression
with a cursor at the nearest argument position.
A container is any Elixir expression starting with `(`,
`{`, and `[`. This includes function calls, tuples, lists,
maps, and so on. For example, take this code, which would
be given as input:
max(some_value,
This function will return the AST equivalent to:
max(some_value, __cursor__())
In other words, this function is capable of closing any open
brackets and insert the cursor position. Any content at the
cursor position that is after a comma or an opening bracket
is discarded. For example, if this is given as input:
max(some_value, another_val
It will return the same AST:
max(some_value, __cursor__())
Similarly, if only this is given:
max(some_va
Then it returns:
max(__cursor__())
Calls without parenthesis are also supported, as we assume the
brackets are implicit.
Operators and anonymous functions are not containers, and therefore
will be discarded. The following will all return the same AST:
max(some_value,
max(some_value, fn x -> x end
max(some_value, 1 + another_val
max(some_value, 1 |> some_fun() |> another_fun
On the other hand, tuples, lists, maps, etc all retain the
cursor position:
max(some_value, [1, 2,
Returns the following AST:
max(some_value, [1, 2, __cursor__()])
Keyword lists (and do-end blocks) are also retained. The following:
if(some_value, do:
if(some_value, do: :token
if(some_value, do: 1 + val
all return:
if(some_value, do: __cursor__())
The AST returned by this function is not safe to evaluate but
it can be analyzed and expanded.
## Examples
iex> Code.Fragment.container_cursor_to_quoted("max(some_value, ")
{:ok, {:max, [line: 1], [{:some_value, [line: 1], nil}, {:__cursor__, [line: 1], []}]}}
## Options
* `:file` - the filename to be reported in case of parsing errors.
Defaults to `"nofile"`.
* `:line` - the starting line of the string being parsed.
Defaults to 1.
* `:column` - the starting column of the string being parsed.
Defaults to 1.
* `:columns` - when `true`, attach a `:column` key to the quoted
metadata. Defaults to `false`.
* `:token_metadata` - when `true`, includes token-related
metadata in the expression AST, such as metadata for `do` and `end`
tokens, for closing tokens, end of expressions, as well as delimiters
for sigils. See `t:Macro.metadata/0`. Defaults to `false`.
"""
@doc since: "1.13.0"
@spec container_cursor_to_quoted(List.Chars.t(), keyword()) ::
{:ok, Macro.t()} | {:error, {location :: keyword, binary | {binary, binary}, binary}}
# Delegates to Code.string_to_quoted/2 with cursor completion enabled and
# warnings suppressed; option defaults are resolved inline.
def container_cursor_to_quoted(fragment, opts \\ []) do
  Code.string_to_quoted(fragment,
    file: Keyword.get(opts, :file, "nofile"),
    line: Keyword.get(opts, :line, 1),
    column: Keyword.get(opts, :column, 1),
    columns: Keyword.get(opts, :columns, false),
    token_metadata: Keyword.get(opts, :token_metadata, false),
    cursor_completion: true,
    emit_warnings: false
  )
end
end
|
lib/elixir/lib/code/fragment.ex
| 0.828315
| 0.657153
|
fragment.ex
|
starcoder
|
defmodule VisaCheckout do
  @moduledoc """
  Visa Checkout API reference: https://developer.visa.com/capabilities/visa_checkout/reference
  """

  alias VisaCheckout.{Http, Util}

  @doc """
  Get payment data

  Visa Checkout API reference: https://developer.visa.com/capabilities/visa_checkout/reference#visa_checkout__get_payment_data_api____get_payment_data

  ## Example
  ```
  VisaCheckout.get_payment_data("call_id")
  ```
  """
  def get_payment_data(call_id) do
    endpoint = build_endpoint("data", call_id)

    endpoint
    |> Http.get(build_token_header(endpoint))
    |> handle_response()
  end

  @doc """
  Update payment info

  Visa Checkout API reference: https://developer.visa.com/capabilities/visa_checkout/reference#visa_checkout__update_payment_information____update_payment_info

  ## Example
  ```
  VisaCheckout.update_payment_info("call_id", %{orderInfo: ...})
  ```
  """
  def update_payment_info(call_id, params) do
    endpoint = build_endpoint("info", call_id)

    endpoint
    |> Http.put(params, build_token_header(endpoint, params))
    |> handle_response()
  end

  # Builds the request path, including the API key as a query parameter.
  defp build_endpoint(endpoint, call_id) do
    "/#{endpoint}/#{call_id}?apikey=#{Util.api_key()}"
  end

  # Returns the `x-pay-token` header list the Visa Checkout API expects.
  defp build_token_header(endpoint, params \\ nil) do
    [{"x-pay-token", build_token(endpoint, params)}]
  end

  # Builds the x-pay-token value "xv2:<timestamp>:<hmac>". The HMAC covers
  # the timestamp, the "payment" resource, the endpoint (with "?" removed)
  # and the JSON-encoded body (empty string when there is none).
  defp build_token(endpoint, params) do
    # :second replaces the deprecated :seconds time-unit alias.
    timestamp = :os.system_time(:second)

    string_params =
      if is_nil(params) do
        ""
      else
        Jason.encode!(params)
      end

    token =
      Util.secret()
      |> Util.hmac("#{timestamp}payment#{String.replace(endpoint, "?", "")}#{string_params}")
      |> Base.encode16()
      |> String.downcase()

    "xv2:#{timestamp}:#{token}"
  end

  # 200 responses may carry an encrypted payload; any other status (or a
  # transport error) is returned as an error tuple.
  defp handle_response({:ok, %{status_code: 200} = response}) do
    {:ok, maybe_decrypt(response.body)}
  end

  defp handle_response({:ok, response}), do: {:error, response}
  defp handle_response({:error, error}), do: {:error, error}

  # Decrypts the payload when both encryption fields are present in a map
  # body; otherwise returns the body untouched.
  defp maybe_decrypt(%{"encKey" => enc_key, "encPaymentData" => enc_data}) do
    Util.decrypt_payload(enc_key, enc_data)
  end

  defp maybe_decrypt(body), do: body
end
|
lib/visa_checkout.ex
| 0.724675
| 0.435661
|
visa_checkout.ex
|
starcoder
|
defmodule EpicenterWeb.Forms.DemographicForm do
  use Ecto.Schema

  import Ecto.Changeset

  alias Epicenter.Cases.Demographic
  alias Epicenter.Cases.Ethnicity
  alias Epicenter.Coerce
  alias Epicenter.MajorDetailed
  alias EpicenterWeb.Forms.DemographicForm

  @primary_key false
  # Form-facing embedded schema; map fields hold "major/detailed" selections.
  embedded_schema do
    field :employment, :string
    field :ethnicity, :map
    field :gender_identity, :map
    field :marital_status, :string
    field :notes, :string
    field :occupation, :string
    field :race, :map
    field :sex_at_birth, :string
  end

  @required_attrs ~w{}a
  @optional_attrs ~w{
    employment
    ethnicity
    gender_identity
    marital_status
    notes
    occupation
    race
    sex_at_birth
  }a

  # Builds a form changeset straight from a persisted demographic record.
  def model_to_form_changeset(%Demographic{} = demographic),
    do: demographic |> model_to_form_attrs() |> attrs_to_form_changeset()

  # Converts a Demographic struct into the attribute map the form expects,
  # expanding map-backed fields into their "major/detailed" form shape.
  def model_to_form_attrs(%Demographic{} = demographic) do
    %{
      employment: demographic.employment,
      ethnicity: demographic.ethnicity |> MajorDetailed.for_form(Demographic.standard_values(:ethnicity)),
      ethnicity_hispanic_latinx_or_spanish_origin: demographic.ethnicity |> Ethnicity.hispanic_latinx_or_spanish_origin(),
      gender_identity: demographic.gender_identity |> MajorDetailed.for_form(Demographic.standard_values(:gender_identity)),
      marital_status: demographic.marital_status,
      notes: demographic.notes,
      occupation: demographic.occupation,
      race: demographic.race |> MajorDetailed.for_form(Demographic.standard_values(:race)),
      sex_at_birth: demographic.sex_at_birth
    }
  end

  # Casts raw (string- or atom-keyed) attrs into a DemographicForm
  # changeset, coercing the single-valued string fields first.
  def attrs_to_form_changeset(attrs) do
    coerced_attrs =
      attrs
      |> Euclid.Extra.Map.stringify_keys()
      |> Euclid.Extra.Map.transform(~w{employment marital_status sex_at_birth}, &Coerce.to_string_or_nil/1)

    %DemographicForm{}
    |> cast(coerced_attrs, @required_attrs ++ @optional_attrs)
    |> validate_required(@required_attrs)
  end

  # Applies the form changeset and, on success, converts the form struct
  # back into model attributes (collapsing major/detailed selections).
  # A failed apply_action falls through, returning the error tuple as-is.
  def form_changeset_to_model_attrs(%Ecto.Changeset{} = form_changeset) do
    with {:ok, form} <- apply_action(form_changeset, :create) do
      {:ok,
       %{
         employment: form.employment,
         ethnicity: form.ethnicity |> MajorDetailed.for_model(:map) |> Ethnicity.from_major_detailed(),
         gender_identity: form.gender_identity |> MajorDetailed.for_model(:list),
         marital_status: form.marital_status,
         notes: form.notes,
         occupation: form.occupation,
         race: form.race |> MajorDetailed.for_model(:map),
         sex_at_birth: form.sex_at_birth,
         source: "form"
       }}
    end
  end
end
|
lib/epicenter_web/forms/demographic_form.ex
| 0.544075
| 0.401189
|
demographic_form.ex
|
starcoder
|
defmodule Mix.Tasks.Re.Tags.Create do
  @moduledoc """
  Create all tags from our system.
  """

  use Mix.Task

  require Logger

  @tags [
    %{category: "concierge", name: "24 Horas", visibility: "public"},
    %{category: "concierge", name: "Horario Comercial", visibility: "public"},
    %{category: "concierge", name: "Portaria Eletrônica", visibility: "public"},
    %{category: "infrastructure", name: "Academia", visibility: "public"},
    %{category: "infrastructure", name: "Bicicletário", visibility: "public"},
    %{category: "infrastructure", name: "Brinquedoteca", visibility: "public"},
    %{category: "infrastructure", name: "Churrasqueira", visibility: "public"},
    %{category: "infrastructure", name: "Espaço Gourmet", visibility: "public"},
    %{category: "infrastructure", name: "Espaço Verde", visibility: "public"},
    %{category: "infrastructure", name: "Parque", visibility: "public"},
    %{category: "infrastructure", name: "Piscina", visibility: "public"},
    %{category: "infrastructure", name: "Playground", visibility: "public"},
    %{category: "infrastructure", name: "Quadra", visibility: "public"},
    %{category: "infrastructure", name: "Salão De Festas", visibility: "public"},
    %{category: "infrastructure", name: "Salão De Jogos", visibility: "public"},
    %{category: "infrastructure", name: "Sauna", visibility: "public"},
    %{category: "realty", name: "Armários Embutidos", visibility: "public"},
    %{category: "realty", name: "Banheiro Empregados", visibility: "public"},
    %{category: "realty", name: "Bom Para Pets", visibility: "public"},
    %{category: "realty", name: "Dependência Empregados", visibility: "public"},
    %{category: "realty", name: "Espaço Para Churrasco", visibility: "public"},
    %{category: "realty", name: "Fogão Embutido", visibility: "public"},
    %{category: "realty", name: "Lavabo", visibility: "public"},
    %{category: "realty", name: "Reformado", visibility: "public"},
    %{category: "realty", name: "Sacada", visibility: "public"},
    %{category: "realty", name: "Terraço", visibility: "public"},
    %{category: "realty", name: "<NAME>", visibility: "public"},
    %{category: "realty", name: "Varanda", visibility: "public"},
    %{category: "view", name: "Comunidade", visibility: "private"},
    %{category: "view", name: "Cristo", visibility: "public"},
    %{category: "view", name: "Lagoa", visibility: "public"},
    %{category: "view", name: "Mar", visibility: "public"},
    %{category: "view", name: "Montanhas", visibility: "public"},
    %{category: "view", name: "Parcial Comunidade", visibility: "private"},
    %{category: "view", name: "<NAME>", visibility: "public"},
    %{category: "view", name: "Pedras", visibility: "public"},
    %{category: "view", name: "Verde", visibility: "public"},
    %{category: "view", name: "Vizinho", visibility: "private"}
  ]

  # Inserts every tag; runs with only the Repo started (no full app boot).
  def run(_) do
    Mix.EctoSQL.ensure_started(Re.Repo, [])
    # Enum.each instead of Enum.map: inserts are side effects; the mapped
    # result list was never used.
    Enum.each(@tags, &insert/1)
  end

  # Inserts a single tag; `on_conflict: :nothing` makes re-runs idempotent.
  # Raises via the match if the changeset is invalid.
  def insert(params) do
    {:ok, tag} =
      %Re.Tag{}
      |> Re.Tag.changeset(params)
      |> Re.Repo.insert(on_conflict: :nothing)

    Logger.info("insert : tag name #{tag.name_slug}")
  end
end
|
apps/re/lib/mix/tasks/tags/create.ex
| 0.761006
| 0.458773
|
create.ex
|
starcoder
|
defmodule MatrixOperation do
@moduledoc """
*MatrixOperation* is a linear algebra library in Elixir language.
Matrix indices of a row and column is an integer starting from 1 (not from 0).
"""
@doc """
Numbers of rows and columns of a matrix are got.
#### Argument
- matrix: Target matrix for finding the numbers of rows and columns.
#### Output
{num_rows, num_cols}: Numbers of rows and columns of a matrix
#### Example
iex> MatrixOperation.size([[3, 2, 3], [2, 1, 2]])
{2, 3}
"""
# Returns {rows, cols} when every row yields the same column count.
# A row containing a non-number counts as nil (preserved from the original
# element check), so an all-invalid-but-consistent input returns {rows, nil}
# and a ragged or partially invalid one returns nil.
def size(matrix) when is_list(hd(matrix)) do
  col_counts = Enum.map(matrix, &numeric_row_length/1)
  max_num = Enum.max(col_counts)
  if(max_num == Enum.min(col_counts), do: {length(matrix), max_num}, else: nil)
end

# Non-list-of-lists input has no size.
def size(_matrix) do
  nil
end

# Length of the row when all its elements are numbers, else nil.
# Replaces the original O(n^2) Enum.at-based index recursion.
defp numeric_row_length(row) do
  if Enum.all?(row, &is_number/1), do: length(row), else: nil
end
@doc """
A n-th unit matrix is got.
#### Argument
- n: Number of rows / columns in the unit matrix to output.
#### Output
A n-th unit matrix
#### Example
iex> MatrixOperation.unit_matrix(3)
[[1, 0, 0], [0, 1, 0], [0, 0, 1]]
"""
# Builds the n-th identity matrix via nested comprehensions.
def unit_matrix(n) when n > 0 and is_integer(n) do
  for i <- 1..n do
    for j <- 1..n, do: kronecker_delta(i, j)
  end
end

# 1 on the diagonal, 0 elsewhere.
defp kronecker_delta(i, i), do: 1
defp kronecker_delta(_i, _j), do: 0
@doc """
A m×n matrix having even-elements is got.
#### Argument
- elem: Value of the common element of the matrix to output.
- {row_num, col_num}: Size of the matrix to output.
#### Output
A row_num×col_num matrix having even elements
#### Example
iex> MatrixOperation.even_matrix(0, {2, 3})
[[0, 0, 0], [0, 0, 0]]
iex> MatrixOperation.even_matrix(1, {3, 2})
[[1, 1], [1, 1], [1, 1]]
"""
# Builds a row_num x col_num matrix whose cells are all `elem`.
def even_matrix(elem, {row_num, col_num})
    when row_num > 0 and col_num > 0 and is_number(elem) do
  row = List.duplicate(elem, col_num)
  List.duplicate(row, row_num)
end

# Invalid sizes or a non-numeric element yield nil.
def even_matrix(_elem, _size) do
  nil
end
@doc """
A m×n matrix having random elements is got.
#### Argument
- min_val: Minimum value of random number.
- max_val: Maximum value of random number.
- {row_num, col_num}: Size of the matrix to output.
- type: Data type of elements. "int" or "real".
#### Output
A row_num×col_num matrix having random elements
"""
# Bodiless head declares the default once, avoiding the "multiple clauses
# and also declares default values" pattern.
def random_matrix(min_val, max_val, size, type \\ "int")

# Builds a row_num x col_num matrix of random elements in [min_val, max_val].
# The original `& &1 * 0 + random_element(...)` capture hack is replaced by
# a plain anonymous function that ignores the index.
def random_matrix(min_val, max_val, {row_num, col_num}, type)
    when row_num > 0 and col_num > 0 and max_val > min_val do
  Enum.map(1..row_num, fn _row ->
    Enum.map(1..col_num, fn _col -> random_element(min_val, max_val, type) end)
  end)
end

# Invalid sizes or an empty/negative range yield nil.
def random_matrix(_min_val, _max_val, _size, _type) do
  nil
end

defp random_element(min_val, max_val, "int") do
  Enum.random(min_val..max_val)
end

# Approximates a uniform real by drawing an integer at 1e-7 resolution.
defp random_element(min_val, max_val, "real") do
  const = 10_000_000
  Enum.random((min_val * const)..(max_val * const)) / const
end
@doc """
An element of a matrix is got.
#### Argument
- matrix: Target matrix from which to extract the element.
- {row_idx, col_idx}: Index of row and column of the element to be extracted.
#### Output
An element of a matrix
#### Example
iex> MatrixOperation.get_one_element([[1, 2, 3], [4, 5, 6], [7, 8, 9] ], {1, 1})
1
"""
# Fetches the element at 1-based {row_idx, col_idx}.
def get_one_element(matrix, {row_idx, col_idx}) do
  row = Enum.at(matrix, row_idx - 1)
  Enum.at(row, col_idx - 1)
end
@doc """
A row of a matrix is got.
#### Argument
- matrix: Target matrix from which to extract the row.
- row_idx: Index of the row to be extracted.
#### Output
A row of a matrix
#### Example
iex> MatrixOperation.get_one_row([[1, 2, 3], [4, 5, 6], [7, 8, 9] ], 1)
[1, 2, 3]
"""
# Fetches the 1-based row_idx-th row.
def get_one_row(matrix, row_idx), do: Enum.at(matrix, row_idx - 1)
@doc """
A column of a matrix is got.
#### Argument
- matrix: Target matrix from which to extract the column.
- col_idx: Index of the column to be extracted.
#### Output
A column of a matrix
#### Example
iex> MatrixOperation.get_one_column([[1, 2, 3], [4, 5, 6], [7, 8, 9] ], 1)
[1, 4, 7]
"""
# Fetches the 1-based col_idx-th column by reading the transpose's row.
def get_one_column(matrix, col_idx) do
  matrix
  |> transpose()
  |> get_one_row(col_idx)
end
@doc """
A row of a matrix is deleted.
#### Argument
- matrix: Target matrix from which to delete the row.
- del_idx: Index of the row to be deleted.
#### Output
The matrix from which the specified row was deleted.
#### Example
iex> MatrixOperation.delete_one_row([[1, 2, 3], [4, 5, 6], [7, 8, 9]], 3)
[[1, 2, 3], [4, 5, 6]]
"""
# Drops the 1-based del_idx-th row; an out-of-range index (including 0)
# leaves the matrix unchanged, matching the original reject semantics.
def delete_one_row(matrix, del_idx) do
  for {row, idx} <- Enum.with_index(matrix, 1), idx != del_idx, do: row
end
@doc """
A column of a matrix is deleted.
#### Argument
- matrix: Target matrix from which to delete the column.
- del_idx: Index of the column to be deleted.
#### Output
The matrix from which the specified column was deleted.
#### Example
iex> MatrixOperation.delete_one_column([[1, 2, 3], [4, 5, 6], [7, 8, 9]], 2)
[[1, 3], [4, 6], [7, 9]]
"""
# Drops the 1-based del_idx-th column: transpose, drop that row, transpose
# back.
def delete_one_column(matrix, del_idx) do
  matrix
  |> transpose()
  |> Enum.with_index(1)
  |> Enum.flat_map(fn {row, idx} -> if(idx == del_idx, do: [], else: [row]) end)
  |> transpose()
end
@doc """
A row of a matrix is exchanged.
#### Argument
- matrix: Target matrix from which to exchange the row.
- exchange_idx: Index of the row to be exchanged.
- exchange_list: List of the row to be exchanged.
#### Output
The matrix from which the specified row was exchanged.
#### Example
iex> MatrixOperation.exchange_one_row([[1, 2, 3], [4, 5, 6], [7, 8, 9]], 3, [1, 1, 1])
[[1, 2, 3], [4, 5, 6], [1, 1, 1]]
"""
# Replaces the 1-based exchange_idx-th row with exchange_list; an
# out-of-range index leaves the matrix unchanged.
def exchange_one_row(matrix, exchange_idx, exchange_list) do
  for {row, idx} <- Enum.with_index(matrix, 1) do
    if idx == exchange_idx, do: exchange_list, else: row
  end
end
@doc """
A column of a matrix is exchanged.
#### Argument
- matrix: Target matrix from which to exchange the column.
- exchange_idx: Index of the column to be exchanged.
- exchange_list: List of the column to be exchanged.
#### Output
The matrix from which the specified column was exchanged.
#### Example
iex> MatrixOperation.exchange_one_column([[1, 2, 3], [4, 5, 6], [7, 8, 9]], 2, [1, 1, 1])
[[1, 1, 3], [4, 1, 6], [7, 1, 9]]
"""
# Replaces the 1-based exchange_idx-th column by swapping the matching row
# of the transpose.
def exchange_one_column(matrix, exchange_idx, exchange_list) do
  matrix
  |> transpose()
  |> exchange_one_row(exchange_idx, exchange_list)
  |> transpose()
end
@doc """
Transpose of a matrix
#### Argument
- matrix: Target matrix to transpose.
#### Output
Transposed matrix
#### Example
iex> MatrixOperation.transpose([[1.0, 2.0], [3.0, 4.0]])
[[1.0, 3.0], [2.0, 4.0]]
"""
# Transposes a list-of-lists matrix. Note: zipping truncates to the
# shortest row for ragged input, same as the original Enum.zip version.
def transpose(matrix) do
  matrix
  |> List.zip()
  |> Enum.map(&Tuple.to_list/1)
end
@doc """
Trace of a matrix
#### Argument
- matrix: Target matrix to output trace.
#### Output
Trance of the matrix
#### Example
iex> MatrixOperation.trace([[1.0, 2.0], [3.0, 4.0]])
5.0
"""
# Sum of the diagonal elements. Uses size/1 for the dimensions and
# add_index/1 to pair each row with its 1-based index.
def trace(matrix) do
{row_num, col_num} = size(matrix)
matrix_with_idx = add_index(matrix)
Enum.map(matrix_with_idx, &trace_sub(&1, row_num, col_num))
|> Enum.sum()
end
# NOTE(review): for a non-square matrix every element maps to nil and
# Enum.sum/1 then raises ArithmeticError — presumably nil was intended as
# the overall result; confirm the desired behavior.
defp trace_sub(_, row_num, col_num) when row_num != col_num do
nil
end
# Picks the idx-th (1-based) element of the row, i.e. the diagonal entry.
defp trace_sub([idx, row], _row_num, _col_num) do
Enum.at(row, idx - 1)
end
@doc """
A determinant of a n×n square matrix is got.
#### Argument
- matrix: Target matrix to output determinant.
#### Output
Determinant of the matrix
#### Example
iex> MatrixOperation.determinant([[1, 2, 1], [2, 1, 0], [1, 1, 2]])
-5
iex> MatrixOperation.determinant([[1, 2, 1, 1], [2, 1, 0, 1], [1, 1, 2, 1], [1, 2, 3, 4]])
-13
iex> MatrixOperation.determinant([ [3,1,1,2,1], [5,1,3,4,1], [2,0,1,0,1], [1,3,2,1,1], [1,1,1,1,1] ])
-14
"""
# Laplace (cofactor) expansion along the first column. Works for any n >= 1
# square matrix; the recursion is factorial-time, so only small n are
# practical.
def determinant(matrix) do
determinant_sub(1, matrix)
end
# 1×1 matrix
defp determinant_sub(_, matrix) when length(matrix) == 1 do
Enum.at(matrix, 0)
|> Enum.at(0)
end
# 2×2 matrix
defp determinant_sub(co, [[a11, a12], [a21, a22]]) do
co * (a11 * a22 - a12 * a21)
end
# 3×3 or over matrix
defp determinant_sub(co, matrix) do
matrix_with_idx = add_index(matrix)
# For row i (1-based): sign (-1)^(i+1), pivot = first element of the row,
# recurse on the minor obtained by removing row i and column 1.
Enum.map(
matrix_with_idx,
&determinant_sub(
(-1 + 2 * rem(hd(&1), 2)) * co * hd(Enum.at(&1, 1)),
minor_matrix(matrix_with_idx, &1)
)
)
|> Enum.sum()
end
# Removes the given [idx, row] pair and drops the first column of every
# remaining row, yielding the minor used by the expansion above.
defp minor_matrix(matrix_with_idx, row) do
(matrix_with_idx -- [row])
|> Enum.map(&Enum.at(&1, 1))
|> Enum.map(&Enum.drop(&1, 1))
end
# add index
# Pairs each row with its 1-based index: [[1, row1], [2, row2], ...].
defp add_index(matrix) do
Stream.iterate(1, &(&1 + 1))
|> Enum.zip(matrix)
|> Enum.map(&(&1 |> Tuple.to_list()))
end
@doc """
Cramer's rule
#### Argument
- matrix: Target matrix to perform Cramer's rule.
- vertical_vec: Vertical vector to perform Cramer's rule.
- select_idx: Index of the target to perform Cramer's rule.
#### Output
Solution to the linear equation when Cramer's rule is applied.
#### Example
iex> MatrixOperation.cramer([[1, 0, 0], [0, 1, 0], [0, 0, 1]], [[1], [0], [0]], 1)
1.0
iex> MatrixOperation.cramer([[0, -2, 1], [-1, 1, -4], [3, 3, 1]], [[3], [-7], [4]], 1)
2.0
"""
# Cramer's rule: x_i = det(A_i) / det(A), where A_i is A with column i
# replaced by the right-hand-side vector.
def cramer(matrix, vertical_vec, select_idx) do
[t] = transpose(vertical_vec)
det = determinant(matrix)
cramer_sub(matrix, t, select_idx - 1, det)
end
# det == nil (non-square) or det == 0 (singular): no unique solution.
defp cramer_sub(_, _, _, nil), do: nil
defp cramer_sub(_, _, _, 0), do: nil
# Replace the selected column (via the corresponding transpose row), take
# the determinant, and divide by det(A).
defp cramer_sub(a, t, select_idx, det) do
rep_det = transpose(a) |> replace_element_in_list(select_idx, t, 0, []) |> determinant
rep_det / det
end
# Returns `list` with the element at 0-based index `select_idx` replaced by
# `replace_element`; `i` is the walking counter, `output` the accumulator.
defp replace_element_in_list(list, i, replace_element, i, output) when i < length(list) do
replace_element_in_list(list, i, replace_element, i + 1, output ++ [replace_element])
end
defp replace_element_in_list(list, select_idx, replace_element, i, output)
when i < length(list) do
replace_element_in_list(
list,
select_idx,
replace_element,
i + 1,
output ++ [Enum.at(list, i)]
)
end
defp replace_element_in_list(list, _select_idx, _replace_element, i, output)
when i == length(list),
do: output
@doc """
Leading principal minor is generetaed.
#### Argument
- matrix: Target matrix to find leading principal minor.
- idx: Index of a row and column to find leading principal minor.
#### Output
Leading principal minor
#### Example
iex> MatrixOperation.leading_principal_minor([[1, 3, 2], [2, 5, 1], [3, 4, 5]], 2)
[[1, 3], [2, 5]]
"""
# Returns the idx-th leading principal minor: the top-left idx x idx
# submatrix.
def leading_principal_minor(matrix, idx) do
  for row <- Enum.take(matrix, idx), do: Enum.take(row, idx)
end
@doc """
LU decomposition
#### Argument
- matrix: Target matrix to solve LU decomposition.
#### Output
{L, U}. L(U) is L(U)-matrix of LU decomposition.
#### Example
iex> MatrixOperation.lu_decomposition([[1, 1, 0, 3], [2, 1, -1, 1], [3, -1, -1, 2], [-1, 2, 3, -1]])
{
[[1, 0, 0, 0], [2.0, 1, 0, 0], [3.0, 4.0, 1, 0], [-1.0, -3.0, 0.0, 1]],
[[1, 1, 0, 3], [0, -1.0, -1.0, -5.0], [0, 0, 3.0, 13.0], [0, 0, 0, -13.0]]
}
"""
# Doolittle-style LU decomposition built row-by-row (L is accumulated in
# transposed form); returns {L, U} or nil when a leading principal minor
# is singular. Relies on helpers const_multiple/2 and inner_product/2
# defined elsewhere in this module.
def lu_decomposition(matrix) do
{row_num, col_num} = size(matrix)
# check the sufficient condition
check_num = lu_decomposition_check(matrix, row_num, col_num)
# NOTE(review): for a non-square matrix the check returns nil, and
# `nil == 0` is false, so decomposition is still attempted — confirm
# whether nil should also short-circuit here.
if(check_num == 0, do: nil, else: lu_decomposition_sub(matrix, 0, length(matrix), [], []))
end
defp lu_decomposition_check(_matrix, row_num, col_num) when row_num != col_num do
nil
end
# Product of the determinants of all leading principal minors; zero means
# the decomposition (without pivoting) does not exist.
defp lu_decomposition_check(matrix, row_num, _col_num) do
Enum.to_list(1..row_num)
|> Enum.map(& leading_principal_minor(matrix, &1) |> determinant)
|> Enum.reduce(fn x, acc -> x * acc end)
end
# k == 0: seed U's first row with the matrix's first row and L's first
# (transposed) row with [1 | first_column / u11].
defp lu_decomposition_sub(matrix, k, matrix_len, _l_matrix, _u_matrix) when k == 0 do
u_matrix = even_matrix(0, {matrix_len, matrix_len})
|> exchange_one_row(1, hd(matrix))
inverce_u11 = 1.0 / hd(hd(u_matrix))
factor = matrix
|> transpose()
|> get_one_row(1)
|> Enum.slice(1, matrix_len)
l_row = [1] ++ hd(const_multiple(inverce_u11, [factor]))
l_matrix = even_matrix(0, {matrix_len, matrix_len})
|> exchange_one_row(1, l_row)
lu_decomposition_sub(matrix, k + 1, matrix_len, l_matrix, u_matrix)
end
# Fill in row k+1 of U, then row k+1 of the transposed L, and recurse.
defp lu_decomposition_sub(matrix, k, matrix_len, l_matrix, u_matrix) when k != matrix_len do
t_matrix = transpose(matrix)
u_solve = u_cal(matrix, k, matrix_len, l_matrix, u_matrix)
u_matrix_2 = exchange_one_row(u_matrix, k + 1, u_solve)
l_solve = l_cal(t_matrix, k, matrix_len, l_matrix, u_matrix_2)
l_matrix_2 = exchange_one_row(l_matrix, k + 1, l_solve)
lu_decomposition_sub(matrix, k + 1, matrix_len, l_matrix_2, u_matrix_2)
end
# Done: L was accumulated transposed, so flip it back before returning.
defp lu_decomposition_sub(_matrix, _k, _matrix_len, l_matrix, u_matrix) do
{transpose(l_matrix), u_matrix}
end
# Computes the sub-diagonal entries of L's column k+1:
# L[i][k] = (a[i][k] - sum(L[i][..] * U[..][k])) / U[k][k].
defp l_cal(t_matrix, k, matrix_len, l_matrix, u_matrix) do
factor = Enum.at(t_matrix, k) |> Enum.slice(k + 1, matrix_len)
u_extract = transpose(u_matrix) |> Enum.at(k)
l_row = transpose(l_matrix)
|> Enum.slice(k + 1, matrix_len)
|> Enum.map(& inner_product(&1, u_extract))
|> Enum.zip(factor)
|> Enum.map(fn {x, y} -> y - x end)
inverce_uii = 1.0 / Enum.at(Enum.at(u_matrix, k), k)
[l_row_2] = const_multiple(inverce_uii, [l_row])
# `++` binds tighter than `|>`, so the whole [1 | row] list is padded
# with k leading zeros by add_zero_element.
[1] ++ l_row_2
|> add_zero_element(0, k)
end
# Computes U[k][j] = a[k][j] - sum(L[k][..] * U[..][j]) for j >= k, padded
# with k leading zeros.
defp u_cal(matrix, k, matrix_len, l_matrix, u_matrix) do
factor = Enum.at(matrix, k) |> Enum.slice(k, matrix_len)
l_extract = transpose(l_matrix) |> Enum.at(k)
transpose(u_matrix)
|> Enum.slice(k, matrix_len)
|> Enum.map(& inner_product(&1, l_extract))
|> Enum.zip(factor)
|> Enum.map(fn {x, y} -> y - x end)
|> add_zero_element(0, k)
end
# Left-pads `list` with (fin - init) zeros.
defp add_zero_element(list, init, fin) when init != fin do
add_zero_element([0] ++ list, init + 1, fin)
end
defp add_zero_element(list, _init, _fin) do
list
end
@doc """
Linear equations are solved by LU decomposition.
#### Argument
- matrix: Target matrix to solve simultaneous linear equations.
- vertical_vec: Vertical vector to solve linear equations.
#### Output
Solutions of the linear equations
#### Example
iex> MatrixOperation.solve_sle([[1, 0, 0], [0, 1, 0], [0, 0, 1]], [[1], [0], [0]])
[1.0, 0.0, 0.0]
iex> MatrixOperation.solve_sle([[4, 1, 1], [1, 3, 1], [2, 1, 5]], [[9], [10], [19]])
[1.0, 2.0, 3.0]
"""
def solve_sle(matrix, vertical_vec) do
  # A unique solution requires a non-singular coefficient matrix.
  case determinant(matrix) == 0 do
    true ->
      nil

    false ->
      # Flatten the column vector into a plain list before substitution.
      [t] = transpose(vertical_vec)
      solve_sle_sub(matrix, t)
  end
end
# Solves A x = t via LU decomposition: forward-substitute L y = t,
# then back-substitute U x = y.
defp solve_sle_sub(matrix, t) do
  {l_matrix, u_matrix} = lu_decomposition(matrix)
  dim = length(l_matrix)
  y = forward_substitution(l_matrix, t, [], 0, dim)
  backward_substitution(u_matrix, y, [], dim, dim)
end
# Forward substitution for L y = t (L has a unit diagonal, so no division).
# k == 0: y1 is simply t1.
defp forward_substitution(l_matrix, t, _y, k, dim) when k == 0 do
  forward_substitution(l_matrix, t, [hd(t)], k + 1, dim)
end
# 0 < k < dim: y_(k+1) = t_(k+1) - sum_{i<=k} l_(k+1)i * y_i.
defp forward_substitution(l_matrix, t, y, k, dim) when k != dim do
  l_extract = Enum.at(l_matrix, k) |> Enum.slice(0, k)
  y_extract = y |> Enum.slice(0, k)
  ly = inner_product(l_extract, y_extract)
  t_ly = Enum.at(t, k) - ly
  forward_substitution(l_matrix, t, y ++ [t_ly], k + 1, dim)
end
# k == dim: all components computed.
defp forward_substitution(_l_matrix, _t, y, k, dim) when k == dim do
  y
end
# Backward substitution for U x = y, building the solution back-to-front.
# k == dim (start): x_n = y_n / u_nn.
defp backward_substitution(u_matrix, y, _b, k, dim) when k == dim do
  dim_1 = dim - 1
  y_n = Enum.at(y, dim_1)
  u_nn = Enum.at(Enum.at(u_matrix, dim_1), dim_1)
  backward_substitution(u_matrix, y, [y_n / u_nn], k - 1, dim)
end
# k == 0: every component prepended; `b` is the full solution.
defp backward_substitution(_, _, b, k, _) when k == 0 do
  b
end
# 0 < k < dim: x_k = (y_k - sum_{i>k} u_ki * x_i) / u_kk.
defp backward_substitution(u_matrix, y, b, k, dim) when k != dim do
  k_1 = k - 1
  u_extract = Enum.at(u_matrix, k_1) |> Enum.slice(k, dim)
  lb = inner_product(u_extract, b)
  # NOTE(review): despite the name, this is the diagonal element u_kk itself
  # (used as a divisor), not its inverse.
  inverce_uii = Enum.at(Enum.at(u_matrix, k_1), k_1)
  t_lb = (Enum.at(y, k_1) - lb) / inverce_uii
  backward_substitution(u_matrix, y, [t_lb] ++ b, k_1, dim)
end
@doc """
A matrix is multiplied by a constant.
#### Argument
- const: Constant to multiply the matrix.
- matrix: Target vector/matrix to be multiplied by a constant.
#### Output
Vector/Matrix multiplied by the constant.
#### Example
iex> MatrixOperation.const_multiple(-1, [1.0, 2.0, 3.0])
[-1.0, -2.0, -3.0]
iex> MatrixOperation.const_multiple(2, [[1, 2, 3], [2, 2, 2], [3, 8, 9]])
[[2, 4, 6], [4, 4, 4], [6, 16, 18]]
"""
# Scalar leaf: plain multiplication.
def const_multiple(const, x) when is_number(x), do: const * x

# List node: recurse elementwise, handling nesting of any depth.
def const_multiple(const, x) when is_list(x) do
  Enum.map(x, fn element -> const_multiple(const, element) end)
end
@doc """
A matrix is added by a constant.
#### Argument
- const: Constant to add the matrix.
- matrix: Target vector/matrix to be added by a constant.
#### Output
Vector/Matrix multiplied by the constant.
#### Example
iex> MatrixOperation.const_addition(1, [1.0, 2.0, 3.0])
[2.0, 3.0, 4.0]
iex> MatrixOperation.const_addition(1, [[1, 2, 3], [2, 2, 2], [3, 8, 9]])
[[2, 3, 4], [3, 3, 3], [4, 9, 10]]
"""
# Scalar leaf: plain addition.
def const_addition(const, x) when is_number(x), do: const + x

# List node: recurse elementwise, handling nesting of any depth.
def const_addition(const, x) when is_list(x) do
  Enum.map(x, fn element -> const_addition(const, element) end)
end
@doc """
Inverse Matrix
#### Argument
- matrix: Matrix to be inverse Matrix.
#### Output
Inverse Matrix
#### Example
iex> MatrixOperation.inverse_matrix([[1, 1, -1], [-2, -1, 1], [-1, -2, 1]])
[[-1.0, -1.0, 0.0], [-1.0, 0.0, -1.0], [-3.0, -1.0, -1.0]]
"""
# Cofactor-expansion inverse: each entry is a signed minor divided by det,
# transposed at the end (adjugate / det). Returns nil-filled rows if det == 0.
def inverse_matrix(matrix) when is_list(hd(matrix)) do
  det = determinant(matrix)
  create_index_matrix(matrix)
  |> Enum.map(&map_index_row(matrix, det, &1))
  |> transpose()
end
# Non-matrix input (e.g. a flat list or scalar) has no inverse here.
def inverse_matrix(_) do
  nil
end
# Builds an n x n grid of 1-based [row, col] index pairs matching `matrix`'s shape.
defp create_index_matrix(matrix) do
  n = length(matrix)
  for row <- 1..n, do: for(col <- 1..n, do: [row, col])
end
# det == 0 means the matrix is singular: no inverse row can be produced.
defp map_index_row(_matrix, det, _row) when det == 0 do
  nil
end
# Maps each [row, col] index pair of this row to its signed minor / det.
defp map_index_row(matrix, det, row) do
  Enum.map(row, &minor_matrix(matrix, det, &1))
end
# Cofactor entry: determinant of the minor (matrix with row_num and col_num
# removed), with the checkerboard sign (-1)^(row+col), divided by det.
defp minor_matrix(matrix, det, [row_num, col_num]) do
  det_temp_matrix =
    delete_one_row(matrix, row_num)
    # Transposing lets the same row-deletion helper remove a column.
    |> transpose
    |> delete_one_row(col_num)
    |> determinant
  if(rem(row_num + col_num, 2) == 0,
    do: det_temp_matrix / det,
    else: -1 * det_temp_matrix / det
  )
end
@doc """
Matrix product
#### Argument
- a: Left side of the product of matrices.
- b: Right side of the product of matrices.
#### Output
Product of two matrices
#### Example
iex> MatrixOperation.product([[3, 2, 3], [2, 1, 2]], [[2, 3], [2, 1], [3, 5]])
[[19, 26], [12, 17]]
"""
def product(a, b), do: check_product(a, b)

# Dimensions must agree (columns of `a` == rows of `b`); otherwise nil.
defp check_product(a, b) do
  {_row_num_a, col_num_a} = size(a)
  {row_num_b, _col_num_b} = size(b)
  if col_num_a == row_num_b, do: product_sub(a, b), else: nil
end

# Row-by-column dot products; `b` is transposed once, outside the row loop.
defp product_sub(a, b) do
  b_columns = transpose(b)

  Enum.map(a, fn row_a ->
    Enum.map(b_columns, &inner_product(row_a, &1))
  end)
end
# Dot product of two equal-length numeric lists.
defp inner_product(row_a, col_b) do
  row_a
  |> Enum.zip(col_b)
  |> Enum.reduce(0, fn {x, y}, acc -> acc + x * y end)
end
@doc """
Matrix addition
#### Argument
- a: Left side of the addition of matrices.
- b: Right side of the addition of matrices.
#### Output
Addition of two matrices
#### Example
iex> MatrixOperation.add([[3, 2, 3], [2, 1, 2]], [[2, 3, 1], [3, 2, 2]])
[[5, 5, 4], [5, 3, 4]]
"""
def add(a, b), do: check_add(a, b)

# Shapes must match exactly; otherwise nil.
defp check_add(a, b) do
  if size(a) == size(b), do: add_sub(a, b), else: nil
end

# Elementwise sum, row by row.
defp add_sub(a, b) do
  Enum.zip(a, b)
  |> Enum.map(fn {row_a, row_b} ->
    Enum.zip(row_a, row_b)
    |> Enum.map(fn {x, y} -> x + y end)
  end)
end
@doc """
Matrix subtraction
#### Argument
- a: Left side of the subtraction of matrices.
- b: Right side of the subtraction of matrices.
#### Output
Subtraction of two matrices
#### Example
iex> MatrixOperation.subtract([[3, 2, 3], [2, 1, 2]], [[2, 3, 1], [3, 2, 2]])
[[1, -1, 2], [-1, -1, 0]]
"""
def subtract(a, b), do: check_subtract(a, b)

# Shapes must match exactly; otherwise nil.
defp check_subtract(a, b) do
  if size(a) == size(b), do: subtract_sub(a, b), else: nil
end

# Elementwise difference a - b, row by row.
defp subtract_sub(a, b) do
  Enum.zip(a, b)
  |> Enum.map(fn {row_a, row_b} ->
    Enum.zip(row_a, row_b)
    |> Enum.map(fn {x, y} -> x - y end)
  end)
end
@doc """
Hadamard product
#### Argument
- a: Left side of the Hadamard production of matrices.
- b: Right side of the Hadamard production of matrices.
#### Output
Hadamard production of two matrices
#### Example
iex> MatrixOperation.hadamard_product([[3, 2, 3], [2, 1, 2]], [[2, 3, 1], [3, 2, 2]])
[[6, 6, 3], [6, 2, 4]]
"""
def hadamard_product(a, b) do
  Enum.zip(a, b)
  |> Enum.map(fn {row_a, row_b} -> hadamard_product_sub(row_a, row_b) end)
end

# Elementwise product of two rows.
defp hadamard_product_sub(row_a, row_b) do
  Enum.zip(row_a, row_b)
  |> Enum.map(fn {x, y} -> x * y end)
end
@doc """
Hadamard division
#### Argument
- a: Left side of the Hadamard division of matrices.
- b: Right side of the Hadamard division of matrices.
#### Output
Hadamard division of two matrices
#### Example
iex> MatrixOperation.hadamard_division([[3, 2, 3], [2, 1, 2]], [[2, 3, 1], [3, 2, 2]])
[[1.5, 0.6666666666666666, 3.0], [0.6666666666666666, 0.5, 1.0]]
"""
def hadamard_division(a, b) do
  Enum.zip(a, b)
  |> Enum.map(fn {row_a, row_b} -> hadamard_division_sub(row_a, row_b) end)
end

# Elementwise quotient a / b of two rows (always floats via `/`).
defp hadamard_division_sub(row_a, row_b) do
  Enum.zip(row_a, row_b)
  |> Enum.map(fn {x, y} -> x / y end)
end
@doc """
Hadamard power
#### Argument
- matrix: Target matrix that elements are to be n-th powered.
- n: Exponent of a power.
#### Output
Matrix that elements are to be n-th powered
#### Example
iex> MatrixOperation.hadamard_power([[3, 2, 3], [2, 1, 2]], 2)
[[9.0, 4.0, 9.0], [4.0, 1.0, 4.0]]
"""
def hadamard_power(matrix, n) do
  # :math.pow/2 always returns floats, so integer inputs come back as floats.
  for row <- matrix do
    for element <- row, do: :math.pow(element, n)
  end
end
@doc """
Tensor product
#### Argument
- a: Left side of the tensor production of matrices.
- b: Right side of the tensor production of matrices.
#### Output
Tensor production of two matrices
#### Example
iex> MatrixOperation.tensor_product([[3, 2, 3], [2, 1, 2]], [[2, 3, 1], [2, 1, 2], [3, 5, 3]])
[
[
[[6, 9, 3], [6, 3, 6], [9, 15, 9]],
[[4, 6, 2], [4, 2, 4], [6, 10, 6]],
[[6, 9, 3], [6, 3, 6], [9, 15, 9]]
],
[
[[4, 6, 2], [4, 2, 4], [6, 10, 6]],
[[2, 3, 1], [2, 1, 2], [3, 5, 3]],
[[4, 6, 2], [4, 2, 4], [6, 10, 6]]
]
]
"""
# Scalar leaf: a scalar times `b` is just a constant multiple.
def tensor_product(a, b) when is_number(a), do: const_multiple(a, b)

# List node: recurse into each element of `a`, pairing it with all of `b`.
def tensor_product(a, b) when is_list(a) do
  Enum.map(a, fn element -> tensor_product(element, b) end)
end
@doc """
Calculate eigenvalue using algebra method [R^2×R^2/R^3×R^3 matrix]
#### Argument
- [[a11, a12], [a21, a22]] or [[a11, a12, a13], [a21, a22, a23], [a31, a32, a33]]:
R^2×R^2/R^3×R^3 matrix
#### Output
Eigenvalues which is a non-trivial value other than zero.
#### Example
iex> MatrixOperation.eigenvalue_algebra([[3, 1], [2, 2]])
{4.0, 1.0}
iex> MatrixOperation.eigenvalue_algebra([[6, -3], [4, -1]])
{3.0, 2.0}
iex> MatrixOperation.eigenvalue_algebra([[1, 1, 1], [1, 2, 1], [1, 2, 3]])
{4.561552806429505, 0.43844714673139706, 1.0000000468390973}
iex> MatrixOperation.eigenvalue_algebra([[2, 1, -1], [1, 1, 0], [-1, 0, 1]])
{3.0000000027003626, 0.9999999918989121}
"""
# 2×2 algebra method
# Roots of the characteristic polynomial x^2 - tr(A)x + det(A).
def eigenvalue_algebra([[a11, a12], [a21, a22]]) do
  quadratic_formula(1, -a11 - a22, a11 * a22 - a12 * a21)
  |> exclude_zero_eigenvalue()
  |> List.to_tuple()
end
# 3×3 algebratic method
# Characteristic cubic a*x^3 + b*x^2 + c*x + d with a = -1 (sign folded in).
def eigenvalue_algebra([[a11, a12, a13], [a21, a22, a23], [a31, a32, a33]]) do
  a = -1
  b = a11 + a22 + a33
  c = a21 * a12 + a13 * a31 + a32 * a23 - a11 * a22 - a11 * a33 - a22 * a33
  d =
    a11 * a22 * a33 + a12 * a23 * a31 + a13 * a32 * a21 - a11 * a32 * a23 - a22 * a31 * a13 -
      a33 * a21 * a12
  # Cubic discriminant; positive means three distinct real roots.
  dis = -4 * a * c * c * c - 27 * a * a * d * d + b * b * c * c + 18 * a * b * c * d - 4 * b * b * b * d
  # Complex eigenvalues (dis <= 0) are not supported: cubic_formula returns nil,
  # and exclude_zero_eigenvalue/List.to_tuple will then raise.
  if(dis > 0, do: cubic_formula(a, b, c, d), else: nil)
  |> exclude_zero_eigenvalue()
  |> List.to_tuple()
end
# Any other shape is unsupported.
def eigenvalue_algebra(_a) do
  "2×2 or 3×3 matrix only"
end
# Real roots of a*x^2 + b*x + c = 0; nil when the discriminant is negative.
defp quadratic_formula(a, b, c) do
  quadratic_formula_sub(a, b, c)
end
# Negative discriminant: complex roots are not supported.
defp quadratic_formula_sub(a, b, c) when b * b < 4 * a * c do
  nil
end
# Standard closed form; larger root first.
defp quadratic_formula_sub(a, b, c) do
  d = :math.sqrt(b * b - 4 * a * c)
  [0.5 * (-b + d) / a, 0.5 * (-b - d) / a]
end
# Real roots of a*x^3 + b*x^2 + c*x + d = 0 via Cardano's method.
# Negative discriminant (one real + two complex roots) is unsupported -> nil.
defp cubic_formula(a, b, c, d)
     when -4 * a * c * c * c - 27 * a * a * d * d + b * b * c * c + 18 * a * b * c * d -
            4 * b * b * b * d < 0 do
  nil
end
defp cubic_formula(a, b, c, d) do
  # Normalize to a monic cubic x^3 + ba*x^2 + ca*x + da.
  ba = b / a
  ca = c / a
  da = d / a
  const1 = (27 * da + 2 * ba * ba * ba - 9 * ba * ca) / 54
  # const2 is [real, imaginary] of sqrt(q^3 + r^2) (complex when negative).
  const2 = cubic_formula_sub(const1 * const1 + :math.pow((3 * ca - ba * ba) / 9, 3))
  # Complex cube roots of (-r ± sqrt(...)); csqrt is an n-th root helper.
  const_plus = csqrt([-const1 + Enum.at(const2, 0), Enum.at(const2, 1)], 3)
  const_minus = csqrt([-const1 - Enum.at(const2, 0), -Enum.at(const2, 1)], 3)
  root3 = :math.sqrt(3)
  # The three real roots, shifted back by -b/(3a).
  x1 = Enum.at(const_plus, 0) + Enum.at(const_minus, 0) - ba / 3
  x2 =
    -0.5 * Enum.at(const_plus, 0) - 0.5 * root3 * Enum.at(const_plus, 1) -
      0.5 * Enum.at(const_minus, 0) + 0.5 * root3 * Enum.at(const_minus, 1) - ba / 3
  x3 =
    -0.5 * Enum.at(const_plus, 0) + 0.5 * root3 * Enum.at(const_plus, 1) -
      0.5 * Enum.at(const_minus, 0) - 0.5 * root3 * Enum.at(const_minus, 1) - ba / 3
  [x1, x2, x3]
  # Snap tiny numerical residues to exactly zero.
  |> Enum.map(& zero_approximation(&1))
end
# Square root as an [real, imaginary] pair: imaginary when the input is negative.
defp cubic_formula_sub(x) when x < 0 do
  [0, :math.sqrt(-x)]
end
defp cubic_formula_sub(x) do
  [:math.sqrt(x), 0]
end
# Hand-rolled arctangent via numerical integration of 1/(1+z^2) from 0 to x.
# NOTE(review): step size 1e-7 makes this very slow for large |x|, and
# :math.atan/1 exists — but replacing it would shift the approximate values
# baked into this module's doctests; confirm before swapping.
# Odd symmetry: atan(-x) = -atan(x).
defp atan(x) when x < 0 do
  y = atan(-x)
  -1 * y
end
defp atan(x) do
  atan_sub(x, 0, 0)
end
# Riemann-sum accumulation: s += dz / (z^2 + 1) until z reaches x.
defp atan_sub(x, z, s) when z < x do
  del = 0.0000001
  z = z + del
  s = s + del / (z * z + 1)
  atan_sub(x, z, s)
end
defp atan_sub(_, _, s) do
  s
end
# Principal n-th root of a complex number given as [real, imaginary].
# NOTE(review): despite the "sqrt" name this is a general n-th root (callers
# pass n = 3); the magnitude is |z|^(1/n) and the angle atan(im/re)/n.
# Zero stays zero.
defp csqrt([re, im], _n) when re == 0 and im == 0 do
  [0, 0]
end
# Purely imaginary, positive: angle is pi/2, so cos/sin reduce to sqrt(3)/2 and 1/2
# (these constants assume n == 3 — TODO confirm for other n).
defp csqrt([re, im], n) when re == 0 and im > 0 do
  r = :math.pow(im * im, 0.5 / n)
  re2 = r * :math.pow(3, 0.5) * 0.5
  im2 = r * 0.5
  [re2, im2]
end
# Purely imaginary, negative: conjugate of the case above.
defp csqrt([re, im], n) when re == 0 and im < 0 do
  r = :math.pow(im * im, 0.5 / n)
  re2 = r * :math.pow(3, 0.5) * 0.5
  im2 = -r * 0.5
  [re2, im2]
end
# Negative real part: flip the sign of the real component of the root.
defp csqrt([re, im], n) when re < 0 do
  r = :math.pow(re * re + im * im, 0.5 / n)
  re2 = -r * :math.cos(atan(im / re) / n)
  im2 = r * :math.sin(atan(im / re) / n)
  [re2, im2]
end
# General case: polar form, r^(1/n) * (cos(theta/n) + i*sin(theta/n)).
defp csqrt([re, im], n) do
  r = :math.pow(re * re + im * im, 0.5 / n)
  re2 = r * :math.cos(atan(im / re) / n)
  im2 = r * :math.sin(atan(im / re) / n)
  [re2, im2]
end
# Due to a numerical calculation error, values within 1e-6 of zero are
# snapped to the integer 0; everything else passes through unchanged.
defp zero_approximation(delta) when abs(delta) < 0.000001 do
  0
end
defp zero_approximation(delta) do
  delta
end
# Drops (numerically) zero eigenvalues from a plain list of eigenvalues.
defp exclude_zero_eigenvalue(eigenvalues) do
  rounded = Enum.map(eigenvalues, &zero_approximation/1)
  # `--` removes at most one zero per entry of the removal list, and the
  # removal list has as many zeros as there are eigenvalues, so every 0 goes.
  rounded -- List.duplicate(0, length(eigenvalues))
end

# Same filtering, but keeps eigenvalues paired with their eigenvectors.
defp exclude_zero_eigenvalue(eigenvalues, eigenvectors) do
  eigenvalues
  |> Enum.map(&zero_approximation/1)
  |> Enum.zip(eigenvectors)
  |> Enum.reject(fn {value, _vector} -> value == 0 end)
  |> Enum.unzip()
end
# Matrix diagonalization using the algebra method [R^2×R^2/R^3×R^3 matrix].
#
# Argument
#   - matrix: R^2×R^2/R^3×R^3 matrix. Target matrix to be diagonalized.
# Output
#   Diagonalized matrix, e.g.
#     diagonalization_algebra([[1, 3], [4, 2]]) #=> [[5.0, 0], [0, -2.0]]
#   Returns nil when the number of real eigenvalues does not match the
#   dimension (complex eigenvalues), e.g. [[2, 1, -1], [1, 1, 0], [-1, 0, 1]].
#
# NOTE: this documentation used to be a bare `"""..."""` heredoc — private
# functions cannot carry @doc, so the unattached string literal was dead code
# that only produced a compiler warning. It is kept here as plain comments.
defp diagonalization_algebra(matrix) do
  ev = matrix
  |> eigenvalue_algebra()
  |> Tuple.to_list()
  # Fewer eigenvalues than dimensions -> not diagonalizable over the reals.
  if(length(ev)==length(matrix), do: ev, else: nil)
  |> diagonalization_algebra_condition()
end
# nil propagates: no real eigenbasis, no diagonal matrix.
defp diagonalization_algebra_condition(matrix) when matrix == nil do
  nil
end
# Builds the diagonal matrix: eigenvalue i goes to position (i, i).
defp diagonalization_algebra_condition(matrix) do
  matrix
  |> Enum.with_index()
  |> Enum.map(& diagonalization_algebra_sub(&1, length(matrix), 0, []))
end
# Row builder: walks columns 0..dim-1, placing `ev` on the diagonal, 0 elsewhere.
defp diagonalization_algebra_sub(_, dim, i, row) when i + 1 > dim do
  row
end
defp diagonalization_algebra_sub({ev, index}, dim, i, row) when i != index do
  diagonalization_algebra_sub({ev, index}, dim, i + 1, row ++ [0])
end
defp diagonalization_algebra_sub({ev, index}, dim, i, row) when i == index do
  diagonalization_algebra_sub({ev, index}, dim, i + 1, row ++ [ev])
end
@doc """
Jordan_normal_form [R^2×R^2/R^3×R^3 matrix]
#### Argument
- matrix: R^2×R^2/R^3×R^3 matrix. Target matrix to be Jordan normal form.
#### Output
Jordan normal form matrix
#### Example
iex> MatrixOperation.jordan_normal_form([[1, 3], [4, 2]])
[[5.0, 0], [0, -2.0]]
iex> MatrixOperation.jordan_normal_form([[7, 2], [-2, 3]])
[[5.0, 1], [0, 5.0]]
iex> MatrixOperation.jordan_normal_form([[2, 1, -1], [1, 1, 0], [-1, 0, 1]])
nil
iex> MatrixOperation.jordan_normal_form([[1, -1, 1], [0, 2, -2], [1, 1, 3]])
[[2.0, 1, 0], [0, 2.0, 1], [0, 0, 2.0]]
iex> MatrixOperation.jordan_normal_form([[3, 0, 1], [-1, 2, -1], [-1, 0, 1]])
[[2.0, 1, 0], [0, 2.0, 0], [0, 0, 2.0]]
iex> MatrixOperation.jordan_normal_form([[1, 0, -1], [0, 2, 0], [0, 1, 1]])
[[2.0, 0, 0], [0, 0.9999999999999999, 1], [0, 0, 0.9999999999999999]]
iex> MatrixOperation.jordan_normal_form([[6, 2, 3], [-3, 0, -2], [-4, -2, -1]])
[[1.0, 0, 0], [0, 2.0, 1], [0, 0, 2.0]]
"""
# R^2×R^2 matrix
# b, c are the characteristic-polynomial coefficients x^2 + b*x + c.
def jordan_normal_form([[m11, m12], [m21, m22]]) do
  b = -m11 - m22
  c = m11 * m22 - m12 * m21
  jordan_R2R2(b, c, [[m11, m12], [m21, m22]])
end
# R^3×R^3 matrix
# b, c, d feed the cubic -x^3 + b*x^2 + c*x + d (note the sign convention
# differs from the 2×2 case — b here is +trace).
def jordan_normal_form([[m11, m12, m13], [m21, m22, m23], [m31, m32, m33]]) do
  b = m11 + m22 + m33
  c = m21 * m12 + m13 * m31 + m32 * m23 - m11 * m22 - m11 * m33 - m22 * m33
  d =
    m11 * m22 * m33 + m12 * m23 * m31 + m13 * m32 * m21 - m11 * m32 * m23 - m22 * m31 * m13 -
      m33 * m21 * m12
  jordan_R3R3(b, c, d, [[m11, m12, m13], [m21, m22, m23], [m31, m32, m33]])
end
# Unsupported sizes.
def jordan_normal_form(_) do
  nil
end
# Two distinct real eigenvalues (positive discriminant): plain diagonalization.
defp jordan_R2R2(b, c, m) when (b * b > 4 * c) do
  diagonalization_algebra(m)
end
# Double root lambda = -b/2: check whether (M - lambda*I) is nilpotent of
# index 2 (one 2×2 Jordan block) or already zero (diagonalizable).
defp jordan_R2R2(b, c, m) when b * b == 4 * c do
  m_lambda = subtract(m, [[-b * 0.5, 0], [0, -b * 0.5]])
  max_jordan_dim = jordan_R2R2_sub(m_lambda, 1)
  jordan_R2R2_sub2(b, max_jordan_dim)
end
# Complex eigenvalues: unsupported.
defp jordan_R2R2(_, _, _) do
  nil
end
# Finds the smallest n with (M - lambda*I)^n == 0 (the largest Jordan block size).
defp jordan_R2R2_sub(ml, n) when ml != [[0, 0], [0, 0]] and n <= 2 do
  product(ml, ml)
  |> jordan_R2R2_sub(n + 1)
end
# Never became zero within 2 squarings: give up.
defp jordan_R2R2_sub(_, n) when n > 2 do
  nil
end
defp jordan_R2R2_sub(_, n) do
  n
end
# Assemble the 2×2 Jordan form for the double root -b/2.
defp jordan_R2R2_sub2(b, mjd) when mjd == 2 do
  [[-b * 0.5, 1], [0, -b * 0.5]]
end
defp jordan_R2R2_sub2(b, mjd) when mjd == 1 do
  [[-b * 0.5, 0], [0, -b * 0.5]]
end
defp jordan_R2R2_sub2(_, _) do
  nil
end
# Three distinct real eigenvalues (positive discriminant): diagonalize.
# NOTE(review): this discriminant is written for the a = -1, b = +trace sign
# convention used by jordan_normal_form/1; it differs superficially from the
# one in eigenvalue_algebra/1 — confirm both agree before refactoring.
defp jordan_R3R3(b, c, d, m)
     when 4 * c * c * c - 27 * d * d + b * b * c * c - 18 * b * c * d -
            4 * b * b * b * d > 0 do
  diagonalization_algebra(m)
end
# Triple root
# lambda = b/3 (b*b == -3c and b^3 == 27d characterize the triple root).
defp jordan_R3R3(b, c, d, m)
     when (4 * c * c * c - 27 * d * d + b * b * c * c - 18 * b * c * d -
             4 * b * b * b * d == 0) and (b * b == -3 * c and b * b * b == 27 * d) do
  m_lambda = subtract(m, [[b/3, 0, 0], [0, b/3, 0], [0, 0, b/3]])
  # Nilpotency index of (M - lambda*I) determines the largest Jordan block.
  max_jordan_dim = jordan_R3R3_sub(m_lambda, 1)
  jordan_R3R3_sub2(b, max_jordan_dim)
end
# Double root
# Zero discriminant without the triple-root conditions: one repeated pair.
defp jordan_R3R3(b, c, d, _)
     when (4 * c * c * c - 27 * d * d + b * b * c * c - 18 * b * c * d -
             4 * b * b * b * d == 0) do
  lambda = cubic_formula(-1, b, c, d)
  jordan_R3R3_sub3(lambda)
end
# Negative discriminant (complex eigenvalues): unsupported.
defp jordan_R3R3(_, _, _, _) do
  nil
end
# Smallest n with (M - lambda*I)^n == 0, squaring and rounding noise to zero.
# NOTE(review): guards use `n < 3` to recurse but `n > 3` to fail — confirm the
# n == 3 boundary is reachable as intended.
defp jordan_R3R3_sub(ml, n) when ml != [[0, 0, 0], [0, 0, 0], [0, 0, 0]] and n < 3 do
  product(ml, ml)
  |> Enum.map(& Enum.map(&1, fn x -> zero_approximation(x) end))
  |> jordan_R3R3_sub(n + 1)
end
defp jordan_R3R3_sub(_, n) when n > 3 do
  nil
end
defp jordan_R3R3_sub(_, n) do
  n
end
# Assemble the triple-root Jordan form with 0, 1 or 2 superdiagonal ones.
defp jordan_R3R3_sub2(b, mjd) when mjd == 3 do
  [[b/3, 1, 0], [0, b/3, 1], [0, 0, b/3]]
end
defp jordan_R3R3_sub2(b, mjd) when mjd == 2 do
  [[b/3, 1, 0], [0, b/3, 0], [0, 0, b/3]]
end
defp jordan_R3R3_sub2(b, mjd) when mjd == 1 do
  [[b/3, 0, 0], [0, b/3, 0], [0, 0, b/3]]
end
defp jordan_R3R3_sub2(_, _) do
  nil
end
# Double-root case: put the 1 next to whichever eigenvalue repeats.
defp jordan_R3R3_sub3([l1, l2, l3]) when l1 == l2 do
  [[l1, 1, 0], [0, l2, 0], [0, 0, l3]]
end
defp jordan_R3R3_sub3([l1, l2, l3]) when l2 == l3 do
  [[l1, 0, 0], [0, l2, 1], [0, 0, l3]]
end
defp jordan_R3R3_sub3([l1, l2, l3]) when l1 == l3 do
  [[l1, 1, 0], [0, l3, 0], [0, 0, l2]]
end
defp jordan_R3R3_sub3(_) do
  nil
end
@doc """
Power iteration method (maximum eigen value and eigen vector)
#### Argument
- matrix: Matrix to adapt the power iteration method.
- iter_max: iteration number of the power iteration method. The default value is 1000.
#### Output
Maximum eigenvalue and normalized eigenvector corresponding to the maximum eigenvalue
#### Example
iex> MatrixOperation.power_iteration([[3, 1], [2, 2]])
{
4.0,
[0.7071067811865476, 0.7071067811865476]
}
iex> MatrixOperation.power_iteration([[1, 1, 2], [0, 2, -1], [0, 0, 3]])
{
3.0,
[0.3333333333333333, -0.6666666666666666, 0.6666666666666666]
}
"""
def power_iteration(matrix, iter_max \\ 1000) do
  # Random start vector; the iteration converges to the dominant eigenvector.
  init_vec = random_column(length(matrix))
  xk_pre = power_iteration_sub(matrix, init_vec, iter_max)
  # eigen vector
  [xk_vec] = product(matrix, xk_pre) |> transpose
  [xk_pre_vec] = transpose(xk_pre)
  # eigen value
  # Rayleigh-quotient style estimate: <Ax, Ax> / <Ax, x>.
  eigen_value = inner_product(xk_vec, xk_vec) / inner_product(xk_vec, xk_pre_vec)
  norm_xk_vec = :math.sqrt(inner_product(xk_vec, xk_vec))
  normalized_eigen_vec = Enum.map(xk_vec, & &1/norm_xk_vec)
  {eigen_value, normalized_eigen_vec}
end
# Builds a random column vector (num x 1) with entries in [0, 5].
defp random_column(num) when num > 1 do
  tmp = Enum.reduce(1..num, [], fn _, acc -> [Enum.random(0..50000) / 10000 | acc] end)
  transpose([tmp])
end
# Dimension <= 1 is not meaningful for the power iteration here.
defp random_column(_num) do
  nil
end
defp power_iteration_sub(matrix, v, iter_max) do
  # Normarization is for overflow suppression
  # Repeatedly applies the matrix and rescales to unit length each step.
  Enum.reduce(1..iter_max, v, fn _, acc ->
    vp = product(matrix, acc)
    [vpt] = transpose(vp)
    const_multiple(1 / :math.sqrt(inner_product(vpt, vpt)), vp)
  end)
end
@doc """
Calculate eigenvalues and eigenvectors by using Jacobi method
#### Argument
- matrix: Matrix to adapt the power iteration method.
- iter_max: iteration number of the power iteration method. The default value is 1000.
#### Output
[Eigenvalues list, Eigenvectors list]: Eigenvalues and eigenvectors
#### Example
iex> MatrixOperation.jacobi([[10, 3, 2], [3, 5, 1], [2, 1, 0]])
{
[11.827601656660915, 3.5956497715829547, -0.42325142824210527],
[
[0.8892872578006493, -0.42761854121985043, -0.16220529066103917],
[0.4179466723082575, 0.9038581385546461, -0.09143874712126684],
[0.1857114757355714, 0.013522151221627882, 0.982511271796136]
]
}
"""
def jacobi(matrix, iter_max \\ 1000) do
  # pap = P^T A P (near-diagonal after iteration); p = accumulated rotations.
  [pap, p] = jacobi_iteration(matrix, iter_max, 0, unit_matrix(length(matrix)))
  p_rnd = Enum.map(p, & Enum.map(&1, fn x -> zero_approximation(x) end))
  # Diagonal of pap holds the eigenvalues; drop (numerically) zero ones
  # together with their eigenvectors.
  pap
  |> Enum.with_index()
  |> Enum.map(& jacobi_sub4(&1))
  |> Enum.map(& zero_approximation(&1))
  |> exclude_zero_eigenvalue(p_rnd)
end
# One Jacobi sweep step: find the largest off-diagonal element, build the
# Givens rotation that annihilates it, and apply P^T A P; recurse iter_max times.
defp jacobi_iteration(matrix, iter_max, l, p_pre) when l != iter_max do
  {row_num, col_num} = size(matrix)
  # All strictly-upper-triangle elements, flattened row by row.
  odts = off_diagonal_terms(matrix, row_num, col_num, 0, 0, [])
  |> Enum.map(& abs(&1))
  max_odt = Enum.max(odts)
  # Recover the (i, j) position of that maximum from its flat index.
  [max_i, max_j] = Enum.with_index(odts)
  |> jocobi_sub(max_odt, 0)
  |> jocobi_sub2(col_num, 0)
  a_ij = get_one_element(matrix, {max_i + 1, max_j + 1})
  a_ii = get_one_element(matrix, {max_i + 1, max_i + 1})
  a_jj = get_one_element(matrix, {max_j + 1, max_j + 1})
  # Rotation angle that zeroes a_ij.
  phi = phi_if(a_ii - a_jj, a_ij)
  p = jacobi_sub3(phi, col_num, max_i, max_j, 0, 0, [], [])
  # Accumulate the rotations to recover the eigenvectors at the end.
  p_pi = product(p_pre, p)
  p
  |> transpose()
  |> product(matrix)
  |> product(p)
  |> jacobi_iteration(iter_max, l + 1, p_pi)
end
# Iterations exhausted: return the (near-diagonal) matrix and rotation product.
defp jacobi_iteration(matrix, _, _, p) do
  [matrix, p]
end
# Jacobi rotation angle. As the diagonal difference vanishes,
# atan(-2*a_ij/denominator)/2 tends to -/+ pi/4 (0.78539816339 is pi/4,
# not pi/2 as the original comments claimed).
# NOTE(review): the guard matches ANY denominator below the threshold,
# including large negative values — confirm `abs(denominator)` was intended.
defp phi_if(denominator, a_ij) when denominator < 0.0000001 and a_ij > 0 do
  -0.78539816339 # -pi/4
end
defp phi_if(denominator, a_ij) when denominator < 0.0000001 and a_ij < 0 do
  0.78539816339 # pi/4
end
# General case: phi = atan(-2*a_ij / (a_ii - a_jj)) / 2.
defp phi_if(denominator, a_ij) do
  atan(-2 * a_ij / denominator) * 0.5
end
# Collects the strictly-upper-triangle elements (i < j) of `m`, row by row,
# using 0-based counters i, j against 1-based get_one_element coordinates.
defp off_diagonal_terms(m, row_num, col_num, i, j, output) when i < j and row_num >= i and col_num > j do
  off_diagonal_terms(m, row_num, col_num, i, j + 1, output ++ [get_one_element(m, {i + 1, j + 1})])
end
# End of a row: move to the next row, restart the column counter.
defp off_diagonal_terms(m, row_num, col_num, i, j, output) when i < j and row_num > i and col_num == j do
  off_diagonal_terms(m, row_num, col_num, i + 1, 0, output)
end
# Walked past the last row and column: done.
defp off_diagonal_terms(_, row_num, col_num, i, j, output) when row_num == i and col_num == j do
  output
end
# On or below the diagonal: skip ahead without collecting.
defp off_diagonal_terms(m, row_num, col_num, i, j, output) do
  off_diagonal_terms(m, row_num, col_num, i, j + 1, output)
end
# Finds the flat index of `target_element` in an {element, index} list.
# NOTE(review): "jocobi" is a typo for "jacobi"; the names are kept because
# sibling functions call them by these spellings.
defp jocobi_sub(element_idx_list, target_element, i) when hd(element_idx_list) == {target_element, i} do
  i
end
defp jocobi_sub(element_idx_list, target_element, i) do
  [_|tail] = element_idx_list
  jocobi_sub(tail, target_element, i + 1)
end
# Converts a flat upper-triangle index back into its (row, col) pair by
# walking rows and subtracting each row's triangle-element count.
defp jocobi_sub2(idx, col_num, i) when idx < (i + 1) * col_num - ((i + 1) * (2 + i) * 0.5) do
  [max_i, max_j] = [i, idx - i * (2 * col_num - i - 1) * 0.5 + i + 1]
  # max_j is computed in float arithmetic; round restores the integer column.
  [max_i, round(max_j)]
end
defp jocobi_sub2(idx, col_num, i) do
  jocobi_sub2(idx, col_num, i + 1)
end
# Builds the Givens rotation matrix for angle `phi` acting on the
# (target_i, target_j) plane: cos(phi) at both diagonal targets, sin(phi) at
# (target_i, target_j), -sin(phi) at (target_j, target_i), identity elsewhere.
# Walks cells (i, j) left-to-right, top-to-bottom, accumulating rows.
# Clause order is significant — do not reorder.
defp jacobi_sub3(phi, col_num, target_i, target_j, i, j, o_row, output) when i == j and ( i == target_i or j == target_j) do
  jacobi_sub3(phi, col_num, target_i, target_j, i, j + 1, o_row ++ [:math.cos(phi)], output)
end
defp jacobi_sub3(phi, col_num, target_i, target_j, i, j, o_row, output) when i == target_i and j == target_j and j != col_num do
  jacobi_sub3(phi, col_num, target_i, target_j, i, j + 1, o_row ++ [:math.sin(phi)], output)
end
# sin(phi) lands in the final column: close the row at the same time.
defp jacobi_sub3(phi, col_num, target_i, target_j, i, j, o_row, output) when i == target_i and j == target_j and j == col_num do
  jacobi_sub3(phi, col_num, target_i, target_j, i + 1, 0, [] , output ++ [o_row ++ [:math.sin(phi)]])
end
defp jacobi_sub3(phi, col_num, target_i, target_j, i, j, o_row, output) when i == target_j and j == target_i do
  jacobi_sub3(phi, col_num, target_i, target_j, i, j + 1, o_row ++ [:math.sin(-phi)], output)
end
# Ordinary diagonal cell: identity 1.
defp jacobi_sub3(phi, col_num, target_i, target_j, i, j, o_row, output) when (i != target_i or j != target_j) and i == j and j != col_num do
  jacobi_sub3(phi, col_num, target_i, target_j, i, j + 1, o_row ++ [1], output)
end
# Past the last column: flush the finished row, start the next.
defp jacobi_sub3(phi, col_num, target_i, target_j, i, j, o_row, output) when (i != target_i or j != target_j) and i != j and j == col_num do
  jacobi_sub3(phi, col_num, target_i, target_j, i + 1, 0, [], output ++ [o_row])
end
# Walked past the last row: the matrix is complete.
defp jacobi_sub3(_, col_num, _, _, i, j, _, output) when i == j and j == col_num do
  output
end
# Ordinary off-diagonal cell: identity 0.
defp jacobi_sub3(phi, col_num, target_i, target_j, i, j, o_row, output) do
  jacobi_sub3(phi, col_num, target_i, target_j, i, j + 1, o_row ++ [0], output)
end
# Picks the diagonal element of a row: element `index` of a {row, index} pair.
defp jacobi_sub4({list, index}), do: Enum.at(list, index)
@doc """
Singular Value Decomposition (SVD) using Jacobi method.
#### Argument
- matrix: Matrix to adapt the SVD by using the QR decomposition method.
#### Output
[Singular values, U-matrix, V-matrix]:
Singular values, U-matrix and V-matrix.
Singular value is a non-trivial value other than zero.
#### Example
iex> MatrixOperation.svd([[1, 0, 0], [0, 1, 1]])
{
[1.0, 1.4142135623730951],
[
[1.0, 0.0],
[0.0, 1.0]
],
[
[1.0, 0.0, 0.0],
[0.0, 0.7071067811865475, 0.7071067811865475]
]
}
iex> MatrixOperation.svd([[1, 1], [1, -1], [1, 0]])
{
[1.7320508075688772, 1.4142135623730951],
[
[0.5773502691896258, 0.5773502691896258, 0.5773502691896258],
[0.7071067811865476, -0.7071067811865476, 0.0]
],
[
[1.0, 0.0],
[0.0, 1.0]
]
}
iex> MatrixOperation.svd([[1, 1], [1, 1]])
{
[1.9999999999999998],
[[0.7071067811865476, 0.7071067811865476]],
[[0.7071067811865476, 0.7071067811865476]]
}
"""
def svd(a) do
  a_t = transpose(a)
  svd_sub(a, a_t)
end
# Wide-or-square input: singular values come from the smaller Gram matrix AA^T.
# NOTE(review): `def` (public) here is likely unintentional but kept — making
# it `defp` would break any external caller.
def svd_sub(a, a_t) when length(a) <= length(a_t) do
  # U matrix
  # Eigenvectors of A A^T are the left singular vectors.
  aat = product(a, a_t)
  {sv_sq, u} = eigen(aat)
  # V matirx
  # Eigenvectors of A^T A are the right singular vectors.
  ata = product(a_t, a)
  {_, v} = eigen(ata)
  # Singular value
  # Singular values are square roots of the Gram-matrix eigenvalues.
  s = Enum.map(sv_sq, & :math.sqrt(&1))
  # A = USV^t
  {s, u, v}
end
# Tall input: take the singular values from A^T A instead.
def svd_sub(a, a_t) do
  # U matrix
  aat = product(a, a_t)
  {_, u} = eigen(aat)
  # V matirx
  ata = product(a_t, a)
  {sv_sq, v} = eigen(ata)
  # Singular value
  s = Enum.map(sv_sq, & :math.sqrt(&1))
  # A = USV^t
  {s, u, v}
end
@doc """
Moore-Penrose general inverse matrix
#### Argument
- matrix: Matrix to be Moore-Penrose general inverse matrix.
#### Output
Moore-Penrose general inverse matrix
#### Example
iex> MatrixOperation.mp_inverse_matrix([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
[
[-0.6388888888888877, -0.16666666666666777, 0.30555555555555647],
[-0.0555555555555557, -1.8041124150158794e-16, 0.05555555555555575],
[0.5277777777777768, 0.16666666666666755, -0.19444444444444522]
]
"""
# Pseudo-inverse via SVD: A+ = V S^-1 U^T.
def mp_inverse_matrix(matrix) do
  svd(matrix)
  |> sv_matrix_inv()
end
defp sv_matrix_inv({sv, u, v}) do
  # Zero matrix with index
  sv_len = length(sv)
  zm_idx =
    even_matrix(0, {sv_len, sv_len})
    |> Enum.with_index()
  # Inverse singular value matrix
  # Diagonal matrix with 1/sigma_i (zero singular values were already
  # excluded upstream by svd/eigen).
  svm_inv =
    Enum.map(
      zm_idx,
      fn {zm_row, idx} ->
        List.replace_at(
          zm_row,
          idx,
          1/Enum.at(sv, idx)
        )
      end
    )
  # VΣ^-U^T
  # NOTE(review): `v` and `u` here already hold vectors as rows, so the
  # apparent transposes differ from the textbook formula — confirm orientation.
  vt = transpose(v)
  vt
  |> product(svm_inv)
  |> product(u)
end
@doc """
Calculate eigenvalues and eigenvectors by using QR decomposition for symmetric matrices.
#### Argument
- a: Symmetric matrix to calculate eigenvalues and eigenvectors by using the QR decomposition.
#### Output
[Eigenvalues list, Eigenvectors list]: Eigenvalues and eigenvectors.
Eigenvalue is a non-trivial value other than zero, and complex numbers are not supported.
#### Example
iex> MatrixOperation.eigh([[3, 0], [0, 2]])
{[3.0, 2.0], [[1.0, 0.0], [0.0, 1.0]]}
iex> MatrixOperation.eigh([[1, 4, 5], [4, 2, 6], [5, 6, 3]])
{
[12.175971065046884, -2.50728796709364, -3.6686830979532647],
[
[0.496599784546191, 0.8095854617397509, -0.3129856771935597],
[0.577350269189626, -0.577350269189626, -0.5773502691896257],
[0.6481167492476515, -0.10600965430705458, 0.7541264035547063]
]
}
iex> row1 = [ 5, -1, 0, 1, 2]
iex> row2 = [-1, 5, 0, 5, 3]
iex> row3 = [ 0, 0, 4, 7, 2]
iex> row4 = [ 1, 5, 7, 0, 9]
iex> row5 = [ 2, 3, 2, 9, 2]
iex> MatrixOperation.eigh([row1, row2, row3, row4, row5])
{
[16.394097630317376, 5.901499037899706, 4.334013998770404, -0.891690865956603, -9.737919801031268],
[
[0.11199211262602528, -0.8283773397697639, -0.4403916223463706, 0.3275456024443265, -0.00422456530824197],
[0.39542664705563546, 0.5332887206459925, -0.5342108202525103, 0.4973517482650887, 0.16279110925630544],
[0.4267472595014673, -0.13695943658576812, 0.6991586689712901, 0.4519460705200494, 0.3256544091239611],
[0.6029452475982553, -0.007822597120772413, 0.07907415791820135, -0.1297224632045824, -0.7831444282267664],
[0.5342652322719152, -0.10283502852688214, -0.15999516131462643, -0.651361611317911, 0.5040984210950804]
]
}
"""
def eigh(a) do
  # Set the number of iterations according to the number of dimensions.
  # Refer to the LAPACK (ex. dlahqr).
  iter_max = 30 * Enum.max([10, length(a)])
  matrix_len = length(a)
  u = unit_matrix(matrix_len)
  # Hessenberg transform
  # (tridiagonal for symmetric input); q_h accumulates the similarity transform.
  {hess, q_h} = hessenberg(a, matrix_len, u, u, 1)
  # Compute eigenvalues and eigenvectors
  {eigenvals, eigenvecs} = hess
  |> qr_iter(matrix_len, q_h, u, 0, iter_max)
  # Drop (numerically) zero eigenvalues together with their vectors.
  {eigenvals, eigenvecs} = exclude_zero_eigenvalue(eigenvals, eigenvecs)
  # Normalize
  eigenvecs_norm_t = eigenvecs
  |> Enum.map(& normalize_vec(&1))
  |> transpose()
  {eigenvals, eigenvecs_norm_t}
end
# QR iteration: repeatedly factor A = QR and form A_k = Q^T A Q until
# convergence (iter_max steps), accumulating Q for the eigenvectors.
# 1×1/2×2 matrices: no shift is applied.
defp qr_iter(a, matrix_len, q, u, count, iter_max)
     when count != iter_max and matrix_len <= 2 do
  q_n = a
  |> qr_for_ev(u, matrix_len, u, 1)
  # Compute matrix a_k
  a_k = q_n
  |> transpose()
  |> product(a)
  |> product(q_n)
  # Compute matrix q_k
  q_k = product(q, q_n)
  qr_iter(a_k, matrix_len, q_k, u, count+1, iter_max)
end
# Larger matrices: Wilkinson-shifted QR for faster convergence.
defp qr_iter(a, matrix_len, q, u, count, iter_max) when count != iter_max do
  shift = wilkinson_shift_value(a)
  # Shift down by `shift`, run one QR step, shift back up.
  a_s = eigen_shift(a, -shift)
  q_n = qr_for_ev(a_s, u, matrix_len, u, 1)
  # Compute matrix a_k
  a_k = q_n
  |> transpose()
  |> product(a_s)
  |> product(q_n)
  |> eigen_shift(shift)
  # Compute matrix q_k
  q_k = product(q, q_n)
  qr_iter(a_k, matrix_len, q_k, u, count+1, iter_max)
end
# Iterations exhausted: read the results off the (near-triangular) matrix.
defp qr_iter(a_k, _, q_k, _, _, _) do
  # Compute eigenvalues
  # The diagonal of the converged matrix holds the eigenvalues.
  eigenvals = a_k
  |> Enum.with_index()
  |> Enum.map(fn {x, i} -> Enum.at(x, i) end)
  # Compute eigenvectors
  eigenvecs = transpose(q_k)
  {eigenvals, eigenvecs}
end
# Wilkinson shift: the eigenvalue of the trailing 2×2 submatrix closest to
# the bottom-right element; accelerates QR convergence.
defp wilkinson_shift_value(a) do
  # The bottom right elements of the matrix
  matrix_len = length(a)
  w11 = get_one_element(a, {matrix_len-1, matrix_len-1})
  w12 = get_one_element(a, {matrix_len-1, matrix_len})
  # Symmetric input assumed, so w21 mirrors w12.
  w21 = w12
  w22 = get_one_element(a, {matrix_len, matrix_len})
  # Wilkinson shift value
  # Eigenvalues of the 2×2 block via the quadratic formula on trace/determinant.
  e = w11 + w22
  f = :math.sqrt(e * e - 4 * (w11 * w22 - w12 * w21))
  k1 = 0.5 * (e + f)
  k2 = 0.5 * (e - f)
  if(abs(w22 - k1) < abs(w22 - k2), do: k1, else: k2)
end
# Returns a + shift*I (spectral shift used by the shifted QR iteration).
defp eigen_shift(a, shift) do
  scaled_identity =
    a
    |> length()
    |> unit_matrix()
    |> then(&const_multiple(shift, &1))

  add(a, scaled_identity)
end
# Hessenberg reduction by successive Householder reflections: zeroes the
# entries below the first subdiagonal of column `num`, then recurses.
# `q` accumulates the product of reflectors for recovering eigenvectors.
defp hessenberg(a, matrix_len, q, u, num) when matrix_len != num + 1 do
  q_n = a
  |> get_one_column(num)
  # Keep the first `num` entries; the reflector acts only below them.
  |> replace_zero(num)
  |> householder(num, u)
  q_nt = transpose(q_n)
  # Similarity transform Q A Q^T preserves the eigenvalues.
  hess = q_n
  |> product(a)
  |> product(q_nt)
  # Compute matrix q_k
  q_k = product(q, q_n)
  hessenberg(hess, matrix_len, q_k, u, num+1)
end
# All columns processed: return the Hessenberg form and accumulated Q.
defp hessenberg(hess, _, q_k, _, _) do
  {hess, q_k}
end
# Scales a vector to unit Euclidean length.
defp normalize_vec(vec) do
  sum_of_squares = Enum.reduce(vec, 0, fn x, acc -> acc + x * x end)
  norm = :math.sqrt(sum_of_squares)
  Enum.map(vec, fn x -> x / norm end)
end
@doc """
Calculate eigenvalues and eigenvectors by using QR decomposition.
#### Argument
- a: Matrix to calculate eigenvalues and eigenvectors by using the QR decomposition.
#### Output
[Eigenvalues list, Eigenvectors list]: Eigenvalues and eigenvectors.
Eigenvalue is a non-trivial value other than zero, and complex numbers are not supported.
#### Example
iex> MatrixOperation.eigen([[1, 4, 5], [4, 2, 6], [5, 6, 3]])
{
[12.175971065046914, -3.6686830979532736, -2.507287967093643],
[
[0.4965997845461912, 0.5773502691896258, 0.6481167492476514],
[-0.3129856771935595, -0.5773502691896258, 0.7541264035547063],
[-0.8095854617397507, 0.577350269189626, 0.10600965430705471]
]
}
"""
# Eigenvalues via QR iteration; eigenvectors via inverse (power) iteration on
# (A - lambda*I)^-1 for each eigenvalue.
def eigen(a) do
  delta = 0.0001 # avoid division by zero
  evals = eigenvalue(a)
  # Shifting by -lambda + delta makes the target eigenvector dominant for
  # the inverse matrix, so power iteration converges to it.
  evecs = evals
  |> Enum.map(
    & eigenvalue_shift(a, -&1+delta)
    |> inverse_matrix()
    |> power_iteration()
    |> extract_second()
  )
  {evals, evecs}
end
# Computes the eigenvalues of `a` by running the QR algorithm on the
# Hessenberg form of `a`; trivial zero eigenvalues are filtered out.
defp eigenvalue(a) do
  # Set the number of iterations according to the number of dimensions.
  # Refer to the LAPACK (ex. dlahqr).
  iter_max = 30 * Enum.max([10, length(a)])
  matrix_len = length(a)
  u = unit_matrix(matrix_len)
  # Hessenberg transform
  {hess, _q} = hessenberg(a, matrix_len, u, u, 1)
  # Compute eigenvalues and eigenvectors
  hess
  |> eigenvalue_sub(matrix_len, u, 0, iter_max)
  |> exclude_zero_eigenvalue()
end
# One QR-algorithm sweep: A_{k+1} = Q^T * A_k * Q. Recurses until the
# iteration budget `iter_max` is spent.
defp eigenvalue_sub(a, matrix_len, u, count, iter_max) when count != iter_max do
  q = qr_for_ev(a, u, matrix_len, u, 1)
  next = q |> transpose() |> product(a) |> product(q)
  eigenvalue_sub(next, matrix_len, u, count + 1, iter_max)
end

# Budget exhausted: the diagonal of the (near-triangular) matrix now
# approximates the eigenvalues.
defp eigenvalue_sub(a_k, _matrix_len, _u, _count, _iter_max) do
  for {row, idx} <- Enum.with_index(a_k), do: Enum.at(row, idx)
end
# One full QR decomposition pass used by the eigenvalue iteration: builds the
# orthogonal factor Q by chaining Householder reflectors column by column.
# `num` is 1-based; the recursion stops once every column but the last has
# been eliminated.
defp qr_for_ev(a, q, matrix_len, u, num) when matrix_len != num do
  h = a
  |> get_one_column(num)
  |> replace_zero(num-1)
  |> householder(num-1, u)
  a_n = product(h, a)
  q_n = product(q, h)
  qr_for_ev(a_n, q_n, matrix_len, u, num+1)
end
# All columns eliminated: return the accumulated orthogonal matrix Q.
defp qr_for_ev(_, q_n, _, _, _) do
  q_n
end
# Zeroes out the first `thresh_num` entries of `list`; entries at or beyond
# that index are kept unchanged.
defp replace_zero(list, thresh_num) do
  list
  |> Enum.with_index()
  |> Enum.map(fn
    {_value, index} when index < thresh_num -> 0
    {value, _index} -> value
  end)
end
# Builds the Householder reflection matrix H = u - v v^T / (|col| * (|col| + |pivot|))
# that zeroes the entries of `col` away from the pivot position.
#
# Arguments: `col` is the (already masked) column vector, `index` the pivot
# position (0-based, via Enum.at), and `u` an identity matrix of matching size.
defp householder(col, index, u) do
  col_norm = col
  |> Enum.map(& &1*&1)
  |> Enum.sum()
  |> :math.sqrt()
  top = Enum.at(col, index)
  # Add the norm with the sign of the pivot to avoid catastrophic
  # cancellation when the pivot is close to +-|col|.
  top_cn = if(top >= 0, do: top + col_norm, else: top - col_norm)
  v = List.replace_at(col, index, top_cn)
  cn_top = if(top >= 0, do: col_norm + top, else: col_norm - top)
  # Outer product v v^T as a matrix ([v] is a 1-row matrix).
  vtv = [v]
  |> transpose
  |> product([v])
  # avoid division by zero
  norm = if(
    col_norm * cn_top == 0,
    do: 0.0001,
    else: col_norm * cn_top
  )
  m = const_multiple(1/norm, vtv)
  subtract(u, m)
end
# Returns a + ev * I: shifts every diagonal element of `a` by `ev`.
defp eigenvalue_shift(a, ev) do
  identity = unit_matrix(length(a))
  add(a, const_multiple(ev, identity))
end
# Keeps only the second element of a pair — used to pull the eigenvector out
# of power_iteration/1's {value, vector} result.
defp extract_second({_first, second}), do: second
@doc """
Matrix diagonalization using the QR decomposition.
#### Argument
- a: Matrix to be diagonalized by using the QR decomposition.
#### Output
Diagonalized matrix
#### Example
    iex> MatrixOperation.diagonalization([[1, 3], [4, 2]])
    [[5.000000000000018, 0], [0, -1.999999999999997]]
    iex> MatrixOperation.diagonalization([[2, 1, -1], [1, 1, 5], [-1, 2, 1]])
    [[4.101784906061095, 0, 0], [0, -2.6170329440542233, 0], [0, 0, 2.515248037993127]]
    iex> MatrixOperation.diagonalization([[2, 1, -1], [1, 1, 0], [-1, 0, 1]])
    nil
    iex> MatrixOperation.diagonalization([[2, 1, -1], [1, 1, 0], [-1, 0, 1]])
    nil
    iex> MatrixOperation.diagonalization([[16, -1, 1, 2, 3], [2, 12, 1, 5, 6], [1, 3, -24, 8, 9], [3, 4, 9, 1, 23], [5, 3, 1, 2, 1]])
    [
      [-26.608939298557207, 0, 0, 0, 0],
      [0, 20.42436493500135, 0, 0, 0],
      [0, 0, 14.665793374162678, 0, 0],
      [0, 0, 0, -3.5477665464080044, 0],
      [0, 0, 0, 0, 1.0665475358009446]
    ]
"""
def diagonalization(a) do
  ev = eigenvalue(a)

  # Diagonalization is only possible when a full set of (non-zero, real)
  # eigenvalues was recovered; otherwise pass nil through to signal failure.
  argument = if length(ev) == length(a), do: ev, else: nil
  diagonalization_condition(argument)
end
# nil marks a matrix that could not be diagonalized.
defp diagonalization_condition(nil), do: nil

# Builds the diagonal matrix: eigenvalue at position {i, i}, zero elsewhere,
# with floating-point noise rounded away by zero_approximation/1.
defp diagonalization_condition(eigenvalues) do
  dim = length(eigenvalues)

  eigenvalues
  |> Enum.with_index()
  |> Enum.map(fn {ev, idx} -> diagonalization_sub({ev, idx}, dim, 0, []) end)
  |> Enum.map(fn row -> Enum.map(row, &zero_approximation/1) end)
end

# Row builder: appends `ev` at the matching column and 0 elsewhere, until the
# row has `dim` entries.
defp diagonalization_sub(_pair, dim, i, row) when i + 1 > dim, do: row

defp diagonalization_sub({ev, index}, dim, i, row) do
  entry = if i == index, do: ev, else: 0
  diagonalization_sub({ev, index}, dim, i + 1, row ++ [entry])
end
@doc """
Calculate singular Value by using QR decomposition.
#### Argument
- a: Matrix to calculate singular values.
#### Output
Singular values list. Singular value is a non-trivial value other than zero.
#### Example
    iex> MatrixOperation.singular_value([[1, 2, 3, 1], [2, 4, 1, 5], [3, 3, 10, 8]])
    {14.9121726205599, 4.23646340778201, 1.6369134152873912}
"""
def singular_value(a) do
  # Singular values of A are the square roots of the eigenvalues of A^T A.
  ata = product(transpose(a), a)

  ata
  |> eigenvalue()
  |> Enum.map(&:math.sqrt/1)
  |> List.to_tuple()
end
@doc """
Calculate the rank of a matrix by using QR decomposition
#### Example
    iex> MatrixOperation.rank([[2, 3, 4], [1, 4, 2], [2, 1, 4]])
    2
    iex> MatrixOperation.rank([[2, 3, 4, 2], [1, 4, 2, 3], [2, 1, 4, 4]])
    3
    iex> input = [[2, 3, 4, 3], [1, 42, 2, 11], [2, 1, 4, 4], [3, 7, 2, 2], [35, 6, 4, 6], [7, 23, 5, 2]]
    iex> MatrixOperation.rank(input)
    4
"""
def rank(matrix) do
  # singular_value/1 already drops trivial (zero) singular values, so the
  # rank is simply how many remain. tuple_size/1 is O(1), replacing the
  # original O(n) Tuple.to_list |> length round-trip.
  matrix
  |> singular_value()
  |> tuple_size()
end
@doc """
Frobenius norm
#### Argument
- a: Matrix to calculate Frobenius norm.
#### Output
Frobenius norm
#### Example
    iex> MatrixOperation.frobenius_norm([[2, 3], [1, 4], [2, 1]])
    5.916079783099616
    iex> MatrixOperation.frobenius_norm([[1, 3, 3], [2, 4, 1], [2, 3, 2]])
    7.54983443527075
"""
def frobenius_norm(a) do
  # Square every entry, sum them all, take the square root.
  a
  |> Enum.flat_map(fn row -> Enum.map(row, fn x -> x * x end) end)
  |> Enum.sum()
  |> :math.sqrt()
end
@doc """
The one norm
#### Argument
- a: Matrix to calculate the one norm.
#### Output
one norm
#### Example
iex> MatrixOperation.one_norm([[2, 3], [1, 4], [2, 1]])
5
iex> MatrixOperation.one_norm([[1, 3, 3], [2, 4, 1], [2, 3, 2]])
7
"""
# NOTE(review): this computes the maximum absolute ROW sum (the outer lists
# are rows). By the usual convention that is the infinity norm, while the
# induced 1-norm is the maximum absolute COLUMN sum — which is what
# max_norm/1 below computes. The doctests pin the current behaviour, so
# callers may rely on it; confirm intent before renaming or swapping.
def one_norm(a) do
  a
  |> Enum.map(& Enum.map(&1, fn x -> if(x > 0, do: x, else: -x) end))
  |> Enum.map(& Enum.sum(&1))
  |> Enum.max()
end
@doc """
The two norm
#### Argument
- a: Matrix to calculate the two norm.
#### Output
The two norm
#### Example
    iex> MatrixOperation.two_norm([[2, 3], [1, 4], [2, 1]])
    5.674983803488139
    iex> MatrixOperation.two_norm([[1, 3, 3], [2, 4, 1], [2, 3, 2]])
    7.329546646114923
"""
def two_norm(a) do
  # The spectral (2-) norm is the largest singular value.
  singular_values = Tuple.to_list(singular_value(a))
  Enum.max(singular_values)
end
@doc """
The max norm
#### Argument
- a: Matrix to calculate the max norm.
#### Output
The max norm
#### Example
iex> MatrixOperation.max_norm([[2, 3], [1, 4], [2, 1]])
8
iex> MatrixOperation.max_norm([[1, 3, 3], [2, 4, 1], [2, 3, 2]])
10
"""
# NOTE(review): the transpose means this computes the maximum absolute
# COLUMN sum of `a` — the conventional induced 1-norm — while one_norm/1
# above computes the maximum row sum. The two functions look swapped
# relative to standard naming, but the doctests pin the current behaviour;
# confirm intent before changing.
def max_norm(a) do
  a
  |> transpose()
  |> Enum.map(& Enum.map(&1, fn x -> if(x > 0, do: x, else: -x) end))
  |> Enum.map(& Enum.sum(&1))
  |> Enum.max()
end
@doc """
A variance-covariance matrix is generated.
#### Argument
- data: x and y coordinate lists ([[x_1, y_1], [x_2, y_2], ...]) to calculate variance-covariance matrix.
#### Output
Variance-covariance matrix
#### Example
    iex> MatrixOperation.variance_covariance_matrix([[40, 80], [80, 90], [90, 100]])
    [
      [466.66666666666663, 166.66666666666666],
      [166.66666666666666, 66.66666666666666]
    ]
"""
def variance_covariance_matrix(data) do
  # Transpose so each inner list is one variable's observations, then center
  # each variable on its mean. The mean is hoisted out of the inner map: the
  # original recomputed Enum.sum/length for every element, making centering
  # O(n^2) per variable (and shadowed `x` in nested scopes).
  centered =
    data
    |> transpose()
    |> Enum.map(fn variable ->
      mean = Enum.sum(variable) / length(variable)
      Enum.map(variable, fn value -> value - mean end)
    end)

  centered_t = transpose(centered)
  sum_matrix = product(centered, centered_t)
  # Population covariance: divide by the number of observations n.
  const_multiple(1 / length(centered_t), sum_matrix)
end
end
|
lib/matrix_operation.ex
| 0.914329
| 0.763484
|
matrix_operation.ex
|
starcoder
|
defmodule Quark.Curry do
  @moduledoc ~S"""
  [Currying](https://en.wikipedia.org/wiki/Currying) breaks up a function into a
  series of unary functions that apply their arguments to some inner
  n-ary function. This is a convenient way to achieve a general and flexible
  partial application on any curried function.
  """

  defmacro __using__(_) do
    quote do
      require unquote(__MODULE__)
      import unquote(__MODULE__)
    end
  end

  @doc ~S"""
  Curry a function at runtime, rather than upon definition

  ## Examples

      iex> curried_reduce_3 = curry &Enum.reduce/3
      ...> {_, arity} = :erlang.fun_info(curried_reduce_3, :arity)
      ...> arity
      1

      iex> curried_reduce_3 = curry &Enum.reduce/3
      ...> curried_reduce_3.([1,2,3]).(42).(&(&1 + &2))
      48

  """
  @spec curry(fun) :: fun
  def curry(fun) do
    {_, arity} = :erlang.fun_info(fun, :arity)
    curry(fun, arity, [])
  end

  # Accumulates arguments one at a time; once `arity` args are collected,
  # they are reversed (they were prepended) and applied to the inner fun.
  @spec curry(fun, integer, [any]) :: fun
  defp curry(fun, 0, arguments), do: apply(fun, Enum.reverse(arguments))

  defp curry(fun, arity, arguments) do
    fn arg -> curry(fun, arity - 1, [arg | arguments]) end
  end

  @doc ~S"""
  Convert a curried function to a function on pairs

  ## Examples

      iex> curried_add = fn x -> (fn y -> x + y end) end
      iex> add = uncurry curried_add
      iex> add.(1,2)
      3

  """
  @spec uncurry((any -> fun)) :: ((any, any) -> any)
  def uncurry(fun), do: &(fun.(&1).(&2))

  @doc ~S"""
  Apply a series of arguments to a curried function

  ## Examples

      iex> curried_add = fn x -> (fn y -> x + y end) end
      ...> uncurry(curried_add, [1,2])
      3

  """
  @spec uncurry(fun, any | [any]) :: any
  def uncurry(fun, arg_list) when is_list(arg_list) do
    # BUG FIX: the original reduced with &Kernel.apply/2. Enum.reduce passes
    # (element, accumulator), so that called apply(arg, fun_so_far) — the
    # arguments reversed — and crashed for any non-empty list. Apply each
    # argument to the (curried) function in turn instead.
    Enum.reduce(arg_list, fun, fn arg, partial -> partial.(arg) end)
  end

  @doc ~S"""
  Apply an argument to a function

  ## Examples

      iex> add_one = &(&1 + 1)
      ...> uncurry(add_one, 1)
      2

      iex> curried_add = fn x -> (fn y -> x + y end) end
      ...> add_one = uncurry(curried_add, 1)
      ...> add_one.(3)
      4

  """
  @spec uncurry(fun, any) :: any
  def uncurry(fun, arg), do: fun.(arg)

  @doc "Define a curried function"
  defmacro defcurry(head, do: body) do
    {fun_name, ctx, args} = head

    quote do
      def unquote({fun_name, ctx, []}), do: unquote(wrap(args, body))
    end
  end

  @doc "Define a curried private function"
  defmacro defcurryp(head, do: body) do
    {fun_name, ctx, args} = head

    quote do
      defp unquote({fun_name, ctx, []}), do: unquote(wrap(args, body))
    end
  end

  # Wraps `body` in one `fn` per argument, producing the nested unary
  # functions that make up the curried definition.
  defp wrap([arg | args], body) do
    quote do
      fn unquote(arg) ->
        unquote(wrap(args, body))
      end
    end
  end

  defp wrap(_, body), do: body
end
|
lib/quark/curry.ex
| 0.78968
| 0.634798
|
curry.ex
|
starcoder
|
defmodule Chunkr.Page do
  @moduledoc """
  A single page of results.
  ## Fields
  * `raw_results` — rows in the form `{cursor_values, record}` where `cursor_values` is the list
    of values to be used for generating a cursor. Note that in cases where coalescing or other
    manipulation was performed for the sake of pagination, the cursor values will reflect
    that manipulation, while the record itself will by default not.
  * `has_previous_page` — whether or not there is a previous page of results.
  * `has_next_page` — whether or not there is a subsequent page of results.
  * `start_cursor` — a cursor representing the first record in this page of results.
  * `end_cursor` — a cursor representing the last record in this page of results.
  * `opts` — `Chunkr.Opts` used to generate this page of results.
  """

  alias Chunkr.{Cursor, Opts, Page}

  @type record :: any()

  @type t :: %__MODULE__{
          raw_results: [{Cursor.cursor_values(), record()}],
          has_previous_page: boolean(),
          has_next_page: boolean(),
          start_cursor: Cursor.cursor() | nil,
          end_cursor: Cursor.cursor() | nil,
          opts: Opts.t()
        }

  @enforce_keys [
    :raw_results,
    :has_previous_page,
    :has_next_page,
    :start_cursor,
    :end_cursor,
    :opts
  ]
  defstruct [
    :raw_results,
    :has_previous_page,
    :has_next_page,
    :start_cursor,
    :end_cursor,
    :opts
  ]

  @doc """
  Fetches the total, non-paginated count of records that match the query.
  Counting the total number of records requires a (potentially very expensive) extra database query,
  so this is not performed by default.
  """
  @spec total_count(Page.t()) :: integer()
  def total_count(%__MODULE__{opts: opts}) do
    opts.repo.aggregate(opts.query, :count)
  end

  @doc """
  Extracts just the records out of the raw results.
  """
  @spec records(Page.t()) :: [any()]
  def records(%__MODULE__{raw_results: raw_results}) do
    for {_cursor_values, record} <- raw_results, do: record
  end

  @doc """
  Returns opaque cursors with their corresponding records.
  """
  @spec cursors_and_records(Page.t()) :: [{Cursor.cursor(), any()}]
  def cursors_and_records(%__MODULE__{} = page) do
    cursor_module = page.opts.cursor_mod || raise("`cursor_mod` cannot be `nil`.")

    for {cursor_values, record} <- page.raw_results do
      {Cursor.encode(cursor_values, cursor_module), record}
    end
  end
end
|
lib/chunkr/page.ex
| 0.836321
| 0.608187
|
page.ex
|
starcoder
|
defmodule Fuentes.Account do
  @moduledoc """
  The Account module represents accounts in the system which are of _asset_,
  _liability_, or _equity_ types, in accordance with the "accounting equation".
  Each account must be set to one of the following types:

  | TYPE      | NORMAL BALANCE | DESCRIPTION                            |
  | :-------- | :-------------:| :--------------------------------------|
  | asset     | Debit          | Resources owned by the Business Entity |
  | liability | Credit         | Debts owed to outsiders                |
  | equity    | Credit         | Owners rights to the Assets            |

  Each account can also be marked as a _Contra Account_. A contra account will have it's
  normal balance swapped. For example, to remove equity, a "Drawing" account may be created
  as a contra equity account as follows:

  `account = %Fuentes.Account{name: "Drawing", type: "asset", contra: true}`

  At all times the balance of all accounts should conform to the "accounting equation"
  *Assets = Liabilities + Owner's Equity*

  Each account type acts as it's own ledger.

  For more details see:
  [Wikipedia - Accounting Equation](http://en.wikipedia.org/wiki/Accounting_equation)
  [Wikipedia - Debits, Credits, and Contra Accounts](http://en.wikipedia.org/wiki/Debits_and_credits)
  """

  @typedoc "A Fuentes Account type."
  @type t :: %__MODULE__{ name: String.t, type: String.t, contra: Boolean.t, uuid: String.t, amounts: [Fuentes.Amount] }

  alias Fuentes.{ Account, Amount, Config }

  use Ecto.Schema
  import Ecto.Changeset
  import Ecto.Query, only: [from: 1, from: 2]

  schema "accounts" do
    field :name, :string
    field :type, :string
    field :uuid, :string
    field :contra, :boolean, default: false
    field :balance, :decimal, virtual: true
    has_many :amounts, Fuentes.Amount, on_delete: :delete_all
    # BUG FIX: `timestamps` without parens is a deprecated bare call.
    timestamps()
  end

  # BUG FIX: Ecto.Changeset.cast/3 (Ecto >= 2) requires permitted fields as
  # atoms; the original string list (~w without the `a` modifier) raises.
  @fields ~w(name type contra uuid)a

  # NOTE(review): these attribute names look swapped relative to accounting
  # convention (assets are debit-normal), but balance/3 depends on the
  # current grouping to produce correct signs — renaming requires changing
  # both together.
  @credit_types ["asset"]
  @debit_types ["liability", "equity"]

  @doc """
  Creates a changeset requiring a `:name` and `:type`
  """
  def changeset(model, params \\ %{}) do
    model
    |> cast(params, @fields)
    |> validate_required([:name, :type])
    |> validate_inclusion(:type, @credit_types ++ @debit_types)
  end

  @doc """
  Creates a changeset requiring a `:name` and `:type`, and assigns a fresh UUID.
  """
  def uniq_account_changeset(model, params \\ %{}) do
    model
    |> cast(params, @fields)
    |> validate_required([:name, :type])
    |> validate_inclusion(:type, @credit_types ++ @debit_types)
    |> generate_uuid
  end

  # Preloads the :amounts association onto an accounts query.
  defp with_amounts(query) do
    from q in query, preload: [:amounts]
  end

  @doc false
  @spec amount_sum(Ecto.Repo.t, Fuentes.Account.t, String.t) :: Decimal.t
  def amount_sum(repo, account, type) do
    [sum] = Amount |> Amount.for_account(account) |> Amount.sum_type(type) |> repo.all
    # The SUM aggregate returns nil for an account with no amounts.
    if sum do
      sum
    else
      Decimal.new(0)
    end
  end

  @doc false
  @spec amount_sum(Ecto.Repo.t, Fuentes.Account.t, String.t, map) :: Decimal.t
  def amount_sum(repo, account, type, dates) do
    [sum] =
      Amount |> Amount.for_account(account) |> Amount.dated(dates) |> Amount.sum_type(type) |> repo.all

    if sum do
      sum
    else
      Decimal.new(0)
    end
  end

  @doc """
  Computes the account balance for a given `Fuentes.Account` in a given
  Ecto.Repo when provided with a map of dates in the format
  `%{from_date: from_date, to_date: to_date}`.
  Returns Decimal type.
  """
  @spec balance(Ecto.Repo.t, [Fuentes.Account.t], Ecto.Date.t) :: Decimal.t
  def balance(repo \\ Config.repo, account_or_account_list, dates \\ nil)

  # Balance for individual account (all entries, no date filter).
  def balance(repo, account = %Account{ type: type, contra: contra }, dates) when is_nil(dates) do
    credits = Account.amount_sum(repo, account, "credit")
    debits = Account.amount_sum(repo, account, "debit")

    # BUG FIX: the original bound `balance =` inside each if-branch — an
    # unused, branch-scoped binding that only produced compiler warnings.
    # Debit-normal accounts (per @credit_types) report debits - credits;
    # `contra` flips the normal balance.
    if type in @credit_types && !contra do
      Decimal.sub(debits, credits)
    else
      Decimal.sub(credits, debits)
    end
  end

  @doc """
  Computes the account balance for a list of `Fuentes.Account` in a given
  Ecto.Repo inclusive of all entries. This function is intended to be used with a
  list of `Fuentes.Account`s of the same type.
  Returns Decimal type.
  """
  # Balance for individual account with dates
  def balance(repo, account = %Account{ type: type, contra: contra }, dates) do
    credits = Account.amount_sum(repo, account, "credit", dates)
    debits = Account.amount_sum(repo, account, "debit", dates)

    if type in @credit_types && !contra do
      Decimal.sub(debits, credits)
    else
      Decimal.sub(credits, debits)
    end
  end

  # Balance for list of accounts, intended for use when of the same account type.
  def balance(repo, accounts, dates) when is_list(accounts) do
    Enum.reduce(accounts, Decimal.new(0.0), fn(account, acc) ->
      Decimal.add( Account.balance(repo, account, dates), acc)
    end)
  end

  @doc """
  Computes the trial balance for all accounts in the provided Ecto.Repo.
  Returns Decimal type.
  """
  # Trial Balance for all accounts
  @spec trial_balance(Ecto.Repo.t) :: Decimal.t
  def trial_balance(repo \\ Config.repo_from_config) do
    accounts = repo.all(Account)
    accounts_by_type = Enum.group_by(accounts, fn(i) -> String.to_atom(i.type) end)

    # Pass `dates` explicitly: relying on default-argument expansion for a
    # 2-arg call is easy to misread.
    accounts_by_type = Enum.map(accounts_by_type, fn { account_type, accounts } ->
      { account_type, Account.balance(repo, accounts, nil) }
    end)

    # NOTE(review): this raises if any of :asset/:liability/:equity has no
    # accounts (nil lookup) — confirm whether an empty ledger is possible.
    accounts_by_type[:asset]
    |> Decimal.sub(accounts_by_type[:liability])
    |> Decimal.sub(accounts_by_type[:equity])
  end

  @doc """
  Create default accounts for a user
  """
  def create_accounts() do
    # BUG FIX: the original referenced a bare `Repo` module that is never
    # aliased or imported in this file; resolve the repo via Config, as
    # balance/3 already does.
    repo = Config.repo

    asset_struct = %{ name: "Assets", type: "asset" }

    { _, debit_account } =
      %Account{}
      |> Account.uniq_account_changeset(asset_struct)
      |> repo.insert()

    # BUG FIX: type was "liabilities", which is not in @debit_types
    # ("liability"), so validate_inclusion rejected the insert.
    liability_struct = %{ name: "Liabilities", type: "liability", uuid: debit_account.uuid }

    { _, credit_account } =
      %Account{}
      |> Account.changeset(liability_struct)
      |> repo.insert()

    # BUG FIX: the Equity account was also created with type "liabilities";
    # it should be "equity".
    equity_struct = %{ name: "Equity", type: "equity", uuid: debit_account.uuid }

    { _, equity_account } =
      %Account{}
      |> Account.changeset(equity_struct)
      |> repo.insert()

    %{ accounts: [ debit_account, credit_account, equity_account ] }
  end

  @doc """
  generates a unique id for the account
  """
  defp generate_uuid(changeset) do
    changeset
    |> put_change(:uuid, Ecto.UUID.generate)
  end
end
|
lib/fuentes/account.ex
| 0.907743
| 0.657332
|
account.ex
|
starcoder
|
defmodule Fuzzyurl.Match do
  @doc ~S"""
  Returns an integer representing how closely `mask` (which may have
  wildcards) resembles `url` (which may not), or `nil` in the
  case of a conflict.
  """
  @spec match(%Fuzzyurl{}, %Fuzzyurl{}) :: non_neg_integer | nil
  def match(%Fuzzyurl{} = mask, %Fuzzyurl{} = url) do
    scores = match_scores(mask, url) |> Map.from_struct() |> Map.values()
    # Any nil component score means a hard conflict for the whole match.
    if nil in scores, do: nil, else: Enum.sum(scores)
  end

  @doc ~S"""
  Returns `true` if `mask` (which may contain wildcards) matches `url`
  (which may not), or `false` otherwise.
  """
  @spec matches?(%Fuzzyurl{}, %Fuzzyurl{}) :: boolean
  def matches?(%Fuzzyurl{} = mask, %Fuzzyurl{} = url) do
    if match(mask, url) == nil, do: false, else: true
  end

  @doc ~S"""
  Returns a Fuzzyurl struct containing values representing how well different
  parts of `mask` and `url` match. Values are integer; higher values indicate
  closer matches.
  """
  @spec match_scores(%Fuzzyurl{}, %Fuzzyurl{}) :: %Fuzzyurl{}
  def match_scores(%Fuzzyurl{} = mask, %Fuzzyurl{} = url) do
    ## Infer port from protocol, and vice versa.
    url_protocol = url.protocol || Fuzzyurl.Protocols.get_protocol(url.port)
    protocol_score = fuzzy_match(mask.protocol, url_protocol)
    url_port = url.port || Fuzzyurl.Protocols.get_port(url.protocol)
    port_score = fuzzy_match(mask.port, url_port)

    %Fuzzyurl{
      protocol: protocol_score,
      username: fuzzy_match(mask.username, url.username),
      password: fuzzy_match(mask.password, url.password),
      hostname: fuzzy_match(mask.hostname, url.hostname),
      port: port_score,
      path: fuzzy_match(mask.path, url.path),
      query: fuzzy_match(mask.query, url.query),
      fragment: fuzzy_match(mask.fragment, url.fragment)
    }
  end

  @doc ~S"""
  Returns 0 for wildcard match, 1 for exact match, or nil otherwise.

  Wildcard language:

      *              matches anything
      foo/*          matches "foo/" and "foo/bar/baz" but not "foo"
      foo/**         matches "foo/" and "foo/bar/baz" and "foo"
      *.example.com  matches "api.v1.example.com" but not "example.com"
      **.example.com matches "api.v1.example.com" and "example.com"

  Any other form is treated as a literal match.
  """
  @spec fuzzy_match(String.t(), String.t()) :: 0 | 1 | nil
  def fuzzy_match(mask, value) when is_binary(mask) and is_binary(value) do
    # Reversed forms let the trailing-wildcard patterns ("foo/*", "foo/**")
    # be matched with binary prefix patterns.
    case {mask, value, String.reverse(mask), String.reverse(value)} do
      {"*", _, _, _} ->
        0

      {x, x, _, _} ->
        1

      {"**." <> m, v, _, _} ->
        if m == v or String.ends_with?(v, "." <> m), do: 0, else: nil

      {"*" <> m, v, _, _} ->
        if String.ends_with?(v, m), do: 0, else: nil

      {_, _, "**/" <> m, v} ->
        if m == v or String.ends_with?(v, "/" <> m), do: 0, else: nil

      {_, _, "*" <> m, v} ->
        if String.ends_with?(v, m), do: 0, else: nil

      _ ->
        nil
    end
  end

  def fuzzy_match("*", nil), do: 0
  def fuzzy_match(_, nil), do: nil
  def fuzzy_match(nil, _), do: nil

  @doc ~S"""
  From a list of Fuzzyurl masks, returns the list index of the one which
  best matches `url`. Returns nil if none of `masks` match.

      iex> masks = [Fuzzyurl.mask(path: "/foo/*"), Fuzzyurl.mask(path: "/foo/bar"), Fuzzyurl.mask]
      iex> Fuzzyurl.Match.best_match_index(masks, Fuzzyurl.from_string("http://exmaple.com/foo/bar"))
      1
  """
  def best_match_index(masks, url) do
    # BUG FIX: the original piped `List.first/1` straight into `elem/2`,
    # which raised ArgumentError when no mask matched (List.first -> nil)
    # instead of returning nil as documented.
    masks
    |> Enum.with_index()
    |> Enum.map(fn {m, i} -> {i, match(m, url)} end)
    |> Enum.filter(fn {_i, score} -> score != nil end)
    |> Enum.sort(fn {_ia, a}, {_ib, b} -> a >= b end)
    |> List.first()
    |> case do
      nil -> nil
      {index, _score} -> index
    end
  end
end
|
lib/fuzzyurl/match.ex
| 0.854688
| 0.426322
|
match.ex
|
starcoder
|
defmodule Snitch.Domain.Order.Transitions do
  @moduledoc """
  Helpers for the `Order` state machine.

  The `Snitch.Domain.Order.DefaultMachine` makes direct use of these helpers.

  By documenting these handy functions, we encourage the developer of a custom
  state machine to use, extend or compose them to build large event transitions.
  """

  use Snitch.Domain

  alias BeepBop.Context
  alias Snitch.Core.Tools.MultiTenancy.MultiQuery
  alias Snitch.Data.Model.Package
  alias Snitch.Data.Schema.Order
  alias Snitch.Data.Model.Payment, as: PaymentModel
  alias Snitch.Domain.Package, as: PackageDomain
  alias Snitch.Domain.{Payment, Shipment, ShipmentEngine, Splitters.Weight}
  alias Snitch.Domain.Order, as: OrderDomain
  alias Snitch.Tools.OrderEmail

  @doc """
  Embeds the addresses and computes some totals of the `order`.
  The following fields are required under the `:state` key:
  * `:billing_address` The billing `Address` params
  * `:shipping_address` The shipping `Address` params
  The following fields are computed: `item_total`, `tax_total` and `total`.
  `total` = `item_total` + `tax_total`
  > The promo and adjustment totals are ignored for now.
  """
  @spec associate_address(Context.t()) :: Context.t()
  def associate_address(
        %Context{
          valid?: true,
          struct: order,
          multi: multi,
          state: %{
            billing_address: billing,
            shipping_address: shipping
          }
        } = context
      ) do
    changeset =
      Order.partial_update_changeset(order, %{
        billing_address: billing,
        shipping_address: shipping
      })

    struct(context, multi: MultiQuery.update(multi, :order, changeset))
  end

  def associate_address(%Context{} = context), do: struct(context, valid?: false)

  @doc """
  Computes a shipment fulfilling the `order`.
  Returns a new `Context.t` struct with the `shipment` under the the [`:state`,
  `:shipment`] key-path.
  > The `:state` key of the `context` is not utilised here.
  ## Note
  If `shipment` is `[]`, we DO NOT mark the `context` "invalid".
  """
  @spec compute_shipments(Context.t()) :: Context.t()
  # TODO: This function does not gracefully handle errors, they are raised!
  def compute_shipments(%Context{valid?: true, struct: order, state: state} = context) do
    # Fall back to the address captured in `state` when the order has not
    # been persisted with one yet.
    order =
      if is_nil(order.shipping_address) do
        %{order | shipping_address: state.shipping_address}
      else
        order
      end

    shipment =
      order
      |> Shipment.default_packages()
      |> ShipmentEngine.run(order)
      |> Weight.split()

    struct(context, state: %{shipment: shipment})
  end

  def compute_shipments(%Context{valid?: false} = context), do: context

  @doc """
  Persists the computed shipment to the DB.
  `Package`s and their `PackageItem`s are inserted together in a DB transaction.
  The `packages` are added to the `:state` under the `:packages` key.
  Thus the signature of `context.state.packages` is,
  ```
  context.state.packages :: {:ok, [Pacakge.t()]} | {:error, Ecto.Changeset.t()}
  ```
  """
  @spec persist_shipment(Context.t()) :: Context.t()
  def persist_shipment(%Context{valid?: true, struct: %Order{} = order} = context) do
    %{state: %{shipment: shipment}} = context

    packages =
      Repo.transaction(fn ->
        shipment
        |> Stream.map(&Shipment.to_package(&1, order))
        |> Stream.map(&Package.create/1)
        |> fail_fast_reduce()
        |> case do
          {:error, error} ->
            Repo.rollback(error)

          {:ok, packages} ->
            packages
        end
      end)

    state = Map.put(context.state, :packages, packages)
    struct(context, state: state)
  end

  def persist_shipment(%Context{valid?: false} = context), do: context

  @doc """
  Removes the shipment belonging to Order from DB.
  Shipments which are basically `Package`s and their `PackageItem`s are
  removed together in a transaction.
  ```
  context.state.packages :: {:ok, [Pacakge.t()]} | {:error, Ecto.Changeset.t()}
  ```
  """
  def remove_shipment(%Context{valid?: true, struct: %Order{} = order} = context) do
    packages =
      Repo.transaction(fn ->
        order.packages
        |> Stream.map(&Package.delete/1)
        |> fail_fast_reduce()
        |> case do
          {:error, error} ->
            Repo.rollback(error)

          {:ok, packages} ->
            packages
        end
      end)

    case packages do
      {:ok, packages} ->
        state = Map.put(context.state, :packages, packages)
        struct(context, state: state)

      {:error, changeset} ->
        struct(context, valid?: false, errors: changeset)
    end
  end

  def remove_shipment(%Context{valid?: false} = context), do: context

  @doc """
  Persists the shipping preferences of the user in each `package` of the `order`.
  Along with the chosen `ShippingMethod`, we update package price fields. User's
  selection is assumed to be under the `context.state.shipping_preferences` key-path.
  ## Schema of the `:state`
  ```
  %{
    shipping_preferences: [
      %{
        package_id: string,
        shipping_method_id: non_neg_integer
      }
    ]
  }
  ```
  ## Assumptions
  * For each `package` of the `order`, a valid `shipping_method` must be chosen.
    > If an `order` has 3 packages, then
      `length(context.state.shipping_preferences)` must be `3`.
  * The chosen `shipping_method` for the `package` must be one among the
    `package.shipping_methods`.
  """
  @spec persist_shipping_preferences(Context.t()) :: Context.t()
  def persist_shipping_preferences(%Context{valid?: true, struct: %Order{} = order} = context) do
    %{state: %{shipping_preferences: shipping_preferences}, multi: multi} = context

    packages = Map.fetch!(Repo.preload(order, [:packages]), :packages)

    if validate_shipping_preferences(packages, shipping_preferences) do
      function = fn _, _ ->
        shipping_preferences
        |> Stream.map(fn %{package_id: package_id, shipping_method_id: shipping_method_id} ->
          packages
          |> Enum.find(fn %{id: id} -> id == package_id end)
          |> PackageDomain.set_shipping_method(shipping_method_id, order)
        end)
        |> fail_fast_reduce()
      end

      struct(context, multi: Multi.run(multi, :packages, function))
    else
      struct(context, valid?: false, errors: [shipping_preferences: "is invalid"])
    end
  end

  # BUG FIX: every other transition passes an invalid context through, but
  # this one had no fallback clause and raised FunctionClauseError instead.
  def persist_shipping_preferences(%Context{valid?: false} = context), do: context

  @doc """
  Marks all the `shipment` aka `packages` of an order transition from `pending`
  to the `processing` state.
  This function is a side effect of the transition in which payment for an
  order is made. In case of full payment of the order, all the packages should
  move to the processing stage.
  """
  @spec process_shipments(Context.t()) :: Context.t()
  def process_shipments(%Context{valid?: true, struct: %Order{} = order} = context) do
    params = [state: "processing"]
    package_update_multi = PackageDomain.update_all_for_order(Multi.new(), order, params)
    struct(context, multi: package_update_multi)
  end

  def process_shipments(%Context{valid?: false} = context), do: context

  @doc """
  Checks if `order` is fully paid for.
  The order total cost should match sum of all the `payments` for that `order`
  in `paid` state.
  """
  @spec confirm_order_payment_status(Context.t()) :: Context.t()
  def confirm_order_payment_status(%Context{valid?: true, struct: %Order{} = order} = context) do
    order_cost = OrderDomain.total_amount(order)
    order_payments_total = OrderDomain.payments_total(order, "paid")

    if order_cost == order_payments_total do
      context
    else
      struct(context, valid?: false, errors: [error: "balance due for order"])
    end
  end

  def confirm_order_payment_status(%Context{valid?: false} = context), do: context

  @doc """
  Tranistion function to handle payment creation.
  For more information see.
  ## See
  `Snitch.Domain.Payment`
  """
  # BUG FIX: the original spec returned the misspelled type `Contex.t()`.
  @spec make_payment_record(Context.t()) :: Context.t()
  def make_payment_record(
        %Context{
          valid?: true,
          struct: %Order{} = order,
          state: %{
            payment_method: payment_method,
            payment_params: payment_params
          }
        } = context
      ) do
    case Payment.create_payment(payment_params, payment_method, order) do
      {:ok, map} ->
        state = Map.put(context.state, :payment, map)
        struct(context, state: state)

      {:error, changeset} ->
        struct(context, valid?: false, errors: changeset.errors)
    end
  end

  # BUG FIX: missing fallback clause — an invalid context (or one without the
  # required state keys) previously raised FunctionClauseError. Mirrors the
  # associate_address/1 fallback style.
  def make_payment_record(%Context{} = context), do: struct(context, valid?: false)

  @doc """
  Removes `payment` as well as corresponding `subpayment` type records created
  for an order in a transaction.
  """
  def remove_payment_record(%Context{valid?: true, struct: %Order{} = order} = context) do
    payments =
      Repo.transaction(fn ->
        order.payments
        |> Stream.map(&PaymentModel.delete/1)
        |> fail_fast_reduce()
        |> case do
          {:error, error} ->
            Repo.rollback(error)

          {:ok, payments} ->
            payments
        end
      end)

    case payments do
      {:ok, payments} ->
        state = Map.put(context.state, :payments, payments)
        struct(context, state: state)

      {:error, changeset} ->
        struct(context, valid?: false, errors: changeset)
    end
  end

  def remove_payment_record(%Context{valid?: false} = context), do: context

  def send_email_confirmation(
        %Context{valid?: true, struct: %Order{} = order, multi: multi} = context
      ) do
    multi =
      Multi.run(multi, :add_email, fn _, _ ->
        MailManager.send_mail(order)
        {:ok, "mail_sent"}
      end)

    struct(context, state: context.state, multi: multi)
  end

  def send_email_confirmation(context), do: context

  def update_stock(%Context{valid?: true, struct: %Order{} = order, multi: multi} = context) do
    order = order |> Repo.preload(packages: :items)

    packages =
      Repo.transaction(fn ->
        order.packages
        |> Stream.map(&PackageDomain.update_items_stock/1)
        |> Enum.each(fn result ->
          result
          |> fail_fast_reduce()
          |> case do
            {:error, error} ->
              Repo.rollback(error)

            {:ok, package} ->
              package
          end
        end)
      end)

    case packages do
      {:ok, _} ->
        struct(context, state: context.state)

      {:error, changeset} ->
        struct(context, valid?: false, errors: changeset)
    end
  end

  def update_stock(context), do: context

  def check_order_completion(%Context{valid?: true} = context) do
    context
    |> order_paid()
    |> packages_delivered()
  end

  def check_order_completion(context), do: context

  defp order_paid(%Context{valid?: true, struct: %Order{} = order} = context) do
    if OrderDomain.payments_total(order, "paid") == OrderDomain.total_amount(order) do
      context
    else
      struct(context, valid?: false, errors: [error: "Payment due for order"])
    end
  end

  defp order_paid(context), do: context

  # `multi` was bound but unused in the original head; dropped to silence
  # the compiler warning.
  defp packages_delivered(%Context{valid?: true, struct: %Order{} = order} = context) do
    if OrderDomain.order_package_delivered?(order) do
      context
    else
      struct(context, valid?: false, errors: [error: "Packages not delivered"])
    end
  end

  defp packages_delivered(context), do: context

  defp validate_shipping_preferences([], _), do: true

  defp validate_shipping_preferences(packages, selection) do
    # selection must be over all packages, no package can be skipped.
    # TODO: Replace with some nice API contract/validator.
    package_ids =
      packages
      |> Enum.map(fn %{id: id} -> id end)
      |> MapSet.new()

    selection
    |> Enum.map(fn %{package_id: p_id} -> p_id end)
    |> MapSet.new()
    |> MapSet.equal?(package_ids)
  end

  # Collects {:ok, value} results into {:ok, [values]}, halting at the first
  # {:error, _} and returning it unchanged.
  defp fail_fast_reduce(things) do
    Enum.reduce_while(things, {:ok, []}, fn
      {:ok, thing}, {:ok, acc} ->
        {:cont, {:ok, [thing | acc]}}

      {:error, _} = error, _ ->
        {:halt, error}
    end)
  end
end
|
apps/snitch_core/lib/core/domain/order/transitions.ex
| 0.889685
| 0.857052
|
transitions.ex
|
starcoder
|
defmodule Prolly.CountMinSketch do
  @moduledoc """
  Use CountMinSketch when you want to count and query the
  approximate number of occurences of values in a stream using sublinear memory

  For example, "how many times has the string `foo` been in the stream so far?" is
  a reasonable question for CountMinSketch.

  A CountMinSketch will not undercount occurences, but may overcount occurences,
  reporting a count that is higher than the real number of occurences for a given
  value.
  """

  require Vector

  @opaque t :: %__MODULE__{
            matrix: Vector.t,
            hash_fns: list((String.t -> integer)),
            depth: pos_integer
          }

  # storing depth on the struct is an optimization so it doesn't
  # have to be computed for every single update and query
  defstruct [matrix: nil, hash_fns: nil, depth: 1]

  @doc """
  Create a CountMinSketch

  ## Examples

      iex> require Prolly.CountMinSketch, as: Sketch
      iex> Sketch.new(3, 5,
      ...> [fn(value) -> :crypto.hash(:sha, value) |> :crypto.bytes_to_integer() end,
      ...> fn(value) -> :crypto.hash(:md5, value) |> :crypto.bytes_to_integer() end,
      ...> fn(value) -> :crypto.hash(:sha256, value) |> :crypto.bytes_to_integer() end]).matrix
      ...> |> Enum.map(&Vector.to_list(&1))
      [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]]
  """
  # NOTE(review): `width` is expected to equal `length(hash_fns)` — the matrix
  # is built with one row per hash function index used by update/query. A
  # mismatch would surface later as an out-of-bounds access; confirm whether
  # new/3 should enforce this invariant.
  @spec new(pos_integer, pos_integer, list((String.t -> integer))) :: t
  def new(width, depth, hash_fns) when is_integer(width) and is_integer(depth) do
    matrix =
      Enum.map(1..width, fn _ ->
        Vector.new(Enum.map(1..depth, fn _ -> 0 end))
      end)
      |> Vector.new()

    %__MODULE__{
      matrix: matrix,
      hash_fns: hash_fns,
      depth: depth
    }
  end

  @doc """
  Query a sketch for the count of a given value

  ## Examples

      iex> require Prolly.CountMinSketch, as: Sketch
      iex> Sketch.new(3, 5,
      ...> [fn(value) -> :crypto.hash(:sha, value) |> :crypto.bytes_to_integer() end,
      ...> fn(value) -> :crypto.hash(:md5, value) |> :crypto.bytes_to_integer() end,
      ...> fn(value) -> :crypto.hash(:sha256, value) |> :crypto.bytes_to_integer() end])
      ...> |> Sketch.update("hi") |> Sketch.get_count("hi")
      1

      iex> require Prolly.CountMinSketch, as: Sketch
      iex> sketch = Sketch.new(3, 5,
      ...> [fn(value) -> :crypto.hash(:sha, value) |> :crypto.bytes_to_integer() end,
      ...> fn(value) -> :crypto.hash(:md5, value) |> :crypto.bytes_to_integer() end,
      ...> fn(value) -> :crypto.hash(:sha256, value) |> :crypto.bytes_to_integer() end])
      ...> |> Sketch.update("hi")
      ...> |> Sketch.update("hi")
      ...> |> Sketch.update("hi")
      iex> Sketch.get_count(sketch, "hi")
      3

      iex> require Prolly.CountMinSketch, as: Sketch
      iex> sketch = Sketch.new(3, 5,
      ...> [fn(value) -> :crypto.hash(:sha, value) |> :crypto.bytes_to_integer() end,
      ...> fn(value) -> :crypto.hash(:md5, value) |> :crypto.bytes_to_integer() end,
      ...> fn(value) -> :crypto.hash(:sha256, value) |> :crypto.bytes_to_integer() end])
      ...> |> Sketch.update([77, "list"])
      ...> |> Sketch.update([77, "list"])
      ...> |> Sketch.update([77, "list"])
      ...> |> Sketch.update([77, "list"])
      ...> |> Sketch.update([77, "list"])
      iex> Sketch.get_count(sketch, [77, "list"])
      5
  """
  # Fix: the spec previously named the bare protocol `String.Chars`, which is
  # not a valid type — `String.Chars.t()` is the type of values implementing it.
  @spec get_count(t, String.Chars.t()) :: integer
  def get_count(%__MODULE__{matrix: matrix, hash_fns: hash_fns, depth: depth}, value) when is_binary(value) do
    # The answer is the minimum counter across all hash rows, which bounds
    # the overcounting introduced by hash collisions.
    hash_fns
    |> Enum.with_index()
    |> Enum.map(fn {hash_fn, i} ->
      [i, compute_index(hash_fn, value, depth)]
    end)
    |> Enum.map(fn path ->
      Kernel.get_in(matrix, path)
    end)
    |> Enum.min()
  end

  # Non-binary values are normalized through String.Chars before querying.
  def get_count(%__MODULE__{} = sketch, value) do
    get_count(sketch, to_string(value))
  end

  @doc """
  Update a sketch with a value

  ## Examples

      iex> require Prolly.CountMinSketch, as: Sketch
      iex> sketch = Sketch.new(3, 5,
      ...> [fn(value) -> :crypto.hash(:sha, value) |> :crypto.bytes_to_integer() end,
      ...> fn(value) -> :crypto.hash(:md5, value) |> :crypto.bytes_to_integer() end,
      ...> fn(value) -> :crypto.hash(:sha256, value) |> :crypto.bytes_to_integer() end])
      ...> |> Sketch.update("hi")
      iex> sketch.matrix |> Enum.map(&Vector.to_list(&1))
      [[0, 1, 0, 0, 0], [0, 0, 1, 0, 0], [0, 1, 0, 0, 0]]

      iex> require Prolly.CountMinSketch, as: Sketch
      iex> sketch = Sketch.new(3, 5,
      ...> [fn(value) -> :crypto.hash(:sha, value) |> :crypto.bytes_to_integer() end,
      ...> fn(value) -> :crypto.hash(:md5, value) |> :crypto.bytes_to_integer() end,
      ...> fn(value) -> :crypto.hash(:sha256, value) |> :crypto.bytes_to_integer() end])
      ...> |> Sketch.update(["a", "list", "of", "things"])
      iex> sketch.matrix |> Enum.map(&Vector.to_list(&1))
      [[0, 0, 0, 0, 1], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0]]
  """
  # Fix: same spec correction as get_count/2 (`String.Chars.t()`).
  @spec update(t, String.Chars.t()) :: t
  def update(%__MODULE__{matrix: matrix, hash_fns: hash_fns, depth: depth} = sketch, value) when is_binary(value) do
    # Increment one counter per hash row at the bucket the hash selects.
    new_matrix =
      hash_fns
      |> Enum.with_index()
      |> Enum.reduce(matrix, fn {hash_fn, i}, acc ->
        Kernel.update_in(
          acc,
          [i, compute_index(hash_fn, value, depth)],
          &(&1 + 1)
        )
      end)

    %{sketch | matrix: new_matrix}
  end

  # Non-binary values are normalized through String.Chars before updating.
  def update(%__MODULE__{} = sketch, value) do
    update(sketch, to_string(value))
  end

  @doc """
  Union two sketches by cell-wise adding their counts

  ## Examples

      iex> require Prolly.CountMinSketch, as: Sketch
      iex> sketch1 = Sketch.new(3, 5,
      ...> [fn(value) -> :crypto.hash(:sha, value) |> :crypto.bytes_to_integer() end,
      ...> fn(value) -> :crypto.hash(:md5, value) |> :crypto.bytes_to_integer() end,
      ...> fn(value) -> :crypto.hash(:sha256, value) |> :crypto.bytes_to_integer() end])
      ...> |> Sketch.update("hi")
      iex> sketch2 = Sketch.new(3, 5,
      ...> [fn(value) -> :crypto.hash(:sha, value) |> :crypto.bytes_to_integer() end,
      ...> fn(value) -> :crypto.hash(:md5, value) |> :crypto.bytes_to_integer() end,
      ...> fn(value) -> :crypto.hash(:sha256, value) |> :crypto.bytes_to_integer() end])
      ...> |> Sketch.update("hi")
      iex> Sketch.union(sketch1, sketch2).matrix |> Enum.map(&Vector.to_list(&1))
      [[0, 2, 0, 0, 0], [0, 0, 2, 0, 0], [0, 2, 0, 0, 0]]
  """
  # NOTE(review): assumes both sketches share width, depth and hash functions —
  # only then is the cell-wise sum a meaningful union. Confirm callers uphold this.
  @spec union(t, t) :: t
  def union(
        %__MODULE__{matrix: matrix1, hash_fns: hash_fns, depth: depth} = sketch1,
        %__MODULE__{matrix: matrix2} = _sketch2
      ) do
    paths =
      for w <- 0..(Enum.count(hash_fns) - 1),
          d <- 0..(depth - 1), do: [w, d]

    new_matrix =
      Enum.reduce(paths, matrix1, fn path, matrix ->
        Kernel.update_in(matrix, path, fn first ->
          first + Kernel.get_in(matrix2, path)
        end)
      end)

    %{sketch1 | matrix: new_matrix}
  end

  # Maps a hashed value onto a bucket index in 0..k-1.
  # Fix: replaced piping into an immediately-invoked anonymous fn with a
  # plain rem/2 call.
  defp compute_index(hash_fn, value, k) do
    rem(hash_fn.(value), k)
  end
end
|
lib/prolly/count_min_sketch.ex
| 0.894873
| 0.700556
|
count_min_sketch.ex
|
starcoder
|
defmodule ArtemisWeb.ViewHelper.Tables do
  @moduledoc """
  View helpers for rendering sortable table headers and configurable data
  tables driven by query params.
  """

  use Phoenix.HTML

  @default_delimiter ","

  @doc """
  Generates empty table row if no records match
  """
  def render_table_row_if_empty(records, options \\ [])

  # Paginated results carry their records under :entries.
  def render_table_row_if_empty(%{entries: entries}, options),
    do: render_table_row_if_empty(entries, options)

  # Fix: match the empty list directly instead of `when length(records) == 0`
  # — length/1 walks the whole list and the guard silently fails for non-lists.
  def render_table_row_if_empty([], options) do
    message = Keyword.get(options, :message, "No records found")

    Phoenix.View.render(ArtemisWeb.LayoutView, "table_row_if_empty.html", message: message)
  end

  def render_table_row_if_empty(_records, _options), do: nil

  @doc """
  Render sortable table header
  """
  def sortable_table_header(conn, value, label, delimiter \\ @default_delimiter) do
    path = order_path(conn, value, delimiter)
    text = content_tag(:span, label)
    icon = content_tag(:i, "", class: icon_class(conn, value, delimiter))

    content_tag(:a, [text, icon], href: path)
  end

  # Builds the current request path with the `order` query param toggled for `value`.
  defp order_path(conn, value, delimiter) do
    updated_query_params = update_query_param(conn, value, delimiter)
    query_string = Plug.Conn.Query.encode(updated_query_params)

    "#{conn.request_path}?#{query_string}"
  end

  # Toggles `value` within the `order` query param: ascending -> descending
  # (prefixed with "-") -> ascending, or starts a fresh order on the field.
  defp update_query_param(conn, value, delimiter) do
    inverse = inverse_value(value)
    query_params = Map.get(conn, :query_params, %{})
    current_fields = current_order_fields(conn, delimiter)

    updated_fields =
      cond do
        Enum.member?(current_fields, value) -> replace_item(current_fields, value, inverse)
        Enum.member?(current_fields, inverse) -> replace_item(current_fields, inverse, value)
        true -> [value]
      end

    updated_value = Enum.join(updated_fields, delimiter)

    Map.put(query_params, "order", updated_value)
  end

  # Fields currently listed in the `order` query param,
  # e.g. "name,-slug" -> ["name", "-slug"]. Shared by update_query_param/3
  # and icon_class/3 (previously duplicated inline in both).
  defp current_order_fields(conn, delimiter) do
    conn
    |> Map.get(:query_params, %{})
    |> Map.get("order", "")
    |> String.split(delimiter)
  end

  # A "-" prefix marks descending order on a field.
  defp inverse_value(value), do: "-#{value}"

  defp replace_item(list, current, next) do
    case Enum.find_index(list, &(&1 == current)) do
      nil -> list
      index -> List.update_at(list, index, fn _ -> next end)
    end
  end

  # Selects the Semantic UI sort-icon class reflecting the field's current direction.
  defp icon_class(conn, value, delimiter) do
    base = "sort icon"
    current_fields = current_order_fields(conn, delimiter)

    cond do
      Enum.member?(current_fields, value) -> "#{base} ascending"
      Enum.member?(current_fields, inverse_value(value)) -> "#{base} descending"
      true -> base
    end
  end

  @doc """
  Render Data Table

  Example:

      <%=
        render_data_table(
          @conn,
          @customers,
          allowed_columns: allowed_columns(),
          default_columns: ["name", "slug", "actions"]
        )
      %>

  Options:

      allowed_columns: map of allowed columns
      default_columns: list of strings

  ## Features

  ### Column Ordering

  The `columns` query param can be used to define a custom order to table
  columns. For example, the default columns might be:

      Name | Slug | Actions

  By passing in the query param `?columns=status,name,address` the table
  will transform to show:

      Status | Name | Address

  This enables custom reporting in a standard and repeatable way across the
  application. Since query params are used to define the columns, any reports a
  user creates can be revisited using the same URL. Which in turn, also makes
  it easy to share with others.

  ### Table Export

  Custom exporters can be defined for any format, like `html`, `json`, `csv`,
  `xls`, or `pdf`. There's no conventions to learn or magic. As documented below,
  standard Elixir and Phoenix code can be used to define and write custom
  exporters in any format.

  ## Options

  The goal of the data table is to be extensible without introducing new
  data table specific conventions. Instead, enable extension using standard
  Elixir and Phoenix calls.

  ### Allowed Columns

  The value for `allowed_columns` should be a map. A complete example may look like:

      %{
        "name" => [
          label: fn (_conn) -> "Name" end,
          value: fn (_conn, row) -> row.name end,
        ],
        "slug" => [
          label: fn (_conn) -> "Slug" end,
          value: fn (_conn, row) -> row.slug end,
        ]
      }

  The key for each entry should be a URI friendly slug. It is used to match
  against the `columns` query param.

  The value for each entry is a keyword list. It must define a `label` and
  `value` function.

  The `label` function is used in column headings. It takes one argument, the
  `conn` struct. The most common return will be a simple bitstring, but
  the `conn` is included for more advanced usage, for instance creating an
  anchor link.

  The `value` function is used for the column value. It takes two arguments,
  the `conn` struct and the `row` value. The most common return will be calling
  an attribute on the row value, for instance `data.name`. The `conn` value is
  included for more advanced usage.

  #### Support for Different Content Types / Formats

  The required `label` and `value` functions should return simple values, like
  bitstrings, integers, and floats.

  Format specific values, such as HTML tags, should be defined in format
  specific keys. For instance:

      "name" => [
        label: fn (_conn) -> "Name" end,
        value: fn (_conn, row) -> row.name end,
        value_html: fn (conn, row) ->
          link(row.name, to: Routes.permission_path(conn, :show, row))
        end
      ]

  The data table function will first search for `label_<format>` and
  `value_<format>` keys. E.g. a standard `html` request would search for
  `label_html` and `value_html`. And in turn, a request for `csv` content type
  would search for `label_csv` and `value_csv`. If format specific keys are not
  found, the require `label` and `value` keys will be used as a fallback.

  ### Default Columns

  The default columns option should be a list of bitstrings, each corresponding
  to a key defined in the `allowed_columns` map.

      default_columns: ["name", "slug"]
  """
  def render_data_table(conn, data, options \\ []) do
    format = Phoenix.Controller.get_format(conn)
    columns = get_data_table_columns(conn, options)

    params = [
      columns: columns,
      conn: conn,
      data: data
    ]

    Phoenix.View.render(ArtemisWeb.LayoutView, "data_table.#{format}", params)
  end

  @doc """
  Compares the `?columns=` query param value against the `allowed_columns`. If
  the query param is not set, compares the `default_columns` value instead.

  Returns a map of matching keys in `allowed_columns`.
  """
  def get_data_table_columns(conn, options) do
    allowed_columns = Keyword.get(options, :allowed_columns, [])
    requested_columns = parse_data_table_requested_columns(conn, options)

    # Reduce + reverse preserves the user's requested column order while
    # dropping anything not present in allowed_columns.
    filtered =
      Enum.reduce(requested_columns, [], fn key, acc ->
        case Map.get(allowed_columns, key) do
          nil -> acc
          column -> [column | acc]
        end
      end)

    Enum.reverse(filtered)
  end

  @doc """
  Parse query params and return requested data table columns
  """
  def parse_data_table_requested_columns(conn, options \\ []) do
    conn.query_params
    |> Map.get("columns")
    |> get_data_table_requested_columns(options)
  end

  defp get_data_table_requested_columns(nil, options), do: Keyword.get(options, :default_columns, [])
  defp get_data_table_requested_columns(value, _) when is_bitstring(value), do: String.split(value, ",")
  defp get_data_table_requested_columns(value, _) when is_list(value), do: value

  @doc """
  Renders the label for a data table column.
  """
  def render_data_table_label(conn, column, format) do
    # NOTE(review): String.to_atom/1 on the request format creates atoms
    # dynamically; the format set is bounded by the router/content negotiation,
    # but confirm it cannot be driven by arbitrary user input.
    key = String.to_atom("label_#{format}")
    default = Keyword.fetch!(column, :label)
    render = Keyword.get(column, key, default)

    render.(conn)
  end

  @doc """
  Renders the row value for a data table column.
  """
  def render_data_table_value(conn, column, row, format) do
    key = String.to_atom("value_#{format}")
    default = Keyword.fetch!(column, :value)
    render = Keyword.get(column, key, default)

    render.(conn, row)
  end

  @doc """
  Generates export link with specified format
  """
  def export_path(conn, format, params \\ []) do
    additional_params =
      params
      |> Enum.into(%{})
      |> Artemis.Helpers.keys_to_strings()

    query_params =
      conn
      |> Map.get(:query_params, %{})
      |> Map.put("_format", format)
      |> Map.merge(additional_params)

    query_string = Plug.Conn.Query.encode(query_params)

    "#{conn.request_path}?#{query_string}"
  end
end
|
apps/artemis_web/lib/artemis_web/view_helpers/tables.ex
| 0.770637
| 0.508971
|
tables.ex
|
starcoder
|
defmodule Wordza.Game do
  @moduledoc """
  This is our Wordza Game, a single game managing:
  - Config (dictionary, rules)
  - Tiles (tiles available)
  - Board (tiles played)
  - Players (tiles in trays, current score)
  - Plays (history, game log)
  - Scores

  We are going to base it largely off of WordFeud and Scrabble
  With minor changes to the board configuration, dictionary, and rules
  it should be compatible with either...

  Therefore the create_game and dictionary and rules are all
  keyed into game_type.
  """
  use GenServer

  alias Wordza.GameInstance
  alias Wordza.GamePlay

  ### Client API

  @doc """
  Easy access to start up the server

  On new:
    returns {:ok, pid}

  On repeat:
    returns {:error, {:already_started, #PID<0.248.0>}}
  """
  def start_link(type, player_1_id, player_2_id) do
    name = GameInstance.build_game_name(type)
    start_link(type, player_1_id, player_2_id, name)
  end

  def start_link(type, player_1_id, player_2_id, name) do
    GenServer.start_link(
      __MODULE__,
      [type, player_1_id, player_2_id, name],
      [
        timeout: 30_000, # 30 seconds to init or die
        name: via_tuple(name), # named game (optionally eaiser to lookup)
      ]
    )
  end

  @doc """
  get information about this game
  try get(pid, :full) for everything
  """
  def get(pid_or_name, key \\ :board)
  def get(pid, key) when is_pid(pid), do: pid |> GenServer.call({:get, key})
  def get(name, key), do: name |> via_tuple() |> GenServer.call({:get, key})

  def board(pid_or_name), do: get(pid_or_name, :board)
  def player_1(pid_or_name), do: get(pid_or_name, :player_1)
  def player_2(pid_or_name), do: get(pid_or_name, :player_2)
  def tiles(pid_or_name), do: get(pid_or_name, :tiles)
  def turn(pid_or_name), do: get(pid_or_name, :turn)
  def game_over?(pid_or_name), do: get(pid_or_name, :turn) == :game_over

  @doc """
  submit a play for this game (from a UI)

  already have a GamePlay (bot generated?) - you can submit it,
  or pass in letters_yx and the play will be built and verified server-side
  """
  # Fix: names must be resolved through via_tuple/1 like get/2 does — calling
  # GenServer.call/2 on a raw game name is not a valid server reference.
  def play(pid, player_key, play_or_letters) when is_pid(pid) do
    GenServer.call(pid, {:play, player_key, play_or_letters})
  end
  def play(name, player_key, play_or_letters) do
    name |> via_tuple() |> GenServer.call({:play, player_key, play_or_letters})
  end

  @doc """
  submit a pass for this game (from a UI)
  """
  # Fix: same name-routing fix as play/3.
  def pass(pid, player_key) when is_pid(pid), do: GenServer.call(pid, {:pass, player_key})
  def pass(name, player_key), do: name |> via_tuple() |> GenServer.call({:pass, player_key})

  ### Server API

  @doc """
  Initialize the game state for an allowed game type.
  """
  # Fix: the invalid-type branch returned {:error, _}, which is not a valid
  # GenServer.init/1 return (it crashes with a bad-return error); {:stop, reason}
  # makes start_link return {:error, reason} cleanly.
  @impl true
  def init([type, player_1_id, player_2_id, name]) do
    allowed = [:scrabble, :wordfeud, :mock]

    case Enum.member?(allowed, type) do
      true -> {:ok, GameInstance.create(type, player_1_id, player_2_id, name)}
      false -> {:stop, "Invalid type supplied to Game init #{type}"}
    end
  end

  # NOTE state = game
  # (that's the point, GenServer Game.state = "game state")
  @impl true
  def handle_call({:get, :full}, _from, state) do
    {:reply, state, state}
  end
  def handle_call({:get, :player_1}, _from, state) do
    {:reply, Map.get(state, :player_1), state}
  end
  def handle_call({:get, :player_2}, _from, state) do
    {:reply, Map.get(state, :player_2), state}
  end
  def handle_call({:get, :board}, _from, state) do
    {:reply, Map.get(state, :board), state}
  end
  def handle_call({:get, :tiles}, _from, state) do
    {:reply, Map.get(state, :tiles_in_pile), state}
  end
  def handle_call({:get, :turn}, _from, state) do
    {:reply, Map.get(state, :turn), state}
  end
  # A pre-built GamePlay is applied directly.
  def handle_call({:play, _player_key, %GamePlay{} = play}, _from, state) do
    case GameInstance.apply_play(state, play) do
      {:ok, state} -> {:reply, {:ok, state}, state}
      {:error, err} -> {:reply, {:error, err}, state}
    end
  end
  # Raw letters are built into a GamePlay, verified, then re-dispatched above.
  def handle_call({:play, player_key, letters_yx}, from, state) do
    play = player_key |> GamePlay.create(letters_yx) |> GamePlay.verify(state)
    handle_call({:play, player_key, play}, from, state)
  end
  def handle_call({:pass, player_key}, _from, state) do
    case GameInstance.apply_pass(state, player_key) do
      {:ok, state} -> {:reply, {:ok, state}, state}
      {:error, err} -> {:reply, {:error, err}, state}
    end
  end

  # Fancy name <-> pid refernce library `gproc`
  defp via_tuple(name) do
    {:via, :gproc, {:n, :l, {:wordza_game, name}}}
  end
end
|
lib/game/game.ex
| 0.638046
| 0.438004
|
game.ex
|
starcoder
|
defmodule BeamToExAst do
  # Converts Erlang abstract-format forms (as found in BEAM debug info) into
  # an Elixir AST rooted at a `defmodule` expression.
  alias BeamToExAst.Translate

  # Entry point. `list` is the module's abstract-format forms; returns the
  # quoted `defmodule` expression. A single remaining form becomes the do-body
  # directly, otherwise forms are wrapped in a __block__ sorted by line number.
  def convert(list, opts \\ []) do
    opts = Enum.into(opts, %{})
    {mod_name, rest, _opts} = Enum.reduce(list, {"", [], opts}, &do_convert/2)

    case length(rest) do
      1 ->
        {:defmodule, [line: 1], [{:__aliases__, [line: 1], [mod_name]}, [do: List.first(rest)]]}

      _ ->
        {:defmodule, [line: 1],
         [
           {:__aliases__, [line: 1], [mod_name]},
           [do: {:__block__, [], Enum.sort(rest, &sort_fun/2)}]
         ]}
    end
  end

  # Orders generated :def nodes by original line number.
  # NOTE(review): only :def/:def pairs are handled — Enum.sort would raise if
  # `rest` ever contained a non-def form; confirm that cannot happen.
  def sort_fun({:def, [line: lna], _}, {:def, [line: lnb], _}) do
    lna < lnb
  end

  # _n is number of parameters
  # ln is the line number

  # The :module attribute yields the module name for the accumulator.
  def do_convert({:attribute, _ln, :module, name}, {_, rest, opts}) do
    {clean_module(name), rest, opts}
  end

  # Record attributes are intentionally dropped.
  def do_convert({:attribute, _ln, :record, _ast}, {mod_name, rest, opts}) do
    {mod_name, rest, opts}
  end

  # All other attributes are ignored.
  def do_convert({:attribute, _, _, _}, acc) do
    acc
  end

  # The compiler-generated __info__/1 function is skipped.
  def do_convert({:function, _, :__info__, _, _}, acc) do
    acc
  end

  # Translates each clause of an Erlang function into a quoted `def`,
  # attaching `when` guards (single guard or two guards joined with `and`).
  def do_convert({:function, _ln, name, _n, body}, {mod_name, rest, opts}) do
    opts = Map.put(opts, :parents, [:function])

    {mod_name,
     Enum.concat(
       Enum.map(body, fn
         {:clause, ln2, params, guard, body_def} ->
           case guard do
             [] ->
               {:def, [line: ln2],
                [{name, [line: ln2], Translate.to_elixir(params, opts)}, def_body(body_def, opts)]}

             [[g]] ->
               {:def, [line: ln2],
                [
                  {:when, [line: ln2],
                   [
                     {name, [line: ln2], Translate.to_elixir(params, opts)},
                     Translate.to_elixir(g, opts)
                   ]},
                  def_body(body_def, opts)
                ]}

             [g1, g2] ->
               {:def, [line: ln2],
                [
                  {:when, [line: ln2],
                   [
                     {name, [line: ln2], Translate.to_elixir(params, opts)},
                     {:and, [],
                      [
                        Translate.to_elixir(List.first(g1), opts),
                        Translate.to_elixir(List.first(g2), opts)
                      ]}
                   ]},
                  def_body(body_def, opts)
                ]}
           end

         # NOTE(review): this fallback returns the whole `body` list rather
         # than a single translated clause — confirm whether it is reachable
         # and intentional.
         _ ->
           body
       end),
       rest
     ), opts}
  end

  def do_convert({:eof, _ln}, acc) do
    acc
  end

  # Builds the `[do: ...]` keyword body for a def, filtering out bare nils
  # and wrapping multi-expression bodies in a __block__.
  def def_body(items, opts) do
    opts = Map.update!(opts, :parents, &[:body | &1])

    filtered_items =
      items
      |> Enum.filter(fn
        {:atom, _, nil} -> false
        _ -> true
      end)

    case length(filtered_items) do
      1 -> [do: Translate.to_elixir(List.first(filtered_items), opts)]
      _ -> [do: {:__block__, [], Translate.to_elixir(filtered_items, opts)}]
    end
  end

  # Like def_body/2 but returns the bare expression(s) without the `do:` wrapper.
  def def_body_less(items, opts) do
    opts = Map.update!(opts, :parents, &[:body_less | &1])

    case length(items) do
      1 -> Translate.to_elixir(List.first(items), opts)
      _ -> {:__block__, [], Translate.to_elixir(items, opts)}
    end
  end

  # def_body_less/2 variant that also drops the sentinel produced for
  # removable expressions (see filter_empty/1).
  def def_body_less_filter(items, opts) do
    opts = Map.update!(opts, :parents, &[:body_less_filter | &1])

    items2 =
      items
      |> Translate.to_elixir(opts)
      |> Enum.filter(&filter_empty/1)

    case length(items2) do
      1 -> List.first(items2)
      _ -> {:__block__, [], items2}
    end
  end

  # Builds a remote-call AST node. A capitalized module string becomes an
  # __aliases__ node (Elixir module), otherwise a bare atom (Erlang module).
  def get_caller(c_mod_call, ln, caller, params, opts) do
    case String.match?(c_mod_call, ~r"^[A-Z]") do
      true ->
        {{:., [line: ln],
          [{:__aliases__, [line: ln], [String.to_atom(c_mod_call)]}, clean_atom(caller, opts)]},
         [line: ln], Translate.to_elixir(params, opts)}

      false ->
        {{:., [line: ln], [String.to_atom(c_mod_call), clean_atom(caller, opts)]}, [line: ln],
         Translate.to_elixir(params, opts)}
    end
  end

  # Unwraps `{:{}, _, params}` tuple AST nodes in a list; non-lists pass through.
  def remove_tuples(l1) when is_list(l1) do
    Enum.map(l1, &remove_tuple/1)
  end

  def remove_tuples(rest) do
    rest
  end

  def remove_tuple({:{}, [line: _ln], params}) do
    params
  end

  def remove_tuple(params) do
    params
  end

  # Collapses a single-element list to its element.
  def only_one(l1) do
    case length(l1) do
      1 -> List.first(l1)
      _ -> l1
    end
  end

  # Gives capture nodes (`&n`) generated with a placeholder line 0 a real line.
  def insert_line_number({:&, [line: 0], number}, ln) do
    {:&, [line: ln], number}
  end

  def insert_line_number(var, _ln) do
    var
  end

  # True when any parameter is a compiler-generated variable ("__@"/"_@" prefix).
  def check_params(params) do
    Enum.reduce(params, false, fn
      {:var, _ln, var}, acc ->
        case Atom.to_string(var) do
          <<"__@", _rest::binary>> -> true
          <<"_@", _rest::binary>> -> true
          _ -> acc
        end

      _, acc ->
        acc
    end)
  end

  # Accumulates `true` as soon as a non-binary element is seen.
  def check_bins(s1, acc) when is_binary(s1) do
    acc
  end

  def check_bins(_, _acc) do
    true
  end

  # Maps Erlang operator names to their Elixir spellings (e.g. =:= -> ===).
  def clean_op(op1) do
    op1
    |> Atom.to_string()
    |> case do
      "=:=" -> "==="
      "=/=" -> "!=="
      "/=" -> "!="
      "=<" -> "<="
      "andalso" -> "and"
      s1 -> s1
    end
    |> String.to_atom()
  end

  # Strips the "Elixir." prefix and camelizes bare Erlang module names.
  def clean_module(a1) do
    s1 =
      a1
      |> Atom.to_string()
      |> String.replace("Elixir.", "")

    s1
    |> String.match?(~r"^[A-Z]")
    |> case do
      true -> s1
      false -> Macro.camelize(s1)
    end
    |> String.to_atom()
  end

  # Strips the "Elixir." prefix from an atom, returning an atom.
  def clean_atom(a1, _) do
    a1
    |> Atom.to_string()
    |> String.replace("Elixir.", "")
    |> String.to_atom()
  end

  # Same as clean_atom/2 but returns the string form.
  def half_clean_atom(a1, _) do
    a1
    |> Atom.to_string()
    |> String.replace("Elixir.", "")
  end

  # Variable-name cleanup, keyed on the source dialect carried in opts.
  def clean_var(v1, %{erlang: true}) do
    v1
    |> Atom.to_string()
    |> Macro.underscore()
    |> String.to_atom()
  end

  # NOTE(review): the "1.6" branch handles Elixir 1.6's "V"-prefixed generated
  # variable names; later versions strip a leading "_" from "@"-suffixed names
  # — confirm against the compiler versions actually supported.
  def clean_var(v1, %{elixir: true}) do
    v1_string =
      v1
      |> Atom.to_string()

    case System.version() do
      <<"1.6", _rest::binary>> ->
        v1_string
        |> String.replace(~r/^V/, "")

      _ ->
        if Regex.match?(~r/@\d*/, v1_string) do
          v1_string
          |> String.replace(~r/^_/, "")
        else
          v1_string
        end
    end
    |> String.replace(~r/@\d*/, "")
    |> String.to_atom()
  end

  def clean_var(v1, _) do
    v1
    |> Atom.to_string()
    |> String.replace(~r"@\d+", "")
    |> String.to_atom()
  end

  # Sentinel used by def_body_less_filter/2 to drop removable expressions.
  def filter_empty(:filter_this_thing_out_of_the_list_please) do
    false
  end

  def filter_empty(_) do
    true
  end
end
|
lib/beam_to_ex_ast.ex
| 0.511473
| 0.48932
|
beam_to_ex_ast.ex
|
starcoder
|
defmodule ReIntegrations.Routific.Payload.Outbound do
  @moduledoc """
  Builds routific payload.
  """
  @derive Jason.Encoder

  alias Re.Calendars.Calendar
  alias ReIntegrations.Routific

  defstruct [:visits, :fleet, :options]

  @doc """
  Builds the outbound Routific payload from a list of visit maps and
  keyword/enumerable options. Returns `{:ok, %__MODULE__{}}` or an
  `{:error, reason}` from the failing step.
  """
  def build(input, opts) do
    with {:ok, visits} <- build_visits(input),
         {:ok, fleet} <- build_fleet() do
      {:ok, %__MODULE__{visits: visits, fleet: fleet, options: build_options(opts)}}
    end
  end

  # Validates that every visit was buildable; any :error makes the whole
  # input invalid.
  defp build_visits(input) do
    visits = build_visits_list(input)

    if visits != :error and Enum.all?(visits, fn {_, visit} -> visit != :error end),
      do: {:ok, visits},
      else: {:error, :invalid_input}
  end

  # Fix: halts at the first visit missing an `:id` instead of continuing to
  # reduce over the remaining visits with an `:error` accumulator.
  defp build_visits_list(input) do
    Enum.reduce_while(input, %{}, fn visit, acc ->
      if Map.has_key?(visit, :id),
        do: {:cont, Map.put(acc, visit.id, build_visit(visit))},
        else: {:halt, :error}
    end)
  end

  # A visit needs at least a duration and an address; start/end default to the
  # configured Routific shift window.
  defp build_visit(%{duration: _duration, address: address} = visit) do
    visit
    |> Map.take([:duration, :start, :end, :notes, :type])
    |> Map.update(:start, Routific.shift_start(), &to_time_string/1)
    |> Map.update(:end, Routific.shift_end(), &to_time_string/1)
    |> Map.put(:customNotes, Map.get(visit, :custom_notes, %{}))
    |> Map.put(:location, %{
      name: address,
      address: address
    })
  end

  defp build_visit(_visit), do: :error

  # Builds the fleet map keyed by calendar uuid.
  # Fix: matches the non-empty/empty list shapes directly instead of the
  # `length(calendars) !== 0` guard (which walks the whole list).
  defp build_fleet do
    case get_calendars() do
      [_ | _] = calendars ->
        {:ok,
         Enum.reduce(calendars, %{}, fn calendar, acc ->
           Map.put(acc, calendar.uuid, %{
             speed: calendar.speed,
             start_location: build_depot(calendar),
             shift_start: to_time_string(calendar.shift_start),
             shift_end: to_time_string(calendar.shift_end),
             type: calendar.types,
             breaks: get_breaks()
           })
         end)}

      [] ->
        {:error, :no_calendars_found}
    end
  end

  defp get_calendars do
    Calendar
    |> Calendar.Queries.preload_relations([:address])
    |> Re.Repo.all()
  end

  defp build_depot(%{address: address}),
    do: %{
      id: address.id,
      name: "#{address.street}, #{address.street_number}",
      lat: address.lat,
      lng: address.lng
    }

  defp get_breaks, do: [%{id: "lunch", start: "12:00", end: "13:00"}]

  defp build_options(options), do: Enum.into(options, %{})

  # "HH:MM:SS" -> "HH:MM"; non-Time values are passed through as-is.
  defp to_time_string(%Time{} = time), do: time |> Time.to_string() |> String.slice(0..4)
  defp to_time_string(time), do: time
end
|
apps/re_integrations/lib/routific/payload/outbound.ex
| 0.702326
| 0.426829
|
outbound.ex
|
starcoder
|
defmodule Freshcom.Identity do
@moduledoc """
This API module provides functions that deal with identity and access management.
It follows a combination of Stripe and AWS style IAM.
Generally speaking, identity in Freshcom consist of three resources:
- The app that is making the request on behalf of the user (the client)
- The account that the request is targeting
- The user that is making the request (the requester)
These three resources are used together to authorize each request. ID of
these 3 resources are required for all API module functions which means the
`:client_id`, `:account_id` and `:requester_id` (collectively referred as "identity fields")
must be set on the `Freshom.Request` struct unless otherwise indicated in the documentation.
This module provides functions to help you create and manage these three resources
and their related resources.
Note that no module in freshcom actually provides any authentication related
functions. It is assumed all calls to these functions are already authenticated
and whatever provided in the identity fields of `Freshcom.Request` struct
is already validated. It is up to your delivery layer to implement your own authentication
and make sure the user is who they say they are. For example [freshcom_web](https://github.com/freshcom/freshcom_web)
uses OAuth with JWT to do authentication.
## Resource Relationship
The relationships between the identity resources are as illustrated in the diagram below.
<img alt="Relationship Diagram" src="images/identity/relationship.png" width="271px">
Relationship can be described as follows:
- Standard user can have multiple account
- Account can have multiple managed user
- Account can have multiple app
- All resources except standard user must belongs to an account
You can create a standard user by using `Freshcom.Identity.register_user/1`.
## Test Account
There are two types of account in freshcom: live account and test account. Each
live account will have one test account associated it. User that have access to
the live account will have the same access level to the corresponding test account
but not vice versa.
## API Key
In most cases it is not secure to allow a user to directly pass in the
`:user_id` and `:account_id` because these IDs are not changeable and cannot be deleted
if compromised, so freshcom provides you with API keys that can help you implement
your authentication method. Using a API key you can retrieve the `:account_id`
and `:user_id` it belongs to, it can also be easily re-generated in case it is compromised.
Standard user have an API Key for each account they own including test accounts.
Managed user for a live account have two API keys, one for the live account, one
for the corresponding test account. Managed user for test account only have one
API Key. Each account also have an API Key that is not associated with any user
you can use this API key if you only want to identify the account without any user.
How you use API keys are completely up to you, you can directly expose them to the user,
or in the case of [freshcom_web](https://github.com/freshcom/freshcom_web)
it is used as the refresh token for the actual access token which itself is a JWT
that contains the `:account_id` and `:user_id`.
## Bypass Authorization
Sometime you will need to make calls to an API module's function without having the identity
information. This is especially the case when you are implementing your own authentication
method on top of freshcom's Elixir API. For example in [freshcom_web](https://github.com/freshcom/freshcom_web)
the API key needs to be retrieved before a user is authenticated, but the `get_api_key/1`
function requires all identity fields be provided.
To solve this problem, freshcom allow you bypass authorization by setting
the value of `:_role_` of the request struct. If you set the value of `:_role_`
to any of the following then authorization will be bypassed:
`"system"`, `"sysdev"`, `"appdev"`. When authorizatino is bypassed you can omit
all identity fields, however we recommand you still provide as much as you know
so they can still be logged and useful for debugging and auditing.
The authorization bypass works for all API module functions, however we recommand
you only bypass when necessary.
### Example to bypass authorization
```
alias Freshcom.{Identity, Request}
Identity.get_api_key(%Request{
identifier: %{"id" => "cae028f2-f5e8-402d-a0b9-4bf5ae478151"},
_role_: "system"
})
```
## Role Groups
For the purpose of this documentation we group user roles in to the following groups:
- Customer Management Roles: `"owner"`, `"administrator"`, `"manager"`, `"developer"`, `"support_specialist"`
- Development Roles: `"owner"`, `"administrator"`, `"developer"`
- Admin Roles: `"owner"`, `"administrator"`
## Abbreviation
For better formatting the following abbreviation are used in the documentation:
- C/S: Case Sensitive
"""
use Freshcom, :api_module
import FCSupport.Normalization, only: [atomize_keys: 2]
import Freshcom.IdentityPolicy
import UUID
alias FCIdentity.{
RegisterUser,
AddUser,
UpdateUserInfo,
ChangeDefaultAccount,
ChangeUserRole,
ChangePassword,
DeleteUser,
CreateAccount,
UpdateAccountInfo,
CloseAccount,
GeneratePasswordResetToken,
AddApp,
UpdateApp,
DeleteApp
}
alias FCIdentity.{
UserRegistered,
UserAdded,
UserInfoUpdated,
DefaultAccountChanged,
UserRoleChanged,
PasswordChanged,
UserDeleted,
AccountCreated,
AccountInfoUpdated,
AccountClosed,
PasswordResetTokenGenerated,
AppAdded,
AppUpdated,
AppDeleted
}
alias Freshcom.{UserProjector, AccountProjector, AppProjector}
alias Freshcom.{User, Account, APIKey, App}
@doc """
Register a standard user.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.register_user(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
data: %{
"name" => "<NAME>",
"username" => "<EMAIL>",
"email" => "<EMAIL>",
"password" => "<PASSWORD>",
"account_name" => "Unamed Account",
"default_locale" => "en",
"is_term_accepted" => true
}
})
```
## Identity Fields
| Key | Description |
|--------------|-------------------------------------------------------------|
| `:client_id` | _(required)_ Must be the ID of an app with type `"system"`. |
## Data Fields
| Key | Type | Description |
|----------------------|-----------|--------------------------------------------------------------------------------------------------------------------------------------------------|
| `"username"` | _String_ | _(required)_ Must be unique across all standard user. Length between 3 and 120 characters. Can contain alphanumeric characters and `'`, `.`, `+`, `-`, `@`. |
| `"password"` | _String_ | _(required)_ Must be at least 8 characters long. |
| `"is_term_accepted"` | _Boolean_ | _(required)_ Must be true. |
| `"email"` | _String_ | Must be in correct format. |
| `"name"` | _String_ | Name of the user. |
| `"account_name"` | _String_ | Name of the default account to be created, defaults to `"Unnamed Account"`. |
| `"default_locale"` | _String_ | Default locale of the default account, defaults to `"en"`. |
"""
@spec register_user(Request.t()) :: APIModule.resp()
def register_user(%Request{} = req) do
  # Dispatch the RegisterUser command, wait for the UserRegistered event,
  # then pull the created user off the event payload and preload it.
  # NOTE(review): `~>` appears to thread an {:ok, _} result while passing
  # {:error, _} through untouched — confirm which library defines the operator.
  req
  |> to_command(%RegisterUser{})
  |> dispatch_and_wait(UserRegistered)
  ~> Map.get(:user)
  ~> preload(req)
  |> to_response()
end
@doc """
Add a managed user to an account.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.add_user(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
requester_id: "4<PASSWORD>",
data: %{
"username" => "testuser",
"password" => "test1234",
"role" => "developer",
"email" => "<EMAIL>",
"name" => "<NAME>"
}
})
```
## Identity Fields
| Key | Description |
|-----------------|---------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. Must be a system app. |
| `:account_id` | _(required)_ ID of the target account. |
| `:requester_id` | _(required)_ ID of the user making the request. Must be a user with role in [Admin Roles](#module-roles). |
## Data Fields
| Key | Type | Description |
|----------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `"username"` | _String_ | _(required)_ Must be unique across all standard user. Length between 3 and 120 characters. Can contain alphanumeric characters and `'`, `.`, `+`, `-`, `@`. |
| `"password"` | _String_ | _(required)_ Must be at least 8 characters long. |
| `"role"` | _String_ | _(required)_ Please see `Freshcom.User` for list of valid roles. |
| `"email"` | _String_ | Must be in correct format. |
| `"name"` | _String_ | Full name of the user. |
| `"first_name"` | _String_ | First name of the user. |
| `"last_name"` | _String_ | Last name of the user. |
"""
@spec add_user(Request.t()) :: APIModule.resp()
def add_user(%Request{} = req) do
  # Dispatch an AddUser command, wait for the UserAdded event, then
  # extract and preload the resulting user for the response.
  req
  |> to_command(%AddUser{})
  |> dispatch_and_wait(UserAdded)
  ~> Map.get(:user)
  ~> preload(req)
  |> to_response()
end
@doc """
Update a user's general information.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.update_user_info(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515<PASSWORD>53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
requester_id: "<PASSWORD>",
identifier: %{"id" => "8d168caa-dc9c-420e-bd88-7474463bcdea"},
data: %{
"name" => "Demo User"
}
})
```
## Identity Fields
| Key | Description |
|-----------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. Must be a system app if the target user is a standard user. |
| `:account_id` | _(required)_ ID of the target account. |
| `:requester_id` | _(required)_ ID of the user making the request. Must meet one of the following conditions: <ul style="margin: 0px;"><li>be the same user as the target user</li><li>be a user with role in [Customer Management Roles](#module-roles) if the target user is of role `"customer"`</li><li>be a user with role in [Admin Roles](#module-roles) if the target user is a managed user</li></ul> |
## Identifier Fields
| Key | Description |
|-----------|-----------------------------------------------------------------------------------------------------------|
| `"id"` | _(required)_ ID of the target user. |
## Data Fields
| Key | Type | Description |
|----------------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `"username"` | _String_ | Must be unique across all standard user. Length between 3 and 120 characters. It can contain alphanumeric characters and `'`, `.`, `+`, `-`, `@`. |
| `"email"` | _String_ | Must be in correct format. |
| `"name"` | _String_ | Full name of the user. |
| `"first_name"` | _String_ | First name of the user. |
| `"last_name"` | _String_ | Last name of the user. |
| `"custom_data"` | _Map_ | Set of key-value pairs that you can attach to this resource. |
"""
@spec update_user_info(Request.t()) :: APIModule.resp()
def update_user_info(%Request{} = req) do
  # The target user's ID comes from the request identifier, not the data map.
  identifier = atomize_keys(req.identifier, ["id"])
  # Expand only the default locale so the result can be translated below.
  req = expand(req, only: :_default_locale_)

  req
  |> to_command(%UpdateUserInfo{})
  |> Map.put(:user_id, identifier[:id])
  |> dispatch_and_wait(UserInfoUpdated)
  ~> Map.get(:user)
  ~> preload(req)
  ~> translate(req.locale, req._default_locale_)
  |> to_response()
end
@doc """
Change the default account of a standard user.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.change_default_account(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
requester_id: "<PASSWORD>",
data: %{"id" => "3a0ab0a2-1865-4f80-9127-e2d413ba4b5e"}
})
```
## Identity Fields
| Key | Description |
|-----------------|--------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. Must be a system app. |
| `:requester_id` | _(required)_ ID of the user making the request. Must be a standard user. |
## Data Fields
| Key | Type | Description |
|--------|----------|------------------------------------------------------------------------------------------------------|
| `"id"` | _String_ | _(required)_ ID of the new default account. The provided account must be owned by the requester. |
"""
@spec change_default_account(Request.t()) :: APIModule.resp()
def change_default_account(%Request{} = req) do
  # The requester is the target user; the new default account's ID is
  # taken from data["id"] (see the Data Fields table in @doc).
  req
  |> to_command(%ChangeDefaultAccount{})
  |> Map.put(:user_id, req.requester_id)
  |> Map.put(:account_id, req.data["id"])
  |> dispatch_and_wait(DefaultAccountChanged)
  ~> Map.get(:user)
  ~> preload(req)
  |> to_response()
end
@doc """
Change the role of a managed user.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.change_user_role(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
requester_id: "<PASSWORD>",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
identifier: %{"id" => "8d168caa-dc9c-420e-bd88-7474463bcdea"},
data: %{"value" => "manager"}
})
```
## Identity Fields
| Key | Description |
|-----------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. |
| `:account_id` | _(required)_ ID of the target account. |
| `:requester_id` | _(required)_ ID of the user making the request. Must be a user with role in [Admin Roles](#module-roles). |
## Identifier Fields
| Key | Type | Description |
|--------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `"id"` | _String_ | _(required)_ ID of the target user. Must be a managed user and cannot be the same as the requester. |
## Data Fields
| Key | Type | Description |
|-----------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `"value"` | _String_ | _(required)_ New role of the user. |
"""
@spec change_user_role(Request.t()) :: APIModule.resp()
def change_user_role(%Request{} = req) do
  # Seed the command with the target user's ID and the new role before
  # merging in the remaining request fields via to_command/2.
  cmd = %ChangeUserRole{
    user_id: req.identifier["id"],
    role: req.data["value"]
  }

  req
  |> to_command(cmd)
  |> dispatch_and_wait(UserRoleChanged)
  ~> Map.get(:user)
  ~> preload(req)
  |> to_response()
end
@doc """
Generate a password reset token for a user.
There are two ways to generate a password reset token:
- By providing the username of the user, using a username you can generate a reset token for both standard and managed user.
- By providing the ID of the user, using the user ID you can only generate a reset token for managed user.
## Examples
### Using user's username
```
alias Freshcom.{Identity, Request}
Identity.generate_password_reset_token(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
identifier: %{"username" => "roy"}
})
```
### Using user's ID
```
alias Freshcom.{Identity, Request}
Identity.generate_password_reset_token(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
requester_id: "4df750ca-ea88-4150-8a0b-7bb77efa43a4",
identifier: %{"id" => "8d168caa-dc9c-420e-bd88-7474463bcdea"}
})
```
## Identity Fields
| Key | Description |
|-----------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. Must be a system app if the target user is a standard user. |
| `:account_id` | ID of the target account, if provided will only look for managed user of the target account, otherwise will only look for standard user. |
| `:requester_id` | ID of the user making the request, required if `identifier["id"]` is provided. Must meet one of the following conditions: <ul style="margin: 0px;"><li>be a user with role in [Customer Management Roles](#module-roles) if the target user is of role `"customer"`</li><li>be a user with role in [Admin Roles](#module-roles)</li></ul> |
## Identifier Fields
| Key | Type | Description |
|--------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `"id"` | _String_ | ID of the target user, required if `identifier["username"]` is not provided. Must be the ID a managed user. |
| `"username"` | _String_ | Username of the target user, required if `identifier["id"]` is not provided. |
"""
@spec generate_password_reset_token(Request.t()) :: APIModule.resp()
def generate_password_reset_token(%Request{identifier: %{"username" => username}} = req) do
  # Resolve the user by username first; the command requires a user ID.
  user =
    req
    |> Map.put(:identifier, %{"username" => username})
    |> to_query(User)
    |> Repo.one()

  # NOTE(review): when no user matches, a random UUID is substituted so
  # the command still dispatches (and fails downstream) — presumably to
  # avoid revealing whether the username exists. Confirm this is intended.
  req
  |> to_command(%GeneratePasswordResetToken{
    user_id: (user || %{id: uuid4()}).id,
    # Tokens are valid for 24 hours from generation.
    expires_at: Timex.shift(Timex.now(), hours: 24)
  })
  |> dispatch_and_wait(PasswordResetTokenGenerated)
  ~> Map.get(:user)
  |> to_response()
end

# Generate a token directly by user ID (managed users only, per @doc).
def generate_password_reset_token(%Request{identifier: %{"id" => id}} = req) do
  req
  |> to_command(%GeneratePasswordResetToken{
    user_id: id,
    # Tokens are valid for 24 hours from generation.
    expires_at: Timex.shift(Timex.now(), hours: 24)
  })
  |> dispatch_and_wait(PasswordResetTokenGenerated)
  ~> Map.get(:user)
  |> to_response()
end

# Neither a username nor an ID identifier was provided.
def generate_password_reset_token(_), do: {:error, :not_found}
@doc """
Change the password of a user.
There are two ways to change a password:
- By providing a password reset token.
- By providing the ID of the user.
## Examples
### Using a password reset token
```
alias Freshcom.{Identity, Request}
Identity.change_password(%Request{
client_id: client.id,
identifier: %{"reset_token" => reset_token},
data: %{"new_password" => "<PASSWORD>"}
})
```
### Using the user's ID
```
alias Freshcom.{Identity, Request}
Identity.change_password(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
requester_id: "4df750ca-ea88-4150-8a0b-7bb77efa43a4",
identifier: %{"id" => "8d168caa-dc9c-420e-bd88-7474463bcdea"},
data: %{"new_password" => "<PASSWORD>"}
})
```
## Identity Fields
| Key | Description |
|-----------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. Must be a system app if the target user is a standard user. |
| `:account_id` | ID of the target account, required if `identifier["reset_token"]` is not provided. |
| `:requester_id` | ID of the user making the request, required if `identifier["reset_token"]` is not provided. When required must be the same as `identifier["id"]` or be a user with role `"owner"` or `"administrator"`. |
## Identifier Fields
| Key | Type | Description |
|----------------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `"id"` | _String_ | ID of the target user, required if `identifier["reset_token"]` is not provided. |
| `"reset_token"` | _String_ | A password reset token of the target user, required if `identifier["id"]` is not provided.|
## Data Fields
| Key | Type | Description |
|----------------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `"new_password"` | _String_ | _(required)_ New password of the user. Must be at least 8 character long. |
| `"current_password"` | _String_ | Current password of the user, required if requester is the same as target user. |
"""
@spec change_password(Request.t()) :: APIModule.resp()
def change_password(%Request{identifier: %{"reset_token" => reset_token}} = req) do
  # Resolve the user that owns the given password reset token.
  user =
    req
    |> Map.put(:identifier, %{"password_reset_token" => reset_token})
    |> to_query(User)
    |> Repo.one()

  # NOTE(review): when no user matches, a random UUID is substituted so
  # the dispatch fails downstream rather than here — presumably to avoid
  # leaking token validity. Confirm this is intended.
  req
  |> to_command(%ChangePassword{
    user_id: (user || %{id: uuid4()}).id,
    reset_token: reset_token
  })
  |> dispatch_and_wait(PasswordChanged)
  ~> Map.get(:user)
  ~> preload(req)
  |> to_response()
end

# Change the password by user ID instead of a reset token.
def change_password(%Request{identifier: %{"id" => id}} = req) do
  req
  |> to_command(%ChangePassword{})
  |> Map.put(:user_id, id)
  |> dispatch_and_wait(PasswordChanged)
  ~> Map.get(:user)
  ~> preload(req)
  |> to_response()
end

# Neither a reset token nor an ID identifier was provided.
def change_password(_), do: {:error, :not_found}
@doc """
Delete a managed user.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.delete_user(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a<PASSWORD>63",
requester_id: "<PASSWORD>",
identifier: %{"id" => "8d168caa-dc9c-420e-bd88-7474463bcdea"}
})
```
## Identity Fields
| Key | Description |
|-----------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. |
| `:account_id` | _(required)_ ID of the target account. |
| `:requester_id` | _(required)_ ID of the user making the request. Must be a user with role in [Admin Roles](#module-roles). |
## Identifier Fields
| Key | Type | Description |
|---------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `"id"` | _String_ | _(required)_ ID of the target user. Must be a managed user and cannot be the same as the requester. |
"""
@spec delete_user(Request.t()) :: APIModule.resp()
def delete_user(%Request{} = req) do
  # The target user's ID comes from the request identifier.
  identifier = atomize_keys(req.identifier, ["id"])

  req
  |> to_command(%DeleteUser{})
  |> Map.put(:user_id, identifier[:id])
  |> dispatch_and_wait(UserDeleted)
  ~> Map.get(:user)
  |> to_response()
end
@doc """
List all managed users of an account.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.list_user(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
requester_id: "<PASSWORD>88-4150-8a0b-7bb77efa43a4",
filter: [%{"role" => "customer"}],
search: "roy"
})
```
## Identity Fields
| Key | Description |
|-----------------|-----------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. |
| `:account_id` | _(required)_ ID of the target account. |
| `:requester_id` | _(required)_ ID of the user making the request. Must be a user with role in [Admin Roles](#module-roles). |
## Filter Fields
| Key | Type | [C/S](#module-abbreviation) | Supported Operators | Default |
|-------------------------|---------------------|------|---------------------------------------|----------------------------------------------|
| `"status"` | _String_ | Yes | [Equality Operators](Freshcom.Filter.html#module-operator-groups) | `%{"$eq" => "active"}` |
| `"username"` | _String_ | No | [Equality Operators](Freshcom.Filter.html#module-operator-groups) | N/A |
| `"email"` | _String_ | No | [Equality Operators](Freshcom.Filter.html#module-operator-groups) | N/A |
| `"name"` | _String_ | Yes | [Equality Operators](Freshcom.Filter.html#module-operator-groups) | N/A |
| `"first_name"` | _String_ | Yes | [Equality Operators](Freshcom.Filter.html#module-operator-groups) | N/A |
| `"last_name"` | _String_ | Yes | [Equality Operators](Freshcom.Filter.html#module-operator-groups) | N/A |
| `"role"` | _String_ | Yes | [Equality Operators](Freshcom.Filter.html#module-operator-groups) | N/A |
| `"email_verified"` | _Boolean_ | N/A | `"$eq"` | N/A |
| `"email_verified_at"` | _String_ (ISO8601) | N/A | `"$eq"` and [Range Operators](Freshcom.Filter.html#module-operator-groups) | N/A |
| `"password_changed_at"` | _String_ (ISO8601) | N/A | `"$eq"` and [Range Operators](Freshcom.Filter.html#module-operator-groups) | N/A |
Please see `Freshcom.Filter` for details on how to use filter.
## Other Fields
- Searchable fields: `["name", "username", "email"]`
- Sortable fields: `["status", "username", "email", "role"]`
"""
@spec list_user(Request.t()) :: APIModule.resp()
def list_user(%Request{} = req) do
  req = expand(req)

  # Whitelist the keys callers may filter, search and sort on before
  # authorizing; `~>` continues only when authorization succeeded.
  req
  |> Map.put(:_filterable_keys_, [
    "status",
    "role",
    "email_verified",
    "email_verified_at",
    "password_changed_at"
  ])
  |> Map.put(:_searchable_keys_, ["name", "username", "email"])
  |> Map.put(:_sortable_keys_, ["status", "username", "email", "role"])
  |> authorize(:list_user)
  ~> to_query(User)
  ~> Repo.all()
  ~> preload(req)
  ~> translate(req.locale, req._default_locale_)
  |> to_response()
end
@doc """
Count the number of managed users of an account.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.count_user(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
requester_id: "<PASSWORD>",
filter: [%{"role" => "customer"}],
search: "roy"
})
```
## Request Fields
All fields are the same as `list_user/1`, except any pagination will be ignored.
"""
# Adds the missing @spec for consistency with the other public functions
# in this module (e.g. list_user/1), which all declare one.
@spec count_user(Request.t()) :: APIModule.resp()
def count_user(%Request{} = req) do
  # Mirrors list_user/1's filterable/searchable configuration; pagination
  # is cleared because the count always covers the whole result set.
  req
  |> expand()
  |> Map.put(:_filterable_keys_, [
    "status",
    "role",
    "email_verified",
    "email_verified_at",
    "password_changed_at"
  ])
  |> Map.put(:_searchable_keys_, ["name", "username", "email"])
  |> Map.put(:pagination, nil)
  |> authorize(:list_user)
  ~> to_query(User)
  ~> Repo.aggregate(:count, :id)
  |> to_response()
end
@doc """
Get the details of a specific user.
There are two ways to get a user:
- By providing the username and password of a user
- By providing a user ID
## Examples
### Using username and password
```
alias Freshcom.{Identity, Request}
Identity.get_user(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
identifier: %{
"type" => "standard",
"username" => "demouser",
"password" => "<PASSWORD>"
}
})
```
### Using a user ID
```
alias Freshcom.{Identity, Request}
Identity.get_user(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
requester_id: "4df750ca-ea88-4150-8a0b-7bb77efa43a4",
identifier: %{"id" => "8d168caa-dc9c-420e-bd88-7474463bcdea"}
})
```
## Identity Fields
| Key | Description |
|-----------------|---------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. |
| `:account_id` | ID of the target account, required if the target user is a managed user. |
| `:requester_id` | ID of the user making the request, required if `identifier["id"]` is provided. When required, must be the same as the target user or be a user with role in [Admin Roles](#module-roles). |
## Identifier Fields
| Key | Type | Description |
|--------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `"id"` | _String_ | ID of the target user, required if `identifier["username"]` and `identifier["password"]` is not provided. |
| `"username"` | _String_ | Username of the target user, required if `identifier["id"]` is not provided. |
| `"password"` | _String_ | <PASSWORD>, required if `identifier["id"]` is not provided. |
| `"type"` | _String_ | Type of the target user. |
"""
@spec get_user(Request.t()) :: APIModule.resp()
def get_user(%Request{} = req) do
  req = expand(req)

  # Lookups by ID clear the account scoping (account_id_by_identifier/1);
  # exactly one row must come back (ensure_one/1, chained with `~>>`).
  # The password and account-id checks only take effect for the
  # username/password lookup path — see check_password/2 and
  # check_account_id/2 below.
  req
  |> authorize(:get_user)
  ~> account_id_by_identifier()
  ~> to_query(User)
  ~> Repo.all()
  ~>> ensure_one()
  ~> check_password(req)
  ~> check_account_id(req)
  ~> preload(req)
  ~> translate(req.locale, req._default_locale_)
  |> to_response()
end
# When the target user is identified by ID, clear the account scoping on
# the request (see get_user/1); any other identifier is left untouched.
defp account_id_by_identifier(req) do
  case req do
    %{identifier: %{"id" => _id}} -> Request.put(req, :account_id, nil)
    _other -> req
  end
end
# Returns the user when no password check applies or the given password
# is valid; otherwise nil. A nil user passes straight through as nil.
defp check_password(nil, _req), do: nil

defp check_password(user, %{identifier: %{"password" => given}}) do
  if User.is_password_valid?(user, given), do: user, else: nil
end

defp check_password(user, _req), do: user
# Returns the user only when its account matches the request's target
# account (after stripping any prefix via Account.bare_id/1). Users
# without an account_id (standard users) always pass; nil stays nil.
defp check_account_id(nil, _req), do: nil
defp check_account_id(%{account_id: nil} = user, _req), do: user

defp check_account_id(%{account_id: user_aid} = user, %{account_id: target_aid}) do
  if user_aid == Account.bare_id(target_aid), do: user, else: nil
end

defp check_account_id(_user, _req), do: nil
@doc """
List all the accounts owned by a standard user.
Only live account with `"active"` status can be listed.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.list_account(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
requester_id: "<PASSWORD>"
})
```
## Identity Fields
| Key | Description |
|-----------------|---------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. Must be a system app. |
| `:requester_id` | _(required)_ ID of the user making the request. Must be a standard user. |
"""
@spec list_account(Request.t()) :: APIModule.resp()
def list_account(%Request{} = req) do
  req
  |> expand()
  # No search or sort is supported for accounts.
  |> Map.put(:_searchable_keys_, [])
  |> Map.put(:_sortable_keys_, [])
  |> authorize(:list_account)
  # List across the requester's accounts: drop the account scope and
  # force the documented filter (live mode, active status).
  ~> Map.put(:account_id, nil)
  ~> Map.put(:filter, [%{"mode" => "live"}, %{"status" => "active"}])
  ~> to_query(Account)
  ~> Repo.all()
  ~> preload(req)
  |> to_response()
end
@doc """
Count the number of accounts owned by a standard user.
Only live account with `"active"` status is counted.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.count_account(%Request{
client_id: "ab9f27c5-8636-498e-9<PASSWORD>",
requester_id: "<PASSWORD>"
})
```
## Request Fields
All fields are the same as `list_account/1`, except any pagination will be ignored.
"""
# Adds the missing @spec for consistency with the other public functions
# in this module (e.g. list_account/1), which all declare one.
@spec count_account(Request.t()) :: APIModule.resp()
def count_account(%Request{} = req) do
  # Same scope and filter as list_account/1; pagination is cleared
  # because the count always covers the whole result set.
  req
  |> expand()
  |> Map.put(:pagination, nil)
  |> authorize(:list_account)
  ~> Map.put(:account_id, nil)
  ~> Map.put(:filter, [%{"mode" => "live"}, %{"status" => "active"}])
  ~> to_query(Account)
  ~> Repo.aggregate(:count, :id)
  |> to_response()
end
@doc """
Create an account.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.create_account(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
requester_id: "<PASSWORD>",
data: %{
"name" => "SpaceX",
"default_locale" => "en"
}
})
```
## Identity Fields
| Key | Description |
|-----------------|---------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. Must be a system app. |
| `:requester_id` | _(required)_ ID of the user making the request. Must be a standard user. |
## Data Fields
| Key | Type | Description |
|----------------------|-----------|--------------------------------------------------------------------------------------------------------------------------------------------------|
| `"name"` | _String_ | Name of the default account to be created, defaults to `"Unnamed Account"`. |
| `"default_locale"` | _String_ | Default locale of the default account, defaults to `"en"`. |
"""
@spec create_account(Request.t()) :: APIModule.resp()
def create_account(%Request{} = req) do
  req
  |> to_command(%CreateAccount{})
  # A brand-new account has no target account ID; the requester becomes
  # the owner, and accounts created through this API are always "live".
  |> Map.put(:account_id, nil)
  |> Map.put(:owner_id, req.requester_id)
  |> Map.put(:mode, "live")
  |> dispatch_and_wait(AccountCreated)
  ~> Map.get(:account)
  ~> preload(req)
  |> to_response()
end
@doc """
Get the details of an account.
There are two ways to get an account:
- Using an account handle
- Using an account ID
## Examples
### Using an account handle
```
alias Freshcom.{Identity, Request}
Identity.get_account(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
identifier: %{"handle" => "freshcom"}
})
```
### Using an account ID
```
alias Freshcom.{Identity, Request}
Identity.get_account(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763"
})
```
## Identity Fields
| Key | Description |
|-----------------|---------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. |
| `:account_id` | ID of the target account, required if `identifier["handle"]` is not provided. |
| `:requester_id` | ID of the user making the request. |
## Identifier Fields
| Key | Type | Description |
|--------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `"handle"`   | _String_ | Handle of the target account, required if `:account_id` is not provided. |
"""
@spec get_account(Request.t()) :: APIModule.resp()
def get_account(%Request{identifier: %{"handle" => _}} = req) do
  # Lookup by handle: query the accounts table and require exactly one
  # match (ensure_one/1, chained with `~>>`).
  req
  |> expand()
  |> Map.put(:_identifiable_keys_, ["handle"])
  |> authorize(:get_account)
  ~> to_query(Account)
  ~> Repo.all()
  ~>> ensure_one()
  ~> Account.put_prefixed_id()
  |> to_response()
end

# Lookup by :account_id: reuses the account already resolved on the
# request (`:_account_`) instead of issuing another query.
def get_account(%Request{} = req) do
  req
  |> expand()
  |> Map.put(:_identifiable_keys_, ["id"])
  |> authorize(:get_account)
  ~> Map.get(:_account_)
  ~> Account.put_prefixed_id()
  |> to_response()
end
@doc """
Update the general information of an account.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.update_account_info(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
requester_id: "<PASSWORD>",
data: %{
"handle" => "spacex",
"name" => "SpaceX",
"caption" => "A new age of space exploration starts...",
"description" => "What more do you want?"
}
})
```
## Identity Fields
| Key | Description |
|-----------------|---------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. |
| `:account_id` | _(required)_ ID of the target account. |
| `:requester_id` | _(required)_ ID of the user making the request. Must be a user with role in [Admin Roles](#module-roles). |
## Data Fields
| Key | Type | Description |
|----------------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `"handle"` | _String_ | A unique identifier of the account, must be unique across all accounts. |
| `"name"` | _String_ | Name of the account. |
| `"caption"` | _String_ | Short description of the account. |
| `"description"` | _String_ | Long description of the account. |
| `"custom_data"` | _Map_ | Set of key-value pairs that you can attach to this resource. |
"""
@spec update_account_info(Request.t()) :: APIModule.resp()
def update_account_info(%Request{} = req) do
  # Dispatch an UpdateAccountInfo command and wait for the
  # AccountInfoUpdated event before building the response.
  req
  |> to_command(%UpdateAccountInfo{})
  |> dispatch_and_wait(AccountInfoUpdated)
  ~> Map.get(:account)
  ~> preload(req)
  |> to_response()
end
@doc """
Close an account.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.close_account(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
requester_id: "<PASSWORD>"
})
```
## Identity Fields
| Key | Description |
|-----------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. Must be a system app. |
| `:account_id` | _(required)_ ID of the target account. |
| `:requester_id` | _(required)_ ID of the user making the request. Must be a standard user that owns the target account. |
"""
@spec close_account(Request.t()) :: APIModule.resp()
def close_account(%Request{} = req) do
  # NOTE(review): the command's account_id is overridden with
  # identifier["id"], so an empty identifier sets it to nil even though
  # the @doc above only documents :account_id — confirm callers always
  # pass identifier["id"] (or that the command side handles nil).
  identifier = atomize_keys(req.identifier, ["id"])

  req
  |> to_command(%CloseAccount{})
  |> Map.put(:account_id, identifier[:id])
  |> dispatch_and_wait(AccountClosed)
  ~> Map.get(:account)
  |> to_response()
end
@doc """
Get the details of an API Key.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.get_api_key(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
requester_id: "<PASSWORD>"
})
```
## Identity Fields
| Key | Description |
|-----------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. |
| `:account_id` | _(required)_ ID of the target account. |
| `:requester_id` | _(required)_ ID of the user making the request. Must meet one of the following conditions: <ul style="margin: 0px;"><li>be the same as `identifier["user_id"]` if that is provided</li><li>be a user with role in [Admin Roles](#module-roles).</li></ul> |
## Identifier Fields
| Key | Description |
|----------------|-----------------------------------------------------------------------------------------------------------|
| `"user_id"` | ID of the target user, if provided will get user specific API keys, otherwise will only get the account's publishable API key. |
"""
@spec get_api_key(Request.t()) :: APIModule.resp()
def get_api_key(%Request{} = req) do
  req = expand(req)

  # Normalize the identifier (strip ID prefixes, default to the account's
  # publishable key) before querying; exactly one key must match.
  req
  |> Map.put(:_identifiable_keys_, ["id", "user_id"])
  |> authorize(:get_api_key)
  ~> get_api_key_normalize()
  ~> to_query(APIKey)
  ~> Repo.all()
  ~>> ensure_one()
  ~> APIKey.put_prefixed_id(req._account_)
  |> to_response()
end
# Strip any display prefix from an API key ID identifier.
defp get_api_key_normalize(%{identifier: %{"id" => id}} = req) do
  Request.put(req, :identifier, "id", APIKey.bare_id(id))
end

# Without a "user_id" identifier, force user_id: nil so the query matches
# the account's publishable key rather than user-specific keys (see @doc
# of get_api_key/1).
defp get_api_key_normalize(%{identifier: identifier} = req) do
  if !identifier["user_id"] do
    Map.put(req, :identifier, %{"user_id" => nil})
  else
    req
  end
end

# No identifier map on the request — nothing to normalize.
defp get_api_key_normalize(req), do: req
@doc """
Exchange the given API Key for an API Key of the same user but for a different account.
If the given API Key is already for the specified account, then it is simply
returned.
This function is intended for exchanging a live API Key for a corresponding
test API Key or for another live API Key owned by the same user but
for a different account.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.exchange_api_key(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
data: %{"id" => "cae028f2-f5e8-402d-a0b9-4bf5ae478151"}
})
```
## Identity Fields
| Key | Description |
|-----------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. |
| `:account_id` | _(required)_ ID of the target account. |
| `:requester_id` | ID of the user making the request. |
## Data Fields
| Key | Description |
|-----------|-----------------------------------------------------------------------------------------------------------|
| `"id"` | _(required)_ ID of the API key. |
"""
@spec exchange_api_key(Request.t()) :: APIModule.resp()
def exchange_api_key(%Request{} = req) do
  req = expand(req)

  req
  |> authorize(:exchange_api_key)
  # `~>` only runs when authorization succeeded; do_exchange_api_key/1
  # returns nil when no matching key exists.
  ~> do_exchange_api_key()
  ~> APIKey.put_prefixed_id(req._account_)
  |> to_response()
end
# Resolves the API key to hand back for the target account.
# Returns nil when there is no target account or no exchangeable key;
# the caller maps nil to an error response.
defp do_exchange_api_key(%{_account_: nil}), do: nil

defp do_exchange_api_key(%{_account_: account, identifier: %{"id" => id}}) do
  api_key = Repo.get(APIKey, APIKey.bare_id(id))

  cond do
    is_nil(api_key) ->
      nil

    # Exchanging for the same account
    api_key.account_id == account.id ->
      api_key

    # Exchanging for the test account
    api_key.account_id == account.live_account_id ->
      Repo.get_by(APIKey, account_id: account.id, user_id: api_key.user_id)

    # Exchanging for other live account owned by the same user
    api_key.user_id == account.owner_id ->
      Repo.get_by(APIKey, account_id: account.id, user_id: api_key.user_id)

    true ->
      nil
  end
end
@doc """
Add an app to an account.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.add_app(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
requester_id: "4<PASSWORD>",
data: %{
"name" => "Test"
}
})
```
## Identity Fields
| Key | Description |
|-----------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. Must be a system app. |
| `:account_id` | _(required)_ ID of the target account. |
| `:requester_id` | _(required)_ ID of the user making the request.<br/> Must be a user with role `"owner"`, `"administrator"` or `"developer"`. |
## Data Fields
| Key | Type | Description |
|----------|----------|-------------------------------|
| `"name"` | _String_ | _(required)_ Name of the app. |
"""
@spec add_app(Request.t()) :: APIModule.resp()
def add_app(%Request{} = req) do
  req
  |> to_command(%AddApp{})
  # Blocks until the app projection is applied (see wait/1 for AppAdded).
  |> dispatch_and_wait(AppAdded)
  ~> Map.get(:app)
  ~> preload(req)
  ~> App.put_prefixed_id()
  |> to_response()
end
@doc """
Get the details of an app.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.get_app(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
requester_id: "4df750ca-ea88-4150-8a0b-7bb77efa43a4",
identifier: %{"id" => "8d168caa-dc9c-420e-bd88-7474463bcdea"}
})
```
## Identity Fields
| Key | Description |
|-----------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. |
| `:account_id` | _(required)_ ID of the target account. |
| `:requester_id` | _(required)_ ID of the user making the request. Must be a user with role in [Development Roles](#module-roles). |
## Identifier Fields
| Key | Description |
|-----------|-----------------------------------------------------------------------------------------------------------|
| `"id"` | _(required)_ ID of the target app. |
"""
@spec get_app(Request.t()) :: APIModule.resp()
def get_app(%Request{} = req) do
  req
  |> expand()
  |> authorize(:get_app)
  # Strip the account prefix from the requested id before querying.
  ~> get_app_normalize()
  ~> to_query(App)
  ~> Repo.all()
  # Exactly one row must match, otherwise this becomes an error result.
  ~>> ensure_one()
  ~> App.put_prefixed_id()
  |> to_response()
end
# Replaces a prefixed app id in the identifier with its bare UUID form;
# requests without an "id" identifier pass through untouched.
defp get_app_normalize(%{identifier: %{"id" => prefixed_id}} = req),
  do: Request.put(req, :identifier, "id", App.bare_id(prefixed_id))

defp get_app_normalize(req), do: req
@doc """
List all apps of an account.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.list_app(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
requester_id: "4df750ca-ea88-4150-8a0b-7bb77efa43a4"
})
```
## Identity Fields
| Key | Description |
|-----------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. |
| `:account_id` | _(required)_ ID of the target account. |
| `:requester_id` | _(required)_ ID of the user making the request. Must be a user with role in [Development Roles](#module-roles). |
"""
@spec list_app(Request.t()) :: APIModule.resp()
def list_app(%Request{} = req) do
  req = expand(req)

  req
  # Apps support no filtering, searching or sorting — clear all key lists.
  |> Map.put(:_filterable_keys_, [])
  |> Map.put(:_searchable_keys_, [])
  |> Map.put(:_sortable_keys_, [])
  |> authorize(:list_app)
  ~> to_query(App)
  ~> Repo.all()
  ~> preload(req)
  ~> App.put_account(req._account_)
  ~> App.put_prefixed_id()
  |> to_response()
end
@doc """
Count the number of apps of an account.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.list_app(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
requester_id: "<PASSWORD>"
})
```
## Request Fields
All fields are the same as `list_app/1`, except any pagination will be ignored.
"""
def count_app(%Request{} = req) do
req
|> expand()
|> Map.put(:pagination, nil)
|> authorize(:list_app)
~> to_query(App)
~> Repo.aggregate(:count, :id)
|> to_response()
end
@doc """
Update an app.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.update_app(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
requester_id: "<PASSWORD>",
identifier: %{"id" => "8d168caa-dc9c-420e-bd88-7474463bcdea"},
data: %{
"name" => "Example App"
}
})
```
## Identity Fields
| Key | Description |
|-----------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. |
| `:account_id` | _(required)_ ID of the target account. |
| `:requester_id` | _(required)_ ID of the user making the request. Must be a user with role in [Development Roles](#module-roles). |
## Identifier Fields
| Key | Description |
|-----------|-----------------------------------------------------------------------------------------------------------|
| `"id"` | _(required)_ ID of the target app. |
## Data Fields
| Key | Type | Description |
|----------|----------|-------------------------------|
| `"name"` | _String_ | Name of the app. |
"""
@spec update_app(Request.t()) :: APIModule.resp()
def update_app(%Request{} = req) do
  # The identifier arrives with string keys; the command expects :app_id.
  identifier = atomize_keys(req.identifier, ["id"])

  req
  |> to_command(%UpdateApp{})
  |> Map.put(:app_id, identifier[:id])
  |> dispatch_and_wait(AppUpdated)
  ~> Map.get(:app)
  ~> preload(req)
  # NOTE(review): unlike add_app/1 and get_app/1, the result is not passed
  # through App.put_prefixed_id/1 — confirm whether that is intentional.
  |> to_response()
end
@doc """
Delete an app from an account.
## Examples
```
alias Freshcom.{Identity, Request}
Identity.delete_app(%Request{
client_id: "ab9f27c5-8636-498e-96ab-515de6aba53e",
account_id: "c59ca218-3850-497b-a03f-a0584e5c7763",
requester_id: "4<PASSWORD>ca-ea88-4150-8a0b-7bb77efa43a4",
identifier: %{"id" => "8d168caa-dc9c-420e-bd88-7474463bcdea"}
})
```
## Identity Fields
| Key | Description |
|-----------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `:client_id` | _(required)_ ID of the app that is making the request on behalf of the user. |
| `:account_id` | _(required)_ ID of the target account. |
| `:requester_id` | _(required)_ ID of the user making the request. Must be a user with role in [Development Roles](#module-roles). |
## Identifier Fields
| Key | Type | Description |
|---------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `"id"` | _String_ | _(required)_ ID of the target app. |
"""
@spec delete_app(Request.t()) :: APIModule.resp()
def delete_app(%Request{} = req) do
  # The identifier arrives with string keys; the command expects :app_id.
  identifier = atomize_keys(req.identifier, ["id"])

  req
  |> to_command(%DeleteApp{})
  |> Map.put(:app_id, identifier[:id])
  # Blocks until the app projection reflects the deletion.
  |> dispatch_and_wait(AppDeleted)
  ~> Map.get(:app)
  |> to_response()
end
# Dispatches `cmd` and blocks until the projections for `event` are applied,
# using the local wait/1 clauses as the wait strategy.
defp dispatch_and_wait(cmd, event), do: dispatch_and_wait(cmd, event, &wait/1)
# Blocks until the read-side projections produced by the given event are
# applied, so the API can immediately read back what was just written.
defp wait(%UserRegistered{user_id: user_id}) do
  Projector.wait([
    {:user, UserProjector, &(&1.id == user_id)},
    {:live_account, AccountProjector, &(&1.owner_id == user_id && &1.mode == "live")},
    {:test_account, AccountProjector, &(&1.owner_id == user_id && &1.mode == "test")},
    # NOTE(review): these two wait for *any* app projection to exist —
    # confirm they are not meant to match the registered user's own apps.
    {:live_app, AppProjector, &(!!&1)},
    {:test_app, AppProjector, &(!!&1)}
  ])
end

# User-scoped events only need the user projection to catch up.
defp wait(%et{user_id: user_id})
     when et in [
            UserAdded,
            UserInfoUpdated,
            DefaultAccountChanged,
            UserRoleChanged,
            PasswordResetTokenGenerated,
            PasswordChanged,
            UserDeleted
          ] do
  Projector.wait([
    {:user, UserProjector, &(&1.id == user_id)}
  ])
end

# Account-scoped events wait on the account projection.
defp wait(%et{account_id: account_id})
     when et in [AccountCreated, AccountInfoUpdated, AccountClosed] do
  Projector.wait([
    {:account, AccountProjector, &(&1.id == account_id)}
  ])
end

# App-scoped events wait on the app projection.
defp wait(%et{app_id: app_id}) when et in [AppAdded, AppUpdated, AppDeleted] do
  Projector.wait([
    {:app, AppProjector, &(&1.id == app_id)}
  ])
end
end
|
lib/freshcom/api/identity.ex
| 0.821259
| 0.844858
|
identity.ex
|
starcoder
|
defmodule Asteroid.ObjectStore.AuthenticationEvent.Riak do
  @moduledoc """
  Riak implementation of the `Asteroid.ObjectStore.AuthenticationEvent` behaviour

  ## Initializing a Riak bucket type

  ```console
  $ sudo riak-admin bucket-type create ephemeral_token '{"props":{"datatype":"map", "backend":"leveldb_mult"}}'
  ephemeral_token created

  $ sudo riak-admin bucket-type activate ephemeral_token
  ephemeral_token has been activated
  ```

  ## Options
  The options (`Asteroid.ObjectStore.AuthenticationEvent.opts()`) are:
  - `:bucket_type`: a `String.t()` for the bucket type that must be created beforehand in
  Riak. No defaults, **mandatory**
  - `bucket_name`: a `String.t()` for the bucket name. Defaults to `"authentication_event"`
  - `:purge_interval`: the `integer()` interval in seconds the purge process will be triggered,
  or `:no_purge` to disable purge. Defaults to `60` (1 minute)
  - `:rows`: the maximum number of results that a search will return. Defaults to `1_000_000`.
  Search is used by the purge process.

  ## Installation function
  The `install/1` function executes the following actions:
  - it installs a custom schema (`asteroid_object_store_authentication_event_riak_schema`)
  - it creates a new index (`asteroid_object_store_authentication_event_riak_index`) on the bucket
  (and not the bucket type - so as to avoid collisions)

  This is necessary to:
  1. Efficiently index expiration timestamp
  2. Disable indexing of raw authentication event data

  ## Purge process
  The purge process uses the `Singleton` library. Therefore the purge process will be unique
  per cluster (and that's probably what you want if you use Riak).
  """

  require Logger

  @behaviour Asteroid.ObjectStore.AuthenticationEvent

  @impl true
  def install(opts) do
    bucket_type = opts[:bucket_type] || raise "Missing bucket type"
    bucket_name = opts[:bucket_name] || "authentication_event"

    with :ok <-
           Riak.Search.Schema.create(
             schema_name(),
             (:code.priv_dir(:asteroid) ++ '/riak/object_store_authentication_event_schema.xml')
             |> File.read!()
           ),
         :ok <- Riak.Search.Index.put(index_name(), schema_name()),
         :ok <- Riak.Search.Index.set({bucket_type, bucket_name}, index_name()) do
      Logger.info(
        "#{__MODULE__}: created authentication event store `#{bucket_name}` " <>
          "of bucket type `#{bucket_type}`"
      )

      :ok
    else
      e ->
        # Fix: the error message was built but never logged before.
        Logger.error(
          "#{__MODULE__}: failed to create authentication event store `#{bucket_name}` " <>
            "of bucket type `#{bucket_type}` (reason: #{inspect(e)})"
        )

        {:error, "#{inspect(e)}"}
    end
  catch
    :exit, e ->
      bucket_type = opts[:bucket_type] || raise "Missing bucket type"
      bucket_name = opts[:bucket_name] || "authentication_event"

      # Fix: the error message was built but never logged before.
      Logger.error(
        "#{__MODULE__}: failed to create authentication event store `#{bucket_name}` " <>
          "of bucket type `#{bucket_type}` (reason: #{inspect(e)})"
      )

      {:error, "#{inspect(e)}"}
  end

  @impl true
  def start_link(opts) do
    opts = Keyword.merge([purge_interval: 60], opts)

    # we launch the process anyway because we need to return a process
    # but the singleton will do nothing if the value is `:no_purge`
    Singleton.start_child(__MODULE__.Purge, opts, __MODULE__)
  end

  @impl true
  def get(authentication_event_id, opts) do
    bucket_type = opts[:bucket_type] || raise "Missing bucket type"
    bucket_name = opts[:bucket_name] || "authentication_event"

    case Riak.find(bucket_type, bucket_name, authentication_event_id) do
      res when not is_nil(res) ->
        # The event payload is stored as a base64-encoded Erlang term in the
        # "authentication_event_data_binary" register (see put/2).
        authentication_event =
          res
          |> Riak.CRDT.Map.get(:register, "authentication_event_data_binary")
          |> Base.decode64!(padding: false)
          |> :erlang.binary_to_term()

        Logger.debug(
          "#{__MODULE__}: getting authentication event `#{authentication_event_id}`, " <>
            "value: `#{inspect(authentication_event)}`"
        )

        {:ok, authentication_event}

      nil ->
        Logger.debug(
          "#{__MODULE__}: getting authentication event `#{authentication_event_id}`, " <>
            "value: `nil`"
        )

        {:ok, nil}
    end
  catch
    :exit, e ->
      {:error, "#{inspect(e)}"}
  end

  @impl true
  def get_from_authenticated_session_id(authenticated_session_id, opts) do
    # Escape double quotes so the value is a valid quoted Solr term.
    search(
      "authenticated_session_id_register:\"#{
        String.replace(authenticated_session_id, "\"", "\\\"")
      }\"",
      opts
    )
  end

  @impl true
  def put(authentication_event, opts) do
    bucket_type = opts[:bucket_type] || raise "Missing bucket type"
    bucket_name = opts[:bucket_name] || "authentication_event"

    riak_map = Riak.CRDT.Map.new()

    # Serialize the whole event as an opaque, non-indexed register.
    authentication_event_data_binary =
      authentication_event
      |> :erlang.term_to_binary()
      |> Base.encode64(padding: false)
      |> Riak.CRDT.Register.new()

    riak_map =
      Riak.CRDT.Map.put(
        riak_map,
        "authentication_event_data_binary",
        authentication_event_data_binary
      )

    riak_map =
      if authentication_event.authenticated_session_id != nil do
        Riak.CRDT.Map.put(
          riak_map,
          "authenticated_session_id",
          Riak.CRDT.Register.new(authentication_event.authenticated_session_id)
        )
      else
        riak_map
      end

    # The expiration timestamp is indexed so the purge process can search it.
    riak_map =
      if authentication_event.data["exp"] != nil do
        Riak.CRDT.Map.put(
          riak_map,
          "exp_int",
          Riak.CRDT.Register.new(to_string(authentication_event.data["exp"]))
        )
      else
        Logger.warn(
          "Inserting authentication event with no expiration: #{
            String.slice(authentication_event.id, 1..5)
          }..."
        )

        riak_map
      end

    Riak.update(riak_map, bucket_type, bucket_name, authentication_event.id)

    Logger.debug(
      "#{__MODULE__}: stored authentication event `#{authentication_event.id}`, " <>
        "value: `#{inspect(authentication_event)}`"
    )

    :ok
  catch
    :exit, e ->
      {:error, "#{inspect(e)}"}
  end

  @impl true
  def delete(authentication_event_id, opts) do
    bucket_type = opts[:bucket_type] || raise "Missing bucket type"
    bucket_name = opts[:bucket_name] || "authentication_event"

    Riak.delete(bucket_type, bucket_name, authentication_event_id)

    Logger.debug("#{__MODULE__}: deleted authentication event `#{authentication_event_id}`")

    :ok
  catch
    :exit, e ->
      {:error, "#{inspect(e)}"}
  end

  @doc """
  Searches in Riak-stored authentication events

  This function is used internaly and made available for user convenience. authentication events are
  stored in the following fields:

  | Field name                                  |  Indexed as   |
  |---------------------------------------------|:-------------:|
  | authentication_event_data_binary_register   | *not indexed* |
  | authenticated_session_id                    | string        |
  | exp_int_register                            | int           |

  Note that you are responsible for escaping values accordingly with Solr escaping.
  """
  @spec search(String.t(), Asteroid.ObjectStore.AuthenticationEvent.opts()) ::
          {:ok, [Asteroid.OIDC.AuthenticationEvent.id()]}
          | {:error, any()}
  def search(search_query, opts) do
    case Riak.Search.query(index_name(), search_query, rows: opts[:rows] || 1_000_000) do
      {:ok, {:search_results, result_list, _, _}} ->
        # "_yz_rk" is the Riak key of each matching document.
        {:ok,
         for {_index_name, attribute_list} <- result_list do
           :proplists.get_value("_yz_rk", attribute_list)
         end}

      {:error, _} = error ->
        error
    end
  end

  @spec schema_name() :: String.t()
  defp schema_name(), do: "asteroid_object_store_authentication_event_riak_schema"

  @doc false
  @spec index_name() :: String.t()
  def index_name(), do: "asteroid_object_store_authentication_event_riak_index"
end
|
lib/asteroid/object_store/authentication_event/riak.ex
| 0.86009
| 0.6955
|
riak.ex
|
starcoder
|
defmodule K8s.Conn do
  @moduledoc """
  Handles authentication and connection configuration details for a Kubernetes cluster.

  `Conn`ections can be registered via Mix.Config or environment variables.
  Connections can also be dynamically built during application runtime.
  """

  alias __MODULE__
  alias K8s.Conn.{PKI, RequestOptions, Config}

  # Built-in auth providers; user-configured providers are prepended and
  # tried first (see providers/0 and get_auth/2).
  @providers [
    K8s.Conn.Auth.Certificate,
    K8s.Conn.Auth.Token,
    K8s.Conn.Auth.AuthProvider
  ]

  @typep auth_t :: nil | struct

  defstruct cluster_name: nil,
            user_name: nil,
            url: "",
            insecure_skip_tls_verify: false,
            ca_cert: nil,
            auth: nil,
            discovery_driver: nil,
            discovery_opts: nil

  @type t :: %__MODULE__{
          cluster_name: atom(),
          user_name: String.t() | nil,
          url: String.t(),
          insecure_skip_tls_verify: boolean(),
          ca_cert: String.t() | nil,
          auth: auth_t,
          discovery_driver: module(),
          discovery_opts: Keyword.t()
        }

  @doc """
  List of all registered connections.
  Connections are registered via Mix.Config or env variables.

  ## Examples
  ```elixir
  K8s.Conn.list()
  [%K8s.Conn{ca_cert: nil, auth: %K8s.Conn.Auth{}, cluster_name: :"docker-for-desktop-cluster", discovery_driver: K8s.Discovery.Driver.File, discovery_opts: [config: "test/support/discovery/example.json"], insecure_skip_tls_verify: true, url: "https://localhost:6443", user_name: "docker-for-desktop"}]
  ```
  """
  @spec list() :: list(K8s.Conn.t())
  def list() do
    Enum.reduce(Config.all(), [], fn {cluster_name, conf}, agg ->
      conn = config_to_conn(conf, cluster_name)
      [conn | agg]
    end)
  end

  @doc """
  Lookup a registered connection by name. See `K8s.Conn.Config`.

  ## Examples
  ```elixir
  K8s.Conn.lookup(:test)
  {:ok, %K8s.Conn{ca_cert: nil, auth: %K8s.Conn.Auth{}, cluster_name: :"docker-for-desktop-cluster", discovery_driver: K8s.Discovery.Driver.File, discovery_opts: [config: "test/support/discovery/example.json"], insecure_skip_tls_verify: true, url: "https://localhost:6443", user_name: "docker-for-desktop"}}
  ```
  """
  @spec lookup(atom()) :: {:ok, K8s.Conn.t()} | {:error, :connection_not_registered}
  def lookup(cluster_name) do
    config = Map.get(Config.all(), cluster_name)

    case config do
      nil -> {:error, :connection_not_registered}
      config -> {:ok, config_to_conn(config, cluster_name)}
    end
  end

  # Resolves a registered config entry to a Conn: no `:conn` key or
  # `%{use_sa: true}` means in-cluster service-account credentials;
  # anything else is treated as a kubeconfig file path.
  @spec config_to_conn(map, atom) :: K8s.Conn.t()
  defp config_to_conn(config, cluster_name) do
    case Map.get(config, :conn) do
      nil ->
        K8s.Conn.from_service_account(cluster_name)

      %{use_sa: true} ->
        K8s.Conn.from_service_account(cluster_name)

      conn_path ->
        opts = config[:conn_opts] || []
        K8s.Conn.from_file(conn_path, opts)
    end
  end

  @doc """
  Reads configuration details from a kubernetes config file.

  Defaults to `current-context`.

  ### Options
  * `context` sets an alternate context
  * `cluster` set or override the cluster read from the context
  * `user` set or override the user read from the context
  * `discovery_driver` module name to use for discovery
  * `discovery_opts` options for discovery module
  """
  @spec from_file(binary, keyword) :: K8s.Conn.t()
  def from_file(config_file, opts \\ []) do
    abs_config_file = Path.expand(config_file)
    # Relative cert/key paths inside the kubeconfig resolve against its dir.
    base_path = Path.dirname(abs_config_file)
    config = YamlElixir.read_from_file!(abs_config_file)
    context_name = opts[:context] || config["current-context"]
    context = find_by_name(config["contexts"], context_name, "context")
    user_name = opts[:user] || context["user"]
    user = find_by_name(config["users"], user_name, "user")
    cluster_name = opts[:cluster] || context["cluster"]
    cluster = find_by_name(config["clusters"], cluster_name, "cluster")
    discovery_driver = opts[:discovery_driver] || K8s.Discovery.default_driver()
    discovery_opts = opts[:discovery_opts] || K8s.Discovery.default_opts()

    %Conn{
      cluster_name: String.to_atom(cluster_name),
      user_name: user_name,
      url: cluster["server"],
      ca_cert: PKI.cert_from_map(cluster, base_path),
      auth: get_auth(user, base_path),
      # NOTE(review): this is nil (not false) when the kubeconfig omits the
      # key, despite the boolean() typespec — downstream only matches `true`.
      insecure_skip_tls_verify: cluster["insecure-skip-tls-verify"],
      discovery_driver: discovery_driver,
      discovery_opts: discovery_opts
    }
  end

  @doc """
  Generates configuration from kubernetes service account

  ## Links
  [kubernetes.io :: Accessing the API from a Pod](https://kubernetes.io/docs/tasks/access-application-cluster/access-cluster/#accessing-the-api-from-a-pod)
  """
  @spec from_service_account(atom()) :: K8s.Conn.t()
  def from_service_account(cluster_name),
    do: from_service_account(cluster_name, "/var/run/secrets/kubernetes.io/serviceaccount")

  @spec from_service_account(atom, String.t()) :: K8s.Conn.t()
  def from_service_account(cluster_name, root_sa_path) do
    # KUBERNETES_SERVICE_HOST/PORT are injected by kubelet into every pod.
    host = System.get_env("KUBERNETES_SERVICE_HOST")
    port = System.get_env("KUBERNETES_SERVICE_PORT")
    cert_path = Path.join(root_sa_path, "ca.crt")
    token_path = Path.join(root_sa_path, "token")

    %Conn{
      cluster_name: cluster_name,
      url: "https://#{host}:#{port}",
      ca_cert: PKI.cert_from_pem(cert_path),
      auth: %K8s.Conn.Auth.Token{token: File.read!(token_path)},
      discovery_driver: K8s.Discovery.default_driver(),
      discovery_opts: K8s.Discovery.default_opts()
    }
  end

  # Finds the entry named `name` in a kubeconfig section and unwraps its
  # `type` sub-map (e.g. the "cluster" map of a clusters entry).
  # NOTE(review): raises BadMapError if no entry matches `name`.
  @spec find_by_name([map()], String.t(), String.t()) :: map()
  defp find_by_name(items, name, type) do
    items
    |> Enum.find(fn item -> item["name"] == name end)
    |> Map.get(type)
  end

  @doc false
  @spec resolve_file_path(binary, binary) :: binary
  def resolve_file_path(file_name, base_path) do
    case Path.type(file_name) do
      :absolute -> file_name
      _ -> Path.join([base_path, file_name])
    end
  end

  # The first provider whose create/2 returns a truthy value wins;
  # providers presumably return nil for user maps they don't handle.
  @spec get_auth(map(), String.t()) :: auth_t
  defp get_auth(%{} = auth_map, base_path) do
    Enum.find_value(providers(), fn provider -> provider.create(auth_map, base_path) end)
  end

  # User-configured auth providers take precedence over the built-ins.
  @spec providers() :: list(atom)
  defp providers do
    Application.get_env(:k8s, :auth_providers, []) ++ @providers
  end

  defimpl K8s.Conn.RequestOptions, for: __MODULE__ do
    @doc "Generates HTTP Authorization options for certificate authentication"
    @spec generate(K8s.Conn.t()) :: K8s.Conn.RequestOptions.generate_t()
    def generate(%K8s.Conn{} = conn) do
      case RequestOptions.generate(conn.auth) do
        {:ok, %RequestOptions{headers: headers, ssl_options: auth_options}} ->
          # Only an explicit `true` disables peer verification.
          verify_options =
            case conn.insecure_skip_tls_verify do
              true -> [verify: :verify_none]
              _ -> []
            end

          ca_options =
            case conn.ca_cert do
              nil -> []
              cert -> [cacerts: [cert]]
            end

          {:ok,
           %RequestOptions{
             headers: headers,
             ssl_options: auth_options ++ verify_options ++ ca_options
           }}

        error ->
          error
      end
    end
  end
end
|
lib/k8s/conn.ex
| 0.865863
| 0.546859
|
conn.ex
|
starcoder
|
defmodule PigLatin do
  # Letters treated as true vowels.
  @vowels ["a", "e", "i", "o", "u"]

  @doc """
  Given a `phrase`, translate it a word at a time to Pig Latin.

  Words beginning with consonants should have the consonant moved to the end of
  the word, followed by "ay".

  Words beginning with vowels (aeiou) should have "ay" added to the end of the
  word.

  Some groups of letters are treated like consonants, including "ch", "qu",
  "squ", "th", "thr", and "sch".

  Some groups are treated like vowels, including "yt" and "xr".
  """
  @spec translate(phrase :: String.t()) :: String.t()
  def translate(phrase) do
    phrase
    |> String.split(" ", trim: true)
    |> Enum.map_join(" ", &pigalize/1)
  end

  # Translates a single word, dispatching to vowel or consonant treatment.
  defp pigalize(word) do
    cond do
      starts_with_vowel?(word) || is_xy_vowel?(word) ->
        vowel_behavior(word)

      is_qu_consonant?(word) ->
        # Keep the "qu" digraph together: rotate up to and including the "u".
        consonant_behavior(word, qu_index(word) + 1)

      true ->
        consonant_behavior(word, first_vowel_sound_index(word))
    end
  end

  # Index of the first vowel *sound*: a true vowel if present, otherwise a
  # "y" acting as a vowel after a consonant cluster ("my", "rhythm").
  # Falls back to the word length so vowel-less words rotate wholly instead
  # of crashing (the original raised in String.split_at/2 on a nil index).
  defp first_vowel_sound_index(word) do
    find_vowel_index(word) || find_vowel_index(word, "y") || String.length(word)
  end

  # Vowel treatment: simply append "ay".
  defp vowel_behavior(word), do: word <> "ay"

  defp starts_with_vowel?(word), do: String.starts_with?(word, @vowels)

  # "x" or "y" followed by a consonant sounds like a vowel ("xray", "yttria").
  defp is_xy_vowel?(word) do
    String.starts_with?(word, ["x", "y"]) and not is_xy_consonant?(word)
  end

  # The combination "x"+vowel or "y"+vowel means the phrase has a
  # consonant behavior
  defp is_xy_consonant?(word) do
    xys = for vowel <- @vowels, do: ["x" <> vowel | ["y" <> vowel | []]]
    cons_behavior = xys |> List.flatten()
    word |> String.slice(0..1) |> String.contains?(cons_behavior)
  end

  # "qu" somewhere within the first three characters ("queen", "square").
  defp is_qu_consonant?(word) do
    word |> String.slice(0..2) |> String.contains?("qu")
  end

  defp qu_index(word), do: word |> find_vowel_index("u")

  # Gets the index of the first character matching `vowel` (a pattern string
  # or list of strings); returns nil when there is no match.
  defp find_vowel_index(phrase, vowel \\ @vowels) do
    phrase
    |> String.split("", trim: true)
    |> Enum.find_index(fn char -> String.contains?(char, vowel) end)
  end

  # Rotates the leading consonant cluster (everything before `index`) to the
  # end of the word, then appends "ay".
  defp consonant_behavior(phrase, index) do
    {prefix, suffix} = phrase |> String.split_at(index)
    vowel_behavior(suffix <> prefix)
  end
end
|
elixir/pig-latin/lib/pig_latin.ex
| 0.729905
| 0.444324
|
pig_latin.ex
|
starcoder
|
defmodule Scitree do
  @moduledoc """
  Bindings to Yggdrasil Decision Forests (YDF), with a
  collection of decision forest model algorithms.
  """

  alias Scitree.Native
  alias Scitree.Infer
  alias Scitree.Validations, as: Val

  # Note: the redundant `alias Nx` was removed — `Nx` is already a
  # top-level module and resolves without an alias.

  # Validations applied to the dataset/config before training and prediction.
  @train_validations [:label, :dataset_size, :learner, :task]
  @pred_validations [:dataset_size]

  @doc """
  Train a model using the scitree config and a dataset.

  if the training is successfull, this function returns
  a model reference.

  ## Examples

      iex> data_train = %{
      ...>   "outlook" => [1, 1, 2, 3, 3, 3, 2, 1, 1, 3, 1, 2, 2, 3],
      ...>   "temperature" => [1, 1, 1, 2, 3, 3, 3, 2, 3, 2, 2, 2, 1, 2],
      ...>   "humidity" => [1, 1, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 1],
      ...>   "wind" => [1, 2, 1, 1, 1, 2, 2, 1, 1, 1, 2, 2, 1, 2],
      ...>   "play_tennis" => [1, 1, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 1]
      ...> }
      iex> config = Scitree.Config.init() |> Scitree.Config.label("play_tennis")
      iex> Scitree.train(config, data_train)
  """
  def train(config, data) do
    data = Infer.execute(data)

    case Val.validate(data, config, @train_validations) do
      :ok ->
        case Native.train(config, data) do
          {:ok, ref} ->
            ref

          # NIF errors are returned as charlists; convert before raising.
          {:error, reason} ->
            raise List.to_string(reason)
        end

      {:error, reason} ->
        raise reason
    end
  end

  @doc """
  Apply the model to a dataset.

  The reference of the model to be executed must be received
  in the first argument and as the second argument a valid dataset.

  ## Examples

      iex> data_train = %{
      ...>   "outlook" => [1, 1, 2, 3, 3, 3, 2, 1, 1, 3, 1, 2, 2, 3],
      ...>   "temperature" => [1, 1, 1, 2, 3, 3, 3, 2, 3, 2, 2, 2, 1, 2],
      ...>   "humidity" => [1, 1, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 1],
      ...>   "wind" => [1, 2, 1, 1, 1, 2, 2, 1, 1, 1, 2, 2, 1, 2],
      ...>   "play_tennis" => [1, 1, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 1]
      ...> }
      iex> data_predict = %{
      ...>   "outlook" => [1, 1, 2, 3, 3],
      ...>   "temperature" => [1, 1, 1, 2, 3],
      ...>   "humidity" => [1, 1, 1, 1, 2],
      ...>   "wind" => [1, 2, 1, 1, 1]
      ...> }
      iex> config = Scitree.Config.init() |> Scitree.Config.label("play_tennis")
      iex> ref = Scitree.train(config, data_train)
      iex> Scitree.predict(ref, data_predict)
      #Nx.Tensor<
        f32[5][1]
        [
          [0.09257776290178299],
          [0.007093166466802359],
          [0.90837562084198],
          [0.6750206351280212],
          [0.9997445940971375]
        ]
      >
  """
  def predict(reference, data) do
    data = Infer.execute(data)

    case Val.validate(data, @pred_validations) do
      :ok ->
        case Native.predict(reference, data) do
          {:ok, results, chunk_size} ->
            # The NIF returns a flat list; chunk it into rows per example.
            results
            |> Enum.chunk_every(chunk_size)
            |> Nx.tensor()

          {:error, reason} ->
            raise List.to_string(reason)
        end

      {:error, reason} ->
        raise reason
    end
  end

  @doc """
  A data specification is a list of attribute definitions that indicates
  how a dataset is semantically understood.

  The definition of an attribute contains its name, semantic type, and
  type-dependent meta-information.

  You can configure a simple template:

      data_train = %{
        "outlook" => [1, 1, 2, 3, 3, 3, 2, 1, 1, 3, 1, 2, 2, 3],
        "temperature" => [1, 1, 1, 2, 3, 3, 3, 2, 3, 2, 2, 2, 1, 2],
        "humidity" => [1, 1, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 1],
        "wind" => [1, 2, 1, 1, 1, 2, 2, 1, 1, 1, 2, 2, 1, 2],
        "play_tennis" => [1, 1, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 1]
      }

      ref =
        Scitree.Config.init()
        |> Scitree.Config.label("play_tennis")
        |> Scitree.train(config, data_train)

  You can inspect the model to fetch the details:

      Number of records: 14
      Number of columns: 5
      Number of columns by type:
        CATEGORICAL: 5 (100%)
      Columns:
        CATEGORICAL: 5 (100%)
          0: "humidity" CATEGORICAL integerized vocab-size:3 no-ood-item
          1: "outlook" CATEGORICAL integerized vocab-size:4 no-ood-item
          2: "play_tennis" CATEGORICAL integerized vocab-size:3 no-ood-item
          3: "temperature" CATEGORICAL integerized vocab-size:4 no-ood-item
          4: "wind" CATEGORICAL integerized vocab-size:3 no-ood-item
      Terminology:
        nas: Number of non-available (i.e. missing) values.
        ood: Out of dictionary.
        manually-defined: Attribute which type is manually defined by the user i.e. the type was not automatically inferred.
        tokenized: The attribute value is obtained through tokenization.
        has-dict: The attribute is attached to a string dictionary e.g. a categorical attribute stored as a string.
        vocab-size: Number of unique values.
  """
  def inspect_dataspec(reference) do
    case Native.show_dataspec(reference) do
      {:ok, result} ->
        result
        |> List.to_string()
        |> IO.write()

        # Return the reference so this call can sit inside a pipeline.
        reference

      {:error, reason} ->
        raise List.to_string(reason)
    end
  end

  @doc """
  Save the model in a directory.

  The directory must not yet exist and will be created by
  this function.
  """
  def save(ref, path) do
    if File.dir?(path) do
      raise "The directory already exists"
    else
      # Use the existing Native alias for consistency with train/predict.
      case Native.save(ref, path) do
        :ok ->
          ref

        {:error, reason} ->
          raise List.to_string(reason)
      end
    end
  end

  @doc """
  loads a saved training and returns a model reference
  based on the path.
  """
  def load(path) do
    case Native.load(path) do
      {:ok, ref} ->
        ref

      {:error, reason} ->
        raise reason
    end
  end
end
|
lib/scitree.ex
| 0.893173
| 0.80871
|
scitree.ex
|
starcoder
|
defmodule Cloudinary.Transformation do
@moduledoc """
Handles the cloudinary transformation options.
A cloudinary transformation is represented by a `t:map/0` or a `t:keyword/0`, and a `t:list/0` of
transformations represents chained transformations.
See each type documentation to know available ranges and options, and to recognize how parameters
to be converted to an URL string.
"""
import Cloudinary.Format
import __MODULE__.Color
@typedoc """
A keyword list or a map containing transformation parameters.
"""
@type t :: keyword | map
@typedoc """
A number greater than 0.
"""
@type pos_number :: pos_integer | float
@typedoc """
A number greater than or equal to 0.
"""
@type non_neg_number :: non_neg_integer | float
@doc """
Converts the cloudinary transformation parameters to an URL string.
If the argument is a list of transformation keywords/maps, it returns chained (joined by slash)
transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformations
* https://cloudinary.com/documentation/video_manipulation_and_delivery
* https://cloudinary.com/documentation/audio_transformations
* https://cloudinary.com/documentation/image_transformation_reference
* https://cloudinary.com/documentation/video_transformation_reference
## Example
iex> #{__MODULE__}.to_url_string(
...> overlay: "horses",
...> width: 220,
...> height: 140,
...> crop: :fill,
...> y: 140,
...> x: -110,
...> radius: 20
...> )
"l_horses,w_220,h_140,c_fill,y_140,x_-110,r_20"
iex> #{__MODULE__}.to_url_string(
...> overlay: %{
...> text: "Memories from our trip",
...> font_family: "Parisienne",
...> font_size: 35,
...> font_weight: :bold
...> },
...> color: '990C47',
...> y: 155
...> )
"l_text:Parisienne_35_bold:Memories%20from%20our%20trip,co_rgb:990C47,y_155"
iex> #{__MODULE__}.to_url_string(
...> if: #{__MODULE__}.expression(:illustrative_likelihood > 0.5),
...> width: 120,
...> height: 150,
...> crop: :pad
...> )
"if_ils_gt_0.5,w_120,h_150,c_pad"
iex> #{__MODULE__}.to_url_string(
...> raw_transformation: "w_$small,h_$medium,c_$mode",
...> variables: [small: 90, medium: 135, mode: "crop"]
...> )
"$small_90,$medium_135,$mode_!crop!,w_$small,h_$medium,c_$mode"
iex> #{__MODULE__}.to_url_string([[
...> overlay: "cloudinary_icon",
...> width: 200,
...> gravity: :east,
...> start_offset: 1,
...> end_offset: 11
...> ], [
...> overlay: %{text: "Sample Video", font_family: "arial", font_size: 40},
...> gravity: :south,
...> y: 60,
...> start_offset: 2
...> ]])
"l_cloudinary_icon,w_200,g_east,so_1,eo_11/l_text:arial_40:Sample%20Video,g_south,y_60,so_2"
"""
@spec to_url_string(t | [t]) :: String.t()
def to_url_string(transformation) when is_list(transformation) or is_map(transformation) do
  cond do
    # A single transformation: a map or a keyword list of parameters.
    # Parameters are ordered (see `sort/2`) and joined with commas.
    is_map(transformation) or Keyword.keyword?(transformation) ->
      transformation
      |> Enum.sort(&sort/2)
      |> Enum.map_join(",", &encode/1)

    # A plain list of transformations: chain them with slashes.
    true ->
      Enum.map_join(transformation, "/", &to_url_string/1)
  end
end
# Comparator for `Enum.sort/2`: forces `:if` and `:variables` to the front
# of the encoded string and `:raw_transformation` to the back; every other
# pair is left in its relative order (comparator returns true = "keep").
@spec sort({atom, any}, {atom, any}) :: boolean
defp sort(_, {key, _}) when key in [:if, :variables], do: false
defp sort({:raw_transformation, _}, _), do: false
defp sort(_, _), do: true
@spec encode(
{:if, __MODULE__.Expression.as_boolean()}
| {:variables, keyword | map}
| {:angle, angle}
| {:aspect_ratio, aspect_ratio}
| {:audio_codec, audio_codec}
| {:audio_frequency, audio_frequency}
| {:background, color}
| {:bit_rate, bit_rate}
| {:border, border}
| {:color, color}
| {:color_space, color_space}
| {:crop, crop}
| {:custom_function, custom_function}
| {:custom_pre_function, custom_function}
| {:default_image, default_image}
| {:delay, delay}
| {:density, density}
| {:device_pixel_ratio, device_pixel_ratio}
| {:duration, duration}
| {:effect, effect}
| {:end_offset, offset}
| {:flags, flags | [flags]}
| {:fetch_format, fetch_format}
| {:fps, fps}
| {:gravity, gravity}
| {:height, height}
| {:keyframe_interval, keyframe_interval}
| {:opacity, opacity}
| {:overlay, overlay}
| {:page, page}
| {:quality, quality}
| {:radius, radius}
| {:start_offset, offset}
| {:streaming_profile, streaming_profile}
| {:transformation, transformation}
| {:underlay, underlay}
| {:video_codec, video_codec}
| {:video_sampling, video_sampling}
| {:width, width}
| {:x, x}
| {:y, y}
| {:zoom, zoom}
| {:raw_transformation, String.t()}
) :: String.t()
@typedoc """
The angle parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#angle_parameter
* https://cloudinary.com/documentation/video_transformation_reference#rotating_and_rounding_videos
## Example
iex> #{__MODULE__}.to_url_string(angle: 5)
"a_5"
iex> #{__MODULE__}.to_url_string(angle: [:auto_right, :hflip])
"a_auto_right.hflip"
"""
@type angle ::
number
| :auto_right
| :auto_left
| :ignore
| :vflip
| :hflip
| [:auto_right | :auto_left | :ignore | :vflip | :hflip]
defp encode({:angle, angle}), do: "a_#{__MODULE__.Angle.to_url_string(angle)}"
@typedoc """
The aspect_ratio parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#aspect_ratio_parameter
* https://cloudinary.com/documentation/video_transformation_reference#resizing_and_cropping_videos
## Example
iex> #{__MODULE__}.to_url_string(aspect_ratio: {16, 9})
"ar_16:9"
iex> #{__MODULE__}.to_url_string(aspect_ratio: 1.3)
"ar_1.3"
"""
@type aspect_ratio :: {pos_number, pos_number} | pos_number
# A `{w, h}` pair becomes "ar_w:h"; both sides must be positive numbers.
defp encode({:aspect_ratio, {w, h}})
     when is_number(w) and w > 0 and is_number(h) and h > 0 do
  "ar_#{w}:#{h}"
end

# A bare positive number becomes "ar_<ratio>".
defp encode({:aspect_ratio, ratio}) when is_number(ratio) and ratio > 0, do: "ar_#{ratio}"
@typedoc """
The audio_codec parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#audio_settings
## Example
iex> #{__MODULE__}.to_url_string(audio_codec: :vorbis)
"ac_vorbis"
"""
@type audio_codec :: :none | :aac | :vorbis | :mp3 | :opus
defp encode({:audio_codec, audio_codec})
when audio_codec in [:none, :aac, :vorbis, :mp3, :opus] do
"ac_#{audio_codec}"
end
@typedoc """
The audio_frequency parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#audio_settings
## Example
iex> #{__MODULE__}.to_url_string(audio_frequency: :initial_frequency)
"af_iaf"
iex> #{__MODULE__}.to_url_string(audio_frequency: 44100)
"af_44100"
"""
@type audio_frequency ::
:initial_frequency
| 8000
| 11025
| 16000
| 22050
| 32000
| 37800
| 44056
| 44100
| 47250
| 48000
| 88200
| 96000
| 176_400
| 192_000
# `iaf` is Cloudinary's token for "keep the initial audio frequency".
defp encode({:audio_frequency, :initial_frequency}), do: "af_iaf"

# Only the fixed set of sampling rates supported by Cloudinary is accepted;
# anything else falls through to a FunctionClauseError.
defp encode({:audio_frequency, hz})
     when hz in [
            8000, 11025, 16000, 22050, 32000, 37800, 44056,
            44100, 47250, 48000, 88200, 96000, 176_400, 192_000
          ] do
  "af_#{hz}"
end
@typedoc """
The bit_rate parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#video_settings
## Example
iex> #{__MODULE__}.to_url_string(bit_rate: 12000000)
"br_12m"
iex> #{__MODULE__}.to_url_string(bit_rate: {800000, :constant})
"br_800k:constant"
"""
@type bit_rate :: pos_number | {pos_number, :constant}
defp encode({:bit_rate, bit_rate}), do: "br_#{__MODULE__.BitRate.to_url_string(bit_rate)}"
@typedoc """
The border parameter of transformations.
Options:
* `:width` - the border width in pixels.
* `:style` - an `t:atom/0` of the border style name.
* `:color` - a `t:charlist/0` representing the color hex triplet or a `t:binary/0` color name.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#border_parameter
## Example
iex> #{__MODULE__}.to_url_string(border: [width: 4, color: '483F62'])
"bo_4px_solid_rgb:483F62"
iex> #{__MODULE__}.to_url_string(border: [width: 6, style: :solid, color: "blue"])
"bo_6px_solid_blue"
"""
@type border :: keyword | map
defp encode({:border, options}) when is_list(options) do
encode({:border, Enum.into(options, %{})})
end
defp encode({:border, options}) when is_map(options) do
"bo_#{__MODULE__.Border.to_url_string(options)}"
end
@typedoc """
The color/background parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#color_parameter
* https://cloudinary.com/documentation/image_transformation_reference#background_parameter
* https://cloudinary.com/documentation/video_transformation_reference#resizing_and_cropping_videos
## Example
iex> #{__MODULE__}.to_url_string(color: "red")
"co_red"
iex> #{__MODULE__}.to_url_string(color: '3BA67D68')
"co_rgb:3BA67D68"
iex> #{__MODULE__}.to_url_string(background: "blue")
"b_blue"
iex> #{__MODULE__}.to_url_string(background: '4BC67F68')
"b_rgb:4BC67F68"
"""
@type color :: Cloudinary.Transformation.Color.t()
# Named colors arrive as binaries and are passed through verbatim.
defp encode({:background, color}) when is_binary(color), do: "b_#{color}"
# Hex triplets/quadruplets arrive as charlists; `is_rgb`/`is_rgba` are guard
# macros imported from `Cloudinary.Transformation.Color` (see module head).
defp encode({:background, color}) when is_rgb(color) or is_rgba(color), do: "b_rgb:#{color}"
defp encode({:color, color}) when is_binary(color), do: "co_#{color}"
defp encode({:color, color}) when is_rgb(color) or is_rgba(color), do: "co_rgb:#{color}"
@typedoc """
The color_space parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#color_space_parameter
* https://cloudinary.com/documentation/video_transformation_reference#video_settings
## Example
iex> #{__MODULE__}.to_url_string(color_space: :keep_cmyk)
"cs_keep_cmyk"
iex> #{__MODULE__}.to_url_string(color_space: {:icc, "uploaded.icc"})
"cs_icc:uploaded.icc"
"""
@type color_space ::
:srgb | :tinysrgb | :cmyk | :no_cmyk | :keep_cmyk | :copy | {:icc, String.t()}
defp encode({:color_space, {:icc, id}}) when is_binary(id), do: "cs_icc:#{id}"
defp encode({:color_space, preset})
when preset in [:srgb, :tinysrgb, :cmyk, :no_cmyk, :keep_cmyk, :copy] do
"cs_#{preset}"
end
@typedoc """
The crop parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#crop_parameter
* https://cloudinary.com/documentation/video_transformation_reference#resizing_and_cropping_videos
* https://cloudinary.com/documentation/image_transformations#resizing_and_cropping_images
* https://cloudinary.com/documentation/video_manipulation_and_delivery#resizing_and_cropping_videos
## Example
iex> #{__MODULE__}.to_url_string(crop: :lpad)
"c_lpad"
"""
@type crop ::
:scale
| :fit
| :limit
| :mfit
| :fill
| :lfill
| :pad
| :lpad
| :mpad
| :fill_pad
| :crop
| :thumb
| :imagga_crop
| :imagga_scale
# Crop mode is a closed set of atoms; the value is emitted as-is after "c_".
defp encode({:crop, mode})
     when mode in [
            :scale, :fit, :limit, :mfit, :fill, :lfill, :pad, :lpad,
            :mpad, :fill_pad, :crop, :thumb, :imagga_crop, :imagga_scale
          ] do
  "c_#{mode}"
end
@typedoc """
The custom_function/custom_pre_function parameters in the transformations.
Options:
* `:function_type` - `:remote` or `:wasm`.
* `:source` - the source of function, an URL or a public_id.
## Official documentation
* https://cloudinary.com/documentation/custom_functions
* https://cloudinary.com/documentation/image_transformation_reference#custom_function_parameter
## Example
iex> #{__MODULE__}.to_url_string(custom_function: [function_type: :wasm, source: "example.wasm"])
"fn_wasm:example.wasm"
iex> #{__MODULE__}.to_url_string(custom_function: [function_type: :remote, source: "https://example.com/fun"])
"fn_remote:aHR0cHM6Ly9leGFtcGxlLmNvbS9mdW4="
iex> #{__MODULE__}.to_url_string(custom_pre_function: [function_type: :remote, source: "https://example.com/fun"])
"fn_pre_remote:aHR0cHM6Ly9leGFtcGxlLmNvbS9mdW4="
"""
@type custom_function :: keyword | map
defp encode({param_key, options})
when param_key in [:custom_function, :custom_pre_function] and is_list(options) do
encode({param_key, Enum.into(options, %{})})
end
defp encode({:custom_function, options}) when is_map(options) do
"fn_#{__MODULE__.CustomFunction.to_url_string(options)}"
end
defp encode({:custom_pre_function, %{function_type: :remote} = options}) when is_map(options) do
"fn_pre_#{__MODULE__.CustomFunction.to_url_string(options)}"
end
@typedoc """
The default_image parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#default_image_parameter
## Example
iex> #{__MODULE__}.to_url_string(default_image: "avatar.png")
"d_avatar.png"
"""
@type default_image :: String.t()
defp encode({:default_image, id}) when is_binary(id), do: "d_#{id}"
@typedoc """
The delay parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#delay_parameter
* https://cloudinary.com/documentation/video_transformation_reference#converting_videos_to_animated_images
## Example
iex> #{__MODULE__}.to_url_string(delay: 20)
"dl_20"
"""
@type delay :: number
defp encode({:delay, delay}) when is_number(delay), do: "dl_#{delay}"
@typedoc """
The density parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#density_parameter
## Example
iex> #{__MODULE__}.to_url_string(density: :initial_density)
"dn_idn"
iex> #{__MODULE__}.to_url_string(density: 400)
"dn_400"
"""
@type density :: pos_number | :initial_density
defp encode({:density, :initial_density}), do: "dn_idn"
defp encode({:density, density}) when is_number(density) and density > 0, do: "dn_#{density}"
@typedoc """
The dpr parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#dpr_parameter
* https://cloudinary.com/documentation/video_transformation_reference#video_settings
* https://cloudinary.com/documentation/responsive_images#automatic_pixel_density_detection
## Example
iex> #{__MODULE__}.to_url_string(device_pixel_ratio: :auto)
"dpr_auto"
iex> #{__MODULE__}.to_url_string(device_pixel_ratio: 3.0)
"dpr_3.0"
"""
@type device_pixel_ratio :: pos_number | :auto
defp encode({:device_pixel_ratio, dpr}) when (is_number(dpr) and dpr > 0) or dpr == :auto do
"dpr_#{dpr}"
end
@typedoc """
The duration parameter of transformation.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#trimming_and_overlay_offsets
## Example
iex> #{__MODULE__}.to_url_string(duration: 6.32)
"du_6.32"
iex> #{__MODULE__}.to_url_string(duration: {62, :percents})
"du_62p"
"""
@type duration :: offset
# A `{value, :percents}` pair is emitted with a trailing "p" (0..100).
defp encode({:duration, {percent, :percents}}) when percent <= 100 and percent >= 0 do
  "du_#{percent}p"
end

# A bare non-negative number is a duration in seconds.
defp encode({:duration, seconds}) when is_number(seconds) and seconds >= 0, do: "du_#{seconds}"
@typedoc """
The effect parameter of transformations.
See `#{__MODULE__}.Effect` module documentation for more informations about each effect.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#effect_parameter
* https://cloudinary.com/documentation/video_transformation_reference#adding_video_effects
## Example
iex> #{__MODULE__}.to_url_string(effect: {:art, :sizzle})
"e_art:sizzle"
"""
@type effect :: __MODULE__.Effect.t()
defp encode({:effect, effect}), do: "e_#{__MODULE__.Effect.to_url_string(effect)}"
@typedoc """
The start_offset/end_offset parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#trimming_and_overlay_offsets
## Example
iex> #{__MODULE__}.to_url_string(start_offset: 8.24)
"so_8.24"
iex> #{__MODULE__}.to_url_string(start_offset: {88, :percents})
"so_88p"
iex> #{__MODULE__}.to_url_string(end_offset: 5.32)
"eo_5.32"
iex> #{__MODULE__}.to_url_string(end_offset: {32, :percents})
"eo_32p"
"""
@type offset :: non_neg_number | {0..100 | float, :percents} | :auto
# Percentage offsets (0..100) carry a trailing "p"; bare numbers are seconds.
defp encode({:end_offset, {offset, :percents}}) when offset <= 100 and offset >= 0 do
  "eo_#{offset}p"
end
defp encode({:end_offset, offset}) when is_number(offset) and offset >= 0, do: "eo_#{offset}"
defp encode({:start_offset, {offset, :percents}}) when offset <= 100 and offset >= 0 do
  "so_#{offset}p"
end
defp encode({:start_offset, offset}) when is_number(offset) and offset >= 0, do: "so_#{offset}"
# Automatic start offset ("so_auto"). NOTE(review): the shared `t:offset/0`
# type includes `:auto`, but there is no matching `:auto` clause for
# `:end_offset` — confirm whether that asymmetry is intentional.
defp encode({:start_offset, :auto}), do: "so_auto"
@typedoc """
The fetch_format parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#fetch_format_parameter
* https://cloudinary.com/documentation/video_transformation_reference#video_settings
* https://cloudinary.com/documentation/image_transformations#automatic_format_selection
## Example
iex> #{__MODULE__}.to_url_string(fetch_format: :png)
"f_png"
iex> #{__MODULE__}.to_url_string(fetch_format: :auto)
"f_auto"
"""
@type fetch_format :: Cloudinary.Format.t() | :auto
defp encode({:fetch_format, :auto}), do: "f_auto"
defp encode({:fetch_format, format}) when is_supported(format) do
"f_#{format}"
end
@typedoc """
The flags parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#flags_parameter
* https://cloudinary.com/documentation/video_transformation_reference#video_transformation_flags
## Example
iex> #{__MODULE__}.to_url_string(flags: :awebp)
"fl_awebp"
iex> #{__MODULE__}.to_url_string(flags: [:force_strip, :preserve_transparency])
"fl_force_strip.preserve_transparency"
"""
@type flags ::
:animated
| :any_format
| :attachment
| :apng
| :awebp
| :clip
| :clip_evenodd
| :cutter
| :force_strip
| :force_icc
| :getinfo
| :hlsv3
| :ignore_aspect_ratio
| :immutable_cache
| :keep_attribution
| :keep_dar
| :keep_iptc
| :layer_apply
| :lossy
| :mono
| :no_overflow
| :no_stream
| :preserve_transparency
| :png8
| :png24
| :png32
| :progressive
| :rasterize
| :region_relative
| :relative
| :replace_image
| :sanitize
| :splice
| :streaming_attachment
| :strip_profile
| :text_no_trim
| :text_disallow_overflow
| :tiff8_lzw
| :tiled
| :truncate_ts
| :waveform
defp encode({:flags, flags}), do: "fl_#{__MODULE__.Flags.to_url_string(flags)}"
@typedoc """
The fps parameter of transformations.
Options:
* `:min` - specifies the minimum frame rate.
* `:max` - specifies the maximum frame rate.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#video_settings
## Example
iex> #{__MODULE__}.to_url_string(fps: [min: 25])
"fps_25-"
iex> #{__MODULE__}.to_url_string(fps: [min: 20, max: 30])
"fps_20-30"
"""
@type fps :: keyword | map
# Keyword options are normalized to a map and re-dispatched.
defp encode({:fps, options}) when is_list(options), do: encode({:fps, Enum.into(options, %{})})
# NOTE(review): `min` has no explicit `is_number/1` guard here; a non-numeric
# `min` still fails the clause (the comparison is false under Erlang term
# order), but an explicit guard would be clearer — confirm before tightening.
defp encode({:fps, %{min: min, max: max}})
     when is_number(max) and max > 0 and min <= max and min > 0 do
  "fps_#{min}-#{max}"
end
# Minimum only: open-ended range "fps_<min>-".
defp encode({:fps, %{min: min}}) when is_number(min) and min > 0, do: "fps_#{min}-"
@typedoc """
The gravity parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#gravity_parameter
* https://cloudinary.com/documentation/video_transformation_reference#resizing_and_cropping_videos
* https://cloudinary.com/documentation/image_transformations#control_gravity
* https://cloudinary.com/documentation/video_manipulation_and_delivery#automatic_cropping
## Example
iex> #{__MODULE__}.to_url_string(gravity: :north)
"g_north"
iex> #{__MODULE__}.to_url_string(gravity: {:face, :auto})
"g_face:auto"
"""
@type gravity ::
:north_west
| :north
| :north_east
| :west
| :center
| :east
| :south_west
| :south
| :south_east
| :xy_center
| :liquid
| :ocr_text
| :adv_face
| :adv_faces
| :adv_eyes
| :face
| :faces
| :body
| :custom
| {:face | :faces, :center | :auto}
| {:body, :face}
| {:custom, :face | :faces | :adv_face | :adv_faces}
defp encode({:gravity, gravity}), do: "g_#{__MODULE__.Gravity.to_url_string(gravity)}"
@typedoc """
The height parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#height_parameter
* https://cloudinary.com/documentation/video_transformation_reference#resizing_and_cropping_videos
## Example
iex> #{__MODULE__}.to_url_string(height: 40)
"h_40"
iex> #{__MODULE__}.to_url_string(height: 0.3)
"h_0.3"
"""
@type height :: non_neg_number
# A height is either an absolute pixel count (any non-negative integer) or a
# relative fraction of the original (a float, only accepted when <= 1) —
# hence the `height <= 1 or is_integer(height)` guard.
defp encode({:height, height}) when (height <= 1 or is_integer(height)) and height >= 0 do
  "h_#{height}"
end
@typedoc """
The `keyframe_interval` parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#video_settings
## Example
iex> #{__MODULE__}.to_url_string(keyframe_interval: 0.15)
"ki_0.15"
"""
@type keyframe_interval :: pos_number
defp encode({:keyframe_interval, interval}) when is_number(interval) and interval > 0 do
"ki_#{interval}"
end
@typedoc """
The opacity parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#opacity_parameter
## Example
iex> #{__MODULE__}.to_url_string(opacity: 30)
"o_30"
iex> #{__MODULE__}.to_url_string(opacity: 60)
"o_60"
"""
@type opacity :: 0..100 | float
defp encode({:opacity, opacity}) when opacity <= 100 and opacity >= 0, do: "o_#{opacity}"
@typedoc """
The overlay parameter of transformations.
See `#{__MODULE__}.Layer` module documentation for more informations about available options.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#overlay_parameter
* https://cloudinary.com/documentation/video_transformation_reference#resizing_and_cropping_videos
## Example
iex> #{__MODULE__}.to_url_string(overlay: "badge")
"l_badge"
"""
@type overlay :: String.t() | keyword | map
defp encode({:overlay, options}) when is_list(options) do
encode({:overlay, Enum.into(options, %{})})
end
defp encode({:overlay, overlay}) when is_map(overlay) or is_binary(overlay) do
"l_#{__MODULE__.Layer.to_url_string(overlay)}"
end
@typedoc """
The page parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#page_parameter
* https://cloudinary.com/documentation/animated_images#deliver_a_single_frame
* https://cloudinary.com/documentation/paged_and_layered_media#deliver_a_pdf_or_selected_pages_of_a_pdf
* https://cloudinary.com/documentation/paged_and_layered_media#delivering_photoshop_images
## Example
iex> #{__MODULE__}.to_url_string(page: 2)
"pg_2"
iex> #{__MODULE__}.to_url_string(page: 6..8)
"pg_6-8"
iex> #{__MODULE__}.to_url_string(page: "-11;15")
"pg_-11;15"
iex> #{__MODULE__}.to_url_string(page: [3, 5..7, "9-"])
"pg_3;5-7;9-"
iex> #{__MODULE__}.to_url_string(page: [name: "main"])
"pg_name:main"
iex> #{__MODULE__}.to_url_string(page: [name: ["record_cover", "Shadow"]])
"pg_name:record_cover:Shadow"
"""
@type page ::
integer
| Range.t()
| String.t()
| [integer | Range.t() | String.t()]
| [name: String.t() | [String.t()]]
| %{name: String.t() | [String]}
# Single page number or a raw page-spec string.
defp encode({:page, page}) when is_integer(page) or is_binary(page), do: "pg_#{page}"
# A range becomes "first-last".
defp encode({:page, first..last}), do: "pg_#{first}-#{last}"
# `[name: ...]` keyword form — must be matched BEFORE the generic list clause
# below, otherwise it would be treated as a list of page specs.
defp encode({:page, name: name}), do: encode({:page, %{name: name}})
defp encode({:page, %{name: name}}) when is_binary(name), do: "pg_name:#{name}"
# Multiple layer names are joined with ":"; the guard inside the anonymous
# function asserts every element is a binary.
defp encode({:page, %{name: names}}) when is_list(names) do
  "pg_name:#{Enum.map_join(names, ":", fn name when is_binary(name) -> name end)}"
end
# A heterogeneous list of integers, ranges, and strings, joined with ";".
defp encode({:page, pages}) when is_list(pages) do
  "pg_#{
    Enum.map_join(pages, ";", fn
      page when is_integer(page) or is_binary(page) -> page
      first..last -> "#{first}-#{last}"
    end)
  }"
end
@typedoc """
The quality parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#quality_parameter
* https://cloudinary.com/documentation/video_transformation_reference#video_settings
* https://cloudinary.com/documentation/image_transformations#adjusting_image_quality
* https://cloudinary.com/documentation/video_manipulation_and_delivery#quality_control
## Example
iex> #{__MODULE__}.to_url_string(quality: 20)
"q_20"
iex> #{__MODULE__}.to_url_string(quality: {20, chroma: 444})
"q_20:444"
iex> #{__MODULE__}.to_url_string(quality: :jpegmini)
"q_jpegmini"
iex> #{__MODULE__}.to_url_string(quality: {:auto, :eco})
"q_auto:eco"
iex> #{__MODULE__}.to_url_string(quality: {70, max_quantization: 80})
"q_70:qmax_80"
"""
@type quality ::
1..100
| float
| :jpegmini
| :auto
| {:auto, :best | :good | :eco | :low}
| {1..100 | float, chroma: 420 | 444}
| {1..100 | float, max_quantization: 1..100 | float}
# Numeric quality (1..100) or a preset atom. The numeric comparisons remain
# legal guards when `quality` is an atom: in Erlang term order numbers sort
# before atoms, so e.g. `:auto <= 100` is simply false and the `in` check
# on the right of `or` takes over.
defp encode({:quality, quality})
     when (quality <= 100 and quality >= 1) or quality in [:jpegmini, :auto] do
  "q_#{quality}"
end
# Automatic quality with an explicit level, e.g. "q_auto:eco".
defp encode({:quality, {:auto, level}}) when level in [:best, :good, :eco, :low] do
  "q_auto:#{level}"
end
# Quality with chroma subsampling override (420 or 444).
defp encode({:quality, {quality, chroma: chroma}})
     when quality <= 100 and quality >= 1 and chroma in [420, 444] do
  "q_#{quality}:#{chroma}"
end
# Quality with a maximum quantization percentage, e.g. "q_70:qmax_80".
defp encode({:quality, {quality, max_quantization: qmax}})
     when quality <= 100 and quality >= 1 and qmax <= 100 and qmax >= 1 do
  "q_#{quality}:qmax_#{qmax}"
end
@typedoc """
The radius parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#radius_parameter
* https://cloudinary.com/documentation/video_transformation_reference#rotating_and_rounding_videos
## Example
iex> #{__MODULE__}.to_url_string(radius: 20)
"r_20"
iex> #{__MODULE__}.to_url_string(radius: {20, 0, 40, 40})
"r_20:0:40:40"
iex> #{__MODULE__}.to_url_string(radius: :max)
"r_max"
"""
@type radius ::
non_neg_number
| {non_neg_number}
| {non_neg_number, non_neg_number}
| {non_neg_number, non_neg_number, non_neg_number}
| {non_neg_number, non_neg_number, non_neg_number, non_neg_number}
| :max
# One value (bare or 1-tuple): the same radius for all four corners.
defp encode({:radius, r}) when is_number(r) and r >= 0, do: "r_#{r}"
defp encode({:radius, {r}}) when is_number(r) and r >= 0, do: "r_#{r}"

# Two values: top-left/bottom-right pair, then top-right/bottom-left pair.
defp encode({:radius, {tl_br, tr_bl}})
     when is_number(tl_br) and tl_br >= 0 and is_number(tr_bl) and tr_bl >= 0 do
  "r_#{tl_br}:#{tr_bl}"
end

# Three values: top-left, top-right/bottom-left, bottom-right.
defp encode({:radius, {tl, tr_bl, br}})
     when is_number(tl) and tl >= 0 and is_number(tr_bl) and tr_bl >= 0 and
            is_number(br) and br >= 0 do
  "r_#{tl}:#{tr_bl}:#{br}"
end

# Four values: one radius per corner, clockwise from top-left.
defp encode({:radius, {tl, tr, br, bl}})
     when is_number(tl) and tl >= 0 and is_number(tr) and tr >= 0 and
            is_number(br) and br >= 0 and is_number(bl) and bl >= 0 do
  "r_#{tl}:#{tr}:#{br}:#{bl}"
end

# "r_max" rounds to a full circle/ellipse.
defp encode({:radius, :max}), do: "r_max"
@typedoc """
The streaming_profile parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#video_settings
* https://cloudinary.com/documentation/video_manipulation_and_delivery#predefined_streaming_profiles
* https://cloudinary.com/documentation/admin_api#get_adaptive_streaming_profiles
* https://cloudinary.com/documentation/admin_api#create_a_streaming_profile
## Example
iex> #{__MODULE__}.to_url_string(streaming_profile: "full_hd")
"sp_full_hd"
"""
@type streaming_profile :: String.t()
defp encode({:streaming_profile, profile}) when is_binary(profile), do: "sp_#{profile}"
@typedoc """
The named transformation parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#transformation_parameter
## Example
iex> #{__MODULE__}.to_url_string(transformation: "media_lib_thumb")
"t_media_lib_thumb"
"""
@type transformation :: String.t()
defp encode({:transformation, name}) when is_binary(name), do: "t_#{name}"
@typedoc """
The underlay parameter of transformations.
See `#{__MODULE__}.Layer` module documentation for more informations about available options.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#underlay_parameter
## Example
iex> #{__MODULE__}.to_url_string(underlay: "background_image_1")
"u_background_image_1"
iex> #{__MODULE__}.to_url_string(underlay: public_id: "background_image_2")
"u_background_image_2"
"""
@type underlay :: String.t() | keyword | map
defp encode({:underlay, options}) when is_list(options) do
encode({:underlay, Enum.into(options, %{})})
end
defp encode({:underlay, underlay}) when is_map(underlay) or is_binary(underlay) do
"u_#{__MODULE__.Layer.to_url_string(underlay)}"
end
@typedoc """
The video_codec parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#video_settings
## Example
iex> #{__MODULE__}.to_url_string(video_codec: :h265)
"vc_h265"
iex> #{__MODULE__}.to_url_string(video_codec: {:h264, "baseline"})
"vc_h264:baseline"
iex> #{__MODULE__}.to_url_string(video_codec: {:h264, "baseline", "3.1"})
"vc_h264:baseline:3.1"
"""
@type video_codec ::
:vp9
| :vp8
| :prores
| :h264
| :h265
| :theora
| :auto
| {:h264 | :h265, String.t()}
| {:h264 | :h265, String.t(), String.t()}
# A bare codec atom from the supported set, e.g. "vc_h265".
defp encode({:video_codec, codec})
     when codec in [:vp9, :vp8, :prores, :h264, :h265, :theora, :auto] do
  "vc_#{codec}"
end

# Codec plus profile — only h264/h265 take profiles, e.g. "vc_h264:baseline".
defp encode({:video_codec, {codec, profile}})
     when codec in [:h264, :h265] and is_binary(profile) do
  "vc_#{codec}:#{profile}"
end

# Codec, profile, and level, e.g. "vc_h264:baseline:3.1".
defp encode({:video_codec, {codec, profile, level}})
     when codec in [:h264, :h265] and is_binary(profile) and is_binary(level) do
  "vc_#{codec}:#{profile}:#{level}"
end
@typedoc """
The video_sampling parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/video_transformation_reference#converting_videos_to_animated_images
## Example
iex> #{__MODULE__}.to_url_string(video_sampling: 20)
"vs_20"
iex> #{__MODULE__}.to_url_string(video_sampling: {2.3, :seconds})
"vs_2.3s"
"""
@type video_sampling :: pos_integer | {pos_number, :seconds}
defp encode({:video_sampling, {every, :seconds}}) when is_number(every) and every > 0 do
"vs_#{every}s"
end
defp encode({:video_sampling, total}) when is_integer(total) and total > 0, do: "vs_#{total}"
@typedoc """
The width parameter of transformations.
Options for the `:auto` setting:
* `:rounding_step` - the step to round width up.
* `:width` - the width specification overriding the client header.
* `:breakpoints` - if true, set the default breakpoints to round width up. Or, you can pass options
with a keyword list or a map. Breakpoints options:
* `:min_width`
* `:max_width`
* `:bytes_step`
* `:max_images`
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#width_parameter
* https://cloudinary.com/documentation/video_transformation_reference#resizing_and_cropping_videos
* https://cloudinary.com/documentation/responsive_images#automatic_image_width
* https://cloudinary.com/documentation/responsive_images#automatic_image_width_using_optimal_responsive_breakpoints
## Example
iex> #{__MODULE__}.to_url_string(width: 600)
"w_600"
iex> #{__MODULE__}.to_url_string(width: 1.22)
"w_1.22"
iex> #{__MODULE__}.to_url_string(width: :auto)
"w_auto"
iex> #{__MODULE__}.to_url_string(width: {:auto, rounding_step: 50})
"w_auto:50"
iex> #{__MODULE__}.to_url_string(width: {:auto, rounding_step: 50, client_width: 87})
"w_auto:50:87"
iex> #{__MODULE__}.to_url_string(width: {:auto, breakpoints: true})
"w_auto:breakpoints"
iex> #{__MODULE__}.to_url_string(width: {:auto, breakpoints: [min_width: 70, max_width: 1200, bytes_step: 10_000, max_images: 30]})
"w_auto:breakpoints_70_1200_10_30"
"""
@type width :: non_neg_number | :auto | {:auto, keyword | map}
defp encode({:width, {:auto, options}}) when is_list(options) do
encode({:width, {:auto, Enum.into(options, %{})}})
end
defp encode({:width, {:auto, %{breakpoints: breakpoints} = opts}}) when is_list(breakpoints) do
encode({:width, {:auto, %{opts | breakpoints: Enum.into(breakpoints, %{})}}})
end
defp encode({:width, width}), do: "w_#{__MODULE__.Width.to_url_string(width)}"
@typedoc """
The x parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#x_parameter
## Example
iex> #{__MODULE__}.to_url_string(x: 40)
"x_40"
iex> #{__MODULE__}.to_url_string(x: 0.3)
"x_0.3"
"""
@type x :: number
defp encode({:x, x}) when is_number(x), do: "x_#{x}"
@typedoc """
The y parameter of transformations.
## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#y_parameter
## Example
iex> #{__MODULE__}.to_url_string(y: 40)
"y_40"
iex> #{__MODULE__}.to_url_string(y: 0.3)
"y_0.3"
"""
@type y :: number
defp encode({:y, y}) when is_number(y), do: "y_#{y}"
@typedoc """
The zoom parameter of transformations.

## Official documentation
* https://cloudinary.com/documentation/image_transformation_reference#zoom_parameter

## Example
    iex> #{__MODULE__}.to_url_string(zoom: 1.2)
    "z_1.2"

    iex> #{__MODULE__}.to_url_string(zoom: 2)
    "z_2"
"""
@type zoom :: number

# Renders the zoom factor, e.g. "z_1.2".
defp encode({:zoom, zoom}) when is_number(zoom), do: "z_#{zoom}"

# Passes a caller-supplied raw transformation component through unchanged.
defp encode({:raw_transformation, raw_transformation}), do: raw_transformation

# Prefixes a conditional transformation's condition with "if_".
defp encode({:if, condition}), do: "if_#{condition}"

# Renders user-defined variable assignments as a comma-separated list of
# "$name_value" components, delegating value rendering to Expression.build/1.
defp encode({:variables, variables}) do
  Enum.map_join(variables, ",", fn
    {name, value} -> "$#{name}_#{__MODULE__.Expression.build(value)}"
  end)
end
@doc """
Builds an expression with operators and custom variables that are evaluated on the cloud.

Expressions can be used in several transformation parameters.

Atoms are treated as predefined values.

    #{__MODULE__}.expression(:initial_height / :initial_width)

Variable names are treated as user defined variables.

    #{__MODULE__}.expression(smallsize * 2)

If you want to use local variables, use the pin operator.

    #{__MODULE__}.expression(^local_variable * :initial_height)

## Official documentation
* https://cloudinary.com/documentation/conditional_transformations
* https://cloudinary.com/documentation/user_defined_variables
* https://cloudinary.com/documentation/video_conditional_expressions
* https://cloudinary.com/documentation/video_user_defined_variables

## Example
    iex> #{__MODULE__}.expression(:initial_width / :width)
    %#{__MODULE__}.Expression{numerable: true, source: "iw_div_w"}

    iex> #{__MODULE__}.expression(:face_count * unit)
    %#{__MODULE__}.Expression{numerable: true, source: "fc_mul_$unit"}

    iex> dynamic_height = 200 * 2
    ...> #{__MODULE__}.expression(^dynamic_height * :aspect_ratio)
    %#{__MODULE__}.Expression{numerable: true, source: "400_mul_ar"}

    iex> #{__MODULE__}.expression(:initial_width == 500)
    %#{__MODULE__}.Expression{booleanable: true, source: "iw_eq_500"}

    iex> #{__MODULE__}.expression(["tag1", "tag2"] in :tags)
    %#{__MODULE__}.Expression{booleanable: true, source: "!tag1:tag2!_in_tags"}

    iex> #{__MODULE__}.expression(:context["productType"] not in :page_names)
    %#{__MODULE__}.Expression{booleanable: true, source: "ctx:!productType!_nin_pgnames"}

    iex> #{__MODULE__}.expression(:context["name"] != "John")
    %#{__MODULE__}.Expression{booleanable: true, source: "ctx:!name!_ne_!John!"}
"""
@spec expression(Macro.t()) :: Macro.t()
# Compile-time rewrite: the Elixir AST is handed to the Expression module,
# which turns it into a Cloudinary expression struct.
defmacro expression(ast) do
  __MODULE__.Expression.traverse(ast)
end
end
|
lib/cloudinary/transformation.ex
| 0.936496
| 0.427875
|
transformation.ex
|
starcoder
|
defmodule Day13 do
  @moduledoc """
  Entry points for the Advent of Code 2020 day 13 puzzle.
  """

  @doc "Solves part 1 against the puzzle input file."
  def run_part1 do
    SolutionPart1.run(AOCHelper.read_input())
  end

  @doc "Solves part 2 against the puzzle input file."
  def run_part2 do
    SolutionPart2.run(AOCHelper.read_input())
  end

  @doc "Runs part 2 against the sample from the puzzle description."
  def debug_sample do
    SolutionPart2.run([
      "939",
      "7,13,x,x,59,x,31,19"
    ])
  end
end
defmodule SolutionPart1 do
  @moduledoc """
  Part 1: find the first bus departing at or after the arrival timestamp
  and multiply its id by the number of minutes waited.
  """

  @doc """
  Solves part 1 for the raw two-line puzzle input.

  Returns `wait_minutes * bus_id` for the earliest catchable bus.
  """
  def run(input) do
    %{bus_lines: bus_lines, timestamp: timestamp} = Parser.parse(input)
    {wait, line} = find_next_bus(timestamp, bus_lines)
    wait * line
  end

  # Returns the {wait, bus_id} pair with the smallest wait (tuples compare
  # element-wise, so the minimum is the shortest wait).
  #
  # The wait until the next departure of `line` is (-timestamp) mod line.
  # Integer arithmetic replaces the original float-based
  # `floor(timestamp / line)` / `ceil(...)` computation, which could lose
  # precision for large timestamps.
  defp find_next_bus(timestamp, bus_lines) do
    bus_lines
    |> Enum.map(fn line -> {Integer.mod(-timestamp, line), line} end)
    |> Enum.min()
  end
end
defmodule SolutionPart2 do
  @moduledoc """
  Part 2: find the earliest timestamp at which the buses depart at
  successive minute offsets matching their positions in the schedule.
  """

  @doc """
  Solves part 2 for the raw two-line puzzle input.
  """
  def run(input) do
    %{bus_lines: bus_lines} = Parser.parse_without_filter(input)
    next_sequence(bus_lines)
  end

  @doc """
  Sieve-style search over the bus constraints: each bus pins `t + index`
  to a multiple of its id, and the step grows to the lcm of the ids
  satisfied so far, so each new constraint is resolved without revisiting
  earlier ones.

  Fast solution taken from
  https://elixirforum.com/t/advent-of-code-2020-day-13/36180/5
  """
  def next_sequence(busses) do
    busses
    |> Enum.with_index()
    |> Enum.reduce({0, 1}, &add_to_sequence/2)
    |> elem(0)
  end

  # Out-of-service slots (:x) constrain nothing.
  defp add_to_sequence({:x, _index}, state), do: state

  # Advance t in steps of the current period until `bus` departs at
  # t + index, then widen the step to lcm(step, bus).
  # (Removed a leftover `IO.inspect` that printed on every iteration of
  # this hot loop.)
  defp add_to_sequence({bus, index}, {t, step}) do
    if Integer.mod(t + index, bus) == 0 do
      {t, lcm(step, bus)}
    else
      add_to_sequence({bus, index}, {t + step, step})
    end
  end

  defp lcm(a, b) do
    div(a * b, Integer.gcd(a, b))
  end

  # Naive brute-force solution kept for reference only (takes a very long
  # time and is not called anywhere). Unused bindings and dead commented-out
  # exploration code were removed to silence compiler warnings.
  defp find_timestamp(bus_lines, range) do
    indexed_bus_lines = Enum.with_index(bus_lines)

    Enum.filter(range, fn time ->
      Enum.all?(indexed_bus_lines, fn {line_number, offset} ->
        case line_number do
          :x ->
            true

          number ->
            # NOTE(review): assumes offset < line_number, otherwise the
            # right-hand rem/2 goes negative — acceptable for this
            # reference-only implementation.
            rem(time, number) == rem(line_number - offset, line_number)
        end
      end)
    end)
  end
end
defmodule Parser do
  @moduledoc """
  Parses the two-line day 13 puzzle input: a timestamp followed by a
  comma-separated bus schedule.
  """

  @doc """
  Parses the input, keeping only numeric bus ids (out-of-service "x"
  entries are dropped).
  """
  def parse(input) do
    [time_line, schedule_line] = input

    bus_ids =
      schedule_line
      |> String.split(",")
      |> Enum.reject(&(&1 == "x"))
      |> Enum.map(&String.to_integer/1)

    %{timestamp: String.to_integer(time_line), bus_lines: bus_ids}
  end

  @doc """
  Parses the input, keeping every schedule slot: numeric entries become
  integers and "x" entries become the atom `:x`.
  """
  def parse_without_filter(input) do
    [time_line, schedule_line] = input

    bus_ids =
      for token <- String.split(schedule_line, ",") do
        case Integer.parse(token) do
          {value, _rest} -> value
          :error -> :x
        end
      end

    %{timestamp: String.to_integer(time_line), bus_lines: bus_ids}
  end
end
defmodule AOCHelper do
  @moduledoc """
  Shared helper for reading the puzzle input.
  """

  @doc """
  Reads `input.txt` from the working directory and returns its lines,
  split on newlines with Windows carriage returns stripped.
  """
  def read_input do
    contents = File.read!("input.txt")

    for line <- String.split(contents, "\n") do
      String.replace(line, "\r", "")
    end
  end
end
""
|
aoc-2020/day13/lib/day13.ex
| 0.585101
| 0.417331
|
day13.ex
|
starcoder
|
defmodule Util do
  @moduledoc """
  Introduces a `@docp` attribute and an error monad, plus assorted small
  helpers. The different usages are compatible with each other!

  On one hand it makes it possible to give private functions a `@docp`
  attribute. Usage:

  ```elixir
  defmodule Some.Module do
    # All kinds of code.
    # All kinds of code.
    # All kinds of code.
    # And at the very end:
    use Util
    # defmodule
  end
  ```

  Plus: an error monad (of sorts), usable instead of the `with` special form.
  Usage:

  ```elixir
  defmodule Some.Module do
    import Util

    def xxx do
      # ...
      wmonad do
        wo(:ok) = if condition, do: wo(:ok), else: wf("failed_for_some_reason")
      end
      # ...
    end

    # Odds and ends:
    # - `wfix(x, default)`: `x` keeps its value when it is not `nil`, otherwise `default`.
    # - `wife(x, default)`: `x` when it is not `nil`, otherwise `default`;
    #   i.e. `if condi, do: clause, else: var`.

    # defmodule
  end
  ```
  """

  defmacro __using__([]) do
    quote do
      @docp "placeholder"
      @doc "Needed only to silence the docp warning."
      @spec docp() :: String.t()
      def docp, do: @docp
    end
  end

  @typedoc "Error monad. The term attached to `:error` can be anything."
  @type w(a) :: {:ok, a} | {:error, any}

  @doc """
  On error, appends an extra error-message fragment, joined with an
  underscore.

  Parameters:
  - `x`: the monad.
  - `y`: when `x == {:error, reason}`, `y` is appended to `reason`.

  Return: `x`, possibly modified when it carried an error.
  """
  def wext(x, y) do
    case x do
      {:ok, _} -> x
      {:error, hiba} -> {:error, "#{hiba}_#{y}"}
    end
  end

  @doc """
  Corresponds to Haskell's `return(x)` operation.
  """
  # A 2-tuple is a literal in quoted code, so returning it from the macro
  # expands directly to `{:ok, x}`.
  defmacro wo(x) do
    {:ok, x}
  end

  @doc """
  Corresponds to the `fail(x)` operation.

  Parameters:
  - `x`: the payload carried in the failure.

  Return: as a macro, `{:error, x}`.
  """
  @spec wf(String.t()) :: Macro.t()
  defmacro wf(x) do
    quote do
      {:error, unquote(x)}
    end
  end

  @doc """
  Simple syntactic element standing in for Haskell's `do` notation.

  Usage:

  ```elixir
  wmonad do
    statements
    wo(z) = expr
  end
  ```

  Notes:
  - On a match error, the term that failed to match is returned.
  """
  defmacro wmonad(do: clause) do
    quote do
      try do
        unquote(clause)
      rescue
        err in [MatchError] ->
          %MatchError{term: t} = err
          t
      catch
        :badmatch, x -> x
      end
    end

    # defmacro wmonad
  end

  @doc """
  Slightly extended syntactic element standing in for Haskell's `do`
  notation.

  Usage:

  ```elixir
  wmonad do
    statements
    wo(z) = expr
  catch
    {:error, x} -> errormsg(x)
    {:ok, x} -> further_operations
  end
  ```

  Notes:
  - On a match error, the term that failed to match is returned (and then
    piped into the catch branches).
  """
  defmacro wmonad(do: clause, catch: branches) do
    quote do
      try do
        unquote(clause)
      rescue
        err in [MatchError] ->
          %MatchError{term: t} = err
          t
      catch
        :badmatch, x -> x
      end
      |> case do
        unquote(branches)
      end
    end
  end

  @doc """
  ```elixir
  Util.wmatch([title, folder_id], params, BAD_SAND_VOTE_COLLECTION_PARAMS)
  ```

  Checks whether `params` matches `[title, folder_id]`.
  If it does, execution continues, and thanks to the match the variables
  are bound to the corresponding values.
  If it does not, the `Util.wf(BAD_SAND_VOTE_COLLECTION_PARAMS)` error is
  returned.
  """
  defmacro wmatch(target, term, error_term) do
    quote do
      Util.wo(unquote(target)) =
        case unquote(term) do
          unquote(target) -> Util.wo(unquote(target))
          _ -> Util.wf(unquote(error_term))
        end
    end
  end

  @doc """
  ```elixir
  Util.wcond(pr == nil, BAD_SAND_VOTE_COLLECTION_FOLDER)
  ```

  When `pr == nil`, produces `Util.wf(BAD_SAND_VOTE_COLLECTION_FOLDER)`
  (which fails the `:ok` match and short-circuits the surrounding wmonad).
  """
  defmacro wcond(condition, error_term) do
    quote do
      :ok = if unquote(condition), do: Util.wf(unquote(error_term)), else: :ok
    end
  end

  @doc """
  ```elixir
  Util.wcall(something(param))
  # equivalent to:
  :ok = something(param)
  ```

  where `something(param)` returns either `:ok` or `{:error, term}`.
  """
  defmacro wcall(call) do
    quote do
      :ok = unquote(call)
    end
  end

  @doc """
  `wfix(x, default)`: `x` keeps its value when it is not `nil`, otherwise
  it is rebound to `default`.
  """
  @spec wfix(any, any) :: Macro.t()
  defmacro wfix(x, default) do
    quote do
      unquote(x) = if unquote(x) == nil, do: unquote(default), else: unquote(x)
    end
  end

  @doc """
  Equivalent:

  ```elixir
  var = if condi, do: clause, else: var
  Util.wif var, condi, do: clause
  ```
  """
  defmacro wif(var, condi, do: clause) do
    quote do
      unquote(var) =
        if unquote(condi) do
          unquote(clause)
        else
          unquote(var)
        end
    end
  end

  @doc """
  Equivalent:

  ```elixir
  if condi, do: clause, else: var
  Util.wife var, condi, do: clause
  ```
  """
  defmacro wife(var, condi, do: clause) do
    quote do
      if unquote(condi) do
        unquote(clause)
      else
        unquote(var)
      end
    end
  end

  @doc """
  Inserts a piece of text that is subsequently ignored.
  """
  @spec comment(String.t()) :: Macro.t()
  defmacro comment(_text) do
  end

  @doc """
  Defines a postfix assignment operator.

  ```elixir
  defmodule Something do
    require Util
    Util.arrow_assignment()

    def shitty_function(x, y, z) do
      # These are equivalent.
      var = expr
      expr >>> var
    end
  end
  ```
  """
  defmacro arrow_assignment() do
    quote do
      defmacrop expr >>> var do
        quote do
          unquote(var) = unquote(expr)
        end
      end

      # Also defines `mpath`: the caller's module name with its last
      # segment dropped.
      defmacrop mpath do
        __MODULE__ |> Module.split() |> Enum.reverse() |> tl |> Enum.reverse() |> Module.concat()
      end
    end
  end

  # defmodule
end
|
lib/util/x.ex
| 0.620966
| 0.745977
|
x.ex
|
starcoder
|
defmodule Want do
  @moduledoc """
  Type conversion library for Elixir and Erlang.
  """

  @doc """
  Convert a value to a string.

  ## Options

    * `:max` - Maximum allowable string length.
    * `:min` - Minimum allowable string length.
    * `:decode` - Currently only supports :uri; runs URI.decode on the input value
    * `:matches` - The resulting string must match the given regex.
    * `:default` - If conversion fails, this value should be returned instead.

  ## Examples

      iex> Want.string(1)
      {:ok, "1"}

      iex> Want.string({:a, :b}, default: "string")
      {:ok, "string"}

      iex> Want.string(:hello, max: 3)
      {:error, "String length exceeds maximum of 3."}

      iex> Want.string("hello%20world", decode: :uri)
      {:ok, "hello world"}

      iex> Want.string(:a, min: 3)
      {:error, "String length below minimum of 3."}

      iex> Want.string(:a, matches: ~r/a/)
      {:ok, "a"}
  """
  def string(value),
    do: string(value, [])

  # A bare (non-list) second argument is shorthand for `default: value`.
  def string(value, default) when not is_list(default),
    do: string(value, default: default)

  def string(value, opts),
    do: maybe_default(Want.String.cast(value, opts), opts)

  @doc """
  Same as `string/2`, but raises `ArgumentError` when conversion fails and
  no `:default` option is given.
  """
  def string!(value),
    do: string!(value, [])

  def string!(value, default) when not is_list(default),
    do: string!(value, default: default)

  def string!(value, opts),
    do: maybe_default!(Want.String.cast(value, opts), opts)

  @doc """
  Convert a value to an integer.

  ## Options

    * `:max` - Maximum allowable integer value.
    * `:min` - Minimum allowable integer value.
    * `:default` - If conversion fails, this value should be returned instead.

  ## Examples

      iex> Want.integer(1.0)
      {:ok, 1}

      iex> Want.integer({:a, :b}, default: 1)
      {:ok, 1}

      iex> Want.integer(:'5', max: 3)
      {:error, "Integer value exceeds maximum 3."}

      iex> Want.integer("1", min: 3)
      {:error, "Integer value below minimum 3."}
  """
  def integer(value),
    do: integer(value, [])

  # A bare (non-list) second argument is shorthand for `default: value`.
  def integer(value, default) when not is_list(default),
    do: integer(value, default: default)

  def integer(value, opts),
    do: maybe_default(Want.Integer.cast(value, opts), opts)

  @doc """
  Same as `integer/2`, but raises `ArgumentError` when conversion fails and
  no `:default` option is given.
  """
  def integer!(value),
    do: integer!(value, [])

  def integer!(value, default) when not is_list(default),
    do: integer!(value, default: default)

  def integer!(value, opts),
    do: maybe_default!(Want.Integer.cast(value, opts), opts)

  @doc """
  Convert a value to a float.

  ## Options

    * `:max` - Maximum allowable float value.
    * `:min` - Minimum allowable float value.
    * `:default` - If conversion fails, this value should be returned instead.

  ## Examples

      iex> Want.float(1.0)
      {:ok, 1.0}

      iex> Want.float({:a, :b}, default: 1.0)
      {:ok, 1.0}

      iex> Want.float(:'5.0', max: 3.0)
      {:error, "Float value exceeds maximum 3.0."}

      iex> Want.float("1.0", min: 3.0)
      {:error, "Float value below minimum 3.0."}
  """
  def float(value),
    do: float(value, [])

  # A bare (non-list) second argument is shorthand for `default: value`.
  def float(value, default) when not is_list(default),
    do: float(value, default: default)

  def float(value, opts),
    do: maybe_default(Want.Float.cast(value, opts), opts)

  @doc """
  Same as `float/2`, but raises `ArgumentError` when conversion fails and
  no `:default` option is given.
  """
  def float!(value),
    do: float!(value, [])

  def float!(value, default) when not is_list(default),
    do: float!(value, default: default)

  def float!(value, opts),
    do: maybe_default!(Want.Float.cast(value, opts), opts)

  @doc """
  Cast a value to an atom.

  ## Options

    * `:exists` - If true, only convert to an atom if a matching atom already exists.
    * `:default` - If conversion fails, this value should be returned instead.

  ## Examples

      iex> Want.atom("hello")
      {:ok, :hello}

      iex> Want.atom(1.0)
      {:ok, :'1.0'}

      iex> Want.atom({:a, :b})
      {:error, "Failed to convert value {:a, :b} to atom."}

      iex> Want.atom({:a, :b}, default: :c)
      {:ok, :c}

      iex> Want.atom("10", exists: true)
      {:error, "An atom matching the given value does not exist."}
  """
  def atom(value),
    do: atom(value, [])

  # A bare (non-list) second argument is shorthand for `default: value`.
  def atom(value, default) when not is_list(default),
    do: atom(value, default: default)

  def atom(value, opts),
    do: maybe_default(Want.Atom.cast(value, opts), opts)

  @doc """
  Same as `atom/2`, but raises `ArgumentError` when conversion fails and
  no `:default` option is given.
  """
  def atom!(value),
    do: atom!(value, [])

  def atom!(value, default) when not is_list(default),
    do: atom!(value, default: default)

  def atom!(value, opts),
    do: maybe_default!(Want.Atom.cast(value, opts), opts)

  @doc """
  Cast an incoming value to a datetime.

  ## Examples

      iex> Want.datetime("2020-02-06 18:23:55.850218Z")
      {:ok, ~U[2020-02-06 18:23:55.850218Z]}

      iex> Want.datetime({{2020, 02, 06}, {18, 23, 55}})
      {:ok, ~U[2020-02-06 18:23:55Z]}

      iex> Want.datetime({{2020, 02, 06}, {18, 23, 55, 123456}})
      {:ok, ~U[2020-02-06 18:23:55.123456Z]}
  """
  def datetime(value),
    do: datetime(value, [])

  # A bare (non-list) second argument is shorthand for `default: value`.
  def datetime(value, default) when not is_list(default),
    do: datetime(value, default: default)

  def datetime(value, opts),
    do: maybe_default(Want.DateTime.cast(value, opts), opts)

  @doc """
  Same as `datetime/2`, but raises `ArgumentError` when conversion fails and
  no `:default` option is given.
  """
  def datetime!(value),
    do: datetime!(value, [])

  def datetime!(value, default) when not is_list(default),
    do: datetime!(value, default: default)

  def datetime!(value, opts),
    do: maybe_default!(Want.DateTime.cast(value, opts), opts)

  @doc """
  Cast an input to a sort tuple.

  ## Options

    * `:fields` - List of allowed sort fields. Casting will fail if the input doesn't match any of these.
    * `:default` - If conversion fails, this value should be returned instead.

  ## Examples

      iex> Want.sort("inserted_at:desc", fields: [:inserted_at, :id, :name])
      {:ok, {:inserted_at, :desc}}

      iex> Want.sort("updated_at", fields: [:inserted_at, :id], default: {:id, :asc})
      {:ok, {:id, :asc}}

      iex> Want.sort("updated_at:asc", [])
      {:error, "You must specify a list of valid sort fields using the :fields option."}
  """
  def sort(input, opts),
    do: maybe_default(Want.Sort.cast(input, opts), opts)

  @doc """
  Same as `sort/2`, but raises `ArgumentError` when casting fails and no
  `:default` option is given.
  """
  def sort!(input, opts),
    do: maybe_default!(Want.Sort.cast(input, opts), opts)

  @doc """
  Cast an input value to an enum. The input must loosely match one of the allowed values in order for
  the cast to succeed.

  ## Options

    * `:valid` - List of valid enum values. The input must loosely match one of these.
    * `:default` - If conversion fails, this value should be returned instead.

  ## Examples

      iex> Want.enum("hello", valid: [:hello, :world])
      {:ok, :hello}

      iex> Want.enum("hello", valid: ["hello", :world])
      {:ok, "hello"}

      iex> Want.enum("foo", valid: ["hello", :world], default: :bar)
      {:ok, :bar}
  """
  def enum(input, opts),
    do: maybe_default(Want.Enum.cast(input, opts), opts)

  @doc """
  Same as `enum/2`, but raises `ArgumentError` when casting fails and no
  `:default` option is given.
  """
  def enum!(input, opts),
    do: maybe_default!(Want.Enum.cast(input, opts), opts)

  @doc """
  Cast an input into a list. By default this function will simply break up the input into list elements, but
  further casting and validation of elements can be performed by providing an `element` option. The separator
  used to split the list defaults to the comma character and this can be controlled using the `separator` option.

  ## Options

    * `:separator` - Determines the character(s) used to separate list items. Defaults to the comma character.
    * `:element` - Provides the ability to further control how list elements are cast and validated. Similar to the
      `map` and `keywords` functions, accepts a keyword list with its own `:type` field and validation options.
    * `:default` - If conversion fails, this value should be returned instead.

  ## Examples

      iex> Want.list("1")
      {:ok, ["1"]}

      iex> Want.list("1", element: [type: :integer])
      {:ok, [1]}

      iex> Want.list("1,2,3,4", element: [type: :integer])
      {:ok, [1, 2, 3, 4]}

      iex> Want.list("1:2:3:4", separator: ":", element: [type: :integer])
      {:ok, [1, 2, 3, 4]}

      iex> Want.list("hello:world", separator: ":", element: [type: :enum, valid: [:hello, :world]])
      {:ok, [:hello, :world]}

      iex> Want.list("hello:world", separator: ":", element: [type: :enum, valid: [:hello]])
      {:ok, [:hello]}
  """
  def list(input, opts \\ []),
    do: maybe_default(Want.List.cast(input, opts), opts)

  @doc """
  Same as `list/2`, but raises `ArgumentError` when casting fails and no
  `:default` option is given.
  """
  def list!(input, opts \\ []),
    do: maybe_default!(Want.List.cast(input, opts), opts)

  @doc """
  Cast an incoming keyword list or map to an output map using the
  provided schema to control conversion rules and validations. Each value in
  the schema map represents conversion options.

  Specify a :type field to cast the input value for a given key to that type, defaults to :string.
  Specific conversion and validation options for each type corresponds to those available
  for `Want.integer/2`, `Want.float/2`, `Want.string/2` and `Want.atom/2`.

  Maps can be nested by using a new schema map as a value in a parent schema. The field from which
  a given value is derived can also be modified using the `:from` option.

  ## Options

    * `:merge` - Provide a map matching the given schema that contains default values to be
      used if the input value does not contain a particular field. Useful when updating a map
      with new inputs without overwriting all fields.

  ## Examples

      iex> Want.map(%{"id" => 1}, %{id: [type: :integer]})
      {:ok, %{id: 1}}

      iex> Want.map(%{"identifier" => 1}, %{id: [type: :integer, from: :identifier]})
      {:ok, %{id: 1}}

      iex> Want.map(%{}, %{id: [type: :integer, default: 1]})
      {:ok, %{id: 1}}

      iex> Want.map(%{"id" => "bananas"}, %{id: [type: :integer, default: 1]})
      {:ok, %{id: 1}}

      iex> Want.map(%{"hello" => "world", "foo" => "bar"}, %{hello: [], foo: [type: :atom]})
      {:ok, %{hello: "world", foo: :bar}}

      iex> Want.map(%{"hello" => %{"foo" => "bar"}}, %{hello: %{foo: [type: :atom]}})
      {:ok, %{hello: %{foo: :bar}}}

      iex> Want.map(%{"id" => "bananas"}, %{id: [type: :integer, default: 1]}, merge: %{id: 2})
      {:ok, %{id: 2}}
  """
  def map(input, schema, opts \\ []),
    do: Want.Map.cast(input, schema, opts)

  @doc """
  Same as `map/3`, but raises `ArgumentError` when casting fails.
  """
  def map!(input, schema, opts \\ []) do
    case Want.Map.cast(input, schema, opts) do
      {:ok, output} ->
        output

      {:error, reason} ->
        raise ArgumentError, message: reason
    end
  end

  @doc """
  Cast an incoming keyword list or map to an output keyword list using the provided schema to control
  conversion rules and validations. Each value in the schema map represents conversion options.

  Specify a :type field to cast the input value for a given key to that type, defaults to :string.
  Specific conversion and validation options for each type corresponds to those available
  for `Want.integer/2`, `Want.float/2`, `Want.string/2` and `Want.atom/2`.

  Keyword lists can be nested by using a new schema map as a value in a parent schema. The field from which
  a given value is derived can also be modified using the `:from` option.

  ## Examples

      iex> Want.keywords(%{"id" => 1}, %{id: [type: :integer]})
      {:ok, [id: 1]}

      iex> Want.keywords(%{"identifier" => 1}, %{id: [type: :integer, from: :identifier]})
      {:ok, [id: 1]}

      iex> Want.keywords(%{}, %{id: [type: :integer, default: 1]})
      {:ok, [id: 1]}

      iex> Want.keywords(%{"id" => "bananas"}, %{id: [type: :integer, default: 1]})
      {:ok, [id: 1]}

      iex> Want.keywords(%{"identifier" => "bananas"}, %{id: [type: :integer, default: 1, from: :identifier]})
      {:ok, [id: 1]}

      iex> Want.keywords(%{"hello" => "world", "foo" => "bar"}, %{hello: [], foo: [type: :atom]})
      {:ok, [hello: "world", foo: :bar]}

      iex> Want.keywords(%{"hello" => %{"foo" => "bar"}}, %{hello: %{foo: [type: :atom]}})
      {:ok, [hello: [foo: :bar]]}

      iex> Want.keywords(%{"id" => "bananas"}, %{id: [type: :integer, default: 1]}, merge: [id: 2])
      {:ok, [id: 2]}
  """
  def keywords(input, schema, opts \\ []),
    do: Want.Keyword.cast(input, schema, opts)

  @doc """
  Same as `keywords/3`, but raises `ArgumentError` when casting fails.
  """
  def keywords!(input, schema, opts \\ []) do
    case Want.Keyword.cast(input, schema, opts) do
      {:ok, output} ->
        output

      {:error, reason} ->
        raise ArgumentError, message: reason
    end
  end

  @doc """
  Dump a casted input into a more serializable form. Typically used to generate
  Phoenix query parameters.

  ## Options

    * `:update` - Update the input value using Want.Update protocol before dumping

  ## Examples

      iex> Want.dump({:inserted_at, :desc})
      {:ok, "inserted_at:desc"}

      iex> Want.dump({:inserted_at, :desc}, update: :inserted_at)
      {:ok, "inserted_at:asc"}

      iex> Want.dump({:inserted_at, :desc}, update: :updated_at)
      {:ok, "updated_at:asc"}

      iex> Want.dump("hello")
      {:ok, "hello"}

      iex> Want.dump(%{hello: :world, sort: {:inserted_at, :desc}})
      {:ok, [hello: :world, sort: "inserted_at:desc"]}

      iex> Want.dump(%{hello: :world, sort: {:inserted_at, :desc}}, update: [sort: :inserted_at])
      {:ok, [hello: :world, sort: "inserted_at:asc"]}

      iex> Want.dump({:a, :b, :c})
      {:error, "Unrecognized dump input {:a, :b, :c}"}
  """
  def dump(input),
    do: dump(input, [])

  def dump(input, opts) do
    # When an :update option is present, apply Want.Update first and dump
    # the updated value; otherwise dump the input directly. Update errors
    # are passed through unchanged.
    with true <- Keyword.has_key?(opts, :update),
         {:ok, new} <- Want.Update.update(input, opts[:update]) do
      Want.Dump.dump(new, opts)
    else
      false ->
        Want.Dump.dump(input, opts)

      other ->
        other
    end
  end

  @doc """
  Same as `dump/2`, but raises `ArgumentError` when dumping fails.
  """
  def dump!(input),
    do: dump!(input, [])

  def dump!(input, opts) do
    case dump(input, opts) do
      {:ok, result} ->
        result

      {:error, reason} ->
        raise ArgumentError, message: reason
    end
  end

  #
  # Handles a cast result by potentially converting an error
  # result to an ok result through the use of a default value.
  #
  defp maybe_default({:ok, result}, _opts),
    do: {:ok, result}

  defp maybe_default({:error, reason}, opts) do
    if Keyword.has_key?(opts, :default) do
      {:ok, opts[:default]}
    else
      {:error, reason}
    end
  end

  # Bang counterpart of `maybe_default/2`: unwraps the ok value or raises
  # `ArgumentError` with the cast failure reason.
  defp maybe_default!(result, opts) do
    case maybe_default(result, opts) do
      {:ok, result} ->
        result

      {:error, reason} ->
        raise ArgumentError, message: reason
    end
  end
end
|
lib/want.ex
| 0.909204
| 0.549278
|
want.ex
|
starcoder
|
defmodule Nebulex.Adapter.Stats do
  @moduledoc """
  Specifies the stats API required from adapters.

  Each adapter is responsible for providing support for stats by implementing
  this behaviour. However, this module ships with a default implementation
  using [Erlang counters](https://erlang.org/doc/man/counters.html), with all
  callbacks overridable, which is supported by the built-in adapters.

  See `Nebulex.Adapters.Local` for more information about how it can be used
  from the adapter, and also [Nebulex Telemetry Guide][telemetry_guide] to
  learn how to use the Cache with Telemetry.

  [telemetry_guide]: http://hexdocs.pm/nebulex/telemetry.html
  """

  @doc """
  Returns `Nebulex.Stats.t()` with the current stats values.

  If the stats are disabled for the cache, then `nil` is returned.

  The adapter may also include additional custom measurements,
  as well as metadata.

  See `c:Nebulex.Cache.stats/0`.
  """
  @callback stats(Nebulex.Adapter.adapter_meta()) :: Nebulex.Stats.t() | nil

  @doc false
  defmacro __using__(_opts) do
    quote do
      @behaviour Nebulex.Adapter.Stats

      @impl true
      def stats(adapter_meta) do
        # Counter slots (see `incr/3` below): 1 hits, 2 misses, 3 writes,
        # 4 updates, 5 evictions, 6 expirations. Returns nil when stats
        # are disabled (no :stats_counter in the adapter meta).
        if counter_ref = adapter_meta[:stats_counter] do
          %Nebulex.Stats{
            measurements: %{
              hits: :counters.get(counter_ref, 1),
              misses: :counters.get(counter_ref, 2),
              writes: :counters.get(counter_ref, 3),
              updates: :counters.get(counter_ref, 4),
              evictions: :counters.get(counter_ref, 5),
              expirations: :counters.get(counter_ref, 6)
            },
            metadata: %{
              cache: adapter_meta[:name] || adapter_meta[:cache]
            }
          }
        end
      end

      defoverridable stats: 1
    end
  end

  @doc """
  Initializes the Erlang counter to be used by the adapter. See the module
  documentation for more information about the stats default implementation.

  Returns `nil` if the option `:stats` is set to `false` or is not set at
  all; the stats will be skipped.

  ## Example

      Nebulex.Adapter.Stats.init(opts)

  > **NOTE:** This function is usually called by the adapter in case it uses
    the default implementation; the adapter should feed `Nebulex.Stats.t()`
    counters.

  See adapters documentation for more information about stats implementation.
  """
  @spec init(Keyword.t()) :: :counters.counters_ref() | nil
  def init(opts) do
    case Keyword.get(opts, :stats, false) do
      true -> :counters.new(6, [:write_concurrency])
      false -> nil
      other -> raise ArgumentError, "expected stats: to be boolean, got: #{inspect(other)}"
    end
  end

  @doc """
  Increments the `counter`'s `stat_name` by the given `incr` value.

  ## Examples

      Nebulex.Adapter.Stats.incr(stats_counter, :hits)

      Nebulex.Adapter.Stats.incr(stats_counter, :writes, 10)

  > **NOTE:** This function is usually called by the adapter in case it uses
    the default implementation; the adapter should feed `Nebulex.Stats.t()`
    counters.

  See adapters documentation for more information about stats implementation.
  """
  @spec incr(:counters.counters_ref() | nil, atom, integer) :: :ok
  def incr(counter, stat_name, incr \\ 1)

  # A nil counter means stats are disabled; incrementing is a no-op.
  def incr(nil, _stat, _incr), do: :ok
  def incr(ref, :hits, incr), do: :counters.add(ref, 1, incr)
  def incr(ref, :misses, incr), do: :counters.add(ref, 2, incr)
  def incr(ref, :writes, incr), do: :counters.add(ref, 3, incr)
  def incr(ref, :updates, incr), do: :counters.add(ref, 4, incr)
  def incr(ref, :evictions, incr), do: :counters.add(ref, 5, incr)
  def incr(ref, :expirations, incr), do: :counters.add(ref, 6, incr)
end
|
lib/nebulex/adapter/stats.ex
| 0.921446
| 0.675972
|
stats.ex
|
starcoder
|
defmodule Schocken.Game.Ranking do
  @moduledoc false

  alias Schocken.Game.Player

  # Hand ranks, ordered best to worst; compared positionally as the first
  # element of the score tuple.
  @schock_out 5
  @schock 4
  @general 3
  @straight 2
  @house_number 1

  @doc """
  Evaluates the current toss and stores its score as `{rank, high, tries}`.
  """
  @spec evaluate(Player.current_toss()) :: Player.current_toss()
  def evaluate(current_toss = %{dices: dices, one_toss: one_toss, tries: tries}) do
    score =
      dices
      |> Enum.sort(&(&1 >= &2))
      |> eval(one_toss)
      |> Tuple.append(tries)

    %{current_toss | score: score}
  end

  @doc """
  Returns the best-scoring player still in the game, their index in the
  player list, and the coaster payout for their toss.
  """
  @spec highest_toss([Player.t()]) :: {{Player.t(), integer}, atom, integer}
  def highest_toss(players) do
    best_player =
      players
      |> Enum.filter(&(&1.state != :out))
      |> Enum.max_by(fn player -> player.current_toss.score end)

    index_best = Enum.find_index(players, fn player -> player.name == best_player.name end)
    {type, number} = calc_amount_of_coasters(best_player)
    {{best_player, index_best}, type, number}
  end

  @doc """
  Returns the worst-scoring player still in the game and their index in the
  player list.
  """
  @spec lowest_toss([Player.t()]) :: {Player.t(), integer}
  def lowest_toss(players) do
    worst_player =
      players
      |> Enum.filter(&(&1.state != :out))
      |> Enum.min_by(fn player -> player.current_toss.score end)

    index_worst = Enum.find_index(players, fn player -> player.name == worst_player.name end)
    {worst_player, index_worst}
  end

  # Maps a player's score to {toss_type, coaster_count}.
  #
  # Spec fixed: the previous `@spec calc_amount_of_coasters(Player) :: integer`
  # matched neither the argument (`Player.t()`) nor the return value
  # (a 2-tuple).
  @spec calc_amount_of_coasters(Player.t()) :: {atom, non_neg_integer}
  defp calc_amount_of_coasters(player) do
    score = player.current_toss.score

    case score do
      {5, _, _} -> {:schock_out, 0}
      {4, number, _} -> {:schock, number}
      {3, _, _} -> {:general, 3}
      {2, _, _} -> {:straight, 2}
      _ -> {:house_number, 1}
    end
  end

  # Scores dices sorted in descending order; returns {rank, high_value}.
  @spec eval([Player.dice()], boolean) :: Player.score()
  defp eval(dices, one_toss)

  defp eval([1, 1, 1], _), do: {@schock_out, 0}
  defp eval([h, 1, 1], _), do: {@schock, h}
  defp eval([s, s, s], _), do: {@general, s}
  # A straight only counts when it was thrown in a single toss.
  defp eval([a, b, c], true) when a - c == 2 and a != b and b != c, do: {@straight, a}

  defp eval(dices, _) do
    # Fallback: the descending digits read as one number, e.g. [6, 5, 2] -> 652.
    rank =
      dices
      |> Enum.join()
      |> String.to_integer()

    {@house_number, rank}
  end
end
|
lib/schocken/game/ranking.ex
| 0.843251
| 0.452778
|
ranking.ex
|
starcoder
|
defmodule ExBankID.Sign.Payload do
  @moduledoc """
  Provides the struct used when initiating a signing of data.
  """
  defstruct [:endUserIp, :personalNumber, :requirement, :userVisibleData, :userNonVisibleData]

  import ExBankID.PayloadHelpers

  @spec new(
          binary,
          binary,
          personal_number: String.t(),
          requirement: map(),
          user_non_visible_data: String.t()
        ) ::
          {:error, String.t()} | %ExBankID.Sign.Payload{}
  @doc """
  Constructs a new Sign Payload with the given ip-address, user visible data, and optionally personal number and user non visible data.
  user_visible_data and user_non_visible_data will be properly encoded.

  ## Examples

      iex> ExBankID.Sign.Payload.new("1.1.1.1", "This will be visible in the bankID app")
      %ExBankID.Sign.Payload{endUserIp: "1.1.1.1", userVisibleData: "VGhpcyB3aWxsIGJlIHZpc2libGUgaW4gdGhlIGJhbmtJRCBhcHA="}

      iex> ExBankID.Sign.Payload.new("1.1.1.1", "This will be visible in the bankID app", personal_number: "190000000000")
      %ExBankID.Sign.Payload{endUserIp: "1.1.1.1", personalNumber: "190000000000", userVisibleData: "VGhpcyB3aWxsIGJlIHZpc2libGUgaW4gdGhlIGJhbmtJRCBhcHA="}

      iex> ExBankID.Sign.Payload.new("1.1.1.1", "This will be visible in the bankID app", requirement: %{allowFingerprint: :false})
      %ExBankID.Sign.Payload{endUserIp: "1.1.1.1", userVisibleData: "VGhpcyB3aWxsIGJlIHZpc2libGUgaW4gdGhlIGJhbmtJRCBhcHA=", requirement: %{allowFingerprint: :false}}

      iex> ExBankID.Sign.Payload.new("Not a valid ip address", "This will be visible in the bankID app", personal_number: "190000000000")
      {:error, "Invalid ip address: Not a valid ip address"}
  """
  def new(ip_address, user_visible_data, opts \\ [])
      when is_binary(ip_address) and is_binary(user_visible_data) and is_list(opts) do
    # Each validation helper (from ExBankID.PayloadHelpers or below) returns
    # {:ok, value} | {:error, reason}; the first failure falls out of the
    # `with` and is returned unchanged.
    with {:ok, ip_address} <- check_ip_address(ip_address),
         {:ok, user_visible_data} <- encode_user_visible_data(user_visible_data),
         {:ok, personal_number} <- check_personal_number(Keyword.get(opts, :personal_number)),
         {:ok, user_non_visible_data} <-
           encode_user_non_visible_data(Keyword.get(opts, :user_non_visible_data)),
         {:ok, requirement} <- check_requirement(Keyword.get(opts, :requirement)) do
      %ExBankID.Sign.Payload{
        endUserIp: ip_address,
        userVisibleData: user_visible_data,
        personalNumber: personal_number,
        requirement: requirement,
        userNonVisibleData: user_non_visible_data
      }
    end
  end

  # Base64-encodes the visible text; the encoded form must stay below
  # 40 000 bytes.
  defp encode_user_visible_data(data) when is_binary(data) do
    data = Base.encode64(data)

    if byte_size(data) < 40_000 do
      {:ok, data}
    else
      # Fixed typo: "is to large" -> "is too large".
      {:error, "User visible data is too large"}
    end
  end

  # Base64-encodes the non-visible payload; the encoded form must stay below
  # 200 000 bytes.
  defp encode_user_non_visible_data(data) when is_binary(data) do
    data = Base.encode64(data)

    if byte_size(data) < 200_000 do
      {:ok, data}
    else
      # Fixed copy-paste bug: this clause previously reported
      # "User visible data is to large" for the NON-visible payload.
      {:error, "User non visible data is too large"}
    end
  end

  # Absent non-visible data is valid and passes through untouched.
  defp encode_user_non_visible_data(nil) do
    {:ok, nil}
  end
end
|
lib/ex_bank_id/sign/payload.ex
| 0.807726
| 0.427397
|
payload.ex
|
starcoder
|
defmodule CFEnv.Middleware do
  @moduledoc """
  The adapter interface for pluggable service middleware.

  Your VCAP services might need to be parsed and transformed as part of startup,
  such as vault paths, or base64 decoding. Or maybe you want to derive services
  maps to structs.

  There are two callbacks - `init` and `call`.

  `init` is given the arguments from configuration, and a map of the current services.
  It should return a tuple of `{:ok, state}`, which will be provided to the `call` callback.
  It's recommended to do any setup needed in `init`.

  ## Examples

  You can add a Middleware using the `plug` macro.
  Provide a module, and a list of options to be passed to the init callback.

  ```
  defmodule MyApp.Env do
    use CFEnv, otp_app: :my_app

    # decode the credentials under "database" and "AWS"
    plug CFEnv.Base64, services: ["database", "AWS"]
  end
  ```
  """

  @typedoc "Service Name"
  @type service_name :: String.t()

  @typedoc "Service Value"
  @type service_value :: term()

  @typedoc "Map of Service Bindings"
  @type services :: map()

  @typedoc "State passed into every `call` callback"
  @type state :: term

  @typedoc "Initial values passed into Middleware's `init` callback"
  @type options :: [term]

  @doc """
  Initializes the processor. Any shared setup that needs to be done beforehand
  should be done here.

  ## Examples

  Should return a tuple of either `{:ok, state}` or `{:error, reason}`.
  The state will be provided to the `call` callback.

  ```
  defmodule MyProcessor do
    def init(services, options) do
      services = Keyword.get(options, :services, [])
      {:ok, %{services: services}}
    end
    ...
  end
  ```
  """
  # Use the declared `options` type here instead of a bare `[]` so the spec
  # matches the documented contract.
  @callback init(services, options) :: {:ok, state} | {:error, term}

  @doc """
  Processes a key in the service bindings.

  ## Examples

  Each processor iterates over every service concurrently. When a processor is
  called, it only has access to the current service.

  The return value should be a tuple, consisting of the service name and the service value.

  ```
  defmodule MyProcessor do
    def call({service_name, service_value} = service, state) do
      if service_name in state.services do
        {service_name, some_processing_function(service_value)}
      else
        service
      end
    end
    ...
  end
  ```
  """
  @callback call({service_name, service_value}, state) :: {service_name, service_value}
end
|
lib/middleware/middleware.ex
| 0.94121
| 0.832509
|
middleware.ex
|
starcoder
|
defmodule Mockery.Assertions do
  @moduledoc """
  This module contains a set of additional assertion functions.
  """
  alias Mockery.Error
  alias Mockery.History
  alias Mockery.Utils

  @doc """
  Asserts that function from given module with given name or name and arity
  was called at least once.
  **NOTE**: Mockery doesn't keep track of function calls on modules that
  weren't prepared by `Mockery.of/2` and for MIX_ENV other than :test
  ## Examples
  Assert Mod.fun/2 was called
      assert_called Mod, fun: 2
  Assert any function named :fun from module Mod was called
      assert_called Mod, :fun
  """
  # Clause for the `fun: arity` keyword form.
  def assert_called(mod, [{fun, arity}]) do
    ExUnit.Assertions.assert(
      called?(mod, fun, arity),
      "#{Utils.print_mod(mod)}.#{fun}/#{arity} was not called"
    )
  end

  # Clause for the bare function-name form (any arity matches).
  def assert_called(mod, fun) do
    ExUnit.Assertions.assert(called?(mod, fun), "#{Utils.print_mod(mod)}.#{fun} was not called")
  end

  @doc """
  Asserts that function from given module with given name or name and arity
  was NOT called.
  **NOTE**: Mockery doesn't keep track of function calls on modules that
  weren't prepared by `Mockery.of/2` and for MIX_ENV other than :test
  ## Examples
  Assert Mod.fun/2 wasn't called
      refute_called Mod, fun: 2
  Assert any function named :fun from module Mod wasn't called
      refute_called Mod, :fun
  """
  def refute_called(mod, [{fun, arity}]) do
    ExUnit.Assertions.refute(
      called?(mod, fun, arity),
      "#{Utils.print_mod(mod)}.#{fun}/#{arity} was called at least once"
    )
  end

  def refute_called(mod, fun) do
    ExUnit.Assertions.refute(
      called?(mod, fun),
      "#{Utils.print_mod(mod)}.#{fun} was called at least once"
    )
  end

  @doc """
  Asserts that function from given module with given name was called
  at least once with arguments matching given pattern.
  **NOTE**: Mockery doesn't keep track of function calls on modules that
  weren't prepared by `Mockery.of/2` and for MIX_ENV other than :test
  ## Examples
  Assert Mod.fun/2 was called with given args list
      assert_called Mod, :fun, ["a", "b"]
  You can also use unbound variables inside args pattern
      assert_called Mod, :fun, ["a", _second]
  """
  # Implemented as a macro so `args` stays a quoted pattern (it may contain
  # unbound variables) and is matched via `match?/2` at the call site.
  defmacro assert_called(mod, fun, args) do
    # Expand aliases/attributes in the caller's context before quoting.
    mod = Macro.expand(mod, __CALLER__)
    args = Macro.expand(args, __CALLER__)

    quote do
      ExUnit.Assertions.assert(unquote(called_with?(mod, fun, args)), """
      #{unquote(Utils.print_mod(mod))}.#{unquote(fun)} \
      was not called with given arguments\
      #{unquote(History.print(mod, fun, args))}
      """)
    end
  end

  @doc """
  Asserts that function from given module with given name was NOT called
  with arguments matching given pattern.
  **NOTE**: Mockery doesn't keep track of function calls on modules that
  weren't prepared by `Mockery.of/2` and for MIX_ENV other than :test
  ## Examples
  Assert Mod.fun/2 wasn't called with given args list
      refute_called Mod, :fun, ["a", "b"]
  You can also use unbound variables inside args pattern
      refute_called Mod, :fun, ["a", _second]
  """
  defmacro refute_called(mod, fun, args) do
    mod = Macro.expand(mod, __CALLER__)
    args = Macro.expand(args, __CALLER__)

    quote do
      ExUnit.Assertions.refute(unquote(called_with?(mod, fun, args)), """
      #{unquote(Utils.print_mod(mod))}.#{unquote(fun)} \
      was called with given arguments at least once\
      #{unquote(History.print(mod, fun, args))}
      """)
    end
  end

  @doc """
  Asserts that function from given module with given name was called
  given number of times with arguments matching given pattern.
  Similar to `assert_called/3` but instead of checking if function was called
  at least once, it checks if function was called specific number of times.
  **NOTE**: Mockery doesn't keep track of function calls on modules that
  weren't prepared by `Mockery.of/2` and for MIX_ENV other than :test
  ## Examples
  Assert Mod.fun/2 was called with given args 5 times
      assert_called Mod, :fun, ["a", "b"], 5
  Assert Mod.fun/2 was called with given args from 3 to 5 times
      assert_called Mod, :fun, ["a", "b"], 3..5
  Assert Mod.fun/2 was called with given args 3 or 5 times
      assert_called Mod, :fun, ["a", "b"], [3, 5]
  """
  # `times` may be an integer, a range, or a list of acceptable counts;
  # dispatch happens in ncalled_with?/4.
  defmacro assert_called(mod, fun, args, times) do
    mod = Macro.expand(mod, __CALLER__)
    args = Macro.expand(args, __CALLER__)

    quote do
      ExUnit.Assertions.assert(unquote(ncalled_with?(mod, fun, args, times)), """
      #{unquote(Utils.print_mod(mod))}.#{unquote(fun)} \
      was not called with given arguments expected number of times\
      #{unquote(History.print(mod, fun, args))}
      """)
    end
  end

  @doc """
  Asserts that function from given module with given name was NOT called
  given number of times with arguments matching given pattern.
  Similar to `refute_called/3` but instead of checking if function was called
  at least once, it checks if function was called specific number of times.
  **NOTE**: Mockery doesn't keep track of function calls on modules that
  weren't prepared by `Mockery.of/2` and for MIX_ENV other than :test
  ## Examples
  Assert Mod.fun/2 was not called with given args 5 times
      refute_called Mod, :fun, ["a", "b"], 5
  Assert Mod.fun/2 was not called with given args from 3 to 5 times
      refute_called Mod, :fun, ["a", "b"], 3..5
  Assert Mod.fun/2 was not called with given args 3 or 5 times
      refute_called Mod, :fun, ["a", "b"], [3, 5]
  """
  defmacro refute_called(mod, fun, args, times) do
    mod = Macro.expand(mod, __CALLER__)
    args = Macro.expand(args, __CALLER__)

    quote do
      ExUnit.Assertions.refute(unquote(ncalled_with?(mod, fun, args, times)), """
      #{unquote(Utils.print_mod(mod))}.#{unquote(fun)} \
      was called with given arguments unexpected number of times\
      #{unquote(History.print(mod, fun, args))}
      """)
    end
  end

  # True when the function was called at all, regardless of arity.
  defp called?(mod, fun), do: Utils.get_calls(mod, fun) != []

  # True when a recorded call matches the given arity.
  # Calls are stored as {arity, args} tuples (see Mockery.Utils).
  defp called?(mod, fun, arity) do
    mod
    |> Utils.get_calls(fun)
    |> Enum.any?(&match?({^arity, _}, &1))
  end

  # Guard clause: a non-list args pattern is a caller error — emit code
  # that raises instead of silently failing the assertion.
  defp called_with?(mod, fun, args) when not is_list(args), do: args_should_be_list(mod, fun)

  # Builds the quoted expression checking that any recorded call's
  # argument list matches the quoted `args` pattern.
  defp called_with?(mod, fun, args) do
    quote do
      unquote(mod)
      |> Utils.get_calls(unquote(fun))
      |> Enum.any?(&match?({_, unquote(args)}, &1))
    end
  end

  defp ncalled_with?(mod, fun, args, _times) when not is_list(args),
    do: args_should_be_list(mod, fun)

  # Exact call-count check for an integer `times`.
  defp ncalled_with?(mod, fun, args, times) when is_integer(times) do
    quote do
      unquote(mod)
      |> Utils.get_calls(unquote(fun))
      |> Enum.filter(&match?({_, unquote(args)}, &1))
      |> Enum.count()
      |> (&(&1 == unquote(times))).()
    end
  end

  # Membership check for a range or list of acceptable call counts.
  defp ncalled_with?(mod, fun, args, times) do
    quote do
      unquote(mod)
      |> Utils.get_calls(unquote(fun))
      |> Enum.filter(&match?({_, unquote(args)}, &1))
      |> Enum.count()
      |> (&(&1 in unquote(times))).()
    end
  end

  # Quoted raise used by the two guard clauses above.
  defp args_should_be_list(mod, fun) do
    quote do
      raise Error, "args for #{unquote(Utils.print_mod(mod))}.#{unquote(fun)} should be a list"
    end
  end
end
|
lib/mockery/assertions.ex
| 0.863046
| 0.738663
|
assertions.ex
|
starcoder
|
defmodule Timex.Parsers.DateFormat.DefaultParser do
  @moduledoc """
  This module is responsible for parsing date strings using
  the default timex formatting syntax.
  See `Timex.DateFormat.Formatters.DefaultFormatter` for more info.
  """
  use Timex.Parsers.DateFormat.Parser
  alias Timex.Parsers.DateFormat.Directive

  @doc """
  The tokenizer used by this parser.
  """
  defdelegate tokenize(format_string), to: Timex.Parsers.DateFormat.Tokenizers.Default

  @doc """
  Extracts the value for a given directive.

  Returns `{token, {value, rest_of_string}}` on success, or
  `{token, {:error, reason}}` / `{:error, reason}` on failure.
  """
  def parse_directive(<<>>, _), do: {:error, @invalid_input}
  def parse_directive(date_string, %Directive{token: token} = directive) do
    {token, do_parse_directive(date_string, directive)}
  end

  # Special handling for fractional seconds
  # Consume the leading "." separator and retry with the same directive.
  defp do_parse_directive(<<?., date_string::binary>>, %Directive{token: :sec_fractional} = dir) do
    do_parse_directive(date_string, dir)
  end

  # If we attempt to parse the next character and it's not a number, return an empty string since
  # fractional seconds are optional
  defp do_parse_directive(<<c::utf8, _::binary>>=date_string, %Directive{token: :sec_fractional})
    when not c in ?0..?9 do
    {"", date_string}
  end

  # Numeric directives
  defp do_parse_directive(date_string, %Directive{token: token, type: :numeric, pad: pad} = dir) do
    date_chars = date_string |> String.to_char_list
    # Drop non-numeric characters
    date_chars = date_chars |> Enum.drop_while(fn c -> (c in @numerics) == false end)
    # Parse padding first
    padding_stripped = date_chars |> strip_padding(pad, dir.pad_type)
    # Parse value
    case padding_stripped do
      {:error, _} = error -> error
      [] -> {:error, @invalid_input}
      padding_stripped ->
        # Extract a numeric value up to the maximum length allowed by dir.len
        chars = extract_value(padding_stripped, dir.len, @numerics)
        # Convert to numeric value
        len = length(chars)
        padding_stripped = padding_stripped |> Enum.drop(len)
        # Validation pipeline: each step passes the value through or
        # replaces it with an {:error, _} tuple.
        valid? = chars
          |> valid_length?(token, dir.len)
          |> valid_value?(token, dir.validate)
          |> within_bounds?(token, dir.min, dir.max)
        case valid? do
          {:error, _} = error -> error
          str -> {str, padding_stripped |> List.to_string}
        end
    end
  end

  # Word directives (e.g. month/day names).
  defp do_parse_directive(date_string, %Directive{token: token, type: :word} = dir) do
    date_chars = date_string |> String.to_char_list
    # Drop leading non-alpha characters.
    date_chars = date_chars |> Enum.drop_while(&(Enum.member?(@allowed_chars, &1) == false))
    # Extract a word value up to the maximum length allowed by dir.len
    chars = extract_value(date_chars, dir.len, @allowed_chars)
    len = length(chars)
    date_chars = date_chars |> Enum.drop(len)
    # Validate that the word value is of the correct length
    valid? = chars
      |> valid_length?(token, dir.len)
      |> valid_value?(token, dir.validate)
    case valid? do
      {:error, _} = error -> error
      str -> {str, date_chars |> List.to_string}
    end
  end

  # Match directives: the extracted value must equal one of the
  # directive's allowed `match` values.
  defp do_parse_directive(date_string, %Directive{token: token, type: :match, match: match} = dir) when match != false do
    date_chars = date_string |> String.to_char_list
    # Drop leading non-word characters.
    date_chars = date_chars |> Enum.drop_while(fn c -> (c in @word_chars) == false end)
    # Extract a value up to the maximum length allowed by dir.len
    chars = extract_value(date_chars, dir.len, @word_chars)
    len = length(chars)
    date_chars = date_chars |> Enum.drop(len)
    # Validate that the value is of the correct length
    valid? = chars
      |> valid_length?(token, dir.len)
      |> valid_value?(token, match: match)
    case valid? do
      {:error, _} = error -> error
      str -> {str, date_chars |> List.to_string}
    end
  end

  # Fallback: directive type not handled by any clause above.
  defp do_parse_directive(_date_string, directive) do
    {:error, "Unsupported directive: #{directive |> Macro.to_string}"}
  end

  # Strip the padding from a char list
  # If 0 is given as padding, do nothing
  defp strip_padding(str, 0, _)
    when is_list(str),
    do: str
  # If we reach the end of the input string before
  # we trim all the padding, return an error.
  defp strip_padding([], pad, _)
    when pad > 0,
    do: {:error, "Unexpected end of string!"}
  # Start trimming off padding, but pass along the source string as well
  defp strip_padding(str, pad, pad_type)
    when is_list(str),
    do: strip_padding(str, str, pad, pad_type)
  # If we hit 0, return the stripped string
  defp strip_padding(str, _, 0, _),
    do: str
  # If we hit the end of the string before the
  # we trim all the padding, return an error.
  defp strip_padding([], _, _, _),
    do: {:error, "Unexpected end of string!"}
  # Trim off leading zeros
  defp strip_padding([h|rest], str, pad, :zero)
    when pad > 0 and h == ?0,
    do: strip_padding(rest, str, pad - 1, :zero)
  # Trim off leading spaces
  defp strip_padding([h|rest], str, pad, :space)
    when pad > 0 and h == 32,
    do: strip_padding(rest, str, pad - 1, :space)
  # If the first character is not padding, and is the same
  # as the source string's first character, there is no padding
  # to strip.
  defp strip_padding([h|_], [h|_] = str, pad, _)
    when pad > 0,
    do: str

  # Parse a value from a char list given a max length, and
  # a list of valid characters the value can be composed of
  # `str_len` may be :word (no limit), an integer, or a range —
  # matched per-character in the case below.
  defp extract_value(str, str_len, valid_chars) when is_list(str) do
    Stream.transform(str, 0, fn char, chars_taken ->
      valid_char? = Enum.member?(valid_chars, char)
      case {char, str_len} do
        {char, :word} when valid_char? ->
          {[char], chars_taken + 1}
        {char, str_len} when is_number(str_len) and chars_taken < str_len and valid_char? ->
          {[char], chars_taken + 1}
        {char, _..hi} when chars_taken < hi and valid_char? ->
          {[char], chars_taken + 1}
        _ ->
          {:halt, chars_taken}
      end
    end) |> Enum.to_list
  end
end
|
lib/parsers/dateformat/default.ex
| 0.778944
| 0.416678
|
default.ex
|
starcoder
|
defmodule NeoscanMonitor.Utils do
  @moduledoc false
  alias NeoscanSync.Blockchain
  alias Neoscan.Transactions
  alias Neoscan.Addresses
  alias Neoscan.Stats
  alias Neoscan.ChainAssets
  alias NeoscanSync.Notifications

  # blockchain api nodes
  def seeds do
    Application.fetch_env!(:neoscan_monitor, :seeds)
  end

  # function to load nodes state
  # Queries every seed node for its current height, drops the ones that
  # error or fail the block-fetch probe, and builds the monitor state.
  def load do
    data =
      seeds()
      |> Enum.map(fn url -> {url, Blockchain.get_current_height(url)} end)
      |> Enum.filter(fn {url, result} -> evaluate_result(url, result) end)
      |> Enum.map(fn {url, {:ok, height}} -> {url, height} end)

    set_state(data)
  end

  # handler for nil data
  # No reachable nodes: state with empty node list and nil height.
  defp set_state([] = data) do
    %{:nodes => [], :height => {:ok, nil}, :data => data}
  end

  # call filters on results and set state
  defp set_state(data) do
    height = filter_height(data)
    %{nodes: filter_nodes(data, height), height: {:ok, height}, data: data}
  end

  # filter working nodes
  # Keeps only the nodes whose reported height equals the consensus height.
  defp filter_nodes(data, height) do
    data
    |> Enum.filter(fn {_url, hgt} -> hgt == height end)
    |> Enum.map(fn {url, _height} -> url end)
  end

  # filter current height
  # Consensus height = the height reported by the largest number of nodes.
  defp filter_height(data) do
    {height, _count} =
      data
      |> Enum.map(fn {_url, height} -> height end)
      |> Enum.reduce(%{}, fn height, acc ->
        Map.update(acc, height, 1, &(&1 + 1))
      end)
      |> Enum.max_by(fn {_height, count} -> count end)

    height
  end

  # handler to filter errors
  defp evaluate_result(url, {:ok, height}) do
    test_get_block(url, height)
  end

  defp evaluate_result(_url, {:error, _height}) do
    false
  end

  # test node api
  # Probes the node by fetching the block just below its reported height.
  defp test_get_block(url, height) do
    Blockchain.get_block_by_height(url, height - 1)
    |> test()
  end

  # handler to test response
  defp test({:ok, _block}) do
    true
  end

  defp test({:error, _reason}) do
    false
  end

  # function to cut extra elements
  # NOTE(review): 15 appears to be the displayed-list capacity — confirm
  # against the callers that maintain these lists.
  def cut_if_more(list, count) when count == 15 do
    list
    |> Enum.drop(-1)
  end

  def cut_if_more(list, _count) do
    list
  end

  # function to get DB asset stats
  # Attaches an address/transaction count map under :stats, keyed by the
  # asset's txid (native assets) or contract hash (contract tokens).
  def get_stats(assets) do
    Enum.map(assets, fn asset ->
      cond do
        asset.contract == nil ->
          Map.put(asset, :stats, %{
            :addresses => Addresses.count_addresses_for_asset(asset.txid),
            :transactions => Stats.count_transactions_for_asset(asset.txid)
          })

        asset.contract != nil ->
          Map.put(asset, :stats, %{
            :addresses => Addresses.count_addresses_for_asset(asset.contract),
            :transactions => Stats.count_transactions_for_asset(asset.contract)
          })
      end
    end)
  end

  # function to get general db stats
  def get_general_stats do
    %{
      :total_blocks => Stats.count_blocks(),
      :total_transactions => Stats.count_transactions(),
      :total_transfers => Stats.count_transfers(),
      :total_addresses => Stats.count_addresses()
    }
  end

  # function to add vouts to transactions
  # Loads all vouts for the given transactions in one query, then
  # distributes them onto each transaction by transaction_id.
  def add_vouts(transactions) do
    ids = Enum.map(transactions, fn tx -> tx.id end)
    vouts = Transactions.get_transactions_vouts(ids)

    transactions
    |> Enum.map(fn tx ->
      Map.put(
        tx,
        :vouts,
        Enum.filter(vouts, fn vout ->
          vout.transaction_id == tx.id
        end)
      )
    end)
  end

  # Creates DB tokens for notifications whose script_hash is not already
  # present in `old_list`.
  def add_new_tokens(old_list \\ []) do
    Enum.filter(get_token_notifications(), fn %{"token" => token} ->
      Enum.all?(old_list, fn %{"token" => old_token} -> token["script_hash"] != old_token["script_hash"] end)
    end)
    |> ChainAssets.create_tokens
  end

  # NOTE(review): retries forever with no backoff when the notifications
  # endpoint keeps erroring — confirm this is intended; a persistent
  # failure will spin this process.
  defp get_token_notifications do
    case Notifications.get_token_notifications do
      {:error, _} ->
        get_token_notifications()

      result ->
        result
    end
  end
end
|
apps/neoscan_monitor/lib/neoscan_monitor/monitor/utils.ex
| 0.586168
| 0.445288
|
utils.ex
|
starcoder
|
defmodule KV.GarbageCollector do
  @moduledoc """
  Module which expires keys in buckets, built on top of lightweight `OTP` processes (`proc_lib`).
  """

  # Client API.

  @doc """
  Starts the garbage collection process with a given name that handles keys expiration.
  """
  def start_link(name) do
    :proc_lib.start_link(__MODULE__, :init, [ self(), name ])
  end

  @doc """
  Schedules `key` in `bucket` for deletion after `ttl` milliseconds.
  """
  # NOTE(review): this looks up __MODULE__, but init/2 registers the process
  # under the `name` passed to start_link/1 — confirm callers always start
  # it with `name == __MODULE__`, otherwise whereis/1 returns nil and this
  # raises.
  def expire_key_after(bucket, key, ttl) do
    Process.send(Process.whereis(__MODULE__), {:expire_key_after, bucket, key, ttl}, [])
  end

  # Required functions for `:proc_lib`.

  # Called by :sys after a system message has been handled; resume the loop.
  def system_continue(parent, opts, state) do
    loop(parent, opts, state)
  end

  def system_terminate(reason, _parent, _opts, _state) do
    exit(reason)
  end

  def system_get_state(state) do
    {:ok, state}
  end

  # Debug hook invoked by :sys.handle_debug/4 for every traced event.
  # Bug fix: the original called IO.inspect(device, "..."), which raises
  # (IO.inspect/2 expects a keyword list as its second argument), and
  # interpolating a tuple event with #{event} would raise
  # Protocol.UndefinedError. Write the formatted line to the device and
  # inspect/1 the terms instead.
  def write_debug(device, event, name) do
    IO.puts(device, "CUSTOM WRITE DEBUG: #{inspect(name)} event = #{inspect(event)}")
  end

  # :sys.replace_state/2 support: apply the modifier and return the new state
  # both as the reply and as the loop state.
  def system_replace_state(modify_state_fun, state) do
    updated_state = modify_state_fun.(state)
    {:ok, updated_state, updated_state}
  end

  def system_code_change(state, _module, _old_version, _extra) do
    {:ok, state}
  end

  def init(parent, name) do
    opts = :sys.debug_options([])
    # Acknowledge the starter before entering the loop (proc_lib contract).
    :proc_lib.init_ack(parent, {:ok, self()})
    Process.register(self(), name)
    loop(parent, opts, %{})
  end

  # Private API.

  # Main receive loop; `state` maps keys to their scheduled TTLs.
  defp loop(parent, opts, state) do
    receive do
      {:expire_key_after, bucket, key, ttl} ->
        modified_opts = :sys.handle_debug(opts, &write_debug/3, __MODULE__, {:in, :expire_key_after, bucket, key, ttl})
        Process.send_after(self(), {:expired, bucket, key}, ttl)
        new_opts = :sys.handle_debug(modified_opts, &write_debug/3, __MODULE__, {:out, :expired, bucket, key})
        loop(parent, new_opts, Map.put(state, key, ttl))

      {:expired, bucket, key} ->
        new_opts = :sys.handle_debug(opts, &write_debug/3, __MODULE__, {:in, :expired, bucket, key})
        KV.Bucket.delete(bucket, key)
        loop(parent, new_opts, Map.delete(state, key))

      {:system, from, request} ->
        # handle_system_msg/6 never returns — it re-enters the loop via
        # system_continue/3 (or exits via system_terminate/4). The original
        # had an unreachable loop/3 call after it; removed.
        :sys.handle_system_msg(request, from, parent, __MODULE__, opts, state)

      _ ->
        # Drain unknown messages so the mailbox cannot grow unboundedly.
        loop(parent, opts, state)
    end
  end
end
|
apps/kv/lib/kv/garbage_collector.ex
| 0.690455
| 0.415996
|
garbage_collector.ex
|
starcoder
|
# Only define the module when Ecto is available at compile time; the whole
# file compiles to nothing otherwise.
case Code.ensure_loaded(Ecto) do
  {:module, _} ->
    defmodule Surgex.DataPipe.TableSync do
      @moduledoc """
      Extracts and transforms data from one PostgreSQL table into another.
      ## Usage
      Refer to `Surgex.DataPipe` for a complete data pipe example.
      """
      import Ecto.Query
      alias Ecto.Adapters.SQL

      @doc """
      Synchronizes the given repository's table with data fetched using a specified query.
      The synchronization is done via a single SQL query by utilizing the `WITH` statement. It first
      executes `INSERT .. ON CONFLICT` (called "upserting") to insert and update new rows, followed by
      `DELETE .. WHERE` that removes old entries that didn't appear in the input query.
      Returns a tuple with a number of upserts (inserts + updates) and a number of deletions.
      """
      def call(repo, source, target, opts \\ [])

      def call(repo, source, target, opts) do
        # Target may be a raw table name or an Ecto schema module.
        table =
          case target do
            name when is_binary(name) -> name
            schema -> schema.__schema__(:source)
          end

        # Lazy defaults: only reflect on the schema when the option is absent
        # (a binary `target` requires these options to be given explicitly).
        columns =
          Keyword.get_lazy(opts, :columns, fn ->
            target.__schema__(:fields)
          end)

        conflict_target =
          Keyword.get_lazy(opts, :conflict_target, fn ->
            target.__schema__(:primary_key)
          end)

        # Source may be raw SQL, a query with an explicit select, or a
        # queryable that gets a select over `columns` added.
        query =
          case(source) do
            "SELECT " <> _ -> source
            %{select: select} when not is_nil(select) -> source
            _ -> select(source, ^columns)
          end

        default_opts = [
          on_conflict: :replace_all,
          conflict_target: conflict_target
        ]

        do_sync(repo, table, columns, query, Keyword.merge(default_opts, opts))
      end

      # Assembles and executes the single WITH upserts/deletions statement.
      defp do_sync(repo, table, columns, query, opts) do
        delete_query_sql = "id NOT IN (SELECT id FROM upserts)"
        input_scope = Keyword.get(opts, :scope)
        delete_scope = Keyword.get(opts, :delete_scope)
        scoped_query = apply_query_scope(query, input_scope)

        # The delete scope falls back to the input scope when not given.
        scoped_delete_query_sql =
          apply_delete_sql_scope(delete_query_sql, delete_scope || input_scope)

        columns_sql = list_to_sql(columns)
        {scoped_query_sql, params} = query_to_sql(repo, scoped_query)

        on_conflict =
          parse_on_conflict(
            Keyword.get(opts, :on_conflict),
            columns,
            Keyword.get(opts, :conflict_target)
          )

        sql =
          "WITH upserts AS (" <>
            "INSERT INTO #{table} (#{columns_sql}) (#{scoped_query_sql}) #{on_conflict} RETURNING id" <>
            "), deletions AS (" <>
            "DELETE FROM #{table} WHERE #{scoped_delete_query_sql} RETURNING id" <>
            ") SELECT " <> "(SELECT COUNT(id) FROM upserts), (SELECT COUNT(id) FROM deletions)"

        %{rows: [[upserts, deletions]]} = apply(repo, :query!, [sql, params])
        {upserts, deletions}
      end

      defp apply_query_scope(query, nil), do: query
      defp apply_query_scope(query = %{}, scope) when is_list(scope), do: where(query, ^scope)

      defp apply_delete_sql_scope(delete_sql, nil), do: delete_sql

      defp apply_delete_sql_scope(delete_sql, scope) when is_binary(scope) do
        delete_sql <> " AND #{scope}"
      end

      # Keyword scope: ANDs a `col = val` condition per pair.
      # NOTE(review): values are interpolated, not parameterized — safe only
      # for trusted, programmer-supplied scopes; confirm callers never pass
      # user input here.
      defp apply_delete_sql_scope(delete_sql, scope) when is_list(scope) do
        delete_sql <>
          (scope
           |> Enum.map(fn {col, val} -> " AND #{col} = #{val}" end)
           |> Enum.join())
      end

      defp parse_on_conflict(nil, _, _), do: nil

      # Translates :replace_all into an explicit DO UPDATE SET over all columns.
      defp parse_on_conflict(:replace_all, columns, conflict_target) do
        setters = Enum.map(columns, fn col -> "#{col} = excluded.#{col}" end)
        "ON CONFLICT (#{list_to_sql(conflict_target)}) DO UPDATE SET #{list_to_sql(setters)}"
      end

      # Raw SQL sources carry no bound params.
      defp query_to_sql(_repo, sql) when is_binary(sql), do: {sql, []}

      defp query_to_sql(repo, query) do
        SQL.to_sql(:all, repo, query)
      end

      defp list_to_sql(list), do: Enum.join(list, ", ")
    end

  _ ->
    nil
end
|
lib/surgex/data_pipe/table_sync.ex
| 0.792022
| 0.428712
|
table_sync.ex
|
starcoder
|
defmodule Typo.PDF.Page do
  @moduledoc """
  Page size definitions.
  """

  # Portrait dimensions ({width, height} in points) for every supported
  # page size atom.
  @dims %{
    a0: {2380, 3368},
    a1: {1684, 2380},
    a2: {1190, 1684},
    a3: {842, 1190},
    a4: {595, 842},
    a5: {421, 595},
    a6: {297, 421},
    a7: {210, 297},
    a8: {148, 210},
    a9: {105, 148},
    b0: {2836, 4008},
    b1: {2004, 2836},
    b2: {1418, 2004},
    b3: {1002, 1418},
    b4: {709, 1002},
    b5: {501, 709},
    b6: {355, 501},
    b7: {250, 355},
    b8: {178, 250},
    b9: {125, 178},
    b10: {89, 125},
    c5e: {462, 649},
    comm10e: {298, 683},
    dle: {312, 624},
    executive: {542, 720},
    folio: {595, 935},
    ledger: {1224, 792},
    legal: {612, 1008},
    letter: {612, 792},
    tabloid: {792, 1224}
  }

  @doc """
  Given a page size 2-tuple returns the last two values possibly transposed
  so that the page width is the longest measurement.
  """
  @spec landscape({number(), number()}) :: {number(), number()}
  def landscape({width, height}) when is_number(width) and is_number(height) do
    if height > width, do: {height, width}, else: {width, height}
  end

  # Looks up the portrait dimensions for a page size atom; any unknown
  # key yields the error tuple.
  @spec page_dims(Typo.page_size()) :: {number(), number()} | {:error, :invalid_page_size}
  defp page_dims(size), do: Map.get(@dims, size, {:error, :invalid_page_size})

  @doc """
  Converts page size atom into a 2-tuple ({width, height}).
  Returns `{:error, :invalid_page_size}` if page size atom not found.
  """
  @spec page_size(Typo.page_size(), :portrait | :landscape) ::
          {number(), number()} | {:error, :invalid_page_size}
  def page_size(size, orientation)

  def page_size(size, :portrait) when is_atom(size), do: page_dims(size)

  def page_size(size, :landscape) when is_atom(size) do
    # Swap the dimensions only on a successful lookup; the error tuple
    # fails the guard and is passed through unchanged.
    case page_dims(size) do
      {width, height} when is_number(width) and is_number(height) -> {height, width}
      error -> error
    end
  end

  @doc """
  Given a page size 2-tuple returns the last two values possibly transposed
  so that the page height is the longest measurement.
  """
  @spec portrait({number(), number()}) :: {number(), number()}
  def portrait({width, height}) when is_number(width) and is_number(height) do
    if height < width, do: {height, width}, else: {width, height}
  end
end
|
lib/typo/pdf/page.ex
| 0.888566
| 0.503357
|
page.ex
|
starcoder
|
defmodule StarkInfra.IssuingTransaction do
alias __MODULE__, as: IssuingTransaction
alias StarkInfra.Utils.Rest
alias StarkInfra.Utils.Check
alias StarkInfra.User.Project
alias StarkInfra.User.Organization
alias StarkInfra.Error
@moduledoc """
# IssuingTransaction struct
"""
@doc """
The IssuingTransaction structs created in your Workspace to represent each balance shift.
## Attributes (return-only):
- `:id` [string]: unique id returned when IssuingTransaction is created. ex: "5656565656565656"
- `:amount` [integer]: IssuingTransaction value in cents. ex: 1234 (= R$ 12.34)
- `:balance` [integer]: balance amount of the Workspace at the instant of the Transaction in cents. ex: 200 (= R$ 2.00)
- `:description` [string]: IssuingTransaction description. ex: "Buying food"
- `:source` [string]: source of the transaction. ex: "issuing-purchase/5656565656565656"
- `:tags` [string]: list of strings inherited from the source resource. ex: ["tony", "stark"]
- `:created` [DateTime]: creation datetime for the IssuingTransaction. ex: ~U[2020-03-10 10:30:0:0]
"""
@enforce_keys [
:amount,
:description,
:balance,
:source,
:tags,
:id,
:created
]
defstruct [
:amount,
:description,
:balance,
:source,
:tags,
:id,
:created
]
@type t() :: %__MODULE__{}
@doc """
Receive a single IssuingTransaction struct previously created in the Stark Infra API by its id
## Options:
- `:id` [string]: struct unique id. ex: "5656565656565656"
## Options:
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- IssuingTransaction struct with updated attributes
"""
@spec get(
id: binary,
user: Organization.t() | Project.t() | nil
) ::
{:ok, IssuingTransaction.t()} |
{:error, [Error.t()]}
def get(id, options \\ []) do
Rest.get_id(
resource(),
id,
options
)
end
@doc """
Same as get(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec get!(
id: binary,
user: Organization.t() | Project.t() | nil
) :: any
def get!(id, options \\ []) do
Rest.get_id!(
resource(),
id,
options
)
end
@doc """
Receive a stream of IssuingTransaction structs previously created in the Stark Infra API
## Options:
- `:tags` [list of strings, default nil]: tags to filter retrieved structs. ex: ["tony", "stark"]
- `:external_ids` [list of strings, default []]: external IDs. ex: ["5656565656565656", "4545454545454545"]
- `:after` [Date or string, default nil]: date filter for structs created only after specified date. ex: ~D[2020-03-25]
- `:before` [Date or string, default nil]: date filter for structs created only before specified date. ex: ~D[2020-03-25]
- `:status` [string, default nil]: filter for status of retrieved structs. ex: "approved", "canceled", "denied", "confirmed" or "voided"
- `:ids` [list of strings, default [], default nil]: purchase IDs
- `:limit` [integer, default nil]: maximum number of structs to be retrieved. Unlimited if nil. ex: 35
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- stream of IssuingTransaction structs with updated attributes
"""
@spec query(
tags: [binary] | nil,
external_ids: [binary] | nil,
after: Date.t() | binary | nil,
before: Date.t() | binary | nil,
status: binary | nil,
ids: [binary] | nil,
limit: integer | nil,
user: Organization.t() | Project.t() | nil
) ::
{:ok, {binary, [IssuingTransaction.t()]}} |
{:error, [Error.t()]}
def query(options \\ []) do
Rest.get_list(
resource(),
options
)
end
@doc """
Same as query(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec query!(
tags: [binary] | nil,
external_ids: [binary] | nil,
after: Date.t() | binary | nil,
before: Date.t() | binary | nil,
status: binary | nil,
ids: [binary] | nil,
limit: integer | nil,
user: Organization.t() | Project.t() | nil
) :: any
def query!(options \\ []) do
Rest.get_list!(
resource(),
options
)
end
@doc """
Receive a list of IssuingTransaction structs previously created in the Stark Infra API and the cursor to the next page.
## Options:
- `:tags` [list of strings, default nil]: tags to filter retrieved structs. ex: ["tony", "stark"]
- `:external_ids` [list of strings, default []]: external IDs. ex: ["5656565656565656", "4545454545454545"]
- `:after` [Date or string, default nil]: date filter for structs created only after specified date. ex: ~D[2020-03-25]
- `:before` [Date or string, default nil]: date filter for structs created only before specified date. ex: ~D[2020-03-25]
- `:status` [string, default nil]: filter for status of retrieved structs. ex: "approved", "canceled", "denied", "confirmed" or "voided"
- `:ids` [list of strings, default [], default nil]: purchase IDs
- `:limit` [integer, default 100]: maximum number of structs to be retrieved. Unlimited if nil. ex: 35
- `:cursor` [string, default nil]: cursor returned on the previous page function call
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- list of IssuingTransaction structs with updated attributes
- cursor to retrieve the next page of IssuingPurchase structs
"""
@spec page(
tags: [binary] | nil,
external_ids: [binary] | nil,
after: Date.t() | binary | nil,
before: Date.t() | binary | nil,
status: binary | nil,
ids: [binary] | nil,
limit: integer | nil,
cursor: binary | nil,
user: Organization.t() | Project.t() | nil
) ::
{:ok, {binary, [IssuingTransaction.t()]}} |
{:error, [Error.t()]}
def page(options \\ []) do
Rest.get_page(
resource(),
options
)
end
@doc """
Same as page(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec page!(
        tags: [binary] | nil,
        external_ids: [binary] | nil,
        after: Date.t() | binary | nil,
        before: Date.t() | binary | nil,
        status: binary | nil,
        ids: [binary] | nil,
        limit: integer | nil,
        cursor: binary | nil,
        user: Organization.t() | Project.t() | nil
      ) :: any
# Raising variant of page/1; delegates to the bang helper in Rest.
def page!(opts \\ []), do: Rest.get_page!(resource(), opts)
@doc false
# Resource descriptor used by the Rest helpers: the API resource name paired
# with the function that converts a decoded JSON map into a struct.
def resource, do: {"IssuingTransaction", &resource_maker/1}
@doc false
# Builds an IssuingTransaction struct from a decoded API payload.
# `json` is accessed with atom keys via Access (`json[:id]`), so missing
# keys simply yield nil fields rather than raising.
def resource_maker(json) do
%IssuingTransaction{
id: json[:id],
tags: json[:tags],
amount: json[:amount],
source: json[:source],
balance: json[:balance],
description: json[:description],
# :created is run through the project's Check.datetime/1 — presumably an
# ISO-8601 string parsed into a DateTime; confirm against Check's docs.
created: json[:created] |> Check.datetime()
}
end
end
|
lib/issuing_transaction/issuing_transaction.ex
| 0.904413
| 0.630628
|
issuing_transaction.ex
|
starcoder
|
defmodule DarknetToOnnx.ParseDarknet do
@moduledoc """
The Darknet parser (from: https://github.com/jkjung-avt/tensorrt_demos/blob/master/yolo/yolo_to_onnx.py)

Parses a Darknet (YOLO) `.cfg` file into a list of per-layer parameter maps,
holding the result in an Agent process registered under this module's name.
"""
use Agent, restart: :transient
@doc """
Initializes a DarkNetParser agent by parsing the cfg file at `:cfg_file_path`.

Keyword argument:
supported_layers -- a string list of supported layers in DarkNet naming convention;
parsing raises if the cfg file contains a layer type outside this list.
"""
def start_link(
opts,
args \\ [
"net",
"convolutional",
"maxpool",
"shortcut",
"route",
"upsample",
"yolo"
]
) do
cfg_file_path = Keyword.fetch!(opts, :cfg_file_path)
# Agent state: parsed layer list, sorted section keys, detected output
# convolution layer names, and the allow-list of layer types.
initial_state = %{
parse_result: [],
keys: [],
output_convs: [],
supported_layers: args
}
# The cfg file is parsed inside the Agent start function, so a parse error
# fails start_link/2 rather than a later call.
Agent.start_link(fn -> parse_cfg_file(initial_state, cfg_file_path) end, name: __MODULE__)
end
@doc """
Identifies the parameters contained in one line of the cfg file and returns
them in the required format for each parameter type, e.g. as a list, an int or a float.
Keyword argument:
param_line -- one parsed {key, value} pair within a layer block
"""
def parse_params(params, skip_params \\ ["steps", "scales", "mask"]) do
{param_type, param_value_raw} = params
# Strip all whitespace before classifying the value.
param_value_raw = param_value_raw |> String.replace(~r/\s+/, "")
cond do
# Skipped parameters are returned with a nil type so callers can ignore them.
skip_params != nil and param_type in skip_params ->
[nil, param_value_raw]
# "layers" and "anchors" hold comma-separated integer lists.
param_type == "layers" or param_type == "anchors" ->
[
param_type,
param_value_raw
|> String.split(",")
|> Enum.map(fn x -> String.to_integer(String.trim(x)) end)
]
!String.match?(param_value_raw, ~r/^[[:alpha:]]+$/u) and String.match?(param_value_raw, ~r/\./) ->
# It is a float number
# NOTE(review): a value with more than one "." (e.g. "1.2.3") raises a
# MatchError here, since String.split/2 splits on every dot.
[zero, _] = param_value_raw |> String.split(".")
if zero === "" do
# Leading-dot floats like ".5" are prefixed with "0" and converted.
[param_type, String.to_float("0" <> param_value_raw)]
else
# NOTE(review): values like "1.5" are returned as raw strings, not
# floats — only ".5"-style values are converted. Confirm intended.
[param_type, param_value_raw]
end
!String.match?(param_value_raw, ~r/^[[:alpha:]]+$/u) ->
# Otherwise it is integer
[param_type, String.to_integer(param_value_raw)]
# Purely alphabetic values (e.g. activation names) stay as strings.
true ->
[param_type, param_value_raw]
end
end
@doc """
Takes the yolov?.cfg file and parses it layer by layer,
appending each layer's parameters as a map to parse_result.
Keyword argument:
cfg_file_path
"""
def parse_cfg_file(state, cfg_file_path) do
{:ok, parse_result} = ConfigParser.parse_file(cfg_file_path, overwrite_sections: false)
keys = Map.keys(parse_result) |> Enum.sort()
parse_result =
Enum.map(parse_result, fn {name, datamap} ->
# Section names are expected to look like "<index>_<type>", e.g.
# "000_net" — the type is the part after the underscore.
[_, new_type] = String.split(name, "_")
if new_type not in state.supported_layers do
raise new_type <> " layer not supported!"
end
%{
:name => name,
:data =>
(Enum.map(datamap, fn {k, v} ->
[_ptype, pvalue] = DarknetToOnnx.ParseDarknet.parse_params({k, v})
{k, pvalue}
end) ++ [{"type", new_type}])
|> Map.new()
}
end)
|> Enum.sort_by(& &1.name)
# |> Enum.sort_by(fn e -> e.name end)
%{state | :parse_result => parse_result, :keys => keys}
end
# Returns the full Agent state (parse_result, keys, output_convs, ...).
def get_state() do
Agent.get(__MODULE__, fn state -> state end)
end
# True when the model uses a PAN architecture, i.e. an upsample layer
# appears before the first yolo layer.
def is_pan_arch?(state) do
yolos = Enum.filter(state.keys, fn k -> Regex.run(~r{.*yolo}, k) end)
upsamples = Enum.filter(state.keys, fn k -> Regex.run(~r{.*upsample}, k) end)
yolo_count = Enum.count(yolos)
upsample_count = Enum.count(upsamples)
# NOTE(review): the value of this try/rescue is discarded and the
# expression inside cannot raise — this appears to be dead code; the
# function's result is always the comparison below.
try do
(yolo_count in [2, 3, 4] and upsample_count == yolo_count - 1) or upsample_count == 0
rescue
_e ->
# Logger.error(Exception.format(:error, e, __STACKTRACE__))
nil
end
# the model is with PAN if an upsample layer appears before the 1st yolo
# NOTE(review): lexicographic comparison of the index strings — correct
# only because section indices are zero-padded to a fixed width ("000_net").
# Raises on models with no yolo or no upsample layers (hd/1 on []).
[yindex, _] = String.split(hd(yolos), "_")
[uindex, _] = String.split(hd(upsamples), "_")
uindex < yindex
end
@doc """
Find output conv layer names from layer configs.
The output conv layers are those conv layers immediately preceding
the yolo layers.
# Arguments
state: Agent state containing the parsed layer configs and sorted keys.
"""
def get_output_convs(state) do
yolos = Enum.filter(state.keys, fn k -> Regex.run(~r{.*yolo}, k) end)
# convs = Enum.filter(state.keys, fn(k) -> Regex.run(~r{.*convolutional}, k) end)
state = %{state | :output_convs => []}
output_convs = inner_get_output_convs(state, yolos)
# Side effect: caches the result back into the Agent's state.
Agent.update(__MODULE__, fn _s -> %{state | output_convs: output_convs} end)
output_convs
end
# For each yolo layer, returns the name of the convolutional layer directly
# before it ("_lgx" suffix for logistic activation), or the yolo key itself
# when the preceding layer is not convolutional.
defp inner_get_output_convs(state, yolos) when yolos != [] do
Enum.map(yolos, fn y ->
[yindex, _] = String.split(y, "_")
layer_to_find =
(String.to_integer(yindex) - 1)
|> Integer.to_string()
|> String.pad_leading(3, "0")
previous_layer = layer_to_find <> "_convolutional"
if Enum.member?(state.keys, previous_layer) do
# NOTE(review): assumes the numeric prefix equals the element's index
# in the name-sorted parse_result list — confirm this invariant.
%{name: _, data: %{"activation" => activation}} = Enum.at(state.parse_result, String.to_integer(layer_to_find))
case activation do
"linear" -> previous_layer
"logistic" -> previous_layer <> "_lgx"
_ -> raise("Unexpected activation: " <> activation)
end
else
y
end
end)
end
@doc """
Find number of output classes of the yolo model.
"""
def get_category_num(state) do
# First layer that defines "classes" (already an integer via parse_params/2).
%{data: %{"classes" => classes}, name: _} = Enum.find(state.parse_result, fn x -> x.data["classes"] end)
classes
end
@doc """
Find input height and width of the yolo model from layer configs.
"""
def get_h_and_w(state) do
# The "000_net" section holds the network-level input dimensions.
%{data: %{"height" => h, "width" => w}, name: _} = Enum.find(state.parse_result, fn x -> x.name == "000_net" end)
[h, w]
end
end
|
lib/darknet_to_onnx/parser.ex
| 0.851058
| 0.461199
|
parser.ex
|
starcoder
|
defmodule Astra.Rest do
  alias Astra.Rest.Http

  @moduledoc """
  `Astra.Rest` provides functions to access the public methods of the REST interface for databases hosted on https://astra.datastax.com.
  Astra's REST interface is implemented using the stargate project, https://stargate.io. Swagger docs for this interface are available here https://docs.astra.datastax.com/reference#keyspaces-2.
  If required, raw access to the Astra REST api can be obtained through the `Astra.Rest.Http` module.
  """

  # NOTE: the original @specs used bare `String`/`Map`/`Atom`, which in a
  # typespec denote the module atoms rather than the string/map/atom types.
  # They have been corrected to `String.t()`, `map()` and `atom()` below;
  # runtime behavior is unchanged.

  @doc """
  `get_row` retrieves one or more rows based on the `keyspace`, `table` and `primary_key`. Primary keys that span multiple fields
  should be delimited with a `\\` ex. "123\\tuesday".
  ## Examples
  ```
  > Astra.Rest.get_row("test", "thing", "83b8d85d-bd33-4650-8b9d-b43354187114")
  {:ok, [%{id: "83b8d85d-bd33-4650-8b9d-b43354187114", name: "test row"}]}
  ```
  """
  @spec get_row(String.t(), String.t(), String.t()) :: {atom(), list(map())}
  def get_row(keyspace, table, primary_key),
    do: Http.get("#{keyspace}/#{table}/#{primary_key}") |> Http.parse_response()

  @doc """
  Add a row. All fields in the row are optional except for fields defined in the `PRIMARY KEY` of the table definition.
  ## Parameters
  - keyspace: the keyspace containing the target table
  - table: the table containing the entity we are retrieving
  - entity: a Map of the attributes of the row we are adding
  ex.
  ```
  %{
    id: "83b8d85d-bd33-4650-8b9d-b43354187114",
    name: "test row"
  }
  ```
  ## Examples
  ```
  > new_row = %{id: "83b8d85d-bd33-4650-8b9d-b43354187114", name: "test row"}
  > Astra.Rest.add_row("test", "thing", new_row)
  {:ok, %{id: "83b8d85d-bd33-4650-8b9d-b43354187114"}}
  ```
  """
  @spec add_row(String.t(), String.t(), map()) :: {atom(), map()}
  def add_row(keyspace, table, entity),
    do: Http.post("#{keyspace}/#{table}", Http.json!(entity)) |> Http.parse_response()

  @doc """
  Update part of a row, only fields provided in the entity will be updated.
  ## Parameters
  - keyspace: the keyspace containing the target table
  - table: the table containing the entity we are retrieving
  - key: a primary key, if it contains multiple fields they should be separated with `\\`
  - entity: a Map of the attributes of the row we are updating
  ex.
  ```
  %{
    id: "83b8d85d-bd33-4650-8b9d-b43354187114",
    name: "test row"
  }
  ```
  ## Examples
  ```
  > new_row = %{id: "83b8d85d-bd33-4650-8b9d-b43354187114", name: "test row"}
  > Astra.Rest.add_row("test", "thing", new_row)
  {:ok, %{id: "83b8d85d-bd33-4650-8b9d-b43354187114"}}
  ```
  """
  @spec update_partial_row(String.t(), String.t(), String.t(), map()) :: {atom(), map()}
  def update_partial_row(keyspace, table, key, changes),
    do: Http.patch("#{keyspace}/#{table}/#{key}", Http.json!(changes)) |> Http.parse_response()

  @doc """
  Similar to `Astra.Rest.add_row/3`, the key needs to be provided explicitly.
  ## Parameters
  - keyspace: the keyspace containing the target table
  - table: the table containing the entity we are retrieving
  - key: a primary key, if it contains multiple fields they should be separated with `\\`
  - entity: a Map of the attributes of the row we are adding
  ex.
  ```
  %{
    id: "83b8d85d-bd33-4650-8b9d-b43354187114",
    name: "test row"
  }
  ```
  ## Examples
  ```
  > new_row = %{name: "test row"}
  > Astra.Rest.replace_row("test", "thing", "83b8d85d-bd33-4650-8b9d-b43354187115", new_row)
  {:ok, %{name: "test row"}}
  > Astra.Rest.get_row("test", "thing", "83b8d85d-bd33-4650-8b9d-b43354187115")
  {:ok, [%{id: "83b8d85d-bd33-4650-8b9d-b43354187115", name: "test row"}]}
  ```
  """
  @spec replace_row(String.t(), String.t(), String.t(), map()) :: {atom(), map()}
  def replace_row(keyspace, table, key, entity),
    do: Http.put("#{keyspace}/#{table}/#{key}", Http.json!(entity)) |> Http.parse_response()

  @doc """
  Remove a row.
  ## Parameters
  - keyspace: the keyspace containing the target table
  - table: the table containing the entity we are retrieving
  - key: a primary key, if it contains multiple fields they should be separated with `\\`
  ## Examples
  ```
  > new_row = %{id: "83b8d85d-bd33-4650-8b9d-b43354187114", name: "test row"}
  > Astra.Rest.delete_row("test", "thing", "83b8d85d-bd33-4650-8b9d-b43354187114")
  {:ok, []}
  ```
  """
  # delete row
  @spec delete_row(String.t(), String.t(), String.t()) :: {atom(), []}
  def delete_row(keyspace, table, key),
    do: Http.delete("#{keyspace}/#{table}/#{key}") |> Http.parse_response()

  # search a table
  # CREATE CUSTOM INDEX things_name_idx ON thing (name) USING 'StorageAttachedIndex' WITH OPTIONS = {'normalize': 'true', 'case_sensitive': 'false'};
  @doc """
  Search for rows in a table. The following operators are available for the query: `$eq`, `$lt`, `$lte`, `$gt`, `$gte`, `$ne`, and `$exists`.
  Please note that some restrictions exist for searches:
  1. Search cannot be on a `PRIMARY KEY` field, unless a composite primary key is being used.
  2. Search fields will require some form of secondary index. SAI is usually the best choice https://docs.astra.datastax.com/docs/using-storage-attached-indexing-sai
  Example of creating an SAI index on a table:
  ```
  CREATE TABLE thing (
    id text PRIMARY KEY,
    name text
  )
  CREATE CUSTOM INDEX things_name_idx
  ON thing (name) USING 'StorageAttachedIndex'
  WITH OPTIONS = {'normalize': 'true', 'case_sensitive': 'false'};
  ```
  ## Parameters
  - keyspace: the keyspace containing the target table
  - table: the table containing the entity we are retrieving
  - query: the search query for the table. ex. `%{name: %{"$in": ["red", "blue"]}}`
  ## Examples
  ```
  > Astra.Rest.search_table("test", "thing", %{name: %{"$eq": "test row"}})
  ```
  """
  @spec search_table(String.t(), String.t(), map()) :: {atom(), []}
  def search_table(keyspace, table, query),
    do: Http.get("#{keyspace}/#{table}", [], params: %{where: Http.json!(query)}) |> Http.parse_response()
end
|
lib/rest/rest.ex
| 0.896477
| 0.881207
|
rest.ex
|
starcoder
|
defmodule Curvy.Key do
@moduledoc """
Module used to create ECDSA keypairs and convert to private and public key
binaries.
"""
alias Curvy.{Curve, Point}
defstruct crv: :secp256k1,
point: %Point{},
privkey: nil,
compressed: true
@typedoc """
ECDSA Keypair.
Always contains the `t:Point.t` coordinates and optionally a private key binary.
"""
@type t :: %__MODULE__{
crv: atom,
point: Point.t,
privkey: binary | nil,
compressed: boolean
}
# secp256k1 curve parameters (p, etc.) from the project's Curve module.
@crv Curve.secp256k1
@doc """
Creates a new random ECDSA keypair.
"""
@spec generate(keyword) :: t
def generate(opts \\ []) do
compressed = Keyword.get(opts, :compressed, true)
{pubkey, privkey} = :crypto.generate_key(:ecdh, :secp256k1)
# :crypto returns the public key in uncompressed SEC1 form; the leading
# prefix byte (0x04) is skipped and x/y extracted as 256-bit integers.
<<_::integer, x::big-size(256), y::big-size(256)>> = pubkey
%__MODULE__{
point: %Point{x: x, y: y},
privkey: privkey,
compressed: compressed
}
end
@doc """
Converts the given private key binary to a [`ECDSA Keypair`](`t:t`).
"""
@spec from_privkey(binary, keyword) :: t
def from_privkey(<<privkey::binary-size(32)>>, opts \\ []) do
compressed = Keyword.get(opts, :compressed, true)
# Derives the matching public point; the given privkey is kept as-is.
{pubkey, _privkey} = :crypto.generate_key(:ecdh, :secp256k1, privkey)
<<_::integer, x::big-size(256), y::big-size(256)>> = pubkey
%__MODULE__{
point: %Point{x: x, y: y},
privkey: privkey,
compressed: compressed
}
end
@doc """
Converts the given public key binary to a [`ECDSA Keypair`](`t:t`) struct
without a private key.
"""
@spec from_pubkey(binary) :: t
def from_pubkey(pubkey)
# 65-byte uncompressed key: prefix byte followed by both coordinates.
def from_pubkey(<<_::integer, x::big-size(256), y::big-size(256)>>),
do: %__MODULE__{point: %Point{x: x, y: y}, compressed: false}
# 33-byte compressed key: recover y from x on the secp256k1 curve
# (y^2 = x^3 + 7 mod p), taking the square root via the (p + 1) / 4
# exponent — valid because secp256k1's p ≡ 3 (mod 4).
def from_pubkey(<<prefix::integer, x::big-size(256)>>) do
y = x
|> :crypto.mod_pow(3, @crv.p)
|> :binary.decode_unsigned()
|> Kernel.+(7)
|> rem(@crv.p)
|> :crypto.mod_pow(Integer.floor_div(@crv.p + 1, 4), @crv.p)
|> :binary.decode_unsigned()
# Prefix parity (0x02 even, 0x03 odd) selects which of the two roots.
y = if rem(y, 2) != rem(prefix, 2), do: @crv.p - y, else: y
%__MODULE__{point: %Point{x: x, y: y}, compressed: true}
end
@doc """
Converts the given [`Point`](`Point:t`) to a [`ECDSA Keypair`](`t:t`) struct
without a private key.
"""
@spec from_point(Point.t, keyword) :: t
def from_point(%Point{} = point, opts \\ []) do
compressed = Keyword.get(opts, :compressed, true)
%__MODULE__{point: point, compressed: compressed}
end
@doc """
Returns the 32 byte private key binary from the given [`ECDSA Keypair`](`t:t`).
"""
def to_privkey(%__MODULE__{privkey: privkey}), do: privkey
@doc """
Returns the public key binary from the given [`ECDSA Keypair`](`t:t`) in either
compressed or uncompressed form.
## Accepted options
* `:compressed` - Return a 33 byte compressed public key. Default is the keypair's own `:compressed` flag.
"""
def to_pubkey(%__MODULE__{point: %Point{x: x, y: y}, compressed: compressed}, opts \\ []) do
case Keyword.get(opts, :compressed, compressed) do
true ->
# Compressed SEC1 form: parity prefix plus x only.
prefix = if rem(y, 2) == 0, do: 0x02, else: 0x03
<<prefix::integer, x::big-size(256)>>
false ->
# Uncompressed SEC1 form: 0x04 prefix plus both coordinates.
<<4, x::big-size(256), y::big-size(256)>>
end
end
end
|
lib/curvy/key.ex
| 0.781831
| 0.553988
|
key.ex
|
starcoder
|
defprotocol Buildable do
@moduledoc """
Documentation for `Buildable`.
"""
@type t :: term()
@type element :: term()
@type options :: keyword()
@type position :: :first | :last
# Module attributes every implementation must define; enforcement raises
# Buildable.CompileError elsewhere when one is missing.
@required_attributes [
:extract_position,
:insert_position,
:into_position,
:reversible?
]
@doc false
# Inside defprotocol, plain `def` declares a protocol function, so
# Kernel.def is called explicitly to define an ordinary function here.
Kernel.def required_attributes() do
@required_attributes
end
defmodule Behaviour do
@moduledoc """
A module that extends the protocol `Buildable` defining callbacks where the first argument is not a buildable.
"""
@doc """
Defines the default options for the implementation.
Option can be:
- `extract_position`: where to extract the element from the buildable. Accepted values are `:first`, and `:last`.
- `insert_position`: where to insert a new element in the buildable. Accepted values are `:first`, and `:last`.
- `into_position`: where to insert elements collected into the buildable. Accepted values are `:first`, and `:last`.
- `reversible?`: whether the buildable can be reversed. Accepted values are `true`, and `false`.
"""
@callback default(:extract_position) :: Buildable.position()
@callback default(:insert_position) :: Buildable.position()
@callback default(:into_position) :: Buildable.position()
@callback default(:reversible?) :: boolean()
@callback empty() :: Buildable.t()
@callback empty(Buildable.options()) :: Buildable.t()
@callback new(collection :: Buildable.t() | Range.t()) :: Buildable.t()
@callback new(collection :: Buildable.t() | Range.t(), Buildable.options()) :: Buildable.t()
# FIX THIS, REPORT TO ELIXIR: , to_empty: 1
@optional_callbacks empty: 0, new: 1
end
# Removes one element from the buildable; :error when empty.
@spec extract(t()) ::
{:ok, element(), updated_buildable :: t()} | :error
def extract(buildable)
@spec extract(t(), position()) ::
{:ok, element(), updated_buildable :: t()} | :error
def extract(buildable, position)
# Inserts a term at the implementation's default (or given) position.
@spec insert(t(), term) :: updated_buildable :: t()
def insert(buildable, term)
@spec insert(t(), term, position()) :: updated_buildable :: t()
def insert(buildable, term, position)
# Collectable-style entry point: initial accumulator plus collector fun.
@spec into(t) ::
{initial_acc :: term, collector :: (term, Buildable.Collectable.command() -> t | term)}
def into(buildable)
# Peeks at an element without removing it.
@spec peek(t()) :: {:ok, element()} | :error
def peek(buildable)
@spec peek(t(), position) :: {:ok, element()} | :error
def peek(buildable, position)
@spec reverse(buildable) :: updated_buildable | buildable
when buildable: t(), updated_buildable: t()
def reverse(buildable)
@spec reduce(t(), Buildable.Reducible.acc(), Buildable.Reducible.reducer()) ::
Buildable.Reducible.result()
def reduce(buildable, acc, reducer_function)
# Returns an empty buildable of the same kind, honoring options.
@spec to_empty(t(), options) :: t()
def to_empty(buildable, options \\ [])
# NOTE(review): marks a subset of the protocol functions as optional for
# implementations — confirm this interacts with defprotocol as intended.
@optional_callbacks extract: 1, insert: 2, peek: 1, peek: 2
end
defmodule Buildable.CompileError do
  @moduledoc """
  Raised at compile time when a module invokes `use` on a `Buildable` helper
  without first defining the required module attributes.
  """

  defexception [:file, :line, :attributes, :module, :caller_module]

  @impl true
  def message(%{
        file: file,
        line: line,
        attributes: attrs,
        caller_module: caller,
        module: mod
      }) do
    location = Exception.format_file_line(Path.relative_to_cwd(file), line)
    listed = Enum.map(attrs, &"@#{&1}")

    location <>
      describe_attributes(listed) <>
      " required to be defined in #{inspect(caller)} before calling \"use #{inspect(mod)}\""
  end

  # Singular vs plural phrasing for the missing-attribute list.
  defp describe_attributes([single]) do
    "attribute #{single} is"
  end

  defp describe_attributes([_ | _] = many) do
    "attributes #{Enum.join(many, ", ")} are"
  end
end
|
lib/buildable.ex
| 0.788827
| 0.558297
|
buildable.ex
|
starcoder
|
defmodule Sanbase.Signal.History.PricesHistory do
@moduledoc """
Implementations of historical trigger points for price_percent_change and
price_absolute_change triggers. Historical prices are bucketed at `1 hour` intervals and go
`90 days` back.
"""
import Sanbase.Signal.OperationEvaluation
import Sanbase.Signal.History.Utils
alias Sanbase.Signal.Trigger.{
PriceAbsoluteChangeSettings,
PricePercentChangeSettings
}
require Logger
@historical_days_from 90
@historical_days_interval "1h"
@type historical_trigger_points_type :: %{
datetime: %DateTime{},
price: float(),
triggered?: boolean()
}
# Fetches USD prices for the slug over the last 90 days at 1h resolution,
# returned as two parallel lists: {:ok, prices, datetimes}.
def get_prices(%{target: %{slug: slug}}) when is_binary(slug) do
with {from, to, interval} <- get_timeseries_params(),
{:ok, data} when is_list(data) and data != [] <-
Sanbase.Price.timeseries_data(slug, from, to, interval) do
prices = Enum.map(data, & &1.price_usd)
datetimes = Enum.map(data, & &1.datetime)
{:ok, prices, datetimes}
else
# NOTE(review): an upstream {:error, reason} is wrapped again here,
# producing {:error, {:error, reason}} — confirm callers expect this.
error -> {:error, error}
end
end
# Zips the parallel lists into point maps. Calculations are either bare
# booleans or {percent_change, triggered?} tuples; the tuple form adds a
# :percent_change key to the point.
def merge_change_calculations_into_points(datetimes, prices, change_calculations) do
Enum.zip([datetimes, prices, change_calculations])
|> Enum.map(fn
{dt, price, {percent_change, triggered?}} ->
%{datetime: dt, price: price, triggered?: triggered?, percent_change: percent_change}
{dt, price, triggered?} ->
%{datetime: dt, price: price, triggered?: triggered?}
end)
end
# {from, to, interval} covering the last @historical_days_from days.
defp get_timeseries_params() do
now = Timex.now()
interval = @historical_days_interval
from = Timex.shift(now, days: -@historical_days_from)
to = now
{from, to, interval}
end
defimpl Sanbase.Signal.History, for: PriceAbsoluteChangeSettings do
alias Sanbase.Signal.History.PricesHistory
@spec historical_trigger_points(%PriceAbsoluteChangeSettings{}, String.t()) ::
{:ok, list(PricesHistory.historical_trigger_points_type())} | {:error, String.t()}
def historical_trigger_points(
%PriceAbsoluteChangeSettings{target: %{slug: target}} = settings,
cooldown
)
when is_binary(target) do
{:ok, prices, datetimes} = PricesHistory.get_prices(settings)
cooldown_in_hours = Sanbase.DateTimeUtils.str_to_hours(cooldown)
# Walk the hourly prices keeping a cooldown counter: 0 means armed —
# a trigger emits true and starts the cooldown; while the counter is
# positive, points emit false and the counter decrements. The list is
# built by prepending and reversed once afterwards.
{absolute_price_calculations, _} =
prices
|> Enum.reduce({[], 0}, fn
price, {accumulated_calculations, 0} ->
if operation_triggered?(price, settings.operation) do
{[true | accumulated_calculations], cooldown_in_hours}
else
{[false | accumulated_calculations], 0}
end
_price, {accumulated_calculations, cooldown_left} ->
{[false | accumulated_calculations], cooldown_left - 1}
end)
absolute_price_calculations = absolute_price_calculations |> Enum.reverse()
points =
PricesHistory.merge_change_calculations_into_points(
datetimes,
prices,
absolute_price_calculations
)
{:ok, points}
end
end
defimpl Sanbase.Signal.History, for: PricePercentChangeSettings do
alias Sanbase.Signal.History.PricesHistory
# Minimal time window is set to 2 hours. That is due to interval buckets being 1 hour each.
@minimal_time_window 2
@spec historical_trigger_points(%PricePercentChangeSettings{}, String.t()) ::
{:ok, list(PricesHistory.historical_trigger_points_type())} | {:error, String.t()}
def historical_trigger_points(
%PricePercentChangeSettings{target: %{slug: target}} = settings,
cooldown
)
when is_binary(target) do
{:ok, prices, datetimes} = PricesHistory.get_prices(settings)
time_window_in_hours = time_window_in_hours(settings.time_window)
cooldown_in_hours = Sanbase.DateTimeUtils.str_to_hours(cooldown)
# Sliding windows of time_window hours (step 1): each window is reduced
# to its {first, last} price pair before evaluating the percent change
# via the imported helper.
percent_change_calculations =
prices
|> Enum.chunk_every(time_window_in_hours, 1, :discard)
|> Enum.map(fn chunk -> {List.first(chunk), List.last(chunk)} end)
|> percent_change_calculations_with_cooldown(
settings.operation,
cooldown_in_hours
)
# The first (time_window - 1) points have no full window behind them;
# pad with neutral {0.0, false} entries so lengths line up with datetimes.
empty_calculations = Stream.cycle([{0.0, false}]) |> Enum.take(time_window_in_hours - 1)
points =
PricesHistory.merge_change_calculations_into_points(
datetimes,
prices,
empty_calculations ++ percent_change_calculations
)
{:ok, points}
end
# Window length in hours, clamped to the 2-hour minimum.
defp time_window_in_hours(time_window) do
Sanbase.DateTimeUtils.str_to_hours(time_window)
|> max(@minimal_time_window)
end
end
end
|
lib/sanbase/signals/history/prices_history.ex
| 0.848863
| 0.58519
|
prices_history.ex
|
starcoder
|
defmodule LiveProps.States do
@moduledoc """
Functions for working with states. These will be imported
whenever you `use` LiveProps.LiveComponent or LiveProps.LiveView
"""
@doc """
Define state of given name and type. Returns :ok.
Types can be any atom and are just for documentation purposes.
### Options:
* `:default` - A default value that will be assigned on mount.
* `:compute` - 1-arity function that takes the socket assigns as an argument
and returns the value to be assigned. Can be an atom of the name
of a function in your component or a remote function call like `&MyModule.compute/1`.
If you use an atom, the referenced function must be **public**.
* `:after_connect` - boolean. Only applies in LiveViews. Setting this to true
will cause the initial value to be computed asynchronously after the socket connects.
"""
@spec state(name :: atom(), type :: atom(), opts :: list()) :: :ok
# These are macros so that __MODULE__ in the quoted code expands to the
# *caller's* module, letting LiveProps track state per component/view.
defmacro state(name, type, opts \\ []) do
quote do
LiveProps.__state__(unquote(name), unquote(type), unquote(opts), __MODULE__)
end
end
@doc """
Same as `set_state/3` but with a list or map of assigns.
"""
@spec set_state(socket :: Phoenix.LiveView.Socket.t(), assigns :: list() | map()) ::
Phoenix.LiveView.Socket.t()
defmacro set_state(socket, assigns) do
quote do
LiveProps.__set_state__(unquote(socket), unquote(assigns), __MODULE__)
end
end
@doc """
Assign the state to the socket and return the socket. This will also
trigger re-calculation of any computed state. If you do not wish to do this,
use `Phoenix.LiveView.assign/3` instead.
If the given `state` has not been declared as a state, it will be ignored.
"""
@spec set_state(socket :: Phoenix.LiveView.Socket.t(), state :: atom(), value :: any()) ::
Phoenix.LiveView.Socket.t()
defmacro set_state(socket, state, value) do
quote do
LiveProps.__set_state__(unquote(socket), %{unquote(state) => unquote(value)}, __MODULE__)
end
end
@doc """
Same as `set_state/2` but raises if passed an invalid state
"""
@spec set_state!(socket :: Phoenix.LiveView.Socket.t(), assigns :: list() | map()) ::
Phoenix.LiveView.Socket.t()
defmacro set_state!(socket, assigns) do
quote do
LiveProps.__set_state__!(unquote(socket), unquote(assigns), __MODULE__)
end
end
@doc """
Same as `set_state/3` but raises if passed an invalid state
"""
@spec set_state!(socket :: Phoenix.LiveView.Socket.t(), state :: atom(), value :: any()) ::
Phoenix.LiveView.Socket.t()
defmacro set_state!(socket, state, value) do
quote do
LiveProps.__set_state__!(unquote(socket), %{unquote(state) => unquote(value)}, __MODULE__)
end
end
@doc """
A replacement for `Phoenix.LiveView.send_update/2`. Invalid states will be ignored.
"""
# A plain function (not a macro): the :lp_command marker tells the target
# component's LiveProps hooks to treat these assigns as state updates.
@spec send_state(module :: module(), id :: any(), assigns :: list()) :: :ok
def send_state(module, id, assigns) do
Phoenix.LiveView.send_update(module, [lp_command: :set_state, id: id] ++ assigns)
end
@doc """
Assign states of the given kind to the socket. The kind can be
one of `:all`, `:defaults`, `:computed` (computed states without the `:after_connect` option), or `:async` (computed states
defined with `after_connect: true`)
"""
# bind_quoted evaluates socket/kind exactly once in the caller's context.
defmacro assign_states(socket, kind) do
quote bind_quoted: [socket: socket, kind: kind] do
LiveProps.__assigns_states__(socket, kind, __MODULE__)
end
end
end
|
lib/live_props/states.ex
| 0.895705
| 0.586908
|
states.ex
|
starcoder
|
defmodule Grizzly.ZWave.Commands.BasicReport do
  @moduledoc """
  This module implements the BASIC_REPORT command of the COMMAND_CLASS_BASIC
  command class.

  Params:

    * `:value` - the current value (`:on`, `:off` or `:unknown`)
    * `:target_value` - the target value (`:on`, `:off` or `:unknown`) - v2
    * `:duration` - the time in seconds needed to reach the target value at
      the actual transition rate - v2
  """

  @behaviour Grizzly.ZWave.Command

  alias Grizzly.ZWave.{Command, DecodeError}
  alias Grizzly.ZWave.CommandClasses.Basic

  @type value :: :on | :off | :unknown
  @type duration :: non_neg_integer | :unknown
  @type param :: {:value, value()} | {:target_value, value()} | {:duration, duration()}

  @impl true
  @spec new([param()]) :: {:ok, Command.t()}
  def new(params) do
    command = %Command{
      name: :basic_report,
      command_byte: 0x03,
      command_class: Basic,
      params: params,
      impl: __MODULE__
    }

    {:ok, command}
  end

  @impl true
  @spec encode_params(Command.t()) :: binary()
  def encode_params(command) do
    value_byte = Command.param!(command, :value) |> encode_value()
    target_value = Command.param(command, :target_value)

    # A v1 report carries only the current value; v2 adds the target value
    # and duration. :duration is required whenever :target_value is present.
    if target_value == nil do
      <<value_byte>>
    else
      duration_byte = Command.param!(command, :duration) |> encode_duration()
      target_value_byte = encode_value(target_value)
      <<value_byte, target_value_byte, duration_byte>>
    end
  end

  @impl true
  # v1
  def decode_params(<<value_byte>>) do
    case value_from_byte(value_byte, :value) do
      {:ok, value} ->
        {:ok, [value: value]}

      {:error, %DecodeError{}} = error ->
        error
    end
  end

  # v2
  def decode_params(<<value_byte, target_value_byte, duration_byte>>) do
    with {:ok, value} <- value_from_byte(value_byte, :value),
         {:ok, target_value} <- value_from_byte(target_value_byte, :target_value),
         {:ok, duration} <- duration_from_byte(duration_byte) do
      {:ok, [value: value, target_value: target_value, duration: duration]}
    else
      {:error, %DecodeError{}} = error ->
        error
    end
  end

  defp encode_value(:on), do: 0xFF
  defp encode_value(:off), do: 0x00
  defp encode_value(:unknown), do: 0xFE

  # 0x00..0x7F encode the duration directly in seconds.
  defp encode_duration(secs) when is_number(secs) and secs in 0..127, do: secs

  # 0x80..0xFD encode 1..126 minutes as byte = 0x7F + minutes, which is the
  # inverse of duration_from_byte/1 below. (The previous `0x80 + div(secs, 60)`
  # was off by one minute on decode — encode(3600) decoded back as 3660 — and
  # encoded 126 minutes as 0xFE, colliding with the :unknown marker.)
  defp encode_duration(secs) when is_number(secs) and secs in 128..(126 * 60),
    do: 0x7F + div(secs, 60)

  defp encode_duration(:unknown), do: 0xFE
  # Out-of-range durations degrade to :unknown rather than raising.
  defp encode_duration(_), do: 0xFE

  defp value_from_byte(0x00, _param), do: {:ok, :off}
  defp value_from_byte(0xFF, _param), do: {:ok, :on}
  defp value_from_byte(0xFE, _param), do: {:ok, :unknown}

  defp value_from_byte(byte, param),
    do: {:error, %DecodeError{value: byte, param: param, command: :basic_report}}

  # 0x00..0x7F: seconds, verbatim.
  defp duration_from_byte(duration_byte) when duration_byte in 0x00..0x7F,
    do: {:ok, duration_byte}

  # 0x80..0xFD: 1..126 minutes, returned as seconds.
  defp duration_from_byte(duration_byte) when duration_byte in 0x80..0xFD,
    do: {:ok, (duration_byte - 0x80 + 1) * 60}

  defp duration_from_byte(0xFE), do: {:ok, :unknown}

  defp duration_from_byte(byte),
    do: {:error, %DecodeError{value: byte, param: :duration, command: :basic_report}}
end
|
lib/grizzly/zwave/commands/basic_report.ex
| 0.895645
| 0.504333
|
basic_report.ex
|
starcoder
|
defmodule Explorer.Inspect do
  # **Private** helpers shared by the `Inspect` implementations for
  # Explorer's Series and DataFrame structs.
  @moduledoc false

  alias Inspect.Algebra, as: IA

  # Renders one value: nil and binaries become literal strings, lists become
  # bracketed container documents, everything else goes through Kernel.to_string.
  def to_string(value, _opts) when is_nil(value), do: "nil"

  def to_string(value, _opts) when is_binary(value), do: "\"#{value}\""

  def to_string(value, opts) when is_list(value) do
    IA.container_doc(open(opts), value, close(opts), opts, &to_string/2)
  end

  def to_string(value, _opts), do: Kernel.to_string(value)

  # Colored opening/closing brackets (plain strings when colors are off).
  def open(opts), do: IA.color("[", :list, opts)
  def close(opts), do: IA.color("]", :list, opts)

  # Shape banner for a series, e.g. `[100]`.
  def s_shape(length, opts) do
    [open(opts), Integer.to_string(length), close(opts)]
    |> IA.concat()
  end

  # Shape banner for a grouped dataframe: `[rows: R, columns: C, groups: [...]]`.
  def df_shape(rows, cols, [_ | _] = groups, opts) do
    [
      IA.line(),
      open(opts),
      IA.color("rows: ", :atom, opts),
      to_string(rows, opts),
      ", ",
      IA.color("columns: ", :atom, opts),
      to_string(cols, opts),
      ", ",
      IA.color("groups: ", :atom, opts),
      to_string(groups, opts),
      close(opts)
    ]
    |> IA.concat()
    |> IA.nest(2)
  end

  # Shape banner for an ungrouped dataframe: `[rows: R, columns: C]`.
  def df_shape(rows, cols, [] = _groups, opts) do
    [
      IA.line(),
      open(opts),
      IA.color("rows: ", :atom, opts),
      to_string(rows, opts),
      ", ",
      IA.color("columns: ", :atom, opts),
      to_string(cols, opts),
      close(opts)
    ]
    |> IA.concat()
    |> IA.nest(2)
  end

  # Body of a series inspection: dtype, shape, then the values.
  def s_inner(dtype, length, values, opts) do
    colored_dtype = IA.color(Kernel.to_string(dtype), :atom, opts)

    IA.concat([
      IA.line(),
      colored_dtype,
      s_shape(length, opts),
      IA.line(),
      format_data(values, opts)
    ])
  end

  # One column of a dataframe inspection: name, dtype and values.
  def df_inner(name, dtype, values, opts) do
    colored_name = IA.color(name, :map, opts)
    colored_dtype = IA.color(Kernel.to_string(dtype), :atom, opts)
    data = format_data(values, opts)

    [IA.line(), colored_name, " ", colored_dtype, " ", data]
    |> IA.concat()
    |> IA.nest(2)
  end

  # Bracketed document for a list of values.
  def format_data(values, opts) do
    IA.container_doc(open(opts), values, close(opts), opts, &to_string/2)
  end
end
defimpl Inspect, for: Explorer.Series do
alias Explorer.Series
import Inspect.Algebra
# Show at most this many leading values; longer series get a "..." marker.
@printable_limit 50
def inspect(series, opts) do
{dtype, length, values} = inspect_data(series)
inner = Explorer.Inspect.s_inner(dtype, length, values, opts)
# Wrap the body in the #Explorer.Series< ... > frame, nesting by 2.
color("#Explorer.Series<", :map, opts)
|> concat(nest(inner, 2))
|> concat(color("\n>", :map, opts))
end
# Pulls the dtype, the total length and the first @printable_limit values
# out of the series, appending the literal string "..." when truncated.
defp inspect_data(series) do
dtype = Series.dtype(series)
length = Series.length(series)
l = series |> Series.slice(0, @printable_limit) |> Series.to_list()
l = if length > @printable_limit, do: l ++ ["..."], else: l
{dtype, length, l}
end
end
defimpl Inspect, for: Explorer.DataFrame do
alias Explorer.DataFrame
alias Explorer.Series
import Inspect.Algebra
# Show at most this many rows per column; longer columns get a "..." marker.
@printable_limit 5
def inspect(df, opts) do
{rows, cols} = DataFrame.shape(df)
groups = DataFrame.groups(df)
shape = Explorer.Inspect.df_shape(rows, cols, groups, opts)
names = DataFrame.names(df)
# For each column: pull it as a series, truncate to the printable limit,
# capture {dtype, values} (appending "..." when truncated), and render a
# name/dtype/values line via the shared helper.
series =
names
|> Enum.map(&DataFrame.pull(df, &1))
|> Enum.map(&Series.slice(&1, 0, @printable_limit))
|> Enum.map(fn s -> {Series.dtype(s), Series.to_list(s)} end)
|> Enum.map(fn {dtype, vals} ->
if rows > @printable_limit, do: {dtype, vals ++ ["..."]}, else: {dtype, vals}
end)
|> Enum.zip(names)
|> Enum.map(fn {{dtype, vals}, name} ->
Explorer.Inspect.df_inner(name, dtype, vals, opts)
end)
# Frame: header, shape banner, one nested doc per column, closing ">".
color("#Explorer.DataFrame<", :map, opts)
|> concat(shape)
|> then(fn doc -> Enum.reduce(series, doc, &concat(&2, &1)) end)
|> concat(line())
|> concat(nest(color(">", :map, opts), 0))
end
end
|
lib/explorer/inspect.ex
| 0.570451
| 0.473657
|
inspect.ex
|
starcoder
|
defmodule Chunky.Sequence.OEIS.Constants do
  @moduledoc """
  Sequences from the [Online Encyclopedia of Integer Sequences](https://oeis.org) dealing with numeric
  constants, digit expansions of constants, constant value sequences, or constant cycle sequences.

  ## Available Sequences

  ### Constant Value Sequences

  The OEIS contains a large number of sequences that repeat the same number, the _constant sequences_.

  - `create_sequence_a007395/1` - A007395 - Constant sequence: the all 2's sequence
  - `create_sequence_a010701/1` - A010701 - Constant sequence: the all 3's sequence
  - `create_sequence_a010709/1` - A010709 - Constant sequence: the all 4's sequence
  - `create_sequence_a010716/1` - A010716 - Constant sequence: the all 5's sequence
  - `create_sequence_a010722/1` - A010722 - Constant sequence: the all 6's sequence
  - `create_sequence_a010727/1` - A010727 - Constant sequence: the all 7's sequence
  - `create_sequence_a010731/1` - A010731 - Constant sequence: the all 8's sequence
  - `create_sequence_a010734/1` - A010734 - Constant sequence: the all 9's sequence
  - `create_sequence_a010692/1` - A010692 - Constant sequence: a(n) = 10
  - `create_sequence_a010850/1` - A010850 - Constant sequence: a(n) = 11
  - `create_sequence_a010851/1` - A010851 - Constant sequence: a(n) = 12
  - `create_sequence_a010852/1` - A010852 - Constant sequence: a(n) = 13
  - `create_sequence_a010853/1` - A010853 - Constant sequence: a(n) = 14
  - `create_sequence_a010854/1` - A010854 - Constant sequence: a(n) = 15
  - `create_sequence_a010855/1` - A010855 - Constant sequence: a(n) = 16
  - `create_sequence_a010856/1` - A010856 - Constant sequence: a(n) = 17
  - `create_sequence_a010857/1` - A010857 - Constant sequence: a(n) = 18
  - `create_sequence_a010858/1` - A010858 - Constant sequence: a(n) = 19
  - `create_sequence_a010859/1` - A010859 - Constant sequence: a(n) = 20
  - `create_sequence_a010860/1` - A010860 - Constant sequence: a(n) = 21
  - `create_sequence_a010861/1` - A010861 - Constant sequence: a(n) = 22
  - `create_sequence_a010862/1` - A010862 - Constant sequence: a(n) = 23
  - `create_sequence_a010863/1` - A010863 - Constant sequence: a(n) = 24
  - `create_sequence_a010864/1` - A010864 - Constant sequence: a(n) = 25
  - `create_sequence_a010865/1` - A010865 - Constant sequence: a(n) = 26
  - `create_sequence_a010866/1` - A010866 - Constant sequence: a(n) = 27
  - `create_sequence_a010867/1` - A010867 - Constant sequence: a(n) = 28
  - `create_sequence_a010868/1` - A010868 - Constant sequence: a(n) = 29
  - `create_sequence_a010869/1` - A010869 - Constant sequence: a(n) = 30
  - `create_sequence_a010870/1` - A010870 - Constant sequence: a(n) = 31
  - `create_sequence_a010871/1` - A010871 - Constant sequence: a(n) = 32
  """
  import Chunky.Sequence, only: [sequence_for_function: 1]

  # Data table driving the generated functions below, one entry per OEIS
  # constant sequence:
  #
  #   {sequence id, constant value, OEIS offset, "Formerly" tag, doctest sample size}
  #
  # The original module spelled out every `create_sequence_*`/`seq_*` pair by
  # hand (~700 lines of copy-paste). Generating them from this table keeps the
  # public function names, arities, @doc metadata and doctests identical while
  # removing the duplication.
  @constant_data [
    {:a007395, 2, 1, "M0208", 102},
    {:a010701, 3, 0, "", 81},
    {:a010709, 4, 0, "", 105},
    {:a010716, 5, 0, "", 81},
    {:a010722, 6, 0, "", 81},
    {:a010727, 7, 0, "", 81},
    {:a010731, 8, 0, "", 81},
    {:a010734, 9, 0, "", 81},
    {:a010692, 10, 0, "", 66},
    {:a010850, 11, 0, "", 65},
    {:a010851, 12, 0, "", 65},
    {:a010852, 13, 0, "", 54},
    {:a010853, 14, 0, "", 54},
    {:a010854, 15, 0, "", 54},
    {:a010855, 16, 0, "", 54},
    {:a010856, 17, 0, "", 54},
    {:a010857, 18, 0, "", 54},
    {:a010858, 19, 0, "", 54},
    {:a010859, 20, 0, "", 54},
    {:a010860, 21, 0, "", 54},
    {:a010861, 22, 0, "", 54},
    {:a010862, 23, 0, "", 54},
    {:a010863, 24, 0, "", 54},
    {:a010864, 25, 0, "", 54},
    {:a010865, 26, 0, "", 54},
    {:a010866, 27, 0, "", 54},
    {:a010867, 28, 0, "", 54},
    {:a010868, 29, 0, "", 54},
    {:a010869, 30, 0, "", 54},
    {:a010870, 31, 0, "", 54},
    {:a010871, 32, 0, "", 54}
  ]

  for {seq_id, value, seq_offset, formerly, take_count} <- @constant_data do
    oeis_id = seq_id |> Atom.to_string() |> String.upcase()

    # The wording differs between the single-digit and multi-digit sequences,
    # matching the per-sequence descriptions used by the original module.
    description =
      if value <= 9 do
        "Constant sequence: the all #{value}'s sequence."
      else
        "Constant sequence: a(n) = #{value}."
      end

    example =
      "[" <> Enum.map_join(1..take_count, ",", fn _ -> Integer.to_string(value) end) <> "]"

    @doc """
    OEIS Sequence `#{oeis_id}` - #{description}

    From [OEIS #{oeis_id}](https://oeis.org/#{oeis_id}):

    > #{description}
    > (Formerly #{formerly})

    **Sequence IDs**: `:#{seq_id}`

    **Finite**: False

    **Offset**: #{seq_offset}

    ## Example

        iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Constants, :#{seq_id}) |> Sequence.take!(#{take_count})
        #{example}

    """
    @doc offset: seq_offset,
         sequence: description,
         references: [{:oeis, seq_id, "https://oeis.org/#{oeis_id}"}]
    def unquote(:"create_sequence_#{seq_id}")(_opts) do
      # NOTE(review): the capture deliberately keeps the remote `&Mod.fun/1`
      # shape the original used — confirm `sequence_for_function/1` accepts a
      # `__MODULE__`-based capture in case it is a macro inspecting the AST.
      sequence_for_function(&__MODULE__.unquote(:"seq_#{seq_id}")/1)
    end

    @doc false
    @doc offset: seq_offset
    def unquote(:"seq_#{seq_id}")(_idx) do
      unquote(value)
    end
  end
end
|
lib/sequence/oeis/constants.ex
| 0.899938
| 0.921781
|
constants.ex
|
starcoder
|
defmodule SudokuSolver.CPS do
  @moduledoc """
  Implements SudokuSolver using continuation passing style (CPS).

  Backtracking is expressed through explicit failure continuations: every
  search step receives a zero-argument function (`fc` / `continuation`) to
  invoke when the current branch is exhausted, instead of signalling failure
  with a sentinel return value.
  """
  @behaviour SudokuSolver
  @doc """
  Solve a sudoku.

  Returns the first solved board found, or `nil` when the board has no
  solution.
  """
  @impl SudokuSolver
  @spec solve(SudokuBoard.t()) :: SudokuBoard.t() | nil
  def solve(%SudokuBoard{size: size} = board) do
    # Cells are indexed 0..size*size-1; the search walks from the last cell
    # towards the front.
    max_index = size * size - 1
    # Root failure continuation: the whole search failed -> nil.
    solve_helper(board, max_index, fn -> nil end)
  end
  # Solves sudoku by attempting to populate cells starting at the end of the board and moving
  # to the front. Solve helper keeps track of which cell we are currently trying.
  # It calls the failure continuation `fc` when it needs to backtrack.
  @spec solve_helper(SudokuBoard.t(), integer(), fun()) :: SudokuBoard.t() | any()
  # Index -1 means every cell has been visited: accept the board or backtrack.
  defp solve_helper(%SudokuBoard{} = board, -1, fc) do
    if SudokuBoard.solved?(board), do: board, else: fc.()
  end
  defp solve_helper(%SudokuBoard{size: size, grid: grid} = board, idx, fc) do
    elt = Enum.at(grid, idx)
    if elt != 0 do
      # Pre-filled cell: nothing to choose, move to the previous cell.
      solve_helper(board, idx - 1, fc)
    else
      # Empty cell: try every candidate value 1..size in turn.
      try_solve(board, idx, Enum.to_list(1..size), fc)
    end
  end
  # try_solve attempts to solve a board by populating a cell from a list of suggestions.
  # No candidates left for this cell -> backtrack via the failure continuation.
  defp try_solve(%SudokuBoard{}, _idx, [], fc), do: fc.()
  defp try_solve(%SudokuBoard{} = board, idx, [suggestion | other_suggestions], fc) do
    new_board = SudokuBoard.place_number(board, idx, suggestion)
    if SudokuBoard.partial_solution?(new_board) do
      # Continue the search with this value placed; if the rest of the search
      # fails, retry this cell with the remaining candidates.
      solve_helper(new_board, idx - 1, fn -> try_solve(board, idx, other_suggestions, fc) end)
    else
      try_solve(board, idx, other_suggestions, fc)
    end
  end
  @doc """
  Finds all possible solutions to a sudoku.

  ## Parameters

  - board: A sudoku board
  """
  @impl SudokuSolver
  @spec all_solutions(SudokuBoard.t()) :: [SudokuBoard.t()]
  def all_solutions(%SudokuBoard{} = board) do
    max_index = board.size * board.size - 1
    # Root continuation yields the empty list; each solution found is
    # prepended ahead of whatever the rest of the search produces.
    find_all_solutions_helper(board, max_index, fn -> [] end)
  end
  # Same traversal as solve_helper/3, but instead of stopping at the first
  # solution the continuation accumulates every solution into a list.
  defp find_all_solutions_helper(board, -1, continuation) do
    if SudokuBoard.solved?(board), do: [board | continuation.()], else: continuation.()
  end
  defp find_all_solutions_helper(board, idx, continuation) do
    elt = Enum.at(board.grid, idx)
    if elt != 0 do
      find_all_solutions_helper(board, idx - 1, continuation)
    else
      try_find_all_solutions(board, idx, Enum.to_list(1..board.size), continuation)
    end
  end
  # Candidate loop for all_solutions/1; exhausting the candidates defers to
  # the continuation rather than failing.
  defp try_find_all_solutions(_board, _idx, [], continuation), do: continuation.()
  defp try_find_all_solutions(board, idx, [suggestion | other_suggestions], continuation) do
    new_board = SudokuBoard.place_number(board, idx, suggestion)
    if SudokuBoard.partial_solution?(new_board) do
      find_all_solutions_helper(new_board, idx - 1, fn ->
        try_find_all_solutions(board, idx, other_suggestions, continuation)
      end)
    else
      try_find_all_solutions(board, idx, other_suggestions, continuation)
    end
  end
end
|
lib/sudoku_solver/cps.ex
| 0.685529
| 0.4881
|
cps.ex
|
starcoder
|
defmodule Month.Period do
@moduledoc """
Represents a period of 1 month or more.
iex> {:ok, period} = Month.Period.new(~M[2019-01], ~M[2019-03])
{:ok, #Month.Period<~M[2019-01], ~M[2019-03]>}
iex> period.months
[~M[2019-01], ~M[2019-02], ~M[2019-03]]
The `months` field contains all months within the period, inclusive.
If you want a guarantee that the period would cover min 2 months or more,
look at `Month.Range` data structure instead.
"""
import Month.Utils
@type t :: %Month.Period{
start: Month.t(),
end: Month.t(),
months: list(Month.t())
}
@required_fields [
:start,
:end,
:months
]
@enforce_keys @required_fields
defstruct @required_fields
@doc """
Creates a new `Month.Period` using given `Month`s as a start and an end.

The arguments may be passed in either order; the earlier month always
becomes the start of the period.

## Examples

    iex> Month.Period.new(~M[2019-01], ~M[2019-03])
    {:ok, #Month.Period<~M[2019-01], ~M[2019-03]>}

    iex> Month.Period.new(~M[2019-03], ~M[2019-01])
    {:ok, #Month.Period<~M[2019-01], ~M[2019-03]>}
"""
@spec new(Date.t(), Date.t()) :: {:ok, Month.Period.t()} | {:error, String.t()}
@spec new(Month.t(), Month.t()) :: {:ok, Month.Period.t()} | {:error, String.t()}
def new(%Date{month: first_month, year: first_year}, %Date{month: last_month, year: last_year}) do
  with {:ok, first} <- Month.new(first_year, first_month),
       {:ok, last} <- Month.new(last_year, last_month) do
    new(first, last)
  end
end

def new(%Month{} = first, %Month{} = last) do
  # Normalize the argument order so `start` is never after `end`.
  {start_month, end_month} =
    case Month.compare(first, last) do
      :gt -> {last, first}
      _ -> {first, last}
    end

  {:ok,
   %Month.Period{
     start: start_month,
     end: end_month,
     months: months(start_month, end_month)
   }}
end
@doc """
Same as `new/2` but returns the result directly, raising an exception on
error.
"""
@spec new!(Date.t(), Date.t()) :: Month.Period.t()
@spec new!(Month.t(), Month.t()) :: Month.Period.t()
def new!(%Date{year: first_year, month: first_month}, %Date{year: last_year, month: last_month}) do
  first = Month.new!(first_year, first_month)
  last = Month.new!(last_year, last_month)
  unwrap_or_raise(new(first, last))
end

def new!(%Month{} = first, %Month{} = last) do
  unwrap_or_raise(new(first, last))
end
@doc """
Helper functions that returns the months between the two given
months, inclusive. Please make sure `from_month` is before `to_month`.
"""
def months(%Month{} = from, %Month{} = to) do
if Month.compare(from, to) == :eq do
[from]
else
{start_month, end_month} =
if Month.compare(from, to) == :lt do
{from, to}
else
{to, from}
end
{:ok, next_month} = Month.add(start_month, 1)
months =
next_month
|> Stream.unfold(fn month ->
if Month.compare(month, end_month) in [:eq, :gt] do
nil
else
{:ok, next_month} = Month.add(month, 1)
{month, next_month}
end
end)
|> Enum.to_list()
[start_month]
|> Enum.concat(months)
|> Enum.concat([end_month])
end
end
@doc """
Checks if the first period is within the second period (inclusive).
"""
@spec within?(Month.Period.t(), Month.Period.t()) :: boolean
@spec within?(Month.Range.t(), Month.Range.t()) :: boolean
@spec within?(Date.t(), Month.Period.t()) :: boolean
def within?(%Date{} = date, period) do
found_month =
Enum.find(period.months, fn month ->
month.month == date.month && month.year == date.year
end)
not is_nil(found_month)
end
def within?(%{months: a}, %{months: b}) do
MapSet.subset?(MapSet.new(a), MapSet.new(b))
end
@doc """
Shifts the given period forwards or backwards by given number of months.
"""
@spec shift(Month.Period.t(), integer) :: Month.Period.t()
@spec shift(Month.Range.t(), integer) :: Month.Range.t()
def shift(period, num_months) do
shifted_start = Month.add!(period.start, num_months)
shifted_end = Month.add!(period.end, num_months)
period.__struct__.new!(shifted_start, shifted_end)
end
defimpl Inspect do
def inspect(month_period, _opts) do
"#Month.Period<#{inspect(month_period.start)}, #{inspect(month_period.end)}>"
end
end
end
|
lib/month/period.ex
| 0.91114
| 0.695273
|
period.ex
|
starcoder
|
defmodule AdventOfCode.Day11 do
  # Advent of Code 2020, day 11 ("Seating System"): a cellular automaton run
  # over a Matrex matrix. Cell encoding: -1 = floor ("."), 0 = empty seat
  # ("L"), 1 = occupied seat ("#"). The parsed board is padded with a border
  # of -1 cells so neighbourhood lookups never leave the matrix; real cells
  # occupy rows/cols 2..(size - 1).

  # Value of one cell for occupancy counting: floor cells (-1) count as 0,
  # otherwise the (float) cell value truncated to an integer (0 or 1).
  def get_value(input, row, col) do
    case (x = Matrex.at(input, row, col)) < 0 do
      true -> 0
      false -> floor(x)
    end
  end

  # Part 1 neighbour count: occupied seats among the 8 adjacent cells.
  # The 3x3 sum includes the centre cell, so its value is subtracted out.
  def neighborhood_value(input, row, col) do
    center_value = get_value(input, row, col)

    value =
      Enum.reduce([-1, 0, 1], 0, fn row_off, count ->
        Enum.reduce([-1, 0, 1], count, fn col_off, count ->
          count + get_value(input, row + row_off, col + col_off)
        end)
      end)

    value - center_value
  end

  # Part 2 line-of-sight scan: walk from (row, col) in direction
  # {off_row, off_col} until a seat or the board edge is reached.
  # Returns 1 for an occupied seat, otherwise 0.

  # A {0, 0} "direction" would loop forever; treat it as contributing 0
  # (this also neutralizes the centre cell in the 3x3 reduction below).
  def explore_direction(_input, {_row, _col}, 0, 0) do
    0
  end

  # Walked off the top/left edge of the matrix.
  def explore_direction(_input, {row, col}, _off_row, _off_col) when row <= 0 or col <= 0 do
    0
  end

  def explore_direction(input, {row, col}, off_row, off_col) do
    {num_rows, num_cols} = input[:size]

    case row > num_rows or col > num_cols do
      true ->
        # Walked off the bottom/right edge.
        0

      false ->
        value =
          input
          |> Matrex.at(row, col)
          |> floor()

        # Seats (1 or 0) stop the scan; floor (-1) keeps walking outward.
        case value do
          1 -> 1
          0 -> 0
          _ -> explore_direction(input, {row + off_row, col + off_col}, off_row, off_col)
        end
    end
  end

  # Part 2 neighbour count: one line-of-sight probe per direction.
  def single_seat_neighborhood_value(input, row, col) do
    Enum.reduce([-1, 0, 1], 0, fn row_off, count ->
      Enum.reduce([-1, 0, 1], count, fn col_off, count ->
        count + explore_direction(input, {row + row_off, col + col_off}, row_off, col_off)
      end)
    end)
  end

  # Seat transition rule: an empty seat with 0 occupied neighbours becomes
  # occupied; an occupied seat empties at >= 4 (:original, part 1) or
  # >= 5 (:relaxed, part 2) occupied neighbours; otherwise unchanged.
  @spec evaluate_rule(integer, 0 | 1, :relaxed | :original) :: 0 | 1
  def evaluate_rule(0, _, _), do: 1

  def evaluate_rule(value, _, :relaxed) when value >= 5 do
    0
  end

  def evaluate_rule(value, _, :original) when value >= 4 do
    0
  end

  def evaluate_rule(_, current_value, _) do
    current_value
  end

  # One full sweep over the board, accumulating {next_board, occupied_count}.
  # Positions are {row, col} tuples; Erlang term ordering compares them
  # element-wise, so this guard fires once the row counter passes end_pos.
  def step(_input, _agg, start_pos, end_pos, acc) when start_pos > end_pos do
    acc
  end

  # Past the last real column: wrap to the next row. Column restarts at 2
  # because column 1 is the -1 border padding.
  def step(input, agg, {row, col}, {_end_row, end_col} = end_pos, acc) when col > end_col do
    step(input, agg, {row + 1, 2}, end_pos, acc)
  end

  def step(input, {agg_value, agg_rule} = agg, {row, col}, end_pos, {acc, count_ones}) do
    case Matrex.at(input, row, col) < 0 do
      true ->
        # Floor cells never change; skip them.
        step(input, agg, {row, col + 1}, end_pos, {acc, count_ones})

      false ->
        current_value = get_value(input, row, col)
        neigh_value = agg_value.(input, row, col)
        new_value = agg_rule.(neigh_value, current_value)
        acc = Matrex.set(acc, row, col, new_value)
        count_ones = count_ones + new_value
        step(input, agg, {row, col + 1}, end_pos, {acc, count_ones})
    end
  end

  # Iterates step/5 to a fixed point; returns {stable_board, occupied_count}.
  # agg_value counts neighbours; agg_rule applies the transition rule.
  def life(
        input,
        agg_value \\ &neighborhood_value/3,
        agg_rule \\ fn x, y -> evaluate_rule(x, y, :original) end
      ) do
    {total_rows, total_cols} = input[:size]

    {next_input, ones} =
      step(input, {agg_value, agg_rule}, {2, 2}, {total_rows - 1, total_cols - 1}, {input, 0})

    # NOTE(review): convergence is detected by summing the *signed*
    # element-wise difference (input - next_input); opposite-sign changes
    # could in principle cancel to 0. It appears to work for this puzzle,
    # but verify before reusing this check elsewhere.
    diff =
      input
      |> Matrex.add(next_input, 1.0, -1.0)
      |> Matrex.sum()
      |> floor()

    case diff do
      0 -> {input, ones}
      _ -> life(next_input, agg_value, agg_rule)
    end
  end

  # Renders the board back in the puzzle's "./L/#" notation, dropping the
  # -1 border rows and columns. Mainly for debugging.
  def print_board(mat) do
    {rows, cols} = mat[:size]

    mat
    |> Matrex.to_list_of_lists()
    |> Enum.slice(1..(rows - 2))
    |> Enum.reduce("", fn row, acc ->
      row = Enum.slice(row, 1..(cols - 2))

      row_repr =
        Enum.reduce(row, "", fn value, acc ->
          value = floor(value)

          str_value =
            case value do
              -1 -> "."
              0 -> "L"
              1 -> "#"
            end

          "#{acc}#{str_value}"
        end)

      "#{acc}\n#{row_repr}"
    end)
  end

  # Entry point: parses "day11_input", pads it with a -1 border so the
  # neighbourhood code never indexes out of range, and returns
  # {parsed_board, part1_answer, part2_answer}.
  def day11() do
    input =
      "day11_input"
      |> AdventOfCode.read_file()
      |> Enum.map(fn row ->
        parsed_row =
          row
          |> String.graphemes()
          |> Enum.map(fn entry ->
            case entry do
              "." -> -1
              "L" -> 0
              "#" -> 1
            end
          end)

        # Left/right border cells.
        [-1 | parsed_row] ++ [-1]
      end)

    [first_row | _] = input
    # Top/bottom border rows.
    empty_row = for _ <- first_row, do: -1
    input = [empty_row | input] ++ [empty_row]
    mat = Matrex.new(input)
    {_result, part1} = life(mat)

    {_, part2} =
      life(mat, &single_seat_neighborhood_value/3, fn x, y -> evaluate_rule(x, y, :relaxed) end)

    {mat, part1, part2}
  end
end
|
lib/day11.ex
| 0.568416
| 0.691777
|
day11.ex
|
starcoder
|
defmodule Lightbridge.EnergyMonitor do
  @moduledoc """
  Polls the smart socket to get its current energy usage.
  Publishes to configured MQTT endpoint.
  """
  use GenServer

  import Lightbridge.Config, only: [fetch: 1]

  alias Lightbridge.Hs100
  alias Lightbridge.EnergyMonitor.Stats

  # Polling interval for energy stats, in milliseconds (15 seconds).
  @poll_frequency 15 * 1_000

  def start_link(_opts) do
    GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
  end

  def init(_args) do
    # Arm the first poll; subsequent polls are re-armed in handle_info/2.
    Process.send_after(self(), :poll, @poll_frequency)
    {:ok, %{client_id: fetch(:mqtt_client_id), energy_topic: fetch(:mqtt_energy_topic)}}
  end

  @doc """
  Polls the energy stats then sends them up to the MQTT broker.
  Each stat is published on its own subtopic (`<energy_topic>/<stat_name>`).
  """
  @spec poll(client_id :: String.t(), energy_topic :: String.t()) :: any()
  def poll(client_id, energy_topic) do
    # Get the energy stats
    # Parse them into a suitable structure
    # Publish each stat concurrently as its own Task
    # TODO: This seems quite tightly coupled together...
    tasks =
      Hs100.get_energy()
      |> parse_energy_stats()
      |> Map.from_struct()
      |> Enum.map(fn {path, stat} ->
        Task.async(
          Tortoise,
          :publish,
          [client_id, "#{energy_topic}/#{path}", to_string(stat), [qos: 0]]
        )
      end)

    # Block until every publish task finishes (2 second cap)
    Task.await_many(tasks, _wait_for = 2_000)
  end

  # Periodic tick: publish current stats, then re-arm the timer.
  def handle_info(:poll, %{client_id: client_id, energy_topic: energy_topic} = state) do
    poll(client_id, energy_topic)

    # Poll ourselves again in `@poll_frequency` milliseconds
    Process.send_after(self(), :poll, @poll_frequency)

    {:noreply, state}
  end

  @doc """
  Takes energy stats and parses them into a flattened map.
  """
  # NOTE(review): this actually returns a %Stats{} struct, not a bare map —
  # the spec is loose; confirm before tightening.
  @spec parse_energy_stats(stats :: String.t()) :: map()
  def parse_energy_stats(stats) do
    # Decode the raw JSON payload with atom keys
    {:ok, parsed_energy_stats} =
      stats
      |> Jason.decode(keys: :atoms)

    # Get the stats from the nested structure
    parsed_energy_stats
    |> get_in([:emeter, :get_realtime])
    |> Stats.new()
  end

  defmodule Stats do
    @moduledoc """
    Stats encapsulates the data coming back from the smart socket.
    """
    defstruct current_ma: 0, err_code: 0, power_mw: 0, total_wh: 0, voltage_mv: 0

    @typedoc """
    Represents the Stats struct.
    """
    @type t :: %__MODULE__{
            current_ma: pos_integer(),
            err_code: integer(),
            power_mw: pos_integer(),
            total_wh: pos_integer(),
            voltage_mv: pos_integer()
          }

    @doc """
    Takes in a map of energy stats and returns a new `t()`.
    Unknown keys in the input are ignored by `struct/2`.
    """
    @spec new(energy_stats :: map()) :: t()
    def new(energy_stats) do
      struct(__MODULE__, energy_stats)
    end
  end

  # Human-readable rendering used when a stat struct is interpolated with
  # to_string/1 (e.g. in the MQTT payload above).
  defimpl String.Chars, for: Stats do
    def to_string(energy_stats) do
      ~s"""
      Power (mW): #{energy_stats.power_mw}
      Voltage (mV): #{energy_stats.voltage_mv}
      Current (mA): #{energy_stats.current_ma}
      Total WH: #{energy_stats.total_wh}
      """
    end
  end
end
|
lib/lightbridge/energy_monitor.ex
| 0.643889
| 0.492676
|
energy_monitor.ex
|
starcoder
|
defmodule Scenic.Primitive.Text do
  @moduledoc """
  Draw text on the screen.
  ## Data
  `text`
  The data for a Text primitive is a bitstring
  * `text` - the text to draw
  ## Styles
  This primitive recognizes the following styles
  * [`hidden`](Scenic.Primitive.Style.Hidden.html) - show or hide the primitive
  * [`fill`](Scenic.Primitive.Style.Fill.html) - fill in the area of the text. Only solid colors!
  * [`font`](Scenic.Primitive.Style.Font.html) - name (or key) of font to use
  * [`font_size`](Scenic.Primitive.Style.FontSize.html) - point size of the font
  * [`font_blur`](Scenic.Primitive.Style.FontBlur.html) - option to blur the characters
  * [`text_align`](Scenic.Primitive.Style.TextAlign.html) - alignment of lines of text
  * [`text_height`](Scenic.Primitive.Style.TextHeight.html) - spacing between lines of text
  ## Usage
  You should add/modify primitives via the helper functions in
  [`Scenic.Primitives`](Scenic.Primitives.html#text/3)
  """
  use Scenic.Primitive

  # The closed set of styles this primitive accepts.
  @styles [:hidden, :fill, :font, :font_size, :font_blur, :text_align, :text_height]

  # ============================================================================
  # data verification and serialization

  # --------------------------------------------------------
  # Error message shown when invalid data is supplied for this primitive.
  @doc false
  def info(data),
    do: """
    #{IO.ANSI.red()}#{__MODULE__} data must be a bitstring
    #{IO.ANSI.yellow()}Received: #{inspect(data)}
    #{IO.ANSI.default_color()}
    """

  # --------------------------------------------------------
  # Accept any bitstring as valid text data; reject everything else.
  @doc false
  @spec verify(any()) :: :invalid_data | {:ok, bitstring()}
  def verify(data) do
    if is_bitstring(data) do
      {:ok, data}
    else
      :invalid_data
    end
  end

  # ============================================================================
  @doc """
  Returns a list of styles recognized by this primitive.
  """
  @spec valid_styles() :: [
          :fill | :font | :font_blur | :font_size | :hidden | :text_align | :text_height,
          ...
        ]
  def valid_styles, do: @styles
end
|
lib/scenic/primitive/text.ex
| 0.898115
| 0.458167
|
text.ex
|
starcoder
|
defmodule WechatPay.Plug.Payment do
  @moduledoc """
  Plug behaviour to handle **Payment** Notification from Wechat's Payment Gateway.
  Official document: https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_7
  ## Example
  ### Define a handler
  See `WechatPay.Plug.Handler` for how to implement a handler.
  ```elixir
  defmodule MyApp.WechatHandler do
    use WechatPay.Plug.Handler
    @impl WechatPay.Plug.Handler
    def handle_data(conn, data) do
      :ok
    end
  end
  ```
  ### Plug in
  In your app's `lib/my_app_web/router.ex`:
  ```elixir
  post "/wechat_pay/notification/payment", WechatPay.Plug.Payment, [handler: MyApp.WechatHandler, api_key: "my-api-key", sign_type: :md5]
  ```
  """
  alias WechatPay.Utils.XMLParser
  alias WechatPay.Utils.Signature
  alias WechatPay.Error

  import Plug.Conn

  @doc """
  Normalizes the plug options.
  `:sign_type` defaults to `:md5` when not supplied.
  """
  @spec init(keyword()) :: [
          {:api_key, binary()} | {:handler, binary()} | {:sign_type, :md5 | :sha256}
        ]
  def init(opts) do
    # BUGFIX: the previous implementation merged `sign_type: Keyword.get(opts,
    # :sign_type)` over the defaults, so a missing :sign_type produced
    # `sign_type: nil` and silently clobbered the documented :md5 default.
    [
      handler: Keyword.get(opts, :handler),
      api_key: Keyword.get(opts, :api_key),
      sign_type: Keyword.get(opts, :sign_type, :md5)
    ]
  end

  @doc """
  Reads the notification body, verifies it and dispatches it to the configured
  handler, replying to Wechat with a success or error XML response.
  """
  @spec call(Plug.Conn.t(), [
          {:api_key, binary()} | {:handler, binary()} | {:sign_type, :md5 | :sha256}
        ]) ::
          Plug.Conn.t()
  def call(conn, handler: handler_module, api_key: api_key, sign_type: sign_type) do
    {:ok, body, conn} = Plug.Conn.read_body(conn)

    with {:ok, data} <- XMLParser.parse(body),
         :ok <- process_data(conn, data, handler_module, api_key, sign_type) do
      response_with_success_info(conn)
    else
      {:error, %Error{reason: reason}} ->
        conn
        |> send_resp(:unprocessable_entity, reason)
    end
  end

  # Acknowledges the notification; Wechat retries until it receives this body.
  defp response_with_success_info(conn) do
    body = ~s"""
    <xml>
      <return_code><![CDATA[SUCCESS]]></return_code>
      <return_msg><![CDATA[OK]]></return_msg>
    </xml>
    """

    conn
    |> put_resp_content_type("application/xml")
    |> send_resp(:ok, body)
  end

  # Validates the return_code, checks the signature, then hands the payload to
  # the user's handler. On a tagged %Error{} the handler's error callback is
  # invoked before the error is propagated to call/2.
  defp process_data(conn, data, handler_module, api_key, sign_type) do
    with {:ok, data} <- process_return_field(data),
         :ok <- Signature.verify(data, api_key, sign_type),
         :ok <- apply(handler_module, :handle_data, [conn, data]) do
      :ok
    else
      {:error, %Error{} = error} ->
        handle_error(handler_module, conn, error, data)
        {:error, error}
    end
  end

  defp process_return_field(%{return_code: "SUCCESS"} = data) do
    {:ok, data}
  end

  defp process_return_field(%{return_code: "FAIL", return_msg: reason}) do
    {:error, %Error{reason: reason, type: :failed_return}}
  end

  defp handle_error(handler_module, conn, error, data) do
    handler_module.handle_error(conn, error, data)
  end
end
|
lib/wechat_pay/plug/payment.ex
| 0.85753
| 0.609175
|
payment.ex
|
starcoder
|
defmodule PhStInvariant do
  @moduledoc """
  The long term goal of this library is allow developers to create property style
  tests based on existing ExUnit assert statements. For example:
  assert URI.encode_query([{"foo z", :bar}]) == "foo+z=bar"
  This should be converted to the following code
  success_funct = PhStPhenetic.congruent("foo+z=bar")
  generate_input = fn ->
  PhStPhenetic.mutate([{"foo z", :bar}])
  end
  Stream.repeatedly(generate_input)
  |> Enum.take(@test_number)
  |> Enum.partition( fn input -> success_funct.(URI.encode_query(input)) end )
  That should produce an List of inputs for which the output is not congruent
  to test data in the false partition. This example only works for a function with a single argument.
  It needs to be extended to use Kernel.apply for functions that take many arguments.
  An alternative approach would be to put a timer around it and run as many as possible
  in a given interval.
  """

  @doc """
  Return a List of inputs for which function returns a value that
  is not congruent to the supplied result. The module, function, args
  are copied from the inputs for Kernel.apply.
  From the example in the module docs.
  congruent(URI, :encode_query, [[{"foo z", :bar}]], "foo+z=bar")
  """
  def congruent(module, function, args, result, test_count \\ 1000) do
    check(module, function, args, PhStPhenetic.congruent(result), test_count)
  end

  @doc """
  Same as `congruent/5`, but outputs must match the supplied result exactly.
  """
  def exact(module, function, args, result, test_count \\ 1000) do
    check(module, function, args, PhStPhenetic.exact(result), test_count)
  end

  @doc """
  Same as `congruent/5`, but outputs only need to be similar to the result.
  """
  def similar(module, function, args, result, test_count \\ 1000) do
    check(module, function, args, PhStPhenetic.similar(result), test_count)
  end

  @doc """
  Returns a tuple of lists of input data, one list passes the success_test and one does not.
  The input is a function to generate input, a function that returns t/f based on whether
  the input passes the test or not and a test_count.
  """
  def property(module, function, input_generator, success_test, test_count) do
    # Enum.split_with/2 replaces the deprecated Enum.partition/2; behavior
    # is identical ({passing, failing}).
    input_generator
    |> Stream.repeatedly()
    |> Stream.take(test_count)
    |> Enum.split_with(fn input -> success_test.(apply(module, function, input)) end)
  end

  # Shared driver for congruent/exact/similar: mutate each original argument
  # to produce fresh inputs, then run the property check. Extracted because
  # the three public entry points previously duplicated this body verbatim.
  # TODO (from original): the generator should really be an input itself.
  defp check(module, function, args, success_funct, test_count) do
    generate_input = fn -> Enum.map(args, &PhStMutate.mutate/1) end
    property(module, function, generate_input, success_funct, test_count)
  end
end
|
lib/phst_invariant.ex
| 0.863363
| 0.792585
|
phst_invariant.ex
|
starcoder
|
defmodule Memex.TidBit do
  @moduledoc """
  Modelled after the `tiddler` of TiddlyWiki.
  https://tiddlywiki.com/#TiddlerFields
  This module is really only supposed to contain the struct definition for
  TidBits. On the command-line, however, it is convenient/intuitive to think
  of things from a TidBit-centric perspective (e.g. `TidBit.new()` to make a
  new TidBit), so a handful of functions are declared here purely as API-sugar.
  """
  @enforce_keys [:uuid, :title, :created, :creator, :modified, :modifier]
  @derive Jason.Encoder
  defstruct [
    uuid: nil,         # each tiddler has a UUID
    title: nil,        # the unique name for this tidbit
    data: nil,         # the body text of the tidbit
    modified: nil,     # the time this tidbit was last modified
    modifier: nil,     # the name of the last person to modify this TidBit
    created: nil,      # the date this tidbit was created
    creator: nil,      # the name of the person who created this TidBit
    type: [],          # the content-type of a tidbit - a list of strings
    tags: [],          # a list of tags associated with a TidBit
    links: [],         # a list of all the linked TidBits
    backlinks: [],     # a list of all the Tidbits which link to this one
    status: nil,       # an internal flag - we can "archive" TidBits this way
    history: nil,      # each time a TidBit changes, we track the history #TODO
    caption: nil,      # the text to be displayed in a tab or button
    meta: [],          # a place to put extra data, e.g. `due_date`
    module: __MODULE__ # lets us rebuild the correct Elixir struct from the JSON text files
  ]

  @doc ~s(This is here for the sake of the nice API: TidBit.new/1)
  defdelegate new(params), to: Memex.My.Wiki, as: :new_tidbit

  @doc ~s(This is here for the sake of the nice API: TidBit.update/2)
  defdelegate update(tidbit, params), to: Memex.My.Wiki

  defdelegate list(), to: Memex.My.Wiki

  defdelegate find(search_term), to: Memex.My.Wiki

  # Only "external" TidBits can be opened; they point at a file on disk.
  def open(%{type: ["external" | _rest]} = tidbit) do
    Memex.Utils.ToolBag.open_external_textfile(tidbit)
  end

  defdelegate link(base_node, link_node), to: Memex.My.Wiki

  @doc ~s(Convenience alias for add_tag/2.)
  def tag(tidbit, new_tag), do: add_tag(tidbit, new_tag)

  defdelegate add_tag(tidbit, tag), to: Memex.My.Wiki

  defdelegate construct(params), to: Memex.Utils.TidBits.ConstructorLogic

  @doc ~s(When we need to reference a TidBit e.g. a list of TidBits, use this function to get the reference.)
  def construct_reference(%{title: title, uuid: uuid}), do: %{title: title, uuid: uuid}

  @doc ~s(This is the string format used to reference TidBits inside other TidBits.)
  def construct_link_string(%{title: title, uuid: uuid}) do
    "#{title}-[#{title}/#{uuid}]"
  end
end
|
lib/structs/tidbit.ex
| 0.628407
| 0.472623
|
tidbit.ex
|
starcoder
|
defmodule StrawHat.Review.Reviews do
  @moduledoc """
  Interactor module that defines all the functionality for Reviews management.
  """
  use StrawHat.Review.Interactor

  alias StrawHat.Review.Review

  @doc """
  Gets the list of reviews.
  """
  @spec get_reviews(Scrivener.Config.t() | keyword()) :: Scrivener.Page.t()
  def get_reviews(pagination \\ []), do: Repo.paginate(Review, pagination)

  @doc """
  Creates a review.
  """
  @spec create_review(Review.review_attrs()) :: {:ok, Review.t()} | {:error, Ecto.Changeset.t()}
  def create_review(review_attrs) do
    %Review{}
    |> Review.changeset(review_attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a review.
  """
  @spec update_review(Review.t(), Review.review_attrs()) ::
          {:ok, Review.t()} | {:error, Ecto.Changeset.t()}
  def update_review(%Review{} = review, review_attrs) do
    review
    |> Review.changeset(review_attrs)
    |> Repo.update()
  end

  @doc """
  Destroys a review.
  """
  @spec destroy_review(Review.t()) :: {:ok, Review.t()} | {:error, Ecto.Changeset.t()}
  def destroy_review(%Review{} = review), do: Repo.delete(review)

  @doc """
  Finds a review by `id`, returning a tagged `Error` when it does not exist.
  """
  # NOTE: specs previously used `Integer.t()`, which is not a defined type
  # (the Integer module exports no t/0); `integer()` is the built-in type.
  @spec find_review(integer()) :: {:ok, Review.t()} | {:error, Error.t()}
  def find_review(review_id) do
    review_id
    |> get_review()
    |> Response.from_value(
      Error.new(
        "straw_hat_review.review.not_found",
        metadata: [review_id: review_id]
      )
    )
  end

  @doc """
  Gets a review by `id`, with aspects and medias preloaded.
  """
  @spec get_review(integer()) :: Review.t() | nil | no_return
  def get_review(review_id) do
    query = from(review in Review, where: review.id == ^review_id)

    query
    |> preload_review_associations()
    |> Repo.one()
  end

  @doc """
  Gets a list of reviews by ids, with aspects and medias preloaded.
  """
  @spec get_review_by_ids([integer()]) :: [Review.t()] | no_return
  def get_review_by_ids(review_ids) do
    query = from(review in Review, where: review.id in ^review_ids)

    query
    |> preload_review_associations()
    |> Repo.all()
  end

  @doc """
  Gets a list of comments by review ids.
  """
  @spec get_comments([integer()]) :: [Review.t()] | no_return
  def get_comments(review_ids) do
    query =
      from(
        review in Review,
        where: review.id in ^review_ids,
        join: comments in assoc(review, :comments),
        preload: [comments: comments]
      )

    Repo.all(query)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking review changes.
  """
  @spec change_review(Review.t()) :: Ecto.Changeset.t()
  def change_review(%Review{} = review) do
    Review.changeset(review, %{})
  end

  # Shared preload set used by the single- and multi-fetch queries above;
  # extracted so the two cannot drift apart.
  defp preload_review_associations(query) do
    from(
      review in query,
      preload: [aspects: :aspect],
      preload: [:medias]
    )
  end
end
|
lib/straw_hat_review/reviews/reviews.ex
| 0.790328
| 0.469703
|
reviews.ex
|
starcoder
|
defmodule Nostrum.Consumer do
  @moduledoc """
  Consumer process for gateway event handling.
  # Consuming Gateway Events
  To handle events, Nostrum uses a GenStage implementation.
  Nostrum defines the `producer` and `producer_consumer` in the GenStage design.
  To consume the events you must create at least one `consumer` process. It is
  generally recommended that you spawn a consumer per core. To find this number
  you can use `System.schedulers_online/0`.
  Nostrum uses a ConsumerSupervisor to dispatch events, meaning your handlers
  will each be ran in their own separate task.
  ## Example
  An example consumer can be found
  [here](https://github.com/Kraigie/nostrum/blob/master/examples/event_consumer.ex).
  """
  use ConsumerSupervisor

  alias Nostrum.Shard.Stage.Cache
  alias Nostrum.Struct.{Channel, WSState}
  alias Nostrum.Struct.Event.{MessageDelete, MessageDeleteBulk}
  alias Nostrum.Util

  @doc """
  Callback used to handle events.
  ## Event
  `event` is a tuple describing the event. The tuple will include information in
  the following format:
  ```Elixir
  {event_name, {event_payload(s)}, WSState.t}
  ```
  For example, a message create will look like this
  ```Elixir
  {:MESSAGE_CREATE, {Nostrum.Struct.Message.t}, WSState.t}
  ```
  In some cases there will be multiple payloads when something is updated, so as
  to include the new and the old versions. In the event of there being two payloads,
  the old payload will always be first, followed by the new payload.
  ```Elixir
  {:USER_UPDATE, {old_user :: Nostrum.Struct.User.t, new_user :: Nostrum.Struct.User.t}, WSState.t()}
  ```
  For a full listing of events, please see `t:Nostrum.Consumer.event/0`.
  """
  @callback handle_event(event) :: any

  @type options :: [option] | []

  @typedoc """
  General process options.
  The `subscribe_to` option should only be set if you want to use your own producer or producer consumer.
  """
  @type option ::
          {:registry, atom()}
          | {:name, Supervisor.name()}
          | {:max_restarts, non_neg_integer()}
          | {:max_seconds, non_neg_integer()}
          | {:subscribe_to, [GenStage.stage() | {GenStage.stage(), keyword()}]}

  # ---------------------------------------------------------------------------
  # Gateway event shapes. Each is {tag_atom, payload(s), shard websocket state}.
  # ---------------------------------------------------------------------------

  @type channel_create :: {:CHANNEL_CREATE, Channel.t(), WSState.t()}
  @type channel_delete :: {:CHANNEL_DELETE, Channel.t(), WSState.t()}

  @typedoc """
  Dispatched when a channel is updated.
  `old_channel` will be `nil` when the pre-update channel could not be fetched from the cache.
  """
  @type channel_update ::
          {:CHANNEL_UPDATE, {old_channel :: Channel.t() | nil, new_channel :: Channel.t()},
           WSState.t()}
  @type channel_pins_ack :: {:CHANNEL_PINS_ACK, map, WSState.t()}
  @type channel_pins_update :: {:CHANNEL_PINS_UPDATE, map, WSState.t()}
  @type guild_ban_add ::
          {:GUILD_BAN_ADD, {guild_id :: integer, Nostrum.Struct.User.t()}, WSState.t()}
  @type guild_ban_remove ::
          {:GUILD_BAN_REMOVE, {guild_id :: integer, Nostrum.Struct.User.t()}, WSState.t()}
  @type guild_create :: {:GUILD_CREATE, new_guild :: Nostrum.Struct.Guild.t(), WSState.t()}
  @type guild_available :: {:GUILD_AVAILABLE, new_guild :: Nostrum.Struct.Guild.t(), WSState.t()}
  @type guild_unavailable ::
          {:GUILD_UNAVAILABLE, unavailable_guild :: Nostrum.Struct.Guild.UnavailableGuild.t(),
           WSState.t()}
  @type guild_update ::
          {:GUILD_UPDATE,
           {old_guild :: Nostrum.Struct.Guild.t(), new_guild :: Nostrum.Struct.Guild.t()},
           WSState.t()}
  @type guild_delete ::
          {:GUILD_DELETE, {old_guild :: Nostrum.Struct.Guild.t(), unavailable :: boolean},
           WSState.t()}
  @type guild_emojis_update ::
          {:GUILD_EMOJIS_UPDATE,
           {guild_id :: integer, old_emojis :: [Nostrum.Struct.Message.Emoji.t()],
            new_emojis :: [Nostrum.Struct.Message.Emoji.t()]}, WSState.t()}
  # NOTE(review): "INTEGERATIONS" looks like a typo, but this atom must match
  # the tag actually dispatched by the event pipeline — do not "fix" it here
  # without checking the dispatcher.
  @type guild_integrations_update :: {:GUILD_INTEGERATIONS_UPDATE, map, WSState.t()}
  @type guild_member_add ::
          {:GUILD_MEMBER_ADD,
           {guild_id :: integer, new_member :: Nostrum.Struct.Guild.Member.t()}, WSState.t()}
  @type guild_members_chunk :: {:GUILD_MEMBERS_CHUNK, map, WSState.t()}
  @type guild_member_remove ::
          {:GUILD_MEMBER_REMOVE,
           {guild_id :: integer, old_member :: Nostrum.Struct.Guild.Member.t()}, WSState.t()}

  @typedoc """
  Dispatched when a guild member is updated.
  `old_member` will be `nil` when the pre-update member could not be fetched from the cache.
  """
  @type guild_member_update ::
          {:GUILD_MEMBER_UPDATE,
           {guild_id :: integer, old_member :: Nostrum.Struct.Guild.Member.t() | nil,
            new_member :: Nostrum.Struct.Guild.Member.t()}, WSState.t()}
  @type guild_role_create ::
          {:GUILD_ROLE_CREATE, {guild_id :: integer, new_role :: Nostrum.Struct.Guild.Role.t()},
           WSState.t()}
  @type guild_role_delete ::
          {:GUILD_ROLE_DELETE, {guild_id :: integer, old_role :: Nostrum.Struct.Guild.Role.t()},
           WSState.t()}

  @typedoc """
  Dispatched when a role on a guild is updated.
  `old_role` will be `nil` when the pre-update role could not be fetched from the cache.
  """
  @type guild_role_update ::
          {:GUILD_ROLE_UPDATE,
           {guild_id :: integer, old_role :: Nostrum.Struct.Guild.Role.t() | nil,
            new_role :: Nostrum.Struct.Guild.Role.t()}, WSState.t()}
  @type message_create :: {:MESSAGE_CREATE, message :: Nostrum.Struct.Message.t(), WSState.t()}
  @type message_delete :: {:MESSAGE_DELETE, MessageDelete.t(), WSState.t()}
  @type message_delete_bulk :: {:MESSAGE_DELETE_BULK, MessageDeleteBulk.t(), WSState.t()}
  @type message_update ::
          {:MESSAGE_UPDATE, updated_message :: Nostrum.Struct.Message.t(), WSState.t()}
  @type message_reaction_add :: {:MESSAGE_REACTION_ADD, Nostrum.Struct.Message.NewReaction.t(), WSState.t()}
  @type message_reaction_remove :: {:MESSAGE_REACTION_REMOVE, map, WSState.t()}
  @type message_reaction_remove_all :: {:MESSAGE_REACTION_REMOVE_ALL, map, WSState.t()}
  @type message_reaction_remove_emoji :: {:MESSAGE_REACTION_REMOVE_EMOJI, map, WSState.t()}
  @type message_ack :: {:MESSAGE_ACK, map, WSState.t()}

  @typedoc """
  Dispatched when a user's presence is updated.
  `old_presence` will be `nil` when the pre-update presence could not be fetched from the cache.
  """
  @type presence_update ::
          {:PRESENCE_UPDATE,
           {guild_id :: integer, old_presence :: map | nil, new_presence :: map}, WSState.t()}
  @type ready :: {:READY, map, WSState.t()}
  @type resumed :: {:RESUMED, map, WSState.t()}
  @type typing_start :: {:TYPING_START, map, WSState.t()}
  @type user_settings_update :: no_return

  @typedoc """
  Dispatched when a user is updated.
  `old_user` will be `nil` when the pre-update user could not be fetched from the cache.
  """
  @type user_update ::
          {:USER_UPDATE,
           {old_user :: Nostrum.Struct.User.t() | nil, new_user :: Nostrum.Struct.User.t()},
           WSState.t()}
  @type voice_state_update :: {:VOICE_STATE_UPDATE, map, WSState.t()}
  @type voice_server_update :: {:VOICE_SERVER_UPDATE, map, WSState.t()}
  @type webhooks_update :: {:WEBHOOKS_UPDATE, map, WSState.t()}

  # Union of every event a consumer may receive in handle_event/1.
  @type event ::
          channel_create
          | channel_delete
          | channel_update
          | channel_pins_ack
          | channel_pins_update
          | guild_ban_add
          | guild_ban_remove
          | guild_create
          | guild_available
          | guild_unavailable
          | guild_update
          | guild_delete
          | guild_emojis_update
          | guild_integrations_update
          | guild_member_add
          | guild_members_chunk
          | guild_member_remove
          | guild_member_update
          | guild_role_create
          | guild_role_delete
          | guild_role_update
          | message_create
          | message_delete
          | message_delete_bulk
          | message_update
          | message_reaction_add
          | message_reaction_remove
          | message_reaction_remove_all
          | message_ack
          | presence_update
          | ready
          | resumed
          | typing_start
          | user_settings_update
          | user_update
          | voice_state_update
          | voice_server_update
          | webhooks_update

  # Injects into the using module: a Task-based start_link/1 that runs
  # handle_event/1 per dispatched event, a child_spec/1, and an overridable
  # no-op handle_event/1 so unhandled events are ignored by default.
  defmacro __using__(opts) do
    quote location: :keep do
      @behaviour Nostrum.Consumer

      alias Nostrum.Consumer

      def start_link(event) do
        Task.start_link(fn ->
          __MODULE__.handle_event(event)
        end)
      end

      def child_spec(_arg) do
        spec = %{
          id: __MODULE__,
          start: {__MODULE__, :start_link, []}
        }

        Supervisor.child_spec(spec, unquote(Macro.escape(opts)))
      end

      # Default catch-all; users override the clauses they care about.
      def handle_event(_event) do
        :ok
      end

      defoverridable handle_event: 1, child_spec: 1
    end
  end

  @doc ~S"""
  Starts a consumer process.
  `mod` is the name of the module where you define your event callbacks, which should probably be
  the current module which you can get with `__MODULE__`.
  `options` is a list of general process options. See `t:Nostrum.Consumer.options/0` for more info.
  """
  @spec start_link(module, options) :: Supervisor.on_start()
  def start_link(mod, options \\ [])

  # Clause for when a :name option is given; it must be passed to
  # ConsumerSupervisor.start_link/3 directly rather than through init/1.
  def start_link(mod, [name: name] = options) do
    ConsumerSupervisor.start_link(
      __MODULE__,
      [mod, Keyword.drop(options, [:name])],
      name: name,
      spawn_opt: [Util.fullsweep_after()]
    )
  end

  def start_link(mod, options),
    do:
      ConsumerSupervisor.start_link(
        __MODULE__,
        [mod, options],
        spawn_opt: [Util.fullsweep_after()]
      )

  # ConsumerSupervisor callback: subscribes to the Cache producer stage by
  # default and runs one transient child (the user's consumer) per event.
  @doc false
  def init([mod, opt]) do
    default = [strategy: :one_for_one, subscribe_to: [Cache]]

    ConsumerSupervisor.init(
      [
        %{
          id: mod,
          start: {mod, :start_link, []},
          restart: :transient
        }
      ],
      Keyword.merge(default, opt)
    )
  end
end
|
lib/nostrum/consumer.ex
| 0.885384
| 0.760628
|
consumer.ex
|
starcoder
|
defmodule Ecto.Adapters.SQL do
@moduledoc """
This application provides functionality for working with
SQL databases in `Ecto`.
## Built-in adapters
By default, we support the following adapters:
* `Ecto.Adapters.Postgres`
* `Ecto.Adapters.MySQL`
## Migrations
Ecto supports database migrations. You can generate a migration
with:
$ mix ecto.gen.migration create_posts
This will create a new file inside `priv/repo/migrations` with the
`change` function. Check `Ecto.Migration` for more information.
To interface with migrations, developers typically use mix tasks:
* `mix ecto.migrations` - lists all available migrations and their status
* `mix ecto.migrate` - runs a migration
* `mix ecto.rollback` - rolls back a previously run migration
If you want to run migrations programatically, see `Ecto.Migrator`.
## SQL sandbox
`ecto_sql` provides a sandbox for testing. The sandbox wraps each
test in a transaction, making sure the tests are isolated and can
run concurrently. See `Ecto.Adapters.SQL.Sandbox` for more information.
## Structure load and dumping
If you have an existing database, you may want to dump its existing
structure and make it reproducible from within Ecto. This can be
achieved with two Mix tasks:
* `mix ecto.load` - loads an existing structure into the database
* `mix ecto.dump` - dumps the existing database structure to the filesystem
For creating and dropping databases, see `mix ecto.create`
and `mix ecto.drop` that are included as part of Ecto.
## Custom adapters
Developers can implement their own SQL adapters by using
`Ecto.Adapters.SQL` and by implementing the callbacks required
by `Ecto.Adapters.SQL.Connection` for handling connections and
performing queries. The connection handling and pooling for SQL
adapters should be built using the `DBConnection` library.
When using `Ecto.Adapters.SQL`, the following options are required:
* `:driver` (required) - the database driver library.
For example: `:postgrex`
* `:migration_lock` - the lock to use on migration locks.
For example: "FOR UPDATE". It may also be `nil` (for no lock).
The user can still override this by setting `:migration_lock`
in the repository configuration
"""
require Logger
# Injects a complete SQL adapter implementation into the calling module.
# The generated callbacks mostly delegate to Ecto.Adapters.SQL and are
# parameterized by three compile-time attributes:
#   @conn           - connection module, derived by convention as
#                     <adapter>.Connection
#   @driver         - database driver application (e.g. :postgrex);
#                     required in `opts`
#   @migration_lock - lock clause used while running migrations; required
#                     in `opts`, may be nil for no lock
@doc false
defmacro __using__(opts) do
quote do
@behaviour Ecto.Adapter
@behaviour Ecto.Adapter.Migration
@behaviour Ecto.Adapter.Queryable
@behaviour Ecto.Adapter.Schema
@behaviour Ecto.Adapter.Transaction
opts = unquote(opts)
@conn __MODULE__.Connection
@driver Keyword.fetch!(opts, :driver)
@migration_lock Keyword.fetch!(opts, :migration_lock)
@impl true
defmacro __before_compile__(env) do
Ecto.Adapters.SQL.__before_compile__(@driver, env)
end
@impl true
def ensure_all_started(config, type) do
Ecto.Adapters.SQL.ensure_all_started(@driver, config, type)
end
@impl true
def init(config) do
Ecto.Adapters.SQL.init(@conn, @driver, config)
end
@impl true
def checkout(meta, opts, fun) do
Ecto.Adapters.SQL.checkout(meta, opts, fun)
end
# Loaders/dumpers translate between database values and Ecto types:
# embeds and maps go through the embed helpers, binary ids round-trip
# through Ecto.UUID, everything else passes through unchanged.
@impl true
def loaders({:embed, _} = type, _), do: [&Ecto.Adapters.SQL.load_embed(type, &1)]
def loaders({:map, _} = type, _), do: [&Ecto.Adapters.SQL.load_embed(type, &1)]
def loaders(:binary_id, type), do: [Ecto.UUID, type]
def loaders(_, type), do: [type]
@impl true
def dumpers({:embed, _} = type, _), do: [&Ecto.Adapters.SQL.dump_embed(type, &1)]
def dumpers({:map, _} = type, _), do: [&Ecto.Adapters.SQL.dump_embed(type, &1)]
def dumpers(:binary_id, type), do: [type, Ecto.UUID]
def dumpers(_, type), do: [type]
## Query
# Each query kind is compiled to SQL once and cached under a unique id
# so later executions can reuse a prepared statement.
@impl true
def prepare(:all, query) do
{:cache, {System.unique_integer([:positive]), IO.iodata_to_binary(@conn.all(query))}}
end
def prepare(:update_all, query) do
{:cache, {System.unique_integer([:positive]), IO.iodata_to_binary(@conn.update_all(query))}}
end
def prepare(:delete_all, query) do
{:cache, {System.unique_integer([:positive]), IO.iodata_to_binary(@conn.delete_all(query))}}
end
@impl true
def execute(adapter_meta, query_meta, query, params, opts) do
Ecto.Adapters.SQL.execute(adapter_meta, query_meta, query, params, opts)
end
@impl true
def stream(adapter_meta, query_meta, query, params, opts) do
Ecto.Adapters.SQL.stream(adapter_meta, query_meta, query, params, opts)
end
## Schema
# :id values are generated by the database (hence nil here); uuid-style
# ids are generated locally.
@impl true
def autogenerate(:id), do: nil
def autogenerate(:embed_id), do: Ecto.UUID.generate()
def autogenerate(:binary_id), do: Ecto.UUID.bingenerate()
@impl true
def insert_all(adapter_meta, schema_meta, header, rows, on_conflict, returning, opts) do
Ecto.Adapters.SQL.insert_all(adapter_meta, schema_meta, @conn, header, rows, on_conflict, returning, opts)
end
@impl true
def insert(adapter_meta, %{source: source, prefix: prefix}, params,
{kind, conflict_params, _} = on_conflict, returning, opts) do
{fields, values} = :lists.unzip(params)
sql = @conn.insert(prefix, source, fields, [fields], on_conflict, returning)
Ecto.Adapters.SQL.struct(adapter_meta, @conn, sql, :insert, source, [], values ++ conflict_params, kind, returning, opts)
end
@impl true
def update(adapter_meta, %{source: source, prefix: prefix}, fields, params, returning, opts) do
{fields, field_values} = :lists.unzip(fields)
filter_values = params |> Keyword.values() |> Enum.reject(&is_nil(&1))
sql = @conn.update(prefix, source, fields, params, returning)
Ecto.Adapters.SQL.struct(adapter_meta, @conn, sql, :update, source, params, field_values ++ filter_values, :raise, returning, opts)
end
@impl true
def delete(adapter_meta, %{source: source, prefix: prefix}, params, opts) do
filter_values = params |> Keyword.values() |> Enum.reject(&is_nil(&1))
sql = @conn.delete(prefix, source, params, [])
Ecto.Adapters.SQL.struct(adapter_meta, @conn, sql, :delete, source, params, filter_values, :raise, [], opts)
end
## Transaction
@impl true
def transaction(meta, opts, fun) do
Ecto.Adapters.SQL.transaction(meta, opts, fun)
end
@impl true
def in_transaction?(meta) do
Ecto.Adapters.SQL.in_transaction?(meta)
end
@impl true
def rollback(meta, value) do
Ecto.Adapters.SQL.rollback(meta, value)
end
## Migration
@impl true
def execute_ddl(meta, definition, opts) do
Ecto.Adapters.SQL.execute_ddl(meta, @conn, definition, opts)
end
@impl true
def lock_for_migrations(meta, query, opts, fun) do
Ecto.Adapters.SQL.lock_for_migrations(meta, query, opts, @migration_lock, fun)
end
# Concrete adapters may override any of the generated callbacks.
defoverridable [prepare: 2, execute: 5, insert: 6, update: 6, delete: 4, insert_all: 7,
execute_ddl: 3, loaders: 2, dumpers: 2, autogenerate: 1,
ensure_all_started: 2, lock_for_migrations: 4]
end
end
@doc """
Converts the given query to SQL according to its kind and the
adapter in the given repository.

## Examples

The examples below are meant for reference. Each adapter will
return a different result:

    iex> Ecto.Adapters.SQL.to_sql(:all, repo, Post)
    {"SELECT p.id, p.title, p.inserted_at, p.created_at FROM posts as p", []}

    iex> Ecto.Adapters.SQL.to_sql(:update_all, repo,
    from(p in Post, update: [set: [title: ^"hello"]]))
    {"UPDATE posts AS p SET title = $1", ["hello"]}

This function is also available under the repository with name `to_sql`:

    iex> Repo.to_sql(:all, Post)
    {"SELECT p.id, p.title, p.inserted_at, p.created_at FROM posts as p", []}
"""
@spec to_sql(:all | :update_all | :delete_all, Ecto.Repo.t, Ecto.Queryable.t) ::
{String.t, [term]}
def to_sql(kind, repo, queryable) do
  {query, params} = Ecto.Adapter.Queryable.prepare_query(kind, repo, queryable)

  # Cached statements may be stored as iodata/charlists, so render them
  # to a binary; cache/nocache entries already carry the SQL string.
  sql =
    case query do
      {:cached, _update, _reset, {_id, cached}} -> String.Chars.to_string(cached)
      {:cache, _update, {_id, prepared}} -> prepared
      {:nocache, {_id, prepared}} -> prepared
    end

  {sql, params}
end
@doc """
Returns a stream that runs a custom SQL query on given repo when reduced.

In case of success it is a enumerable containing maps with at least two keys:

  * `:num_rows` - the number of rows affected

  * `:rows` - the result set as a list. `nil` may be returned
    instead of the list if the command does not yield any row
    as result (but still yields the number of affected rows,
    like a `delete` command without returning would)

In case of failure it raises an exception.

If the adapter supports a collectable stream, the stream may also be used as
the collectable in `Enum.into/3`. Behaviour depends on the adapter.

## Options

  * `:timeout` - The time in milliseconds to wait for a query to finish,
    `:infinity` will wait indefinitely (default: 15_000)

  * `:log` - When false, does not log the query

  * `:max_rows` - The number of rows to load from the database as we stream

## Examples

    iex> Ecto.Adapters.SQL.stream(MyRepo, "SELECT $1::integer + $2", [40, 2]) |> Enum.to_list()
    [%{rows: [[42]], num_rows: 1}]

"""
@spec stream(Ecto.Repo.t, String.t, [term], Keyword.t) :: Enum.t
def stream(repo, sql, params \\ [], opts \\ []) do
  # Resolve the repo module to its runtime adapter meta, then build the
  # lazy stream over that meta.
  adapter_meta = Ecto.Adapter.lookup_meta(repo)
  Ecto.Adapters.SQL.Stream.build(adapter_meta, sql, params, opts)
end
@doc """
Same as `query/4` but raises on invalid queries.
"""
@spec query!(Ecto.Repo.t | Ecto.Adapter.adapter_meta, String.t, [term], Keyword.t) ::
%{:rows => nil | [[term] | binary],
:num_rows => non_neg_integer,
optional(atom) => any}
def query!(repo, sql, params \\ [], opts \\ []) do
  # Unwrap the ok tuple or re-raise the underlying driver error.
  repo
  |> query(sql, params, opts)
  |> case do
    {:ok, result} -> result
    {:error, err} -> raise_sql_call_error(err)
  end
end
@doc """
Runs custom SQL query on given repo.
In case of success, it must return an `:ok` tuple containing
a map with at least two keys:
* `:num_rows` - the number of rows affected
* `:rows` - the result set as a list. `nil` may be returned
instead of the list if the command does not yield any row
as result (but still yields the number of affected rows,
like a `delete` command without returning would)
## Options
* `:timeout` - The time in milliseconds to wait for a query to finish,
`:infinity` will wait indefinitely. (default: 15_000)
* `:log` - When false, does not log the query
## Examples
iex> Ecto.Adapters.SQL.query(MyRepo, "SELECT $1::integer + $2", [40, 2])
{:ok, %{rows: [[42]], num_rows: 1}}
For convenience, this function is also available under the repository:
iex> MyRepo.query("SELECT $1::integer + $2", [40, 2])
{:ok, %{rows: [[42]], num_rows: 1}}
"""
@spec query(Ecto.Repo.t | Ecto.Adapter.adapter_meta, String.t, [term], Keyword.t) ::
{:ok, %{:rows => nil | [[term] | binary],
:num_rows => non_neg_integer,
optional(atom) => any}}
| {:error, Exception.t}
def query(repo, sql, params \\ [], opts \\ [])
# A repo module (atom) is resolved to its runtime adapter meta first;
# callers may also pass the adapter meta map directly.
def query(repo, sql, params, opts) when is_atom(repo) do
query(Ecto.Adapter.lookup_meta(repo), sql, params, opts)
end
def query(adapter_meta, sql, params, opts) do
sql_call(adapter_meta, :query, [sql], params, opts)
end
# Invokes `callback` on the connection module, targeting the connection
# bound to this process (or the pool itself when none is checked out),
# with params and the log-instrumented options appended to `args`.
defp sql_call(%{pid: pool, telemetry: telemetry, sql: sql, opts: default_opts}, callback, args, params, opts) do
  final_opts = with_log(telemetry, params, opts ++ default_opts)
  apply(sql, callback, [get_conn_or_pool(pool) | args ++ [params, final_opts]])
end
# Records the query's primary source (table name) in the options so log
# and telemetry entries can report it. Options are left untouched when
# the first source is not a plain table name (e.g. fragment/subquery).
defp put_source(opts, query_meta) do
  case query_meta do
    %{sources: sources} when is_binary(elem(elem(sources, 0), 0)) ->
      {source, _, _} = elem(sources, 0)
      Keyword.put(opts, :source, source)

    _ ->
      opts
  end
end
## Callbacks
@doc false
# Invoked at repo compile time: warns about the legacy/deprecated
# `:ecto, :json_library` configuration and injects the repo-level
# query/1..3, query!/1..3 and to_sql/2 convenience functions.
def __before_compile__(driver, _env) do
case Application.get_env(:ecto, :json_library) do
nil ->
:ok
Jason ->
IO.warn """
Jason is the default :json_library in Ecto 3.0.
You no longer need to configure it explicitly,
please remove this line from your config files:
config :ecto, :json_library, Jason
"""
value ->
IO.warn """
The :json_library configuration for the :ecto application is deprecated.
Please configure the :json_library in the driver instead:
config #{inspect driver}, :json_library, #{inspect value}
"""
end
quote do
@doc """
A convenience function for SQL-based repositories that executes the given query.
See `Ecto.Adapters.SQL.query/4` for more information.
"""
def query(sql, params \\ [], opts \\ []) do
Ecto.Adapters.SQL.query(__MODULE__, sql, params, opts)
end
@doc """
A convenience function for SQL-based repositories that executes the given query.
See `Ecto.Adapters.SQL.query!/4` for more information.
"""
def query!(sql, params \\ [], opts \\ []) do
Ecto.Adapters.SQL.query!(__MODULE__, sql, params, opts)
end
@doc """
A convenience function for SQL-based repositories that translates the given query to SQL.
See `Ecto.Adapters.SQL.to_sql/3` for more information.
"""
def to_sql(operation, queryable) do
Ecto.Adapters.SQL.to_sql(operation, __MODULE__, queryable)
end
end
end
@doc false
# Starts the driver application and all of its dependencies. The driver
# itself is always listed last so it is restarted if necessary.
def ensure_all_started(driver, _config, type) do
  case Application.ensure_all_started(driver, type) do
    {:ok, started} ->
      {:ok, List.delete(started, driver) ++ [driver]}

    other ->
      other
  end
end
@pool_opts [:timeout, :pool, :pool_size, :migration_lock, :queue_target, :queue_interval]
@doc false
# Builds the adapter child spec and runtime meta from the repo config.
# The connection module is generated by the driver; if it cannot be
# loaded, the driver dependency is missing (or Ecto was compiled before
# it was added), hence the actionable raise below.
def init(connection, driver, config) do
unless Code.ensure_loaded?(connection) do
raise """
could not find #{inspect connection}.
Please verify you have added #{inspect driver} as a dependency:
{#{inspect driver}, ">= 0.0.0"}
And remember to recompile Ecto afterwards by cleaning the current build:
mix deps.clean --build ecto
"""
end
log = Keyword.get(config, :log, :debug)
telemetry_prefix = Keyword.fetch!(config, :telemetry_prefix)
telemetry = {config[:repo], log, telemetry_prefix ++ [:query]}
config = adapter_config(config)
# Only pool-related options are kept in the adapter meta; the full
# config is handed to the connection's child spec.
opts = Keyword.take(config, @pool_opts)
meta = %{telemetry: telemetry, sql: connection, opts: opts}
{:ok, connection.child_spec(config), meta}
end
# Normalizes repo config before it reaches the connection pool: warns
# about the removed :pool_timeout option, drops the supervisor :name,
# and rewrites sandbox-capable pools via normalize_pool/1.
defp adapter_config(config) do
if Keyword.has_key?(config, :pool_timeout) do
message = """
:pool_timeout option no longer has an effect and has been replaced with an improved queuing system.
See \"Queue config\" in DBConnection.start_link/2 documentation for more information.
"""
IO.warn(message)
end
config
|> Keyword.delete(:name)
|> Keyword.update(:pool, DBConnection.ConnectionPool, &normalize_pool/1)
end
# Pools exporting unboxed_run/2 are sandbox pools (test mode); route
# them through DBConnection.Ownership. Any other pool passes through.
defp normalize_pool(pool) do
  sandbox? = Code.ensure_loaded?(pool) && function_exported?(pool, :unboxed_run, 2)

  if sandbox? do
    DBConnection.Ownership
  else
    pool
  end
end
@doc false
# Checks out a pool connection and runs `callback` with that connection
# bound to the current process (see checkout_or_transaction/4).
def checkout(adapter_meta, opts, callback) do
checkout_or_transaction(:run, adapter_meta, opts, callback)
end
## Types
@doc false
# Loads embed/map values coming from the database. Nested embeds are
# loaded recursively; every other inner type is *cast* rather than
# loaded, since embedded values are stored in their cast representation.
def load_embed(type, value) do
Ecto.Type.load(type, value, fn
{:embed, _} = type, value ->
load_embed(type, value)
type, value ->
case Ecto.Type.cast(type, value) do
{:ok, _} = ok -> ok
_ -> :error
end
end)
end
@doc false
# Dumps embed/map values for storage. Nested embeds are dumped
# recursively; other inner values are kept as-is (already cast).
def dump_embed(type, value) do
Ecto.Type.dump(type, value, fn
{:embed, _} = type, value -> dump_embed(type, value)
_type, value -> {:ok, value}
end)
end
## Query
@doc false
# Builds and runs a single multi-row INSERT. Params are accumulated in
# reverse order by unzip_inserts/2, hence the Enum.reverse/1 before the
# conflict params are appended.
def insert_all(adapter_meta, schema_meta, conn, header, rows, on_conflict, returning, opts) do
%{source: source, prefix: prefix} = schema_meta
{_, conflict_params, _} = on_conflict
{rows, params} = unzip_inserts(header, rows)
sql = conn.insert(prefix, source, header, rows, on_conflict, returning)
%{num_rows: num, rows: rows} =
query!(adapter_meta, sql, Enum.reverse(params) ++ conflict_params, opts)
{num, rows}
end
# Splits insert_all rows into row "shapes" plus a flat list of params.
# Per cell: a {query, params} pair (subquery) becomes a {query, count}
# marker with its params spliced in; a plain value becomes its key
# marker; a missing key becomes nil. Params are accumulated in reverse
# and reversed once by the caller (insert_all/8).
defp unzip_inserts(header, rows) do
Enum.map_reduce rows, [], fn fields, params ->
Enum.map_reduce header, params, fn key, acc ->
case :lists.keyfind(key, 1, fields) do
{^key, {%Ecto.Query{} = query, query_params}} ->
{{query, length(query_params)}, Enum.reverse(query_params) ++ acc}
{^key, value} ->
{key, [value | acc]}
false -> {nil, acc}
end
end
end
end
@doc false
# Runs a prepared query and returns {num_rows, rows}.
def execute(adapter_meta, query_meta, prepared, params, opts) do
%{num_rows: num, rows: rows} =
execute!(adapter_meta, prepared, params, put_source(opts, query_meta))
{num, rows}
end
# First execution of a cacheable query: prepare the statement and store
# the driver's prepared query back into the cache via `update`.
defp execute!(adapter_meta, {:cache, update, {id, prepared}}, params, opts) do
name = "ecto_" <> Integer.to_string(id)
case sql_call(adapter_meta, :prepare_execute, [name, prepared], params, opts) do
{:ok, query, result} ->
update.({id, query})
result
{:error, err} ->
raise_sql_call_error err
end
end
# Subsequent executions reuse the cached prepared query. On :reset
# (e.g. the statement became invalid), the cache entry is restored to
# raw SQL so the query is re-prepared next time.
defp execute!(adapter_meta, {:cached, update, reset, {id, cached}}, params, opts) do
case sql_call(adapter_meta, :execute, [cached], params, opts) do
{:ok, query, result} ->
update.({id, query})
result
{:ok, result} ->
result
{:error, err} ->
raise_sql_call_error err
{:reset, err} ->
reset.({id, String.Chars.to_string(cached)})
raise_sql_call_error err
end
end
# Non-cacheable queries are executed directly.
defp execute!(adapter_meta, {:nocache, {_id, prepared}}, params, opts) do
case sql_call(adapter_meta, :query, [prepared], params, opts) do
{:ok, res} -> res
{:error, err} -> raise_sql_call_error err
end
end
@doc false
# Returns a lazy stream for a prepared query; each emitted chunk is
# mapped to {num_rows, rows}.
def stream(adapter_meta, query_meta, prepared, params, opts) do
do_stream(adapter_meta, prepared, params, put_source(opts, query_meta))
end
defp do_stream(adapter_meta, {:cache, _, {_, prepared}}, params, opts) do
prepare_stream(adapter_meta, prepared, params, opts)
end
# Cached entries hold the driver's prepared query struct; streaming
# always goes through the raw SQL string instead.
defp do_stream(adapter_meta, {:cached, _, _, {_, cached}}, params, opts) do
prepare_stream(adapter_meta, String.Chars.to_string(cached), params, opts)
end
defp do_stream(adapter_meta, {:nocache, {_id, prepared}}, params, opts) do
prepare_stream(adapter_meta, prepared, params, opts)
end
defp prepare_stream(adapter_meta, prepared, params, opts) do
adapter_meta
|> Ecto.Adapters.SQL.Stream.build(prepared, params, opts)
|> Stream.map(fn(%{num_rows: nrows, rows: rows}) -> {nrows, rows} end)
end
# Re-raises a driver error. Ownership errors (sandbox misconfiguration)
# get a hint appended pointing at the SQL sandbox documentation.
defp raise_sql_call_error(%DBConnection.OwnershipError{message: message} = err) do
  hint = "\nSee Ecto.Adapters.SQL.Sandbox docs for more information."
  raise %{err | message: message <> hint}
end

defp raise_sql_call_error(err), do: raise(err)
@doc false
# Reduces a SQL stream. Streams may only run inside checkout/transaction,
# i.e. when a connection is already bound to the current process.
def reduce(adapter_meta, statement, params, opts, acc, fun) do
%{pid: pool, telemetry: telemetry, sql: sql, opts: default_opts} = adapter_meta
opts = with_log(telemetry, params, opts ++ default_opts)
case get_conn(pool) do
nil ->
raise "cannot reduce stream outside of transaction"
conn ->
sql
|> apply(:stream, [conn, statement, params, opts])
|> Enumerable.reduce(acc, fun)
end
end
@doc false
# Collectable counterpart of reduce/6; likewise requires a connection
# bound to the current process.
def into(adapter_meta, statement, params, opts) do
%{pid: pool, telemetry: telemetry, sql: sql, opts: default_opts} = adapter_meta
opts = with_log(telemetry, params, opts ++ default_opts)
case get_conn(pool) do
nil ->
raise "cannot collect into stream outside of transaction"
conn ->
sql
|> apply(:stream, [conn, statement, params, opts])
|> Collectable.into()
end
end
@doc false
# Runs a single-struct operation (insert/update/delete) and interprets
# the result:
#   * exactly 1 row -> {:ok, returning values}
#   * 0 rows        -> {:ok, []} if on_conflict is :nothing, else stale
#   * > 1 rows      -> multiple rows matched the filters, raise
#   * driver error  -> translated to constraints when possible
def struct(adapter_meta, conn, sql, operation, source, params, values, on_conflict, returning, opts) do
cache_statement = "ecto_#{operation}_#{source}"
case query(adapter_meta, sql, values, [cache_statement: cache_statement] ++ opts) do
{:ok, %{rows: nil, num_rows: 1}} ->
{:ok, []}
{:ok, %{rows: [values], num_rows: 1}} ->
{:ok, Enum.zip(returning, values)}
{:ok, %{num_rows: 0}} ->
if on_conflict == :nothing, do: {:ok, []}, else: {:error, :stale}
{:ok, %{num_rows: num_rows}} when num_rows > 1 ->
raise Ecto.MultiplePrimaryKeyError,
source: source, params: params, count: num_rows, operation: operation
{:error, err} ->
case conn.to_constraints(err) do
[] -> raise_sql_call_error err
constraints -> {:invalid, constraints}
end
end
end
## Transactions
@doc false
# Runs `callback` inside a database transaction
# (see checkout_or_transaction/4).
def transaction(adapter_meta, opts, callback) do
checkout_or_transaction(:transaction, adapter_meta, opts, callback)
end
@doc false
# True when the connection bound to this process is in transaction mode.
def in_transaction?(%{pid: pool}) do
match?(%DBConnection{conn_mode: :transaction}, get_conn(pool))
end
@doc false
# Rolling back is only legal from within a transaction.
def rollback(%{pid: pool}, value) do
case get_conn(pool) do
%DBConnection{conn_mode: :transaction} = conn -> DBConnection.rollback(conn, value)
_ -> raise "cannot call rollback outside of transaction"
end
end
## Migrations
@doc false
# Executes a DDL definition. One definition may expand to several SQL
# commands; each is run via query! and may contribute log entries
# collected through the connection module.
def execute_ddl(meta, conn, definition, opts) do
ddl_logs =
definition
|> conn.execute_ddl()
|> List.wrap()
|> Enum.map(&query!(meta, &1, [], opts))
|> Enum.flat_map(&conn.ddl_logs/1)
{:ok, ddl_logs}
end
@doc false
# Wraps `fun` in a transaction holding the migration lock (when a lock
# is configured; repo config may override the adapter default). A pool
# size of 1 cannot work because one connection holds the lock while a
# second runs the migrations.
def lock_for_migrations(meta, query, opts, migration_lock, fun) do
%{opts: adapter_opts} = meta
if lock = Keyword.get(adapter_opts, :migration_lock, migration_lock) do
if Keyword.fetch(adapter_opts, :pool_size) == {:ok, 1} do
raise_pool_size_error()
end
{:ok, result} =
transaction(meta, opts ++ [log: false, timeout: :infinity], fn ->
query |> Map.put(:lock, lock) |> fun.()
end)
result
else
fun.(query)
end
end
# Raised from lock_for_migrations/5 when pool_size == 1. The message is
# user facing and must stay actionable.
defp raise_pool_size_error do
raise Ecto.MigrationError, """
Migrations failed to run because the connection pool size is less than 2.
Ecto requires a pool size of at least 2 to support concurrent migrators.
When migrations run, Ecto uses one connection to maintain a lock and
another to run migrations.
If you are running migrations with Mix, you can increase the number
of connections via the pool size option:
mix ecto.migrate --pool-size 2
If you are running the Ecto.Migrator programmatically, you can configure
the pool size via your application config:
config :my_app, Repo,
...,
pool_size: 2 # at least
"""
end
## Log

# Prepends a :log hook so DBConnection reports each query entry through
# log/4 with the telemetry config and the original params in scope.
defp with_log(telemetry, params, opts) do
  [{:log, &log(telemetry, params, &1, opts)} | opts]
end
# Emits telemetry and, depending on the :log option, a Logger entry for
# a single query, based on the DBConnection log entry.
defp log({repo, log, event_name}, params, entry, opts) do
%{
connection_time: query_time,
decode_time: decode_time,
pool_time: queue_time,
result: result,
query: query
} = entry
source = Keyword.get(opts, :source)
query_string = String.Chars.to_string(query)
# Unwrap tagged params so logs/telemetry show raw values.
params =
Enum.map(params, fn
%Ecto.Query.Tagged{value: value} -> value
value -> value
end)
measurements =
log_measurements(
[query_time: query_time, decode_time: decode_time, queue_time: queue_time],
0,
[]
)
metadata = %{
type: :ecto_sql_query,
repo: repo,
result: log_result(result),
params: params,
query: query_string,
source: source
}
# Telemetry can be disabled (or redirected) per query with
# telemetry_event: nil / another event name.
if event_name = Keyword.get(opts, :telemetry_event, event_name) do
:telemetry.execute(event_name, measurements, metadata)
end
# :log may be true (use configured level), false (silent) or an
# explicit Logger level.
case Keyword.get(opts, :log, log) do
true ->
Logger.log(
log,
fn -> log_iodata(measurements, metadata) end,
ansi_color: sql_color(query_string)
)
false ->
:ok
level ->
Logger.log(
level,
fn -> log_iodata(measurements, metadata) end,
ansi_color: sql_color(query_string)
)
end
:ok
end
# Folds the timing keyword list into a map, skipping nil timings and
# accumulating their sum under :total_time. Nil entries are dropped
# from the resulting map entirely.
defp log_measurements(entries, total, acc) do
  {sum, kept} =
    Enum.reduce(entries, {total, acc}, fn
      {_key, nil}, state -> state
      {key, value}, {sum, kept} -> {sum + value, [{key, value} | kept]}
    end)

  Map.new([total_time: sum] ++ kept)
end
# Collapses a DBConnection result tuple into :ok/:error for telemetry.
defp log_result(result) do
  case result do
    {:ok, _query, _res} -> :ok
    {:ok, _res} -> :ok
    _ -> :error
  end
end
# Builds the log line as iodata (flattened once by Logger).
#
# Timings are read with Map.get/2 because log_measurements/3 drops nil
# timings from the measurements map entirely: a strict pattern match on
# all three keys would raise MatchError whenever one of them (commonly
# decode_time or queue_time) was absent. The nil clause of log_time/3
# then renders the missing timing as empty iodata.
defp log_iodata(measurements, metadata) do
  query_time = Map.get(measurements, :query_time)
  decode_time = Map.get(measurements, :decode_time)
  queue_time = Map.get(measurements, :queue_time)

  %{
    params: params,
    query: query,
    result: result,
    source: source
  } = metadata

  [
    "QUERY",
    ?\s,
    log_ok_error(result),
    log_ok_source(source),
    log_time("db", query_time, true),
    log_time("decode", decode_time, false),
    log_time("queue", queue_time, false),
    ?\n,
    query,
    ?\s,
    inspect(params, charlists: false)
  ]
end
# Renders the OK/ERROR tag at the start of the log line.
defp log_ok_error(status) do
  case status do
    :ok -> "OK"
    :error -> "ERROR"
  end
end

# Renders the optional source (table) annotation for the log line.
defp log_ok_source(source) do
  if is_nil(source), do: "", else: " source=#{inspect(source)}"
end
# Formats one timing as iodata, e.g. " db=1.2ms". Times arrive in
# native units and are rounded down to a tenth of a millisecond;
# non-forced entries are omitted when they round to zero (and missing
# timings, passed as nil, are always omitted).
defp log_time(_label, nil, _force), do: []

defp log_time(label, native_time, force) do
  micros = System.convert_time_unit(native_time, :native, :microsecond)
  millis = div(micros, 100) / 10

  case force or millis > 0 do
    true -> [?\s, label, ?=, :io_lib_format.fwrite_g(millis), ?m, ?s]
    false -> []
  end
end
## Connection helpers
# Shared implementation of checkout/3 and transaction/3: runs `callback`
# through DBConnection (:run or :transaction), binding the checked-out
# connection to the current process for the duration so nested adapter
# calls reuse it. The previous binding (if any) is restored afterwards,
# even if the callback raises.
defp checkout_or_transaction(fun, adapter_meta, opts, callback) do
%{pid: pool, telemetry: telemetry, opts: default_opts} = adapter_meta
opts = with_log(telemetry, [], opts ++ default_opts)
callback = fn conn ->
previous_conn = put_conn(pool, conn)
try do
callback.()
after
reset_conn(pool, previous_conn)
end
end
apply(DBConnection, fun, [get_conn_or_pool(pool), callback, opts])
end
# The checked-out connection (if any) is stored in the process
# dictionary under a key derived from the pool, so nested adapter calls
# within checkout/transaction reuse the same connection.

defp get_conn_or_pool(pool) do
  Process.get(key(pool), pool)
end

defp get_conn(pool) do
  Process.get(key(pool))
end

defp put_conn(pool, connection) do
  Process.put(key(pool), connection)
end

defp reset_conn(pool, connection) do
  if connection, do: put_conn(pool, connection), else: Process.delete(key(pool))
end

defp key(pool), do: {__MODULE__, pool}
# Maps the leading SQL keyword to an ANSI color for Logger output.
# begin/commit are matched lowercase as emitted by the drivers; anything
# unrecognized gets no color.
defp sql_color(statement) do
  case statement do
    "SELECT" <> _ -> :cyan
    "ROLLBACK" <> _ -> :red
    "LOCK" <> _ -> :white
    "INSERT" <> _ -> :green
    "UPDATE" <> _ -> :yellow
    "DELETE" <> _ -> :red
    "begin" <> _ -> :magenta
    "commit" <> _ -> :magenta
    _ -> nil
  end
end
end
|
lib/ecto/adapters/sql.ex
| 0.879237
| 0.566198
|
sql.ex
|
starcoder
|
# Custom Ecto.Type storing an NPC script line as a map column and
# materializing it as a %Data.Script.Line{} struct on load.
defmodule Data.Script.Line do
@moduledoc """
Lines of the script the NPC converses with
"""
import Data.Type
@enforce_keys [:key, :message]
defstruct [:key, :message, :unknown, :trigger, listeners: []]
@type t() :: map()
@behaviour Ecto.Type
@impl Ecto.Type
def type, do: :map
@impl Ecto.Type
def cast(line) when is_map(line), do: {:ok, line}
def cast(_), do: :error
@doc """
Load a line from a stored map
Cast it properly
iex> Data.Script.Line.load(%{"key" => "start", "message" => "How are you?"})
{:ok, %Data.Script.Line{key: "start", message: "How are you?"}}
iex> Data.Script.Line.load(%{"key" => "start", "message" => "How are you?", "listeners" => [%{"phrase" => "good", "key" => "next"}]})
{:ok, %Data.Script.Line{key: "start", message: "How are you?", listeners: [%{phrase: "good", key: "next"}]}}
"""
@impl Ecto.Type
def load(line) do
# NOTE(review): String.to_atom/1 creates atoms at runtime from stored
# keys — acceptable for trusted database data, dangerous if this data
# can be shaped by untrusted input (atoms are never garbage collected).
line = for {key, val} <- line, into: %{}, do: {String.to_atom(key), val}
line = line |> load_listeners()
{:ok, struct(__MODULE__, line)}
end
# Atomizes the string keys of each listener map as well; matched only
# when a non-nil :listeners key is present.
defp load_listeners(event = %{listeners: listeners}) when listeners != nil do
listeners =
listeners
|> Enum.map(fn map ->
for {key, val} <- map, into: %{}, do: {String.to_atom(key), val}
end)
%{event | listeners: listeners}
end
defp load_listeners(event), do: event
@impl Ecto.Type
# Dumps the struct back to a plain map (listeners are stored as-is).
def dump(line) when is_map(line) do
line = line |> Map.delete(:__struct__)
{:ok, line}
end
def dump(_), do: :error
@doc """
Validate a line
Basic line
iex> Data.Script.Line.valid?(%{key: "start", message: "hi"})
true
Must have `key` and `message` as non-nil
iex> Data.Script.Line.valid?(%{key: nil, message: "hi"})
false
iex> Data.Script.Line.valid?(%{key: "start", message: nil})
false
Listeners are validated: each must have `phrase` and `key` if present
iex> Data.Script.Line.valid?(%{key: "start", message: "hi", listeners: []})
true
iex> Data.Script.Line.valid?(%{key: "start", message: "hi", listeners: [%{phrase: "hi", key: "next"}]})
true
iex> Data.Script.Line.valid?(%{key: "start", message: "hi", listeners: [%{phrase: "hi"}]})
false
For a quest
iex> Data.Script.Line.valid?(%{key: "start", message: "Hello", trigger: "quest"})
true
iex> Data.Script.Line.valid?(%{key: "start"})
false
"""
@spec valid?(t()) :: boolean()
def valid?(line) do
# keys/1 is imported from Data.Type; per the doctests it appears to
# return the map's non-nil keys in sorted order — TODO confirm.
Enum.all?(keys(line), fn key -> key in [:key, :message, :listeners, :unknown, :trigger] end) &&
Enum.all?([:key, :message], fn key -> key in keys(line) end) && valid_listeners?(line)
end
# Each listener must have exactly the :key and :phrase fields.
def valid_listeners?(%{listeners: listeners}) do
Enum.all?(listeners, fn listener ->
keys(listener) == [:key, :phrase]
end)
end
def valid_listeners?(_), do: true
end
|
lib/data/script/line.ex
| 0.73029
| 0.412294
|
line.ex
|
starcoder
|
defmodule Phoenix.LiveView do
@moduledoc ~S'''
LiveView provides rich, real-time user experiences with
server-rendered HTML.
The LiveView programming model is declarative: instead of
saying "once event X happens, change Y on the page",
events in LiveView are regular messages which may cause
changes to its state. Once the state changes, LiveView will
re-render the relevant parts of its HTML template and push it
to the browser, which updates itself in the most efficient
manner. This means developers write LiveView templates as
any other server-rendered HTML and LiveView does the hard
work of tracking changes and sending the relevant diffs to
the browser.
At the end of the day, a LiveView is nothing more than a
process that receives events as messages and updates its
state. The state itself is nothing more than functional
and immutable Elixir data structures. The events are either
internal application messages (usually emitted by `Phoenix.PubSub`)
or sent by the client/browser.
LiveView is first rendered statically as part of regular
HTTP requests, which provides quick times for "First Meaningful
Paint", in addition to helping search and indexing engines.
Then a persistent connection is established between client and
server. This allows LiveView applications to react faster to user
events as there is less work to be done and less data to be sent
compared to stateless requests that have to authenticate, decode, load,
and encode data on every request. The flipside is that LiveView
uses more memory on the server compared to stateless requests.
## Use cases
There are many use cases where LiveView is an excellent
fit right now:
* Handling of user interaction and inputs, buttons, and
forms - such as input validation, dynamic forms,
autocomplete, etc;
* Events and updates pushed by server - such as
notifications, dashboards, etc;
* Page and data navigation - such as navigating between
pages, pagination, etc can be built with LiveView
using the excellent live navigation feature set.
This reduces the amount of data sent over the wire,
gives developers full control over the LiveView
life-cycle, while controlling how the browser
tracks those changes in state;
There are also use cases which are a bad fit for LiveView:
* Animations - animations, menus, and general UI events
that do not need the server in the first place are a
bad fit for LiveView. Those can be achieved without
LiveView in multiple ways, such as with CSS and CSS
transitions, using LiveView hooks, or even integrating
with UI toolkits designed for this purpose, such as
Bootstrap, Alpine.JS, and similar.
## Life-cycle
A LiveView begins as a regular HTTP request and HTML response,
and then upgrades to a stateful view on client connect,
guaranteeing a regular HTML page even if JavaScript is disabled.
Any time a stateful view changes or updates its socket assigns, it is
automatically re-rendered and the updates are pushed to the client.
You begin by rendering a LiveView typically from your router.
When LiveView is first rendered, the `c:mount/3` callback is invoked
with the current params, the current session and the LiveView socket.
As in a regular request, `params` contains public data that can be
modified by the user. The `session` always contains private data set
by the application itself. The `c:mount/3` callback wires up socket
assigns necessary for rendering the view. After mounting, `c:render/1`
is invoked and the HTML is sent as a regular HTML response to the
client.
After rendering the static page, LiveView connects from the client
to the server where stateful views are spawned to push rendered updates
to the browser, and receive client events via `phx-` bindings. Just like
the first rendering, `c:mount/3` is invoked with params, session,
and socket state, where mount assigns values for rendering. However
in the connected client case, a LiveView process is spawned on
the server, pushes the result of `c:render/1` to the client and
continues on for the duration of the connection. If at any point
during the stateful life-cycle a crash is encountered, or the client
connection drops, the client gracefully reconnects to the server,
calling `c:mount/3` once again.
## Example
Before writing your first example, make sure that Phoenix LiveView
is properly installed. If you are just getting started, this can
be easily done by running `mix phx.new my_app --live`. The `phx.new`
command with the `--live` flag will create a new project with
LiveView installed and configured. Otherwise, please follow the steps
in the [installation guide](installation.md) before continuing.
A LiveView is a simple module that requires two callbacks: `c:mount/3`
and `c:render/1`:
defmodule MyAppWeb.ThermostatLive do
# If you generated an app with mix phx.new --live,
# the line below would be: use MyAppWeb, :live_view
use Phoenix.LiveView
def render(assigns) do
~L"""
Current temperature: <%= @temperature %>
"""
end
def mount(_params, %{"current_user_id" => user_id}, socket) do
temperature = Thermostat.get_user_reading(user_id)
{:ok, assign(socket, :temperature, temperature)}
end
end
The `c:render/1` callback receives the `socket.assigns` and is responsible
for returning rendered content. You can use `Phoenix.LiveView.Helpers.sigil_L/2`
to inline LiveView templates.
Next, decide where you want to use your LiveView.
You can serve the LiveView directly from your router (recommended):
defmodule MyAppWeb.Router do
use Phoenix.Router
import Phoenix.LiveView.Router
scope "/", MyAppWeb do
live "/thermostat", ThermostatLive
end
end
*Note:* the above assumes there is `plug :put_root_layout` call
in your router that configures the LiveView layout. This call is
automatically included by `mix phx.new --live` and described in
the installation guide. If you don't want to configure a root layout,
you must pass `layout: {MyAppWeb.LayoutView, "app.html"}` as an
option to the `live` macro above.
Alternatively, you can `live_render` from any template:
<h1>Temperature Control</h1>
<%= live_render(@conn, MyAppWeb.ThermostatLive) %>
Or you can `live_render` your view from any controller:
defmodule MyAppWeb.ThermostatController do
...
import Phoenix.LiveView.Controller
def show(conn, %{"id" => id}) do
live_render(conn, MyAppWeb.ThermostatLive)
end
end
When a LiveView is rendered, all of the data currently stored in the
connection session (see `Plug.Conn.get_session/1`) will be given to
the LiveView.
It is also possible to pass additional session information to the LiveView
through a session parameter:
# In the router
live "/thermostat", ThermostatLive, session: %{"extra_token" => "foo"}
# In a view
<%= live_render(@conn, MyAppWeb.ThermostatLive, session: %{"extra_token" => "foo"}) %>
Notice the `:session` uses string keys as a reminder that session data
is serialized and sent to the client. So you should always keep the data
in the session to a minimum. For example, instead of storing a User struct,
you should store the "user_id" and load the User when the LiveView mounts.
Once the LiveView is rendered, a regular HTML response is sent. In your
app.js file, you should find the following:
import {Socket} from "phoenix"
import LiveSocket from "phoenix_live_view"
let csrfToken = document.querySelector("meta[name='csrf-token']").getAttribute("content")
let liveSocket = new LiveSocket("/live", Socket, {params: {_csrf_token: csrfToken}})
liveSocket.connect()
After the client connects, `c:mount/3` will be invoked inside a spawned
LiveView process. At this point, you can use `connected?/1` to
conditionally perform stateful work, such as subscribing to pubsub topics,
sending messages, etc. For example, you can periodically update a LiveView
with a timer:
defmodule DemoWeb.ThermostatLive do
use Phoenix.LiveView
...
def mount(_params, %{"current_user_id" => user_id}, socket) do
if connected?(socket), do: Process.send_after(self(), :update, 30000)
case Thermostat.get_user_reading(user_id) do
{:ok, temperature} ->
{:ok, assign(socket, temperature: temperature, user_id: user_id)}
{:error, _reason} ->
{:ok, redirect(socket, to: "/error")}
end
end
def handle_info(:update, socket) do
Process.send_after(self(), :update, 30000)
{:ok, temperature} = Thermostat.get_reading(socket.assigns.user_id)
{:noreply, assign(socket, :temperature, temperature)}
end
end
We used `connected?(socket)` on mount to send our view a message every 30s if
the socket is in a connected state. We receive the `:update` message in the
`handle_info/2` callback, just like in an Elixir `GenServer`, and update our
socket assigns. Whenever a socket's assigns change, `c:render/1` is automatically
invoked, and the updates are sent to the client.
## Colocating templates
In the examples above, we have placed the template directly inside the
LiveView:
defmodule MyAppWeb.ThermostatLive do
use Phoenix.LiveView
def render(assigns) do
~L"""
Current temperature: <%= @temperature %>
"""
end
For larger templates, you can place them in a file in the same directory
and same name as the LiveView. For example, if the file above is placed
at `lib/my_app_web/live/thermostat_live.ex`, you can also remove the
`c:render/1` definition above and instead put the template code at
`lib/my_app_web/live/thermostat_live.html.leex`.
Alternatively, you can keep the `c:render/1` callback but delegate to an
existing `Phoenix.View` module in your application. For example:
defmodule MyAppWeb.ThermostatLive do
use Phoenix.LiveView
def render(assigns) do
Phoenix.View.render(MyAppWeb.PageView, "page.html", assigns)
end
end
In all cases, each assign in the template will be accessible as `@assign`.
You can learn more about [assigns and LiveEEx templates in their own guide](assigns-eex.md).
## Bindings
Phoenix supports DOM element bindings for client-server interaction. For
example, to react to a click on a button, you would render the element:
<button phx-click="inc_temperature">+</button>
Then on the server, all LiveView bindings are handled with the `handle_event`
callback, for example:
def handle_event("inc_temperature", _value, socket) do
{:ok, new_temp} = Thermostat.inc_temperature(socket.assigns.id)
{:noreply, assign(socket, :temperature, new_temp)}
end
| Binding | Attributes |
|------------------------|------------|
| [Params](bindings.md#click-events) | `phx-value-*` |
| [Click Events](bindings.md#click-events) | `phx-click`, `phx-capture-click` |
| [Focus/Blur Events](bindings.md#focus-and-blur-events) | `phx-blur`, `phx-focus`, `phx-window-blur`, `phx-window-focus` |
| [Key Events](bindings.md#key-events) | `phx-keydown`, `phx-keyup`, `phx-window-keydown`, `phx-window-keyup` |
| [Form Events](form-bindings.md) | `phx-change`, `phx-submit`, `phx-feedback-for`, `phx-disable-with`, `phx-trigger-action`, `phx-auto-recover` |
| [Rate Limiting](bindings.md#rate-limiting-events-with-debounce-and-throttle) | `phx-debounce`, `phx-throttle` |
| [DOM Patching](dom-patching.md) | `phx-update` |
| [JS Interop](js-interop.md#client-hooks) | `phx-hook` |
## Compartmentalizing markup and events with `render`, `live_render`, and `live_component`
We can render another template directly from a LiveView template by simply
calling `render`:
render SomeView, "child_template.html", assigns
Where `SomeView` is a regular `Phoenix.View`, typically defined in
`lib/my_app_web/views/some_view.ex` and "child_template.html" is defined
at `lib/my_app_web/templates/some_view/child_template.html.leex`. As long
as the template has the `.leex` extension and all assigns are passed,
LiveView change tracking will also work across templates.
When rendering a child template, any of the `phx-*` events in the child
template will be sent to the LiveView. In other words, similar to regular
Phoenix templates, a regular `render` call does not start another LiveView.
This means `render` is useful for sharing markup between views.
If you want to start a separate LiveView from within a LiveView, then you
can call `live_render/3` instead of `render/3`. This child LiveView runs
in a separate process than the parent, with its own `mount` and `handle_event`
callbacks. If a child LiveView crashes, it won't affect the parent. If the
parent crashes, all children are terminated.
When rendering a child LiveView, the `:id` option is required to uniquely
identify the child. A child LiveView will only ever be rendered and mounted
a single time, provided its ID remains unchanged. Updates to a child session
will be merged on the client, but not passed back up until either a crash and
re-mount or a connection drop and recovery. To force a child to re-mount with
new session data, a new ID must be provided.
Given that a LiveView runs on its own process, it is an excellent tool for creating
completely isolated UI elements, but it is a slightly expensive abstraction if
all you want is to compartmentalize markup and events. For example, if you are
showing a table with all users in the system, and you want to compartmentalize
this logic, rendering a separate `LiveView` for each user, then using a process
per user would likely be too expensive. For these cases, LiveView provides
`Phoenix.LiveComponent`, which are rendered using `live_component/3`:
<%= live_component(@socket, UserComponent, id: user.id, user: user) %>
Components have their own `mount` and `handle_event` callbacks, as well as their
own state with change tracking support. Components are also lightweight as they
"run" in the same process as the parent `LiveView`. However, this means an error
in a component would cause the whole view to fail to render. See `Phoenix.LiveComponent`
for a complete rundown on components.
To sum it up:
* `render` - compartmentalizes markup
* `live_component` - compartmentalizes state, markup, and events
* `live_render` - compartmentalizes state, markup, events, and error isolation
## Endpoint configuration
LiveView accepts the following configuration in your endpoint under
the `:live_view` key:
* `:signing_salt` (required) - the salt used to sign data sent
to the client
* `:hibernate_after` (optional) - the idle time in milliseconds allowed in
the LiveView before compressing its own memory and state.
Defaults to 15000ms (15 seconds)
## Guides
LiveView has many guides to help you on your journey.
## Server-side
These guides focus on server-side functionality:
* [Assigns and LiveEEx](assigns-eex.md)
* [Error and exception handling](error-handling.md)
* [Live Layouts](live-layouts.md)
* [Live Navigation](live-navigation.md)
* [Security considerations of the LiveView model](security-model.md)
* [Telemetry](telemetry.md)
* [Uploads](uploads.md)
* [Using Gettext for internationalization](using-gettext.md)
## Client-side
These guides focus on LiveView bindings and client-side integration:
* [Bindings](bindings.md)
* [Form bindings](form-bindings.md)
* [DOM patching and temporary assigns](dom-patching.md)
* [JavaScript interoperability](js-interop.md)
* [Uploads (External)](uploads-external.md)
'''
alias Phoenix.LiveView.Socket
@type unsigned_params :: map
@doc """
The LiveView entry-point.

For each LiveView in the root of a template, `c:mount/3` is invoked twice:
once to do the initial page load and again to establish the live socket.

It expects three parameters:

  * `params` - a map of string keys which contain public information that
    can be set by the user. The map contains the query params as well as any
    router path parameter. If the LiveView was not mounted at the router,
    this argument is the atom `:not_mounted_at_router`
  * `session` - the connection session
  * `socket` - the LiveView socket

It must return either `{:ok, socket}` or `{:ok, socket, options}`, where
`options` is one of:

  * `:temporary_assigns` - a keyword list of assigns that are temporary
    and must be reset to their value after every render. Note that once
    the value is reset, it won't be re-rendered again until it is explicitly
    assigned
  * `:layout` - the optional layout to be used by the LiveView
"""
@callback mount(
            unsigned_params() | :not_mounted_at_router,
            session :: map,
            socket :: Socket.t()
          ) ::
            {:ok, Socket.t()} | {:ok, Socket.t(), keyword()}

# Renders the template from the assigns. Per the moduledoc, this is invoked
# automatically whenever the socket assigns change.
@callback render(assigns :: Socket.assigns()) :: Phoenix.LiveView.Rendered.t()

# Invoked when the LiveView process is shutting down, with a GenServer-style
# stop reason. NOTE(review): exact invocation guarantees are defined by the
# channel/process lifecycle outside this file.
@callback terminate(reason, socket :: Socket.t()) :: term
          when reason: :normal | :shutdown | {:shutdown, :left | :closed | term}

# Invoked to handle changes of params and URL state; see `push_patch/2`
# below, which routes back into this callback on patch navigation.
@callback handle_params(unsigned_params(), uri :: String.t(), socket :: Socket.t()) ::
            {:noreply, Socket.t()}

# Invoked for `phx-*` DOM bindings pushed from the client (see "Bindings"
# in the moduledoc). `event` is the binding's event name.
@callback handle_event(event :: binary, unsigned_params(), socket :: Socket.t()) ::
            {:noreply, Socket.t()} | {:reply, map, Socket.t()}

# GenServer-style callbacks for calls, plain messages, and casts addressed
# to the LiveView process (e.g. timers via `Process.send_after/3`).
@callback handle_call(msg :: term, {pid, reference}, socket :: Socket.t()) ::
            {:noreply, Socket.t()} | {:reply, term, Socket.t()}

@callback handle_info(msg :: term, socket :: Socket.t()) ::
            {:noreply, Socket.t()}

@callback handle_cast(msg :: term, socket :: Socket.t()) ::
            {:noreply, Socket.t()}

# `c:render/1` is deliberately absent from this list — presumably enforced
# or supplied by the Renderer before-compile step (see `__using__/1`);
# verify against Phoenix.LiveView.Renderer.
@optional_callbacks mount: 3,
                    terminate: 2,
                    handle_params: 3,
                    handle_event: 3,
                    handle_call: 3,
                    handle_info: 2,
                    handle_cast: 2
@doc """
Uses LiveView in the current module to mark it a LiveView.

    use Phoenix.LiveView,
      namespace: MyAppWeb,
      container: {:tr, class: "colorized"},
      layout: {MyAppWeb.LayoutView, "live.html"}

## Options

  * `:namespace` - configures the namespace the `LiveView` is in
  * `:container` - configures the container the `LiveView` will be wrapped in
  * `:layout` - configures the layout the `LiveView` will be rendered in
"""
defmacro __using__(opts) do
  # Expand layout if possible to avoid compile-time dependencies
  opts =
    with true <- Keyword.keyword?(opts),
         {layout, template} <- Keyword.get(opts, :layout) do
      # Expanding the alias with `function: {:__live__, 0}` makes the caller
      # environment look like a function body, so the expansion is treated as
      # a runtime (not compile-time) reference to the layout view module.
      layout = Macro.expand(layout, %{__CALLER__ | function: {:__live__, 0}})
      Keyword.replace!(opts, :layout, {layout, template})
    else
      # Opts were not a keyword list, or no `{module, template}` layout was
      # given — pass everything through untouched.
      _ -> opts
    end

  quote bind_quoted: [opts: opts] do
    import Phoenix.LiveView
    import Phoenix.LiveView.Helpers

    @behaviour Phoenix.LiveView

    # The Renderer's before-compile hook wires up rendering (e.g. colocated
    # `.leex` templates) for modules without an explicit `render/1`.
    require Phoenix.LiveView.Renderer
    @before_compile Phoenix.LiveView.Renderer

    # Static metadata about this LiveView (container, name, layout), baked
    # in at compile time via `__live__/2` below.
    @doc false
    def __live__, do: unquote(Macro.escape(Phoenix.LiveView.__live__(__MODULE__, opts)))
  end
end
# Builds the static metadata map for a LiveView module: its container tag,
# display name (module minus namespace prefix), and validated layout tuple.
# Raises `ArgumentError` when `:layout` is not `{module, "template"}`.
@doc false
def __live__(module, opts) do
  namespace =
    opts[:namespace] || module |> Module.split() |> Enum.take(1) |> Module.concat()

  layout =
    case opts[:layout] do
      nil ->
        nil

      {mod, template} when is_atom(mod) and is_binary(template) ->
        {mod, template}

      other ->
        raise ArgumentError,
              ":layout expects a tuple of the form {MyLayoutView, \"my_template.html\"}, " <>
                "got: #{inspect(other)}"
    end

  %{
    container: opts[:container] || {:div, []},
    name: module |> Atom.to_string() |> String.replace_prefix("#{namespace}.", ""),
    kind: :view,
    module: module,
    layout: layout
  }
end
@doc """
Returns true if the socket is connected.

Useful for checking the connectivity status when mounting the view. On the
initial page render the view is mounted statically and the HTML is sent to
the client; once the client connects, the LiveView is spawned and mounted
statefully within a process. Use `connected?/1` to conditionally perform
stateful work, such as subscribing to pubsub topics, sending messages, etc.

## Examples

    defmodule DemoWeb.ClockLive do
      use Phoenix.LiveView
      ...

      def mount(_params, _session, socket) do
        if connected?(socket), do: :timer.send_interval(1000, self(), :tick)
        {:ok, assign(socket, date: :calendar.local_time())}
      end

      def handle_info(:tick, socket) do
        {:noreply, assign(socket, date: :calendar.local_time())}
      end
    end
"""
def connected?(%Socket{} = socket), do: socket.connected?
@doc """
Assigns a value into the socket only if it does not exist.

Useful for lazily assigning values and referencing parent assigns.

## Referencing parent assigns

When a LiveView is mounted in a disconnected state, the `Plug.Conn` assigns
will be available for reference via `assign_new/3`, allowing assigns to
be shared for the initial HTTP request. The `Plug.Conn` assigns will not be
available during the connected mount. Likewise, nested LiveView children have
access to their parent's assigns on mount using `assign_new`, which allows
assigns to be shared down the nested LiveView tree.

## Examples

    # controller
    conn
    |> assign(:current_user, user)
    |> LiveView.Controller.live_render(MyLive, session: %{"user_id" => user.id})

    # LiveView mount
    def mount(_params, %{"user_id" => user_id}, socket) do
      {:ok, assign_new(socket, :current_user, fn -> Accounts.get_user!(user_id) end)}
    end
"""
def assign_new(%Socket{} = socket, key, func) when is_function(func, 0) do
  validate_assign_key!(key)

  case socket do
    # Key already assigned: return the socket unchanged; `func` never runs.
    %{assigns: %{^key => _}} ->
      socket

    # Parent/conn assigns are available (the private `:assign_new` entry):
    # inherit the parent's value when present, otherwise fall back to `func`.
    %{private: %{assign_new: {assigns, keys}}} ->
      # It is important to store the keys even if they are not in assigns
      # because maybe the controller doesn't have it but the view does.
      socket = put_in(socket.private.assign_new, {assigns, [key | keys]})
      Phoenix.LiveView.Utils.force_assign(socket, key, Map.get_lazy(assigns, key, func))

    # No parent assigns to inherit from: compute the value eagerly.
    %{} ->
      Phoenix.LiveView.Utils.force_assign(socket, key, func.())
  end
end
@doc """
Adds key value pairs to socket assigns.

A single key value pair may be passed, or a keyword list or a map
of assigns may be provided to be merged into existing socket assigns.

## Examples

    iex> assign(socket, :name, "Elixir")
    iex> assign(socket, name: "Elixir", logo: "💧")
    iex> assign(socket, %{name: "Elixir"})
"""
def assign(%Socket{} = socket, key, value) do
  # Raises for reserved keys (:flash); returns :ok otherwise.
  :ok = validate_assign_key!(key)
  Phoenix.LiveView.Utils.assign(socket, key, value)
end
@doc """
See `assign/3`.
"""
def assign(%Socket{} = socket, attrs) when is_map(attrs) or is_list(attrs) do
  Enum.reduce(attrs, socket, &merge_assign/2)
end

# Validates and merges a single `{key, value}` pair into the accumulator
# socket; keys are validated one at a time, just before each merge.
defp merge_assign({key, value}, acc) do
  validate_assign_key!(key)
  Phoenix.LiveView.Utils.assign(acc, key, value)
end
# :flash is managed by LiveView's own flash functions; writing it directly
# would bypass that machinery, so it is rejected. Any other key is allowed.
defp validate_assign_key!(key) do
  if key == :flash do
    raise ArgumentError,
          ":flash is a reserved assign by LiveView and it cannot be set directly. " <>
            "Use the appropriate flash functions instead."
  else
    :ok
  end
end
@doc """
Updates an existing key in the socket assigns.

The update function receives the current key's value and
returns the updated value. Raises if the key does not exist.

## Examples

    iex> update(socket, :count, fn count -> count + 1 end)
    iex> update(socket, :count, &(&1 + 1))
"""
def update(%Socket{assigns: assigns} = socket, key, func) do
  if Map.has_key?(assigns, key) do
    assign(socket, [{key, func.(Map.fetch!(assigns, key))}])
  else
    raise KeyError, key: key, term: assigns
  end
end
@doc """
Adds a flash message to the socket to be displayed.

*Note*: While you can use `put_flash/3` inside a `Phoenix.LiveComponent`,
components have their own `@flash` assigns. The `@flash` assign
in a component is only copied to its parent LiveView if the component
calls `push_redirect/2` or `push_patch/2`.

*Note*: You must also place the `Phoenix.LiveView.Router.fetch_live_flash/2`
plug in your browser's pipeline in place of `fetch_flash` to be supported,
for example:

    import Phoenix.LiveView.Router

    pipeline :browser do
      ...
      plug :fetch_live_flash
    end

## Examples

    iex> put_flash(socket, :info, "It worked!")
    iex> put_flash(socket, :error, "You can't access that page")
"""
# Flash handling is implemented in Phoenix.LiveView.Utils; these delegates
# only expose the public API from this module.
defdelegate put_flash(socket, kind, msg), to: Phoenix.LiveView.Utils

@doc """
Clears the flash.

## Examples

    iex> clear_flash(socket)
"""
defdelegate clear_flash(socket), to: Phoenix.LiveView.Utils

@doc """
Clears a key from the flash.

## Examples

    iex> clear_flash(socket, :info)
"""
defdelegate clear_flash(socket, key), to: Phoenix.LiveView.Utils

@doc """
Pushes an event to the client to be consumed by hooks.

*Note*: events will be dispatched to all active hooks on the client who are
handling the given `event`. Scoped events can be achieved by namespacing
your event names.

## Examples

    {:noreply, push_event(socket, "scores", %{points: 100, user: "josé"})}
"""
defdelegate push_event(socket, event, payload), to: Phoenix.LiveView.Utils
@doc ~S"""
Allows an upload for the provided name.

## Options

  * `:accept` - Required. A list of unique file type specifiers or the
    atom :any to allow any kind of file. For example, `[".jpeg"]`, `:any`, etc.
  * `:max_entries` - The maximum number of selected files to allow per
    file input. Defaults to 1.
  * `:max_file_size` - The maximum file size in bytes to allow to be uploaded.
    Defaults to 8MB. For example, `12_000_000`.
  * `:chunk_size` - The chunk size in bytes to send when uploading.
    Defaults `64_000`.
  * `:chunk_timeout` - The time in milliseconds to wait before closing the
    upload channel when a new chunk has not been received. Defaults `10_000`.
  * `:external` - The 2-arity function for generating metadata for external
    client uploaders. See the Uploads section for example usage.
  * `:progress` - The optional 3-arity function for receiving progress events
  * `:auto_upload` - Instructs the client to upload the file automatically
    on file selection instead of waiting for form submits. Default false.

Raises when a previously allowed upload under the same name is still active.

## Examples

    allow_upload(socket, :avatar, accept: ~w(.jpg .jpeg), max_entries: 2)
    allow_upload(socket, :avatar, accept: :any)

For consuming files automatically as they are uploaded, you can pair `auto_upload: true` with
a custom progress function to consume the entries as they are completed. For example:

    allow_upload(socket, :avatar, accept: :any, progress: &handle_progress/3, auto_upload: true)

    defp handle_progress(:avatar, entry, socket) do
      if entry.done? do
        uploaded_file =
          consume_uploaded_entry(socket, entry, fn %{} = meta ->
            ...
          end)

        {:noreply, put_flash(socket, :info, "file #{uploaded_file.name} uploaded")}
      else
        {:noreply, socket}
      end
    end
"""
# Upload bookkeeping is implemented in Phoenix.LiveView.Upload.
defdelegate allow_upload(socket, name, options), to: Phoenix.LiveView.Upload
@doc """
Revokes a previously allowed upload from `allow_upload/3`.

## Examples

    disallow_upload(socket, :avatar)
"""
# Upload bookkeeping is implemented in Phoenix.LiveView.Upload; these
# delegates only expose the public API from this module.
defdelegate disallow_upload(socket, name), to: Phoenix.LiveView.Upload

@doc """
Cancels an upload for the given entry.

## Examples

    <%= for entry <- @uploads.avatar.entries do %>
      ...
      <button phx-click="cancel-upload" phx-value-ref="<%= entry.ref %>">cancel</button>
    <% end %>

    def handle_event("cancel-upload", %{"ref" => ref}, socket) do
      {:noreply, cancel_upload(socket, :avatar, ref)}
    end
"""
defdelegate cancel_upload(socket, name, entry_ref), to: Phoenix.LiveView.Upload

@doc """
Returns the completed and in progress entries for the upload.

## Examples

    case uploaded_entries(socket, :photos) do
      {[_ | _] = completed, []} ->
        # all entries are completed
      {[], [_ | _] = in_progress} ->
        # all entries are still in progress
    end
"""
defdelegate uploaded_entries(socket, name), to: Phoenix.LiveView.Upload

@doc ~S"""
Consumes the uploaded entries.

Raises when there are still entries in progress.
Typically called when submitting a form to handle the
uploaded entries alongside the form data. For form submissions,
it is guaranteed that all entries have completed before the submit event
is invoked. Once entries are consumed, they are removed from the upload.

## Examples

    def handle_event("save", _params, socket) do
      uploaded_files =
        consume_uploaded_entries(socket, :avatar, fn %{path: path}, _entry ->
          dest = Path.join("priv/static/uploads", Path.basename(path))
          File.cp!(path, dest)
          Routes.static_path(socket, "/uploads/#{Path.basename(dest)}")
        end)
      {:noreply, update(socket, :uploaded_files, &(&1 ++ uploaded_files))}
    end
"""
defdelegate consume_uploaded_entries(socket, name, func), to: Phoenix.LiveView.Upload

@doc ~S"""
Consumes an individual uploaded entry.

Raises when the entry is still in progress.
Typically called when submitting a form to handle the
uploaded entries alongside the form data. Once entries are consumed,
they are removed from the upload.

This is a lower-level feature than `consume_uploaded_entries/3` and useful
for scenarios where you want to consume entries as they are individually completed.

## Examples

    def handle_event("save", _params, socket) do
      case uploaded_entries(socket, :avatar) do
        {[_|_] = entries, []} ->
          uploaded_files = for entry <- entries do
            consume_uploaded_entry(socket, entry, fn %{path: path} ->
              dest = Path.join("priv/static/uploads", Path.basename(path))
              File.cp!(path, dest)
              Routes.static_path(socket, "/uploads/#{Path.basename(dest)}")
            end)
          end
          {:noreply, update(socket, :uploaded_files, &(&1 ++ uploaded_files))}
        _ ->
          {:noreply, socket}
      end
    end
"""
defdelegate consume_uploaded_entry(socket, entry, func), to: Phoenix.LiveView.Upload
@doc """
Annotates the socket for redirect to a destination path.

*Note*: LiveView redirects rely on instructing client
to perform a `window.location` update on the provided
redirect location. The whole page will be reloaded and
all state will be discarded.

## Options

  * `:to` - the path to redirect to. It must always be a local path
  * `:external` - an external path to redirect to
"""
def redirect(%Socket{} = socket, opts) do
  case opts do
    [to: url] ->
      # Local paths are validated (no "//host" or backslashes).
      validate_local_url!(url, "redirect/2")
      put_redirect(socket, {:redirect, %{to: url}})

    [external: url] ->
      put_redirect(socket, {:redirect, %{external: url}})

    _ ->
      raise ArgumentError, "expected :to or :external option in redirect/2"
  end
end
@doc """
Annotates the socket for navigation within the current LiveView.

When navigating to the current LiveView, `c:handle_params/3` is
immediately invoked to handle the change of params and URL state.
Then the new state is pushed to the client, without reloading the
whole page while also maintaining the current scroll position.
For live redirects to another LiveView, use `push_redirect/2`.

## Options

  * `:to` - the required path to link to. It must always be a local path
  * `:replace` - the flag to replace the current history or push a new state.
    Defaults `false`.

## Examples

    {:noreply, push_patch(socket, to: "/")}
    {:noreply, push_patch(socket, to: "/", replace: true)}
"""
def push_patch(%Socket{} = socket, opts) do
  opts = push_opts!(opts, "push_patch/2")
  %{to: to} = opts

  # A patch must route back to the LiveView currently mounted at the root;
  # any other destination requires a full live redirect instead.
  case Phoenix.LiveView.Utils.live_link_info!(socket, socket.root_view, to) do
    {:internal, params, action, _parsed_uri} ->
      put_redirect(socket, {:live, {params, action}, opts})

    {:external, _uri} ->
      raise ArgumentError,
            "cannot push_patch/2 to #{inspect(to)} because the given path " <>
              "does not point to the current root view #{inspect(socket.root_view)}"
  end
end
@doc """
Annotates the socket for navigation to another LiveView.

The current LiveView will be shutdown and a new one will be mounted
in its place, without reloading the whole page. This can
also be used to remount the same LiveView, in case you want to start
fresh. If you want to navigate to the same LiveView without remounting
it, use `push_patch/2` instead.

## Options

  * `:to` - the required path to link to. It must always be a local path
  * `:replace` - the flag to replace the current history or push a new state.
    Defaults `false`.

## Examples

    {:noreply, push_redirect(socket, to: "/")}
    {:noreply, push_redirect(socket, to: "/", replace: true)}
"""
def push_redirect(%Socket{} = socket, opts) do
  put_redirect(socket, {:live, :redirect, push_opts!(opts, "push_redirect/2")})
end
# Normalizes live-navigation options into `%{to: path, kind: :push | :replace}`,
# requiring `:to` and validating it as a safe local path. `context` names the
# calling function for error messages.
defp push_opts!(opts, context) do
  to = Keyword.fetch!(opts, :to)
  validate_local_url!(to, context)
  %{to: to, kind: if(opts[:replace], do: :replace, else: :push)}
end
# Stores the pending redirect command on the socket. Only one redirect may
# be prepared per lifecycle; a second attempt raises.
defp put_redirect(%Socket{redirected: nil} = socket, command) do
  %{socket | redirected: command}
end

defp put_redirect(%Socket{redirected: to}, _command) do
  raise ArgumentError, "socket already prepared to redirect with #{inspect(to)}"
end
# Backslashes can smuggle scheme/host separators past naive checks.
@invalid_local_url_chars ["\\"]

# Accepts only safe local paths: must start with a single "/", must not be
# protocol-relative ("//host/..." would escape the local origin), and must
# not contain unsafe characters. Returns `to` or raises `ArgumentError`.
defp validate_local_url!(to, where) do
  case to do
    "//" <> _ ->
      raise_invalid_local_url!(to, where)

    "/" <> _ ->
      if String.contains?(to, @invalid_local_url_chars) do
        raise ArgumentError, "unsafe characters detected for #{where} in URL #{inspect(to)}"
      else
        to
      end

    _ ->
      raise_invalid_local_url!(to, where)
  end
end

defp raise_invalid_local_url!(to, where) do
  raise ArgumentError, "the :to option in #{where} expects a path but was #{inspect(to)}"
end
@doc """
Accesses the connect params sent by the client for use on connected mount.

Connect params are only sent when the client connects to the server and
only remain available during mount. `nil` is returned when called in a
disconnected state and a `RuntimeError` is raised if called after mount.

## Reserved params

The following params have special meaning in LiveView:

  * "_csrf_token" - the CSRF Token which must be explicitly set by the user
    when connecting
  * "_mounts" - the number of times the current LiveView is mounted.
    It is 0 on first mount, then increases on each reconnect. It resets
    when navigating away from the current LiveView or on errors
  * "_track_static" - set automatically with a list of all href/src from
    tags with the "phx-track-static" annotation in them. If there are no
    such tags, nothing is sent

## Examples

    def mount(_params, _session, socket) do
      {:ok, assign(socket, width: get_connect_params(socket)["width"] || @width)}
    end
"""
def get_connect_params(%Socket{private: private} = socket) do
  params = private[:connect_params]

  cond do
    # No private entry at all: we are outside mount (or in a child) — raise.
    !params -> raise_connect_only!(socket, "connect_params")
    connected?(socket) -> params
    # Entry present but still statically mounted: nothing was sent yet.
    true -> nil
  end
end
@doc """
Accesses the connect info from the socket to use on connected mount.

Connect info are only sent when the client connects to the server and
only remain available during mount. `nil` is returned when called in a
disconnected state and a `RuntimeError` is raised if called after mount.

## Examples

First, when invoking the LiveView socket, you need to declare the
`connect_info` you want to receive. Typically, it includes at least
the session but it may include other keys, such as `:peer_data`.
See `Phoenix.Endpoint.socket/3`:

    socket "/live", Phoenix.LiveView.Socket,
      websocket: [connect_info: [:peer_data, session: @session_options]]

Those values can now be accessed on the connected mount as
`get_connect_info/1`:

    def mount(_params, _session, socket) do
      if info = get_connect_info(socket) do
        {:ok, assign(socket, ip: info.peer_data.address)}
      else
        {:ok, assign(socket, ip: nil)}
      end
    end
"""
def get_connect_info(%Socket{private: private} = socket) do
  info = private[:connect_info]

  cond do
    # No private entry at all: we are outside mount (or in a child) — raise.
    !info -> raise_connect_only!(socket, "connect_info")
    connected?(socket) -> info
    # Entry present but still statically mounted: nothing was sent yet.
    true -> nil
  end
end
@doc """
Returns true if the socket is connected and the tracked static assets have changed.

This function is useful to detect if the client is running on an outdated
version of the marked static files. It works by comparing the static paths
sent by the client with the one on the server.

**Note:** this functionality requires Phoenix v1.5.2 or later.

To use this functionality, the first step is to annotate which static files
you want to be tracked by LiveView, with the `phx-track-static`. For example:

    <link phx-track-static rel="stylesheet" href="<%= Routes.static_path(@conn, "/css/app.css") %>"/>
    <script defer phx-track-static type="text/javascript" src="<%= Routes.static_path(@conn, "/js/app.js") %>"></script>

Now, whenever LiveView connects to the server, it will send a copy `src`
or `href` attributes of all tracked statics and compare those values with
the latest entries computed by `mix phx.digest` in the server.

The tracked statics on the client will match the ones on the server the
huge majority of times. However, if there is a new deployment, those values
may differ. You can use this function to detect those cases and show a
banner to the user, asking them to reload the page. To do so, first set the
assign on mount:

    def mount(params, session, socket) do
      {:ok, assign(socket, static_changed?: static_changed?(socket))}
    end

And then in your views:

    <%= if @static_changed? do %>
      <div id="reload-static">
        The app has been updated. Click here to <a href="#" onclick="window.location.reload()">reload</a>.
      </div>
    <% end %>

If you prefer, you can also send a JavaScript script that immediately
reloads the page.

**Note:** only set `phx-track-static` on your own assets. For example, do
not set it in external JavaScript files:

    <script defer phx-track-static type="text/javascript" src="https://ajax.googleapis.com/ajax/libs/jquery/3.4.1/jquery.min.js"></script>

Because you don't actually serve the file above, LiveView will interpret
the static above as missing, and this function will return true.
"""
def static_changed?(%Socket{private: private, endpoint: endpoint} = socket) do
  if connect_params = private[:connect_params] do
    connected?(socket) and
      static_changed?(
        connect_params["_track_static"],
        endpoint.config(:cache_static_manifest_latest)
      )
  else
    raise_connect_only!(socket, "static_changed?")
  end
end

# A change is detected when at least one client-tracked static matches
# neither the non-digested nor the digested path in the latest manifest.
defp static_changed?([_ | _] = statics, %{} = latest) do
  latest = Map.to_list(latest)

  not Enum.all?(statics, fn static ->
    # Drop any query string before comparing paths.
    [static | _] = :binary.split(static, "?")

    Enum.any?(latest, fn {non_digested, digested} ->
      String.ends_with?(static, non_digested) or String.ends_with?(static, digested)
    end)
  end)
end

# No tracked statics sent by the client, or no manifest on the server.
defp static_changed?(_, _), do: false
# Shared error for the mount-only accessors (connect params/info, statics).
# Distinguishes nested child LiveViews (which never receive this data) from
# post-mount reads on the root LiveView.
defp raise_connect_only!(socket, fun) do
  if child?(socket) do
    raise RuntimeError, """
    attempted to read #{fun} from a nested child LiveView #{inspect(socket.view)}.
    Only the root LiveView has access to #{fun}.
    """
  else
    raise RuntimeError, """
    attempted to read #{fun} outside of #{inspect(socket.view)}.mount/3.
    #{fun} only exists while mounting. If you require access to this information
    after mount, store the state in socket assigns.
    """
  end
end
@doc """
Asynchronously updates a `Phoenix.LiveComponent` with new assigns.

The component that is updated must be stateful (the `:id` in the assigns must
match the `:id` associated with the component) and the component must be
mounted within the current LiveView.

When the component receives the update, the optional
[`preload/1`](`c:Phoenix.LiveComponent.preload/1`) callback is invoked, then
the updated values are merged with the component's assigns and
[`update/2`](`c:Phoenix.LiveComponent.update/2`) is called for the updated
component(s).

While a component may always be updated from the parent by updating some
parent assigns which will re-render the child, thus invoking
[`update/2`](`c:Phoenix.LiveComponent.update/2`) on the child component,
`send_update/2` is useful for updating a component that entirely manages its
own state, as well as messaging between components mounted in the same
LiveView.

**Note:** `send_update/2` cannot update a LiveComponent that is mounted in a
different LiveView. To update a component in a different LiveView you must
send a message to the LiveView process that the LiveComponent is mounted
within (often via `Phoenix.PubSub`).

## Examples

    def handle_event("cancel-order", _, socket) do
      ...
      send_update(Cart, id: "cart", status: "cancelled")
      {:noreply, socket}
    end
"""
def send_update(module, assigns) when is_atom(module) do
  assigns = Map.new(assigns)

  # A truthy :id is required to address the stateful component instance.
  if id = assigns[:id] do
    Phoenix.LiveView.Channel.send_update(module, id, assigns)
  else
    raise ArgumentError, "missing required :id in send_update. Got: #{inspect(assigns)}"
  end
end
@doc """
Similar to `send_update/2` but the update will be delayed according to the given `time_in_milliseconds`.

## Examples

    def handle_event("cancel-order", _, socket) do
      ...
      send_update_after(Cart, [id: "cart", status: "cancelled"], 3000)
      {:noreply, socket}
    end
"""
def send_update_after(module, assigns, time_in_milliseconds)
    when is_atom(module) and is_integer(time_in_milliseconds) do
  assigns = Map.new(assigns)

  # A truthy :id is required to address the stateful component instance.
  if id = assigns[:id] do
    Phoenix.LiveView.Channel.send_update_after(module, id, assigns, time_in_milliseconds)
  else
    raise ArgumentError,
          "missing required :id in send_update_after. Got: #{inspect(assigns)}"
  end
end
@doc """
Returns the transport pid of the socket.
Raises `ArgumentError` if the socket is not connected.
## Examples
iex> transport_pid(socket)
#PID<0.107.0>
"""
def transport_pid(%Socket{}) do
  # When the socket is connected, the channel process puts the transport pid
  # at the head of the `$callers` process dictionary entry.
  with [pid | _rest] <- Process.get(:"$callers") do
    pid
  else
    _ ->
      raise ArgumentError, "transport_pid/1 may only be called when the socket is connected."
  end
end
defp child?(%Socket{parent_pid: pid}), do: is_pid(pid)
end
|
lib/phoenix_live_view.ex
| 0.848408
| 0.666921
|
phoenix_live_view.ex
|
starcoder
|
defmodule Ink do
  @moduledoc """
  A backend for the Elixir `Logger` that logs JSON and filters your secrets.

  ## Usage

  To use `Ink` for your logging, just configure it as a backend:

      config :logger, backends: [Ink]

      # optional additional configuration
      config :logger, Ink,
        name: "your app",
        level: :info

  ### Options

  In total, the following options are supported by `Ink`:

  - `:name` the name of your app that will be added to all logs
  - `:io_device` the IO device the logs are written to (default: `:stdio`)
  - `:level` the minimum log level for outputting messages (default: `:debug`)
  - `:filtered_strings` secret strings that should not be printed in logs
    (default: `[]`)
  - `:filtered_uri_credentials` URIs that contain credentials for filtering
    (default: `[]`)
  - `:metadata` the metadata keys that should be included in the logs (default:
    all)

  ### Filtering secrets

  `Ink` can be configured to filter secrets out of your logs:

      config :logger, Ink,
        filtered_strings: ["password"]

  Sometimes, you configure a connection using a URL. For example, a RabbitMQ
  connection could be configured with the URL
  `"amqp://user:password@localhost:5672"`. Filtering the whole URL from your
  logs doesn't do you any good. Therefore, `Ink` has a separate option to pass
  secret URLs:

      config :logger, Ink,
        filtered_uri_credentials: ["amqp://user:password@localhost:5672"]

  This code will parse the URL and only filter `"password"` from your logs.

  #### Preventing reports on the terminal

  When processes crash - which is a normal thing to happen in Elixir - OTP sends
  reports to the handlers of the `:error_logger`. In some environments, there is
  a default handler that prints these to the terminal. Since it includes the
  state of the crashed process, this can include secrets from your application.
  `Ink` is unable to filter these reports, because they are not printed using
  the `Logger`.

  You can disable printing of these reports with the following line in your
  config:

      config :sasl, sasl_error_logger: false

  ### Metadata

  If you don't configure any specific metadata, `Ink` will include all metadata
  as separate fields in the logged JSON. If you only want to include specific
  metadata in your logs, you need to configure the included fields.

      config :logger, Ink,
        metadata: [:pid, :my_field]

  *Note*: Since the term PID is also prevalent in the UNIX world, services like
  LogStash expect an integer if they encounter a field named `pid`. Therefore,
  `Ink` will log the PID as `erlang_pid`.
  """

  @behaviour :gen_event

  # Logger backends are registered by module name, so init receives `Ink`.
  @impl :gen_event
  def init(__MODULE__) do
    {:ok, configure(Application.get_env(:logger, Ink, []), default_options())}
  end

  # Runtime reconfiguration, e.g. via `Logger.configure_backend(Ink, ...)`.
  @impl :gen_event
  def handle_call({:configure, options}, state) do
    {:ok, :ok, configure(options, state)}
  end

  # Ignore events that originate on remote nodes; each node logs locally.
  @impl :gen_event
  def handle_event({_, gl, {Logger, _, _, _}}, state) when node(gl) != node() do
    {:ok, state}
  end

  # Nothing is buffered, so a flush request is a no-op.
  @impl :gen_event
  def handle_event(:flush, state) do
    {:ok, state}
  end

  @impl :gen_event
  def handle_event({level, _, {Logger, message, timestamp, metadata}}, state) do
    log_message(message, level, timestamp, metadata, state)
    {:ok, state}
  end

  @impl :gen_event
  def handle_info(_msg, state) do
    {:ok, state}
  end

  @impl :gen_event
  def terminate(_reason, _state) do
    :ok
  end

  @impl :gen_event
  def code_change(_old, state, _extra) do
    {:ok, state}
  end

  # Merges user options into the current state and recomputes the secret list.
  defp configure(options, state) do
    state
    |> Map.merge(Enum.into(options, %{}))
    |> update_secret_strings
  end

  # Emits a single log line when `level` is at or above the configured minimum.
  defp log_message(message, level, timestamp, metadata, config) do
    if Logger.compare_levels(level, config.level) != :lt do
      message
      |> base_map(timestamp, level)
      |> Map.merge(process_metadata(metadata, config))
      |> Ink.Encoder.encode()
      |> log_json(config)
    end
  end

  defp process_metadata(metadata, config) do
    metadata
    |> filter_metadata(config)
    |> rename_metadata_fields
    |> Enum.into(%{})
  end

  # `metadata: nil` (the default) means "include everything".
  defp filter_metadata(metadata, %{metadata: nil}), do: metadata

  defp filter_metadata(metadata, config) do
    metadata |> Enum.filter(fn {key, _} -> key in config.metadata end)
  end

  # `:pid` is renamed so log collectors don't confuse it with a UNIX pid
  # (see the moduledoc note about LogStash).
  defp rename_metadata_fields(metadata) do
    metadata
    |> Enum.map(fn
      {:pid, value} -> {:erlang_pid, value}
      other -> other
    end)
  end

  defp log_json({:ok, json}, config) do
    json
    |> filter_secret_strings(config.secret_strings)
    |> log_to_device(config.io_device)
  end

  # Encoding failed; only dump the raw term in dev. `Mix` is not part of
  # releases, so guard the call to avoid crashing the logger in production
  # deployments where Mix is unavailable.
  defp log_json(other, config) do
    if Code.ensure_loaded?(Mix) and Mix.env() == :dev do
      log_to_device(inspect(other), config.io_device)
    end
  end

  defp log_to_device(msg, io_device), do: IO.puts(io_device, msg)

  # Builds the Bunyan-style base document every log line starts from.
  defp base_map(message, timestamp, level) when is_binary(message) do
    %{
      name: name(),
      pid: System.get_pid() |> String.to_integer(),
      hostname: hostname(),
      msg: message,
      time: formatted_timestamp(timestamp),
      level: level(level),
      v: 0
    }
  end

  # Logger may hand over iodata; normalize it to a binary first.
  defp base_map(message, timestamp, level) when is_list(message) do
    base_map(IO.iodata_to_binary(message), timestamp, level)
  end

  # Converts Logger's erl-style timestamp to an ISO-8601 UTC string.
  defp formatted_timestamp({date, {hours, minutes, seconds, milliseconds}}) do
    {date, {hours, minutes, seconds}}
    |> NaiveDateTime.from_erl!({milliseconds * 1000, 3})
    |> DateTime.from_naive!("Etc/UTC")
    |> DateTime.to_iso8601()
  end

  # Combines plain filtered strings with credentials extracted from URIs,
  # dropping empty/nil entries that would corrupt String.replace/3.
  defp update_secret_strings(config) do
    secret_strings =
      config.filtered_strings
      |> Kernel.++(uri_credentials(config.filtered_uri_credentials))
      |> Enum.reject(fn s -> s == "" || is_nil(s) end)

    Map.put(config, :secret_strings, secret_strings)
  end

  # Extracts the password component (part after ":") of each URI's userinfo.
  defp uri_credentials(uris) do
    uris
    |> Enum.reject(&is_nil/1)
    |> Enum.map(fn uri -> uri |> URI.parse() |> Map.get(:userinfo) end)
    |> Enum.reject(&is_nil/1)
    |> Enum.map(fn userinfo -> userinfo |> String.split(":") |> List.last() end)
  end

  defp filter_secret_strings(message, secret_strings) do
    Enum.reduce(secret_strings, message, fn secret, msg ->
      String.replace(msg, secret, "[FILTERED]")
    end)
  end

  defp default_options do
    %{
      level: :debug,
      filtered_strings: [],
      filtered_uri_credentials: [],
      secret_strings: [],
      io_device: :stdio,
      metadata: nil
    }
  end

  # Bunyan-style numeric levels. `:warning` is the spelling Elixir >= 1.11
  # uses for the former `:warn`; both map to 40 so newer runtimes don't
  # crash the backend with a CaseClauseError.
  defp level(level) do
    case level do
      :debug -> 20
      :info -> 30
      :warn -> 40
      :warning -> 40
      :error -> 50
    end
  end

  defp hostname do
    with {:ok, hostname} <- :inet.gethostname(), do: List.to_string(hostname)
  end

  defp name do
    :logger
    |> Application.get_env(Ink)
    |> Keyword.fetch!(:name)
  end
end
|
lib/ink.ex
| 0.849488
| 0.585457
|
ink.ex
|
starcoder
|
defmodule GrapevineData.Authorizations.Authorization do
  @moduledoc """
  Authorization schema

  Represents an OAuth-style authorization grant tying a user to a game,
  carrying the redirect URI, requested scopes, and a one-time code.
  """

  use Ecto.Schema

  import Ecto.Changeset

  alias GrapevineData.Accounts.User
  alias GrapevineData.Authorizations.AccessToken
  alias GrapevineData.Games.Game

  @type t :: %__MODULE__{}

  # The closed set of scopes a client may request.
  @scopes ["profile", "email"]

  schema "authorizations" do
    field(:redirect_uri, :string)
    field(:state, :string)
    field(:scopes, {:array, :string}, default: [])
    field(:code, Ecto.UUID)
    field(:active, :boolean, default: false)

    belongs_to(:user, User)
    belongs_to(:game, Game)

    has_many(:access_tokens, AccessToken)

    timestamps()
  end

  @doc """
  Changeset for creating a new authorization for `game`.

  Validates the redirect URI (must be https — or http on localhost — and fully
  qualified, or the out-of-band URN), checks it against the game's registered
  URIs, validates the requested scopes, and assigns a fresh one-time code.
  """
  def create_changeset(struct, game, params) do
    struct
    |> cast(params, [:redirect_uri, :state, :scopes])
    |> validate_required([:redirect_uri, :state, :scopes])
    |> validate_redirect_uri()
    |> validate_redirect_uri_known(game)
    |> validate_scopes()
    |> put_change(:game_id, game.id)
    |> put_change(:code, UUID.uuid4())
  end

  @doc "Marks the authorization as approved by the user."
  def authorize_changeset(struct) do
    struct
    |> change()
    |> put_change(:active, true)
  end

  @doc "Clears the one-time code after it has been exchanged."
  def used_changeset(struct) do
    struct
    |> change()
    |> put_change(:code, UUID.uuid4())
    |> put_change(:code, nil)
  end

  @doc "Issues a fresh one-time code for the authorization."
  def refresh_code_changeset(struct) do
    struct
    |> change()
    |> put_change(:code, UUID.uuid4())
  end

  # Scopes must be non-empty and drawn only from @scopes.
  defp validate_scopes(changeset) do
    case get_field(changeset, :scopes) do
      [] ->
        add_error(changeset, :scopes, "must be provided")

      scopes ->
        case Enum.all?(scopes, &Enum.member?(@scopes, &1)) do
          true ->
            changeset

          false ->
            add_error(changeset, :scopes, "are invalid")
        end
    end
  end

  # The out-of-band URN is always allowed; anything else is parsed and each
  # URI component is validated individually.
  defp validate_redirect_uri(changeset) do
    case get_field(changeset, :redirect_uri) do
      nil ->
        changeset

      "urn:ietf:wg:oauth:2.0:oob" ->
        changeset

      redirect_uri ->
        uri = URI.parse(redirect_uri)

        changeset
        |> validate_redirect_uri_scheme(uri)
        |> validate_redirect_uri_host(uri)
        |> validate_redirect_uri_path(uri)
        |> validate_redirect_uri_query(uri)
        |> validate_redirect_uri_fragment(uri)
    end
  end

  # https required; plain http is only acceptable for local development.
  defp validate_redirect_uri_scheme(changeset, uri) do
    case uri.scheme do
      "https" ->
        changeset

      "http" ->
        case uri.host do
          "localhost" ->
            changeset

          _ ->
            add_error(changeset, :redirect_uri, "must be https")
        end

      _ ->
        add_error(changeset, :redirect_uri, "must be https")
    end
  end

  defp validate_redirect_uri_host(changeset, uri) do
    case uri.host do
      nil ->
        add_error(changeset, :redirect_uri, "must be a fully qualified URI")

      _ ->
        changeset
    end
  end

  defp validate_redirect_uri_path(changeset, uri) do
    case uri.path do
      nil ->
        add_error(changeset, :redirect_uri, "must be a fully qualified URI")

      _ ->
        changeset
    end
  end

  # Query strings and fragments are rejected so the exact registered URI is used.
  defp validate_redirect_uri_query(changeset, uri) do
    case uri.query do
      nil ->
        changeset

      _ ->
        add_error(changeset, :redirect_uri, "must be a fully qualified URI")
    end
  end

  defp validate_redirect_uri_fragment(changeset, uri) do
    case uri.fragment do
      nil ->
        changeset

      _ ->
        add_error(changeset, :redirect_uri, "must be a fully qualified URI")
    end
  end

  # The redirect URI must exactly match one registered on the game.
  defp validate_redirect_uri_known(changeset, game) do
    case get_field(changeset, :redirect_uri) do
      nil ->
        changeset

      "urn:ietf:wg:oauth:2.0:oob" ->
        changeset

      redirect_uri ->
        redirect_uris = Enum.map(game.redirect_uris, & &1.uri)

        case redirect_uri in redirect_uris do
          true ->
            changeset

          false ->
            add_error(changeset, :redirect_uri, "does not match a known URI")
        end
    end
  end
end
|
apps/data/lib/grapevine_data/authorizations/authorization.ex
| 0.726717
| 0.431884
|
authorization.ex
|
starcoder
|
defmodule Hui.Query.FacetRange do
  @moduledoc """
  Struct related to [range faceting](http://lucene.apache.org/solr/guide/faceting.html#range-faceting) query.

  ### Example

      iex> x = %Hui.Query.FacetRange{range: "year", gap: "+10YEARS", start: 1700, end: 1799}
      %Hui.Query.FacetRange{
        end: 1799,
        gap: "+10YEARS",
        hardend: nil,
        include: nil,
        method: nil,
        other: nil,
        per_field: false,
        range: "year",
        start: 1700
      }
      iex> y = %Hui.Query.Facet{range: x, field: ["type", "year"], query: "year:[2000 TO NOW]"}
      %Hui.Query.Facet{
        contains: nil,
        "contains.ignoreCase": nil,
        "enum.cache.minDf": nil,
        excludeTerms: nil,
        exists: nil,
        facet: true,
        field: ["type", "year"],
        interval: nil,
        limit: nil,
        matches: nil,
        method: nil,
        mincount: nil,
        missing: nil,
        offset: nil,
        "overrequest.count": nil,
        "overrequest.ratio": nil,
        pivot: nil,
        "pivot.mincount": nil,
        prefix: nil,
        query: "year:[2000 TO NOW]",
        range: %Hui.Query.FacetRange{
          end: 1799,
          gap: "+10YEARS",
          hardend: nil,
          include: nil,
          method: nil,
          other: nil,
          per_field: false,
          range: "year",
          start: 1700
        },
        sort: nil,
        threads: nil
      }
      iex> y |> Hui.Encoder.encode # render struct into URL query string with `facet` prefixes
      "facet=true&facet.field=type&facet.field=year&facet.query=year%3A%5B2000+TO+NOW%5D&facet.range.end=1799&facet.range.gap=%2B10YEARS&facet.range=year&facet.range.start=1700"

  ### Example - per field ranges, f.[fieldname].facet.range

      iex> x = %Hui.Query.FacetRange{range: "year", gap: "+10YEARS", start: 1700, end: 1799, per_field: true}
      %Hui.Query.FacetRange{
        end: 1799,
        gap: "+10YEARS",
        hardend: nil,
        include: nil,
        method: nil,
        other: nil,
        per_field: true,
        range: "year",
        start: 1700
      }
      iex> x |> Hui.Encoder.encode
      "f.year.facet.range.end=1799&f.year.facet.range.gap=%2B10YEARS&facet.range=year&f.year.facet.range.start=1700"

      # another range
      iex> y = %Hui.Query.FacetRange{range: "price", gap: "10", start: 0, end: 100, per_field: true}
      %Hui.Query.FacetRange{
        end: 100,
        gap: "10",
        hardend: nil,
        include: nil,
        method: nil,
        other: nil,
        per_field: true,
        range: "price",
        start: 0
      }
      iex> z = %Hui.Query.Facet{field: "type", range: [x, y]} # field and multiple ranges faceting
      %Hui.Query.Facet{
        contains: nil,
        "contains.ignoreCase": nil,
        "enum.cache.minDf": nil,
        excludeTerms: nil,
        exists: nil,
        facet: true,
        field: "type",
        interval: nil,
        limit: nil,
        matches: nil,
        method: nil,
        mincount: nil,
        missing: nil,
        offset: nil,
        "overrequest.count": nil,
        "overrequest.ratio": nil,
        pivot: nil,
        "pivot.mincount": nil,
        prefix: nil,
        query: nil,
        range: [
          %Hui.Query.FacetRange{
            end: 1799,
            gap: "+10YEARS",
            hardend: nil,
            include: nil,
            method: nil,
            other: nil,
            per_field: true,
            range: "year",
            start: 1700
          },
          %Hui.Query.FacetRange{
            end: 100,
            gap: "10",
            hardend: nil,
            include: nil,
            method: nil,
            other: nil,
            per_field: true,
            range: "price",
            start: 0
          }
        ],
        sort: nil,
        threads: nil
      }
      iex> z |> Hui.Encoder.encode
      "facet=true&facet.field=type&f.year.facet.range.end=1799&f.year.facet.range.gap=%2B10YEARS&facet.range=year&f.year.facet.range.start=1700&f.price.facet.range.end=100&f.price.facet.range.gap=10&facet.range=price&f.price.facet.range.start=0"
  """

  defstruct [
    :end,
    :gap,
    :hardend,
    :include,
    :method,
    :other,
    :range,
    :start,
    per_field: false
  ]

  @typedoc """
  Struct for range faceting parameters, use in conjunction with
  the faceting struct -`t:Hui.Query.Facet.t/0`.
  """
  # `end`, `gap` and `start` accept either Solr expression strings
  # (e.g. "+10YEARS") or plain numbers, as shown in the moduledoc examples.
  @type t :: %__MODULE__{
          end: binary | number,
          gap: binary | number,
          hardend: boolean,
          include: :lower | :upper | :edge | :outer | :all,
          method: :filter | :dv,
          other: :before | :after | :between | :none | :all,
          per_field: boolean,
          range: binary,
          start: binary | number
        }

  @doc """
  Convenience constructor building a range facet from the field name (`r`),
  gap (`g`), start (`s`) and end (`e`) values.
  """
  @spec new(binary, binary | number, binary | number, binary | number) :: t
  def new(r, g, s, e), do: %__MODULE__{range: r, gap: g, start: s, end: e}

  @doc "Returns an empty `t:#{inspect(__MODULE__)}.t/0` struct."
  @spec new :: t
  def new(), do: %__MODULE__{}
end
|
lib/hui/query/facet_range.ex
| 0.853562
| 0.415284
|
facet_range.ex
|
starcoder
|
defmodule AWS.MediaStore do
  @moduledoc """
  An AWS Elemental MediaStore container is a namespace that holds folders and
  objects. You use a container endpoint to create, read, and delete objects.
  """

  # NOTE: this module follows the generated AWS SDK pattern — each public
  # function is a thin wrapper delegating to `request/4` with the matching
  # MediaStore API action name. All operations use the JSON-RPC protocol.

  @doc """
  Creates a storage container to hold objects. A container is similar to a
  bucket in the Amazon S3 service.
  """
  def create_container(client, input, options \\ []) do
    request(client, "CreateContainer", input, options)
  end

  @doc """
  Deletes the specified container. Before you make a `DeleteContainer`
  request, delete any objects in the container or in any folders in the
  container. You can delete only empty containers.
  """
  def delete_container(client, input, options \\ []) do
    request(client, "DeleteContainer", input, options)
  end

  @doc """
  Deletes the access policy that is associated with the specified container.
  """
  def delete_container_policy(client, input, options \\ []) do
    request(client, "DeleteContainerPolicy", input, options)
  end

  @doc """
  Deletes the cross-origin resource sharing (CORS) configuration information
  that is set for the container.

  To use this operation, you must have permission to perform the
  `MediaStore:DeleteCorsPolicy` action. The container owner has this
  permission by default and can grant this permission to others.
  """
  def delete_cors_policy(client, input, options \\ []) do
    request(client, "DeleteCorsPolicy", input, options)
  end

  @doc """
  Removes an object lifecycle policy from a container. It takes up to 20
  minutes for the change to take effect.
  """
  def delete_lifecycle_policy(client, input, options \\ []) do
    request(client, "DeleteLifecyclePolicy", input, options)
  end

  @doc """
  Deletes the metric policy that is associated with the specified container.
  If there is no metric policy associated with the container, MediaStore
  doesn't send metrics to CloudWatch.
  """
  def delete_metric_policy(client, input, options \\ []) do
    request(client, "DeleteMetricPolicy", input, options)
  end

  @doc """
  Retrieves the properties of the requested container. This request is
  commonly used to retrieve the endpoint of a container. An endpoint is a
  value assigned by the service when a new container is created. A
  container's endpoint does not change after it has been assigned. The
  `DescribeContainer` request returns a single `Container` object based on
  `ContainerName`. To return all `Container` objects that are associated with
  a specified AWS account, use `ListContainers`.
  """
  def describe_container(client, input, options \\ []) do
    request(client, "DescribeContainer", input, options)
  end

  @doc """
  Retrieves the access policy for the specified container. For information
  about the data that is included in an access policy, see the [AWS Identity
  and Access Management User
  Guide](https://aws.amazon.com/documentation/iam/).
  """
  def get_container_policy(client, input, options \\ []) do
    request(client, "GetContainerPolicy", input, options)
  end

  @doc """
  Returns the cross-origin resource sharing (CORS) configuration information
  that is set for the container.

  To use this operation, you must have permission to perform the
  `MediaStore:GetCorsPolicy` action. By default, the container owner has this
  permission and can grant it to others.
  """
  def get_cors_policy(client, input, options \\ []) do
    request(client, "GetCorsPolicy", input, options)
  end

  @doc """
  Retrieves the object lifecycle policy that is assigned to a container.
  """
  def get_lifecycle_policy(client, input, options \\ []) do
    request(client, "GetLifecyclePolicy", input, options)
  end

  @doc """
  Returns the metric policy for the specified container.
  """
  def get_metric_policy(client, input, options \\ []) do
    request(client, "GetMetricPolicy", input, options)
  end

  @doc """
  Lists the properties of all containers in AWS Elemental MediaStore.

  You can query to receive all the containers in one response. Or you can
  include the `MaxResults` parameter to receive a limited number of
  containers in each response. In this case, the response includes a token.
  To get the next set of containers, send the command again, this time with
  the `NextToken` parameter (with the returned token as its value). The next
  set of responses appears, with a token if there are still more containers
  to receive.

  See also `DescribeContainer`, which gets the properties of one container.
  """
  def list_containers(client, input, options \\ []) do
    request(client, "ListContainers", input, options)
  end

  @doc """
  Returns a list of the tags assigned to the specified container.
  """
  def list_tags_for_resource(client, input, options \\ []) do
    request(client, "ListTagsForResource", input, options)
  end

  @doc """
  Creates an access policy for the specified container to restrict the users
  and clients that can access it. For information about the data that is
  included in an access policy, see the [AWS Identity and Access Management
  User Guide](https://aws.amazon.com/documentation/iam/).

  For this release of the REST API, you can create only one policy for a
  container. If you enter `PutContainerPolicy` twice, the second command
  modifies the existing policy.
  """
  def put_container_policy(client, input, options \\ []) do
    request(client, "PutContainerPolicy", input, options)
  end

  @doc """
  Sets the cross-origin resource sharing (CORS) configuration on a container
  so that the container can service cross-origin requests. For example, you
  might want to enable a request whose origin is http://www.example.com to
  access your AWS Elemental MediaStore container at my.example.container.com
  by using the browser's XMLHttpRequest capability.

  To enable CORS on a container, you attach a CORS policy to the container.
  In the CORS policy, you configure rules that identify origins and the HTTP
  methods that can be executed on your container. The policy can contain up
  to 398,000 characters. You can add up to 100 rules to a CORS policy. If
  more than one rule applies, the service uses the first applicable rule
  listed.

  To learn more about CORS, see [Cross-Origin Resource Sharing (CORS) in AWS
  Elemental
  MediaStore](https://docs.aws.amazon.com/mediastore/latest/ug/cors-policy.html).
  """
  def put_cors_policy(client, input, options \\ []) do
    request(client, "PutCorsPolicy", input, options)
  end

  @doc """
  Writes an object lifecycle policy to a container. If the container already
  has an object lifecycle policy, the service replaces the existing policy
  with the new policy. It takes up to 20 minutes for the change to take
  effect.

  For information about how to construct an object lifecycle policy, see
  [Components of an Object Lifecycle
  Policy](https://docs.aws.amazon.com/mediastore/latest/ug/policies-object-lifecycle-components.html).
  """
  def put_lifecycle_policy(client, input, options \\ []) do
    request(client, "PutLifecyclePolicy", input, options)
  end

  @doc """
  The metric policy that you want to add to the container. A metric policy
  allows AWS Elemental MediaStore to send metrics to Amazon CloudWatch. It
  takes up to 20 minutes for the new policy to take effect.
  """
  def put_metric_policy(client, input, options \\ []) do
    request(client, "PutMetricPolicy", input, options)
  end

  @doc """
  Starts access logging on the specified container. When you enable access
  logging on a container, MediaStore delivers access logs for objects stored
  in that container to Amazon CloudWatch Logs.
  """
  def start_access_logging(client, input, options \\ []) do
    request(client, "StartAccessLogging", input, options)
  end

  @doc """
  Stops access logging on the specified container. When you stop access
  logging on a container, MediaStore stops sending access logs to Amazon
  CloudWatch Logs. These access logs are not saved and are not retrievable.
  """
  def stop_access_logging(client, input, options \\ []) do
    request(client, "StopAccessLogging", input, options)
  end

  @doc """
  Adds tags to the specified AWS Elemental MediaStore container. Tags are
  key:value pairs that you can associate with AWS resources. For example, the
  tag key might be "customer" and the tag value might be "companyA." You can
  specify one or more tags to add to each container. You can add up to 50
  tags to each container. For more information about tagging, including
  naming and usage conventions, see [Tagging Resources in
  MediaStore](https://docs.aws.amazon.com/mediastore/latest/ug/tagging.html).
  """
  def tag_resource(client, input, options \\ []) do
    request(client, "TagResource", input, options)
  end

  @doc """
  Removes tags from the specified container. You can specify one or more tags
  to remove.
  """
  def untag_resource(client, input, options \\ []) do
    request(client, "UntagResource", input, options)
  end

  @spec request(AWS.Client.t(), binary(), map(), list()) ::
          {:ok, Poison.Parser.t() | nil, Poison.Response.t()}
          | {:error, Poison.Parser.t()}
          | {:error, HTTPoison.Error.t()}
  # Performs a signed AWS JSON-RPC 1.1 POST for the given `action`.
  # The order matters: the payload is encoded first, then the SigV4
  # signature is computed over the final headers + payload before sending.
  defp request(client, action, input, options) do
    client = %{client | service: "mediastore"}
    host = build_host("mediastore", client)
    url = build_url(host, client)

    headers = [
      {"Host", host},
      {"Content-Type", "application/x-amz-json-1.1"},
      {"X-Amz-Target", "MediaStore_20170901.#{action}"}
    ]

    payload = Poison.Encoder.encode(input, %{})
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)

    case HTTPoison.post(url, payload, headers, options) do
      {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
        # Success with an empty body (e.g. delete operations).
        {:ok, nil, response}

      {:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
        {:ok, Poison.Parser.parse!(body, %{}), response}

      {:ok, %HTTPoison.Response{body: body}} ->
        # Non-200: AWS returns a JSON error document.
        error = Poison.Parser.parse!(body, %{})
        {:error, error}

      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # Region "local" targets a local emulator instead of the AWS endpoint.
  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end

  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end

  defp build_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end
end
|
lib/aws/media_store.ex
| 0.911682
| 0.405684
|
media_store.ex
|
starcoder
|
defmodule Day4 do
  @moduledoc """
  Advent of Code 2021, day 4: playing bingo.

  Parses an input consisting of a comma-separated draw list followed by
  blank-line-separated 5x5 boards, then plays the draws to find either the
  first board to win (`evaulate/1`) or the last (`evaulate_last/1`).

  NOTE: the misspelled public names `evaulate`/`evaulate_last` are kept
  unchanged for existing callers.
  """

  defmodule Game do
    # draws: numbers not yet drawn; boards: list of %Board{};
    # found: boards whose bingo flag was set as of the last evaluated draw.
    defstruct [:draws, :boards, :found]
  end

  defmodule Board do
    # contents: 5x5 list-of-lists of %Item{}.
    # win: the draw in effect when the board was last evaluated.
    # won_at: wall-clock microseconds of that evaluation, used by
    # evaulate_last/1 to order winners.
    defstruct [:contents, bingo: false, win: nil, won_at: -1]
  end

  defmodule Item do
    # A single board cell: its number and whether it has been drawn.
    defstruct [:value, state: :unmarked]
  end

  @doc "Builds a `%Game{}` from the raw puzzle input text."
  def from_input(input) do
    input
    |> String.trim()
    |> String.split("\n")
    |> parse_game()
  end

  @doc "Plays draws until the first board reaches bingo; see `:found`."
  def evaulate(game) do
    evaluate_draws(game, :first)
  end

  @doc """
  Plays draws until all boards have won, then replays one more step with the
  boards sorted by descending win time so the last winner can be identified.
  """
  def evaulate_last(game) do
    evaluated = evaluate_draws(game, :last)

    boards =
      Enum.sort(evaluated.boards, fn %{won_at: won_at_a}, %{won_at: won_at_b} ->
        won_at_a >= won_at_b
      end)

    evaluate_draws(%{evaluated | boards: boards}, :last)
  end

  @doc """
  Applies draws one at a time until the stop condition for `which` is met:
  `:first` stops when any board has bingo, `:last` when every board has.
  """
  def evaluate_draws(%{draws: [draw | rest], boards: boards} = game, which) do
    boards =
      Enum.map(boards, fn board ->
        evaluate_board(board, draw)
      end)

    found =
      Enum.filter(boards, fn %{bingo: bingo} ->
        bingo
      end)

    found_count = Enum.count(found)
    board_count = Enum.count(boards)

    case which do
      :first when found_count == 1 ->
        %{game | found: found, draws: rest, boards: boards}

      :last when found_count == board_count ->
        %{game | found: found, draws: rest, boards: boards}

      _ ->
        evaluate_draws(%{game | found: found, draws: rest, boards: boards}, which)
    end
  end

  # Out of draws: return the game as-is (no board ever won).
  def evaluate_draws(%{draws: []} = game, _which) do
    game
  end

  # A board that already has bingo is frozen so its win/won_at are preserved.
  def evaluate_board(%{bingo: true} = board, _) do
    board
  end

  @doc "Marks `draw` on the board and rescans it for a winning row/column."
  def evaluate_board(%{} = board, draw) do
    contents =
      Enum.map(board.contents, fn row ->
        evaluate_row(row, draw)
      end)

    %{board | contents: contents}
    |> scan_wins(draw)
  end

  def scan_wins(%{bingo: true} = board, _draw) do
    board
  end

  @doc "Checks all rows and columns for five marked items in a line."
  def scan_wins(%{contents: contents} = board, draw) do
    # Any fully-marked row?
    found1 =
      Enum.find_value(contents, fn
        [%Item{state: :marked}, %Item{state: :marked}, %Item{state: :marked}, %Item{state: :marked}, %Item{state: :marked}] = found -> found
        [_, _, _, _, _] -> false
      end)

    [
      [a1, b1, c1, d1, e1],
      [a2, b2, c2, d2, e2],
      [a3, b3, c3, d3, e3],
      [a4, b4, c4, d4, e4],
      [a5, b5, c5, d5, e5]
    ] = contents

    # Any fully-marked column? (transpose via destructuring above)
    found2 =
      Enum.find_value(
        [
          [a1, a2, a3, a4, a5],
          [b1, b2, b3, b4, b5],
          [c1, c2, c3, c4, c5],
          [d1, d2, d3, d4, d5],
          [e1, e2, e3, e4, e5]
        ],
        fn
          [%Item{state: :marked}, %Item{state: :marked}, %Item{state: :marked}, %Item{state: :marked}, %Item{state: :marked}] = found -> found
          [_, _, _, _, _] -> false
        end
      )

    # NOTE(review): win/won_at are overwritten on every draw even when no
    # bingo occurred — confirm intended. Ordering in evaulate_last/1 still
    # works because boards with bingo short-circuit in the clause above.
    %{board | bingo: is_list(found1) || is_list(found2), win: draw, won_at: :os.system_time(:microsecond)}
  end

  @doc "Marks every item in `row` whose value equals `draw`."
  def evaluate_row(row, draw) do
    Enum.map(row, fn
      %{value: ^draw} = item ->
        %{item | state: :marked}

      %{} = item ->
        item
    end)
  end

  @doc "Parses [draw line, blank, board lines...] into a `%Game{}`."
  def parse_game([draws, "" | boards]) do
    %Game{draws: parse_draws(draws), boards: parse_boards(boards, [], [])}
  end

  # Blank line terminates the current board buffer.
  def parse_boards(["" | rest], buffer, acc) do
    parsed = parse_board(buffer)
    parse_boards(rest, [], [parsed | acc])
  end

  def parse_boards([board | rest], buffer, acc) do
    parse_boards(rest, [board | buffer], acc)
  end

  # End of input: flush the final buffered board and restore original order.
  def parse_boards([], buffer, acc) do
    parsed = parse_board(buffer)
    Enum.reverse([parsed | acc])
  end

  @doc "Parses five buffered text lines (in reverse order) into a `%Board{}`."
  def parse_board(buffer) do
    items =
      Enum.map(buffer, fn line ->
        # Extract exactly five whitespace-separated numbers per line.
        [[_ | data]] = Regex.scan(~r/^\s*(\d+)\s*(\d+)\s*(\d+)\s*(\d+)\s*(\d+)+/, line)

        Enum.map(data, fn c ->
          %Item{value: String.to_integer(c)}
        end)
      end)
      # buffer is accumulated head-first, so reverse to restore row order.
      |> Enum.reverse()

    %Board{contents: items}
  end

  @doc "Parses the comma-separated draw line into a list of integers."
  def parse_draws(draws) do
    draws
    |> String.split(",")
    |> Enum.map(&String.to_integer/1)
  end
end
|
lib/day4.ex
| 0.52902
| 0.599866
|
day4.ex
|
starcoder
|
defmodule Gateway.RateLimit.Sweeper do
  @moduledoc """
  Periodically cleans up the ETS table.

  By default, the remote IP is considered for rate-limiting, and,
  consequently, used within the ETS table key. This means that without
  cleanup, the table would grow quite large very fast.

  The Sweeper cleans the table by removing all records that own a number of
  tokens equal to the configured burst size. This is okay because

  - removing records is atomic (per record)
  - if no record is found for a given endpoint and ip, it is (re-)created
    with the number of tokens equal to the burst size.

  Can be disabled by setting the sweep interval to 0:

      config :gateway, proxy_rate_limit_sweep_interval_ms: 0
  """
  use GenServer
  require Logger
  import Ex2ms
  import Gateway.RateLimit.Common, only: [now_unix: 0, ensure_table: 1]
  alias Gateway.RateLimit

  def start_link do
    GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
  end

  @impl GenServer
  def init(:ok) do
    conf = RateLimit.config()
    # Kick off the sweep loop only when enabled; an interval of 0 disables it
    # entirely (no :sweep message is ever scheduled).
    if conf.sweep_interval_ms > 0 do
      Logger.info("Rate-limit table-GC enabled at a #{conf.sweep_interval_ms} ms interval")
      send(self(), :sweep)
    end
    {:ok, :unused_state}
  end

  @impl GenServer
  def handle_info(:sweep, :unused_state) do
    # Config is re-read on every tick, so the interval (and a runtime
    # disable via 0) takes effect without restarting the process.
    conf = RateLimit.config()
    if conf.sweep_interval_ms > 0 do
      sweep()
      Process.send_after(self(), :sweep, conf.sweep_interval_ms)
    end
    {:noreply, :unused_state}
  end

  # Runs one sweep pass; `opts` may override config values (used in tests,
  # e.g. :current_unix_time). Returns the number of deleted records.
  @spec sweep([atom: any]) :: non_neg_integer()
  def sweep(opts \\ []) do
    n_affected = do_sweep(Enum.into(opts, RateLimit.config()))
    log_result(n_affected)
    n_affected
  end

  # Deletes every record whose token count, after refilling at
  # `avg_rate_per_sec` for the time since `last_used`, has reached the burst
  # size — i.e. records indistinguishable from a freshly created one.
  defp do_sweep(%{table_name: tab, avg_rate_per_sec: avg_rate_per_sec,
                  burst_size: burst_size} = opts) do
    ensure_table(tab)
    now = Map.get(opts, :current_unix_time, now_unix())
    # Ex2ms compiles this fun into an ETS match spec at compile time.
    sweep_matchspec =
      fun do {_key, n_tokens, last_used}
      when n_tokens + (^now - last_used) * ^avg_rate_per_sec >= ^burst_size
      ->
        true
      end
    # Deletes all records where the matchspec returns true
    # and returns the number of deleted records:
    :ets.select_delete(tab, sweep_matchspec)
  end

  # Stay quiet when nothing was purged to avoid log noise.
  defp log_result(0), do: nil
  defp log_result(1), do: Logger.debug("Rate-limit table-GC: 1 record purged")
  defp log_result(n), do: Logger.debug("Rate-limit table-GC: #{n} records purged")
end
|
lib/gateway/rate_limit/sweeper.ex
| 0.819857
| 0.441974
|
sweeper.ex
|
starcoder
|
defmodule DBConnection.Ownership do
@moduledoc """
A `DBConnection.Pool` that requires explicit checkout and checking
as a mechanism to coordinate between processes.
### Options
* `:ownership_pool` - The actual pool to use to power the ownership
mechanism. The pool is started when the ownership pool is started,
although this option may also be given on `ownership_checkout/2`
allowing developers to customize the pool checkout/checkin
* `:ownership_mode` - When mode is `:manual`, all connections must
be explicitly checked out before by using `ownership_checkout/2`.
Otherwise, mode is `:auto` and connections are checked out
implicitly. `{:shared, owner}` mode is also supported so
processes are allowed on demand. On all cases, checkins are
explicit via `ownership_checkin/2`. Defaults to `:auto`.
If the `:ownership_pool` has an atom name given in the `:name` option,
an ETS table will be created and automatically used for lookups whenever
the name is used on checkout.
Finally, if the `:caller` option is given on checkout with a pid and no
pool is assigned to the current process, a connection will be allowed
from the given pid and used on checkout with `:pool_timeout` of `:infinity`.
This is useful when multiple tasks need to collaborate on the same
connection (hence the `:infinity` timeout).
"""
@behaviour DBConnection.Pool
alias DBConnection.Ownership.Manager
alias DBConnection.Ownership.Owner
## Ownership API
@doc """
Explicitly checks a connection out from the ownership manager.
It may return `:ok` if the connection is checked out.
`{:already, :owner | :allowed}` if the caller process already
has a connection, `:error` if it could be not checked out or
raise if there was an error.
"""
@spec ownership_checkout(GenServer.server, Keyword.t) ::
:ok | {:already, :owner | :allowed} | :error | no_return
def ownership_checkout(manager, opts) do
case Manager.checkout(manager, opts) do
{:init, owner} -> Owner.init(owner, opts)
{:already, _} = already -> already
end
end
@doc """
Changes the ownwership mode.
`mode` may be `:auto`, `:manual` or `{:shared, owner}`.
The operation will always succeed when setting the mode to
`:auto` or `:manual`. It may fail with reason `:not_owner`
or `:not_found` when setting `{:shared, pid}` and the
given pid does not own any connection. May return
`:already_shared` if another process set the ownership
mode to `{:shared, _}` and is still alive.
"""
@spec ownership_mode(GenServer.server, :auto | :manual | {:shared, pid}, Keyword.t) ::
:ok | :already_shared | :not_owner | :not_found
defdelegate ownership_mode(manager, mode, opts), to: Manager, as: :mode
@doc """
Checks a connection back in.
A connection can only be checked back in by its owner.
"""
@spec ownership_checkin(GenServer.server, Keyword.t) ::
:ok | :not_owner | :not_found
defdelegate ownership_checkin(manager, opts), to: Manager, as: :checkin
  @doc """
  Allows the process given by `allow` to use the connection checked out by `owner`.

  It may return `:ok` if the connection is checked out.
  `{:already, :owner | :allowed}` if the `allow` process already
  has a connection, `:not_owner` if the owner process is not an
  owner or `:not_found` if the owner process does not have any
  connection checked out.
  """
  @spec ownership_allow(GenServer.server, owner :: pid, allow :: pid, Keyword.t) ::
    :ok | {:already, :owner | :allowed} | :not_owner | :not_found
  defdelegate ownership_allow(manager, owner, allow, opts), to: Manager, as: :allow
## Pool callbacks
@doc false
def start_link(module, opts) do
Manager.start_link(module, opts)
end
@doc false
def child_spec(module, opts, child_opts) do
Supervisor.Spec.worker(Manager, [module, opts], child_opts)
end
  @doc false
  def checkout(manager, opts) do
    # Resolve the ownership (owner) process for the calling process, then
    # check the underlying connection out from it.
    case Manager.lookup(manager, opts) do
      # The manager just created an owner for us: initialize its connection
      # first, then check out. Initialization errors are returned as-is.
      {:init, owner} ->
        case Owner.init(owner, opts) do
          :ok -> Owner.checkout(owner, opts)
          {:error, _} = error -> error
        end

      # An owner already exists for this process: plain checkout.
      {:ok, owner} ->
        Owner.checkout(owner, opts)

      # No owner is associated with this process.
      :not_found ->
        case Keyword.pop(opts, :caller) do
          # No :caller fallback was given either, so checkout cannot proceed.
          {nil, _} ->
            msg = "cannot find ownership process for #{inspect self()}. " <>
            "This may happen if you have not explicitly checked out or " <>
            "the checked out process crashed"
            {:error, RuntimeError.exception(msg)}

          # A :caller pid was given: allow ourselves on that pid's connection
          # and retry the checkout with an :infinity pool timeout (see the
          # module documentation for the rationale).
          {owner, opts} ->
            ownership_allow(manager, owner, self(), opts)
            checkout(manager, [pool_timeout: :infinity] ++ opts)
        end
    end
  end
@doc false
def checkin(owner, state, opts) do
Owner.checkin(owner, state, opts)
end
@doc false
def disconnect(owner, exception, state, opts) do
Owner.disconnect(owner, exception, state, opts)
end
@doc false
def stop(owner, reason, state, opts) do
Owner.stop(owner, reason, state, opts)
end
end
|
throwaway/hello/deps/db_connection/lib/db_connection/ownership.ex
| 0.845273
| 0.544922
|
ownership.ex
|
starcoder
|
defmodule NebulexRedisAdapter do
  @moduledoc ~S"""
  Nebulex adapter for Redis. This adapter is implemented using `Redix`,
  a Redis driver for Elixir.

  **NebulexRedisAdapter** provides three setup alternatives:

    * **Standalone** - The adapter establishes a pool of connections
      with a single Redis node. The `:standalone` is the default mode.

    * **Redis Cluster** - [Redis Cluster](https://redis.io/topics/cluster-tutorial)
      is a built-in feature in Redis since version 3, and it may be the most
      convenient and recommendable way to set up Redis in a cluster and have
      a distributed cache storage out-of-box. This adapter provides the
      `:redis_cluster` mode to set up **Redis Cluster** from the client-side
      automatically and be able to use it transparently.

    * **Built-in client-side cluster based on sharding** - This adapter
      provides a simple client-side cluster implementation based on
      Sharding distribution model via `:client_side_cluster` mode.

  ## Shared Options

  In addition to `Nebulex.Cache` shared options, this adapter supports the
  following options:

    * `:mode` - Defines the mode Redis will be set up. It can be one of the
      next values: `:standalone`, `:client_side_cluster`, `:redis_cluster`.
      Defaults to `:standalone`.

    * `:pool_size` - Number of connections in the pool. Defaults to
      `System.schedulers_online()`.

    * `:conn_opts` - Redis client options (`Redix` options in this case).
      For more information about connection options, see `Redix` docs.

  ## TTL or Expiration Time

  As is explained in `Nebulex.Cache`, most of the write-like functions support
  the `:ttl` option to define the expiration time, and it is defined in
  **milliseconds**. Although Redis works with **seconds**, the conversion logic
  is handled by the adapter transparently, so when using a cache even with the
  Redis adapter, be sure you pass the `:ttl` option in **milliseconds**.

  ## Data Types

  This adapter only works with strings internally, which means the given
  Elixir terms are encoded to binaries before executing the Redis command.
  The encoding/decoding process is performed by the adapter under-the-hood,
  so it is completely transparent for the user.

  **NOTE:** Support for other Redis Data Types is in the roadmap.

  ## Standalone

  We can define a cache to use Redis as follows:

      defmodule MyApp.RedisCache do
        use Nebulex.Cache,
          otp_app: :nebulex,
          adapter: NebulexRedisAdapter
      end

  The configuration for the cache must be in your application environment,
  usually defined in your `config/config.exs`:

      config :my_app, MyApp.RedisCache,
        conn_opts: [
          host: "127.0.0.1",
          port: 6379
        ]

  ## Redis Cluster

  We can define a cache to use Redis Cluster as follows:

      defmodule MyApp.RedisClusterCache do
        use Nebulex.Cache,
          otp_app: :nebulex,
          adapter: NebulexRedisAdapter
      end

  The config:

      config :my_app, MyApp.RedisClusterCache,
        mode: :redis_cluster,
        master_nodes: [
          [
            host: "127.0.0.1",
            port: 7000
          ],
          [
            url: "redis://127.0.0.1:7001"
          ],
          [
            url: "redis://127.0.0.1:7002"
          ]
        ],
        conn_opts: [
          # Redix options, except `:host` and `:port`; unless we have a cluster
          # of nodes with the same host and/or port, which doesn't make sense.
        ]

  ### Redis Cluster Options

  In addition to shared options, `:redis_cluster` mode supports the following
  options:

    * `:master_nodes` - The list with the configuration for the Redis cluster
      master nodes. The configuration for each master node contains the same
      options as `:conn_opts`. The adapter traverses the list trying to
      establish connection at least with one of them and get the cluster slots
      to finally setup the Redis cluster from client side properly. If one
      fails, the adapter retries with the next in the list, that's why at least
      one master node must be set.

    * `:conn_opts` - Same as shared options (optional). The `:conn_opts` will
      be applied to each connection pool with the cluster (they will override
      the host and port retrieved from cluster slots info). For that reason,
      be careful when setting `:host` or `:port` options since they will be
      used globally and can cause connection issues. Normally, we add here
      the desired client options except `:host` and `:port`. If you have a
      cluster with the same host for all nodes, in that case it makes sense
      to add also the `:host` option.

    * `:pool_size` - Same as shared options (optional). It applies to all
      cluster slots, meaning all connection pools will have the same size.

  ## Client-side cluster

  We can define a cache with "client-side cluster mode" as follows:

      defmodule MyApp.ClusteredCache do
        use Nebulex.Cache,
          otp_app: :nebulex,
          adapter: NebulexRedisAdapter
      end

  The config:

      config :my_app, MyApp.ClusteredCache,
        mode: :client_side_cluster,
        nodes: [
          node1: [
            pool_size: 10,
            conn_opts: [
              host: "127.0.0.1",
              port: 9001
            ]
          ],
          node2: [
            pool_size: 4,
            conn_opts: [
              url: "redis://127.0.0.1:9002"
            ]
          ],
          node3: [
            conn_opts: [
              host: "127.0.0.1",
              port: 9003
            ]
          ]
        ]

  By default, the adapter uses `NebulexRedisAdapter.ClientCluster.Keyslot` for the
  keyslot. Besides, if `:jchash` is defined as dependency, the adapter will use
  consistent-hashing automatically. However, you can also provide your own
  implementation by implementing the `Nebulex.Adapter.Keyslot` and set it into
  the `:keyslot` option. For example:

      defmodule MyApp.ClusteredCache.Keyslot do
        use Nebulex.Adapter.Keyslot

        @impl true
        def hash_slot(key, range) do
          # your implementation goes here
        end
      end

  And the config:

      config :my_app, MyApp.ClusteredCache,
        mode: :client_side_cluster,
        keyslot: MyApp.ClusteredCache.Keyslot,
        nodes: [
          ...
        ]

  ### Client-side cluster options

  In addition to shared options, `:client_side_cluster` mode supports the following
  options:

    * `:nodes` - The list of nodes the adapter will setup the cluster with;
      a pool of connections is established per node. The `:client_side_cluster` mode
      enables resilience to be able to survive in case any node(s) gets
      unreachable. For each element of the list, we set the configuration
      for each node, such as `:conn_opts`, `:pool_size`, etc.

    * `:keyslot` - Defines the module implementing `Nebulex.Adapter.Keyslot`
      behaviour, used to compute the node where the command will be applied to.
      It is highly recommendable to provide a consistent hashing implementation.

  ## Queryable API

  Since the queryable API is implemented by using `KEYS` command:

    * Only strings (`String.t()`) are allowed as query parameter.
    * Only keys can be queried.

  ### Examples

      iex> MyApp.RedisCache.put_all(%{
      ...>   "firstname" => "Albert",
      ...>   "lastname" => "Einstein",
      ...>   "age" => 76
      ...> })
      :ok

      iex> MyApp.RedisCache.all("**name**")
      ["firstname", "lastname"]

      iex> MyApp.RedisCache.all("a??")
      ["age"]

      iex> MyApp.RedisCache.all()
      ["age", "firstname", "lastname"]

      iex> stream = TestCache.stream("**name**")
      iex> stream |> Enum.to_list()
      ["firstname", "lastname"]

      # get the values for the returned queried keys
      iex> "**name**" |> MyApp.RedisCache.all() |> MyApp.RedisCache.get_all()
      %{"firstname" => "Albert", "lastname" => "Einstein"}

  ## Using the cache for executing a Redis command or pipeline

  Since `NebulexRedisAdapter` works on top of `Redix` and provides features like
  connection pools and "Redis Cluster" support, it may be seen also as a sort of
  Redis client, but it is meant to be used mainly with the Nebulex cache API.
  However, Redis API is quite extensive and there are a lot of useful commands
  we may want to run taking advantage of the `NebulexRedisAdapter` features.
  Therefore, the adapter injects two additional/extended functions to the
  defined cache: `command!/3` and `pipeline!/3`.

  ### `command!(key \\ nil, name \\ __MODULE__, command)`

      iex> MyCache.command!("mylist", ["LPUSH", "mylist", "world"])
      1
      iex> MyCache.command!("mylist", ["LPUSH", "mylist", "hello"])
      2
      iex> MyCache.command!("mylist", ["LRANGE", "mylist", "0", "-1"])
      ["hello", "world"]

  ### `pipeline!(key \\ nil, name \\ __MODULE__, commands)`

      iex> cache.pipeline!("mylist", [
      ...>   ["LPUSH", "mylist", "world"],
      ...>   ["LPUSH", "mylist", "hello"],
      ...>   ["LRANGE", "mylist", "0", "-1"]
      ...> ])
      [1, 2, ["hello", "world"]]

  Arguments for `command!/3` and `pipeline!/3`:

    * `key` - it is required when used the adapter in mode `:redis_cluster`
      or `:client_side_cluster` so that the node where the commands will
      take place can be selected properly. For `:standalone` it is optional.
    * `name` - The name of the cache in case you are using dynamic caches,
      otherwise it is not required.
    * `commands` - Redis commands.

  ## Transactions

  This adapter doesn't provide support for transactions, since there is no way
  to guarantee its execution on Redis itself, at least not in the way the
  `c:Nebulex.Adapter.Transaction.transaction/3` works, because the anonymous
  function can have any kind of logic, which cannot be translated easily into
  Redis commands.

  > In the future, it is planned to add to Nebulex a `multi`-like function to
    perform multiple commands at once, perhaps that will be the best way to
    perform [transactions via Redis](https://redis.io/topics/transactions).
  """

  # Provide Cache Implementation
  @behaviour Nebulex.Adapter
  @behaviour Nebulex.Adapter.Entry
  @behaviour Nebulex.Adapter.Queryable

  import Nebulex.Helpers
  import NebulexRedisAdapter.Encoder

  alias Nebulex.Adapter
  alias NebulexRedisAdapter.{ClientCluster, Command, Connection, RedisCluster}

  ## Nebulex.Adapter

  @impl true
  defmacro __before_compile__(_env) do
    quote do
      @doc """
      A convenience function for executing a Redis command.
      """
      def command!(key \\ nil, name \\ __MODULE__, command) do
        Adapter.with_meta(name, fn _, meta ->
          Command.exec!(meta, command, key)
        end)
      end

      @doc """
      A convenience function for executing a Redis pipeline.
      """
      def pipeline!(key \\ nil, name \\ __MODULE__, commands) do
        Adapter.with_meta(name, fn _, meta ->
          Command.pipeline!(meta, commands, key)
        end)
      end
    end
  end

  @impl true
  def init(opts) do
    # Required cache name
    name = opts[:name] || Keyword.fetch!(opts, :cache)

    # Adapter mode
    mode = Keyword.get(opts, :mode, :standalone)

    # Pool size
    pool_size =
      get_option(
        opts,
        :pool_size,
        "an integer > 0",
        &(is_integer(&1) and &1 > 0),
        System.schedulers_online()
      )

    # Init the child specs according to the adapter mode
    {children, default_keyslot} = do_init(mode, name, pool_size, opts)

    # Keyslot module for selecting nodes
    keyslot =
      opts
      |> Keyword.get(:keyslot, default_keyslot)
      |> assert_behaviour(Nebulex.Adapter.Keyslot, "keyslot")

    # Cluster nodes (only meaningful for :client_side_cluster mode)
    nodes =
      for {node_name, node_opts} <- Keyword.get(opts, :nodes, []) do
        {node_name, Keyword.get(node_opts, :pool_size, System.schedulers_online())}
      end

    child_spec =
      Nebulex.Adapters.Supervisor.child_spec(
        name: normalize_module_name([name, Supervisor]),
        strategy: :rest_for_one,
        children: children
      )

    meta = %{
      name: name,
      mode: mode,
      keyslot: keyslot,
      nodes: nodes,
      pool_size: pool_size,
      default_dt: Keyword.get(opts, :default_data_type, :object)
    }

    {:ok, child_spec, meta}
  end

  # Mode-specific supervision tree and default keyslot implementation.
  defp do_init(:standalone, name, pool_size, opts) do
    {:ok, children} = Connection.init(name, pool_size, opts)
    {children, ClientCluster.Keyslot}
  end

  defp do_init(:client_side_cluster, _name, _pool_size, opts) do
    {:ok, children} = ClientCluster.init(opts)
    {children, ClientCluster.Keyslot}
  end

  defp do_init(:redis_cluster, name, pool_size, opts) do
    {:ok, children} = RedisCluster.init(name, pool_size, opts)
    {children, RedisCluster.Keyslot}
  end

  ## Nebulex.Adapter.Entry

  @impl true
  def get(adapter_meta, key, _opts) do
    with_pipeline(adapter_meta, key, [["GET", encode(key)]])
  end

  @impl true
  def get_all(%{mode: :standalone} = adapter_meta, keys, _opts) do
    mget(nil, adapter_meta, keys)
  end

  def get_all(adapter_meta, keys, _opts) do
    # In cluster modes, keys must be grouped per hash slot/node since MGET
    # can only run against a single node.
    keys
    |> group_keys_by_hash_slot(adapter_meta)
    |> Enum.reduce(%{}, fn {hash_slot, keys}, acc ->
      return = mget(hash_slot, adapter_meta, keys)
      Map.merge(acc, return)
    end)
  end

  # Executes MGET and zips the results back with the requested keys,
  # dropping keys that returned nil (missing entries).
  defp mget(hash_slot_key, adapter_meta, keys) do
    adapter_meta
    |> Command.exec!(["MGET" | for(k <- keys, do: encode(k))], hash_slot_key)
    |> Enum.reduce({keys, %{}}, fn
      nil, {[_key | keys], acc} ->
        {keys, acc}

      value, {[key | keys], acc} ->
        {keys, Map.put(acc, key, decode(value))}
    end)
    |> elem(1)
  end

  @impl true
  def put(adapter_meta, key, value, ttl, on_write, opts) do
    # SET with NX/XX flags for :put_new/:replace and EX for the TTL.
    cmd_opts = cmd_opts(action: on_write, ttl: fix_ttl(ttl))
    redis_k = encode(key)
    redis_v = encode(value, opts)

    case Command.exec!(adapter_meta, ["SET", redis_k, redis_v | cmd_opts], key) do
      "OK" -> true
      nil -> false
    end
  end

  @impl true
  def put_all(%{mode: :standalone} = adapter_meta, entries, ttl, on_write, opts) do
    do_put_all(adapter_meta, nil, entries, fix_ttl(ttl), on_write, opts)
  end

  def put_all(adapter_meta, entries, ttl, on_write, opts) do
    ttl = fix_ttl(ttl)

    # NOTE(review): `acc &&` short-circuits, so a `false` from MSETNX on one
    # hash slot skips the remaining groups entirely — confirm this is the
    # intended :put_new semantics for cluster modes.
    entries
    |> group_keys_by_hash_slot(adapter_meta)
    |> Enum.reduce(:ok, fn {hash_slot, group}, acc ->
      acc && do_put_all(adapter_meta, hash_slot, group, ttl, on_write, opts)
    end)
  end

  # Builds a single MSET/MSETNX plus one EXPIRE per key (when a TTL is set)
  # and runs them in one pipeline. Returns :ok for MSET, true/false for MSETNX.
  defp do_put_all(adapter_meta, hash_slot, entries, ttl, on_write, opts) do
    cmd =
      case on_write do
        :put -> "MSET"
        :put_new -> "MSETNX"
      end

    {mset, expire} =
      Enum.reduce(entries, {[cmd], []}, fn {key, val}, {acc1, acc2} ->
        redis_k = encode(key)

        acc2 =
          if is_integer(ttl),
            do: [["EXPIRE", redis_k, ttl] | acc2],
            else: acc2

        {[encode(val, opts), redis_k | acc1], acc2}
      end)

    adapter_meta
    |> Command.pipeline!([Enum.reverse(mset) | expire], hash_slot)
    |> hd()
    |> case do
      "OK" -> :ok
      1 -> true
      0 -> false
    end
  end

  @impl true
  def delete(adapter_meta, key, _opts) do
    _ = Command.exec!(adapter_meta, ["DEL", encode(key)], key)
    :ok
  end

  @impl true
  def take(adapter_meta, key, _opts) do
    # GET + DEL in one pipeline; the GET result (first reply) is returned.
    redis_k = encode(key)
    with_pipeline(adapter_meta, key, [["GET", redis_k], ["DEL", redis_k]])
  end

  @impl true
  def has_key?(adapter_meta, key) do
    case Command.exec!(adapter_meta, ["EXISTS", encode(key)], key) do
      1 -> true
      0 -> false
    end
  end

  @impl true
  def ttl(adapter_meta, key) do
    # Redis TTL replies: -1 = no expiration, -2 = key does not exist;
    # otherwise seconds, converted back to milliseconds for Nebulex.
    case Command.exec!(adapter_meta, ["TTL", encode(key)], key) do
      -1 -> :infinity
      -2 -> nil
      ttl -> ttl * 1000
    end
  end

  @impl true
  def expire(adapter_meta, key, :infinity) do
    # PERSIST removes the expiration; the preceding TTL tells us whether the
    # key exists at all ([-2, 0] means it does not).
    redis_k = encode(key)

    case Command.pipeline!(adapter_meta, [["TTL", redis_k], ["PERSIST", redis_k]], key) do
      [-2, 0] -> false
      [_, _] -> true
    end
  end

  def expire(adapter_meta, key, ttl) do
    case Command.exec!(adapter_meta, ["EXPIRE", encode(key), fix_ttl(ttl)], key) do
      1 -> true
      0 -> false
    end
  end

  @impl true
  def touch(adapter_meta, key) do
    case Command.exec!(adapter_meta, ["TOUCH", encode(key)], key) do
      1 -> true
      0 -> false
    end
  end

  @impl true
  def update_counter(adapter_meta, key, incr, :infinity, default, _opts) do
    redis_k = encode(key)

    adapter_meta
    |> maybe_incr_default(key, redis_k, default)
    |> Command.exec!(["INCRBY", redis_k, incr], key)
  end

  def update_counter(adapter_meta, key, incr, ttl, default, _opts) do
    redis_k = encode(key)

    adapter_meta
    |> maybe_incr_default(key, redis_k, default)
    |> Command.pipeline!([["INCRBY", redis_k, incr], ["EXPIRE", redis_k, fix_ttl(ttl)]], key)
    |> hd()
  end

  # Seeds the counter with `default` when the key doesn't exist yet, so the
  # subsequent INCRBY starts from the default instead of 0.
  defp maybe_incr_default(adapter_meta, key, redis_k, default)
       when is_integer(default) and default > 0 do
    case Command.exec!(adapter_meta, ["EXISTS", redis_k], key) do
      1 ->
        adapter_meta

      0 ->
        _ = Command.exec!(adapter_meta, ["INCRBY", redis_k, default], key)
        adapter_meta
    end
  end

  defp maybe_incr_default(adapter_meta, _, _, _), do: adapter_meta

  ## Nebulex.Adapter.Queryable

  @impl true
  def execute(%{mode: mode} = adapter_meta, :count_all, nil, _opts) do
    # DBSIZE per node, summed across nodes in cluster modes.
    exec!(mode, [adapter_meta, ["DBSIZE"]], [0, &Kernel.+(&2, &1)])
  end

  def execute(%{mode: mode} = adapter_meta, :delete_all, nil, _opts) do
    size = exec!(mode, [adapter_meta, ["DBSIZE"]], [0, &Kernel.+(&2, &1)])
    _ = exec!(mode, [adapter_meta, ["FLUSHDB"]], [])
    size
  end

  def execute(adapter_meta, :all, query, _opts) do
    execute_query(query, adapter_meta)
  end

  @impl true
  def stream(adapter_meta, query, _opts) do
    # The query is still executed eagerly (KEYS); the stream only wraps the
    # resulting list so it conforms to the Queryable stream contract.
    Stream.resource(
      fn ->
        execute_query(query, adapter_meta)
      end,
      fn
        [] -> {:halt, []}
        elems -> {elems, []}
      end,
      & &1
    )
  end

  ## Private Functions

  # Runs a pipeline and decodes the first reply.
  defp with_pipeline(adapter_meta, key, pipeline) do
    adapter_meta
    |> Command.pipeline!(pipeline, key)
    |> hd()
    |> decode()
  end

  # Translates {action, ttl} options into SET command flags.
  defp cmd_opts(keys), do: Enum.reduce(keys, [], &cmd_opts/2)

  defp cmd_opts({:action, :put}, acc), do: acc
  defp cmd_opts({:action, :put_new}, acc), do: ["NX" | acc]
  defp cmd_opts({:action, :replace}, acc), do: ["XX" | acc]
  defp cmd_opts({:ttl, :infinity}, acc), do: acc
  defp cmd_opts({:ttl, ttl}, acc), do: ["EX", "#{ttl}" | acc]

  # Converts the Nebulex TTL (milliseconds) into Redis seconds.
  defp fix_ttl(:infinity), do: :infinity
  defp fix_ttl(ttl) when is_integer(ttl) and ttl >= 1000, do: div(ttl, 1000)

  defp fix_ttl(ttl) do
    # Fixed typo in the error message (was ":intinity").
    raise ArgumentError,
          "expected ttl: to be an integer >= 1000 or :infinity, got: #{inspect(ttl)}"
  end

  # `nil` query means "all entries": list every key and decode it.
  defp execute_query(nil, adapter_meta) do
    for key <- execute_query("*", adapter_meta), do: decode(key)
  end

  defp execute_query(pattern, %{mode: mode} = adapter_meta) when is_binary(pattern) do
    exec!(mode, [adapter_meta, ["KEYS", pattern]], [[], &Kernel.++(&1, &2)])
  end

  defp execute_query(pattern, _adapter_meta) do
    raise Nebulex.QueryError, message: "invalid pattern", query: pattern
  end

  # Dispatches a raw command to the right executor; cluster executors take
  # an extra {initial_acc, reducer} pair to merge per-node results.
  defp exec!(:standalone, args, _extra_args) do
    apply(Command, :exec!, args)
  end

  defp exec!(:client_side_cluster, args, extra_args) do
    apply(ClientCluster, :exec!, args ++ extra_args)
  end

  defp exec!(:redis_cluster, args, extra_args) do
    apply(RedisCluster, :exec!, args ++ extra_args)
  end

  defp group_keys_by_hash_slot(enum, %{mode: :client_side_cluster, nodes: nodes, keyslot: keyslot}) do
    ClientCluster.group_keys_by_hash_slot(enum, nodes, keyslot)
  end

  defp group_keys_by_hash_slot(enum, %{mode: :redis_cluster, keyslot: keyslot}) do
    RedisCluster.group_keys_by_hash_slot(enum, keyslot)
  end
end
|
lib/nebulex_redis_adapter.ex
| 0.903031
| 0.679511
|
nebulex_redis_adapter.ex
|
starcoder
|
defmodule Data.Skill do
  @moduledoc """
  Skill schema
  """

  use Data.Schema

  import Data.Effect, only: [validate_effects: 1]

  alias Data.ClassSkill
  alias Data.Effect

  # Every castable field on a skill is also required, so the same list
  # drives both cast/3 and validate_required/2.
  @fields [
    :name,
    :description,
    :level,
    :points,
    :user_text,
    :usee_text,
    :command,
    :cooldown_time,
    :whitelist_effects,
    :effects,
    :tags,
    :is_global,
    :require_target
  ]

  schema "skills" do
    field(:api_id, Ecto.UUID, read_after_writes: true)
    field(:name, :string)
    field(:description, :string)
    field(:level, :integer)
    field(:points, :integer)
    field(:user_text, :string)
    field(:usee_text, :string)
    field(:command, :string)
    field(:cooldown_time, :integer, default: 3000)
    field(:whitelist_effects, {:array, :string}, default: [])
    field(:effects, {:array, Effect}, default: [])
    field(:tags, {:array, :string}, default: [])
    field(:is_global, :boolean, default: false)
    field(:require_target, :boolean, default: false)

    has_many(:class_skills, ClassSkill)
    has_many(:classes, through: [:class_skills, :class])

    timestamps()
  end

  # Casts and validates all skill fields, the embedded effects, the effect
  # whitelist, and a non-negative cooldown.
  def changeset(struct, params) do
    struct
    |> cast(params, @fields)
    |> validate_required(@fields)
    |> validate_effects()
    |> validate_whitelist()
    |> validate_number(:cooldown_time, greater_than_or_equal_to: 0)
  end

  # Skips validation when the whitelist is absent; otherwise checks each type.
  defp validate_whitelist(changeset) do
    case get_field(changeset, :whitelist_effects) do
      nil -> changeset
      whitelist -> check_whitelist_types(changeset, whitelist)
    end
  end

  # Every whitelisted effect must be one of the known Effect types.
  defp check_whitelist_types(changeset, whitelist) do
    if Enum.all?(whitelist, &(&1 in Effect.types())) do
      changeset
    else
      add_error(changeset, :whitelist_effects, "must all be a real type")
    end
  end
end
|
lib/data/skill.ex
| 0.663887
| 0.421165
|
skill.ex
|
starcoder
|
defmodule Rayray.Canvas do
  @moduledoc """
  A pixel canvas stored as a map of column maps: `canvas[x][y]` is the color
  at column `x`, row `y`. Also renders the canvas to the PPM (P3) format.
  """

  alias Rayray.Tuple

  @doc """
  Builds a `w` x `h` canvas with every pixel initialized to black.

  Bug fix: the previous implementation seeded each column's inner reduce with
  the OUTER accumulator, so every column after the first also contained the
  preceding columns (integer keys mapping to whole column maps) alongside its
  row entries, corrupting `height/1` for those columns.
  """
  def canvas(w, h) do
    black = Tuple.color(0, 0, 0)
    # All columns start identical, so one shared (immutable) column map is fine.
    column = Map.new(0..(h - 1), fn y -> {y, black} end)
    Map.new(0..(w - 1), fn x -> {x, column} end)
  end

  @doc "Number of columns. O(1)."
  def width(c) do
    map_size(c)
  end

  @doc "Number of rows, taken from the first column."
  def height(c) do
    {_x, column} = Enum.at(c, 0)
    map_size(column)
  end

  @doc "Returns a new canvas with the pixel at `{x, y}` replaced."
  def write_pixel(canvas, x, y, pixel) do
    Kernel.put_in(canvas, [x, y], pixel)
  end

  @doc "Reads the pixel at `{x, y}`; raises MatchError if out of bounds."
  def pixel_at(canvas, x, y) do
    %{^x => %{^y => color}} = canvas
    color
  end

  @doc """
  Renders the canvas as a PPM (P3) string: header, then rows of "r g b"
  triplets with lines kept under 70 characters as the PPM spec recommends.
  """
  def canvas_to_ppm(canvas) do
    width = width(canvas)
    height = height(canvas)

    # Accumulates pixel tokens into a line, emitting the line once adding the
    # next token would reach 70 characters.
    chunk_fun = fn item, acc ->
      item_plus_acc =
        if acc == "" do
          item
        else
          acc <> " " <> item
        end

      if String.length(item_plus_acc) >= 70 do
        {:cont, acc <> "\n", item}
      else
        {:cont, item_plus_acc}
      end
    end

    # Flushes whatever partial line remains at the end of a row.
    after_fun = fn
      "" ->
        {:cont, ""}

      acc ->
        {:cont, acc <> "\n", ""}
    end

    x_indexes = 0..(width - 1)
    y_indexes = 0..(height - 1)

    # Rows top-to-bottom; each pixel contributes its clamped, scaled channels.
    pixels =
      Enum.map(y_indexes, fn y ->
        pixel_row =
          Enum.flat_map(x_indexes, fn x ->
            %{^x => %{^y => %{red: red, green: green, blue: blue}}} = canvas
            Enum.map([red, green, blue], fn channel ->
              "#{channel |> clamp() |> scale()}"
            end)
          end)

        Enum.chunk_while(pixel_row, "", chunk_fun, after_fun)
      end)

    """
    P3
    #{width} #{height}
    255
    #{pixels}
    """
  end

  # Clamps a channel into [0, 1].
  defp clamp(color_channel) when color_channel > 1 do
    1
  end

  defp clamp(color_channel) when color_channel < 0 do
    0
  end

  defp clamp(color_channel), do: color_channel

  # Scales a [0, 1] channel to the 0..255 PPM range, rounding up.
  defp scale(color_channel) do
    (color_channel * 255) |> ceil()
  end
end
|
lib/rayray/canvas.ex
| 0.594198
| 0.53959
|
canvas.ex
|
starcoder
|
defmodule Snek.SmallStaticCycle do
  @moduledoc false
  # This test is designed to be static/consistent for profiling/benchmarking.
  # It is designed to run a solo game for exactly 147 turns (no randomness)
  #
  # The snake walks a fixed tour of the small board: @cycle maps each head
  # position {x, y} to the move to make from there, so every run performs
  # the identical sequence of Solo.next/3 calls.

  alias Snek.Board
  alias Snek.Board.{Point, Size, Snake}
  alias Snek.Ruleset.Solo

  # Apples never spawn randomly; only the two seeded in init/0 exist.
  @apple_spawn_chance 0.0

  # The single snake's id in this solo game.
  @snake_id "p1"

  # Scripted move table: head coordinate -> next direction.
  @cycle %{
    {0, 0} => :right,
    {1, 0} => :right,
    {2, 0} => :right,
    {3, 0} => :right,
    {4, 0} => :right,
    {5, 0} => :right,
    {6, 0} => :down,
    {6, 1} => :left,
    {5, 1} => :left,
    {4, 1} => :left,
    {3, 1} => :left,
    {2, 1} => :left,
    {1, 1} => :down,
    {1, 2} => :right,
    {2, 2} => :right,
    {3, 2} => :right,
    {4, 2} => :right,
    {5, 2} => :right,
    {6, 2} => :down,
    {6, 3} => :left,
    {5, 3} => :left,
    {4, 3} => :left,
    {3, 3} => :left,
    {2, 3} => :left,
    {1, 3} => :down,
    {1, 4} => :right,
    {2, 4} => :right,
    {3, 4} => :right,
    {4, 4} => :right,
    {5, 4} => :right,
    {6, 4} => :down,
    {6, 5} => :down,
    {6, 6} => :left,
    {5, 6} => :up,
    {5, 5} => :left,
    {4, 5} => :down,
    {4, 6} => :left,
    {3, 6} => :up,
    {3, 5} => :left,
    {2, 5} => :down,
    {2, 6} => :left,
    {1, 6} => :up,
    {1, 5} => :left,
    {0, 5} => :up,
    {0, 4} => :up,
    {0, 3} => :up,
    {0, 2} => :up,
    {0, 1} => :up,
    {0, 6} => :up
  }

  # Runs the game to completion and returns the final board: iterate boards
  # by feeding the scripted move into Solo.next/3, stop once Solo.done?/1
  # reports game over, and take the last :ok board produced.
  def run do
    stream = Stream.iterate({:ok, init()}, fn {:ok, board} ->
      if Solo.done?(board) do
        {:error, :game_over}
      else
        snake_moves = %{@snake_id => get(board)}
        next_board = Solo.next(board, snake_moves, @apple_spawn_chance)
        {:ok, next_board}
      end
    end)

    # Truncate the (infinite) stream at the first {:error, :game_over}.
    stream_until_end = Stream.take_while(stream, fn
      {:ok, %Board{}} -> true
      {:error, :game_over} -> false
    end)

    {:ok, final_board} = Enum.at(stream_until_end, -1)
    final_board
  end

  # Fixed starting position: one snake and two apples on a small board.
  # The `with` has no else clause, so any spawn failure surfaces as a
  # MatchError-like crash — acceptable for a static test fixture.
  defp init do
    start = Point.new(1, 3)
    apples = [Point.new(0, 4), Point.new(3, 3)]

    with board <- Board.new(Size.small),
         {:ok, board} <- Board.spawn_snake(board, @snake_id, start),
         {:ok, board} <- Board.spawn_apples(board, apples) do
      board
    end
  end

  # Looks up the scripted move for the snake's current head position.
  defp get(%Board{snakes: [%Snake{body: [{x, y} | _]} | _]}) do
    @cycle[{x, y}]
  end
end
|
test/support/small_static_cycle.ex
| 0.67854
| 0.566798
|
small_static_cycle.ex
|
starcoder
|
defmodule D14 do
  @moduledoc """
  Advent of Code 2021, day 14: polymer pair insertion.

  `p1/1` grows the polymer string naively for 10 steps; `p2/1` tracks only
  pair/element frequencies, which scales to 40 steps.
  """

  # elem_freq: element => count; pair_freq: two-char pair => count;
  # pair_rules: two-char pair => element inserted between them.
  defstruct elem_freq: %{}, pair_freq: %{}, pair_rules: %{}

  @doc "Runs 10 naive insertion steps; returns max minus min element count."
  def p1(input) do
    {template, pair_rules} = parse_input(input)
    polymer = develop_polymer_naive(template, pair_rules, 10)
    {min, max} = polymer |> Enum.frequencies() |> Map.values() |> Enum.min_max()
    max - min
  end

  @doc "Runs 40 frequency-based steps; returns max minus min element count."
  def p2(input) do
    {template, pair_rules} = parse_input(input)

    %D14{elem_freq: elem_freq} =
      %D14{
        elem_freq: Enum.frequencies(template),
        pair_freq: Enum.frequencies(derive_pairs(template)),
        pair_rules: pair_rules
      }
      |> develop_polymer_smart(40)

    {min, max} = elem_freq |> Map.values() |> Enum.min_max()
    max - min
  end

  # Parses "TEMPLATE\n\nAB -> C\n..." into {codepoint list, rules map}.
  defp parse_input(input) do
    [template_str | rule_lines] = String.split(input, "\n", trim: true)

    rules =
      rule_lines
      |> Enum.map(&String.split(&1, " -> "))
      |> Map.new(fn [pair, insert] -> {pair, insert} end)

    {String.codepoints(template_str), rules}
  end

  # Naive approach: materialize the whole polymer each step.
  defp develop_polymer_naive(polymer, pair_rules, steps) do
    Enum.reduce(0..(steps - 1), polymer, fn _, polymer ->
      insert_elements(polymer, pair_rules)
    end)
  end

  # Inserts the ruled element between every adjacent pair.
  defp insert_elements([e1, e2], pair_rules), do: [e1, pair_rules[e1 <> e2], e2]

  defp insert_elements([e1 | [e2 | tail]], pairs) do
    [e1, pairs[e1 <> e2] | insert_elements([e2 | tail], pairs)]
  end

  @doc "Lists every adjacent pair of the template as two-char strings."
  def derive_pairs([e1, e2]), do: [e1 <> e2]
  def derive_pairs([e1 | [e2 | tail]]), do: [e1 <> e2 | derive_pairs([e2 | tail])]

  @doc "Applies `steps` frequency-based insertion steps to the container."
  def develop_polymer_smart(container, steps) do
    Enum.reduce(0..(steps - 1), container, fn _, container ->
      develop_polymer_smart_once(container)
    end)
  end

  # One step over pair frequencies: a pair AB with rule AB -> C and count n
  # produces n C's and turns the n AB's into n AC's and n CB's.
  #
  # Fix: pairs whose count has dropped to zero (leftovers from earlier steps)
  # are now skipped — they contribute nothing, cause wasted work every step,
  # and could plant spurious `=> 0` entries in elem_freq, skewing Enum.min.
  defp develop_polymer_smart_once(container) do
    Enum.reduce(container.pair_freq, container, fn
      {_pair, 0}, container ->
        container

      {pair, freq}, container ->
        [l, r] = String.codepoints(pair)
        # Assumes every occurring pair has a rule (true for AoC inputs);
        # a missing rule would crash on the string concatenation below.
        new_elem = container.pair_rules[pair]
        elem_freq = increment_key_by(container.elem_freq, new_elem, freq)

        pair_freq =
          container.pair_freq
          |> increment_key_by(pair, -freq)
          |> increment_key_by(l <> new_elem, freq)
          |> increment_key_by(new_elem <> r, freq)

        %{container | elem_freq: elem_freq, pair_freq: pair_freq}
    end)
  end

  # Adds `x` (possibly negative) to `map[key]`, defaulting to 0.
  defp increment_key_by(map, key, x), do: Map.put(map, key, Map.get(map, key, 0) + x)
end
|
d14/lib/d14.ex
| 0.527803
| 0.505981
|
d14.ex
|
starcoder
|
defmodule ErrorInfo do
  @moduledoc false
  # Holds everything captured about an exception: the message, the
  # stacktrace, request context (path, controller, action, etc.) and any
  # custom data selected by the configured strategy.

  @enforce_keys [:reason, :stack, :timestamp]
  defstruct [:name, :reason, :stack, :controller, :action, :request, :timestamp, :metadata]

  @type option ::
          :logger
          | [logger: [fields: list(atom())]]
          | :assigns
          | [assigns: [fields: list(atom())]]

  @type custom_data_strategy_type :: :nothing | option | [option]

  @spec build(
          %{
            required(:reason) => any(),
            required(:stack) => Exception.stacktrace(),
            optional(any()) => any()
          },
          map(),
          custom_data_strategy_type
        ) :: {atom(), %ErrorInfo{}}
  def build(%{reason: raw_reason, stack: stack} = error, conn, custom_data_strategy) do
    {reason, name} = error_reason(raw_reason)

    info = %ErrorInfo{
      reason: reason,
      stack: stack,
      controller: get_in(conn.private, [:phoenix_controller]),
      action: get_in(conn.private, [:phoenix_action]),
      request: build_request_info(conn),
      timestamp: DateTime.utc_now(),
      name: name,
      metadata: build_custom_data(conn, custom_data_strategy)
    }

    {error_type(error), info}
  end

  # Normalizes the many shapes a reason can take into {message, name}:
  # exception structs keep their module name; everything else is "Error".
  defp error_reason(%name{message: message}), do: {message, name}
  defp error_reason(%{message: message}), do: {message, "Error"}
  defp error_reason(message) when is_binary(message), do: error_reason(%{message: message})
  defp error_reason(other), do: error_reason(%{message: inspect(other)})

  # Picks the tag returned alongside the struct: the exception module,
  # the crash kind, or :error as a fallback.
  defp error_type(%{reason: %name{}}), do: name
  defp error_type(%{error: %{kind: kind}}), do: kind
  defp error_type(_), do: :error

  # Snapshot of the request the error happened in.
  defp build_request_info(conn) do
    %{
      path: conn.request_path,
      method: conn.method,
      url: get_full_url(conn),
      port: conn.port,
      scheme: conn.scheme,
      query_string: conn.query_string,
      client_ip: format_ip(conn.remote_ip)
    }
  end

  # Credit: https://github.com/jarednorman/plugsnag/blob/master/lib/plugsnag/basic_error_report_builder.ex
  defp get_full_url(conn) do
    base = "#{conn.scheme}://#{conn.host}#{conn.request_path}"

    if conn.query_string == "" do
      base
    else
      "#{base}?#{conn.query_string}"
    end
  end

  # Credit: https://github.com/jarednorman/plugsnag/blob/master/lib/plugsnag/basic_error_report_builder.ex
  # NOTE(review): joining with "." assumes an IPv4 tuple — an IPv6 remote_ip
  # would render with dots instead of colons; confirm upstream guarantees.
  defp format_ip(ip_tuple) do
    ip_tuple |> Tuple.to_list() |> Enum.join(".")
  end

  # Builds the :metadata payload according to the configured strategy.
  # Clause order matters: specific atoms/keywords first, list fallback last.
  @spec build_custom_data(map(), custom_data_strategy_type) :: map()
  defp build_custom_data(_conn, :nothing), do: nil

  defp build_custom_data(_conn, :logger),
    do: %{logger: Map.new(Logger.metadata())}

  defp build_custom_data(_conn, logger: [fields: names]),
    do: %{logger: Map.new(names, fn name -> {name, Logger.metadata()[name]} end)}

  defp build_custom_data(conn, :assigns),
    do: %{assigns: conn.assigns()}

  defp build_custom_data(conn, assigns: [fields: names]),
    do: %{assigns: Map.new(names, fn name -> {name, conn.assigns[name]} end)}

  # A list of options: merge each option's map into one result.
  defp build_custom_data(conn, options),
    do:
      Enum.reduce(options, %{}, fn opt, acc ->
        Map.merge(acc, build_custom_data(conn, opt))
      end)
end
|
lib/boom_notifier/error_info.ex
| 0.806052
| 0.416144
|
error_info.ex
|
starcoder
|
defmodule Nostrum.Struct.Channel do
  @moduledoc ~S"""
  Struct representing a Discord guild channel.

  A `Nostrum.Struct.Channel` represents all 5 types of channels. Each
  channel has a field `:type` with any of the following values:

  * `0` - GUILD_TEXT
  * `1` - DM
  * `2` - GUILD_VOICE
  * `3` - GROUP_DM
  * `4` - GUILD_CATEGORY

  More information can be found on the
  [Discord API Channel Documentation](https://discord.com/developers/docs/resources/channel#channel-object).

  ## Mentioning Channels in Messages

  A `Nostrum.Struct.Channel` can be mentioned in message content using the `String.Chars`
  protocol or `mention/1`.

  ```Elixir
  channel = %Nostrum.Struct.Channel{id: 381889573426429952}
  Nostrum.Api.create_message!(184046599834435585, "#{channel}")
  %Nostrum.Struct.Message{content: "<#381889573426429952>"}

  channel = %Nostrum.Struct.Channel{id: 280085880452939778}
  Nostrum.Api.create_message!(280085880452939778, "#{Nostrum.Struct.Channel.mention(channel)}")
  %Nostrum.Struct.Message{content: "<#280085880452939778>"}
  ```
  """

  alias Nostrum.Struct.{Channel, Guild, Message, Overwrite, User}
  alias Nostrum.{Snowflake, Util}

  # Which fields are populated depends on the channel's `:type`; see the
  # per-type typespecs (`t:guild_text_channel/0` etc.) below.
  defstruct [
    :id,
    :type,
    :guild_id,
    :position,
    :permission_overwrites,
    :name,
    :topic,
    :nsfw,
    :last_message_id,
    :bitrate,
    :user_limit,
    :recipients,
    :icon,
    :owner_id,
    :application_id,
    :parent_id,
    :last_pin_timestamp
  ]

  # Lets a channel be interpolated directly into message content as a
  # mention: `"#{channel}"` produces `"<#id>"`. `@for` is the module this
  # protocol is implemented for, i.e. `Nostrum.Struct.Channel`.
  defimpl String.Chars do
    def to_string(channel), do: @for.mention(channel)
  end

  @typedoc "The channel's id"
  @type id :: Snowflake.t()

  @typedoc "The id of the channel's guild"
  @type guild_id :: Guild.id()

  @typedoc "The ordered position of the channel"
  @type position :: integer

  @typedoc "The list of overwrites"
  @type permission_overwrites :: [Overwrite.t()]

  @typedoc "The name of the channel"
  @type name :: String.t()

  @typedoc "Current channel topic"
  @type topic :: String.t()

  @typedoc "If the channel is nsfw"
  @type nsfw :: boolean

  @typedoc "Id of the last message sent"
  @type last_message_id :: Message.id() | nil

  @typedoc "The bitrate of the voice channel"
  @type bitrate :: integer

  @typedoc "The user limit of the voice channel"
  @type user_limit :: integer

  @typedoc "The recipients of the DM"
  @type recipients :: [User.t()]

  @typedoc "The icon hash of the channel"
  @type icon :: String.t() | nil

  @typedoc "The id of the DM creator"
  @type owner_id :: User.id()

  @typedoc "The application id of the group DM creator if it is bot-created"
  @type application_id :: Snowflake.t() | nil

  @typedoc "The id of the parent category for a channel"
  @type parent_id :: Channel.id() | nil

  @typedoc "When the last pinned message was pinned"
  @type last_pin_timestamp :: String.t() | nil

  @typedoc """
  A `Nostrum.Struct.Channel` that represents a text channel in a guild.
  """
  @type guild_text_channel :: %__MODULE__{
          id: id,
          type: 0,
          guild_id: guild_id,
          position: position,
          permission_overwrites: permission_overwrites,
          name: name,
          topic: topic,
          nsfw: nsfw,
          last_message_id: last_message_id,
          bitrate: nil,
          user_limit: nil,
          recipients: nil,
          icon: nil,
          owner_id: nil,
          application_id: nil,
          parent_id: parent_id,
          last_pin_timestamp: last_pin_timestamp
        }

  @typedoc """
  A `Nostrum.Struct.Channel` that represents a DM channel.
  """
  @type dm_channel :: %__MODULE__{
          id: id,
          type: 1,
          guild_id: nil,
          position: nil,
          permission_overwrites: nil,
          name: nil,
          topic: nil,
          nsfw: nil,
          last_message_id: last_message_id,
          bitrate: nil,
          user_limit: nil,
          recipients: recipients,
          icon: nil,
          owner_id: nil,
          application_id: nil,
          parent_id: nil,
          last_pin_timestamp: nil
        }

  @typedoc """
  A `Nostrum.Struct.Channel` that represents a voice channel in a guild.
  """
  @type guild_voice_channel :: %__MODULE__{
          id: id,
          type: 2,
          guild_id: guild_id,
          position: position,
          permission_overwrites: permission_overwrites,
          name: name,
          topic: nil,
          nsfw: nsfw,
          last_message_id: nil,
          bitrate: bitrate,
          user_limit: user_limit,
          recipients: nil,
          icon: nil,
          owner_id: nil,
          application_id: nil,
          parent_id: parent_id,
          last_pin_timestamp: nil
        }

  @typedoc """
  A `Nostrum.Struct.Channel` that represents a group DM channel.
  """
  @type group_dm_channel :: %__MODULE__{
          id: id,
          type: 3,
          guild_id: nil,
          position: nil,
          permission_overwrites: nil,
          name: name,
          topic: nil,
          nsfw: nil,
          last_message_id: last_message_id,
          bitrate: nil,
          user_limit: nil,
          recipients: recipients,
          icon: icon,
          owner_id: owner_id,
          application_id: application_id,
          parent_id: nil,
          last_pin_timestamp: nil
        }

  @typedoc """
  A `Nostrum.Struct.Channel` that represents a channel category in a guild.
  """
  @type channel_category :: %__MODULE__{
          id: id,
          type: 4,
          guild_id: guild_id,
          position: position,
          permission_overwrites: permission_overwrites,
          name: name,
          topic: nil,
          nsfw: nsfw,
          last_message_id: nil,
          bitrate: nil,
          user_limit: nil,
          recipients: nil,
          icon: nil,
          owner_id: nil,
          application_id: nil,
          parent_id: parent_id,
          last_pin_timestamp: nil
        }

  @typedoc """
  A `Nostrum.Struct.Channel` that represents a channel in a guild.
  """
  @type guild_channel ::
          guild_text_channel
          | guild_voice_channel
          | channel_category

  @typedoc """
  A `Nostrum.Struct.Channel` that represents a text channel.
  """
  @type text_channel ::
          guild_text_channel
          | dm_channel
          | group_dm_channel

  @typedoc """
  A `Nostrum.Struct.Channel` that represents a voice channel.
  """
  @type voice_channel :: guild_voice_channel

  @type t ::
          guild_text_channel
          | dm_channel
          | guild_voice_channel
          | group_dm_channel
          | channel_category

  @doc ~S"""
  Formats a `Nostrum.Struct.Channel` into a mention.

  ## Examples

  ```Elixir
  iex> channel = %Nostrum.Struct.Channel{id: 381889573426429952}
  ...> Nostrum.Struct.Channel.mention(channel)
  "<#381889573426429952>"
  ```
  """
  @spec mention(t) :: String.t()
  def mention(%__MODULE__{id: id}), do: "<##{id}>"

  # Sample struct used internally for encoding round-trip checks; hidden from docs.
  @doc false
  def p_encode do
    %__MODULE__{
      permission_overwrites: [Overwrite.p_encode()]
    }
  end

  # Builds a channel struct from a decoded-JSON map: keys are atomized,
  # snowflake fields are cast via `Util.cast/2`, and nested maps become their
  # corresponding structs. Hidden from docs (internal API).
  @doc false
  def to_struct(map) do
    new =
      map
      |> Map.new(fn {k, v} -> {Util.maybe_to_atom(k), v} end)
      |> Map.update(:id, nil, &Util.cast(&1, Snowflake))
      |> Map.update(:guild_id, nil, &Util.cast(&1, Snowflake))
      |> Map.update(:permission_overwrites, nil, &Util.cast(&1, {:list, {:struct, Overwrite}}))
      |> Map.update(:last_message_id, nil, &Util.cast(&1, Snowflake))
      |> Map.update(:recipients, nil, &Util.cast(&1, {:list, {:struct, User}}))
      |> Map.update(:owner_id, nil, &Util.cast(&1, Snowflake))
      |> Map.update(:application_id, nil, &Util.cast(&1, Snowflake))
      |> Map.update(:parent_id, nil, &Util.cast(&1, Snowflake))

    struct(__MODULE__, new)
  end
end
|
lib/nostrum/struct/channel.ex
| 0.908275
| 0.74425
|
channel.ex
|
starcoder
|
defmodule Absinthe.Type.Directive do
  @moduledoc """
  Used by the GraphQL runtime as a way of modifying execution
  behavior.

  Type system creators will usually not create these directly.
  """

  alias Absinthe.Type

  use Absinthe.Introspection.Kind

  @typedoc """
  A defined directive.

  * `:name` - The name of the directive. Should be a lowercase `binary`. Set automatically.
  * `:description` - A nice description for introspection.
  * `:args` - A map of `Absinthe.Type.Argument` structs. See `Absinthe.Schema.Notation.arg/1`.
  * `:on` - A list of places the directives can be used (can be `:operation`, `:fragment`, `:field`).
  * `:instruction` - A function that, given an argument, returns an instruction for the correct action to take

  The `:__reference__` key is for internal use.
  """
  @type t :: %{name: binary, description: binary, args: map, on: [atom], instruction: ((map) -> atom), __reference__: Type.Reference.t}

  defstruct name: nil, description: nil, args: nil, on: [], instruction: nil, __reference__: nil

  @doc """
  Builds the quoted struct expression for a directive definition.

  Ensures each declared argument carries a `:__reference__` (falling back to
  the argument's own attrs) before handing the list to `Type.Argument.build/1`.
  """
  def build(%{attrs: attrs}) do
    args = attrs
    |> Keyword.get(:args, [])
    |> Enum.map(fn
      {name, attrs} ->
        {name, ensure_reference(attrs, attrs[:__reference__])}
    end)
    |> Type.Argument.build

    attrs = Keyword.put(attrs, :args, args)

    # Produce the struct literal as quoted code; it is spliced into the
    # schema module at compile time.
    quote do: %unquote(__MODULE__){unquote_splicing(attrs)}
  end

  # Keep an existing :__reference__ untouched; otherwise fall back to the
  # supplied default. `Keyword.put_new/3` is exactly this check-then-put.
  defp ensure_reference(arg_attrs, default_reference) do
    Keyword.put_new(arg_attrs, :__reference__, default_reference)
  end

  # Whether the directive is active in `place`
  @doc false
  @spec on?(t, atom) :: boolean
  def on?(%{on: places}, place) do
    Enum.member?(places, place)
  end

  # Check a directive and return an instruction
  @doc false
  @spec check(t, Absinthe.Language.t, map) :: atom
  def check(definition, %{__struct__: place}, args) do
    # Only run the instruction when the directive applies to this AST node
    # type and an instruction function was actually defined.
    if on?(definition, place) && definition.instruction do
      definition.instruction.(args)
    else
      :ok
    end
  end
end
|
lib/absinthe/type/directive.ex
| 0.860765
| 0.546496
|
directive.ex
|
starcoder
|
defmodule AdventOfCode2021.Day2 do
  @moduledoc """
  Advent of Code 2021
  Day 2

  Parses a list of submarine commands ("forward 5", "down 3", ...) and
  computes the final position per the part 1 and part 2 rules.
  """

  @doc """
  Solves part 1: follow the planned course and multiply horizontal
  position by depth.
  """
  @spec part1() :: integer
  def part1() do
    "./assets/day2.txt"
    |> read_commands()
    |> follow_planned_course()
    |> multiply_h_pos_by_depth()
  end

  @doc """
  Reads `filename` and parses each line into a `{direction, units}` tuple.

  `trim: true` drops empty lines (e.g. a trailing newline), which would
  otherwise produce an empty tuple and crash the parser.
  """
  @spec read_commands(String.t()) :: [{String.t(), pos_integer}]
  def read_commands(filename) do
    filename
    |> File.read!()
    |> String.split("\n", trim: true)
    |> Stream.map(&String.split/1)
    |> Stream.map(&List.to_tuple/1)
    |> Enum.map(fn {dir, units} -> {dir, String.to_integer(units)} end)
  end

  @doc """
  Applies every command in order starting from `h_pos: 0, depth: 0`.
  """
  @spec follow_planned_course([{String.t(), pos_integer}]) :: %{h_pos: integer, depth: integer}
  def follow_planned_course(commands) do
    Enum.reduce(commands, %{h_pos: 0, depth: 0}, &move(&1, &2))
  end

  @doc """
  Applies a single part-1 command: `forward` moves horizontally, `up`
  decreases depth, `down` increases depth.
  """
  @spec move({String.t(), pos_integer}, %{h_pos: integer, depth: integer}) :: %{
          h_pos: integer,
          depth: integer
        }
  def move({"forward", units}, %{h_pos: h, depth: d}), do: %{h_pos: h + units, depth: d}
  def move({"up", units}, %{h_pos: h, depth: d}), do: %{h_pos: h, depth: d - units}
  def move({"down", units}, %{h_pos: h, depth: d}), do: %{h_pos: h, depth: d + units}

  @doc """
  Multiplies horizontal position by depth — the puzzle's answer format.
  """
  @spec multiply_h_pos_by_depth(map) :: integer
  def multiply_h_pos_by_depth(%{h_pos: h, depth: d}), do: h * d

  @doc """
  Solves part 2: follow the aim-adjusted course and multiply horizontal
  position by depth.
  """
  @spec part2() :: integer
  def part2() do
    "./assets/day2.txt"
    |> read_commands()
    |> follow_adjusted_planned_course()
    |> multiply_h_pos_by_depth()
  end

  @doc """
  Applies every command in order under part-2 rules, starting from
  `h_pos: 0, depth: 0, aim: 0`.
  """
  @spec follow_adjusted_planned_course([{String.t(), pos_integer}]) :: %{
          h_pos: integer,
          depth: integer,
          aim: integer
        }
  def follow_adjusted_planned_course(commands) do
    Enum.reduce(commands, %{h_pos: 0, depth: 0, aim: 0}, &adjusted_move(&1, &2))
  end

  @doc """
  Applies a single part-2 command: `up`/`down` change aim only; `forward`
  moves horizontally and changes depth by `units * aim`.
  """
  @spec adjusted_move({String.t(), pos_integer}, %{h_pos: integer, depth: integer, aim: integer}) ::
          %{
            h_pos: integer,
            depth: integer,
            aim: integer
          }
  def adjusted_move({"forward", units}, %{h_pos: h, depth: d, aim: a}),
    do: %{h_pos: h + units, depth: d + units * a, aim: a}

  def adjusted_move({"up", units}, %{h_pos: h, depth: d, aim: a}),
    do: %{h_pos: h, depth: d, aim: a - units}

  def adjusted_move({"down", units}, %{h_pos: h, depth: d, aim: a}),
    do: %{h_pos: h, depth: d, aim: a + units}
end
|
lib/day2.ex
| 0.878327
| 0.49823
|
day2.ex
|
starcoder
|
defmodule Iyzico.Iyzipay do
  @moduledoc """
  A module containing payment related functions.

  ## Making a payment

  In order to process a payment, one needs to create a `Iyzico.PaymentRequest`
  struct, which consists of a payment card,
  a buyer, two seperate addresses for shipping and billing and basket
  information (aka *items*).

  ```
  payment_request =
    %PaymentRequest{
      locale: @current_locale,
      conversation_id: "123456789",
      price: "0.5",
      paid_price: "0.7",
      currency: :try,
      basket_id: "B67832",
      payment_channel: :web,
      payment_group: :product,
      payment_card: card,
      installment: 1,
      buyer: buyer,
      shipping_address: shipping_address,
      billing_address: billing_address,
      basket_items: [
        binocular_item,
        game_item
      ]
    }
  ```

  With that `Iyzico.PaymentRequest`, it is straightforward to process the
  request.

  ```
  {:ok, payment, metadata} = process_payment_req(payment_request)
  ```

  #### 3D Secure support

  Authenticity of a transaction can be enhanced using *3D Secure* feature,
  which is optional, although some associations might require the use of
  *3D Secure* explicitly.
  *3D Secure* based transaction could performed with
  `process_secure_payment_req/2` function, which is analogical to its insecure
  friend `process_payment_req/3`.

  ## Making a secure payment

  Processing a secure payment is on par with insecure payments, what is more,
  secure payments require a callback URL
  since remote authority will finalize the transaction by making a call to
  given URL.

  #### Instantiation

  ```
  payment_request =
    %SecurePaymentRequest{
      locale: @current_locale,
      conversation_id: "123456789",
      price: "0.5",
      paid_price: "0.7",
      currency: :try,
      basket_id: "B67832",
      payment_channel: :web,
      payment_group: :product,
      payment_card: card,
      installment: 1,
      buyer: buyer,
      shipping_address: shipping_address,
      billing_address: billing_address,
      basket_items: [
        binocular_item,
        game_item
      ],
      callback_url: "https://some.domain.to/be-specified/"
    }

  {:ok, artifact, metadata} = init_secure_payment_req(payment_request)
  ```

  #### Finalization

  ```
  handle =
    %SecurePaymentHandle{
      conversation_id: "123456789",
      payment_id: "10533265",
      conversation_data: "some data"
    }

  {:ok, payment, metadata} = finalize_secure_payment_req(handle)
  ```

  ## Post-payment operations

  After payment is successfully completed, it can be revoked (cancelled) or
  refunded.
  A revoke operation deletes the payment and can be utilized if and only if
  transaction has not reconciliated by the bank, which often happens at the
  end of a day.
  Successful revoke operations are invisible in card statement.
  **Some regulations applied by banks on transactions might restrict
  cancellation operations.**
  Refund operations could also be performed in order to pay back specified
  amount of funds and can be performed in any time, without any restrictions.
  Merchants are able to refund up to full amount of the transaction, and
  able to do it with proportions of the amount.
  Multiple refund operations could be performed by making sequential calls.

  ## Discussion

  Although utilization of *3D secure* featured transactions become overwhelming
  in terms of duration of the payment it is highly discouraged to perform
  insecure transactions directly, especially without concerning about customer's
  consent.
  Secure transactions involve two-factor authentication provided by
  associations, hence displacing the responsibility of
  the developer to be not concerned about authenticity of the credit card
  information.

  ## Common options

  - `:api_key`: API key to be used in authentication, optional. Configuration
  is used instead if not supplied.
  - `:api_secret`: API secret key to be used in authentication. Configuration
  is used instead if not supplied.
  """
  import Iyzico.Client
  import Iyzico.CompileTime

  alias Iyzico.Payment
  alias Iyzico.Transaction
  alias Iyzico.ConvertedPayout
  alias Iyzico.Metadata
  alias Iyzico.Card
  alias Iyzico.CardReference
  alias Iyzico.SecurePaymentArtifact
  alias Iyzico.RevokePaymentRequest
  alias Iyzico.RefundPaymentRequest

  @typedoc "Supported transaction currencies."
  @type currency :: :try

  # Read at compile time on purpose: static_assert_tuple/1 validates the
  # configured server IP while the module is being compiled.
  @server_ip Keyword.get(Application.get_env(:iyzico, Iyzico), :server_ip, nil)
  static_assert_tuple(@server_ip)

  @doc """
  Processes the given payment request on the remote API.

  ## Options

  See common options.
  """
  @spec process_payment_req(Iyzico.PaymentRequest.t, Keyword.t) ::
    {:ok, Iyzico.Payment.t, Iyzico.Metadata.t} |
    {:error, atom}
  def process_payment_req(payment_request = %Iyzico.PaymentRequest{}, opts \\ []) do
    case request([], :post, url_for_path("/payment/auth"), [], payment_request, opts) do
      {:ok, resp} ->
        if resp["status"] == "success",
          do: serialize_resp(resp),
          else: handle_error(resp)
      any ->
        any
    end
  end

  @doc """
  Same as `process_payment_req/1`, but raises an
  `Iyzico.PaymentProcessingError` exception in case of failure.
  Otherwise returns successfully processed payment.
  """
  def process_payment_req!(payment = %Iyzico.PaymentRequest{}, opts \\ []) do
    case process_payment_req(payment, opts) do
      {:ok, payment, metadata} ->
        {payment, metadata}
      {:error, code} ->
        raise Iyzico.PaymentProcessingError, code: code
    end
  end

  @doc """
  Instantiates the given secure payment request on the remote API.

  ## Options

  See common options.
  """
  @spec init_secure_payment_req(Iyzico.SecurePaymentRequest.t, Keyword.t) ::
    {:ok, Iyzico.SecurePaymentArtifact.t, Iyzico.Metadata.t} |
    {:error, atom}
  def init_secure_payment_req(payment_request = %Iyzico.SecurePaymentRequest{}, opts \\ []) do
    case request([], :post, url_for_path("/payment/3dsecure/initialize"), [], payment_request, opts) do
      {:ok, resp} ->
        # The 3DS challenge page arrives Base64-encoded in the response.
        secure_payment_artifact =
          %SecurePaymentArtifact{
            conversation_id: resp["conversationId"],
            page_body: Base.decode64!(resp["threeDSHtmlContent"])}

        {:ok, secure_payment_artifact, metadata_from_resp(resp)}
      any ->
        any
    end
  end

  @doc """
  Finalizes a valid secure payment artifact on the remote API.

  ## Options

  See common options.
  """
  @spec finalize_secure_payment_req(Iyzico.SecurePaymentHandle.t, Keyword.t) ::
    {:ok, Iyzico.Payment.t, Iyzico.Metadata.t} |
    {:error, atom}
  def finalize_secure_payment_req(handle = %Iyzico.SecurePaymentHandle{}, opts \\ []) do
    case request([], :post, url_for_path("/payment/3dsecure/auth"), [], handle, opts) do
      {:ok, resp} ->
        if resp["status"] == "success",
          do: serialize_resp(resp),
          else: handle_error(resp)
      any ->
        any
    end
  end

  @doc """
  Revokes an existing payment on the remote API.

  Returns `{:error, :unowned}` if payment is not owned by the API user.

  ## Options

  See common options.
  """
  @spec revoke_payment(binary, binary, Keyword.t) ::
    {:ok, Iyzico.Metadata.t} |
    {:error, :unowned}
  def revoke_payment(payment_id, conversation_id, opts \\ [])
    when is_binary(payment_id) and is_binary(conversation_id) do
    revoke = %RevokePaymentRequest{
      conversation_id: conversation_id,
      payment_id: payment_id,
      ip: @server_ip
    }

    case request([], :post, url_for_path("/payment/cancel"), [], revoke, opts) do
      {:ok, resp} ->
        if resp["status"] == "success" do
          {:ok, metadata_from_resp(resp)}
        else
          handle_error(resp)
        end
      any ->
        any
    end
  end

  @doc """
  Same as `revoke_payment/3`, but raises `Iyzico.InternalInconsistencyError` if
  there was an error.
  """
  @spec revoke_payment!(binary, binary, Keyword.t) ::
    Iyzico.Metadata.t |
    no_return
  def revoke_payment!(payment_id, conversation_id, opts \\ []) do
    case revoke_payment(payment_id, conversation_id, opts) do
      {:ok, metadata} ->
        metadata
      {:error, code} ->
        raise Iyzico.InternalInconsistencyError, code: code
    end
  end

  @doc """
  Refunds a payment of a successful transaction by given amount.

  ## Options

  See common options.
  """
  @spec refund_payment(binary, binary, binary, currency, Keyword.t) ::
    {:ok, Iyzico.Metadata.t} |
    {:error, :excessive_funds} |
    {:error, :unowned}
  def refund_payment(transaction_id, conversation_id, price, currency, opts \\ [])
    when is_binary(transaction_id) and is_binary(conversation_id) and
         is_binary(price) do
    refund =
      %RefundPaymentRequest{
        conversation_id: conversation_id,
        transaction_id: transaction_id,
        price: price,
        ip: @server_ip,
        currency: currency
      }

    case request([], :post, url_for_path("/payment/refund"), [], refund, opts) do
      {:ok, resp} ->
        if resp["status"] == "success" do
          {:ok, metadata_from_resp(resp)}
        else
          handle_error(resp)
        end
      any ->
        any
    end
  end

  @doc """
  Same as `refund_payment/5`, but raises `Iyzico.InternalInconsistencyError` if
  there was an error.
  """
  @spec refund_payment!(binary, binary, binary, currency, Keyword.t) ::
    Iyzico.Metadata.t |
    no_return
  def refund_payment!(transaction_id, conversation_id, price, currency, opts \\ []) do
    case refund_payment(transaction_id, conversation_id, price, currency, opts) do
      {:ok, metadata} ->
        metadata
      {:error, code} ->
        raise Iyzico.InternalInconsistencyError, code: code
    end
  end

  # Maps remote error codes to descriptive error atoms; any unknown code is
  # treated as an internal inconsistency and raises.
  defp handle_error(%{"errorCode" => "5093"}), do: {:error, :excessive_funds}
  defp handle_error(%{"errorCode" => "5115"}), do: {:error, :unavail}
  defp handle_error(%{"errorCode" => "5086"}), do: {:error, :unowned}
  defp handle_error(%{"errorCode" => "5092"}), do: {:error, :unowned}
  defp handle_error(%{"errorCode" => "10051"}), do: {:error, :insufficient_funds}
  defp handle_error(%{"errorCode" => "10005"}), do: {:error, :do_not_honor}
  defp handle_error(%{"errorCode" => "10057"}), do: {:error, :holder_permit}
  defp handle_error(%{"errorCode" => "10058"}), do: {:error, :terminal}
  defp handle_error(%{"errorCode" => "6001"}), do: {:error, :stolen}
  defp handle_error(%{"errorCode" => "10034"}), do: {:error, :fraud}
  defp handle_error(%{"errorCode" => "10054"}), do: {:error, :expired}
  defp handle_error(%{"errorCode" => "10084"}), do: {:error, :invalid_cvc}
  defp handle_error(%{"errorCode" => "10012"}), do: {:error, :invalid}
  defp handle_error(%{"errorCode" => "10202"}), do: {:error, nil}
  defp handle_error(_), do: raise Iyzico.InternalInconsistencyError

  # Builds the common response metadata struct; every endpoint returns the
  # same envelope fields, so this is shared by all serializers above.
  defp metadata_from_resp(resp) do
    %Metadata{
      system_time: resp["systemTime"],
      succeed?: resp["status"] == "success",
      phase: resp["phase"],
      locale: resp["locale"],
      auth_code: resp["authCode"]}
  end

  # Converts a successful payment response into {:ok, Payment.t, Metadata.t},
  # translating each item transaction and the card reference on the way.
  defp serialize_resp(resp) do
    transactions =
      resp["itemTransactions"]
      |> Enum.map(fn x ->
        %Transaction{
          blockage_rate: x["blockageRate"],
          merchant_blockage_amount: x["blockageRateAmountMerchant"],
          submerchant_blockage_amount: x["blockageRateAmountSubMerchant"],
          resolution_date: x["blockageResolvedDate"],
          converted_payout: %ConvertedPayout{
            merchant_blockage_amount: x["convertedPayout"]["blockageRateAmountMerchant"],
            submerchant_blockage_amount: x["convertedPayout"]["blockageRateAmountSubMerchant"],
            currency: x["convertedPayout"]["currency"],
            commission_fee: x["convertedPayout"]["iyziCommissionFee"],
            commission_amount: x["convertedPayout"]["iyziCommissionRateAmount"],
            conversion_rate: x["convertedPayout"]["iyziConversionRate"],
            conversion_cost: x["convertedPayout"]["iyziConversionRateAmount"],
            merchant_payout_amount: x["convertedPayout"]["merchantPayoutAmount"],
            paid_price: x["convertedPayout"]["paidPrice"],
            submerchant_payout_amount: x["convertedPayout"]["subMerchantPayoutAmount"]
          },
          item_id: x["itemId"],
          commission_amount: x["iyziCommissionRateAmount"],
          commission_fee: x["iyziCommissionFee"],
          merchant_commission_rate: x["merchantCommissionRate"],
          merchant_commission_amount: x["merchantCommissionRateAmount"],
          merchant_payout_amount: x["merchantPayoutAmount"],
          paid_price: x["paidPrice"],
          id: x["paymentTransactionId"],
          price: x["price"],
          submerchant_payout_amount: x["subMerchantPayoutAmount"],
          submerchant_payout_rate: x["subMerchantPayoutRate"],
          submerchant_price: x["subMerchantPrice"],
          transaction_status: Transaction.to_transaction_status(x["transactionStatus"])}
      end)

    payment =
      %Payment{
        basket_id: resp["basketId"],
        bin_id: resp["binNumber"],
        card_ref: %CardReference{
          assoc: Card.get_card_assoc(resp["cardAssociation"]),
          family: Card.get_card_family(resp["cardFamily"]),
          type: Card.get_card_type(resp["cardType"]),
          user_key: resp["cardUserKey"],
          token: resp["cardToken"],
        },
        conversation_id: resp["conversationId"],
        currency: resp["currency"] |> String.downcase() |> String.to_atom(),
        fraud_status: Payment.to_fraud_status(resp["fraudStatus"]),
        installment: resp["installment"],
        transactions: transactions,
        commission_fee: resp["iyziCommissionFee"],
        commission_amount: resp["iyziCommissionRateAmount"],
        last_four_digits: resp["lastFourDigits"],
        merchant_commission_rate: resp["merchantCommissionRate"],
        merchant_commission_amount: resp["merchantCommissionRateAmount"],
        paid_price: resp["paidPrice"],
        price: resp["price"],
        id: resp["paymentId"]}

    {:ok, payment, metadata_from_resp(resp)}
  end
end
|
lib/endpoint/iyzipay.ex
| 0.87766
| 0.778818
|
iyzipay.ex
|
starcoder
|
defmodule EctoIPRange.Util.Range do
  @moduledoc false

  # Imports only the named bitwise functions (band/2, bsl/2, bsr/2) used
  # below, without the operator forms (&&&, <<<, >>>).
  use Bitwise, skip_operators: true

  alias EctoIPRange.Util.Inet

  @doc """
  Create a CIDR (if possible) or range notation for two IPv4 tuples.

  ## Examples

      iex> parse_ipv4({1, 2, 3, 4}, {1, 2, 3, 4})
      "1.2.3.4/32"

      iex> parse_ipv4({127, 0, 0, 0}, {127, 0, 0, 255})
      "127.0.0.0/24"

      iex> parse_ipv4({1, 2, 0, 1}, {1, 2, 0, 0})
      "1.2.0.1-1.2.0.0"

      iex> parse_ipv4({1, 2, 3, 4}, {2, 3, 4, 5})
      "1.2.3.4-2.3.4.5"
  """
  @spec parse_ipv4(:inet.ip4_address(), :inet.ip4_address()) :: binary | :error
  # First and last address are identical: a single host, i.e. a /32.
  def parse_ipv4(ip4_address, ip4_address) do
    case Inet.ntoa(ip4_address) do
      ip when is_binary(ip) -> ip <> "/32"
      _ -> :error
    end
  end

  def parse_ipv4(first_ip4_address, last_ip4_address) do
    # Render both addresses; use CIDR notation when the span maps onto a
    # single contiguous prefix, otherwise fall back to "first-last".
    with first_ip when is_binary(first_ip) <- Inet.ntoa(first_ip4_address),
         last_ip when is_binary(last_ip) <- Inet.ntoa(last_ip4_address) do
      case netmask_ipv4(first_ip4_address, last_ip4_address) do
        nil -> first_ip <> "-" <> last_ip
        maskbits -> first_ip <> "/" <> Integer.to_string(maskbits)
      end
    else
      _ -> :error
    end
  end

  @doc """
  Create a CIDR (if possible) or range notation for two IPv6 tuples.

  ## Examples

      iex> parse_ipv6({1, 2, 3, 4, 5, 6, 7, 8}, {1, 2, 3, 4, 5, 6, 7, 8})
      "1:2:3:4:5:6:7:8/128"

      iex> parse_ipv6({1, 2, 3, 4, 0, 0, 0, 0}, {1, 2, 3, 4, 0, 0, 0, 65_535})
      "1:2:3:4::/112"

      iex> parse_ipv6({1, 2, 3, 4, 5, 6, 7, 1}, {1, 2, 3, 4, 5, 6, 7, 0})
      "1:2:3:4:5:6:7:1-1:2:3:4:5:6:7:0"

      iex> parse_ipv6({1, 2, 3, 4, 5, 6, 7, 8}, {2, 3, 4, 5, 6, 7, 8, 9})
      "1:2:3:4:5:6:7:8-2:3:4:5:6:7:8:9"
  """
  @spec parse_ipv6(:inet.ip6_address(), :inet.ip6_address()) :: binary | :error
  # First and last address are identical: a single host, i.e. a /128.
  def parse_ipv6(ip6_address, ip6_address) do
    case Inet.ntoa(ip6_address) do
      ip when is_binary(ip) -> ip <> "/128"
      _ -> :error
    end
  end

  def parse_ipv6(first_ip6_address, last_ip6_address) do
    # Same strategy as parse_ipv4/2, with 16-bit segments and /128 width.
    with first_ip when is_binary(first_ip) <- Inet.ntoa(first_ip6_address),
         last_ip when is_binary(last_ip) <- Inet.ntoa(last_ip6_address) do
      case netmask_ipv6(first_ip6_address, last_ip6_address) do
        nil -> first_ip <> "-" <> last_ip
        maskbits -> first_ip <> "/" <> Integer.to_string(maskbits)
      end
    else
      _ -> :error
    end
  end

  # Reverses the octets (least-significant first) so the recursion scans from
  # the low end of the address upward, accumulating host ("range") bits.
  defp netmask_ipv4({first_a, first_b, first_c, first_d}, {last_a, last_b, last_c, last_d}) do
    netmask_ipv4([first_d, first_c, first_b, first_a], [last_d, last_c, last_b, last_a], 0)
  end

  # All octets consumed: prefix length = address width minus host bits.
  defp netmask_ipv4([], [], rangebits), do: 32 - rangebits
  # Current octets equal: everything above here is shared prefix.
  defp netmask_ipv4([first | _], [first | _], rangebits), do: 32 - rangebits

  defp netmask_ipv4([first | first_parts], [last | last_parts], rangebits) do
    # Walk this octet's bits LSB-first: positions where `first` has 0 and
    # `last` has 1 are host bits ({:cont, acc + 1}); once the two bits agree
    # the prefix begins ({:halt, acc}); any other pattern means the span is
    # not a single contiguous CIDR block ({:halt, nil}).
    partbits =
      Enum.reduce_while(0..7, 0, fn bit, acc ->
        first_bit = band(first, bsl(1, bit))
        last_bit = band(last, bsl(1, bit))

        cond do
          0 == first_bit and 0 != last_bit -> {:cont, acc + 1}
          bsr(first_bit, bit) == bsr(last_bit, bit) -> {:halt, acc}
          true -> {:halt, nil}
        end
      end)

    cond do
      partbits == nil -> nil
      # Entire octet is host bits: continue scanning the higher octets.
      partbits == 8 -> netmask_ipv4(first_parts, last_parts, rangebits + partbits)
      # The remaining higher octets must match exactly for a valid prefix.
      first_parts == last_parts -> 32 - (rangebits + partbits)
      true -> nil
    end
  end

  # IPv6 analogue of netmask_ipv4/2: eight 16-bit segments, reversed so the
  # scan runs least-significant segment first.
  defp netmask_ipv6(
         {first_a, first_b, first_c, first_d, first_e, first_f, first_g, first_h},
         {last_a, last_b, last_c, last_d, last_e, last_f, last_g, last_h}
       ) do
    netmask_ipv6(
      [first_h, first_g, first_f, first_e, first_d, first_c, first_b, first_a],
      [last_h, last_g, last_f, last_e, last_d, last_c, last_b, last_a],
      0
    )
  end

  # All segments consumed: prefix length = 128 minus host bits.
  defp netmask_ipv6([], [], rangebits), do: 128 - rangebits
  # Current segments equal: everything above here is shared prefix.
  defp netmask_ipv6([first | _], [first | _], rangebits), do: 128 - rangebits

  defp netmask_ipv6([first | first_parts], [last | last_parts], rangebits) do
    # Same LSB-first bit walk as the IPv4 clause, over 16 bits per segment.
    partbits =
      Enum.reduce_while(0..15, 0, fn bit, acc ->
        first_bit = band(first, bsl(1, bit))
        last_bit = band(last, bsl(1, bit))

        cond do
          0 == first_bit and 0 != last_bit -> {:cont, acc + 1}
          bsr(first_bit, bit) == bsr(last_bit, bit) -> {:halt, acc}
          true -> {:halt, nil}
        end
      end)

    cond do
      partbits == nil -> nil
      # Entire segment is host bits: continue with the higher segments.
      partbits == 16 -> netmask_ipv6(first_parts, last_parts, rangebits + partbits)
      # The remaining higher segments must match exactly for a valid prefix.
      first_parts == last_parts -> 128 - (rangebits + partbits)
      true -> nil
    end
  end
end
|
lib/ecto_ip_range/util/range.ex
| 0.727395
| 0.558628
|
range.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.