| code | path | quality_prob | learning_prob | filename | kind |
|---|---|---|---|---|---|
defmodule Picam do
@moduledoc """
This module contains functions to manipulate, capture, and stream
MJPEG video on a Raspberry Pi using the camera module.
"""
@camera Application.get_env(:picam, :camera, Picam.Camera)
@doc """
Returns a binary with the contents of a single JPEG frame from the camera.
"""
def next_frame do
GenServer.call(@camera, :next_frame)
end
@doc """
Set the image size. One of the dimensions may be set
to 0 to auto-calculate it based on the aspect ratio of
the camera.
"""
def set_size(width, height)
when is_integer(width) and is_integer(height) and (width > 0 or height > 0),
do: set("size=#{width},#{height}")
def set_size(_width, _height), do: {:error, :invalid_size}
@doc """
Annotate the JPEG frames with the text in `annotation`.
"""
def set_annotation_text(annotation \\ "")
def set_annotation_text(annotation) when is_binary(annotation),
do: set("annotation=#{annotation}")
def set_annotation_text(_other), do: {:error, :invalid_annotation}
@doc """
Enable or disable a black background behind the annotation.
"""
def set_annotation_bg(false), do: set("anno_background=off")
def set_annotation_bg(true), do: set("anno_background=on")
def set_annotation_bg(_other), do: {:error, :invalid_annotation_bg}
@doc """
Set the image sharpness.
The accepted range is [-100, 100].
"""
def set_sharpness(sharpness \\ 0)
def set_sharpness(sharpness) when sharpness in -100..100, do: set("sharpness=#{sharpness}")
def set_sharpness(_other), do: {:error, :invalid_sharpness}
@doc """
Set the image contrast.
The accepted range is [-100, 100].
"""
def set_contrast(contrast \\ 0)
def set_contrast(contrast) when contrast in -100..100, do: set("contrast=#{contrast}")
def set_contrast(_other), do: {:error, :invalid_contrast}
@doc """
Set the image brightness.
The accepted range is [0, 100].
"""
def set_brightness(brightness \\ 50)
def set_brightness(brightness) when brightness in 0..100, do: set("brightness=#{brightness}")
def set_brightness(_other), do: {:error, :invalid_brightness}
@doc """
Set the image saturation.
The accepted range is [-100, 100].
"""
def set_saturation(saturation \\ 0)
def set_saturation(saturation) when saturation in -100..100, do: set("saturation=#{saturation}")
def set_saturation(_other), do: {:error, :invalid_saturation}
@doc """
Set the capture ISO.
The accepted range is [0, 800].
If the `iso` given is 0, it will be automatically regulated by the camera.
"""
def set_iso(iso \\ 0)
def set_iso(iso) when iso in 0..800, do: set("ISO=#{iso}")
def set_iso(_other), do: {:error, :invalid_iso}
@doc """
Enable or disable video stabilization.
"""
def set_vstab(false), do: set("vstab=off")
def set_vstab(true), do: set("vstab=on")
def set_vstab(_other), do: {:error, :invalid_vstab}
@doc """
Set the exposure compensation (EV) level.
The accepted range is [-25, 25].
"""
def set_ev(ev \\ 0)
def set_ev(ev) when ev in -25..25, do: set("ev=#{ev}")
def set_ev(_other), do: {:error, :invalid_ev}
@doc """
Set the exposure mode.
The accepted modes are:
* `:auto`
* `:night`
* `:nightpreview`
* `:backlight`
* `:spotlight`
* `:sports`
* `:snow`
* `:beach`
* `:verylong`
* `:fixedfps`
* `:antishake`
* `:fireworks`
"""
@exposure_modes [
:auto,
:night,
:nightpreview,
:backlight,
:spotlight,
:sports,
:snow,
:beach,
:verylong,
:fixedfps,
:antishake,
:fireworks
]
def set_exposure_mode(mode \\ :auto)
def set_exposure_mode(mode) when mode in @exposure_modes, do: set("exposure=#{mode}")
def set_exposure_mode(_other), do: {:error, :unknown_exposure_mode}
@doc """
Limit the frame rate to the given `rate`.
The accepted range is [0.0, 90.0], but the actual rate used is governed
by the current `sensor_mode`.
If the `rate` given is 0 (or 0.0), frame rate will be automatically regulated.
"""
def set_fps(rate \\ 0)
def set_fps(rate) when is_integer(rate) and rate in 0..90, do: set_fps(:erlang.float(rate))
def set_fps(rate) when is_float(rate) and rate >= 0.0 and rate <= 90.0, do: set("fps=#{rate}")
def set_fps(_other), do: {:error, :invalid_frame_rate}
@doc """
Set the Automatic White Balance (AWB) mode.
The accepted modes are:
* `:off`
* `:auto`
* `:sun`
* `:cloud`
* `:shade`
* `:tungsten`
* `:fluorescent`
* `:incandescent`
* `:flash`
* `:horizon`
"""
@awb_modes [
:off,
:auto,
:sun,
:cloud,
:shade,
:tungsten,
:fluorescent,
:incandescent,
:flash,
:horizon
]
def set_awb_mode(mode \\ :auto)
def set_awb_mode(mode) when mode in @awb_modes, do: set("awb=#{mode}")
def set_awb_mode(_other), do: {:error, :unknown_awb_mode}
@doc """
Set the image effect.
The accepted effects are:
* `:none`
* `:negative`
* `:solarise`
* `:sketch`
* `:denoise`
* `:emboss`
* `:oilpaint`
* `:hatch`
* `:gpen`
* `:pastel`
* `:watercolor`
* `:film`
* `:blur`
* `:saturation`
* `:colorswap`
* `:washedout`
* `:posterise`
* `:colorpoint`
* `:colorbalance`
* `:cartoon`
"""
@img_effects [
:none,
:negative,
:solarise,
:sketch,
:denoise,
:emboss,
:oilpaint,
:hatch,
:gpen,
:pastel,
:watercolour,
:watercolor,
:film,
:blur,
:saturation,
:colourswap,
:colorswap,
:washedout,
:posterise,
:colourpoint,
:colorpoint,
:colourbalance,
:colorbalance,
:cartoon
]
def set_img_effect(effect \\ :none)
def set_img_effect(effect) when effect in @img_effects, do: set("imxfx=#{effect}")
def set_img_effect(_other), do: {:error, :unknown_image_effect}
@doc """
Set the color effect applied by the camera.
The effect is set with the tuple `{u,v}`.
The accepted range for both values is [0, 255].
If the `effect` given is `:none`, color effects will be disabled.
## Examples
iex> Picam.set_col_effect({128,128}) # Black and white
:ok
"""
def set_col_effect(effect \\ :none)
def set_col_effect({u, v}) when u in 0..255 and v in 0..255, do: set("colfx=#{u}:#{v}")
def set_col_effect(:none), do: set("colfx=")
def set_col_effect(_other), do: {:error, :invalid_color_effect}
@doc """
Set the sensor mode.
Details on the accepted modes (0-7) are listed in the tables below:
## V1 Camera Module
| # | Resolution | Ratio | FPS Range | Video | Image | FoV | Binning |
|---|------------|-------|-----------|-------|-------|---------|---------|
| 1 | 1920x1080 | 16:9 | (1, 30] | Y | | Partial | None |
| 2 | 2592x1944 | 4:3 | (1, 15] | Y | Y | Full | None |
| 3 | 2592x1944 | 4:3 | [0.16, 1] | Y | Y | Full | None |
| 4 | 1296x972 | 4:3 | (1, 42] | Y | | Full | 2x2 |
| 5 | 1296x730 | 16:9 | (1, 49] | Y | | Full | 2x2 |
| 6 | 640x480 | 4:3 | (42, 60] | Y | | Full | 4x4 |
| 7 | 640x480 | 4:3 | (60, 90] | Y | | Full | 4x4 |
## V2 Camera Module
| # | Resolution | Ratio | FPS Range | Video | Image | FoV | Binning |
|---|------------|-------|------------|-------|-------|---------|---------|
| 1 | 1920x1080 | 16:9 | [0.10, 30] | Y | | Partial | None |
| 2 | 3280x2464 | 4:3 | [0.10, 15] | Y | N | Full | None |
| 3 | 3280x2464 | 4:3 | [0.10, 15] | Y | N | Full | None |
| 4 | 1640x1232 | 4:3 | [0.10, 40] | Y | | Full | 2x2 |
| 5 | 1640x922 | 16:9 | [0.10, 40] | Y | | Full | 2x2 |
| 6 | 1280x720 | 16:9 | (40, 90] | Y | | Partial | 2x2 |
| 7 | 640x480 | 4:3 | (40, 90] | Y | | Partial | 2x2 |
If the `mode` given is 0, the camera will select a mode automatically.
"""
def set_sensor_mode(mode \\ 0)
def set_sensor_mode(mode) when mode in 0..7, do: set("mode=#{mode}")
def set_sensor_mode(_other), do: {:error, :unknown_sensor_mode}
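# Illustrative settings (not part of the original module): 640x480 at 90 fps
# corresponds to sensor mode 7 in both camera module tables above.
#
#     Picam.set_sensor_mode(7)
#     Picam.set_size(640, 480)
#     Picam.set_fps(90)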
@doc """
Set the metering mode.
The accepted modes are:
* `:average`
* `:spot`
* `:backlit`
* `:matrix`
"""
@metering_modes [:average, :spot, :backlit, :matrix]
def set_metering_mode(mode \\ :average)
def set_metering_mode(mode) when mode in @metering_modes, do: set("metering=#{mode}")
def set_metering_mode(_other), do: {:error, :unknown_metering_mode}
@doc """
Set the image rotation angle in degrees.
The accepted angles are 0, 90, 180, or 270.
"""
def set_rotation(angle \\ 0)
def set_rotation(angle) when angle in [0, 90, 180, 270], do: set("rotation=#{angle}")
def set_rotation(_other), do: {:error, :invalid_rotation_angle}
@doc """
Flip the image horizontally.
"""
def set_hflip(false), do: set("hflip=off")
def set_hflip(true), do: set("hflip=on")
def set_hflip(_other), do: {:error, :invalid_hflip}
@doc """
Flip the image vertically.
"""
def set_vflip(false), do: set("vflip=off")
def set_vflip(true), do: set("vflip=on")
def set_vflip(_other), do: {:error, :invalid_vflip}
@doc """
Set a region of interest, given as `x,y,w,h` in normalized coordinates ([0.0, 1.0]).
"""
def set_roi(roi \\ "0:0:1:1")
def set_roi(roi) when is_binary(roi), do: set("roi=#{roi}")
def set_roi(_other), do: {:error, :invalid_roi}
@doc """
Set the shutter speed in microseconds.
If the `speed` given is 0, it will be automatically regulated.
"""
def set_shutter_speed(speed \\ 0)
def set_shutter_speed(speed) when is_integer(speed) and speed >= 0, do: set("shutter=#{speed}")
def set_shutter_speed(_other), do: {:error, :invalid_shutter_speed}
@doc """
Set the JPEG quality.
The accepted range is [1, 100].
"""
def set_quality(quality \\ 15)
def set_quality(quality) when quality in 1..100, do: set("quality=#{quality}")
def set_quality(_other), do: {:error, :invalid_quality}
@doc """
Set the JPEG restart interval.
If the `interval` given is 0, restart intervals will not be used.
"""
def set_restart_interval(interval \\ 0)
def set_restart_interval(interval) when is_integer(interval) and interval >= 0,
do: set("restart_interval=#{interval}")
def set_restart_interval(_other), do: {:error, :invalid_restart_interval}
# Private helper functions
defp set(msg) do
GenServer.cast(@camera, {:set, msg})
end
end
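
A minimal usage sketch (not part of the module above). It assumes a `Picam.Camera`
worker is already running under your supervision tree and that the output path is
writable:

```elixir
defmodule FrameGrabber do
  # Configure the camera, grab one JPEG frame, and write it to disk.
  def save_frame(path \\ "/tmp/frame.jpg") do
    Picam.set_size(1280, 720)
    Picam.set_quality(90)
    frame = Picam.next_frame()
    File.write!(path, frame)
  end
end
```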
| lib/picam.ex | 0.92976 | 0.68092 | picam.ex | starcoder |
defmodule Surface.Components.Link do
@moduledoc """
Generates a link to the given URL.
Provides similar capabilities to Phoenix's built-in `link/2` function.
Options `label` and `class` can be set directly and will override anything in `opts`.
All other options are forwarded to the underlying <a> tag.
## Examples
```
<Link
label="user"
to="/users/1"
class="is-danger"
opts={{ method: :delete, data: [confirm: "Really?"] }}
/>
<Link
to="/users/1"
class="is-link"
>
<span>user</span>
</Link>
```
"""
use Surface.Component
import Surface.Components.Utils
@doc "The page to link to"
prop to, :any, required: true
@doc "The method to use with the link"
prop method, :atom, default: :get
@doc "Id to apply to the link"
prop id, :string
@doc "Class or classes to apply to the link"
prop class, :css_class
@doc """
The label for the generated `<a>` element, if no content (default slot) is provided.
"""
prop label, :string
@doc "Triggered when the component loses focus"
prop blur, :event
@doc "Triggered when the component receives focus"
prop focus, :event
@doc "Triggered when the component receives click"
prop capture_click, :event
@doc "Triggered when a button on the keyboard is pressed"
prop keydown, :event
@doc "Triggered when a button on the keyboard is released"
prop keyup, :event
@doc """
Additional attributes to add onto the generated element
"""
prop opts, :keyword, default: []
@doc """
The content of the generated `<a>` element. If no content is provided,
the value of property `label` is used instead.
"""
slot default
def update(assigns, socket) do
valid_label!(assigns)
{:ok, assign(socket, assigns)}
end
def render(assigns) do
to = valid_destination!(assigns.to, "<Link />")
opts = apply_method(to, assigns.method, assigns.opts) ++ events_to_opts(assigns)
attrs = opts_to_attrs(opts)
~H"""
<a id={{ @id }} class={{ @class }} href={{ to }} :attrs={{ attrs }}><slot>{{ @label }}</slot></a>
"""
end
defp valid_label!(assigns) do
unless assigns[:default] || assigns[:label] || Keyword.get(assigns.opts, :label) do
raise ArgumentError, "<Link /> requires a label prop or contents in the default slot"
end
end
defp apply_method(to, method, opts) do
if method == :get do
skip_csrf(opts)
else
{csrf_data, opts} = csrf_data(to, opts)
opts = Keyword.put_new(opts, :rel, "nofollow")
[data: [method: method, to: to] ++ csrf_data] ++ opts
end
end
end
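
A hedged sketch of using the component from a parent Surface component. The
`UserCard` module and the shape of the `user` prop are illustrative assumptions;
the template syntax mirrors the `{{ }}` style used in the docs above:

```elixir
defmodule UserCard do
  use Surface.Component

  alias Surface.Components.Link

  @doc "The user to render (assumed to be a map with :id and :name keys)"
  prop user, :map, required: true

  def render(assigns) do
    ~H"""
    <Link to={{ "/users/#{@user.id}" }} class="is-link">
      <span>{{ @user.name }}</span>
    </Link>
    """
  end
end
```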
| lib/surface/components/link.ex | 0.916006 | 0.844473 | link.ex | starcoder |
defmodule Cartographer.Encoder do
@moduledoc """
Cartographer.Encoder is a module which provides a way to encode coordinates into
a geohash with a given precision, measured in characters.
It uses a finite alphabet to represent geocoordinates in a human-readable form.
"""
@min_lat -90
@max_lat +90
@min_lng -180
@max_lng +180
@doc """
Encodes provided latitude (as `lat`) and longitude (as `lng`)
with desired length (in characters) to a geohash, which uses
standard `base32` alphabet.
iex> Cartographer.Encoder.to_base32_geohash(0.0, 0.0, -1)
** (FunctionClauseError) no function clause matching in Cartographer.Encoder.to_base32_geohash/3
iex> Cartographer.Encoder.to_base32_geohash(0.0, 0.0, 0)
** (FunctionClauseError) no function clause matching in Cartographer.Encoder.to_base32_geohash/3
iex> Cartographer.Encoder.to_base32_geohash(-90.1, 0.0, 0)
** (FunctionClauseError) no function clause matching in Cartographer.Encoder.to_base32_geohash/3
iex> Cartographer.Encoder.to_base32_geohash(90.1, 0.0, 0)
** (FunctionClauseError) no function clause matching in Cartographer.Encoder.to_base32_geohash/3
iex> Cartographer.Encoder.to_base32_geohash(0, -180.1, 0)
** (FunctionClauseError) no function clause matching in Cartographer.Encoder.to_base32_geohash/3
iex> Cartographer.Encoder.to_base32_geohash(0, 180.1, 0)
** (FunctionClauseError) no function clause matching in Cartographer.Encoder.to_base32_geohash/3
iex> Cartographer.Encoder.to_base32_geohash(0.0, 0.0)
"s000"
iex> Cartographer.Encoder.to_base32_geohash(-1.0, -1.0)
"7zz6"
iex> Cartographer.Encoder.to_base32_geohash(1.0, 1.0)
"s00t"
iex> Cartographer.Encoder.to_base32_geohash(90.0, 180.0)
"zzzz"
iex> Cartographer.Encoder.to_base32_geohash(-90.0, -180.0)
"0000"
iex> Cartographer.Encoder.to_base32_geohash(10.0, 10.0)
"s1z0"
iex> Cartographer.Encoder.to_base32_geohash(52.2333, 21.0167, 9)
"u3qcnhzch"
iex> Cartographer.Encoder.to_base32_geohash(57.64911, 10.40744, 11)
"u4pruydqqvj"
"""
def to_base32_geohash(lat, lng, length \\ 4)
when length > 0 and
lat >= @min_lat and
lat <= @max_lat and
lng >= @min_lng and
lng <= @max_lng
do
result = _encode(0, length * Cartographer.base32_size, @min_lat, @max_lat, @min_lng, @max_lng, lat, lng, <<>>)
Cartographer.to_geohash(result)
end
defp _encode(i, precision, _minLat, _maxLat, _minLng, _maxLng, _lat, _lng, result) when i >= precision do
result
end
defp _encode(i, precision, minLat, maxLat, minLng, maxLng, lat, lng, result) when rem(i, 2) == 0 do
midpoint = (minLng + maxLng) / 2
if lng < midpoint do
_encode(i + 1, precision, minLat, maxLat, minLng, midpoint, lat, lng, <<result::bitstring, 0::1>>)
else
_encode(i + 1, precision, minLat, maxLat, midpoint, maxLng, lat, lng, <<result::bitstring, 1::1>>)
end
end
defp _encode(i, precision, minLat, maxLat, minLng, maxLng, lat, lng, result) when rem(i, 2) == 1 do
midpoint = (minLat + maxLat) / 2
if lat < midpoint do
_encode(i + 1, precision, minLat, midpoint, minLng, maxLng, lat, lng, <<result::bitstring, 0::1>>)
else
_encode(i + 1, precision, midpoint, maxLat, minLng, maxLng, lat, lng, <<result::bitstring, 1::1>>)
end
end
end
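
A small usage sketch (it assumes the rest of the Cartographer package, in
particular `Cartographer.base32_size/0` and `Cartographer.to_geohash/1`
referenced above, is available):

```elixir
# Encode a couple of coordinates at increasing precision.
# Both points also appear in the doctests above.
points = [
  {52.2333, 21.0167},
  {57.64911, 10.40744}
]

for {lat, lng} <- points, precision <- [4, 7, 11] do
  {lat, lng, precision, Cartographer.Encoder.to_base32_geohash(lat, lng, precision)}
end
```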
| lib/cartographer/encoder.ex | 0.80329 | 0.582164 | encoder.ex | starcoder |
defmodule K8s.Client.Runner.Base do
@moduledoc """
Base HTTP processor for `K8s.Client`
"""
@type result :: {:ok, map() | reference()} | {:error, atom} | {:error, binary()}
alias K8s.Cluster
alias K8s.Conf.RequestOptions
alias K8s.Operation
@doc """
Runs a `K8s.Operation`.
## Examples
*Note:* Examples assume a cluster was registered named "test-cluster", see `K8s.Cluster.register/2`.
Running a list pods operation:
```elixir
operation = K8s.Client.list("v1", "Pod", namespace: :all)
{:ok, %{"items" => pods}} = K8s.Client.run(operation, "test-cluster")
```
Running a dry-run of a create deployment operation:
```elixir
deployment = %{
"apiVersion" => "apps/v1",
"kind" => "Deployment",
"metadata" => %{
"labels" => %{
"app" => "nginx"
},
"name" => "nginx",
"namespace" => "test"
},
"spec" => %{
"replicas" => 2,
"selector" => %{
"matchLabels" => %{
"app" => "nginx"
}
},
"template" => %{
"metadata" => %{
"labels" => %{
"app" => "nginx"
}
},
"spec" => %{
"containers" => %{
"image" => "nginx",
"name" => "nginx"
}
}
}
}
}
operation = K8s.Client.create(deployment)
# opts is passed to HTTPoison as opts.
opts = [params: %{"dryRun" => "all"}]
{:ok, _result} = K8s.Client.Runner.Base.run(operation, "test-cluster", opts)
```
"""
@spec run(Operation.t(), nil | binary | atom) :: result
def run(operation = %Operation{}, cluster_name \\ "default"),
do: run(operation, cluster_name, [])
@doc """
Run an operation and pass `opts` to HTTPoison.
See `run/2`
"""
@spec run(Operation.t(), binary | atom, keyword()) :: result
def run(operation = %Operation{}, cluster_name, opts) when is_list(opts) do
run(operation, cluster_name, operation.resource, opts)
end
@doc """
Run an operation with an alternative HTTP Body (map) and pass `opts` to HTTPoison.
See `run/2`
"""
@spec run(Operation.t(), binary | atom, map(), keyword()) :: result
def run(operation = %Operation{}, cluster_name, body, opts \\ []) do
case Cluster.url_for(operation, cluster_name) do
{:ok, url} ->
{:ok, conf} = Cluster.conf(cluster_name)
request_options = RequestOptions.generate(conf)
http_headers = K8s.http_provider().headers(request_options)
http_opts = Keyword.merge([ssl: request_options.ssl_options], opts)
case encode(body, operation.method) do
{:ok, http_body} ->
K8s.http_provider().request(operation.method, url, http_body, http_headers, http_opts)
{:error, error} ->
{:error, error}
end
{:error, error} ->
{:error, error}
end
end
@spec encode(any(), atom()) :: {:ok, binary} | {:error, any}
def encode(body, http_method) when http_method in [:put, :patch, :post] do
Jason.encode(body)
end
def encode(_, _), do: {:ok, ""}
end
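
A hedged sketch of driving the runner with basic error handling. It assumes a
cluster named "test-cluster" has been registered via `K8s.Cluster.register/2`, as
in the moduledoc examples; the `PodLister` module itself is illustrative:

```elixir
defmodule PodLister do
  # List the names of pods in a namespace, tolerating request failures.
  def pod_names(namespace \\ "default") do
    operation = K8s.Client.list("v1", "Pod", namespace: namespace)

    case K8s.Client.Runner.Base.run(operation, "test-cluster") do
      {:ok, %{"items" => items}} ->
        {:ok, Enum.map(items, fn pod -> pod["metadata"]["name"] end)}

      {:error, reason} ->
        {:error, reason}
    end
  end
end
```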
| lib/k8s/client/runner/base.ex | 0.912048 | 0.46223 | base.ex | starcoder |
defmodule JSONC do
@moduledoc """
This is a package for decoding [jsonc](https://komkom.github.io/jsonc-playground) documents and also transcoding them to json.
I'm also working on adding a formatter.
jsonc is a superset of json, which means any json document is also a jsonc document, but the reverse is not true.
So you can use this package to decode json documents too.
jsonc allows you to have single-line and multi-line comments in your documents using `//` and `/* */`. These comments can appear anywhere.
You can also have multi-line strings that don't need escaping, using `` ` `` (backtick).
Also, strings and object keys may be unquoted, but any unquoted string that can be interpreted as a number (whether integer or float) will be treated as such.
You also don't need commas to separate key-value pairs in objects or elements in arrays; whitespace is enough.
This is an example of a valid jsonc document:
```
// a valid jsonc document
{
/* you can have
multi-line comments
*/
key1 /* object keys can be unquoted */ : value // you don't need a comma here
key2: [ 25.23e-5 74 unquoated_string ]
key3: `this
is a
multi-line string`,
"regular_key": "regular_string"
}
```
I should say that right now, the performance for large documents is not acceptable, so use this package only when
human readability is more important than performance, like using a jsonc file to specify environment variables
for your app (for example with [enux](https://hex.pm/packages/enux)).
But I'll definitely keep working on optimising the parser, or maybe writing a new parser if needed.
## Installation
```
defp deps do
[
{:jsonc, "~> 0.8.0"}
]
end
```
## Usage
```
iex(1)> JSONC.decode!(~s(// language information \\n { name: elixir github_stars: 19.8e3 forks: 2.8e3 creator: "<NAME>" 😔 : 😃 }))
%{
"creator" => "<NAME>",
"forks" => 2.8e3,
"github_stars" => 1.98e4,
"name" => "elixir",
"😔" => "😃"
}
iex(2)> JSONC.transcode!(~s(// language information \\n { name: elixir github_stars: 19.8e3 forks: 2.8e3 creator: "<NAME>" 😔 : 😃 }))
"{\\n \\"name\\": \"elixir\\",\\n \\"github_stars\\": 1.98e4,\\n \\"forks\\": 2.8e3,\\n \\"creator\": \\"<NAME>\\"\\n \"😔\": \"😃\"\\n}"
```
"""
@doc delegate_to: {JSONC.Decoder, :decode!, 1}
defdelegate decode!(content), to: JSONC.Decoder
@doc delegate_to: {JSONC.Decoder, :decode, 1}
defdelegate decode(content), to: JSONC.Decoder
@doc delegate_to: {JSONC.Transcoder, :transcode!, 1}
defdelegate transcode!(content), to: JSONC.Transcoder
@doc delegate_to: {JSONC.Transcoder, :transcode, 1}
defdelegate transcode(content), to: JSONC.Transcoder
end
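
A small sketch of the intended use case from the moduledoc, loading a jsonc
configuration file (the `config/app.jsonc` path is an assumption):

```elixir
# Decode a jsonc configuration file into a map, or transcode it to plain json.
config =
  "config/app.jsonc"
  |> File.read!()
  |> JSONC.decode!()

json_string = JSONC.transcode!(File.read!("config/app.jsonc"))
```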
| lib/jsonc.ex | 0.910984 | 0.766381 | jsonc.ex | starcoder |
defmodule Elixium.KeyPair do
alias Elixium.Mnemonic
use Bitwise
require Integer
@algorithm :ecdh
@sigtype :ecdsa
@curve :secp256k1
@hashtype :sha256
@store "keys"
@moduledoc """
All the functions responsible for creating keypairs and using them to sign
data / verify signatures
"""
@doc """
Creates a new keypair and stores the private key in a keyfile. Returns the
public and private key
"""
@spec create_keypair :: {binary, binary}
def create_keypair do
keypair = :crypto.generate_key(@algorithm, @curve)
create_keyfile(keypair)
end
@doc """
Reads in a private key from the given file, and returns a tuple with the
public and private key
"""
@spec get_from_file(String.t()) :: {binary, binary}
def get_from_file(path) do
{:ok, private} = File.read(path)
:crypto.generate_key(@algorithm, @curve, private)
end
@doc """
Creates a new mnemonic to give to users based off private key
"""
@spec create_mnemonic(binary) :: String.t()
def create_mnemonic(private), do: Mnemonic.from_entropy(private)
@doc """
Generates a keypair from either a mnemonic seed phrase or a raw private key.
If the input contains a space, it is treated as a mnemonic phrase to import the key from; otherwise it is treated as a private key.
"""
@spec gen_keypair(String.t() | binary) :: {binary, binary}
def gen_keypair(phrase) do
if String.contains?(phrase, " ") do
private = Mnemonic.to_entropy(phrase)
{pub, priv} = get_from_private(private)
create_keyfile({pub, priv})
else
{pub, priv} = get_from_private(phrase)
create_keyfile({pub, priv})
end
end
@spec sign(binary, String.t()) :: String.t()
def sign(private_key, data) do
:crypto.sign(@sigtype, @hashtype, data, [private_key, @curve])
end
@spec verify_signature(binary, binary, String.t()) :: boolean
def verify_signature(public_key, signature, data) do
:crypto.verify(@sigtype, @hashtype, data, signature, [public_key, @curve])
end
@doc """
Using a public address, fetch the correct keyfile and return only the private key
"""
@spec get_priv_from_file(String.t()) :: binary
def get_priv_from_file(pub) do
unix_address = Elixium.Store.store_path(@store)
key_path = "#{unix_address}/#{pub}.key"
{_, priv} = get_from_file(key_path)
priv
end
@doc """
Returns a 4 byte checksum of the provided pubkey
"""
@spec checksum(String.t(), binary) :: binary
def checksum(version, compressed_pubkey) do
<<check::bytes-size(4), _::bits>> = :crypto.hash(:sha256, version <> compressed_pubkey)
check
end
@doc """
Generates a Base58 encoded compressed address based on a public key.
First 3 bytes of the address are the version number of the address, and last
4 bytes of the address are the checksum of the public key. This checksum
allows for address validation, i.e. checking mistyped addresses before creating
a transaction.
"""
@spec address_from_pubkey(binary) :: String.t()
def address_from_pubkey(pubkey) do
version = Application.get_env(:elixium_core, :address_version)
compressed_pubkey = compress_pubkey(pubkey)
addr =
compressed_pubkey <> checksum(version, compressed_pubkey)
|> Base58.encode()
version <> addr
end
@doc """
Compresses an ECDSA public key from 65 bytes to 33 bytes by discarding
the y coordinate.
"""
@spec compress_pubkey(binary) :: binary
def compress_pubkey(<<4, x::bytes-size(32), y::bytes-size(32)>>) do
y_even =
y
|> :binary.decode_unsigned()
|> Integer.is_even()
prefix = if y_even, do: <<2>>, else: <<3>>
prefix <> x
end
@doc """
Returns the uncompressed public key stored within the given address.
"""
@spec address_to_pubkey(String.t()) :: binary
def address_to_pubkey(address) do
<<_key_version::bytes-size(3)>> <> addr = address
<<prefix::bytes-size(1), x::bytes-size(32), _checksum::binary>> = Base58.decode(addr)
y = calculate_y_from_x(x, prefix)
<<4>> <> x <> y
end
def get_from_private(private) do
:crypto.generate_key(@algorithm, @curve, private)
end
@spec create_keyfile({binary, binary}) :: {binary, binary}
defp create_keyfile({public, private}) do
unix_address = Elixium.Store.store_path(@store)
if !File.dir?(unix_address), do: File.mkdir(unix_address)
address = address_from_pubkey(public)
File.write!("#{unix_address}/#{address}.key", private)
{public, private}
end
# Adapted from stackoverflow answer
# https://stackoverflow.com/questions/43629265/deriving-an-ecdsa-uncompressed-public-key-from-a-compressed-one/43654055
defp calculate_y_from_x(x, prefix) do
p =
"FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F"
|> Base.decode16!()
|> :binary.decode_unsigned()
y_square_root =
x
|> :crypto.mod_pow(3, p)
|> :binary.decode_unsigned()
|> Kernel.+(7)
|> mod(p)
|> :crypto.mod_pow(Integer.floor_div(p + 1, 4), p)
|> :binary.decode_unsigned
y =
if (prefix == <<2>> && (y_square_root &&& 1) != 0) || (prefix == <<3>> && ((y_square_root &&& 1) == 0)) do
mod(-y_square_root, p)
else
y_square_root
end
:binary.encode_unsigned(y)
end
# Erlang rem/2 is not the same as modulus. This is true modulus
defp mod(x, y) when x > 0, do: rem(x, y)
defp mod(x, y) when x < 0, do: rem(x + y, y)
defp mod(0, _y), do: 0
end
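
A usage sketch tying the pieces together. It assumes the `:elixium_core`
`:address_version` config is set and the key store directory is writable (since
`create_keypair/0` writes a keyfile); the message content is arbitrary:

```elixir
# Generate a keypair, derive its Base58 address, then sign and verify a message.
{public, private} = Elixium.KeyPair.create_keypair()

address = Elixium.KeyPair.address_from_pubkey(public)

message = "hello elixium"
signature = Elixium.KeyPair.sign(private, message)
true = Elixium.KeyPair.verify_signature(public, signature, message)

# The mnemonic can later be handed to gen_keypair/1 to re-import the key.
mnemonic = Elixium.KeyPair.create_mnemonic(private)
{public_again, private_again} = Elixium.KeyPair.gen_keypair(mnemonic)
```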
| lib/keypair.ex | 0.726231 | 0.460653 | keypair.ex | starcoder |
defmodule AWS.ElasticLoadBalancingv2 do
@moduledoc """
Elastic Load Balancing
A load balancer distributes incoming traffic across targets, such as your
EC2 instances. This enables you to increase the availability of your
application. The load balancer also monitors the health of its registered
targets and ensures that it routes traffic only to healthy targets. You
configure your load balancer to accept incoming traffic by specifying one
or more listeners, which are configured with a protocol and port number for
connections from clients to the load balancer. You configure a target group
with a protocol and port number for connections from the load balancer to
the targets, and with health check settings to be used when checking the
health status of the targets.
Elastic Load Balancing supports the following types of load balancers:
Application Load Balancers, Network Load Balancers, and Classic Load
Balancers. This reference covers Application Load Balancers and Network
Load Balancers.
An Application Load Balancer makes routing and load balancing decisions at
the application layer (HTTP/HTTPS). A Network Load Balancer makes routing
and load balancing decisions at the transport layer (TCP/TLS). Both
Application Load Balancers and Network Load Balancers can route requests to
one or more ports on each EC2 instance or container instance in your
virtual private cloud (VPC). For more information, see the [Elastic Load
Balancing User
Guide](https://docs.aws.amazon.com/elasticloadbalancing/latest/userguide/).
All Elastic Load Balancing operations are idempotent, which means that they
complete at most one time. If you repeat an operation, it succeeds.
"""
@doc """
Adds the specified SSL server certificate to the certificate list for the
specified HTTPS or TLS listener.
If the certificate is already in the certificate list, the call is
successful but the certificate is not added again.
To get the certificate list for a listener, use
`DescribeListenerCertificates`. To remove certificates from the certificate
list for a listener, use `RemoveListenerCertificates`. To replace the
default certificate for a listener, use `ModifyListener`.
For more information, see [SSL
Certificates](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/create-https-listener.html#https-listener-certificates)
in the *Application Load Balancers Guide*.
"""
def add_listener_certificates(client, input, options \\ []) do
request(client, "AddListenerCertificates", input, options)
end
@doc """
Adds the specified tags to the specified Elastic Load Balancing resource.
You can tag your Application Load Balancers, Network Load Balancers, and
your target groups.
Each tag consists of a key and an optional value. If a resource already has
a tag with the same key, `AddTags` updates its value.
To list the current tags for your resources, use `DescribeTags`. To remove
tags from your resources, use `RemoveTags`.
"""
def add_tags(client, input, options \\ []) do
request(client, "AddTags", input, options)
end
@doc """
Creates a listener for the specified Application Load Balancer or Network
Load Balancer.
To update a listener, use `ModifyListener`. When you are finished with a
listener, you can delete it using `DeleteListener`. If you are finished
with both the listener and the load balancer, you can delete them both
using `DeleteLoadBalancer`.
This operation is idempotent, which means that it completes at most one
time. If you attempt to create multiple listeners with the same settings,
each call succeeds.
For more information, see [Listeners for Your Application Load
Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/load-balancer-listeners.html)
in the *Application Load Balancers Guide* and [Listeners for Your Network
Load
Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/load-balancer-listeners.html)
in the *Network Load Balancers Guide*.
"""
def create_listener(client, input, options \\ []) do
request(client, "CreateListener", input, options)
end
@doc """
Creates an Application Load Balancer or a Network Load Balancer.
When you create a load balancer, you can specify security groups, public
subnets, IP address type, and tags. Otherwise, you could do so later using
`SetSecurityGroups`, `SetSubnets`, `SetIpAddressType`, and `AddTags`.
To create listeners for your load balancer, use `CreateListener`. To
describe your current load balancers, see `DescribeLoadBalancers`. When you
are finished with a load balancer, you can delete it using
`DeleteLoadBalancer`.
For limit information, see [Limits for Your Application Load
Balancer](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/load-balancer-limits.html)
in the *Application Load Balancers Guide* and [Limits for Your Network Load
Balancer](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/load-balancer-limits.html)
in the *Network Load Balancers Guide*.
This operation is idempotent, which means that it completes at most one
time. If you attempt to create multiple load balancers with the same
settings, each call succeeds.
For more information, see [Application Load
Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/application-load-balancers.html)
in the *Application Load Balancers Guide* and [Network Load
Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/network-load-balancers.html)
in the *Network Load Balancers Guide*.
"""
def create_load_balancer(client, input, options \\ []) do
request(client, "CreateLoadBalancer", input, options)
end
@doc """
Creates a rule for the specified listener. The listener must be associated
with an Application Load Balancer.
Rules are evaluated in priority order, from the lowest value to the highest
value. When the conditions for a rule are met, its actions are performed.
If the conditions for no rules are met, the actions for the default rule
are performed. For more information, see [Listener
Rules](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/load-balancer-listeners.html#listener-rules)
in the *Application Load Balancers Guide*.
To view your current rules, use `DescribeRules`. To update a rule, use
`ModifyRule`. To set the priorities of your rules, use `SetRulePriorities`.
To delete a rule, use `DeleteRule`.
"""
def create_rule(client, input, options \\ []) do
request(client, "CreateRule", input, options)
end
@doc """
Creates a target group.
To register targets with the target group, use `RegisterTargets`. To update
the health check settings for the target group, use `ModifyTargetGroup`. To
monitor the health of targets in the target group, use
`DescribeTargetHealth`.
To route traffic to the targets in a target group, specify the target group
in an action using `CreateListener` or `CreateRule`.
To delete a target group, use `DeleteTargetGroup`.
This operation is idempotent, which means that it completes at most one
time. If you attempt to create multiple target groups with the same
settings, each call succeeds.
For more information, see [Target Groups for Your Application Load
Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/load-balancer-target-groups.html)
in the *Application Load Balancers Guide* or [Target Groups for Your
Network Load
Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/load-balancer-target-groups.html)
in the *Network Load Balancers Guide*.
"""
def create_target_group(client, input, options \\ []) do
request(client, "CreateTargetGroup", input, options)
end
@doc """
Deletes the specified listener.
Alternatively, your listener is deleted when you delete the load balancer
to which it is attached, using `DeleteLoadBalancer`.
"""
def delete_listener(client, input, options \\ []) do
request(client, "DeleteListener", input, options)
end
@doc """
Deletes the specified Application Load Balancer or Network Load Balancer
and its attached listeners.
You can't delete a load balancer if deletion protection is enabled. If the
load balancer does not exist or has already been deleted, the call
succeeds.
Deleting a load balancer does not affect its registered targets. For
example, your EC2 instances continue to run and are still registered to
their target groups. If you no longer need these EC2 instances, you can
stop or terminate them.
"""
def delete_load_balancer(client, input, options \\ []) do
request(client, "DeleteLoadBalancer", input, options)
end
@doc """
Deletes the specified rule.
"""
def delete_rule(client, input, options \\ []) do
request(client, "DeleteRule", input, options)
end
@doc """
Deletes the specified target group.
You can delete a target group if it is not referenced by any actions.
Deleting a target group also deletes any associated health checks.
"""
def delete_target_group(client, input, options \\ []) do
request(client, "DeleteTargetGroup", input, options)
end
@doc """
Deregisters the specified targets from the specified target group. After
the targets are deregistered, they no longer receive traffic from the load
balancer.
"""
def deregister_targets(client, input, options \\ []) do
request(client, "DeregisterTargets", input, options)
end
@doc """
Describes the current Elastic Load Balancing resource limits for your AWS
account.
For more information, see [Limits for Your Application Load
Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/load-balancer-limits.html)
in the *Application Load Balancer Guide* or [Limits for Your Network Load
Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/load-balancer-limits.html)
in the *Network Load Balancers Guide*.
"""
def describe_account_limits(client, input, options \\ []) do
request(client, "DescribeAccountLimits", input, options)
end
@doc """
Describes the default certificate and the certificate list for the
specified HTTPS or TLS listener.
If the default certificate is also in the certificate list, it appears
twice in the results (once with `IsDefault` set to true and once with
`IsDefault` set to false).
For more information, see [SSL
Certificates](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/create-https-listener.html#https-listener-certificates)
in the *Application Load Balancers Guide*.
"""
def describe_listener_certificates(client, input, options \\ []) do
request(client, "DescribeListenerCertificates", input, options)
end
@doc """
Describes the specified listeners or the listeners for the specified
Application Load Balancer or Network Load Balancer. You must specify either
a load balancer or one or more listeners.
For an HTTPS or TLS listener, the output includes the default certificate
for the listener. To describe the certificate list for the listener, use
`DescribeListenerCertificates`.
"""
def describe_listeners(client, input, options \\ []) do
request(client, "DescribeListeners", input, options)
end
@doc """
Describes the attributes for the specified Application Load Balancer or
Network Load Balancer.
For more information, see [Load Balancer
Attributes](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/application-load-balancers.html#load-balancer-attributes)
in the *Application Load Balancers Guide* or [Load Balancer
Attributes](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/network-load-balancers.html#load-balancer-attributes)
in the *Network Load Balancers Guide*.
"""
def describe_load_balancer_attributes(client, input, options \\ []) do
request(client, "DescribeLoadBalancerAttributes", input, options)
end
@doc """
Describes the specified load balancers or all of your load balancers.
To describe the listeners for a load balancer, use `DescribeListeners`. To
describe the attributes for a load balancer, use
`DescribeLoadBalancerAttributes`.
"""
def describe_load_balancers(client, input, options \\ []) do
request(client, "DescribeLoadBalancers", input, options)
end
@doc """
Describes the specified rules or the rules for the specified listener. You
must specify either a listener or one or more rules.
"""
def describe_rules(client, input, options \\ []) do
request(client, "DescribeRules", input, options)
end
@doc """
Describes the specified policies or all policies used for SSL negotiation.
For more information, see [Security
Policies](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/create-https-listener.html#describe-ssl-policies)
in the *Application Load Balancers Guide*.
"""
def describe_s_s_l_policies(client, input, options \\ []) do
request(client, "DescribeSSLPolicies", input, options)
end
@doc """
Describes the tags for the specified resources. You can describe the tags
for one or more Application Load Balancers, Network Load Balancers, and
target groups.
"""
def describe_tags(client, input, options \\ []) do
request(client, "DescribeTags", input, options)
end
@doc """
Describes the attributes for the specified target group.
For more information, see [Target Group
Attributes](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/load-balancer-target-groups.html#target-group-attributes)
in the *Application Load Balancers Guide* or [Target Group
Attributes](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/load-balancer-target-groups.html#target-group-attributes)
in the *Network Load Balancers Guide*.
"""
def describe_target_group_attributes(client, input, options \\ []) do
request(client, "DescribeTargetGroupAttributes", input, options)
end
@doc """
Describes the specified target groups or all of your target groups. By
default, all target groups are described. Alternatively, you can specify
one of the following to filter the results: the ARN of the load balancer,
the names of one or more target groups, or the ARNs of one or more target
groups.
To describe the targets for a target group, use `DescribeTargetHealth`. To
describe the attributes of a target group, use
`DescribeTargetGroupAttributes`.
"""
def describe_target_groups(client, input, options \\ []) do
request(client, "DescribeTargetGroups", input, options)
end
@doc """
Describes the health of the specified targets or all of your targets.
"""
def describe_target_health(client, input, options \\ []) do
request(client, "DescribeTargetHealth", input, options)
end
@doc """
Replaces the specified properties of the specified listener. Any properties
that you do not specify remain unchanged.
Changing the protocol from HTTPS to HTTP, or from TLS to TCP, removes the
security policy and default certificate properties. If you change the
protocol from HTTP to HTTPS, or from TCP to TLS, you must add the security
policy and default certificate properties.
To add an item to a list, remove an item from a list, or update an item in
a list, you must provide the entire list. For example, to add an action,
specify a list with the current actions plus the new action.
"""
def modify_listener(client, input, options \\ []) do
request(client, "ModifyListener", input, options)
end
@doc """
Modifies the specified attributes of the specified Application Load
Balancer or Network Load Balancer.
If any of the specified attributes can't be modified as requested, the call
fails. Any existing attributes that you do not modify retain their current
values.
"""
def modify_load_balancer_attributes(client, input, options \\ []) do
request(client, "ModifyLoadBalancerAttributes", input, options)
end
@doc """
Replaces the specified properties of the specified rule. Any properties
that you do not specify are unchanged.
To add an item to a list, remove an item from a list, or update an item in
a list, you must provide the entire list. For example, to add an action,
specify a list with the current actions plus the new action.
To modify the actions for the default rule, use `ModifyListener`.
"""
def modify_rule(client, input, options \\ []) do
request(client, "ModifyRule", input, options)
end
@doc """
Modifies the health checks used when evaluating the health state of the
targets in the specified target group.
To monitor the health of the targets, use `DescribeTargetHealth`.
"""
def modify_target_group(client, input, options \\ []) do
request(client, "ModifyTargetGroup", input, options)
end
@doc """
Modifies the specified attributes of the specified target group.
"""
def modify_target_group_attributes(client, input, options \\ []) do
request(client, "ModifyTargetGroupAttributes", input, options)
end
@doc """
Registers the specified targets with the specified target group.
If the target is an EC2 instance, it must be in the `running` state when
you register it.
By default, the load balancer routes requests to registered targets using
the protocol and port for the target group. Alternatively, you can override
the port for a target when you register it. You can register each EC2
instance or IP address with the same target group multiple times using
different ports.
With a Network Load Balancer, you cannot register instances by instance ID
if they have the following instance types: C1, CC1, CC2, CG1, CG2, CR1,
CS1, G1, G2, HI1, HS1, M1, M2, M3, and T1. You can register instances of
these types by IP address.
To remove a target from a target group, use `DeregisterTargets`.
"""
def register_targets(client, input, options \\ []) do
request(client, "RegisterTargets", input, options)
end
@doc """
Removes the specified certificate from the certificate list for the
specified HTTPS or TLS listener.
You can't remove the default certificate for a listener. To replace the
default certificate, call `ModifyListener`.
To list the certificates for your listener, use
`DescribeListenerCertificates`.
"""
def remove_listener_certificates(client, input, options \\ []) do
request(client, "RemoveListenerCertificates", input, options)
end
@doc """
Removes the specified tags from the specified Elastic Load Balancing
resource.
To list the current tags for your resources, use `DescribeTags`.
"""
def remove_tags(client, input, options \\ []) do
request(client, "RemoveTags", input, options)
end
@doc """
Sets the type of IP addresses used by the subnets of the specified
Application Load Balancer or Network Load Balancer.
"""
def set_ip_address_type(client, input, options \\ []) do
request(client, "SetIpAddressType", input, options)
end
@doc """
Sets the priorities of the specified rules.
You can reorder the rules as long as there are no priority conflicts in the
new order. Any existing rules that you do not specify retain their current
priority.
"""
def set_rule_priorities(client, input, options \\ []) do
request(client, "SetRulePriorities", input, options)
end
@doc """
Associates the specified security groups with the specified Application
Load Balancer. The specified security groups override the previously
associated security groups.
You can't specify a security group for a Network Load Balancer.
"""
def set_security_groups(client, input, options \\ []) do
request(client, "SetSecurityGroups", input, options)
end
@doc """
Enables the Availability Zones for the specified public subnets for the
specified load balancer. The specified subnets replace the previously
enabled subnets.
When you specify subnets for a Network Load Balancer, you must include all
subnets that were enabled previously, with their existing configurations,
plus any additional subnets.
"""
def set_subnets(client, input, options \\ []) do
request(client, "SetSubnets", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, Poison.Parser.t() | nil, Poison.Response.t()}
| {:error, Poison.Parser.t()}
| {:error, HTTPoison.Error.t()}
defp request(client, action, input, options) do
client = %{client | service: "elasticloadbalancing"}
host = build_host("elasticloadbalancing", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-www-form-urlencoded"}
]
input = Map.merge(input, %{"Action" => action, "Version" => "2015-12-01"})
payload = AWS.Util.encode_query(input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
{:ok, nil, response}
{:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
{:ok, AWS.Util.decode_xml(body), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = AWS.Util.decode_xml(body)
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
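
A hedged usage sketch. The shape of `client` below is an assumption inferred from
the fields this module reads (`region`, `endpoint`, `proto`, `port`, `service`)
plus credential fields consumed by `AWS.Request.sign_v4/5`; build it however your
aws-elixir version expects:

```elixir
client = %{
  access_key_id: System.get_env("AWS_ACCESS_KEY_ID"),
  secret_access_key: System.get_env("AWS_SECRET_ACCESS_KEY"),
  region: "us-east-1",
  endpoint: "amazonaws.com",
  proto: "https",
  port: "443",
  service: nil
}

# Describe all load balancers; per the request/4 spec above, the decoded XML
# body is returned alongside the raw response.
{:ok, body, _response} = AWS.ElasticLoadBalancingv2.describe_load_balancers(client, %{})
```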
| lib/aws/elastic_load_balancingv2.ex | 0.913268 | 0.706656 | elastic_load_balancingv2.ex | starcoder |
defmodule AWS.DevOpsGuru do
@moduledoc """
Amazon DevOps Guru is a fully managed service that helps you identify anomalous
behavior in business critical operational applications.
You specify the Amazon Web Services resources that you want DevOps Guru to
cover, then the Amazon CloudWatch metrics and Amazon Web Services CloudTrail
events related to those resources are analyzed. When anomalous behavior is
detected, DevOps Guru creates an *insight* that includes recommendations,
related events, and related metrics that can help you improve your operational
applications. For more information, see [What is Amazon DevOps Guru](https://docs.aws.amazon.com/devops-guru/latest/userguide/welcome.html).
You can specify 1 or 2 Amazon Simple Notification Service topics so you are
notified every time a new insight is created. You can also enable DevOps Guru to
generate an OpsItem in Amazon Web Services Systems Manager for each insight to
help you manage and track your work addressing insights.
To learn about the DevOps Guru workflow, see [How DevOps Guru works](https://docs.aws.amazon.com/devops-guru/latest/userguide/welcome.html#how-it-works).
To learn about DevOps Guru concepts, see [Concepts in DevOps Guru](https://docs.aws.amazon.com/devops-guru/latest/userguide/concepts.html).
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2020-12-01",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "devops-guru",
global?: false,
protocol: "rest-json",
service_id: "DevOps Guru",
signature_version: "v4",
signing_name: "devops-guru",
target_prefix: nil
}
end
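# Usage sketch (an assumption, not part of the generated module): given a
# configured `AWS.Client` (credentials and region), each function below sends a
# signed rest-json request through `Request.request_rest/9`, for example:
#
#     AWS.DevOpsGuru.describe_account_health(client)
#     AWS.DevOpsGuru.describe_insight(client, insight_id)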
@doc """
Adds a notification channel to DevOps Guru.
A notification channel is used to notify you about important DevOps Guru events,
such as when an insight is generated.
If you use an Amazon SNS topic in another account, you must attach a policy to
it that grants DevOps Guru permission to send it notifications. DevOps Guru adds the
required policy on your behalf to send notifications using Amazon SNS in your
account. For more information, see [Permissions for cross account Amazon SNS topics](https://docs.aws.amazon.com/devops-guru/latest/userguide/sns-required-permissions.html).
If you use an Amazon SNS topic that is encrypted by an Amazon Web Services Key
Management Service customer-managed key (CMK), then you must add permissions to
the CMK. For more information, see [Permissions for Amazon Web Services KMS–encrypted Amazon SNS
topics](https://docs.aws.amazon.com/devops-guru/latest/userguide/sns-kms-permissions.html).
"""
def add_notification_channel(%Client{} = client, input, options \\ []) do
url_path = "/channels"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Returns the number of open reactive insights, the number of open proactive
insights, and the number of metrics analyzed in your Amazon Web Services
account.
Use these numbers to gauge the health of operations in your Amazon Web Services
account.
"""
def describe_account_health(%Client{} = client, options \\ []) do
url_path = "/accounts/health"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
For the time range passed in, returns the number of open reactive insights that
were created, the number of open proactive insights that were created, and the
Mean Time to Recover (MTTR) for all closed reactive insights.
"""
def describe_account_overview(%Client{} = client, input, options \\ []) do
url_path = "/accounts/overview"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Returns details about an anomaly that you specify using its ID.
"""
def describe_anomaly(%Client{} = client, id, account_id \\ nil, options \\ []) do
url_path = "/anomalies/#{AWS.Util.encode_uri(id)}"
headers = []
query_params = []
query_params =
if !is_nil(account_id) do
[{"AccountId", account_id} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns the most recent feedback submitted in the current Amazon Web Services
account and Region.
"""
def describe_feedback(%Client{} = client, input, options \\ []) do
url_path = "/feedback"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Returns details about an insight that you specify using its ID.
"""
def describe_insight(%Client{} = client, id, account_id \\ nil, options \\ []) do
url_path = "/insights/#{AWS.Util.encode_uri(id)}"
headers = []
query_params = []
query_params =
if !is_nil(account_id) do
[{"AccountId", account_id} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns active insights, predictive insights, and resource hours analyzed in the
last hour.
"""
def describe_organization_health(%Client{} = client, input, options \\ []) do
url_path = "/organization/health"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Returns an overview of your organization's history based on the specified time
range.
The overview includes the total reactive and proactive insights.
"""
def describe_organization_overview(%Client{} = client, input, options \\ []) do
url_path = "/organization/overview"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Provides an overview of your system's health.
If additional member accounts are part of your organization, you can filter
those accounts using the `AccountIds` field.
"""
def describe_organization_resource_collection_health(%Client{} = client, input, options \\ []) do
url_path = "/organization/health/resource-collection"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Returns the number of open proactive insights, open reactive insights, and the
Mean Time to Recover (MTTR) for all closed insights in resource collections in
your account.
You specify the type of Amazon Web Services resources collection. The two types
of Amazon Web Services resource collections supported are Amazon Web Services
CloudFormation stacks and Amazon Web Services resources that contain the same
Amazon Web Services tag. DevOps Guru can be configured to analyze the Amazon Web
Services resources that are defined in the stacks or that are tagged using the
same tag *key*. You can specify up to 500 Amazon Web Services CloudFormation
stacks.
"""
def describe_resource_collection_health(
%Client{} = client,
resource_collection_type,
next_token \\ nil,
options \\ []
) do
url_path =
"/accounts/health/resource-collection/#{AWS.Util.encode_uri(resource_collection_type)}"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"NextToken", next_token} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns the integration status of services that are integrated with DevOps Guru.
The one service that can be integrated with DevOps Guru is Amazon Web Services
Systems Manager, which can be used to create an OpsItem for each generated
insight.
"""
def describe_service_integration(%Client{} = client, options \\ []) do
url_path = "/service-integrations"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns an estimate of the monthly cost for DevOps Guru to analyze your Amazon
Web Services resources.
For more information, see [Estimate your Amazon DevOps Guru costs](https://docs.aws.amazon.com/devops-guru/latest/userguide/cost-estimate.html)
and [Amazon DevOps Guru pricing](http://aws.amazon.com/devops-guru/pricing/).
"""
def get_cost_estimation(%Client{} = client, next_token \\ nil, options \\ []) do
url_path = "/cost-estimation"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"NextToken", next_token} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns lists of Amazon Web Services resources that are of the specified resource
collection type.
The two types of Amazon Web Services resource collections supported are Amazon
Web Services CloudFormation stacks and Amazon Web Services resources that
contain the same Amazon Web Services tag. DevOps Guru can be configured to
analyze the Amazon Web Services resources that are defined in the stacks or that
are tagged using the same tag *key*. You can specify up to 500 Amazon Web
Services CloudFormation stacks.
"""
def get_resource_collection(
%Client{} = client,
resource_collection_type,
next_token \\ nil,
options \\ []
) do
url_path = "/resource-collections/#{AWS.Util.encode_uri(resource_collection_type)}"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"NextToken", next_token} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns a list of the anomalies that belong to an insight that you specify using
its ID.
"""
def list_anomalies_for_insight(%Client{} = client, insight_id, input, options \\ []) do
url_path = "/anomalies/insight/#{AWS.Util.encode_uri(insight_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Returns a list of the events emitted by the resources that are evaluated by
DevOps Guru.
You can use filters to specify which events are returned.
"""
def list_events(%Client{} = client, input, options \\ []) do
url_path = "/events"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Returns a list of insights in your Amazon Web Services account.
You can specify which insights are returned by their start time and status
(`ONGOING`, `CLOSED`, or `ANY`).
"""
def list_insights(%Client{} = client, input, options \\ []) do
url_path = "/insights"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Returns a list of notification channels configured for DevOps Guru.
Each notification channel is used to notify you when DevOps Guru generates an
insight that contains information about how to improve your operations. The one
supported notification channel is Amazon Simple Notification Service (Amazon
SNS).
"""
def list_notification_channels(%Client{} = client, input, options \\ []) do
url_path = "/channels"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Returns a list of insights associated with the account or OU Id.
"""
def list_organization_insights(%Client{} = client, input, options \\ []) do
url_path = "/organization/insights"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Returns a list of a specified insight's recommendations.
Each recommendation includes a list of related metrics and a list of related
events.
"""
def list_recommendations(%Client{} = client, input, options \\ []) do
url_path = "/recommendations"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Collects customer feedback about the specified insight.
"""
def put_feedback(%Client{} = client, input, options \\ []) do
url_path = "/feedback"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Removes a notification channel from DevOps Guru.
A notification channel is used to notify you when DevOps Guru generates an
insight that contains information about how to improve your operations.
"""
def remove_notification_channel(%Client{} = client, id, input, options \\ []) do
url_path = "/channels/#{AWS.Util.encode_uri(id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Returns a list of insights in your Amazon Web Services account.
You can specify which insights are returned by their start time, one or more
statuses (`ONGOING` or `CLOSED`), one or more severities (`LOW`,
`MEDIUM`, and `HIGH`), and type (`REACTIVE` or `PROACTIVE`).
Use the `Filters` parameter to specify status and severity search parameters.
Use the `Type` parameter to specify `REACTIVE` or `PROACTIVE` in your search.
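
A rough sketch of the `input` shape (the module name is assumed from this
file's path, the field names follow the public DevOps Guru API reference, and
all values are illustrative):

    input = %{
      "StartTimeRange" => %{"FromTime" => 1_609_459_200, "ToTime" => 1_612_137_600},
      "Filters" => %{"Statuses" => ["ONGOING"], "Severities" => ["HIGH"]},
      "Type" => "REACTIVE"
    }
    AWS.DevOpsGuru.search_insights(client, input)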
"""
def search_insights(%Client{} = client, input, options \\ []) do
url_path = "/insights/search"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Returns a list of insights in your organization.
You can specify which insights are returned by their start time, one or more
statuses (`ONGOING` or `CLOSED`), one or more severities (`LOW`,
`MEDIUM`, and `HIGH`), and type (`REACTIVE` or `PROACTIVE`).
Use the `Filters` parameter to specify status and severity search parameters.
Use the `Type` parameter to specify `REACTIVE` or `PROACTIVE` in your search.
"""
def search_organization_insights(%Client{} = client, input, options \\ []) do
url_path = "/organization/insights/search"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Starts the creation of an estimate of the monthly cost to analyze your Amazon
Web Services resources.
"""
def start_cost_estimation(%Client{} = client, input, options \\ []) do
url_path = "/cost-estimation"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Updates the collection of resources that DevOps Guru analyzes.
The two types of Amazon Web Services resource collections supported are Amazon
Web Services CloudFormation stacks and Amazon Web Services resources that
contain the same Amazon Web Services tag. DevOps Guru can be configured to
analyze the Amazon Web Services resources that are defined in the stacks or that
are tagged using the same tag *key*. You can specify up to 500 Amazon Web
Services CloudFormation stacks. This method also creates the IAM role required
for you to use DevOps Guru.
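
A rough sketch of the `input` shape (field names follow the public DevOps
Guru API reference; the action and stack names are illustrative):

    input = %{
      "Action" => "ADD",
      "ResourceCollection" => %{
        "CloudFormation" => %{"StackNames" => ["my-stack-1", "my-stack-2"]}
      }
    }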
"""
def update_resource_collection(%Client{} = client, input, options \\ []) do
url_path = "/resource-collections"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Enables or disables integration with a service that can be integrated with
DevOps Guru.
The one service that can be integrated with DevOps Guru is Amazon Web Services
Systems Manager, which can be used to create an OpsItem for each generated
insight.
"""
def update_service_integration(%Client{} = client, input, options \\ []) do
url_path = "/service-integrations"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
200
)
end
end
| lib/aws/generated/dev_ops_guru.ex | 0.840815 | 0.494751 | dev_ops_guru.ex | starcoder |
defmodule BlockKeys.Mnemonic do
@moduledoc """
BIP32 implementation responsible for generating mnemonic phrases, seeds and public / private address trees.
"""
alias BlockKeys.Crypto
@pad_length_mnemonic 8
@pad_length_phrase 11
@pbkdf2_initial_round 1
@pbkdf2_rounds 2048
@allowed_entropy_lengths [33, 66, 99, 132, 165, 198, 231, 264]
@doc """
Generates the 24 random mnemonic words.
Can optionally accept an entropy string used to generate the mnemonic.
## Examples
iex> BlockKeys.Mnemonic.generate_phrase()
"baby shadow city tower diamond magnet avocado champion crash ..."
iex> BlockKeys.Mnemonic.generate_phrase("1234")
"couple muscle snack"
NOTE: For now the seed can only be generated from 32 bytes of entropy
"""
def generate_phrase(entropy \\ :crypto.strong_rand_bytes(32)) do
entropy
|> entropy_hash()
|> extract_checksum()
|> append_checksum()
|> binary_to_bitstring()
|> mnemonic()
end
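# How generate_phrase/1 works, step by step (a descriptive sketch of the
# pipeline above, following BIP-39):
#
#   1. hash the entropy with SHA-256 and keep the first byte as a checksum
#   2. append the checksum to the entropy
#   3. turn the result into a bitstring of "0"/"1" characters
#   4. split it into 11-bit groups and map each group to a word from the
#      english.txt word list (32 bytes of entropy -> 24 words)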
@doc """
Takes a string of word phrases and converts them back to 256bit entropy
## Examples
iex> BlockKeys.Mnemonic.entropy_from_phrase("safe result wire cattle sauce luggage couple legend pause rather employ pear trigger live daring unlock music lyrics smoke mistake endorse kite obey siren")
"be16fbf0922bf9098c4bfca1764923d10e89054de77091f0af3346f49cf665fe"
"""
def entropy_from_phrase(phrase) do
phrase
|> phrase_to_bitstring()
|> verify_checksum()
|> maybe_return_entropy()
end
@doc """
Given a mnemonic phrase it will generate the hex encoded seed
## Examples
iex> BlockKeys.Mnemonic.generate_seed("weather neither click twin monster night bridge door immense tornado crack model canal answer harbor weasel winter fan universe burden price quote tail ride")
"af7f48a70d0ecedc77df984117e336e12f0f0e681a4c95b25f4f17516d7dc4cca456e3a400bd1c6a5a604af67eb58dc6e0eb46fd520ad99ef27855d119dca517"
"""
def generate_seed(mnemonic, password \\ "") do
mnemonic
|> phrase_to_bitstring()
|> verify_checksum()
|> pbkdf2_key_stretching(mnemonic, password)
end
defp binary_to_bitstring(binary) do
:binary.bin_to_list(binary)
|> Enum.map(fn byte -> to_bitstring(byte, @pad_length_mnemonic) end)
|> Enum.join()
end
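# pbkdf2_key_stretching/3 below is a hand-rolled PBKDF2 (HMAC-SHA512,
# @pbkdf2_rounds iterations, a single output block) as used for BIP-39 seed
# derivation: the salt is "mnemonic" <> password with the block index 1
# appended, the mnemonic acts as the HMAC key, and each round XORs the new
# HMAC into the running result.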
defp pbkdf2_key_stretching({:error, message}, _, _), do: {:error, message}
defp pbkdf2_key_stretching({:ok, _binary_mnemonic}, mnemonic, password) do
salt = <<salt(password)::binary, @pbkdf2_initial_round::integer-32>>
initial_round = :crypto.hmac(:sha512, mnemonic, salt)
iterate(mnemonic, @pbkdf2_initial_round + 1, initial_round, initial_round)
|> Base.encode16(case: :lower)
end
defp phrase_to_bitstring(phrase) do
phrase
|> phrase_to_list
|> word_indexes(words())
|> Enum.map(fn index -> to_bitstring(index, @pad_length_phrase) end)
|> Enum.join()
end
defp maybe_return_entropy({:ok, entropy}), do: Base.encode16(entropy, case: :lower)
defp maybe_return_entropy({:error, message}), do: {:error, message}
defp iterate(_entropy, round, _previous, result) when round > @pbkdf2_rounds, do: result
defp iterate(entropy, round, previous, result) do
next = :crypto.hmac(:sha512, entropy, previous)
iterate(entropy, round + 1, next, :crypto.exor(next, result))
end
# hash the initial entropy
defp entropy_hash(sequence), do: {Crypto.sha256(sequence), sequence}
# extract the first byte (8bits)
defp extract_checksum({<<checksum::binary-1, _bits::bitstring>>, sequence}),
do: {checksum, sequence}
# append the checksum to initial entropy
defp append_checksum({checksum, sequence}), do: sequence <> checksum
# convert a byte to a bitstring (8bits)
defp to_bitstring(byte, pad_length) do
byte
|> Integer.to_string(2)
|> String.pad_leading(pad_length, "0")
end
# split the 264bit string into groups of 11, convert to base 10 integer, map it to word list
defp mnemonic(entropy) do
Regex.scan(~r/.{11}/, entropy)
|> List.flatten()
|> Enum.map(fn binary ->
word_index(binary, words())
end)
|> Enum.join(" ")
end
defp word_index(binary, words) do
binary
|> String.to_integer(2)
|> element_at_index(words)
end
defp element_at_index(index, words), do: Kernel.elem(words, index)
defp words do
:block_keys
|> Application.app_dir()
|> Path.join("priv/assets/english.txt")
|> File.stream!()
|> Stream.map(&String.trim/1)
|> Enum.to_list()
|> List.to_tuple()
end
defp bitstring_to_binary(bitstring) do
Regex.scan(~r/.{8}/, bitstring)
|> List.flatten()
|> Enum.map(&String.to_integer(&1, 2))
|> :binary.list_to_bin()
end
defp verify_checksum(ent_bitstring) do
if String.length(ent_bitstring) in @allowed_entropy_lengths do
ent_binary = bitstring_to_binary(ent_bitstring)
{calculated_cs, cs, entropy} =
case byte_size(ent_binary) do
33 ->
<<entropy::binary-32, cs::binary-1>> = ent_binary
<<calculated_cs::binary-1, _rest::binary>> = Crypto.sha256(entropy)
{calculated_cs, cs, entropy}
size ->
extract_and_compare_checksum(ent_bitstring, div(size, 4))
end
if calculated_cs == cs do
{:ok, entropy}
else
{:error, "Checksum is not valid"}
end
else
{:error, "Invalid mnemonic"}
end
end
defp extract_and_compare_checksum(ent_bitstring, cs_length) do
cs = String.slice(ent_bitstring, -cs_length, String.length(ent_bitstring))
ent = String.slice(ent_bitstring, 0, String.length(ent_bitstring) - cs_length)
entropy_hash =
ent_bitstring
|> bitstring_to_binary()
|> Crypto.sha256()
entropy_bitstring_hash = binary_to_bitstring(entropy_hash)
calculated_cs = String.slice(entropy_bitstring_hash, 0, cs_length)
{calculated_cs, cs, bitstring_to_binary(ent)}
end
defp phrase_to_list(phrase) do
phrase
|> String.split()
|> Enum.map(&String.trim/1)
end
defp word_indexes(phrase_list, words) do
phrase_list
|> Enum.map(fn phrase_word ->
words
|> Tuple.to_list()
|> Enum.find_index(fn el -> el === phrase_word end)
end)
end
def salt(password), do: "mnemonic" <> password
end
| lib/block_keys/mnemonic.ex | 0.857485 | 0.479077 | mnemonic.ex | starcoder |
defmodule AWS.MechanicalTurk do
@moduledoc """
Amazon Mechanical Turk API Reference
"""
@doc """
The `AcceptQualificationRequest` operation approves a Worker's request for
a Qualification.
Only the owner of the Qualification type can grant a Qualification request
for that type.
A successful request for the `AcceptQualificationRequest` operation returns
with no errors and an empty body.
"""
def accept_qualification_request(client, input, options \\ []) do
request(client, "AcceptQualificationRequest", input, options)
end
@doc """
The `ApproveAssignment` operation approves the results of a completed
assignment.
Approving an assignment initiates two payments from the Requester's
Amazon.com account
<ul> <li> The Worker who submitted the results is paid the reward specified
in the HIT.
</li> <li> Amazon Mechanical Turk fees are debited.
</li> </ul> If the Requester's account does not have adequate funds for
these payments, the call to ApproveAssignment returns an exception, and the
approval is not processed. You can include an optional feedback message
with the approval, which the Worker can see in the Status section of the
web site.
You can also call this operation for assignments that were previously
rejected and approve them by explicitly overriding the previous rejection.
This only works on rejected assignments that were submitted within the
previous 30 days and only if the assignment's related HIT has not been
deleted.
"""
def approve_assignment(client, input, options \\ []) do
request(client, "ApproveAssignment", input, options)
end
@doc """
The `AssociateQualificationWithWorker` operation gives a Worker a
Qualification. `AssociateQualificationWithWorker` does not require that the
Worker submit a Qualification request. It gives the Qualification directly
to the Worker.
You can only assign a Qualification of a Qualification type that you
created (using the `CreateQualificationType` operation).
<note> Note: `AssociateQualificationWithWorker` does not affect any pending
Qualification requests for the Qualification by the Worker. If you assign a
Qualification to a Worker, then later grant a Qualification request made by
the Worker, the granting of the request may modify the Qualification score.
To resolve a pending Qualification request without affecting the
Qualification the Worker already has, reject the request with the
`RejectQualificationRequest` operation.
</note>
"""
def associate_qualification_with_worker(client, input, options \\ []) do
request(client, "AssociateQualificationWithWorker", input, options)
end
@doc """
The `CreateAdditionalAssignmentsForHIT` operation increases the maximum
number of assignments of an existing HIT.
To extend the maximum number of assignments, specify the number of
additional assignments.
<note> <ul> <li> HITs created with fewer than 10 assignments cannot be
extended to have 10 or more assignments. Attempting to add assignments in a
way that brings the total number of assignments for a HIT from fewer than
10 assignments to 10 or more assignments will result in an
`AWS.MechanicalTurk.InvalidMaximumAssignmentsIncrease` exception.
</li> <li> HITs that were created before July 22, 2015 cannot be extended.
Attempting to extend HITs that were created before July 22, 2015 will
result in an `AWS.MechanicalTurk.HITTooOldForExtension` exception.
</li> </ul> </note>
"""
def create_additional_assignments_for_h_i_t(client, input, options \\ []) do
request(client, "CreateAdditionalAssignmentsForHIT", input, options)
end
@doc """
The `CreateHIT` operation creates a new Human Intelligence Task (HIT). The
new HIT is made available for Workers to find and accept on the Amazon
Mechanical Turk website.
This operation allows you to specify a new HIT by passing in values for the
properties of the HIT, such as its title, reward amount and number of
assignments. When you pass these values to `CreateHIT`, a new HIT is
created for you, with a new `HITTypeID`. The HITTypeID can be used to
create additional HITs in the future without needing to specify common
parameters such as the title, description and reward amount each time.
An alternative way to create HITs is to first generate a HITTypeID using
the `CreateHITType` operation and then call the `CreateHITWithHITType`
operation. This is the recommended best practice for Requesters who are
creating large numbers of HITs.
CreateHIT also supports several ways to provide question data: by providing
a value for the `Question` parameter that fully specifies the contents of
the HIT, or by providing a `HitLayoutId` and associated
`HitLayoutParameters`.
<note> If a HIT is created with 10 or more maximum assignments, there is an
additional fee. For more information, see [Amazon Mechanical Turk
Pricing](https://requester.mturk.com/pricing).
</note>
"""
def create_h_i_t(client, input, options \\ []) do
request(client, "CreateHIT", input, options)
end
@doc """
The `CreateHITType` operation creates a new HIT type. This operation allows
you to define a standard set of HIT properties to use when creating HITs.
If you register a HIT type with values that match an existing HIT type, the
HIT type ID of the existing type will be returned.
"""
def create_h_i_t_type(client, input, options \\ []) do
request(client, "CreateHITType", input, options)
end
@doc """
The `CreateHITWithHITType` operation creates a new Human Intelligence Task
(HIT) using an existing HITTypeID generated by the `CreateHITType`
operation.
This is an alternative way to create HITs from the `CreateHIT` operation.
This is the recommended best practice for Requesters who are creating large
numbers of HITs.
CreateHITWithHITType also supports several ways to provide question data:
by providing a value for the `Question` parameter that fully specifies the
contents of the HIT, or by providing a `HitLayoutId` and associated
`HitLayoutParameters`.
<note> If a HIT is created with 10 or more maximum assignments, there is an
additional fee. For more information, see [Amazon Mechanical Turk
Pricing](https://requester.mturk.com/pricing).
</note>
"""
def create_h_i_t_with_h_i_t_type(client, input, options \\ []) do
request(client, "CreateHITWithHITType", input, options)
end
@doc """
The `CreateQualificationType` operation creates a new Qualification type,
which is represented by a `QualificationType` data structure.
"""
def create_qualification_type(client, input, options \\ []) do
request(client, "CreateQualificationType", input, options)
end
@doc """
The `CreateWorkerBlock` operation allows you to prevent a Worker from
working on your HITs. For example, you can block a Worker who is producing
poor quality work. You can block up to 100,000 Workers.
"""
def create_worker_block(client, input, options \\ []) do
request(client, "CreateWorkerBlock", input, options)
end
@doc """
The `DeleteHIT` operation is used to delete a HIT that is no longer needed.
Only the Requester who created the HIT can delete it.
You can only dispose of HITs that are in the `Reviewable` state, with all
of their submitted assignments already either approved or rejected. If you
call the DeleteHIT operation on a HIT that is not in the `Reviewable` state
(for example, that has not expired, or still has active assignments), or on
a HIT that is Reviewable but without all of its submitted assignments
already approved or rejected, the service will return an error.
<note> <ul> <li> HITs are automatically disposed of after 120 days.
</li> <li> After you dispose of a HIT, you can no longer approve the HIT's
rejected assignments.
</li> <li> Disposed HITs are not returned in results for the ListHITs
operation.
</li> <li> Disposing HITs can improve the performance of operations such as
ListReviewableHITs and ListHITs.
</li> </ul> </note>
"""
def delete_h_i_t(client, input, options \\ []) do
request(client, "DeleteHIT", input, options)
end
@doc """
The `DeleteQualificationType` deletes a Qualification type and deletes any
HIT types that are associated with the Qualification type.
This operation does not revoke Qualifications already assigned to Workers
because the Qualifications might be needed for active HITs. If there are
any pending requests for the Qualification type, Amazon Mechanical Turk
rejects those requests. After you delete a Qualification type, you can no
longer use it to create HITs or HIT types.
<note> DeleteQualificationType must wait for all the HITs that use the
deleted Qualification type to be deleted before completing. It may take up
to 48 hours before DeleteQualificationType completes and the unique name of
the Qualification type is available for reuse with CreateQualificationType.
</note>
"""
def delete_qualification_type(client, input, options \\ []) do
request(client, "DeleteQualificationType", input, options)
end
@doc """
The `DeleteWorkerBlock` operation allows you to reinstate a blocked Worker
to work on your HITs. This operation reverses the effects of the
CreateWorkerBlock operation. You need the Worker ID to use this operation.
If the Worker ID is missing or invalid, this operation fails and returns
the message “WorkerId is invalid.” If the specified Worker is not blocked,
this operation returns successfully.
"""
def delete_worker_block(client, input, options \\ []) do
request(client, "DeleteWorkerBlock", input, options)
end
@doc """
The `DisassociateQualificationFromWorker` revokes a previously granted
Qualification from a user.
You can provide a text message explaining why the Qualification was
revoked. The user who had the Qualification can see this message.
"""
def disassociate_qualification_from_worker(client, input, options \\ []) do
request(client, "DisassociateQualificationFromWorker", input, options)
end
@doc """
The `GetAccountBalance` operation retrieves the amount of money in your
Amazon Mechanical Turk account.
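
A rough usage sketch (the client value is created elsewhere; the operation
takes an empty input map):

    AWS.MechanicalTurk.get_account_balance(client, %{})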
"""
def get_account_balance(client, input, options \\ []) do
request(client, "GetAccountBalance", input, options)
end
@doc """
The `GetAssignment` operation retrieves the details of the specified
Assignment.
"""
def get_assignment(client, input, options \\ []) do
request(client, "GetAssignment", input, options)
end
@doc """
The `GetFileUploadURL` operation generates and returns a temporary URL. You
use the temporary URL to retrieve a file uploaded by a Worker as an answer
to a FileUploadAnswer question for a HIT. The temporary URL is generated
the instant the GetFileUploadURL operation is called, and is valid for 60
seconds. You can get a temporary file upload URL any time until the HIT is
disposed. After the HIT is disposed, any uploaded files are deleted, and
cannot be retrieved. Pending Deprecation on December 12, 2017. The Answer
Specification structure will no longer support the `FileUploadAnswer`
element to be used for the QuestionForm data structure. Instead, we
recommend that Requesters who want to create HITs asking Workers to upload
files to use Amazon S3.
"""
def get_file_upload_u_r_l(client, input, options \\ []) do
request(client, "GetFileUploadURL", input, options)
end
@doc """
The `GetHIT` operation retrieves the details of the specified HIT.
"""
def get_h_i_t(client, input, options \\ []) do
request(client, "GetHIT", input, options)
end
@doc """
The `GetQualificationScore` operation returns the value of a Worker's
Qualification for a given Qualification type.
To get a Worker's Qualification, you must know the Worker's ID. The
Worker's ID is included in the assignment data returned by the
`ListAssignmentsForHIT` operation.
Only the owner of a Qualification type can query the value of a Worker's
Qualification of that type.
"""
def get_qualification_score(client, input, options \\ []) do
request(client, "GetQualificationScore", input, options)
end
@doc """
The `GetQualificationType`operation retrieves information about a
Qualification type using its ID.
"""
def get_qualification_type(client, input, options \\ []) do
request(client, "GetQualificationType", input, options)
end
@doc """
The `ListAssignmentsForHIT` operation retrieves completed assignments for a
HIT. You can use this operation to retrieve the results for a HIT.
You can get assignments for a HIT at any time, even if the HIT is not yet
Reviewable. If a HIT requested multiple assignments, and has received some
results but has not yet become Reviewable, you can still retrieve the
partial results with this operation.
Use the AssignmentStatus parameter to control which set of assignments for
a HIT are returned. The ListAssignmentsForHIT operation can return
submitted assignments awaiting approval, or it can return assignments that
have already been approved or rejected. You can set
AssignmentStatus=Approved,Rejected to get assignments that have already
been approved and rejected together in one result set.
Only the Requester who created the HIT can retrieve the assignments for
that HIT.
Results are sorted and divided into numbered pages and the operation
returns a single page of results. You can use the parameters of the
operation to control sorting and pagination.
"""
def list_assignments_for_h_i_t(client, input, options \\ []) do
request(client, "ListAssignmentsForHIT", input, options)
end
@doc """
The `ListBonusPayments` operation retrieves the amounts of bonuses you have
paid to Workers for a given HIT or assignment.
"""
def list_bonus_payments(client, input, options \\ []) do
request(client, "ListBonusPayments", input, options)
end
@doc """
The `ListHITs` operation returns all of a Requester's HITs. The operation
returns HITs of any status, except for HITs that have been deleted with
the DeleteHIT operation or that have been auto-deleted.
"""
def list_h_i_ts(client, input, options \\ []) do
request(client, "ListHITs", input, options)
end
@doc """
The `ListHITsForQualificationType` operation returns the HITs that use the
given Qualification type for a Qualification requirement. The operation
returns HITs of any status, except for HITs that have been deleted with the
`DeleteHIT` operation or that have been auto-deleted.
"""
def list_h_i_ts_for_qualification_type(client, input, options \\ []) do
request(client, "ListHITsForQualificationType", input, options)
end
@doc """
The `ListQualificationRequests` operation retrieves requests for
Qualifications of a particular Qualification type. The owner of the
Qualification type calls this operation to poll for pending requests, and
accepts them using the AcceptQualification operation.
"""
def list_qualification_requests(client, input, options \\ []) do
request(client, "ListQualificationRequests", input, options)
end
@doc """
The `ListQualificationTypes` operation returns a list of Qualification
types, filtered by an optional search term.
"""
def list_qualification_types(client, input, options \\ []) do
request(client, "ListQualificationTypes", input, options)
end
@doc """
The `ListReviewPolicyResultsForHIT` operation retrieves the computed
results and the actions taken in the course of executing your Review
Policies for a given HIT. For information about how to specify Review
Policies when you call CreateHIT, see Review Policies. The
ListReviewPolicyResultsForHIT operation can return results for both
Assignment-level and HIT-level review results.
"""
def list_review_policy_results_for_h_i_t(client, input, options \\ []) do
request(client, "ListReviewPolicyResultsForHIT", input, options)
end
@doc """
The `ListReviewableHITs` operation retrieves the HITs with Status equal to
Reviewable or Status equal to Reviewing that belong to the Requester
calling the operation.
"""
def list_reviewable_h_i_ts(client, input, options \\ []) do
request(client, "ListReviewableHITs", input, options)
end
@doc """
The `ListWorkerBlocks` operation retrieves a list of Workers who are
blocked from working on your HITs.
"""
def list_worker_blocks(client, input, options \\ []) do
request(client, "ListWorkerBlocks", input, options)
end
@doc """
The `ListWorkersWithQualificationType` operation returns all of the Workers
that have been associated with a given Qualification type.
"""
def list_workers_with_qualification_type(client, input, options \\ []) do
request(client, "ListWorkersWithQualificationType", input, options)
end
@doc """
The `NotifyWorkers` operation sends an email to one or more Workers that
you specify with the Worker ID. You can specify up to 100 Worker IDs to
send the same message with a single call to the NotifyWorkers operation.
The NotifyWorkers operation will send a notification email to a Worker only
if you have previously approved or rejected work from the Worker.
"""
def notify_workers(client, input, options \\ []) do
request(client, "NotifyWorkers", input, options)
end
@doc """
The `RejectAssignment` operation rejects the results of a completed
assignment.
You can include an optional feedback message with the rejection, which the
Worker can see in the Status section of the web site. When you include a
feedback message with the rejection, it helps the Worker understand why the
assignment was rejected, and can improve the quality of the results the
Worker submits in the future.
Only the Requester who created the HIT can reject an assignment for the
HIT.
"""
def reject_assignment(client, input, options \\ []) do
request(client, "RejectAssignment", input, options)
end
@doc """
The `RejectQualificationRequest` operation rejects a user's request for a
Qualification.
You can provide a text message explaining why the request was rejected. The
Worker who made the request can see this message.
"""
def reject_qualification_request(client, input, options \\ []) do
request(client, "RejectQualificationRequest", input, options)
end
@doc """
The `SendBonus` operation issues a payment of money from your account to a
Worker. This payment happens separately from the reward you pay to the
Worker when you approve the Worker's assignment. The SendBonus operation
requires the Worker's ID and the assignment ID as parameters to initiate
payment of the bonus. You must include a message that explains the reason
for the bonus payment, as the Worker may not be expecting the payment.
Amazon Mechanical Turk collects a fee for bonus payments, similar to the
HIT listing fee. This operation fails if your account does not have enough
funds to pay for both the bonus and the fees.
"""
def send_bonus(client, input, options \\ []) do
request(client, "SendBonus", input, options)
end
@doc """
The `SendTestEventNotification` operation causes Amazon Mechanical Turk to
send a notification message as if a HIT event occurred, according to the
provided notification specification. This allows you to test notifications
without setting up notifications for a real HIT type and trying to trigger
them using the website. When you call this operation, the service attempts
to send the test notification immediately.
"""
def send_test_event_notification(client, input, options \\ []) do
request(client, "SendTestEventNotification", input, options)
end
@doc """
The `UpdateExpirationForHIT` operation allows you to update the expiration
time of a HIT. If you update it to a time in the past, the HIT will be
immediately expired.
"""
def update_expiration_for_h_i_t(client, input, options \\ []) do
request(client, "UpdateExpirationForHIT", input, options)
end
@doc """
The `UpdateHITReviewStatus` operation updates the status of a HIT. If the
status is Reviewable, this operation can update the status to Reviewing, or
it can revert a Reviewing HIT back to the Reviewable status.
"""
def update_h_i_t_review_status(client, input, options \\ []) do
request(client, "UpdateHITReviewStatus", input, options)
end
@doc """
The `UpdateHITTypeOfHIT` operation allows you to change the HITType
properties of a HIT. This operation disassociates the HIT from its old
HITType properties and associates it with the new HITType properties. The
HIT takes on the properties of the new HITType in place of the old ones.
"""
def update_h_i_t_type_of_h_i_t(client, input, options \\ []) do
request(client, "UpdateHITTypeOfHIT", input, options)
end
@doc """
The `UpdateNotificationSettings` operation creates, updates, disables or
re-enables notifications for a HIT type. If you call the
UpdateNotificationSettings operation for a HIT type that already has a
notification specification, the operation replaces the old specification
with a new one. You can call the UpdateNotificationSettings operation to
enable or disable notifications for the HIT type, without having to modify
the notification specification itself by providing updates to the Active
status without specifying a new notification specification. To change the
Active status of a HIT type's notifications, the HIT type must already have
a notification specification, or one must be provided in the same call to
`UpdateNotificationSettings`.
"""
def update_notification_settings(client, input, options \\ []) do
request(client, "UpdateNotificationSettings", input, options)
end
@doc """
The `UpdateQualificationType` operation modifies the attributes of an
existing Qualification type, which is represented by a QualificationType
data structure. Only the owner of a Qualification type can modify its
attributes.
Most attributes of a Qualification type can be changed after the type has
been created. However, the Name and Keywords fields cannot be modified. The
RetryDelayInSeconds parameter can be modified or added to change the delay
or to enable retries, but RetryDelayInSeconds cannot be used to disable
retries.
You can use this operation to update the test for a Qualification type. The
test is updated based on the values specified for the Test,
TestDurationInSeconds and AnswerKey parameters. All three parameters
specify the updated test. If you are updating the test for a type, you must
specify the Test and TestDurationInSeconds parameters. The AnswerKey
parameter is optional; omitting it specifies that the updated test does not
have an answer key.
If you omit the Test parameter, the test for the Qualification type is
unchanged. There is no way to remove a test from a Qualification type that
has one. If the type already has a test, you cannot update it to be
AutoGranted. If the Qualification type does not have a test and one is
provided by an update, the type will henceforth have a test.
If you want to update the test duration or answer key for an existing test
without changing the questions, you must specify a Test parameter with the
original questions, along with the updated values.
If you provide an updated Test but no AnswerKey, the new test will not have
an answer key. Requests for such Qualifications must be granted manually.
You can also update the AutoGranted and AutoGrantedValue attributes of the
Qualification type.
"""
def update_qualification_type(client, input, options \\ []) do
request(client, "UpdateQualificationType", input, options)
end
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, Poison.Response.t} |
{:error, Poison.Parser.t} |
{:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
client = %{client | service: "mturk-requester"}
host = get_host("mturk-requester", client)
url = get_url(host, client)
headers = [{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "MTurkRequesterServiceV20170117.#{action}"}]
payload = Poison.Encoder.encode(input, [])
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body)
exception = error["__type"]
message = error["message"]
{:error, {exception, message}}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
| lib/aws/mechanical_turk.ex | 0.888269 | 0.616416 | mechanical_turk.ex | starcoder |
defmodule Appsignal.NifBehaviour do
@callback loaded?() :: boolean()
@callback running_in_container?() :: boolean()
end
defmodule Appsignal.Nif do
@behaviour Appsignal.NifBehaviour
@moduledoc """
It's a NIF! Oh no!
While people generally think NIFs are a bad idea, the overhead of
this particular NIF is low. The C code that the NIF calls has been
designed to be as fast as possible and to do as little as possible
on the calling thread.
Internally, the AppSignal NIF works as follows: it fork/execs a
separate agent process, to which the NIF sends its data (protobuf)
over a unix socket. This agent process (which is a separate unix
process!) then takes care of sending the data the server
periodically.
The C library that the NIF interfaces with is specifically written
with performance in mind and is very robust and battle tested; it is
written in Rust and is the same code that the Ruby AppSignal Gem uses,
which is used in production on thousands of sites.
While using native Elixir protobufs to communicate directly with this
agent would make more sense from a BEAM standpoint, from a
maintainability point of view the NIF choice is more logical, because
AppSignal is planning more language integrations in the future (PHP,
Java) which will all use this same C library and agent process.
"""
@on_load :init
def init do
path = :filename.join(:code.priv_dir(:appsignal), 'appsignal_extension')
case :erlang.load_nif(path, 1) do
:ok ->
:ok
{:error, {:load_failed, reason}} ->
arch = :erlang.system_info(:system_architecture)
IO.warn(
"Error loading NIF (Is your operating system (#{arch}) supported? Please check http://docs.appsignal.com/support/operating-systems.html):\n#{
reason
}"
)
:ok
end
end
def agent_version do
case :appsignal
|> :code.priv_dir()
|> Path.join("appsignal.version")
|> File.read() do
{:ok, contents} -> String.trim(contents)
_ -> nil
end
end
def env_put(key, value) do
_env_put(key, value)
end
def env_get(key) do
_env_get(key)
end
def env_delete(key) do
_env_delete(key)
end
def env_clear do
_env_clear()
end
def start do
_start()
end
def stop do
_stop()
end
def diagnose do
_diagnose()
end
def start_transaction(transaction_id, namespace) do
_start_transaction(transaction_id, namespace)
end
def start_event(transaction_resource) do
_start_event(transaction_resource)
end
def finish_event(transaction_resource, name, title, body, body_format) do
_finish_event(transaction_resource, name, title, body, body_format)
end
def finish_event_data(transaction_resource, name, title, body, body_format) do
_finish_event_data(transaction_resource, name, title, body, body_format)
end
def record_event(transaction_resource, name, title, body, body_format, duration) do
_record_event(transaction_resource, name, title, body, body_format, duration)
end
def set_error(transaction_resource, error, message, backtrace) do
_set_error(transaction_resource, error, message, backtrace)
end
def set_sample_data(transaction_resource, key, payload) do
_set_sample_data(transaction_resource, key, payload)
end
def set_action(transaction_resource, action) do
_set_action(transaction_resource, action)
end
def set_namespace(transaction_resource, namespace) do
_set_namespace(transaction_resource, namespace)
end
def set_queue_start(transaction_resource, start) do
_set_queue_start(transaction_resource, start)
end
def set_meta_data(transaction_resource, key, value) do
_set_meta_data(transaction_resource, key, value)
end
def finish(transaction_resource) do
_finish(transaction_resource)
end
def complete(transaction_resource) do
_complete(transaction_resource)
end
def set_gauge(key, value, tags) do
_set_gauge(key, value, tags)
end
def increment_counter(key, count, tags) do
_increment_counter(key, count, tags)
end
def add_distribution_value(key, value, tags) do
_add_distribution_value(key, value, tags)
end
def data_map_new do
_data_map_new()
end
def data_set_string(resource, key, value) do
_data_set_string(resource, key, value)
end
def data_set_string(resource, value) do
_data_set_string(resource, value)
end
def data_set_integer(resource, key, value) do
_data_set_integer(resource, key, value)
end
def data_set_integer(resource, value) do
_data_set_integer(resource, value)
end
def data_set_float(resource, key, value) do
_data_set_float(resource, key, value)
end
def data_set_float(resource, value) do
_data_set_float(resource, value)
end
def data_set_boolean(resource, key, value) do
_data_set_boolean(resource, key, value)
end
def data_set_boolean(resource, value) do
_data_set_boolean(resource, value)
end
def data_set_nil(resource, key) do
_data_set_nil(resource, key)
end
def data_set_nil(resource) do
_data_set_nil(resource)
end
def data_set_data(resource, key, value) do
_data_set_data(resource, key, value)
end
def data_set_data(resource, value) do
_data_set_data(resource, value)
end
def data_list_new do
_data_list_new()
end
def running_in_container? do
_running_in_container()
end
def loaded? do
_loaded()
end
if Mix.env() in [:test, :test_phoenix] do
def data_to_json(resource) do
_data_to_json(resource)
end
def transaction_to_json(resource) do
_transaction_to_json(resource)
end
end
def _env_put(_key, _value) do
:ok
end
def _env_get(_key) do
''
end
def _env_delete(_key) do
:ok
end
def _env_clear do
:ok
end
def _start do
:ok
end
def _stop do
:ok
end
def _diagnose do
:error
end
def _start_transaction(_id, _namespace) do
if System.otp_release() >= "20" do
{:ok, make_ref()}
else
{:ok, <<>>}
end
end
def _start_event(_transaction_resource) do
:ok
end
def _finish_event(_transaction_resource, _name, _title, _body, _body_format) do
:ok
end
def _finish_event_data(_transaction_resource, _name, _title, _body, _body_format) do
:ok
end
def _record_event(_transaction_resource, _name, _title, _body, _body_format, _duration) do
:ok
end
def _set_error(_transaction_resource, _error, _message, _backtrace) do
:ok
end
def _set_sample_data(_transaction_resource, _key, _payload) do
:ok
end
def _set_action(_transaction_resource, _action) do
:ok
end
def _set_namespace(_transaction_resource, _action) do
:ok
end
def _set_queue_start(_transaction_resource, _start) do
:ok
end
def _set_meta_data(_transaction_resource, _key, _value) do
:ok
end
def _finish(_transaction_resource) do
# Using `String.to_atom("no_sample") instead of `:no_sample` to trick
# Dialyzer into thinking this value isn't hardcoded.
String.to_atom("no_sample")
end
def _complete(_transaction_resource) do
:ok
end
def _set_gauge(_key, _value, _tags) do
:ok
end
def _increment_counter(_key, _count, _tags) do
:ok
end
def _add_distribution_value(_key, _value, _tags) do
:ok
end
def _data_map_new do
{:ok, nil}
end
def _data_set_string(resource, _key, _value) do
resource
end
def _data_set_string(resource, _value) do
resource
end
def _data_set_integer(resource, _key, _value) do
resource
end
def _data_set_integer(resource, _value) do
resource
end
def _data_set_float(resource, _key, _value) do
resource
end
def _data_set_float(resource, _value) do
resource
end
def _data_set_boolean(resource, _key, _value) do
resource
end
def _data_set_boolean(resource, _value) do
resource
end
def _data_set_nil(resource, _key) do
resource
end
def _data_set_nil(resource) do
resource
end
def _data_set_data(resource, _key, _value) do
resource
end
def _data_set_data(resource, _value) do
resource
end
def _data_list_new do
{:ok, nil}
end
def _running_in_container do
false
end
def _loaded do
false
end
if Mix.env() in [:test, :test_phoenix, :test_no_nif] do
def _data_to_json(resource) do
resource
end
def _transaction_to_json(resource) do
{:ok, resource}
end
end
end
| lib/appsignal/nif.ex | 0.585457 | 0.436502 | nif.ex | starcoder |
defmodule ExPurpleTiger do
@moduledoc """
ExPurpleTiger generates animal-based hash digests meant to be memorable and
human-readable. This library is suitable for anthropomorphizing project names,
crypto addresses, UUIDs, or any complex string of characters that needs to be
displayed in a user interface.
"""
require Bitwise
alias ExPurpleTiger.Data
@adjectives Data.adjectives()
@colors Data.colors()
@animals Data.animals()
@doc """
Generates an animal-based hash.
## Options
* `:separator` - the separator for the words in the hash, defaults to " ".
* `:style` - the style to apply to the words in the hash. The available options
are `:lowercase` (default), `:uppercase`, and `:titlecase`.
## Examples
iex> ExPurpleTiger.animal_hash("my ugly input string")
{:ok, "rapid grey rattlesnake"}
iex> ExPurpleTiger.animal_hash("my ugly input string", style: :uppercase)
{:ok, "RAPID GREY RATTLESNAKE"}
iex> ExPurpleTiger.animal_hash("my ugly input string", separator: "-", style: :titlecase)
{:ok, "Rapid-Grey-Rattlesnake"}
"""
@spec animal_hash(String.t(), Keyword.t()) :: {:ok, String.t()} | :error
def animal_hash(text, opts \\ []) when is_bitstring(text) and is_list(opts) do
separator = Keyword.get(opts, :separator, " ")
style = Keyword.get(opts, :style, :lowercase)
unless is_bitstring(separator) do
raise ArgumentError, ":separator must be a string, got: #{inspect(separator)}"
end
unless style == :lowercase or style == :uppercase or style == :titlecase do
raise ArgumentError,
"unsupported :style, has to be one of :lowercase, " <>
":uppercase, or :titlecase, got: #{inspect(style)}"
end
[adjective_index, color_index, animal_index] = hex_digest(text)
hash =
[
Enum.at(@adjectives, adjective_index),
Enum.at(@colors, color_index),
Enum.at(@animals, animal_index)
]
|> stylize(style)
|> Enum.join(separator)
{:ok, hash}
end
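# hex_digest/1 and compress/3 below reduce the 16-byte MD5 digest to three
# small integers: the digest is split into three chunks (roughly
# div(16, 3) bytes each) and each chunk is XOR-folded into a single byte,
# giving the adjective, color and animal indexes used above.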
defp hex_digest(input) do
digest = :erlang.md5(input)
compress(div(byte_size(digest), 3), digest, [])
end
defp compress(size, binary, acc) do
case binary do
<<segment::binary-size(size), remain::binary>> when byte_size(remain) >= size ->
res =
List.foldl(:erlang.binary_to_list(segment), 0, fn left, right ->
Bitwise.bxor(left, right)
end)
compress(size, remain, [res | acc])
segment ->
res =
List.foldl(:erlang.binary_to_list(segment), 0, fn left, right ->
Bitwise.bxor(left, right)
end)
Enum.reverse([res | acc])
end
end
defp stylize(words, :lowercase) do
words |> Enum.map(fn x -> String.downcase(x) end)
end
defp stylize(words, :uppercase) do
words |> Enum.map(fn x -> String.upcase(x) end)
end
defp stylize(words, :titlecase) do
words |> Enum.map(fn x -> String.capitalize(x) end)
end
end
| lib/ex_purple_tiger.ex | 0.879484 | 0.554651 | ex_purple_tiger.ex | starcoder |
defprotocol ExMatch.Diff do
@fallback_to_any true
@spec diff(t, any, (atom -> any)) :: nil | {left :: any, right :: any}
def diff(left, right, get_opts)
end
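# A rough usage sketch (the option getter simply returns no options here;
# results follow the Any and Map implementations below):
#
#   no_opts = fn _module -> [] end
#   ExMatch.Diff.diff(1, 1, no_opts)             # => nil (no difference)
#   ExMatch.Diff.diff(%{a: 1}, %{a: 2}, no_opts) # => {%{a: 1}, %{a: 2}}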
defimpl ExMatch.Diff, for: Any do
def diff(value, value, _), do: nil
def diff(left = %struct{}, right = %struct{}, get_opts) do
case ExMatch.Diff.Map.diff(
left |> Map.from_struct(),
right |> Map.from_struct(),
get_opts
) do
nil ->
nil
{left_map, right_map} ->
{
Map.put(left_map, :__struct__, struct),
Map.put(right_map, :__struct__, struct)
}
end
end
def diff(left, right = %_{}, get_opts) do
with impl = ExMatch.Diff.impl_for(right),
false <- impl == ExMatch.Diff.Any,
{right_result, left_result} <- ExMatch.Diff.diff(right, left, get_opts) do
{left_result, right_result}
else
nil -> nil
true -> {left, right}
end
end
def diff(left, right, _),
do: {left, right}
end
defimpl ExMatch.Diff, for: List do
def diff([left_value | left], [right_value | right], get_opts) do
this_diff = ExMatch.Diff.diff(left_value, right_value, get_opts)
rest_diff = diff(left, right, get_opts)
case {this_diff, rest_diff} do
{nil, nil} ->
nil
{nil, {left_results, right_results}} ->
{[:eq | left_results], [:eq | right_results]}
{{left_result, right_result}, nil} ->
{[left_result], [right_result]}
{{left_result, right_result}, {left_results, right_results}} ->
{[left_result | left_results], [right_result | right_results]}
end
end
def diff([], [], _), do: nil
def diff(left, right, _) do
{left, right}
end
end
defimpl ExMatch.Diff, for: Tuple do
def diff(left, right, get_opts) when is_tuple(right) do
left = Tuple.to_list(left)
right = Tuple.to_list(right)
case ExMatch.Diff.List.diff(left, right, get_opts) do
{left, right} ->
{List.to_tuple(left), List.to_tuple(right)}
nil ->
nil
end
end
def diff(left, right, _) do
{left, right}
end
end
defimpl ExMatch.Diff, for: Map do
def diff(left, right, get_opts) when is_map(right) do
case diff_items(left, right, get_opts) do
{left_diffs, right_diffs, right}
when left_diffs == %{} and right_diffs == %{} and right == %{} ->
nil
{left_diffs, right_diffs, right} ->
{Map.new(left_diffs), Map.merge(right, right_diffs)}
end
end
def diff(left, right, _opts) do
{left, right}
end
def diff_items(left, right, get_opts) do
Enum.reduce(left, {%{}, %{}, right}, &diff_item(&1, &2, get_opts))
end
defp diff_item({key, field}, {left_diffs, right_diffs, right}, get_opts) do
case right do
%{^key => right_value} ->
right = Map.delete(right, key)
case ExMatch.Diff.diff(field, right_value, get_opts) do
{left_diff, right_diff} ->
left_diffs = Map.put(left_diffs, key, left_diff)
right_diffs = Map.put(right_diffs, key, right_diff)
{left_diffs, right_diffs, right}
nil ->
{left_diffs, right_diffs, right}
end
_ ->
left_diffs = Map.put(left_diffs, key, field)
{left_diffs, right_diffs, right}
end
end
end
defimpl ExMatch.Diff, for: DateTime do
def diff(left, right, get_opts) when is_binary(right) do
opts = get_opts.(DateTime) || []
case :match_string in opts and DateTime.from_iso8601(right) do
{:ok, right_date, _} ->
diff_dates(left, right, right_date)
_ ->
{left, right}
end
end
def diff(left, right = %DateTime{}, _) do
diff_dates(left, right, right)
end
def diff(left, right, _) do
{left, right}
end
defp diff_dates(left, right, right_date) do
case DateTime.compare(left, right_date) do
:eq -> nil
_ -> {left, right}
end
end
end
if Code.ensure_loaded?(Decimal) do
defimpl ExMatch.Diff, for: Decimal do
require Decimal
def diff(left, right, get_opts) do
opts = get_opts.(Decimal) || []
if is_float(right) or
(is_binary(right) and :match_string not in opts) or
(is_integer(right) and :match_integer not in opts) do
{left, right}
else
parse_and_diff(left, right)
end
end
defp parse_and_diff(left, right) do
Decimal.new(right)
:eq = Decimal.compare(left, right)
nil
catch
_, _ ->
{left, right}
end
end
end
| lib/exmatch/diff.ex | 0.810666 | 0.653486 | diff.ex | starcoder |
defmodule Thesis.Utilities do
@moduledoc """
Module that provides helper functions.
"""
@doc """
Removes special characters, keeps dashes and underscores, and replaces spaces
with dashes. Also downcases the entire string.
iex> import Thesis.Utilities
iex> parameterize("Jamon is so cool!")
"jamon-is-so-cool"
iex> parameterize("%#d50SDF dfsJ FDS lkdsf f dfka a")
"d50sdf-dfsj-fds--lkdsf-f-dfka---a"
iex> parameterize(:this_is_a_test)
"this-is-a-test"
"""
def parameterize(str) when is_atom(str) do
str
|> Atom.to_string
|> String.replace("_", " ")
|> parameterize()
end
def parameterize(str) do
str = Regex.replace(~r/[^a-z0-9\-\s\.]/i, str, "")
Regex.split(~r/\%20|\s/, str)
|> Enum.join("-")
|> String.downcase
end
@doc """
Generates a random string of letters of a given length.
iex> import Thesis.Utilities
iex> String.length(random_string(15))
15
iex> random_string(15) != random_string(15)
true
"""
def random_string(length) do
length
|> :crypto.strong_rand_bytes
|> Base.url_encode64
|> String.replace(~r/[^0-9a-zA-Z]+/, "")
|> String.downcase
|> binary_part(0, length)
end
@doc """
Generates a random string of digits of a given length.
iex> import Thesis.Utilities
iex> String.length(random_string(15, :numeric))
15
iex> random_string(15, :numeric) != random_string(15, :numeric)
true
iex> String.to_integer(random_string(15, :numeric)) > 0
true
"""
def random_string(length, :numeric) do
length
|> :crypto.strong_rand_bytes
|> :crypto.bytes_to_integer
|> Integer.to_string
|> binary_part(0, length)
end
@doc """
Takes a URL and strips unnecessary characters.
iex> import Thesis.Utilities
iex> normalize_path("//ignite//foo")
"/ignite/foo"
iex> normalize_path("/ignite/foo/")
"/ignite/foo"
iex> normalize_path("/")
"/"
"""
def normalize_path("/"), do: "/"
def normalize_path(path) do
path
|> String.replace("//", "/")
|> String.replace_trailing("/", "")
end
@doc """
Shorthand `to_string`.
iex> import Thesis.Utilities
iex> to_s(000001)
"1"
iex> to_s(123)
"123"
"""
def to_s(arg), do: to_string(arg)
@doc """
Returns the data type for the provided data using guard functions.
iex> import Thesis.Utilities
iex> typeof(000001)
"integer"
iex> typeof([1,2,3])
"list"
"""
@types ~w[function nil integer binary bitstring list map float atom tuple pid port reference]
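# Each entry in @types expands, at compile time, into a typeof/1 clause
# guarded by the matching Kernel guard (is_function/1, is_nil/1,
# is_integer/1, and so on), so typeof/1 is just a series of guarded clauses.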
for type <- @types do
def typeof(x) when unquote(:"is_#{type}")(x), do: unquote(type)
end
end
| lib/thesis/utilities.ex | 0.695855 | 0.413004 | utilities.ex | starcoder |
defmodule Rails do
@moduledoc """
Partial Railway Oriented Programming implementation in Elixir.
See this [blog article](https://fsharpforfunandprofit.com/rop/)
for more details.
"""
use Boundary, deps: [], exports: []
require Logger
@type success(x) :: {:ok, x}
@type success :: success(any)
@type failure(reason) :: {:error, reason}
@type failure :: failure(any)
@type two_track(x, reason) :: success(x) | failure(reason)
@type two_track(x) :: two_track(x, any)
@type two_track_fun(a, b, c, d) :: (two_track(a, c) -> two_track(b, d))
@type two_track_fun(a, b) :: two_track_fun(a, b, any, any)
@type switch_fun(a, b) :: (a -> two_track(b))
@type chardata_or_fun :: Logger.message() | (() -> Logger.message())
@spec success(a) :: success(a) when a: any
def success(a), do: {:ok, a}
@spec failure(reason) :: failure(reason) when reason: term
def failure(reason), do: {:error, reason}
@spec switch((a -> b)) :: switch_fun(a, b) when a: any, b: any
def switch(f), do: &{:ok, f.(&1)}
@spec bind(switch_fun(a, b)) :: two_track_fun(a, b) when a: any, b: any
def bind(f), do: &bind(&1, f)
@spec bind(two_track(a), switch_fun(a, b)) :: two_track(b) when a: any, b: any
def bind({:error, reason}, _), do: {:error, reason}
def bind({:ok, a}, f), do: f.(a)
@spec map((a -> b)) :: two_track_fun(a, b) when a: any, b: any
def map(f), do: &map(&1, f)
@spec map(two_track(a), (a -> b)) :: two_track(b) when a: any, b: any
def map({:error, reason}, _), do: {:error, reason}
def map({:ok, a}, f), do: {:ok, f.(a)}
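# A small usage sketch of the two core combinators (values illustrative):
#
#   {:ok, 2}
#   |> Rails.map(&(&1 * 3))                                         # => {:ok, 6}
#   |> Rails.bind(fn n -> if n > 0, do: {:ok, n}, else: {:error, :neg} end)
#   # => {:ok, 6}; an {:error, _} at any step is passed through unchanged.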
@spec tee((a -> :ok)) :: (a -> a) when a: any
def tee(f), do: &tee(&1, f)
@spec tee(a, (a -> :ok)) :: a when a: any
def tee(a, f) do
_ = f.(a)
a
end
@spec safe((a -> b | no_return)) :: switch_fun(a, b) when a: any, b: any
def safe(f), do: &safe(&1, f)
@spec safe(a, (a -> b | no_return)) :: two_track(b) when a: any, b: any
def safe(a, f) do
f.(a)
rescue
reason -> {:error, reason}
catch
:exit, reason -> {:error, {:exit, reason}}
x -> {:error, {:caught, x}}
end
@spec supervise((a -> b), (c -> d)) :: two_track_fun(a, b, c, d)
when a: any, b: any, c: any, d: any
def supervise(on_success, on_fail), do: &supervise(&1, on_success, on_fail)
@spec supervise(two_track(a, c), (a -> b), (c -> d)) :: two_track(b, d)
when a: any, b: any, c: any, d: any
def supervise({:error, reason}, _on_success, on_fail), do: map({:error, reason}, on_fail)
def supervise({:ok, a}, on_success, _on_fail), do: map({:ok, a}, on_success)
@spec curry((... -> any), [...]) :: (any -> any)
def curry(f, _) when is_function(f, 0), do: fn _ -> f.() end
def curry(f, _) when is_function(f, 1), do: f
def curry(f, args) when is_function(f), do: &apply(f, [&1 | args])
@spec log(two_track(a), (a -> chardata_or_fun), keyword) :: two_track(a) when a: any
def log(two_track, message_fun, metadata \\ []) do
supervise(
two_track,
tee(&Logger.info(message_fun.(&1), metadata)),
tee(&Logger.error(fn -> error_message(&1) end, metadata))
)
end
@spec error_message(reason :: term) :: Logger.message()
def error_message(%{__exception__: true} = error) do
Exception.format(:error, error, [])
end
def error_message(reason) do
case String.Chars.impl_for(reason) do
nil -> inspect(reason)
otherwise -> to_string(otherwise)
end
end
end
| lib/rails.ex | 0.77949 | 0.461988 | rails.ex | starcoder |
defmodule Ockam.Messaging.PipeChannel.Simple do
@moduledoc """
Simple implementation of pipe channel.
Does not manage the session.
Requires a known address for the local pipe sender and the remote channel end.
It uses two addresses, one for inner and one for outer communication:
it forwards messages from the outer address to the sender and the remote channel,
and forwards messages from the inner address to the onward route, tracing its own outer address in the return route.
Options:
`sender` - address of the sender worker
`channel_route` - route from remote receiver to remote channel end
"""
use Ockam.AsymmetricWorker
alias Ockam.Message
alias Ockam.Router
@impl true
def inner_setup(options, state) do
sender = Keyword.fetch!(options, :sender)
channel_route = Keyword.fetch!(options, :channel_route)
{:ok, Map.merge(state, %{sender: sender, channel_route: channel_route})}
end
@impl true
def handle_inner_message(message, state) do
forward_inner(message, state)
{:ok, state}
end
@impl true
def handle_outer_message(message, state) do
forward_outer(message, state)
{:ok, state}
end
@doc false
## Inner message is forwarded with outer address in return route
def forward_inner(message, state) do
[_me | onward_route] = Message.onward_route(message)
return_route = Message.return_route(message)
payload = Message.payload(message)
Router.route(%{
onward_route: onward_route,
return_route: [state.address | return_route],
payload: payload
})
end
@doc false
## Outer message is forwarded through sender
## to other channel endpoints inner address
def forward_outer(message, state) do
channel_route = Map.fetch!(state, :channel_route)
[_me | onward_route] = Message.onward_route(message)
return_route = Message.return_route(message)
payload = Message.payload(message)
sender = Map.fetch!(state, :sender)
Router.route(%{
onward_route: [sender | channel_route ++ onward_route],
return_route: return_route,
payload: payload
})
end
end
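# Hedged sketch of a message flowing through this worker. The addresses and the remote
# worker name are illustrative assumptions; the channel end is assumed to have been
# created elsewhere with the `sender` and `channel_route` options described above.
Ockam.Router.route(%{
  onward_route: ["channel_outer_address", "remote_worker"],
  return_route: ["my_app"],
  payload: "hello"
})
# handle_outer_message/2 then re-routes the payload as:
#   onward_route: [sender | channel_route ++ ["remote_worker"]]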
|
implementations/elixir/ockam/ockam/lib/ockam/messaging/pipe_channel/simple.ex
| 0.875341
| 0.405566
|
simple.ex
|
starcoder
|
defmodule Grizzly.SwitchBinary do
@moduledoc """
Commands for working with devices that support the Switch Binary command class
"""
alias Grizzly.ZWave
alias Grizzly.ZWave.Command
alias Grizzly.ZWave.Commands.SwitchBinaryReport
@typedoc """
Optional parameters used when setting the switch state
- `:duration` - the duration that the transition from current state to target
state should take (version 2).
"""
@type set_opt() :: {:duration, non_neg_integer()}
@typedoc """
The value the switch's state can be set to
"""
@type set_value() :: :on | :off
@typedoc """
The report received after requesting the state of the switch using the
`get/1` function.
"""
@type report() :: %{
target_value: SwitchBinaryReport.value(),
current_value: SwitchBinaryReport.value() | nil,
duration: byte() | nil,
version: 1 | 2
}
@doc """
Request the current state of the switch
This command will return a `report()` in response.
"""
@spec get(ZWave.node_id(), [Grizzly.command_opt()]) ::
{:ok, report()}
| {:queued, reference(), non_neg_integer()}
| {:error, :timeout | :including | :updating_firmware | :nack_response | any()}
def get(node_id, command_opts \\ []) do
case Grizzly.send_command_no_warn(node_id, :switch_binary_get, [], command_opts) do
{:ok, %{type: :command} = report} ->
target_value = Command.param!(report.command, :target_value)
duration = Command.param(report.command, :duration)
current_value = Command.param(report.command, :current_value)
version = if duration, do: 2, else: 1
report = %{
current_value: current_value,
duration: duration,
target_value: target_value,
version: version
}
{:ok, report}
{:ok, %{type: :queued_delay} = report} ->
{:queued, report.command_ref, report.queued_delay}
{:ok, %{type: :timeout}} ->
{:error, :timeout}
{:error, _reason} = error ->
error
end
end
@doc """
Set the target value of the binary switch
Devices that support version 2 of the switch binary command class can
optionally be passed a duration that specifies how long the transition
from the current value to the target value should take.
"""
@spec set(ZWave.node_id(), set_value(), [set_opt() | Grizzly.command_opt()]) ::
:ok
| {:queued, reference(), non_neg_integer()}
| {:error, :timeout | :including | :updating_firmware | :nack_response | any()}
def set(node_id, target_value, opts \\ []) do
duration = Keyword.get(opts, :duration)
send_opts = Keyword.drop(opts, [:duration])
case Grizzly.send_command_no_warn(
node_id,
:switch_binary_set,
[target_value: target_value, duration: duration],
send_opts
) do
{:ok, %{type: :ack_response}} ->
:ok
{:ok, %{type: :queued_delay} = report} ->
{:queued, report.command_ref, report.queued_delay}
{:ok, %{type: :timeout}} ->
{:error, :timeout}
{:error, _reason} = error ->
error
end
end
end
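# Hedged usage sketch; the node id and duration value are illustrative assumptions.
:ok = Grizzly.SwitchBinary.set(4, :on, duration: 10)

case Grizzly.SwitchBinary.get(4) do
  {:ok, %{target_value: value, version: version}} -> {value, version}
  {:error, reason} -> reason
end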
|
lib/grizzly/switch_binary.ex
| 0.920576
| 0.425516
|
switch_binary.ex
|
starcoder
|
defmodule Bodyguard.Plug.Authorize do
@behaviour Plug
import Bodyguard.Utilities
@moduledoc """
Perform authorization in a Plug pipeline.
## Options
* `:policy` *required* - the policy (or context) module
* `:action` *required* - the action, or a getter
* `:user` - the user getter
* `:params` - the params, or a getter, to pass to the authorization callbacks
* `:fallback` - a fallback controller or plug to handle authorization
failure. If specified, the plug is called and then the pipeline is
`halt`ed. If not specified, then `Bodyguard.NotAuthorizedError` raises
directly to the router.
### Option Getters
The options `:action`, `:user`, and `:params` can accept getter functions that are either:
* an anonymous 1-arity function that accepts the `conn` and returns a value
* a `{module, function_name}` tuple specifying an existing function with that same signature
### Default Plug Options
You can provide default options for this plug by simply wrapping your own plug around it.
For example, if you're using Phoenix with Pow for authentication, you might want to specify:
defmodule MyAppWeb.Authorize do
def init(opts) do
opts
|> Keyword.put_new(:action, {Phoenix.Controller, :action_name})
|> Keyword.put_new(:user, {Pow.Plug, :current_user})
|> Bodyguard.Plug.Authorize.init()
end
def call(conn, opts) do
Bodyguard.Plug.Authorize.call(conn, opts)
end
end
## Examples
# Raise on failure
plug Bodyguard.Plug.Authorize,
policy: MyApp.Blog,
action: &action_name/1,
user: {MyApp.Authentication, :current_user}
# Fallback on failure
plug Bodyguard.Plug.Authorize,
policy: MyApp.Blog,
action: &action_name/1,
user: {MyApp.Authentication, :current_user},
fallback: MyAppWeb.FallbackController
# Params as a function
plug Bodyguard.Plug.Authorize,
policy: MyApp.Blog,
action: &action_name/1,
user: {MyApp.Authentication, :current_user},
params: &get_params/1
"""
def valid_getter?(fun) when is_function(fun, 1), do: true
def valid_getter?({module, fun}) when is_atom(module) and is_atom(fun), do: true
def valid_getter?(_), do: false
def init(opts \\ []) do
default_opts = Application.get_env(:bodyguard, __MODULE__, [])
opts = Keyword.merge(default_opts, opts)
policy = Keyword.get(opts, :policy)
action = Keyword.get(opts, :action)
user = Keyword.get(opts, :user)
params = Keyword.get(opts, :params, [])
fallback = Keyword.get(opts, :fallback)
# Policy must be defined
if is_nil(policy), do: raise(ArgumentError, "#{inspect(__MODULE__)} :policy option required")
# Action must be defined
if is_nil(action),
do:
raise(
ArgumentError,
"#{inspect(__MODULE__)} :action option is required"
)
# User can be nil or a getter function
unless is_nil(user) || valid_getter?(user),
do:
raise(
ArgumentError,
"#{inspect(__MODULE__)} :user option #{inspect(user)} is invalid"
)
unless is_nil(fallback) or is_atom(fallback),
do: raise(ArgumentError, "#{inspect(__MODULE__)} :fallback option must be a plug module")
# Plug 1.0 through 1.3.2 doesn't support returning maps from init/1
# See https://github.com/schrockwell/bodyguard/issues/52
{fallback,
[
policy: policy,
action: action,
user: user,
params: params
]}
end
def call(conn, {nil, opts}) do
Bodyguard.permit!(
opts[:policy],
resolve_param_or_callback(conn, opts[:action]),
resolve_param_or_callback(conn, opts[:user]),
resolve_param_or_callback(conn, opts[:params])
)
conn
end
def call(conn, {fallback, opts}) do
case Bodyguard.permit(
opts[:policy],
resolve_param_or_callback(conn, opts[:action]),
resolve_param_or_callback(conn, opts[:user]),
resolve_param_or_callback(conn, opts[:params])
) do
:ok ->
conn
error ->
conn
|> fallback.call(error)
|> Plug.Conn.halt()
end
end
end
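# Hedged sketch of a fallback module compatible with the `fallback.call(conn, error)`
# contract used above; the module name and response are assumptions.
defmodule MyAppWeb.AuthorizationFallback do
  import Plug.Conn

  # Receives the `{:error, reason}` tuple returned by `Bodyguard.permit/4`
  def call(conn, {:error, _reason}) do
    send_resp(conn, 403, "Forbidden")
  end
end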
|
lib/bodyguard/plug/authorize.ex
| 0.828627
| 0.44065
|
authorize.ex
|
starcoder
|
defmodule Mfga do
alias Mfga.{Generator, Genetics}
@chromosome_size Application.get_env(:mfga, :chromosome_length)
@chromosome_values Application.get_env(:mfga, :chromosome_values)
@population_size Application.get_env(:mfga, :population_size)
@mutation_chance Application.get_env(:mfga, :mutation_chance)
def run_simulation do
start_time = System.monotonic_time(:second)
goal = Generator.generate_random_sequence(@chromosome_size, @chromosome_values)
initial_population =
Generator.generate_chromosomes(@population_size, @chromosome_size, @chromosome_values)
next = run_iteration(initial_population, goal)
result = run_all_iterations(next, goal)
end_time = System.monotonic_time(:second)
Tuple.append(result, end_time - start_time)
end
# Runs all iterations until maximum fitness
defp run_all_iterations(next_pop, goal), do: run_all_iterations(0, next_pop, goal)
defp run_all_iterations(count, current_pop, goal) do
new_next = run_iteration(current_pop, goal)
max_fitness =
Genetics.add_fitness(new_next, goal)
|> get_max_fitness()
if max_fitness == @chromosome_size do
[fittest | _tail] =
Genetics.add_fitness(current_pop, goal)
|> sort_by_fitness()
print_solutions(count, goal, fittest)
else
run_all_iterations(count + 1, new_next, goal)
end
end
defp print_solutions(count, goal, previous_best), do: {count, goal, previous_best}
# Runs a single iteration
defp run_iteration(population, goal) do
result =
Genetics.add_fitness(population, goal)
|> sort_by_fitness()
|> survival_of_fittest()
|> remove_past_fitness()
children =
result
|> perform_crossover()
|> perform_mutations()
result ++
children ++
Generator.generate_chromosomes(
round(@population_size / 4),
@chromosome_size,
@chromosome_values
)
end
def get_max_fitness(population) do
Enum.max(Enum.map(population, fn {_chromosome, fitness} -> fitness end))
end
def remove_past_fitness(surviving_chromosomes) do
Enum.map(surviving_chromosomes, fn {chromosome, _fitness} -> chromosome end)
end
defp survival_of_fittest(population) do
half = round(length(population) / 2)
Enum.take(population, half)
end
def sort_by_fitness(population) do
Enum.sort(population, fn {_chromosome1, fitness1}, {_chromosome2, fitness2} ->
fitness1 >= fitness2
end)
end
defp perform_crossover(population) do
Enum.chunk_every(population, 2)
|> Enum.map(fn [chromosome1, chromosome2] ->
Genetics.crossover(chromosome1, chromosome2)
end)
end
defp perform_mutations(children_chromosomes) do
Enum.map(children_chromosomes, fn child ->
Genetics.mutate(child, @mutation_chance, @chromosome_values)
end)
end
end
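# Hedged sketch of the application environment this module reads at compile time;
# the concrete values are illustrative assumptions.
import Config

config :mfga,
  chromosome_length: 20,
  chromosome_values: [0, 1],
  population_size: 100,
  mutation_chance: 0.05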
|
lib/mfga.ex
| 0.70253
| 0.577614
|
mfga.ex
|
starcoder
|
defmodule GoogleCerts.CertificateDecodeException do
defexception message: "Could not decode certificate"
end
defmodule GoogleCerts.Certificate do
@moduledoc """
Struct to associate a `kid` to a certificate map.
kid is the id and cert can either be a map with a pem or a JWK map
version 1 cert is `%{"pem" => "-----BEGIN CERTIFICATE----- ..."}`
version 3 cert is `%{"kid" => "53c66aab5...". "e" => "AQAB", ...}`
"""
alias GoogleCerts.{Certificate, CertificateDecodeException}
@derive Jason.Encoder
defstruct kid: nil, cert: nil
@type t(kid, cert) :: %Certificate{kid: kid, cert: cert}
@type t :: %Certificate{kid: String.t(), cert: map()}
@spec decode!(map()) :: t | no_return
def decode!(%{"kid" => kid, "cert" => cert}) do
%__MODULE__{kid: kid, cert: cert}
end
def decode!(cert) do
raise CertificateDecodeException,
message: """
Could not decode certificate
Cert must have the following string keys: ["kid", "cert"]
Provided certificate: #{inspect(cert)}
"""
end
end
defmodule GoogleCerts.Certificates do
@moduledoc """
Struct that holds a list of Google.Oauth2.Certificate structs
with their expiration time algorithm and version
"""
alias GoogleCerts.{Certificate, CertificateDecodeException, Certificates}
@derive Jason.Encoder
defstruct certs: [], expire: nil, algorithm: "RS256", version: 1
@type t(certs, expire, algorithm, version) :: %Certificates{
certs: certs,
expire: expire,
algorithm: algorithm,
version: version
}
@type t :: %Certificates{
certs: list(Certificate.t()),
expire: DateTime.t(),
algorithm: String.t(),
version: integer
}
@doc """
Returns `true` if `expire` is at or before the current UTC time.
"""
@spec expired?(Certificates.t()) :: boolean
def expired?(%__MODULE__{expire: %DateTime{} = expire}) do
DateTime.compare(DateTime.utc_now(), expire) != :lt
end
def expired?(_), do: true
@spec set_expiration(Certificates.t(), DateTime.t()) :: Certificates.t()
def set_expiration(struct = %__MODULE__{}, expiration) do
%__MODULE__{struct | expire: expiration}
end
@spec set_version(Certificates.t(), integer) :: Certificates.t()
def set_version(struct = %__MODULE__{}, version) do
%__MODULE__{struct | version: version}
end
@spec add_cert(Certificates.t(), String.t(), map) :: Certificates.t()
def add_cert(struct = %__MODULE__{certs: certs, version: 1}, kid, cert) do
%__MODULE__{
struct
| certs: [%Certificate{kid: kid, cert: %{"pem" => cert}} | certs]
}
end
def add_cert(struct = %__MODULE__{certs: certs, version: 3}, kid, cert) do
%__MODULE__{
struct
| certs: [%Certificate{kid: kid, cert: cert} | certs],
algorithm: Map.get(cert, "alg")
}
end
@doc """
Returns a `GoogleCerts.Certificate` for a given kid that is in `certs`
"""
@spec find(Certificates.t(), String.t()) :: Certificate.t()
def find(%__MODULE__{certs: certs}, kid) do
Enum.find(certs, fn %Certificate{kid: id} -> id == kid end)
end
@doc """
Returns a `GoogleCerts.Certificates` from the provided json or raw elixir map
"""
@spec decode!(String.t() | map) :: Certificates.t() | no_return
def decode!(json) when is_bitstring(json), do: json |> Jason.decode!() |> decode!()
def decode!(%{
"algorithm" => algorithm,
"certs" => certs,
"expire" => expire,
"version" => version
}) do
{:ok, expire, 0} = DateTime.from_iso8601(expire)
%__MODULE__{
certs: Enum.map(certs, &Certificate.decode!/1),
expire: expire,
algorithm: algorithm,
version: version
}
end
def decode!(arg) do
raise CertificateDecodeException,
message: "The provided arg does not conform to the required structure. arg: #{inspect(arg)}"
end
end
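# Hedged usage sketch; the certificate payload below is an illustrative assumption
# shaped like the version 3 (JWK) format described in the moduledocs above.
certs =
  GoogleCerts.Certificates.decode!(%{
    "algorithm" => "RS256",
    "version" => 3,
    "expire" => "2030-01-01T00:00:00Z",
    "certs" => [%{"kid" => "53c66aab5", "cert" => %{"kid" => "53c66aab5", "alg" => "RS256"}}]
  })

false = GoogleCerts.Certificates.expired?(certs)
%GoogleCerts.Certificate{kid: "53c66aab5"} = GoogleCerts.Certificates.find(certs, "53c66aab5")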
|
lib/google_certs/certificates.ex
| 0.898282
| 0.453443
|
certificates.ex
|
starcoder
|
defmodule ExPlasma.Output.Type.PaymentV1 do
@moduledoc """
Payment V1 Output Type.
"""
@behaviour ExPlasma.Output
alias ExPlasma.Output
@type address() :: <<_::160>>
@type output_guard() :: address()
@type token() :: address()
@type amount() :: non_neg_integer()
@type rlp() :: [output_guard() | [token() | amount()]]
@type validation_responses() :: {:ok, t()}
@type t() :: %{
output_guard: output_guard(),
token: token(),
amount: amount()
}
@zero_address <<0::160>>
@doc """
Encode a map of the output data into an RLP list.
## Example
iex> output = %{output_type: 1, output_data: %{output_guard: <<1::160>>, token: <<1::160>>, amount: 1}}
iex> ExPlasma.Output.Type.PaymentV1.to_rlp(output)
[<<1>>, [<<1::160>>, <<1::160>>, <<1>>]]
"""
@impl Output
@spec to_rlp(Output.t()) :: rlp()
def to_rlp(%{output_type: type, output_data: data}) do
[
<<type>>,
[
data.output_guard,
data.token,
truncate_leading_zero(<<data.amount::integer-size(256)>>)
]
]
end
@doc """
Decode a map of the output data into the Payment V1 format:
## Example
iex> data = [<<1>>, [<<1::160>>, <<1::160>>, <<1>>]]
iex> ExPlasma.Output.Type.PaymentV1.to_map(data)
%{
output_type: 1,
output_data: %{output_guard: <<1::160>>, token: <<1::160>>, amount: 1}
}
"""
@impl Output
@spec to_map([<<_::8>> | [any(), ...], ...]) :: %{
:output_data => %{:amount => non_neg_integer(), :output_guard => any(), :token => any()},
:output_type => byte()
}
def to_map([<<output_type>>, [output_guard, token, amount]]) do
%{
output_type: output_type,
output_data: %{output_guard: output_guard, token: token, amount: :binary.decode_unsigned(amount, :big)}
}
end
@doc """
Validates the output data
## Example
iex> data = %{output_data: %{output_guard: <<1::160>>, token: <<0::160>>, amount: 1}}
iex> {:ok, resp} = ExPlasma.Output.Type.PaymentV1.validate(data)
{:ok, %{output_guard: <<1::160>>, token: <<0::160>>, amount: 1}}
"""
@impl Output
@spec validate(Output.t()) :: validation_responses()
def validate(%{output_data: data}) do
case do_validate([data.output_guard, data.token, data.amount]) do
{field, value} ->
{:error, {field, value}}
:ok ->
{:ok, data}
end
end
defp do_validate([_output_guard, _token, nil]), do: {:amount, :cannot_be_nil}
defp do_validate([_output_guard, _token, amount]) when amount <= 0,
do: {:amount, :cannot_be_zero}
defp do_validate([_output_guard, nil, _amount]), do: {:token, :cannot_be_nil}
defp do_validate([nil, _token, _amount]), do: {:output_guard, :cannot_be_nil}
defp do_validate([@zero_address, _token, _amount]), do: {:output_guard, :cannot_be_zero}
defp do_validate([_, _, _]), do: :ok
defp truncate_leading_zero(<<0>>), do: <<0>>
defp truncate_leading_zero(<<0>> <> binary), do: truncate_leading_zero(binary)
defp truncate_leading_zero(binary), do: binary
end
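# Hedged sketch of the validation error path: a zero output_guard is rejected by
# do_validate/1 above.
{:error, {:output_guard, :cannot_be_zero}} =
  ExPlasma.Output.Type.PaymentV1.validate(%{
    output_data: %{output_guard: <<0::160>>, token: <<0::160>>, amount: 1}
  })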
|
lib/ex_plasma/output/type/payment_v1.ex
| 0.834811
| 0.452113
|
payment_v1.ex
|
starcoder
|
defmodule Genex.Tools.Selection do
@moduledoc """
Implementation of several popular selection methods.
Selection occurs in two stages in Genex: parent selection and survivor selection. Parent Selection dictates which chromosomes are to be reserved for crossover according to some crossover rate. In this stage, a number of chromosomes are selected and paired off in 2-tuples in the order they are selected. Future versions of Genex will provide more advanced methods of parent selection.
Survivor Selection occurs last in the GA cycle. As of this version of Genex, the survivor rate is always equal to `1 - CR` where CR is the crossover rate. Future versions will support more advanced survivor selection, including the ability to fluctuate the population according to some operators.
"""
@doc """
Natural selection of some number of chromosomes.
This will select the `n` best (fittest) chromosomes.
Returns `Enum.t`.
# Parameters
- `chromosomes`: `Enum` of `Chromosomes`.
- `n`: Number of chromosomes to select.
"""
@spec natural(Enum.t(), integer()) :: Enum.t()
def natural(chromosomes, n) do
chromosomes
|> Enum.take(n)
end
@doc false
def natural, do: &natural(&1, &2)
@doc """
Worst selection of some number of chromosomes.
This will select the `n` worst (least fit) chromosomes.
Returns `Enum.t`.
# Parameters
- `chromosomes`: `Enum` of `Chromosomes`.
- `n`: Number of chromosomes to select.
"""
@spec worst(Enum.t(), integer()) :: Enum.t()
def worst(chromosomes, n) do
chromosomes
|> Enum.reverse()
|> Enum.take(n)
end
@doc false
def worst, do: &worst(&1, &2)
@doc """
Random selection of some number of chromosomes.
This will select `n` random chromosomes.
Returns `Enum.t`.
# Parameters
- `chromosomes`: `Enum` of `Chromosomes`.
- `n`: Number of chromosomes to select.
"""
@spec random(Enum.t(), integer) :: Enum.t()
def random(chromosomes, n) do
chromosomes
|> Enum.take_random(n)
end
@doc false
def random, do: &random(&1, &2)
@doc """
Tournament selection of some number of chromosomes.
This will select `n` chromosomes from tournaments of size `k`. We randomly select `k` chromosomes from the population and choose the max to be in the tournament.
Returns `Enum.t()`.
# Parameters
- `chromosomes`: `Enum` of `Chromosomes`.
- `n`: Number of chromosomes to select.
- `tournsize`: The size of the tournament to run.
"""
@spec tournament(Enum.t(), integer(), integer()) :: Enum.t()
def tournament(chromosomes, n, tournsize) do
0..(n - 1)
|> Enum.map(fn _ ->
chromosomes
|> Enum.take_random(tournsize)
|> Enum.max_by(& &1.fitness)
end)
end
@doc false
def tournament(tournsize: tournsize), do: &tournament(&1, &2, tournsize)
@doc """
Roulette selection of some number of chromosomes.
This will select `n` chromosomes using a "roulette" wheel where the probability of a chromosome being selected is proportional to its fitness.
Returns `Enum.t()`.
# Parameters
- `chromosomes`: `Enum` of `Chromosomes`.
- `n`: Number of chromosomes to select.
"""
@spec roulette(Enum.t(), integer()) :: Enum.t()
def roulette(chromosomes, n) do
sum_fitness =
chromosomes
|> Enum.reduce(0, fn x, acc -> acc + x.fitness end)
0..(n - 1)
|> Enum.map(fn _ ->
u = :rand.uniform() * sum_fitness
chromosomes
|> Enum.reduce_while(
0,
fn x, sum ->
if x.fitness + sum > u do
{:halt, x}
else
{:cont, x.fitness + sum}
end
end
)
end)
end
@doc false
def roulette, do: &roulette(&1, &2)
@doc """
Stochastic Universal Sampling of chromosomes.
This will sample all of the chromosomes without bias, choosing them at evenly spaced intervals.
Returns `Enum.t()`.
# Parameters
- `chromosomes`: `Enum` of `Chromosomes`.
- `n`: Number of chromosomes to select.
"""
@spec stochastic_universal_sampling(Enum.t(), integer()) :: Enum.t()
def stochastic_universal_sampling(chromosomes, n) do
sum_fitness =
chromosomes
|> Enum.reduce(0, fn x, acc -> acc + x.fitness end)
p = sum_fitness / n
start = p * :rand.uniform()
pointers = for i <- 0..(n - 1), do: start + i * p
pointers
|> Enum.map(fn x ->
chromosomes
|> Enum.reduce_while(
0,
fn y, sum ->
if y.fitness + sum >= x do
{:halt, y}
else
{:cont, y.fitness + sum}
end
end
)
end)
end
@doc false
def stochastic_universal_sampling, do: &stochastic_universal_sampling(&1, &2)
@doc false
def boltzmann, do: :ok
@doc false
def rank, do: :ok
@doc false
def double_tournament, do: :ok
@doc false
def tournament_dcd, do: :ok
@doc false
def lexicase, do: :ok
@doc false
def epsilon_lexicase, do: :ok
@doc false
def automatic_epsilon_lexicase, do: :ok
end
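# Hedged usage sketch; chromosomes are assumed to be maps (or structs) exposing a
# `:fitness` field, which is all the selection functions above rely on.
population = [%{fitness: 10}, %{fitness: 7}, %{fitness: 1}]

Genex.Tools.Selection.natural(population, 2)
#=> [%{fitness: 10}, %{fitness: 7}] (assumes the population is already sorted by fitness)

Genex.Tools.Selection.tournament(population, 2, 2)
#=> two winners, each the fittest of a random 2-chromosome tournament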
|
lib/genex/tools/selection.ex
| 0.918604
| 0.872836
|
selection.ex
|
starcoder
|
defmodule Rihanna.Supervisor do
use Supervisor
@moduledoc """
The main supervisor for Rihanna.
Starts the Postgrex process necessary for enqueueing jobs, and also starts a
dispatcher for processing them.
## Setup
Add `Rihanna.Supervisor` to your supervision tree.
By adding it to your supervision tree it will automatically start running jobs
when your app boots.
Rihanna requires a database configuration to be passed in under the `postgrex`
key. This is passed through directly to Postgrex.
If you are already using Ecto you can avoid duplicating your DB config by
pulling this out of your existing Repo using `My.Repo.config()`.
```
# NOTE: In Phoenix you would find this inside `lib/my_app/application.ex`
children = [
{Rihanna.Supervisor, [name: Rihanna.Supervisor, postgrex: My.Repo.config()]}
]
```
"""
def start_link(config, opts \\ []) do
case Keyword.pop_first(config, :postgrex) do
{nil, _} ->
raise """
Could not start Rihanna - database configuration was missing. Did you forget to pass postgres configuration into Rihanna.Supervisor?
For example:
children = [
{Rihanna.Supervisor, [postgrex: %{username: "postgres", password: "<PASSWORD>", database: "rihanna_db", hostname: "localhost", port: 5432}]}
]
"""
{db, config} ->
Supervisor.start_link(__MODULE__, Keyword.merge(config, [db: db]), opts)
end
end
@doc false
def init(config) do
children =
[
producer_postgres_connection(Keyword.get(config, :db)),
{Task.Supervisor, name: Rihanna.TaskSupervisor},
%{
id: Rihanna.JobDispatcher,
start: {Rihanna.JobDispatcher, :start_link, [config, [name: Rihanna.JobDispatcher]]}
}
]
|> Enum.filter(& &1)
Supervisor.init(children, strategy: :one_for_one)
end
defp producer_postgres_connection(db) do
unless(Rihanna.Config.producer_postgres_connection_supplied?()) do
%{
id: Rihanna.Job.Postgrex,
start: {Postgrex, :start_link, [Keyword.put(db, :name, Rihanna.Job.Postgrex)]}
}
end
end
end
|
lib/rihanna/supervisor.ex
| 0.750461
| 0.651161
|
supervisor.ex
|
starcoder
|
defmodule Taxes.Organizer do
@moduledoc """
Module with methods to group taxes by markers
"""
alias Taxes.Types
@doc """
Wraps the list of taxes into a tree structure with `:inclusive` and `:exclusive` keys at the top level.
This modifies the incoming `payload` structure by adding two new keys.
"""
@spec group_taxes(Types.payload()) :: Types.payload()
def group_taxes(%{taxes: taxes} = payload) do
Map.merge(
payload,
taxes
|> build_tree_by_level()
|> build_taxes_tree()
|> group_taxes_by(:is_inclusive)
|> group_taxes_by(:logic)
)
end
def group_taxes(payload), do: payload
@doc """
Recursively builds the tax tree based on the `:level` option.
"""
def build_tree_by_level(taxes) do
taxes_by_level = Enum.group_by(taxes, &Map.get(&1, :level, 0))
taxes =
taxes
|> Enum.map(&Map.get(&1, :level, 0))
|> Enum.uniq()
|> Enum.sort()
|> Enum.reverse()
|> Enum.reduce(nil, fn level, acc ->
taxes_for_level = acc || Map.get(taxes_by_level, level, [])
higher_level_taxes = Map.get(taxes_by_level, level - 1, [])
case higher_level_taxes do
[] -> taxes_for_level
taxes ->
taxes |> Enum.map(fn tax -> Map.put(tax, :taxes, tax.taxes ++ taxes_for_level) end)
end
end)
taxes || []
end
@doc """
Recursively builds the tax tree.
"""
@spec build_taxes_tree([Types.tax()]) :: [Types.tax()]
def build_taxes_tree(taxes) do
Enum.map(taxes, &wrap_child_taxes_into_tree(&1))
end
@doc """
Wraps nested taxes into a tree.
"""
@spec wrap_child_taxes_into_tree(Types.tax()) :: Types.tax()
def wrap_child_taxes_into_tree(%{taxes: taxes} = tax) do
Map.put(
tax,
:taxes,
taxes
|> build_taxes_tree()
|> group_taxes_by(:logic)
)
end
def wrap_child_taxes_into_tree(tax), do: tax
@doc """
Groups taxes by the provided marker.
"""
@spec group_taxes_by([Types.tax()] | map(), atom() | String.t()) :: map()
def group_taxes_by(taxes, mark) when is_map(taxes) do
Enum.reduce(
taxes,
%{},
fn {group, taxes}, acc -> Map.put(acc, group, group_taxes_by(taxes, mark)) end
)
end
def group_taxes_by(taxes, mark) when is_list(taxes) do
Enum.group_by(taxes, &get_mark_value(&1, mark))
end
@doc """
Fetches the marker value from a tax.
"""
@spec get_mark_value(Types.tax(), atom() | String.t()) :: any()
def get_mark_value(%{is_inclusive: true}, :is_inclusive), do: :inclusive
def get_mark_value(%{is_inclusive: false}, :is_inclusive), do: :exclusive
def get_mark_value(tax, field), do: Map.fetch!(tax, field)
@doc """
Converts tax rates into floats.
"""
@spec convert_taxes_rate(map) :: map
def convert_taxes_rate(%{taxes: taxes, exponent: exponent} = args) when is_map(args) do
Map.put(
args,
:taxes,
convert_tax_rate(taxes, exponent)
)
end
def convert_tax_rate(taxes, exponent) when is_list(taxes) do
Enum.map(taxes, &convert_tax_rate(&1, exponent))
end
def convert_tax_rate(%{rate: rate, logic: :percent} = tax, exponent) when is_integer(rate) do
tax
|> Map.put(:rate, rate / :math.pow(10, 2))
|> Map.put(:taxes, convert_tax_rate(Map.get(tax, :taxes, []), exponent))
end
def convert_tax_rate(%{rate: rate} = tax, exponent) when is_integer(rate) do
tax
|> Map.put(:rate, rate / :math.pow(10, exponent))
|> Map.put(:taxes, convert_tax_rate(Map.get(tax, :taxes, []), exponent))
end
def convert_tax_rate(%{rate: rate} = tax, exponent) when is_binary(rate) do
tax
|> Map.put(:rate, String.to_float(rate))
|> Map.put(:taxes, convert_tax_rate(Map.get(tax, :taxes, []), exponent))
end
def convert_tax_rate(tax, _) when is_nil(tax), do: nil
def convert_tax_rate(tax, _), do: tax
end
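# Hedged sketch of grouping; the tax maps below only carry fields the functions above
# actually read (`:is_inclusive`, `:logic`, `:taxes`, optional `:level`) and are assumptions.
payload = %{
  taxes: [
    %{name: "VAT", is_inclusive: true, logic: :percent, rate: 2000, taxes: []},
    %{name: "CityTax", is_inclusive: false, logic: :percent, rate: 500, taxes: []}
  ]
}

Taxes.Organizer.group_taxes(payload)
#=> payload with `:inclusive` and `:exclusive` keys added, each grouping its taxes by `:logic`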
|
lib/taxes/organizer.ex
| 0.8498
| 0.696436
|
organizer.ex
|
starcoder
|
defmodule Monok do
@moduledoc """
#### _Monad on :ok_
Provides the infix pipe operators `~>`, `~>>`, and `<~>` for writing clean pipelines that treat `{:ok, result}`
and `{:error, reason}` tuples like functors, monads or applicatives.
Also provides the functions `fmap`, `bind` and `lift`, which are functionally identical but less cryptic and
can be used without overriding any infix operators that could potentially conflict with other libraries.
## Why would you ever do this?
Whilst writing unnecessary macros and overriding infix operators are both generally considered bad practice, I
thought I'd try this out given just how frequently `{:ok, result}` and `{:error, reason}` tuples are encountered
in Elixir.
## Functor Pipelines
Allows you to write clean pipelines that transforms values inside of `{:ok, value}` tuples.
```
iex> {:ok, [1, 2, 3]}
...> ~> Enum.sum()
...> ~> div(2)
{:ok, 3}
```
If the input is an `{:error, reason}` tuple it is carried through the pipeline without applying any
transformations.
```
iex> {:error, :reason}
...> ~> Enum.sum()
...> ~> div(2)
{:error, :reason}
```
## Monad Pipelines
Allows you to write clean pipelines that transform values in `{:ok, value}` tuples with functions that also
return `{:ok, value}` tuples.
```
iex> decrement = fn
...> x when x > 0 -> {:ok, x - 1}
...> _ -> {:error, :input_too_small}
...> end
iex> {:ok, 3}
...> ~>> decrement.()
...> ~>> decrement.()
{:ok, 1}
```
If at any point in the pipeline an `{:error, reason}` tuple is returned it is carried through without
any of the transformation functions being applied.
```
iex> decrement = fn
...> x when x > 0 -> {:ok, x - 1}
...> _ -> {:error, :input_too_small}
...> end
iex>
...> {:ok, 3}
...> ~>> (fn _ -> {:error, :contrived_example} end).()
...> ~>> decrement.()
...> ~>> decrement.()
{:error, :contrived_example}
```
## Mixed Pipelines
These pipe operators don't have to be used in separate pipelines but can be used together or even with the `|>`
standard pipe operator.
```
iex> 7
...> |> (&(if &1 > 5, do: {:ok, &1}, else: {:error, :too_low})).()
...> ~> Integer.to_string()
...> ~>> (&(if &1 |> String.length() > 0, do: {:ok, &1 <> "!"}, else: {:error, :empty_string})).()
{:ok, "7!"}
```
"""
@doc """
Applies a function to a value wrapped in an ok tuple, has no effect if given an error tuple.
## Examples
iex> {:ok, [1, 2, 3]}
...> |> Monok.fmap(&Enum.sum/1)
{:ok, 6}
iex> {:error, :reason}
...> |> Monok.fmap(&Enum.sum/1)
{:error, :reason}
"""
def fmap(value_tuple, function)
def fmap({:ok, value}, function) do
{:ok, function.(value)}
end
def fmap({:error, reason}, _function) do
{:error, reason}
end
@doc """
Applies a function wrapped in an :ok tuple to a value wrapped in an :ok tuple.
Carries through an :error tuple if either the value or function arguments are given as :error tuples instead of :ok tuples.
## Examples
iex> {:ok, [1, 2, 3]}
...> |> Monok.lift({:ok, &Enum.sum/1})
{:ok, 6}
iex> {:ok, 1}
...> |> Monok.lift({:error, :reason})
{:error, :reason}
iex> {:error, :reason}
...> |> Monok.lift({:ok, &Enum.sum/1})
{:error, :reason}
"""
def lift(value_tuple, function_tuple)
def lift({:ok, value}, {:ok, function}) do
{:ok, function.(value)}
end
def lift({:error, reason}, _function_tuple) do
{:error, reason}
end
def lift(_value_tuple, {:error, reason}) do
{:error, reason}
end
@doc """
Applies a function that returns a value wrapped in an :ok tuple to a value wrapped in an :ok tuple.
Carries through an :error tuple if either the value argument is given as an :error tuple or the function returns an
:error tuple when applied to the value.
## Examples
iex> {:ok, [1, 2, 3]}
...> |> Monok.bind(fn x -> {:ok, Enum.sum(x)} end)
{:ok, 6}
iex> {:ok, [1, 2, 3]}
...> |> Monok.bind(fn _ -> {:error, :reason} end)
{:error, :reason}
iex> {:error, :reason}
...> |> Monok.bind(fn x -> {:ok, Enum.sum(x)} end)
{:error, :reason}
"""
def bind(value_tuple, function)
def bind({:ok, value}, function) do
function.(value)
end
def bind({:error, reason}, _function) do
{:error, reason}
end
@doc """
Infix fmap operator.
Treats the input value_tuple as a functor.
Applies a function to a value wrapped in an ok tuple, has no effect if given an error tuple.
## Examples
iex> {:ok, [1, 2, 3]}
...> ~> Enum.sum()
{:ok, 6}
iex> {:error, :reason}
...> ~> Enum.sum()
{:error, :reason}
"""
defmacro quote_value_tuple ~> {function, metadata, call_args} do
quote generated: true do
case unquote(quote_value_tuple) do
{:ok, value} -> {:ok, unquote({function, metadata, [quote(do: value) | call_args]})}
other -> other
end
end
end
@doc """
Infix lift operator.
Treats the function_tuple as an applicative.
Applies a function wrapped in an :ok tuple to a value wrapped in an :ok tuple.
Carries through an :error tuple if either the value or function arguments are given as :error tuples instead of :ok tuples.
## Examples
iex> {:ok, [1, 2, 3]}
...> <~> {:ok, &Enum.sum/1}
{:ok, 6}
iex> {:ok, 1}
...> <~> {:error, :reason}
{:error, :reason}
iex> {:error, :reason}
...> <~> {:ok, &Enum.sum/1}
{:error, :reason}
"""
def value_tuple <~> function_tuple do
value_tuple |> lift(function_tuple)
end
@doc """
Infix bind operator.
Treats the value_tuple and tuple_function as monads.
Applies a function that returns a value wrapped in an :ok tuple to a value wrapped in an :ok tuple.
Carries through an :error tuple if either the value argument is given as an :error tuple or the function returns an
:error tuple when applied to the value.
## Examples
iex> {:ok, [1, 2, 3]}
...> ~>> (fn x -> {:ok, Enum.sum(x)} end).()
{:ok, 6}
iex> {:ok, [1, 2, 3]}
...> ~>> (fn _ -> {:error, :reason} end).()
{:error, :reason}
iex> {:error, :reason}
...> ~>> (fn x -> {:ok, Enum.sum(x)} end).()
{:error, :reason}
"""
defmacro quote_value_tuple ~>> {function, metadata, call_args} do
quote generated: true do
case unquote(quote_value_tuple) do
{:ok, value} -> unquote({function, metadata, [quote(do: value) | call_args]})
other -> other
end
end
end
end
|
lib/monok.ex
| 0.911687
| 0.87213
|
monok.ex
|
starcoder
|
defmodule Razorpay.Payment do
import Razorpay
@derive {Poison.Encoder, except: [:entity]}
defstruct [:id, :amount, :currency, :status, :method, :description,
:refund_status, :amount_refunded, :email, :contact, :fee, :service_tax,
:error_code, :error_description, :notes, :order_id, :created_at]
@type t :: %__MODULE__{id: binary, amount: integer, currency: binary,
status: binary, method: binary, description: binary, refund_status: binary,
amount_refunded: integer, email: binary, contact: binary, fee: integer,
service_tax: integer, error_code: binary, error_description: binary,
notes: map, created_at: binary}
@endpoint "/payments"
@spec all(Keyword.t) :: {:ok, Razorpay.Collection.t(t)} | Razorpay.Error.type
def all(opts \\ []) do
request(:get, @endpoint, opts)
|> process_response(%Razorpay.Collection{items: [%__MODULE__{}]})
end
@spec get(binary, Keyword.t) :: {:ok, t} | Razorpay.error
def get(id, opts \\ []) do
request(:get, "#{@endpoint}/#{id}", opts)
|> process_response(%__MODULE__{})
end
def capture(id, amount, opts \\ [])
@spec capture(binary, integer, Keyword.t) :: {:ok, t} | Razorpay.Error.type
def capture(id, amount, opts) when is_binary(id) do
opts = [{:amount, amount} | opts]
request(:post, "#{@endpoint}/#{id}/capture", opts)
|> process_response(%__MODULE__{})
end
@spec capture(t, integer, Keyword.t) :: {:ok, t} | Razorpay.Error.type
def capture(payment, amount, opts) when is_map(payment) do
id = payment.id
opts = [{:amount, amount} | opts]
request(:post, "#{@endpoint}/#{id}/capture", opts)
|> process_response(%__MODULE__{})
end
def refund(id, amount \\ nil, opts \\ [])
@spec refund(binary, integer, Keyword.t) :: {:ok, Razorpay.Refund.t} | Razorpay.Error.type
def refund(id, amount, opts) when is_binary(id) do
opts = if amount, do: [{:amount, amount} | opts], else: opts
request(:post, "#{@endpoint}/#{id}/refund", opts)
|> process_response(%Razorpay.Refund{})
end
@spec refund(t, integer, Keyword.t) :: {:ok, Razorpay.Refund.t} | Razorpay.Error.type
def refund(payment, amount, opts) when is_map(payment) do
id = payment.id
opts = if amount, do: [{:amount, amount} | opts], else: opts
request(:post, "#{@endpoint}/#{id}/refund", opts)
|> process_response(%Razorpay.Refund{})
end
def refunds(id, opts \\ [])
@spec refunds(binary, Keyword.t) :: {:ok, Razorpay.Collection.t(Razorpay.Refund.t)} | Razorpay.Error.type
def refunds(id, opts) when is_binary(id) do
request(:get, "#{@endpoint}/#{id}/refunds", opts)
|> process_response(%Razorpay.Collection{items: [%Razorpay.Refund{}]})
end
@spec refunds(t, Keyword.t) :: {:ok, Razorpay.Collection.t(Razorpay.Refund.t)} | Razorpay.Error.type
def refunds(payment, opts) when is_map(payment) do
id = payment.id
request(:get, "#{@endpoint}/#{id}/refunds", opts)
|> process_response(%Razorpay.Collection{items: [%Razorpay.Refund{}]})
end
def get_refund(payment_id, refund_id, opts \\ [])
@spec get_refund(binary, binary, Keyword.t) :: {:ok, Razorpay.Refund.t} | Razorpay.Error.type
def get_refund(id, refund_id, opts) when is_binary(id) do
request(:get, "#{@endpoint}/#{id}/refunds/#{refund_id}", opts)
|> process_response(%Razorpay.Refund{})
end
@spec get_refund(t, binary, Keyword.t) :: {:ok, Razorpay.Refund.t} | Razorpay.Error.type
def get_refund(payment, refund_id, opts) when is_map(payment) do
id = payment.id
request(:get, "#{@endpoint}/#{id}/refunds/#{refund_id}", opts)
|> process_response(%Razorpay.Refund{})
end
end
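# Hedged usage sketch; the payment id and refund amount are illustrative assumptions,
# and the calls require valid Razorpay API credentials to succeed.
{:ok, payment} = Razorpay.Payment.get("pay_29QQoUBi66xm2f")
{:ok, _refund} = Razorpay.Payment.refund(payment, 100)
{:ok, _refunds} = Razorpay.Payment.refunds(payment)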
|
lib/razorpay/payment.ex
| 0.588771
| 0.430028
|
payment.ex
|
starcoder
|
defmodule Membrane.RTP.InboundPacketTracker do
@moduledoc """
Module responsible for tracking statistics of incoming RTP packets for a single stream.
Tracker is capable of repairing packets' sequence numbers provided that it has information about how many packets has
been previously discarded. To updated number of discarded packets one should send an event `Membrane.RTP.PacketsDiscarded.t/0` that will accumulate
the total number of discarded packets and will subtract that number from the packet's sequence number.
"""
use Membrane.Filter
alias Membrane.{Buffer, RTP, Time}
alias Membrane.RTCP.ReceiverReport
require Bitwise
@max_dropout 3000
@max_unordered 3000
@max_seq_num Bitwise.bsl(1, 16) - 1
@max_s24_val Bitwise.bsl(1, 23) - 1
@min_s24_val -Bitwise.bsl(1, 23)
def_input_pad :input, caps: :any, demand_mode: :auto
def_output_pad :output, caps: :any, demand_mode: :auto
def_options clock_rate: [
type: :integer,
spec: Membrane.RTP.clock_rate_t()
],
repair_sequence_numbers?: [
spec: boolean(),
default: true,
description: "Defines if tracker should try to repair packet's sequence number"
]
defmodule State do
@moduledoc false
@type t :: %__MODULE__{
clock_rate: non_neg_integer(),
repair_sequence_numbers?: boolean(),
jitter: float(),
transit: non_neg_integer() | nil,
received: non_neg_integer(),
discarded: non_neg_integer(),
cycles: non_neg_integer(),
base_seq: non_neg_integer(),
max_seq: non_neg_integer(),
received_prior: non_neg_integer(),
expected_prior: non_neg_integer(),
lost: non_neg_integer(),
fraction_lost: float()
}
@enforce_keys [:clock_rate, :repair_sequence_numbers?]
defstruct @enforce_keys ++
[
jitter: 0.0,
transit: nil,
received: 0,
discarded: 0,
cycles: 0,
base_seq: nil,
max_seq: nil,
received_prior: 0,
expected_prior: 0,
lost: 0,
fraction_lost: 0.0
]
end
@impl true
def handle_init(opts) do
{:ok,
%State{clock_rate: opts.clock_rate, repair_sequence_numbers?: opts.repair_sequence_numbers?}}
end
@impl true
def handle_process(:input, buffer, _ctx, %State{cycles: cycles, max_seq: max_seq} = state) do
seq_num = buffer.metadata.rtp.sequence_number
max_seq = max_seq || seq_num - 1
delta = rem(seq_num - max_seq + @max_seq_num + 1, @max_seq_num + 1)
cond do
# greater sequence number but within dropout to ensure that it is not from previous cycle
delta < @max_dropout ->
state =
state
|> update_sequence_counters(seq_num, max_seq, cycles)
|> update_received()
|> update_jitter(buffer)
{{:ok, buffer: {:output, repair_sequence_number(buffer, state)}}, state}
# the packet is either too old or too new
delta <= @max_seq_num - @max_unordered ->
{:ok, update_received(state)}
# packet is late (out of order) but within the acceptable misorder threshold
true ->
state =
state
|> update_received()
|> update_jitter(buffer)
{{:ok, buffer: {:output, repair_sequence_number(buffer, state)}}, state}
end
end
@impl true
def handle_event(:input, %ReceiverReport.StatsRequestEvent{}, _ctx, state) do
%State{
received: received,
received_prior: received_prior,
expected_prior: expected_prior,
cycles: cycles,
max_seq: max_seq,
jitter: jitter
} = state
expected = expected_packets(state)
lost = max(expected - received, 0)
expected_interval = expected - expected_prior
received_interval = received - received_prior
lost_interval = expected_interval - received_interval
fraction_lost =
if expected_interval == 0 || lost_interval <= 0 do
0.0
else
lost_interval / expected_interval
end
total_lost =
cond do
lost > @max_s24_val -> @max_s24_val
lost < @min_s24_val -> @min_s24_val
true -> lost
end
state = %State{
state
| expected_prior: expected,
received_prior: received,
lost: total_lost,
fraction_lost: fraction_lost
}
stats = %ReceiverReport.Stats{
fraction_lost: fraction_lost,
total_lost: total_lost,
highest_seq_num: max_seq + cycles,
interarrival_jitter: jitter
}
{{:ok, event: {:input, %ReceiverReport.StatsEvent{stats: stats}}}, state}
end
@impl true
def handle_event(
:input,
%RTP.PacketsDiscardedEvent{discarded: packets_discarded},
_ctx,
%State{discarded: discarded} = state
) do
{:ok, %State{state | discarded: discarded + packets_discarded}}
end
@impl true
def handle_event(direction, event, ctx, state), do: super(direction, event, ctx, state)
defp update_sequence_counters(state, seq_num, max_seq, cycles) do
{max_seq_num, cycles} =
if seq_num < max_seq do
{seq_num, cycles + @max_seq_num}
else
{seq_num, cycles}
end
%State{state | max_seq: max_seq_num, cycles: cycles, base_seq: state.base_seq || seq_num}
end
defp update_received(%State{received: received} = state) do
%State{state | received: received + 1}
end
defp expected_packets(%State{cycles: cycles, max_seq: max_seq, base_seq: base_seq}) do
cycles + max_seq - base_seq + 1
end
defp update_jitter(state, %Buffer{metadata: metadata}) do
%State{clock_rate: clock_rate, jitter: last_jitter, transit: last_transit} = state
# Algorithm from https://tools.ietf.org/html/rfc3550#appendix-A.8
arrival_ts = Map.get(metadata, :arrival_ts, Time.vm_time())
buffer_ts = metadata.rtp.timestamp
arrival = arrival_ts |> Time.as_seconds() |> Ratio.mult(clock_rate) |> Ratio.trunc()
transit = arrival - buffer_ts
{jitter, transit} =
if last_transit == nil do
{last_jitter, transit}
else
d = abs(transit - last_transit)
new_jitter = last_jitter + 1 / 16 * (d - last_jitter)
{new_jitter, transit}
end
%State{state | jitter: jitter, transit: transit}
end
defp repair_sequence_number(%Buffer{} = buffer, %State{
discarded: discarded,
repair_sequence_numbers?: repair?
})
when not repair? or discarded == 0 do
buffer
end
# repairs sequence number if there have been any packets discarded by any of previous elements
defp repair_sequence_number(
%Buffer{metadata: %{rtp: %{sequence_number: seq_num}} = metadata} = buffer,
%State{discarded: discarded}
) do
metadata =
put_in(
metadata,
[:rtp, :sequence_number],
rem(seq_num - discarded + @max_seq_num + 1, @max_seq_num + 1)
)
%Buffer{buffer | metadata: metadata}
end
end
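# Hedged sketch of the sequence-number repair arithmetic in repair_sequence_number/2:
# with 2 packets discarded upstream, an incoming sequence number of 1 wraps around
# the 16-bit sequence space.
max_seq_num = 65_535
rem(1 - 2 + max_seq_num + 1, max_seq_num + 1)
#=> 65_535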
|
lib/membrane/rtp/inbound_packet_tracker.ex
| 0.908747
| 0.513973
|
inbound_packet_tracker.ex
|
starcoder
|
defmodule Algorithms.DataStructures.SinglyLinkedList do
@moduledoc """
Access for SinglyLinkedList is O(n)
Search for SinglyLinkedList is O(n)
Deletion/Insertion at element is O(1)
"""
defmodule Node, do: defstruct([:value, :reference])
defmodule LinkedList, do: defstruct([:nodes])
@doc """
Adding to the head of a SinglyLinkedList is O(1), as you know where the head is
"""
def add_node_head(%LinkedList{nodes: []}, value) do
first_link = form_link(value, nil)
%LinkedList{nodes: [first_link]}
end
def add_node_head(%LinkedList{nodes: [head | _tail] = list}, value) do
new_head = form_link(value, head)
%LinkedList{nodes: [new_head | list]}
end
@doc """
Adding to the tail of this SinglyLinkedList is O(n), since the list must be traversed to find and relink the current tail
"""
def add_node_tail(%LinkedList{nodes: []} = list, value) do
add_node_head(list, value)
end
def add_node_tail(%LinkedList{nodes: [head | []]}, value) do
old_tail = head
new_tail = form_tail_link(value)
old_tail = %Node{value: old_tail.value, reference: new_tail}
new_nodes = [old_tail] ++ [new_tail]
%LinkedList{nodes: new_nodes}
end
def add_node_tail(%LinkedList{nodes: nodes} = list, value) do
old_tail = tail(list)
new_tail = form_tail_link(value)
old_tail = %Node{value: old_tail.value, reference: new_tail}
new_nodes = nodes |> List.delete_at(-1)
new_nodes = new_nodes ++ [old_tail] ++ [new_tail]
%LinkedList{nodes: new_nodes}
end
def list_length(%LinkedList{nodes: []}), do: 0
def list_length(%LinkedList{nodes: nodes}), do: nodes |> length()
def head(%LinkedList{nodes: nodes}), do: nodes |> List.first()
def tail(%LinkedList{nodes: [current_tail | []]}) do
current_tail
end
def tail(%LinkedList{nodes: []}), do: nil
def tail(%LinkedList{nodes: [_current_head | current_tails]}) do
tail(%LinkedList{nodes: current_tails})
end
defp form_link(value, reference) do
%__MODULE__.Node{value: value, reference: reference}
end
defp form_tail_link(value) do
%__MODULE__.Node{value: value, reference: nil}
end
end
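# Hedged usage sketch of the list operations above.
alias Algorithms.DataStructures.SinglyLinkedList
alias Algorithms.DataStructures.SinglyLinkedList.LinkedList

list =
  %LinkedList{nodes: []}
  |> SinglyLinkedList.add_node_head(2)
  |> SinglyLinkedList.add_node_head(1)
  |> SinglyLinkedList.add_node_tail(3)

3 = SinglyLinkedList.list_length(list)
1 = SinglyLinkedList.head(list).value
3 = SinglyLinkedList.tail(list).value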
|
lib/data_structures/singly_linked_list.ex
| 0.783077
| 0.41253
|
singly_linked_list.ex
|
starcoder
|
defmodule Posexional.Field do
@moduledoc """
generic utility functions for fields
"""
@doc """
justifies a value given alignment, size and filler char; if the given value
is longer, it gets trimmed
## Examples
iex> Posexional.Field.positionalize("test",
...> %{alignment: :left, size: 10, filler: ?\\s})
"test "
iex> Posexional.Field.positionalize("test",
...> %{alignment: :right, size: 10, filler: ?\\s})
" test"
iex> Posexional.Field.positionalize("test",
...> %{alignment: :right, size: 5, filler: ?\\s})
" test"
iex> Posexional.Field.positionalize("test",
...> %{alignment: :right, size: 5, filler: ?-})
"-test"
iex> Posexional.Field.positionalize("testtest",
...> %{alignment: :right, size: 5, filler: ?-})
"testt"
"""
@spec positionalize(binary, map) :: binary
def positionalize(value, %{alignment: :left, size: size, filler: filler}) do
value
|> String.pad_trailing(size, filler_to_list_of_string(filler))
|> String.slice(0, size)
end
def positionalize(value, %{alignment: :right, size: size, filler: filler}) do
value
|> String.pad_leading(size, filler_to_list_of_string(filler))
|> String.slice(0, size)
end
@spec depositionalize(binary, map) :: binary
def depositionalize(content, field = %{filler: filler}) do
content
|> remove_filler(field)
|> nil_if_empty(filler)
end
defp remove_filler(content, %{filler: filler, alignment: :right}) do
String.replace_leading(content, filler_to_string(filler), "")
end
defp remove_filler(content, %{filler: filler, alignment: :left}) do
String.replace_trailing(content, filler_to_string(filler), "")
end
defp filler_to_string(filler) do
to_string([filler])
end
defp filler_to_list_of_string(filler) do
[to_string([filler])]
end
@doc """
nil if the value is an empty string, or a string containing only the filler
"""
def nil_if_empty("", _), do: nil
def nil_if_empty(v, filler) do
if contains_only?(v, filler) do
nil
else
v
end
end
@doc """
true if the value passed contains only the filler value
"""
def contains_only?(v, filler) do
v
|> String.to_charlist()
|> Enum.all?(&(&1 == filler))
end
end
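# Hedged sketch of depositionalize/2, the inverse of the positionalize/2 doctests above.
Posexional.Field.depositionalize("-----test", %{alignment: :right, filler: ?-})
#=> "test"

Posexional.Field.depositionalize("----------", %{alignment: :right, filler: ?-})
#=> nil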
|
lib/posexional/field.ex
| 0.812421
| 0.560974
|
field.ex
|
starcoder
|
defmodule Cldr.Number.Backend.Format do
@moduledoc false
def define_number_module(config) do
module = inspect(__MODULE__)
backend = config.backend
config = Macro.escape(config)
number_system = Module.concat(backend, Number.System)
quote location: :keep,
bind_quoted: [
module: module,
backend: backend,
config: config,
number_system: number_system
] do
defmodule Number.Format do
@moduledoc false
if Cldr.Config.include_module_docs?(config.generate_docs) do
@moduledoc """
Functions to manage the collection of number patterns defined in Cldr.
Number patterns affect how numbers are interpreted in a localized context.
Here are some examples, based on the French locale. The "." shows where the
decimal point should go. The "," shows where the thousands separator should
go. A "0" indicates zero-padding: if the number is too short, a zero (in the
locale's numeric set) will go there. A "#" indicates no padding: if the
number is too short, nothing goes there. A "¤" shows where the currency sign
will go. The following illustrates the effects of different patterns for the
French locale, with the number "1234.567". Notice how the pattern characters
',' and '.' are replaced by the characters appropriate for the locale.
## Number Pattern Examples
| Pattern | Currency | Text |
| ------------- | :-------------: | ----------: |
| #,##0.## | n/a | 1 234,57 |
| #,##0.### | n/a | 1 234,567 |
| ###0.##### | n/a | 1234,567 |
| ###0.0000# | n/a | 1234,5670 |
| 00000.0000 | n/a | 01234,5670 |
| #,##0.00 ¤ | EUR | 1 234,57 € |
The number of # placeholder characters before the decimal does not matter,
since no limit is placed on the maximum number of digits. There should,
however, be at least one zero some place in the pattern. In currency formats,
the number of digits after the decimal also does not matter, since the
information in the supplemental data (see Supplemental Currency Data) is used
to override the number of decimal places — and the rounding — according to
the currency that is being formatted. That can be seen in the above chart,
with the difference between Yen and Euro formatting.
Details of the number formats are described in the
[Unicode documentation](http://unicode.org/reports/tr35/tr35-numbers.html#Number_Format_Patterns)
"""
end
alias Cldr.Number.System
@doc """
Returns the list of decimal formats in the configured locales including
the list of locales configured for precompilation in `config.exs`.
This function exists to allow the decimal formatter
to precompile all the known formats at compile time.
## Example
#=> #{inspect(__MODULE__)}.Format.decimal_format_list
["#", "#,##,##0%",
"#,##,##0.###", "#,##,##0.00¤", "#,##,##0.00¤;(#,##,##0.00¤)",
"#,##,##0 %", "#,##0%", "#,##0.###", "#,##0.00 ¤",
"#,##0.00 ¤;(#,##0.00 ¤)", "#,##0.00¤", "#,##0.00¤;(#,##0.00¤)",
"#,##0 %", "#0%", "#0.######", "#0.00 ¤", "#E0", "%#,##0", "% #,##0",
"0", "0.000000E+000", "0000 M ¤", "0000¤", "000G ¤", "000K ¤", "000M ¤",
"000T ¤", "000mM ¤", "000m ¤", "000 Bio'.' ¤", "000 Bln ¤", "000 Bn ¤",
"000 B ¤", "000 E ¤", "000 K ¤", "000 MRD ¤", "000 Md ¤", "000 Mio'.' ¤",
"000 Mio ¤", "000 Mld ¤", "000 Mln ¤", "000 Mn ¤", "000 Mrd'.' ¤",
"000 Mrd ¤", "000 Mr ¤", "000 M ¤", "000 NT ¤", "000 N ¤", "000 Tn ¤",
"000 Tr ¤", ...]
"""
@format_list Cldr.Config.decimal_format_list(config)
@spec decimal_format_list :: list(Cldr.Number.Format.format())
def decimal_format_list do
unquote(Macro.escape(@format_list))
end
@doc """
Returns the list of decimal formats for a configured locale.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
or a `Cldr.LanguageTag` struct returned by `#{inspect backend}.Locale.new!/1`.
The default is `#{inspect backend}.get_locale/0`
This function exists to allow the decimal formatter to precompile all
the known formats at compile time. Its use is not otherwise recommended.
## Example
iex> #{inspect(__MODULE__)}.decimal_format_list_for("en")
{:ok, ["#,##0%", "#,##0.###", "#E0", "0 billion", "0 million", "0 thousand",
"0 trillion", "00 billion", "00 million", "00 thousand", "00 trillion",
"000 billion", "000 million", "000 thousand", "000 trillion", "000B", "000K",
"000M", "000T", "00B", "00K", "00M", "00T", "0B", "0K", "0M", "0T",
"¤#,##0.00", "¤#,##0.00;(¤#,##0.00)", "¤000B", "¤000K", "¤000M",
"¤000T", "¤00B", "¤00K", "¤00M", "¤00T", "¤0B", "¤0K", "¤0M", "¤0T"]}
"""
@spec decimal_format_list_for(LanguageTag.t() | Cldr.Locale.locale_name()) ::
{:ok, list(String.t())} | {:error, {module(), String.t()}}
def decimal_format_list_for(locale \\ unquote(backend).get_locale())
for locale_name <- Cldr.Locale.Loader.known_locale_names(config) do
decimal_formats = Cldr.Config.decimal_formats_for(locale_name, config)
def decimal_format_list_for(%LanguageTag{cldr_locale_name: unquote(locale_name)}) do
{:ok, unquote(Macro.escape(decimal_formats))}
end
end
def decimal_format_list_for(locale_name) when is_binary(locale_name) do
with {:ok, locale} <- unquote(backend).validate_locale(locale_name) do
decimal_format_list_for(locale)
end
end
def decimal_format_list_for(locale) do
{:error, Cldr.Locale.locale_error(locale)}
end
@doc """
Returns the decimal formats defined for a given locale.
## Arguments
* `locale` is any valid locale name returned by
`#{inspect(backend)}.known_locale_names/0`
or a `Cldr.LanguageTag` struct returned by
`#{inspect(backend)}.Locale.new!/1`. The default
is `#{inspect(backend)}.get_locale/0`
## Returns
* `{:ok, map}` where map is a map of decimal formats
keyed by number system or
* `{:error, {exception, message}}`
"""
@spec all_formats_for(LanguageTag.t() | Cldr.Locale.locale_name()) ::
{:ok, map()} | {:error, {module(), String.t()}}
def all_formats_for(locale \\ unquote(backend).get_locale())
@doc """
Returns the minimum grouping digits for a locale.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
or a `Cldr.LanguageTag` struct returned by
`#{inspect(backend)}.Locale.new!/1`. The default
is `#{inspect(backend)}.get_locale/0`
## Returns
* `{:ok, minimum_digits}` or
* `{:error, {exception, message}}`
## Examples
iex> #{inspect(__MODULE__)}.minimum_grouping_digits_for("en")
{:ok, 1}
"""
@spec minimum_grouping_digits_for(LanguageTag.t() | Cldr.Locale.locale_name()) ::
{:ok, non_neg_integer} | {:error, {module(), String.t()}}
def minimum_grouping_digits_for(locale \\ unquote(backend).get_locale())
@doc """
Returns the default grouping for a locale as a map.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
or a `Cldr.LanguageTag` struct returned by
`#{inspect(backend)}.Locale.new!/1`. The default
is `#{inspect(backend)}.get_locale/0`
## Returns
* `{:ok, grouping}` or
* `{:error, {exception, message}}`
## Examples
iex> #{inspect(__MODULE__)}.default_grouping_for("en")
{:ok, %{fraction: %{first: 0, rest: 0}, integer: %{first: 3, rest: 3}}}
"""
@spec default_grouping_for(LanguageTag.t() | Cldr.Locale.locale_name()) ::
{:ok, map()} | {:error, {module(), String.t()}}
def default_grouping_for(locale \\ unquote(backend).get_locale())
for locale_name <- Cldr.Locale.Loader.known_locale_names(config) do
locale_data =
locale_name
|> Cldr.Locale.Loader.get_locale(config)
number_formats =
locale_data
|> Map.get(:number_formats)
|> Enum.map(fn {type, format} -> {type, struct(Cldr.Number.Format, format)} end)
|> Enum.into(%{})
minimum_grouping_digits =
locale_data
|> Map.get(:minimum_grouping_digits)
def all_formats_for(%LanguageTag{cldr_locale_name: unquote(locale_name)}) do
{:ok, unquote(Macro.escape(number_formats))}
end
def minimum_grouping_digits_for(%LanguageTag{cldr_locale_name: unquote(locale_name)}) do
{:ok, unquote(minimum_grouping_digits)}
end
default_number_system =
locale_data
|> get_in([:number_systems, :default])
standard_format = number_formats[default_number_system].standard
{:ok, meta} = Cldr.Number.Format.Compiler.format_to_metadata(standard_format)
def default_grouping_for(%LanguageTag{cldr_locale_name: unquote(locale_name)}) do
{:ok, unquote(Macro.escape(meta.grouping))}
end
end
def all_formats_for(locale_name) when is_binary(locale_name) do
with {:ok, locale} <- unquote(backend).validate_locale(locale_name) do
all_formats_for(locale)
end
end
def minimum_grouping_digits_for(locale_name) when is_binary(locale_name) do
with {:ok, locale} <- unquote(backend).validate_locale(locale_name) do
minimum_grouping_digits_for(locale)
end
end
def default_grouping_for(locale_name) when is_binary(locale_name) do
with {:ok, locale} <- unquote(backend).validate_locale(locale_name) do
default_grouping_for(locale)
end
end
@doc """
Returns the minimum grouping digits for a locale
or raises on error.
## Arguments
* `locale` is any valid locale name returned by
`#{inspect(backend)}.known_locale_names/0`
or a `Cldr.LanguageTag` struct returned by `#{inspect(backend)}.Locale.new!/1`. The default
is `#{inspect(backend)}.get_locale/0`
## Returns
* `minimum_digits` or
* raises an exception
## Examples
iex> #{inspect(__MODULE__)}.minimum_grouping_digits_for!("en")
1
"""
@spec minimum_grouping_digits_for!(LanguageTag.t() | Cldr.Locale.locale_name()) ::
non_neg_integer | no_return()
def minimum_grouping_digits_for!(locale) do
case minimum_grouping_digits_for(locale) do
{:ok, digits} -> digits
{:error, {exception, reason}} -> raise exception, reason
end
end
@doc """
Returns the default grouping for a locale
or raises on error.
## Arguments
* `locale` is any valid locale name returned by
`#{inspect(backend)}.known_locale_names/0`
or a `Cldr.LanguageTag` struct returned by `#{inspect(backend)}.Locale.new!/1`. The default
is `#{inspect(backend)}.get_locale/0`
## Returns
* `grouping` as a map or
* raises an exception
## Examples
iex> #{inspect(__MODULE__)}.default_grouping_for!("en")
%{fraction: %{first: 0, rest: 0}, integer: %{first: 3, rest: 3}}
"""
@spec default_grouping_for!(LanguageTag.t() | Cldr.Locale.locale_name()) ::
map() | no_return()
def default_grouping_for!(locale) do
case default_grouping_for(locale) do
{:ok, digits} -> digits
{:error, {exception, reason}} -> raise exception, reason
end
end
@doc """
Returns the currency space for a given locale and
number system.
"""
@spec currency_spacing(
LanguageTag.t() | Cldr.Locale.locale_name(),
System.system_name()
) :: map() | {:error, {module(), String.t()}}
def currency_spacing(locale, number_system) do
with {:ok, formats} <- formats_for(locale, number_system) do
Map.get(formats, :currency_spacing)
end
end
@doc """
Returns the decimal formats defined for a given locale.
## Arguments
* `locale` is any valid locale name returned by
`#{inspect(backend)}.known_locale_names/0`
or a `Cldr.LanguageTag` struct returned by
`#{inspect(backend)}.Locale.new!/1`. The default
is `#{inspect(backend)}.get_locale/0`
## Returns
* `{:ok, map}` where map is a map of decimal formats
keyed by number system or
* raises an exception
See `#{inspect(__MODULE__)}.Number.Format.all_formats_for/1` for further information.
"""
@spec all_formats_for!(LanguageTag.t() | Cldr.Locale.locale_name()) ::
map() | no_return()
def all_formats_for!(locale \\ unquote(backend).get_locale()) do
case all_formats_for(locale) do
{:ok, formats} -> formats
{:error, {exception, message}} -> raise exception, message
end
end
@doc """
Returns the predefined formats for a given `locale` and `number_system`.
## Arguments
* `locale` is any valid locale name returned by
`#{inspect(backend)}.known_locale_names/0`
or a `Cldr.LanguageTag` struct returned by `#{inspect(backend)}.Locale.new!/1`.
The default is `#{inspect(backend)}.get_locale/0`
* `number_system` is any valid number system or number system type returned
by `#{inspect(backend)}.Number.System.number_systems_for/1`
## Example
#{inspect(__MODULE__)}.formats_for "fr", :native
#=> %Cldr.Number.Format{
accounting: "#,##0.00 ¤;(#,##0.00 ¤)",
currency: "#,##0.00 ¤",
percent: "#,##0 %",
scientific: "#E0",
standard: "#,##0.###",
currency_short: [{"1000", [one: "0 k ¤", other: "0 k ¤"]},
{"10000", [one: "00 k ¤", other: "00 k ¤"]},
{"100000", [one: "000 k ¤", other: "000 k ¤"]},
{"1000000", [one: "0 M ¤", other: "0 M ¤"]},
{"10000000", [one: "00 M ¤", other: "00 M ¤"]},
{"100000000", [one: "000 M ¤", other: "000 M ¤"]},
{"1000000000", [one: "0 Md ¤", other: "0 Md ¤"]},
{"10000000000", [one: "00 Md ¤", other: "00 Md ¤"]},
{"100000000000", [one: "000 Md ¤", other: "000 Md ¤"]},
{"1000000000000", [one: "0 Bn ¤", other: "0 Bn ¤"]},
{"10000000000000", [one: "00 Bn ¤", other: "00 Bn ¤"]},
{"100000000000000", [one: "000 Bn ¤", other: "000 Bn ¤"]}],
...
}
"""
@spec formats_for(LanguageTag.t() | binary(), atom | String.t()) ::
{:ok, map()} | {:error, {module(), String.t()}}
def formats_for(
locale \\ unquote(backend).default_locale(),
number_system \\ Cldr.Number.System.default_number_system_type()
)
def formats_for(%LanguageTag{} = locale, number_system) do
with {:ok, locale} <- unquote(backend).validate_locale(locale),
{:ok, system_name} <-
System.system_name_from(number_system, locale, unquote(backend)),
{:ok, formats} <- all_formats_for(locale) do
{:ok, Map.get(formats, system_name)}
end
end
def formats_for(locale_name, number_system) when is_binary(locale_name) do
with {:ok, locale} <- unquote(backend).validate_locale(locale_name) do
formats_for(locale, number_system)
end
end
@spec formats_for!(
LanguageTag.t() | Cldr.Locale.locale_name(),
Cldr.Number.System.system_name()
) ::
map() | no_return()
def formats_for!(
locale \\ unquote(backend).default_locale(),
number_system \\ Cldr.Number.System.default_number_system_type()
)
def formats_for!(locale_name, number_system) do
case formats_for(locale_name, number_system) do
{:ok, formats} -> formats
{:error, {exception, reason}} -> raise exception, reason
end
end
end
end
end
end
# lib/cldr/number/backend/format.ex
defmodule Mandrake do
@moduledoc """
Mandrake is a functional programming library that brings a little extra magic to Elixir. Ease of use, productivity and maintainability are Mandrake's defining characteristics. Mandrake contains mathematical, logical, relational and functional APIs that help developers in their daily work.
## What is a Mandrake?
Element: Fire
Planet: Mercury, Uranus and Pluto.
Powers: Protection, Fertility, Money, Love, Health
Mandrake is a long leafed dark green plant with small greenish-yellow or purple bell-shaped flowers. The flowers eventually fruit into small orange-coloured fleshy berries.
Magically speaking, the female mandrake carries forked roots that look like a pair of human legs, whereas the male has only a single root.
In the old Herbals we find them frequently figured as a male with a long beard, and a female with a very bushy head of hair.
The female form is the most sought after for magic and medicinal use.
A whole Mandrake root placed in the home, will give the house protection, fertility, and prosperity. Also, where there is Mandrake, demons cannot abide. Money placed beside the root is said to multiply.
Source: http://monsters.monstrous.com/mandrake.htm
"""
defmacro __using__(_opts) do
quote do
import Mandrake
end
end
defmodule Math do
@moduledoc """
Mandrake mathematical functions.
"""
defmacro __using__(_opts) do
quote do
import Mandrake.Math
end
end
@doc """
Calculates the sum of a list of numbers.
## Examples
iex> Mandrake.Math.sum([1, 2, 3, 4, 5])
15
"""
def sum([head|tail]) do
head + sum(tail)
end
def sum([]) do
0
end
@doc """
Calculates the mean of a list of numbers.
## Examples
iex> Mandrake.Math.mean([1, 2, 3, 4, 5])
3.0
"""
def mean(list) do
sum(list) / (length list)
end
@doc """
Calculates the product of a list of numbers.
## Examples
iex> Mandrake.Math.product([1, 2, 3, 4, 5])
120
"""
def product([head|tail]) do
head * product(tail)
end
def product([]) do
1
end
@doc """
Calculates the absolute value of each number in a list.
## Examples
iex> Mandrake.Math.abs([-1, 2, -3, -4, -5])
[1, 2, 3, 4, 5]
"""
def abs([head|tail]) do
[ Kernel.abs(head) | __MODULE__.abs(tail) ]
end
def abs([]) do
[]
end
@doc """
Returns max value in a list.
## Examples
iex> Mandrake.Math.max([1, 243, 3, 4, 5])
243
"""
def max([head]) do
  head
end
def max([head|tail]) do
  Kernel.max(head, __MODULE__.max(tail))
end
@doc """
Returns min value in a list.
## Examples
iex> Mandrake.Math.min([1, 243, 3, 4, 5])
1
"""
def min([head]) do
  head
end
def min([head|tail]) do
  Kernel.min(head, __MODULE__.min(tail))
end
@doc """
Adds two numbers.
## Examples
iex> Mandrake.Math.add(5, 2)
7
"""
def add(first_number, second_number) do
first_number + second_number
end
@doc """
Returns a function that adds the given number to its argument.
## Examples
iex> add2 = Mandrake.Math.add(2)
...> add2.(5)
7
"""
def add(number) do
fn arg -> add(number, arg) end
end
@doc """
Subtracts two numbers.
## Examples
iex> Mandrake.Math.subtract(5, 2)
3
"""
def subtract(first_number, second_number) do
first_number - second_number
end
@doc """
Returns a function that subtracts its argument from the given number.
## Examples
iex> subtract2 = Mandrake.Math.subtract(2)
...> subtract2.(5)
-3
"""
def subtract(number) do
fn arg -> subtract(number, arg) end
end
@doc """
Decreases a number by 1.
## Examples
iex> Mandrake.Math.dec(7)
6
"""
def dec(number) do
number - 1
end
@doc """
Increases a number by 1.
## Examples
iex> Mandrake.Math.inc(7)
8
"""
def inc(number) do
number + 1
end
@doc """
Divides two numbers.
## Examples
iex> Mandrake.Math.divide(5, 2)
2.5
"""
def divide(first_number, second_number) do
first_number / second_number
end
@doc """
Returns a function that divides the given number by its argument.
## Examples
iex> divide2 = Mandrake.Math.divide(2)
...> divide2.(5)
0.4
"""
def divide(number) do
fn arg -> divide(number, arg) end
end
@doc """
Multiplies two numbers.
## Examples
iex> Mandrake.Math.multiply(5, 2)
10
"""
def multiply(first_number, second_number) do
first_number * second_number
end
@doc """
Returns a function that multiplies the given number by its argument.
## Examples
iex> multiply2 = Mandrake.Math.multiply(2)
...> multiply2.(5)
10
"""
def multiply(number) do
fn arg -> multiply(number, arg) end
end
@doc """
Returns modulus of two numbers.
## Examples
iex> Mandrake.Math.modulo(20, 5)
0
"""
def modulo(first_number, second_number) do
Kernel.rem(first_number, second_number)
end
@doc """
Negates a number.
## Examples
iex> Mandrake.Math.negate(7)
-7
"""
def negate(number) do
-number
end
@doc """
Calculates the median of a list of numbers.
## Examples
iex> Mandrake.Math.median([7, 2, 10, 9, 6])
7
iex> Mandrake.Math.median([7, 2, 10, 9, 6, 8])
7.5
"""
def median(list) do
list = Enum.sort(list)
if modulo((length list), 2) == 0 do
(Enum.at(list,((length list) -1) |> div 2) + Enum.at(list,((length list) +1) |> div 2)) / 2
else
Enum.at(list,((length list) -1) |> div 2)
end
end
end
defmodule Function do
@moduledoc """
Mandrake functional functions.
"""
defmacro __using__(_opts) do
quote do
import Mandrake.Function
end
end
def build_args(string, remaining) do
if remaining == 0 do
string
else
build_args(string <> ", arg" <> to_string(remaining), remaining-1)
end
end
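# For example, build_args("arg3", 2) produces "arg3, arg2, arg1" — the
# comma-separated argument list used by lambda/2 below.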
@doc """
Applies the function to the given value.
## Examples
iex> Mandrake.Function.apply(fn arg -> -arg end, -2)
2
"""
def apply(function, value) do
function.(value)
end
@doc """
Returns an anonymous function that wraps the named function with the given arity.
## Examples
iex> mod = Mandrake.Function.lambda("Mandrake.Math.modulo", 2)
...> mod.(20, 5)
0
"""
def lambda(function, arity) do
args =
  cond do
    arity == 0 -> ""
    arity == 1 -> "arg"
    arity > 1 -> build_args("arg" <> to_string(arity), arity - 1)
  end
List.first(Tuple.to_list(Code.eval_string("fn " <> args <> " -> " <> function <> "(" <> args <> ") end")))
end
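# A sketch of what lambda/2 evaluates for lambda("Mandrake.Math.modulo", 2):
#
#     fn arg2, arg1 -> Mandrake.Math.modulo(arg2, arg1) end
#
# i.e. the argument list is built as a string and compiled with Code.eval_string/1.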
@doc """
Returns a function that always returns the given value.
## Examples
iex> printExample = Mandrake.Function.always("Example")
...> printExample.()
"Example"
"""
def always(value) do
fn -> value end
end
@doc """
Function that does nothing.
## Examples
iex> Mandrake.Function.noop()
nil
"""
def noop() do
end
@doc """
Returns the composition of two functions.
## Examples
iex> printHello = Mandrake.Function.compose(fn x -> "Hello " <> to_string(x) end, fn y -> y + 1 end)
...> printHello.(6)
"Hello 7"
"""
def compose(first_function, second_function) do
fn arg -> first_function.(second_function.(arg)) end
end
end
defmodule Logic do
@moduledoc """
Mandrake logical functions.
"""
defmacro __using__(_opts) do
quote do
import Mandrake.Logic
end
end
@doc """
Returns a function that returns the negated result of the given function.
## Examples
iex> is_odd = Mandrake.Logic.complement(fn arg -> Kernel.rem(arg, 2) == 0 end)
...> is_odd.(24)
false
"""
def complement(function) do
fn arg -> logic_not(function.(arg)) end
end
@doc """
Returns a function that applies `onTrue` or `onFalse` to its argument, depending on the condition.
## Examples
iex> my_number = 1
...> my_function = Mandrake.Logic.ifElse(my_number <= 1, fn arg -> Mandrake.Math.inc(arg) end, fn arg -> Mandrake.Math.dec(arg) end)
...> my_function.(my_number)
2
"""
def ifElse(condition, onTrue, onFalse) do
fn arg -> if condition do onTrue.(arg) else onFalse.(arg) end end
end
@doc """
Returns a function that applies `on_true` to its argument if the condition is true.
## Examples
iex> my_number = 1
...> my_function = Mandrake.Logic.logic_if(my_number <= 1, fn arg -> Mandrake.Math.inc(arg) end)
...> my_function.(my_number)
2
"""
def logic_if(condition, on_true) do
fn arg -> if condition do on_true.(arg) end end
end
@doc """
Returns a function that returns `true` if both of the given functions return `true` for its argument.
## Examples
iex> my_function = Mandrake.Logic.both(fn x -> x > 10 end, fn x -> Kernel.rem(x, 2) == 0 end)
...> my_function.(100)
true
iex> my_function.(101)
false
"""
def both(first_func, second_func) do
fn value -> first_func.(value) && second_func.(value) end
end
@doc """
Returns a function that returns `true` if at least one of the given functions returns `true` for its argument.
## Examples
iex> my_function = Mandrake.Logic.either(fn x -> x > 10 end, fn x -> Kernel.rem(x, 2) == 0 end)
...> my_function.(100)
true
iex> my_function.(101)
true
"""
def either(first_func, second_func) do
fn value -> first_func.(value) || second_func.(value) end
end
@doc """
Returns `true` if value is "", '', [], {} or %{}.
## Examples
iex> Mandrake.Logic.is_empty([])
true
iex> Mandrake.Logic.is_empty(nil)
false
"""
def is_empty(value) do
if value == [] || value == {} || value == "" || value == '' || value == %{} do
true
else
false
end
end
@doc """
Returns `true` if both conditions are true.
## Examples
iex> Mandrake.Logic.logic_and(1<2, 3>4)
false
"""
def logic_and(first_cond, second_cond) do
first_cond && second_cond
end
@doc """
Returns `true` if one condition is true.
## Examples
iex> Mandrake.Logic.logic_or(1<2, 3>4)
true
"""
def logic_or(first_cond, second_cond) do
first_cond || second_cond
end
@doc """
Returns ! of value
## Examples
iex> Mandrake.Logic.logic_not(false)
true
"""
def logic_not(value) do
!value
end
@doc """
Returns a function that returns its argument when it is not nil, otherwise the given default value.
## Examples
iex> default_to_7 = Mandrake.Logic.default_to(7)
...> default_to_7.(nil)
7
iex> default_to_7.(12)
12
"""
def default_to(value) do
fn arg ->
if arg != nil do
arg
else
value
end
end
end
end
defmodule List do
@moduledoc """
Mandrake list manipulation functions.
"""
defmacro __using__(_opts) do
quote do
import Mandrake.List
end
end
@doc """
Returns the given list in reverse order.
## Examples
iex> Mandrake.List.reverse([4, 7, 43, 6, 3, 7])
[7, 3, 6, 43, 7, 4]
"""
def reverse([head|tail]) do
reverse(tail) ++ [head]
end
def reverse([]) do
[]
end
@doc """
Returns a new list starting from the given value or nil.
## Examples
iex> Mandrake.List.from(43, [4, 7, 43, 6, 3, 7])
[43, 6, 3, 7]
"""
def from(value, [head|tail]) do
if head == value do
[head|tail]
else
from(value, tail)
end
end
def from(_value, []) do
  nil
end
@doc """
Returns `true` if all elements in a list are equal.
## Examples
iex> Mandrake.List.equals([7, 7, 7])
true
"""
def equals([_head]) do
  true
end
def equals([head|tail]) do
  if head == Elixir.List.first(tail) do
    equals(tail)
  else
    false
  end
end
@doc """
Returns a new list after appending the new element.
## Examples
iex> Mandrake.List.append(45, [4, 7, 43, 6, 3, 7])
[4, 7, 43, 6, 3, 7, 45]
"""
def append(element, list) do
list ++ [element]
end
@doc """
Returns a new list after prepending the new element.
## Examples
iex> Mandrake.List.prepend(45, [4, 7, 43, 6, 3, 7])
[45, 4, 7, 43, 6, 3, 7]
"""
def prepend(element, list) do
[element] ++ list
end
@doc """
Returns the best item in a list based on the return value for the iterator function.
## Examples
iex> longest = Mandrake.List.best(fn arg1, arg2 -> if arg1 > arg2 do arg1 else arg2 end end)
...> longest.([4, 7, 43, 6, 3, 7])
43
"""
def best(function) do
fn list -> best(function, list) end
end
@doc """
Returns the best item in a list based on the return value for the iterator function.
## Examples
iex> Mandrake.List.best(fn arg1, arg2 -> if arg1 > arg2 do arg1 else arg2 end end, [4, 7, 43, 6, 3, 7])
43
"""
def best(_function, [head]) do
  head
end
def best(function, [head|tail]) do
  function.(head, best(function, tail))
end
@doc """
Returns `true` if list contains the given value.
## Examples
iex> Mandrake.List.contains(3, [1, 2, 3, 4, 5])
true
"""
def contains(value, [head|tail]) do
value == head || contains(value, tail)
end
def contains(value, []) do
false
end
@doc """
Returns the extension of the first list with the second.
## Examples
iex> Mandrake.List.union([1, 2, 3], [4, 5])
[1, 2, 3, 4, 5]
"""
def union(first_list, second_list) do
first_list ++ second_list
end
@doc """
Returns the difference of the 2 lists.
## Examples
iex> Mandrake.List.difference([1, 2, 3], [2])
[1, 3]
"""
def difference(first_list, second_list) do
first_list -- second_list
end
@doc """
Returns the intersection of the 2 lists.
## Examples
iex> Mandrake.List.intersection([1,2,3,4], [7,6,5,4,3])
[3, 4]
"""
def intersection(first_list, second_list) do
first_list -- (first_list -- second_list)
end
@doc """
Returns the last item of a list that returns `true` for the function.
## Examples
iex> Mandrake.List.last(fn x -> Kernel.rem(x, 2) == 0 end, [2, 4, 5, 7, 8, 9])
8
"""
def last(func, list) do
list = Enum.reverse(list)
first(func, list)
end
@doc """
Returns the first item of a list that returns `true` for the function.
## Examples
iex> Mandrake.List.first(fn x -> Kernel.rem(x, 2) == 0 end, [2, 4, 5, 7])
2
"""
def first(func, [head|tail]) do
if func.(head) do
head
else
first(func, tail)
end
end
def first(func, []) do
[]
end
@doc """
Returns a list containing the given value.
## Examples
iex> Mandrake.List.of("Example")
["Example"]
"""
def of(value) do
[value]
end
@doc """
Returns a new list without the given item.
## Examples
iex> Mandrake.List.delete([1, 2, 4, 3, 4, 5, 6], 4)
[1, 2, 3, 5, 6]
"""
def delete([head|tail], item) do
if head != item do
[ head ] ++ delete(tail, item)
else
delete(tail, item)
end
end
def delete([], item) do
[]
end
@doc """
Returns a new list without the given item.
## Examples
iex> delete_4 = Mandrake.List.delete(4)
...> delete_4.([1, 2, 4, 3, 4, 5, 6])
[1, 2, 3, 5, 6]
"""
def delete(item) do
fn list -> delete(list, item) end
end
@doc """
Returns a list of the values of the given key taken from the items in the given list.
## Examples
iex> pluck_example = Mandrake.List.pluck(:example)
...> pluck_example.([%{ name: "Doe" }, %{ example: "first", name: "John" }, %{ example: "second" }])
[ "first", "second" ]
"""
def pluck(key) do
fn list -> pluck(list, key) end
end
@doc """
Returns a list of the values of the given key taken from the items in the given list.
## Examples
iex> Mandrake.List.pluck([%{ name: "Doe" }, %{ example: "first", name: "John" }, %{ example: "second" }], :example)
[ "first", "second" ]
"""
def pluck([head|tail], key) do
value = Map.get(head, key)
if value != nil do
[ value ] ++ pluck(tail, key)
else
pluck(tail, key)
end
end
def pluck([], key) do
[]
end
@doc """
Returns a new list after applying the given function to each element of the list.
## Examples
iex> Mandrake.List.for_each(fn x -> x + 1 end, [1, 2, 3])
[2, 3, 4]
"""
def for_each(function, [head|tail]) do
[ function.(head) ] ++ for_each(function, tail)
end
def for_each(function, []) do
[]
end
@doc """
Returns the leading items of the list, taken while the given function returns `true`.
## Examples
iex> while_even = Mandrake.List.while(fn item -> Kernel.rem(item, 2) == 0 end)
...> while_even.([2, 6, 7, 8, 10])
[2, 6]
"""
def while(function) do
fn list -> while(function, list) end
end
@doc """
Returns the leading items of the list, taken while the given function returns `true`.
## Examples
iex> Mandrake.List.while(fn item -> Kernel.rem(item, 2) == 0 end, [2, 6, 7, 8, 10])
[2, 6]
"""
def while(function, [head|tail]) do
if function.(head) do
[ head ] ++ while(function, tail)
else
[]
end
end
def while(function, []) do
[]
end
@doc """
Returns a new list with only the items that satisfy the given function.
## Examples
iex> select_even = Mandrake.List.select(fn item -> Kernel.rem(item, 2) == 0 end)
...> select_even.([2, 6, 7, 8, 10])
[2, 6, 8, 10]
"""
def select(function) do
fn list -> select(function, list) end
end
@doc """
Returns the list of items that satisfy the condition.
## Examples
iex> Mandrake.List.select(fn item -> Kernel.rem(item, 2) == 0 end, [2, 6, 7, 8, 10])
[2, 6, 8, 10]
"""
def select(function, [head|tail]) do
if function.(head) do
[ head ] ++ select(function, tail)
else
select(function, tail)
end
end
def select(function, []) do
[]
end
@doc """
Returns a list of 2 lists. The first contains the elements that satisfy the function, other elements are in the second.
## Examples
iex> even = Mandrake.List.partition(fn item -> Kernel.rem(item, 2) == 0 end)
...> even.([4, 7, 43, 6, 3, 7])
[[4, 6], [7, 43, 3, 7]]
"""
def partition(function) do
fn list -> partition(function, list) end
end
@doc """
Returns a list of 2 lists. The first contains the elements that satisfy the function, other elements are in the second.
## Examples
iex> Mandrake.List.partition(fn item -> Kernel.rem(item, 2) == 0 end, [4, 7, 43, 6, 3, 7])
[[4, 6], [7, 43, 3, 7]]
"""
def partition(function, list) do
[ select(function, list), select(fn item -> !function.(item) end, list) ]
end
end
defmodule Relation do
@moduledoc """
Mandrake relational functions.
"""
defmacro __using__(_opts) do
quote do
import Mandrake.Relation
end
end
@doc """
Returns the larger of two arguments.
## Examples
iex> Mandrake.Relation.max('a', 'z')
'z'
"""
def max(value_1, value_2) do
if value_1 > value_2 do
value_1
else
value_2
end
end
@doc """
Returns the value that produces the larger result when passed to the provided function.
## Examples
iex> Mandrake.Relation.max_by(fn arg -> Mandrake.Math.negate(arg) end, -3, -6)
-6
"""
def max_by(function, value_1, value_2) do
if function.(value_1) > function.(value_2) do
value_1
else
value_2
end
end
@doc """
Returns the smaller of two arguments.
## Examples
iex> Mandrake.Relation.min('a', 'z')
'a'
"""
def min(value_1, value_2) do
if value_1 < value_2 do
value_1
else
value_2
end
end
@doc """
Returns the value that produces the smaller result when passed to the provided function.
## Examples
iex> Mandrake.Relation.min_by(fn arg -> Mandrake.Math.negate(arg) end, -3, -6)
-3
"""
def min_by(function, value_1, value_2) do
  if function.(value_1) < function.(value_2) do
    value_1
  else
    value_2
  end
end
@doc """
Returns `true` if the values produce the same result when passed to the provided function.
## Examples
iex> Mandrake.Relation.equals_by(fn arg -> Mandrake.Math.abs(arg) end, [-3], [3])
true
"""
def equals_by(function, value_1, value_2) do
function.(value_1) == function.(value_2)
end
@doc """
Returns `true` if the given values are equal.
## Examples
iex> Mandrake.Relation.equals('0', 0)
false
"""
def equals(value_1, value_2) do
value_1 == value_2
end
@doc """
Returns `true` if first value is less than the second.
## Examples
iex> Mandrake.Relation.lt(7, 5)
false
"""
def lt(first_value, second_value) do
first_value < second_value
end
@doc """
Returns `true` if first value is less than or equal to the second.
## Examples
iex> Mandrake.Relation.lte(7, 7)
true
"""
def lte(first_value, second_value) do
first_value <= second_value
end
@doc """
Returns `true` if first value is greater than the second.
## Examples
iex> Mandrake.Relation.gt(7, 5)
true
"""
def gt(first_value, second_value) do
first_value > second_value
end
@doc """
Returns `true` if first value is greater than or equal to the second.
## Examples
iex> Mandrake.Relation.gte(7, 7)
true
"""
def gte(first_value, second_value) do
first_value >= second_value
end
end
defmodule Validation do
@moduledoc """
Mandrake validation functions.
"""
defmacro __using__(_opts) do
quote do
import Mandrake.Validation
end
end
@doc """
Returns a boolean indicating whether there was a match or not.
## Examples
iex> Mandrake.Validation.test(~r/foo/, "foo")
true
"""
def test(regex, string) do
Regex.match?(regex, string)
end
@doc """
Returns a function that test the given regex.
## Examples
iex> testFoo = Mandrake.Validation.test(~r/foo/)
...> testFoo.("bar")
false
"""
def test(regex) do
fn string -> Regex.match?(regex, string) end
end
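# The curried form composes nicely with Mandrake.Logic; an illustrative sketch
# (the regexes here are made up for the example, not part of the library):
#
#     hex_or_slug = Mandrake.Logic.either(
#       Mandrake.Validation.test(~r/^#?[a-fA-F0-9]{3,6}$/),
#       Mandrake.Validation.test(~r/^[a-z0-9-]+$/)
#     )
#     hex_or_slug.("#C0C0C0") #=> true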
@doc """
Always returns `true` for any string.
## Examples
iex> Mandrake.Validation.anything("foo")
true
"""
def anything(string) do
Regex.match?(~r/^.*$/, string)
end
@doc """
Returns `true` if the given value is a valid email.
## Examples
iex> Mandrake.Validation.email("<EMAIL>")
true
"""
def email(string) do
Regex.match?(~r/^([a-z0-9_\.-]+)@([\da-z\.-]+)\.([a-z\.]{2,6})$/, string)
end
@doc """
Returns `true` if the given value is a valid amount.
## Examples
iex> Mandrake.Validation.amount("0")
true
iex> Mandrake.Validation.amount("0.00")
true
iex> Mandrake.Validation.amount("0,00")
true
"""
def amount(string) do
Regex.match?(~r/(?:^\d{1,3}(?:\.?\d{3})*(?:,\d{2})?$)|(?:^\d{1,3}(?:,?\d{3})*(?:\.\d{2})?$)/, string)
end
@doc """
Returns `true` if the given value is a valid gender (m/M, f/F).
## Examples
iex> Mandrake.Validation.gender("m")
true
"""
def gender(string) do
Regex.match?(~r/^[mfMF]$/, string)
end
@doc """
Returns `true` if the given value is a valid year.
## Examples
iex> Mandrake.Validation.year("2015")
true
"""
def year(string) do
Regex.match?(~r/^\d{4}$/, string)
end
@doc """
Returns `true` if the given value is a valid number.
## Examples
iex> Mandrake.Validation.number("535")
true
"""
def number(string) do
Regex.match?(~r/^\d+$/, string)
end
@doc """
Returns `true` if the given value is a valid name.
## Examples
iex> Mandrake.Validation.name("<NAME>")
true
iex> Mandrake.Validation.name("<NAME>, Jr.")
true
"""
def name(string) do
Regex.match?(~r/^[a-zA-ZàáâäãåąčćęèéêëėįìíîïłńòóôöõøùúûüųūÿýżźñçčšžÀÁÂÄÃÅĄĆČĖĘÈÉÊËÌÍÎÏĮŁŃÒÓÔÖÕØÙÚÛÜŲŪŸÝŻŹÑßÇŒÆČŠŽ∂ð ,.'-]+$/u, string)
end
@doc """
Returns `true` if the given value is a valid phone number.
## Examples
iex> Mandrake.Validation.phone_number("(123) 456 7899")
true
iex> Mandrake.Validation.phone_number("(123).456.7899")
true
iex> Mandrake.Validation.phone_number("(123)-456-7899")
true
iex> Mandrake.Validation.phone_number("123-456-7899")
true
iex> Mandrake.Validation.phone_number("123 456 7899")
true
iex> Mandrake.Validation.phone_number("1234567899")
true
"""
def phone_number(string) do
Regex.match?(~r/\(?([0-9]{3})\)?([ .-]?)([0-9]{3})\2([0-9]{4})/, string)
end
@doc """
Returns `true` if the given value is a valid username (lowercase letters, numbers, underscores, hyphens, length between 3 and 16).
## Examples
iex> Mandrake.Validation.username("mbasso")
true
"""
def username(string) do
Regex.match?(~r/^[a-z0-9_-]{3,16}$/, string)
end
@doc """
Returns `true` if the given value is a valid password (letters, numbers, underscores, hyphens, length between 6 and 18).
## Examples
iex> Mandrake.Validation.password("<PASSWORD>")
true
"""
def password(string) do
Regex.match?(~r/^[a-zA-Z0-9_-]{6,18}$/, string)
end
@doc """
Returns `true` if the given value is a valid hex.
## Examples
iex> Mandrake.Validation.hex("#C0C0C0")
true
"""
def hex(string) do
Regex.match?(~r/^#?([a-f0-9]{6}|[a-f0-9]{3}|[A-F0-9]{6}|[A-F0-9]{3})$/, string)
end
@doc """
Returns `true` if the given value is a valid slug.
## Examples
iex> Mandrake.Validation.slug("my-example-title")
true
"""
def slug(string) do
Regex.match?(~r/^[a-z0-9-]+$/, string)
end
@doc """
Returns `true` if the given value is a valid url.
## Examples
iex> Mandrake.Validation.url("http://example.org/")
true
"""
def url(string) do
Regex.match?(~r/^(https?:\/\/)?([\da-z\.-]+)\.([a-z\.]{2,6})([\/\w \.-]*)*\/?$/, string)
end
@doc """
Returns `true` if the given value is a valid ip address.
## Examples
iex> Mandrake.Validation.ip_address("127.0.0.1")
true
"""
def ip_address(string) do
Regex.match?(~r/^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/, string)
end
@doc """
Returns `true` if the given value is a valid html tag.
## Examples
iex> Mandrake.Validation.html("<a href='https://example.org'>My example link</a>")
true
"""
def html(string) do
Regex.match?(~r/^<([a-z]+)([^<]+)*(?:>(.*)<\/\1>|\s+\/>)$/, string)
end
end
defmodule Type do
@moduledoc """
Mandrake functions for types.
"""
defmacro __using__(_opts) do
quote do
import Mandrake.Type
end
end
@doc """
Returns the first tuple extended with the elements of the second.
## Examples
iex> Mandrake.Type.assign({ "prop1", "prop2" }, { "prop3" })
{ "prop1", "prop2", "prop3" }
"""
def assign(first_tuple, second_tuple) do
Elixir.List.to_tuple(Tuple.to_list(first_tuple) ++ Tuple.to_list(second_tuple))
end
@doc """
Create a copy of a value.
## Examples
iex> Mandrake.Type.clone([1, 2, 3, 4, 5])
[1, 2, 3, 4, 5]
"""
def clone(object) do
object
end
@doc """
Always returns true.
## Examples
iex> Mandrake.Type.bool_true()
true
"""
def bool_true() do
true
end
@doc """
Always returns false.
## Examples
iex> Mandrake.Type.bool_false()
false
"""
def bool_false() do
false
end
@doc """
Returns `true` if value is nil.
## Examples
iex> Mandrake.Type.is_nil([])
false
iex> Mandrake.Type.is_nil(nil)
true
"""
def is_nil(value) do
value == nil
end
@doc """
Returns `true` if the type of the value is the given type.
## Examples
iex> Mandrake.Type.is(Integer, 5)
true
iex> Mandrake.Type.is(List, 5)
false
"""
def is(type, value) do
type == type_of(value)
end
@doc """
Returns type of value.
## Examples
iex> Mandrake.Type.type_of(42.0)
Float
"""
def type_of(value) do
cond do
  Kernel.is_nil(value) ->
    Nil
  is_boolean(value) ->
    Boolean
  is_atom(value) ->
    Atom
  is_binary(value) ->
    Binary
  is_bitstring(value) ->
    Bitstring
is_float(value) ->
Float
is_function(value) ->
Function
is_integer(value) ->
Integer
is_list(value) ->
List
is_map(value) ->
Map
is_number(value) ->
Number
is_pid(value) ->
Pid
is_port(value) ->
Port
is_reference(value) ->
Reference
is_tuple(value) ->
  Tuple
end
end
@doc """
Removes whitespace at the beginning and at the end of the string.
## Examples
iex> Mandrake.Type.trim(" Example ")
"Example"
"""
def trim(string) do
String.trim(string)
end
@doc """
Returns a function that split a string with the given pattern.
## Examples
iex> splitComma = Mandrake.Type.split(",")
...> splitComma.("a,b,c")
["a", "b", "c"]
"""
def split(pattern) do
fn string -> String.split(string, pattern) end
end
@doc """
Returns `true` if value is not nil.
## Examples
iex> Mandrake.Type.exists("Example")
true
iex> Mandrake.Type.exists(nil)
false
"""
def exists(value) do
!(value == nil)
end
@doc """
Returns `true` if value is not nil or false.
## Examples
iex> Mandrake.Type.truthy("Example")
true
iex> Mandrake.Type.truthy(nil)
false
"""
def truthy(value) do
!(value == nil || value == false)
end
@doc """
Returns `true` if value is nil or false.
## Examples
iex> Mandrake.Type.falsy("Example")
false
iex> Mandrake.Type.falsy(nil)
true
"""
def falsy(value) do
value == nil || value == false
end
end
defmodule Annotation do
@moduledoc """
Mandrake annotations.
"""
defmacro __using__(_opts) do
quote do
import Mandrake.Annotation
end
end
@doc """
Raise an exception if time for todo is over.
## Examples
iex> Mandrake.Annotation.todo({{2015, 12, 31}, {0, 0, 0}}, "Must implement 'sum' function")
nil
"""
def todo(date_time, description) do
if Mandrake.DateTime.is_past(Mandrake.DateTime.get_local(), date_time) do
raise Mandrake.TodoError.exception(date_time, description)
end
end
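# A hypothetical compile-time usage sketch (module name and date are made up):
#
#     defmodule MyApp.Worker do
#       Mandrake.Annotation.todo({{2030, 1, 1}, {0, 0, 0}}, "Implement retry logic")
#       # compilation raises Mandrake.TodoError once the local date passes 2030-01-01
#     end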
@doc """
Raise an exception if time to remove deprecation is over.
## Examples
iex> Mandrake.Annotation.deprecation({{2015, 12, 31}, {0, 0, 0}}, "Code refactoring needed")
nil
"""
def deprecation(date_time, description) do
if Mandrake.DateTime.is_past(Mandrake.DateTime.get_local(), date_time) do
raise Mandrake.DeprecationError.exception(date_time, description)
end
end
@doc """
Raise an exception if time to write documentation is over.
## Examples
iex> Mandrake.Annotation.documentation({{2015, 12, 31}, {0, 0, 0}}, "Write documentation for Math module")
nil
"""
def documentation(date_time, description) do
if Mandrake.DateTime.is_past(Mandrake.DateTime.get_local(), date_time) do
raise Mandrake.DocumentationNeededError.exception(date_time, description)
end
end
@doc """
Raise an exception if time to fix redundancy is over.
## Examples
iex> Mandrake.Annotation.redundancy({{2015, 12, 31}, {0, 0, 0}}, "Fix redundancy")
nil
"""
def redundancy(date_time, description) do
if Mandrake.DateTime.is_past(Mandrake.DateTime.get_local(), date_time) do
raise Mandrake.RedundancyError.exception(date_time, description)
end
end
@doc """
Raise an exception if time to fix an issue is over.
## Examples
iex> Mandrake.Annotation.fix({{2015, 12, 31}, {0, 0, 0}}, "Fix issue in 'sum' function")
nil
"""
def fix(date_time, description) do
if Mandrake.DateTime.is_past(Mandrake.DateTime.get_local(), date_time) do
raise Mandrake.FixNeededError.exception(date_time, description)
end
end
end
defmodule DateTime do
@moduledoc """
Mandrake functions for date and time.
"""
defmacro __using__(_opts) do
quote do
import Mandrake.DateTime
end
end
@doc """
Format date or date_time with the given divider.
## Examples
iex> Mandrake.DateTime.format({{2015,12,31}, {10,30,15}}, "/")
"2015/12/31 10:30:15"
iex> Mandrake.DateTime.format({2015,12,31}, "/")
"2015/12/31"
"""
def format({{year, month, day}, {hour, minute, second}}, divider) do
format_date_time("~4..0B" <> divider <> "~2..0B" <> divider <> "~2..0B ~2..0B:~2..0B:~2..0B", [year, month, day, hour, minute, second])
end
def format({year, month, day}, divider) do
format_date_time("~4..0B" <> divider <> "~2..0B" <> divider <> "~2..0B", [year, month, day])
end
@doc """
Format time.
## Examples
iex> Mandrake.DateTime.format({10,30,15})
"10:30:15"
"""
def format({hour, minute, second}) do
format_date_time("~2..0B:~2..0B:~2..0B", [hour, minute, second])
end
@doc """
Format date to dd mm yyyy with the given divider.
## Examples
iex> Mandrake.DateTime.dd_mm_yyyy({2015,12,31}, "/")
"31/12/2015"
"""
def dd_mm_yyyy({year, month, day}, divider) do
format_date_time("~2..0B" <> divider <> "~2..0B" <> divider <> "~4..0B", [day, month, year])
end
@doc """
Format date to mm dd yyyy with the given divider.
## Examples
iex> Mandrake.DateTime.mm_dd_yyyy({2015,12,31}, "/")
"12/31/2015"
"""
def mm_dd_yyyy({year, month, day}, divider) do
format_date_time("~2..0B" <> divider <> "~2..0B" <> divider <> "~4..0B", [month, day, year])
end
@doc """
Returns a function for date or time formatting.
## Examples
iex> formatWithSlash = Mandrake.DateTime.format_with_divider("/")
...> formatWithSlash.({{2015,12,31}, {10,30,15}})
"2015/12/31 10:30:15"
"""
def format_with_divider(divider) do
fn date_time -> format(date_time, divider) end
end
@doc """
Format date/time with the given pattern and list using :io_lib.format.
## Examples
iex> Mandrake.DateTime.format_date_time("~2..0B:~2..0B:~2..0B", [10 ,30 ,15 ])
"10:30:15"
"""
def format_date_time(pattern, list) do
:io_lib.format(pattern, list)
|> Elixir.List.flatten
|> to_string
end
@doc """
Get current date and time.
## Examples
iex> Mandrake.DateTime.get()
{{2015, 11, 29}, {15, 34, 18}}
"""
def get() do
:calendar.universal_time()
end
@doc """
Get current local date and time.
## Examples
iex> Mandrake.DateTime.get_local()
{{2015, 11, 29}, {15, 34, 18}}
"""
def get_local() do
:calendar.local_time()
end
@doc """
Get current date.
## Examples
iex> Mandrake.DateTime.get_date()
{2015, 11, 29}
"""
def get_date() do
extract_date(:calendar.universal_time())
end
@doc """
Get current local date.
## Examples
iex> Mandrake.DateTime.get_local_date()
{2015, 11, 29}
"""
def get_local_date() do
extract_date(:calendar.local_time())
end
@doc """
Get current time.
## Examples
iex> Mandrake.DateTime.get_time()
{15, 34, 18}
"""
def get_time() do
extract_time(:calendar.universal_time())
end
@doc """
Get current local time.
## Examples
iex> Mandrake.DateTime.get_local_time()
{15, 34, 18}
"""
def get_local_time() do
extract_time(:calendar.local_time())
end
@doc """
Extracts the date from a date_time.
## Examples
iex> Mandrake.DateTime.extract_date({{2015, 11, 29}, {15, 34, 18}})
{2015, 11, 29}
"""
def extract_date({date, time}) do
date
end
@doc """
Extracts the time from a date_time.
## Examples
iex> Mandrake.DateTime.extract_time({{2015, 11, 29}, {15, 34, 18}})
{15, 34, 18}
"""
def extract_time({date, time}) do
time
end
@doc """
Returns the difference in seconds between two date_time elements (date_2 - date_1).
## Examples
iex> Mandrake.DateTime.offset({{2015, 11, 29}, {15, 34, 18}}, {{2015, 11, 30}, {15, 34, 18}})
86400
"""
def offset(date_1, date_2) do
:calendar.datetime_to_gregorian_seconds(date_2) - :calendar.datetime_to_gregorian_seconds(date_1)
end
@doc """
Returns the days and time corresponding to the given seconds.
## Examples
iex> Mandrake.DateTime.seconds_to_date_time(86400)
{1, {0, 0, 0}}
"""
def seconds_to_date_time(seconds) do
:calendar.seconds_to_daystime(Kernel.abs(seconds))
end
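# offset/2 and seconds_to_date_time/1 compose; a quick sketch:
#
#     Mandrake.DateTime.offset({{2015, 11, 29}, {15, 34, 18}}, {{2015, 11, 30}, {16, 34, 18}})
#     |> Mandrake.DateTime.seconds_to_date_time()
#     #=> {1, {1, 0, 0}}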
@doc """
Returns `true` if the first date_time is later than the second.
## Examples
iex> Mandrake.DateTime.is_past({{2015, 11, 29}, {15, 34, 18}}, {{2015, 11, 28}, {15, 34, 18}})
true
"""
def is_past(date_1, date_2) do
offset(date_1, date_2) < 0
end
@doc """
Returns `true` if the first date_time is earlier than the second.
## Examples
iex> Mandrake.DateTime.is_future({{2015, 11, 29}, {15, 34, 18}}, {{2015, 11, 28}, {15, 34, 18}})
false
"""
def is_future(date_1, date_2) do
offset(date_1, date_2) > 0
end
@doc """
Add date to time.
## Examples
iex> Mandrake.DateTime.add_date({15, 34, 18})
{{2015, 11, 29}, {15, 34, 18}}
"""
def add_date(time) do
{extract_date(get()), time}
end
@doc """
Add time to date.
## Examples
iex> Mandrake.DateTime.add_time({2015, 11, 29})
{{2015, 11, 29}, {0, 0, 0}}
"""
def add_time(date) do
{date, {0, 0, 0}}
end
end
#Exceptions
defmodule TodoError do
@moduledoc """
Error to notify that a TODO must be fixed.
"""
defmacro __using__(_opts) do
quote do
import Mandrake.TodoError
end
end
defexception [:message]
@doc """
Raise TodoError with the given date and message.
## Examples
iex> raise Mandrake.TodoError.exception({{2015, 12, 31}, {0, 0, 0}}, "Description")
"""
def exception(date, message) do
%TodoError{message: "\nTodo, time is over on #{inspect Mandrake.DateTime.format(date, "/")}.\nDescription: #{inspect message}"}
end
end
defmodule DeprecationError do
@moduledoc """
Error to notify that a deprecation must be deleted.
"""
defmacro __using__(_opts) do
quote do
import Mandrake.DeprecationError
end
end
defexception [:message]
@doc """
Raise DeprecationError with the given date and message.
## Examples
iex> raise Mandrake.DeprecationError.exception({{2015, 12, 31}, {0, 0, 0}}, "Description")
"""
def exception(date, message) do
%DeprecationError{message: "\nFix deprecation, time is over on #{inspect Mandrake.DateTime.format(date, "/")}.\nDescription: #{inspect message}"}
end
end
defmodule DocumentationNeededError do
@moduledoc """
Error to notify that documentation must be written.
"""
defmacro __using__(_opts) do
quote do
import Mandrake.DocumentationNeededError
end
end
defexception [:message]
@doc """
Raise DocumentationNeededError with the given date and message.
## Examples
iex> raise Mandrake.DocumentationNeededError.exception({{2015, 12, 31}, {0, 0, 0}}, "Description")
"""
def exception(date, message) do
%DocumentationNeededError{message: "\nWrite documentation, time is over on #{inspect Mandrake.DateTime.format(date, "/")}.\nDescription: #{inspect message}"}
end
end
defmodule RedundancyError do
@moduledoc """
Error to notify that a redundancy must be fixed.
"""
defmacro __using__(_opts) do
quote do
import Mandrake.RedundancyError
end
end
defexception [:message]
@doc """
Raise RedundancyError with the given date and message.
## Examples
iex> raise Mandrake.RedundancyError.exception({{2015, 12, 31}, {0, 0, 0}}, "Description")
"""
def exception(date, message) do
%RedundancyError{message: "\nFix redundancy, time is over on #{inspect Mandrake.DateTime.format(date, "/")}.\nDescription: #{inspect message}"}
end
end
defmodule FixNeededError do
@moduledoc """
Error to notify that a fix is needed.
"""
defmacro __using__(_opts) do
quote do
import Mandrake.FixNeededError
end
end
defexception [:message]
@doc """
Raise FixNeededError with the given date and message.
## Examples
iex> raise Mandrake.FixNeededError.exception({{2015, 12, 31}, {0, 0, 0}}, "Description")
"""
def exception(date, message) do
%FixNeededError{message: "\nFix issue, time is over on #{inspect Mandrake.DateTime.format(date, "/")}.\nDescription: #{inspect message}"}
end
end
end
# lib/mandrake.ex
defmodule Taex.MovingAverage do
defmodule DoubleEma do
defstruct [:ema, :ema_2, :value]
end
defmodule TripleEma do
defstruct [:ema, :ema_2, :ema_3, :value]
end
defmodule VolumeWeightedMovingAverage do
defstruct [:prices, :volumes, :periods, :value]
def update(%VolumeWeightedMovingAverage{} = vwma, market_price, volume) do
vwma = %{vwma | prices: vwma.prices ++ [market_price], volumes: vwma.volumes ++ [volume]}
vwma = if Enum.count(vwma.prices) > vwma.periods do
[_head | prices] = vwma.prices
[_head | volumes] = vwma.volumes
%{vwma | prices: prices, volumes: volumes}
else
vwma
end
numerator = vwma.prices
|> Enum.with_index
|> Enum.map(fn({price, index})->
price * Enum.at(vwma.volumes, index)
end)
|> Enum.sum
%{vwma | value: numerator/Enum.sum(vwma.volumes)}
end
end
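# A minimal usage sketch for the volume-weighted moving average (field values
# chosen arbitrarily for illustration):
#
#     vwma = %Taex.MovingAverage.VolumeWeightedMovingAverage{prices: [], volumes: [], periods: 2, value: nil}
#     vwma = Taex.MovingAverage.VolumeWeightedMovingAverage.update(vwma, 10.0, 100)
#     vwma = Taex.MovingAverage.VolumeWeightedMovingAverage.update(vwma, 20.0, 300)
#     vwma.value #=> 17.5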
@doc """
Calculates the simple moving average which is just the sum of the items passed in divided by the number of items
"""
@spec simple(integer, [float]) :: float
def simple(items) when is_list(items), do: Enum.sum(items) / Enum.count(items)
def simple(0, _), do: 0
def simple(n, items) when is_list(items) do
sum = items |> Enum.take(n) |> Enum.sum
(sum / n)
end
@spec exponential(integer, [float]) :: float
def exponential(_, []), do: 0
def exponential(n, prices) do
[head | _] = exp_calc(n, prices)
head
end
def exponential(n, price, previous_ema) do
exp_calc(n, [price], [previous_ema]) |> Enum.at(0)
end
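# exponential/3 supports streaming updates from a previously computed EMA, e.g.
#
#     ema = Taex.MovingAverage.exponential(10, prices)         # seed from price history
#     ema = Taex.MovingAverage.exponential(10, new_price, ema) # fold in the next tick
#
# (variable names here are illustrative only).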
@spec double_ema(integer, [float]) :: %DoubleEma{}
def double_ema(_, []), do: 0
def double_ema(n, prices) do
emas = exp_calc(n, prices)
ema_2 = exp_calc(n, emas |> Enum.reverse) |> Enum.at(0)
ema = emas |> Enum.at(0)
%DoubleEma{ema: ema, ema_2: ema_2, value: 2 * ema - ema_2}
end
def double_ema(n, price, %DoubleEma{ema: previous_ema, ema_2: previous_ema_2}) do
ema = exp_calc(n, [price], [previous_ema]) |> Enum.at(0)
ema_2 = exp_calc(n, [ema], [previous_ema_2]) |> Enum.at(0)
%DoubleEma{ema: ema, ema_2: ema_2, value: 2 * ema - ema_2}
end
@spec triple_ema(integer, [float]) :: %TripleEma{}
def triple_ema(_, []), do: 0
def triple_ema(n, prices) do
emas = exp_calc(n, prices)
ema_2s = exp_calc(n, emas |> Enum.reverse)
ema_3 = exp_calc(n, ema_2s |> Enum.reverse) |> Enum.at(0)
ema = emas |> Enum.at(0)
ema_2 = ema_2s |> Enum.at(0)
%TripleEma{ema: ema, ema_2: ema_2, ema_3: ema_3, value: (3 * ema - 3 * ema_2) + ema_3}
end
def triple_ema(n, price, %TripleEma{ema: previous_ema, ema_2: previous_ema_2, ema_3: previous_ema_3}) do
ema = exp_calc(n, [price], [previous_ema]) |> Enum.at(0)
ema_2 = exp_calc(n, [ema], [previous_ema_2]) |> Enum.at(0)
ema_3 = exp_calc(n, [ema_2], [previous_ema_3]) |> Enum.at(0)
%TripleEma{ema: ema, ema_2: ema_2, ema_3: ema_3, value: (3 * ema - 3 * ema_2) + ema_3}
end
@spec weighting_multiplier(integer) :: float
defp weighting_multiplier(n) do
2 / (n + 1)
end
@spec exp_calc(integer, [float], [float]) :: [float]
defp exp_calc(_, [], emas), do: emas
defp exp_calc(n, [p | tl], [ema_head | ema_tail]) do
k = weighting_multiplier(n)
exp_calc(n, tl, [(p * k) + (ema_head * (1 - k))] ++ [ema_head] ++ ema_tail)
end
defp exp_calc(k, [hd | tl]), do: exp_calc(k, tl, [hd])
@spec weighted([{float, float}]) :: float
def weighted(items) do
Enum.reduce(items, 0, fn {a, b} , acc -> acc + (a * b) end)
end
end
# lib/taex/moving_average.ex
defmodule Calculator do
@moduledoc """
Documentation for Calculator.
"""
@doc """
Hello world.
## Examples
iex> Calculator.hello
:world
"""
def hello do
:world
end
@doc """
Sum the two numbers
### Examples
iex> Calculator.sum(2,4)
6
"""
def sum(x, y), do: x + y
@doc """
Multiply the two numbers
### Examples
iex> Calculator.multiplicate(2,4)
8
"""
def multiplicate(x, y), do: x * y
@doc """
Subtract the two numbers
### Examples
iex> Calculator.subtract(2,4)
-2
"""
def subtract(x, y), do: x - y
@doc """
Divide the two numbers
### Examples
iex> Calculator.divide(2,4)
0.5
"""
def divide(x, y), do: x / y
@doc """
Mod divide the two numbers
### Examples
iex> Calculator.mod_divide(4,2)
0
"""
def mod_divide(x, y), do: rem(x, y)
@doc """
Division the two numbers
### Examples
iex> Calculator.division(4,2)
2
"""
def division(x, y), do: div(x, y)
@doc """
Sum multiple values
### Examples
iex> Calculator.sum_values([1,2,3,4,5])
15
"""
def sum_values(array), do: Enum.sum(array)
@doc """
Subtract multiple values
### Examples
iex> Calculator.subtract_values([1,2,3,4,5])
3
"""
def subtract_values(array) do
Enum.reduce(array, fn(x, acc) -> x - acc end)
end
@doc """
Multiply multiple values
### Examples
iex> Calculator.multiplecate_values([1,2,3,4,5])
120
"""
def multiplecate_values(array) do
Enum.reduce(array, fn(x, acc) -> x * acc end)
end
@doc """
Divide multiple values
### Examples
iex> Calculator.divide_values([1,2,3,4,5])
1.875
"""
def divide_values(array) do
Enum.reduce(array, fn(x, acc) -> x / acc end)
end
@doc """
Integer division across multiple values
### Examples
iex> Calculator.division_values([1,2,3,4,5])
1
"""
def division_values(array) do
Enum.reduce(array, fn(x, acc) -> div(x,acc) end)
end
@doc """
Mod division across multiple values
### Examples
iex> Calculator.mod_values([5,4,3,2,1])
1
"""
def mod_values(array) do
Enum.reduce(array, fn(x, acc) -> rem(x, acc) end)
end
end
# lib/calculator.ex
defmodule Day18 do
def part1(input) do
parse(input)
|> Enum.reduce(fn n, sum ->
reduce([sum, n])
end)
|> magnitude
end
def part2(input) do
numbers = parse(input)
Enum.reduce(numbers, 0, fn n1, highest ->
Enum.reduce(numbers -- [n1], highest, fn n2, highest ->
max(highest, reduce([n1, n2]) |> magnitude)
end)
end)
end
@doc """
## Examples
iex> Day18.reduce([[[[[4,3],4],4],[7,[[8,4],9]]],[1,1]])
[[[[0,7],4],[[7,8],[6,0]]],[8,1]]
"""
def reduce(n) do
case explode(n) do
nil ->
case split(n) do
{true, n} ->
reduce(n)
{false, _} ->
n
end
n ->
reduce(n)
end
end
@doc """
## Examples
iex> Day18.explode([[[[8,1],2],3],4])
nil
iex> Day18.explode([[[[[9,8],1],2],3],4])
[[[[0,9],2],3],4]
iex> Day18.explode([7,[6,[5,[4,[3,2]]]]])
[7,[6,[5,[7,0]]]]
iex> Day18.explode([[6,[5,[4,[3,2]]]],1])
[[6,[5,[7,0]]],3]
iex> Day18.explode([[3,[2,[1,[7,3]]]],[6,[5,[4,[3,2]]]]])
[[3,[2,[8,0]]],[9,[5,[4,[3,2]]]]]
iex> Day18.explode([[3,[2,[8,0]]],[9,[5,[4,[3,2]]]]])
[[3,[2,[8,0]]],[9,[5,[7,0]]]]
"""
def explode(n) do
case do_explode(n, 0) do
{_, nil, nil} ->
nil
{exploded, _, _} ->
exploded
end
end
defp split([a, b]) do
case split(a) do
{false, a} ->
case split(b) do
{false, b} ->
{false, [a, b]}
{true, b} ->
{true, [a, b]}
end
{true, a} ->
{true, [a, b]}
end
end
defp split(n) when is_integer(n) and n > 9, do: {true, split_number(n)}
defp split(other), do: {false, other}
@doc """
## Examples
iex> Day18.split_number(10)
[5,5]
iex> Day18.split_number(11)
[5,6]
iex> Day18.split_number(12)
[6,6]
"""
def split_number(n) do
q = n / 2
[floor(q), ceil(q)]
end
@doc """
## Examples
iex> Day18.magnitude([9,1])
29
iex> Day18.magnitude([1,9])
21
iex> Day18.magnitude([[9,1],[1,9]])
129
iex> Day18.magnitude([[[[8,7],[7,7]],[[8,6],[7,7]]],[[[0,7],[6,6]],[8,7]]])
3488
"""
def magnitude(n) do
case n do
[a, b] ->
magnitude(a) * 3 + 2 * magnitude(b)
_ when is_integer(n) ->
n
end
end
defp do_explode([a,b], level) do
if level === 4 do
{0, [a], [b]}
else
{a, left, right} = do_explode(a, level + 1)
{b, right} = propagate_right(right, b)
case {left, right} do
{nil, nil} ->
{b, left, right} = do_explode(b, level + 1)
{a, left} = propagate_left(left, a)
{[a, b], left, right}
{_, _} ->
{[a, b], left, right}
end
end
end
defp do_explode(n, _) when is_integer(n), do: {n, nil, nil}
defp propagate_left(nil, a), do: {a, nil}
defp propagate_left([], a), do: {a, []}
defp propagate_left([left], a) when is_integer(a) do
{left + a, []}
end
defp propagate_left([left], [a, b]) do
{b, []} = propagate_left([left], b)
{[a, b], []}
end
defp propagate_right(nil, a), do: {a, nil}
defp propagate_right([], a), do: {a, []}
defp propagate_right([right], a) when is_integer(a) do
{right + a, []}
end
defp propagate_right([right], [a, b]) do
{a, []} = propagate_right([right], a)
{[a, b], []}
end
defp parse(input) do
Enum.map(input, fn line ->
{:ok, term} = Code.string_to_quoted(line)
term
end)
end
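# parse/1 leans on Code.string_to_quoted/1: a line such as "[[1,2],[3,4]]"
# is read directly into the nested list [[1, 2], [3, 4]].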
end
# day18/lib/day18.ex
defmodule AutoApi.Property do
@moduledoc """
Data wrapper for state properties.
The struct contains three fields: `data`, `timestamp` and `failure`.
The `data` field can be either a scalar or a map, and when set it contains
the actual value of the state property.
The `timestamp` field indicates when the data was last updated, and it is
in `DateTime` format.
The `failure` is set if there was an error that prevented retrieving the
property data.
The `availability` fields indicates how often the data is updated, and any
limitation on how many times the property can receive updates in a specific
time frame.
"""
require Logger
alias AutoApi.UnitType
defstruct [:data, :timestamp, :failure, :availability]
@prop_id_to_name %{0x01 => :data, 0x02 => :timestamp, 0x03 => :failure, 0x05 => :availability}
@prop_name_to_id %{data: 0x01, timestamp: 0x02, failure: 0x03, availability: 0x05}
@type reason ::
:rate_limit
| :execution_timeout
| :format_error
| :unauthorised
| :unknown
| :pending
| :oem_error
@type failure :: %{reason: reason(), description: String.t()}
@type update_rate ::
:trip_high
| :trip
| :trip_start_end
| :trip_end
| :unknown
| :not_available
| :on_change
@type applies_per :: :app | :vehicle
@type availability :: %{
update_rate: update_rate(),
rate_limit: UnitType.frequency(),
applies_per: applies_per()
}
@type t(data) :: %__MODULE__{
data: data,
timestamp: nil | DateTime.t(),
failure: nil | failure,
availability: nil | availability
}
@type t() :: t(any())
@type spec :: map() | list()
@doc """
Converts Property struct to binary format
"""
@spec to_bin(__MODULE__.t(), spec()) :: binary()
def to_bin(%__MODULE__{} = prop, spec) do
wrap_with_size(prop, :data, &data_to_bin(&1, spec)) <>
wrap_with_size(prop, :timestamp, &timestamp_to_bin/1) <>
wrap_with_size(prop, :failure, &failure_to_bin/1) <>
wrap_with_size(prop, :availability, &availability_to_bin/1)
end
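# A rough wire-format sketch (the spec map shape is assumed from the clauses
# below, not taken from the official AutoAPI specifications):
#
#     prop = %AutoApi.Property{data: 42}
#     AutoApi.Property.to_bin(prop, %{"type" => "uinteger", "size" => 1})
#     #=> <<0x01, 0x00, 0x01, 42>>
#
# i.e. each present component is encoded as <<id, size::integer-16, value::binary>>.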
defp wrap_with_size(prop, field, conversion_fun) do
case Map.get(prop, field) do
nil ->
<<>>
value ->
id = @prop_name_to_id[field]
binary_value = conversion_fun.(value)
size = byte_size(binary_value)
<<id, size::integer-16, binary_value::binary>>
end
end
defp data_to_bin(nil, _), do: <<>>
defp data_to_bin(data, %{"type" => "string", "embedded" => true}) do
<<byte_size(data)::integer-16, data::binary>>
end
defp data_to_bin(data, %{"type" => "string"}) do
data
end
defp data_to_bin(data, %{"type" => "bytes", "embedded" => true}) do
<<byte_size(data)::integer-16, data::binary>>
end
defp data_to_bin(data, %{"type" => "bytes"}) do
data
end
defp data_to_bin(data, %{"type" => "enum"} = spec) do
enum_id =
spec["enum_values"]
|> Enum.find(%{}, &(&1["name"] == Atom.to_string(data)))
|> Map.get("id")
unless enum_id, do: Logger.warn("Enum key `#{data}` doesn't exist in #{inspect spec}")
<<enum_id>>
end
defp data_to_bin(data, %{"type" => "float", "size" => size}) do
size_bit = size * 8
<<data::float-size(size_bit)>>
end
defp data_to_bin(data, %{"type" => "double", "size" => size}) do
size_bit = size * 8
<<data::float-size(size_bit)>>
end
defp data_to_bin(data, %{"type" => "integer", "size" => size}) do
size_bit = size * 8
<<data::integer-signed-size(size_bit)>>
end
defp data_to_bin(data, %{"type" => "uinteger", "size" => size}) do
size_bit = size * 8
<<data::integer-unsigned-size(size_bit)>>
end
defp data_to_bin(data, %{"type" => "timestamp"}) do
timestamp_to_bin(data)
end
defp data_to_bin(data, %{"type" => "custom"} = specs) do
bin_data = custom_type_to_bin(data, specs)
if specs["embedded"] && is_nil(specs["size"]) do
# Prepend with size only if embedded with no size, like string and bytes
<<byte_size(bin_data)::integer-16, bin_data::binary>>
else
bin_data
end
end
# Workaround while `capability_state` type is `bytes`
defp data_to_bin(command, %{"type" => "types.capability_state"}) do
AutoApi.Command.to_bin(command)
end
defp data_to_bin(data, %{"type" => "types." <> type} = spec) do
type_spec = type |> AutoApi.CustomType.spec() |> Map.put("embedded", spec["embedded"])
data_to_bin(data, type_spec)
end
defp data_to_bin(%{value: value, unit: unit}, %{"type" => "unit." <> type}) do
type_id = AutoApi.UnitType.id(type)
unit_id = AutoApi.UnitType.unit_id(type, unit)
<<type_id, unit_id, value::float-size(64)>>
end
defp custom_type_to_bin(data, specs) do
specs
|> Map.get("items")
|> Enum.map(&Map.put(&1, "embedded", true))
|> Enum.map(fn %{"name" => name} = spec ->
data
|> Map.get(String.to_atom(name))
|> data_to_bin(spec)
end)
|> :binary.list_to_bin()
end
defp timestamp_to_bin(nil), do: <<>>
defp timestamp_to_bin(timestamp) do
milisec = DateTime.to_unix(timestamp, :millisecond)
<<milisec::integer-64>>
end
defp failure_to_bin(nil), do: <<>>
defp failure_to_bin(%{reason: reason, description: description}) do
reason_bin = AutoApi.CommonData.convert_state_to_bin_failure_reason(reason)
description_size = byte_size(description)
<<reason_bin, description_size::integer-16, description::binary>>
end
defp availability_to_bin(nil), do: <<>>
defp availability_to_bin(availability) do
# Availability type is "types.availability"
data_to_bin(availability, %{"type" => "types.availability"})
end
@doc """
Converts Property binary to struct
"""
@spec to_struct(binary(), spec()) :: __MODULE__.t()
def to_struct(binary, specs) do
prop_in_binary = split_binary_to_parts(binary, %__MODULE__{})
data = to_value(prop_in_binary.data, specs)
common_components_to_struct(prop_in_binary, data)
end
defp common_components_to_struct(prop_in_binary, data) do
timestamp = to_value(prop_in_binary.timestamp, %{"type" => "timestamp"})
failure = failure_to_value(prop_in_binary.failure)
availability = availability_to_value(prop_in_binary.availability)
%__MODULE__{data: data, timestamp: timestamp, failure: failure, availability: availability}
end
defp to_value(nil, _) do
nil
end
defp to_value(binary_data, %{"type" => "string"}) do
binary_data
end
defp to_value(binary_data, %{"type" => "bytes"}) do
binary_data
end
defp to_value(binary_data, %{"type" => "float"}) do
AutoApi.CommonData.convert_bin_to_float(binary_data)
end
defp to_value(binary_data, %{"type" => "double"}) do
AutoApi.CommonData.convert_bin_to_double(binary_data)
end
defp to_value(binary_data, %{"type" => "integer"}) do
AutoApi.CommonData.convert_bin_to_integer(binary_data)
end
defp to_value(binary_data, %{"type" => "uinteger"}) do
AutoApi.CommonData.convert_bin_to_uinteger(binary_data)
end
defp to_value(binary_data, %{"type" => "timestamp"}) do
timestamp_in_milisec = AutoApi.CommonData.convert_bin_to_uinteger(binary_data)
case DateTime.from_unix(timestamp_in_milisec, :millisecond) do
{:ok, datetime} -> datetime
_ -> nil
end
end
defp to_value(binary_data, %{"type" => "enum", "size" => size} = spec) do
size_bit = size * 8
<<enum_id::integer-size(size_bit)>> = binary_data
enum_name =
spec["enum_values"]
|> Enum.find(%{}, &(&1["id"] == enum_id))
|> Map.get("name")
if enum_name do
String.to_atom(enum_name)
else
Logger.warn("enum with value `#{binary_data}` doesn't exist in #{inspect spec}")
raise ArgumentError, message: "Invalid enum ID #{inspect <<enum_id>>}"
end
end
defp to_value(binary_data, %{"type" => "custom"} = specs) do
specs
|> Map.get("items")
|> Enum.reduce({0, []}, fn spec, {counter, acc} ->
item_spec = fetch_item_spec(spec)
size = fetch_item_size(binary_data, counter, item_spec)
counter = update_counter(counter, item_spec)
if counter + size > byte_size(binary_data) do
Logger.warn("not able to parse binary_data for #{inspect(specs)}")
end
data_value =
binary_data
|> :binary.part(counter, size)
|> to_value(item_spec)
{counter + size, [{String.to_atom(spec["name"]), data_value} | acc]}
end)
|> elem(1)
|> Enum.into(%{})
end
# Workaround while `capability_state` type is `bytes`
defp to_value(binary_data, %{"type" => "types.capability_state"}) do
AutoApi.Command.from_bin(binary_data)
end
defp to_value(binary_data, %{"type" => "types." <> type}) do
type_spec = AutoApi.CustomType.spec(type)
to_value(binary_data, type_spec)
end
defp to_value(<<id, unit_id, value::float-64>>, %{"type" => "unit." <> _type}) do
unit = AutoApi.UnitType.unit_name(id, unit_id)
%{value: value, unit: unit}
end
defp failure_to_value(nil), do: nil
defp failure_to_value(failure) do
<<reason, size::integer-16, description::binary-size(size)>> = failure
%{
reason: AutoApi.CommonData.convert_bin_to_state_failure_reason(reason),
description: description
}
end
defp availability_to_value(nil), do: nil
defp availability_to_value(availability_bin) do
# Availability type is "types.availability"
to_value(availability_bin, %{"type" => "types.availability"})
end
defp split_binary_to_parts(
<<prop_comp_id, prop_size::integer-16, prop_data::binary-size(prop_size), rest::binary>>,
acc
) do
acc = Map.put(acc, @prop_id_to_name[prop_comp_id], prop_data)
split_binary_to_parts(rest, acc)
end
defp split_binary_to_parts(<<>>, acc), do: acc
defp fetch_item_spec(%{"type" => "types." <> type}) do
type
|> AutoApi.CustomType.spec()
|> Map.put("embedded", "true")
end
defp fetch_item_spec(spec) do
Map.put(spec, "embedded", "true")
end
@sizeless_types ~w(custom string bytes)
defp fetch_item_size(_binary_data, _counter, %{"size" => size}) do
size
end
defp fetch_item_size(binary_data, counter, %{"type" => type}) when type in @sizeless_types do
binary_data
|> :binary.part(counter, 2)
|> AutoApi.CommonData.convert_bin_to_uinteger()
end
defp fetch_item_size(_, _, spec) do
raise("couldn't find size for #{inspect(spec)}")
end
defp update_counter(counter, specs) do
if specs["type"] in @sizeless_types && is_nil(specs["size"]) do
counter + 2
else
counter
end
end
end
# lib/auto_api/property.ex
defmodule Day19 do
# looking at input, letters should be: H ... Z ?
# 2D Map with origin at top left, meaning going down increases y
@alphabet MapSet.new(["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"])
@valid MapSet.union(@alphabet, MapSet.new(["-", "|", "+"]))
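# The input is an ASCII routing diagram: "|" and "-" are path segments, "+" marks
# a turn, letters are waypoints to collect, and spaces are off-path (see travel/7).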
def solve(filename) do
map = to_map filename
start_x =
map
|> elem(0)
|> get_start_x
xmax = map |> elem(0) |> tuple_size
ymax = map |> tuple_size
{letters, counter} = travel(map, xmax - 1, ymax - 1, {start_x, 0}, :down, [], 0)
{letters |> Enum.join(""), counter}
end
def to_map(filename) do
filename
|> File.stream!([:utf8], :line)
|> Enum.map(fn line ->
line
|> String.trim("\n")
|> String.split("", trim: true)
|> List.to_tuple
end)
|> List.to_tuple
end
def get_start_x(tuple) do
tuple
|> Tuple.to_list
|> Enum.find_index(fn x -> x == "|" end)
end
def travel(map, xmax, ymax, {x, y}, _dir, letters, counter)
when x < 0 or x > xmax or y < 0 or y > ymax # out of the grid
or map |> elem(y) |> elem(x) == " " do # out of path
{letters, counter}
end
def travel(map, xmax, ymax, {x, y}, dir, letters, counter) do
p = map |> elem(y) |> elem(x)
cond do
p == "+" ->
{ndir, nx, ny} = next_dir(map, xmax, ymax, {x, y}, dir)
travel(map, xmax, ymax, {nx, ny}, ndir, letters, counter + 1)
true ->
letters =
if p in @alphabet do
letters ++ [p]
else
letters
end
{nx, ny} = move(dir, x, y)
travel(map, xmax, ymax, {nx, ny}, dir, letters, counter + 1)
end
end
def next_dir(map, xmax, ymax, {x, y}, dir) do
moves =
cond do # possible moves
dir == :up or dir == :down ->
[{:left, x - 1, y}, {:right, x + 1, y}]
dir == :left or dir == :right ->
[{:down, x, y + 1}, {:up, x, y - 1}]
end
moves
|> Enum.find(fn {_d, nx, ny} ->
nx >= 0 and nx <= xmax
and ny >= 0 and ny <= ymax
and (map |> elem(ny) |> elem(nx)) in @valid
end)
end
def move(dir, x, y) do
case dir do
:up ->
{x, y - 1}
:down ->
{x, y + 1}
:left ->
{x - 1, y}
:right ->
{x + 1, y}
end
end
end
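# Illustrative usage sketch (not part of the original file; the input path is
# hypothetical): solve/1 reads the routing diagram and returns the letters
# collected along the path together with the number of steps taken.
#
#   {letters, steps} = Day19.solve("input.txt")
#   # letters is a string such as "ABCDEF", steps is an integer step count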
|
2017/elixir/day19/lib/day19.ex
| 0.522202
| 0.655594
|
day19.ex
|
starcoder
|
defmodule Plymio.Funcio.Enum.Collate do
@moduledoc ~S"""
Collate Patterns for Enumerables.
These functions collate the elements of an *enum* according to one of the defined *patterns*.
See `Plymio.Funcio` for overview and documentation terms.
"""
use Plymio.Funcio.Attribute
@type error :: Plymio.Funcio.error()
import Plymio.Fontais.Error,
only: [
new_argument_error_result: 1
]
import Plymio.Fontais.Guard,
only: [
is_value_unset_or_nil: 1
]
@doc ~S"""
`collate0_enum/1` takes an *enum* and collates its elements according to *pattern 0*.
If an element is `{:ok, value}`, the `value` is added to the
accumulated list of `values` and `{:ok, values}` is returned.
If any element is `{:error, error}`, or is not of the form `{:ok, value}`,
the collation is halted, returning `{:error, error}`.
## Examples
iex> enum = [{:ok, 1}, {:ok, 2}, {:ok, 3}]
...> enum |> collate0_enum
{:ok, [1, 2, 3]}
iex> enum = [{:ok, 1}, {:error, %ArgumentError{message: "value is 2"}}, {:ok, 3}]
...> {:error, error} = enum |> collate0_enum
...> error |> Exception.message
"value is 2"
iex> {:error, error} = :not_an_enum |> collate0_enum
...> error |> Exception.message
...> |> String.starts_with?("protocol Enumerable not implemented for :not_an_enum")
true
"""
@since "0.1.0"
@spec collate0_enum(any) :: {:ok, list} | {:error, error}
def collate0_enum(enum) do
try do
enum
|> Enum.reduce_while(
[],
fn value, values ->
value
|> case do
{:ok, value} -> {:cont, [value | values]}
{:error, %{__struct__: _}} = result -> {:halt, result}
value -> {:halt, new_argument_error_result(m: "pattern0 result invalid", v: value)}
end
end
)
|> case do
{:error, %{__exception__: true}} = result -> result
values -> {:ok, values |> Enum.reverse()}
end
rescue
error ->
{:error, error}
end
end
@doc ~S"""
`collate1_enum/1` takes an *enum* and collates its elements according to *pattern 1*.
If an element is `{:ok, value}` or `value`, the `value` is added to
the accumulated list of `values` and `{:ok, values}` is returned.
If any element is `{:error, error}` the collation is halted, returning the `{:error, error}`.
## Examples
iex> [:a, 2, {:ok, :tre}] |> collate1_enum
{:ok, [:a, 2, :tre]}
iex> enum = [{:ok, 1}, {:error, %ArgumentError{message: "value is 2"}}, {:ok, 3}]
...> {:error, error} = enum |> collate1_enum
...> error |> Exception.message
"value is 2"
iex> {:error, error} = :not_an_enum |> collate1_enum
...> error |> Exception.message
...> |> String.starts_with?("protocol Enumerable not implemented for :not_an_enum")
true
"""
@since "0.1.0"
@spec collate1_enum(any) :: {:ok, list} | {:error, error}
def collate1_enum(enum) do
try do
enum
|> Enum.reduce_while(
[],
fn value, values ->
value
|> case do
{:ok, value} -> {:cont, [value | values]}
{:error, %{__struct__: _}} = result -> {:halt, result}
value -> {:cont, [value | values]}
end
end
)
|> case do
{:error, %{__exception__: true}} = result -> result
values -> {:ok, values |> Enum.reverse()}
end
rescue
error ->
{:error, error}
end
end
@doc ~S"""
`collate2_enum/1` takes an *enum* and collates its elements according to *pattern 2*.
If an element is `{:ok, value}` or `value`, the `value` is added to
the accumulated list of `values` and `{:ok, values}` is returned.
If an element is `nil` or *the unset value* (see `Plymio.Fontais`),
the element is dropped and **not** added to the accumulated `values`.
If any element is `{:error, error}` the collation is halted, returning the `{:error, error}`.
## Examples
iex> [{:ok, :a}, nil, {:ok, :tre}] |> collate2_enum
{:ok, [:a, :tre]}
iex> unset_value = Plymio.Fontais.Guard.the_unset_value
...> [unset_value, nil, {:ok, :a}, nil, {:ok, :tre}, unset_value] |> collate2_enum
{:ok, [:a, :tre]}
iex> unset_value = Plymio.Fontais.Guard.the_unset_value
...> [unset_value, nil, {:ok, :a}, nil, :b, {:ok, :c}, unset_value, :d] |> collate2_enum
{:ok, [:a, :b, :c, :d]}
iex> unset_value = Plymio.Fontais.Guard.the_unset_value
...> enum = [unset_value, {:ok, 1}, nil, {:error, %ArgumentError{message: "value is 2"}}, {:ok, 3}]
...> {:error, error} = enum |> collate2_enum
...> error |> Exception.message
"value is 2"
iex> {:error, error} = :not_an_enum |> collate2_enum
...> error |> Exception.message
...> |> String.starts_with?("protocol Enumerable not implemented for :not_an_enum")
true
"""
@since "0.1.0"
@spec collate2_enum(any) :: {:ok, list} | {:error, error}
def collate2_enum(enum) do
try do
enum
|> Enum.reduce_while(
[],
fn value, values ->
value
|> case do
{:ok, value} -> {:cont, [value | values]}
{:error, %{__struct__: _}} = result -> {:halt, result}
value when is_value_unset_or_nil(value) -> {:cont, values}
value -> {:cont, [value | values]}
end
end
)
|> case do
{:error, %{__exception__: true}} = result -> result
values -> {:ok, values |> Enum.reverse()}
end
rescue
error ->
{:error, error}
end
end
end
|
lib/funcio/enum/collate/collate.ex
| 0.821331
| 0.474327
|
collate.ex
|
starcoder
|
defmodule MerkleMap.MerkleTree do
@moduledoc false
@opaque t() :: %__MODULE__{}
@type key :: term()
@type value :: term()
defstruct [:tree]
alias MerkleMap.MerkleTreeImpl
alias MerkleMap.MerkleTree.Diff
@spec new(Enumerable.t()) :: t()
def new(enum) do
Enum.reduce(enum, new(), fn {k, v}, tree ->
put(tree, k, v)
end)
end
@spec new() :: t()
def new() do
%__MODULE__{tree: MerkleTreeImpl.new()}
end
@spec put(t(), key(), value()) :: t()
def put(%__MODULE__{tree: tree}, key, value) do
%__MODULE__{tree: MerkleTreeImpl.put(tree, key, value)}
end
@spec delete(t(), key()) :: t()
def delete(%__MODULE__{tree: tree}, key) do
%__MODULE__{tree: MerkleTreeImpl.delete(tree, key)}
end
@spec diff_keys(t(), t(), depth :: integer()) :: {t(), t(), [key()]}
def diff_keys(%__MODULE__{tree: tree}, %__MODULE__{tree: tree2}, depth \\ 0)
when is_integer(depth) and depth >= 0 do
MerkleTreeImpl.diff_keys(tree, tree2, depth)
end
@spec equal?(t(), t()) :: boolean()
def equal?(%__MODULE__{tree: tree}, %__MODULE__{tree: tree2}) do
MerkleTreeImpl.equal?(tree, tree2)
end
@spec keys(t()) :: [key()]
def keys(%__MODULE__{tree: tree}) do
MerkleTreeImpl.keys(tree)
end
def subtree(%__MODULE__{tree: tree}, location, depth)
when is_bitstring(location) and is_integer(depth) and depth > 0 do
%__MODULE__{tree: MerkleTreeImpl.subtree(tree, location, depth)}
end
def max_depth(%__MODULE__{tree: tree}) do
MerkleTreeImpl.max_depth(tree)
end
def update_hashes(%__MODULE__{tree: tree}) do
%__MODULE__{tree: MerkleTreeImpl.calculate_hashes(tree)}
end
def prepare_partial_diff(merkle_tree, depth) do
{:continue, %Diff{trees: [{<<>>, subtree(merkle_tree, <<>>, depth)}]}}
end
def continue_partial_diff(merkle_tree, %Diff{} = partial, depth)
when is_integer(depth) and depth > 0 do
{partials, keys} =
partial.trees
|> Enum.flat_map(fn {loc, tree} ->
merkle_tree
|> subtree(loc, depth)
|> diff_keys(tree, bit_size(loc))
end)
|> Enum.split_with(fn
{:partial, _loc} -> true
_ -> false
end)
trees =
Enum.map(partials, fn {:partial, loc} ->
{loc, subtree(merkle_tree, loc, depth)}
end)
case trees do
[] -> {:ok, partial.keys ++ keys}
trees -> {:continue, %Diff{keys: partial.keys ++ keys, trees: trees}}
end
end
def truncate_diff(%Diff{} = diff, amount) do
keys = Enum.take(diff.keys, amount)
trees = Enum.take(diff.trees, amount - length(keys))
%{diff | keys: keys, trees: trees}
end
end
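# Illustrative usage sketch (an assumption, not part of the original file): build
# two trees, update their hashes, then diff them either directly or incrementally
# with the partial-diff helpers above. The depth of 8 is an arbitrary example.
#
#   alias MerkleMap.MerkleTree
#
#   a = MerkleTree.new(%{a: 1, b: 2}) |> MerkleTree.update_hashes()
#   b = MerkleTree.new(%{a: 1, b: 3}) |> MerkleTree.update_hashes()
#
#   # One-shot diff of the two trees:
#   keys = MerkleTree.diff_keys(a, b)
#
#   # Incremental diff, exchanging bounded-depth partials:
#   {:continue, partial} = MerkleTree.prepare_partial_diff(a, 8)
#   {:ok, keys} = MerkleTree.continue_partial_diff(b, partial, 8)
#   # continue_partial_diff/3 may instead return {:continue, partial} when
#   # further rounds are needed.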
|
astreu/deps/merkle_map/lib/merkle_map/merkle_tree.ex
| 0.843589
| 0.427964
|
merkle_tree.ex
|
starcoder
|
defprotocol Distopy.Source do
@doc """
Returns the list of environment variable names defined in the source.
"""
@spec list_keys(t) :: [binary]
def list_keys(t)
@doc """
Returns whether the given environment variable is defined in the source.
"""
@spec has_key?(t, key :: binary) :: boolean
def has_key?(t, key)
@doc """
Returns whether the source has sub-sources, _i.e._ is a group of sources.
"""
@spec source_group?(t) :: boolean
def source_group?(t)
@doc """
Returns whether the source can be modified by adding or removing environment
variables.
"""
@spec updatable?(t) :: boolean
def updatable?(t)
@doc """
Returns the name of the source for display purposes.
"""
@spec display_name(t) :: iodata
def display_name(t)
@doc """
Get the value associated to the environment variables identified by `key`.
It should raise if the variable is not defined.
"""
@spec get_value(t, key :: binary) :: binary
def get_value(t, key)
@doc """
Returns the representation of a value for display purposes. Instead of
returning the raw binary value, it is possible to return text like
`"hidden value"`, a parsed representation of a JSON string, _etc_.
"""
@spec display_value(t, key :: binary) :: iodata
def display_value(t, key)
@doc """
Creates a new environment variable in the source. It will only be called if
the source returns `true` from `updatable?/1`.
"""
@spec add_pair(t, key :: binary, value :: binary) :: {:ok, t} | {:error, binary}
def add_pair(t, key, value)
@doc """
Deletes the environment variable identified by `key` in the source.
"""
@spec delete_key(t, key :: binary) :: {:ok, t} | {:error, binary}
def delete_key(t, key)
@doc """
Returns a displayable version of the given list of environment variables keys
and values.
The keys may or may not be defined in the source as the values are passed to
the function.
"""
@spec pairs_to_iodata(t, [{key :: binary, value :: iodata}]) :: iodata
def pairs_to_iodata(t, pairs)
@doc """
Represents a single key/value pair for display purposes.
See `pairs_to_iodata/2`.
"""
@spec pair_to_iodata(t, key :: binary, value :: iodata) :: iodata
def pair_to_iodata(t, key, value)
@doc """
Returns a list of sub-sources, each identified by a unique "group" key.
"""
@doc group: true
@spec list_sources(t) :: [{group_key :: term, display_name :: iodata}]
def list_sources(t)
@doc """
Sets the currently selected source, identified by `group_key`. A group of
sources should add new environment variables to the currently selected sub-source.
"""
@doc group: true
@spec select_source(t, group_key :: term) :: t
def select_source(t, source)
@doc """
Returns whether the currently selected sub-source in the group is the given
`group_key`. The group key is the one returned from `get_sub_with_key/2` or
`list_sources/1`.
"""
@doc group: true
@spec selected?(t, group_key :: term) :: boolean
def selected?(t, source)
@doc """
Returns the sub-source that defines the key `key`. The function must return
a tuple with `group_key` as an unique identifier of the sub-source in the
group, and the sub-source itself.
Do not mistake `key` for `group_key`. The former identifies an environment
variable name while the latter identifies a sub-source in a group.
"""
@doc group: true
@spec get_sub_with_key(t, key :: binary) :: {group_key :: term, sub_source :: term}
def get_sub_with_key(t, key)
@doc """
Replaces the sub-source uniquely identified by `group_key`. The given group
key is the one returned from `get_sub_with_key/2` or `list_sources/1`.
"""
@doc group: true
@spec put_sub(t, group_key :: term, sub_source :: term) :: t
def put_sub(t, group_key, sub_source)
end
defmodule Distopy.Source.Helpers do
defmacro invalid_group!(t) do
quote do
{f, a} = __ENV__.function
%s{} = unquote(t)
raise "cannot use %#{inspect(s)}{} as a sources group, attempted to call Distopy.Source.#{Atom.to_string(f)}/#{a}"
end
end
defmacro not_updatable!(t) do
quote do
{f, a} = __ENV__.function
%s{} = unquote(t)
raise "cannot use %#{inspect(s)}{} as an updatable source, attempted to call Distopy.Source.#{Atom.to_string(f)}/#{a}"
end
end
defmacro invalid_key!(t, key) do
quote do
errmsg =
to_string([
Distopy.Source.display_name(unquote(t)),
" does not have key ",
?",
unquote(key),
?"
])
raise ArgumentError, message: errmsg
end
end
end
defmodule Distopy.Source.BaseFile do
@moduledoc """
This module provides the default implementation of the `Distopy.Source`
protocol's group functions for non-group sources, simply raising exceptions.
"""
defmacro __using__(_) do
quote do
import Distopy.Source.Helpers
@spec source_group?(term) :: boolean
def source_group?(_), do: false
@spec list_sources(term) :: [{group_key :: term, display_name :: iodata}]
def list_sources(t),
do: invalid_group!(t)
@spec select_source(term, group_key :: term) :: term
def select_source(t, _source),
do: invalid_group!(t)
@spec selected?(term, group_key :: term) :: boolean
def selected?(t, _group_key),
do: invalid_group!(t)
@spec get_sub_with_key(term, key :: binary) :: {group_key :: term, sub_source :: term}
def get_sub_with_key(t, _key), do: invalid_group!(t)
@spec put_sub(term, group_key :: term, sub_source :: term) :: term
def put_sub(t, _group_key, _sub_source), do: invalid_group!(t)
end
end
end
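# Illustrative sketch (an assumption, not part of the original project): a minimal,
# read-only, non-group source backed by a plain map could implement the protocol
# and reuse the group defaults from Distopy.Source.BaseFile like this. The
# MapSource module name is hypothetical.
#
#   defmodule MapSource do
#     defstruct vars: %{}
#   end
#
#   defimpl Distopy.Source, for: MapSource do
#     use Distopy.Source.BaseFile
#
#     def list_keys(t), do: Map.keys(t.vars)
#     def has_key?(t, key), do: Map.has_key?(t.vars, key)
#     def updatable?(_t), do: false
#     def display_name(_t), do: "map source"
#     def get_value(t, key), do: Map.fetch!(t.vars, key)
#     def display_value(t, key), do: get_value(t, key)
#     def add_pair(t, _key, _value), do: not_updatable!(t)
#     def delete_key(t, _key), do: not_updatable!(t)
#     def pair_to_iodata(_t, key, value), do: [key, ?=, value]
#     def pairs_to_iodata(t, pairs), do: Enum.map(pairs, fn {k, v} -> [pair_to_iodata(t, k, v), ?\n] end)
#   end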
|
lib/distopy/source.ex
| 0.877863
| 0.476336
|
source.ex
|
starcoder
|
defmodule Imglab.Source do
@moduledoc """
Provides a way to define and store information about an imglab source.
"""
@default_host "imglab-cdn.net"
@default_https true
@default_subdomains true
@derive {Inspect, except: [:secure_key, :secure_salt]}
@enforce_keys [:name]
defstruct host: @default_host,
https: @default_https,
name: nil,
port: nil,
secure_key: nil,
secure_salt: nil,
subdomains: @default_subdomains
@type t :: %__MODULE__{
host: binary,
https: boolean,
name: binary,
port: nil | :inet.port_number(),
secure_key: nil | binary,
secure_salt: nil | binary,
subdomains: boolean
}
@doc """
Returns a [Source struct](`t:t/0`) with the specified options for the source.
`name` must be a `string` indicating the name of the source.
## Options
The accepted options are:
* `:host` - a `string` specifying the host where the imglab server is located, only for imglab on-premises (default: `"imglab-cdn.net"`)
* `:https` - a `boolean` value specifying if the source should use https or not (default: `true`)
* `:port` - a `:inet.port_number` specifying a port where the imglab server is located, only for imglab on-premises
* `:secure_key` - a `string` specifying the source secure key
* `:secure_salt` - a `string` specifying the source secure salt
* `:subdomains` - a `boolean` value specifying if the source should be specified using subdomains instead of using the path, only for imglab on-premises (default: `true`)
> Note: the `secure_key` and `secure_salt` parameters are secrets that should not be added to the code. Please use environment variables or another secure method to provide them in your project.
## Examples
iex> Imglab.Source.new("assets")
%Imglab.Source{
host: "imglab-cdn.net",
https: true,
name: "assets",
port: nil,
secure_key: nil,
secure_salt: nil,
subdomains: true
}
iex> Imglab.Source.new("assets", subdomains: false)
%Imglab.Source{
host: "imglab-cdn.net",
https: true,
name: "assets",
port: nil,
secure_key: nil,
secure_salt: nil,
subdomains: false
}
iex> Imglab.Source.new("assets", https: false, host: "imglab.net", port: 8080)
%Imglab.Source{
host: "imglab.net",
https: false,
name: "assets",
port: 8080,
secure_key: nil,
secure_salt: nil,
subdomains: true
}
iex> Imglab.Source.new("assets", secure_key: "secure-key", secure_salt: "secure-salt")
%Imglab.Source{
host: "imglab-cdn.net",
https: true,
name: "assets",
port: nil,
secure_key: "secure-key",
secure_salt: "secure-salt",
subdomains: true
}
"""
@spec new(binary, keyword) :: t
def new(name, options \\ []) when is_binary(name) do
host = Keyword.get(options, :host, @default_host)
https = Keyword.get(options, :https, @default_https)
port = Keyword.get(options, :port)
secure_key = Keyword.get(options, :secure_key)
secure_salt = Keyword.get(options, :secure_salt)
subdomains = Keyword.get(options, :subdomains, @default_subdomains)
%__MODULE__{
host: host,
https: https,
name: name,
port: port,
secure_key: secure_key,
secure_salt: secure_salt,
subdomains: subdomains
}
end
@doc false
@spec scheme(t) :: binary
def scheme(%__MODULE__{https: true}), do: "https"
def scheme(%__MODULE__{}), do: "http"
@doc false
@spec host(t) :: binary
def host(%__MODULE__{subdomains: true} = source), do: "#{source.name}.#{source.host}"
def host(%__MODULE__{} = source), do: source.host
@doc false
@spec path(t, binary) :: binary
def path(%__MODULE__{subdomains: true}, path) when is_binary(path), do: path
def path(%__MODULE__{} = source, path) when is_binary(path), do: Path.join(source.name, path)
@doc false
@spec is_secure?(t) :: boolean
def is_secure?(%__MODULE__{} = source) do
!is_nil(source.secure_key) && !is_nil(source.secure_salt)
end
end
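# Illustrative sketch (an assumption, not part of the original file): the @doc false
# helpers above compose into the pieces of a final URL, roughly as follows.
#
#   source = Imglab.Source.new("assets")
#   Imglab.Source.scheme(source)            #=> "https"
#   Imglab.Source.host(source)              #=> "assets.imglab-cdn.net"
#   Imglab.Source.path(source, "image.png") #=> "image.png"
#
#   source = Imglab.Source.new("assets", subdomains: false)
#   Imglab.Source.host(source)              #=> "imglab-cdn.net"
#   Imglab.Source.path(source, "image.png") #=> "assets/image.png"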
|
lib/imglab/source.ex
| 0.928401
| 0.622574
|
source.ex
|
starcoder
|
defmodule GraylogSearch do
@moduledoc """
GraylogSearch constructs search queries in a composable manner.
It more or less follows [Graylog's search query language](http://docs.graylog.org/en/latest/pages/queries.html).
## Basic Usage
All queries start with a base URL to the graylog instance.
```
GraylogSearch.new("https://graylog.example.com")
```
And end with requesting the constructed URL.
```
GraylogSearch.url()
```
In between the start and end, the query can be composed in various ways.
```
GraylogSearch.new("https://graylog.example.com")
|> GraylogSearch.for(:message, "ssh login")
|> GraylogSearch.and_for(:hostname, "service*.example.com")
|> GraylogSearch.minutes_ago(5)
|> GraylogSearch.url()
```
See the API docs below for other ways to construct a query.
"""
@doc """
Given a base URL to the Graylog instance (i.e. scheme and host name, no path or query string),
returns a URI for GraylogSearch pipelines.
"""
@doc group: :generic
@spec new(String.t()) :: URI.t()
def new(url) when is_binary(url) do
uri = URI.parse(url)
%URI{uri | path: "/search"}
end
@doc "Returns a URL to perform the search"
@doc group: :generic
@spec url(URI.t() | {:error, atom()}) :: String.t() | {:error, atom()}
def url(%URI{} = uri), do: URI.to_string(uri)
def url({:error, _reason} = err), do: err
@doc """
Search for a message by the given query term or phrase.
This function can be chained to combine queries with AND.
By default, all fields are included in the search
when a field to search in is not specified.
"""
@doc group: :operators
@spec for(URI.t(), String.t()) :: URI.t()
def for(%URI{} = uri, query) when is_binary(query) do
sanitised_query = sanitise_input(query)
and_query(uri, sanitised_query)
end
@doc """
Searches for a term or phrase in a specific message field.
Unlike `for/2` which searches all message fields.
"""
@doc group: :operators
@spec for(URI.t(), atom(), String.t()) :: URI.t()
def for(%URI{} = uri, field, query) when is_atom(field) and is_binary(query) do
sanitised_query = sanitise_input(query)
and_field_query(uri, field, sanitised_query)
end
@doc "Aliases `for/2`"
@doc group: :operators
@spec and_for(URI.t(), String.t()) :: URI.t()
def and_for(uri, query), do: __MODULE__.for(uri, query)
@doc "Aliases `for/3`"
@doc group: :operators
@spec and_for(URI.t(), atom(), String.t()) :: URI.t()
def and_for(uri, field, query), do: __MODULE__.for(uri, field, query)
defp and_query(uri, nil), do: uri
defp and_query(%URI{query: nil} = uri, query) do
query_string = URI.encode_query(%{"q" => query})
%URI{uri | query: query_string}
end
defp and_query(%URI{} = uri, query) do
query_string =
uri.query
|> URI.decode_query()
|> Map.get_and_update("q", &add_and_query(&1, query))
|> (fn {_old_query, new_query} -> new_query end).()
|> URI.encode_query()
%URI{uri | query: query_string}
end
defp and_field_query(uri, _field, nil), do: uri
defp and_field_query(uri, field, query) do
field_query = ~s(#{field}:"#{query}")
and_query(uri, field_query)
end
@doc """
Search messages by another term or phrase.
Uses the OR operator to combine queries.
"""
@doc group: :operators
@spec or_for(URI.t(), String.t()) :: URI.t()
def or_for(%URI{} = uri, query) when is_binary(query) do
sanitised_query = sanitise_input(query)
or_query(uri, sanitised_query)
end
@doc """
Searches for another term or phrase in a specific message field.
Unlike `or_for/2` which searches all message fields.
"""
@doc group: :operators
@spec or_for(URI.t(), atom(), String.t()) :: URI.t()
def or_for(%URI{} = uri, field, query) when is_atom(field) and is_binary(query) do
sanitised_query = sanitise_input(query)
or_field_query(uri, field, sanitised_query)
end
defp or_query(uri, nil), do: uri
defp or_query(%URI{query: nil} = uri, _query), do: uri
defp or_query(%URI{} = uri, query) do
query_string =
uri.query
|> URI.decode_query()
|> Map.get_and_update("q", &add_or_query(&1, query))
|> (fn {_old_query, new_query} -> new_query end).()
|> URI.encode_query()
%URI{uri | query: query_string}
end
defp or_field_query(uri, _field, nil), do: uri
defp or_field_query(uri, field, query) do
field_query = ~s(#{field}:"#{query}")
or_query(uri, field_query)
end
@doc """
Search for messages that do not include a term or phrase.
Uses the NOT operator. Can be chained to combine queries with AND NOT.
"""
@doc group: :operators
@spec not_for(URI.t(), String.t()) :: URI.t()
def not_for(%URI{} = uri, query) when is_binary(query) do
sanitised_query = sanitise_input(query)
not_query(uri, sanitised_query)
end
@doc """
Searches for messages that do not include a term or phrase in a specific field.
Unlike `not_for/2` which searches all message fields.
"""
@doc group: :operators
@spec not_for(URI.t(), atom(), String.t()) :: URI.t()
def not_for(%URI{} = uri, field, query) when is_atom(field) and is_binary(query) do
sanitised_query = sanitise_input(query)
not_field_query(uri, field, sanitised_query)
end
defp not_query(uri, nil), do: uri
defp not_query(%URI{query: nil} = uri, query) do
query_string = URI.encode_query(%{"q" => "NOT #{query}"})
%URI{uri | query: query_string}
end
defp not_query(%URI{} = uri, query) do
query_string =
uri.query
|> URI.decode_query()
|> Map.get_and_update("q", &add_not_query(&1, query))
|> (fn {_old_query, new_query} -> new_query end).()
|> URI.encode_query()
%URI{uri | query: query_string}
end
defp not_field_query(uri, _field, nil), do: uri
defp not_field_query(uri, field, query) do
field_query = ~s(#{field}:"#{query}")
not_query(uri, field_query)
end
@doc "Aliases `not_for/2`"
@doc group: :operators
def and_not(uri, query), do: not_for(uri, query)
@doc "Aliases `not_for/3`"
@doc group: :operators
def and_not(uri, field, query), do: not_for(uri, field, query)
defp add_and_query(existing_query, query_addition),
do: add_to_query(existing_query, query_addition, " AND ")
defp add_or_query(existing_query, query_addition),
do: add_to_query(existing_query, query_addition, " OR ")
defp add_not_query(existing_query, query_addition),
do: add_to_query(existing_query, query_addition, " AND NOT ")
defp add_to_query(existing_query, query_addition, delimiter) do
new_query = existing_query <> delimiter <> query_addition
{existing_query, new_query}
end
@doc """
Search messages within an absolute time range.
Datetimes expected to be UTC in ISO 8601 format.
"""
@doc group: :time
@spec between(URI.t(), String.t(), String.t()) :: URI.t() | {:error, atom()}
def between(%URI{} = uri, from, to) when is_binary(from) and is_binary(to) do
with {:ok, from_dt, _utc_offset} <- DateTime.from_iso8601(from),
{:ok, to_dt, _utc_offset} <- DateTime.from_iso8601(to) do
between(uri, from_dt, to_dt)
end
end
@spec between(URI.t(), DateTime.t(), DateTime.t()) :: URI.t()
def between(%URI{} = uri, %DateTime{} = from, %DateTime{} = to) do
utc_iso_ms = fn dt -> cast_millisecond(dt) |> DateTime.to_iso8601() end
do_between(uri, utc_iso_ms.(from), utc_iso_ms.(to))
end
@spec between(URI.t(), NaiveDateTime.t(), NaiveDateTime.t()) :: URI.t()
def between(%URI{} = uri, %NaiveDateTime{} = from, %NaiveDateTime{} = to) do
utc_iso_ms = fn dt -> cast_millisecond(dt) |> NaiveDateTime.to_iso8601() |> Kernel.<>("Z") end
do_between(uri, utc_iso_ms.(from), utc_iso_ms.(to))
end
defp do_between(uri, from, to) do
query_string =
(uri.query || "")
|> URI.decode_query()
|> Map.delete("relative")
|> Map.put("rangetype", "absolute")
|> Map.put("from", from)
|> Map.put("to", to)
|> URI.encode_query()
%URI{uri | query: query_string}
end
defp cast_millisecond(%NaiveDateTime{microsecond: {0, n}} = dt) when n < 3 do
%NaiveDateTime{dt | microsecond: {0, 3}}
end
defp cast_millisecond(%DateTime{microsecond: {0, n}} = dt) when n < 3 do
%DateTime{dt | microsecond: {0, 3}}
end
defp cast_millisecond(%NaiveDateTime{} = dt) do
NaiveDateTime.truncate(dt, :millisecond)
end
defp cast_millisecond(%DateTime{} = dt) do
DateTime.truncate(dt, :millisecond)
end
@doc """
Search messages with a relative time range in minutes.
From the given amount of minutes ago to the time the search is performed.
"""
@doc group: :time
@spec minutes_ago(URI.t(), pos_integer) :: URI.t()
def minutes_ago(%URI{} = uri, n) when is_integer(n) and n > 0 do
relative_time_range(uri, 60 * n)
end
@doc """
Search messages with a relative time range in hours.
From the given amount of hours ago to the time the search is performed.
"""
@doc group: :time
@spec hours_ago(URI.t(), pos_integer) :: URI.t()
def hours_ago(%URI{} = uri, n) when is_integer(n) and n > 0 do
relative_time_range(uri, 60 * 60 * n)
end
@doc """
Search messages with a relative time range in days.
From the given amount of days ago to the time the search is performed.
"""
@doc group: :time
@spec days_ago(URI.t(), pos_integer) :: URI.t()
def days_ago(%URI{} = uri, n) when is_integer(n) and n > 0 do
relative_time_range(uri, 60 * 60 * 24 * n)
end
defp relative_time_range(uri, sec) do
query_string =
(uri.query || "")
|> URI.decode_query()
|> Map.drop(["from", "to"])
|> Map.put("rangetype", "relative")
|> Map.put("relative", sec)
|> URI.encode_query()
%URI{uri | query: query_string}
end
@doc """
Search messages within a time range specified by natural language.
Consult the [natty natural language parser](http://natty.joestelmach.com/doc.jsp#syntax_list)
for details on supported date/time formats.
"""
@doc group: :time
@spec within(URI.t(), String.t()) :: URI.t()
def within(%URI{} = uri, date_expression) when is_binary(date_expression) do
sanitised_expression = sanitise_input(date_expression)
do_within(uri, sanitised_expression)
end
defp do_within(uri, nil), do: uri
defp do_within(uri, date_expression) do
query_string =
(uri.query || "")
|> URI.decode_query()
|> Map.drop(["from", "to"])
|> Map.put("rangetype", "keyword")
|> Map.put("keyword", date_expression)
|> URI.encode_query()
%URI{uri | query: query_string}
end
@doc """
Includes the given fields in the message results.
Requires extractors to exist for the fields being specified.
"""
@doc group: :generic
@spec show_fields(URI.t(), [atom()]) :: URI.t()
def show_fields(%URI{} = uri, fields) when is_list(fields) do
query_string =
(uri.query || "")
|> URI.decode_query()
|> Map.put("fields", Enum.join(fields, ","))
|> URI.encode_query()
%URI{uri | query: query_string}
end
defp sanitise_input(query) when is_binary(query) do
case String.trim(query) do
"" -> nil
query -> query
end
end
end
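# Illustrative sketch (an assumption, not part of the original file): combining an
# absolute time range with field selection, which the moduledoc example does not
# cover. The base URL and field names are hypothetical.
#
#   GraylogSearch.new("https://graylog.example.com")
#   |> GraylogSearch.for(:message, "ssh login")
#   |> GraylogSearch.between("2020-01-01T00:00:00Z", "2020-01-02T00:00:00Z")
#   |> GraylogSearch.show_fields([:hostname, :message])
#   |> GraylogSearch.url()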
|
lib/graylog_search.ex
| 0.909538
| 0.885235
|
graylog_search.ex
|
starcoder
|
defmodule Blueprint.Application.Module do
@moduledoc """
A struct containing contents from a BEAM module useful for
inspecting.
By default, messages will only be found when servers explicitly
match. This can, however, be extended by providing custom server
matching expressions in the config under the `:servers` key.
## Server Match Expressions
Server match expression should take the form of `{ match, server }`,
where `match` is the expression to be matched with (against the
function calls server arg), and `server` is the resulting target
server.
The `:servers` key should contain a string representation of the list of
server match expressions.
An example of this could be:
config :blueprint,
servers: ~S([
{ { :tuple, _, [{ :atom, _, s }, { :atom, _, :"foo@127.0.0.1" }] }, s }, \# match against a named node, and return whatever server name is bound to 's'
{ { :call, _, { :atom, _, :get_server }, [] }, Foo }, \# match against a get_server/0 function and return Foo as the server
{ _, Bar } \# match any argument and return Bar as the server
])
"""
defstruct [path: nil, beam: nil, name: nil, messages: [], server: nil]
@type server :: { :named, atom } | nil
@type t :: %Blueprint.Application.Module{ path: String.t, beam: binary, name: atom, messages: [Blueprint.Application.Module.Message.t], server: server }
@server_behaviours [GenServer, GenEvent, GenStage, :gen_event, :gen_fsm, :gen_server, :gen_statem, :gen]
@server_behaviours_sends [:call, :cast]
defp messages(code, messages \\ [])
defp messages({ :call, _, { :remote, _, { :atom, _, module }, { :atom, _, fun } }, args = [{ :atom, _ , server }|_] }, messages) when module in @server_behaviours and fun in @server_behaviours_sends do
[%Blueprint.Application.Module.Message{
target: server,
interface: { module, fun, length(args) },
args: args #TODO: format
}|messages]
end
defp messages({ :call, _, { :remote, _, { :atom, _, module }, { :atom, _, fun } }, args = [server_arg|_] }, messages) when module in @server_behaviours and fun in @server_behaviours_sends do
Application.get_env(:blueprint, :servers, "[]")
|> Code.string_to_quoted!
|> Enum.find_value(messages, fn { match, server } ->
try do
{ server, _ } =
quote do
case var!(arg) do
unquote(match) -> unquote(server)
end
end
|> Code.eval_quoted([arg: server_arg])
[%Blueprint.Application.Module.Message{
target: server,
interface: { module, fun, length(args) },
args: args #TODO: format
}|messages]
rescue
_ -> nil
end
end)
end
defp messages([h|t], messages), do: messages(t, messages(h, messages))
defp messages(code, messages) when is_tuple(code), do: messages(Tuple.to_list(code), messages)
defp messages(_, messages), do: messages
@doc """
Load the contents of a module at the given path.
iex> Blueprint.Application.Module.new(Path.join(Mix.Project.app_path(), "ebin/Elixir.Blueprint.Application.Module.beam")).name
Blueprint.Application.Module
"""
@spec new(String.t) :: t
def new(path) do
{ :ok, beam } = File.read(path)
{ :ok, { mod, [atoms: atoms] } } = :beam_lib.chunks(beam, [:atoms])
if Enum.any?(atoms, fn
{ _, module } when module in @server_behaviours -> true
_ -> false
end) do
{ :ok, { _, chunks } } = :beam_lib.chunks(beam, [:attributes, :abstract_code])
server = if Enum.any?(chunks[:attributes], fn
{ :behaviour, behaviours } -> GenServer in behaviours
_ -> false
end) do
#TODO: Work out whether it is a named server, and what that name is
{ :named, mod }
end
messages = case chunks[:abstract_code] do
{ :raw_abstract_v1, code } -> messages(code)
_ -> []
end
%Blueprint.Application.Module{ path: path, beam: beam, name: mod, server: server, messages: messages }
else
%Blueprint.Application.Module{ path: path, beam: beam, name: mod }
end
end
end
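# Illustrative sketch (an assumption, not part of the original file): scanning every
# BEAM file of the current application and collecting the detected messages.
#
#   Path.wildcard(Path.join(Mix.Project.app_path(), "ebin/*.beam"))
#   |> Enum.map(&Blueprint.Application.Module.new/1)
#   |> Enum.flat_map(& &1.messages)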
|
lib/blueprint/application/module.ex
| 0.731826
| 0.465995
|
module.ex
|
starcoder
|
defmodule Phoenix.HTML do
@moduledoc """
The default building blocks for working with HTML safely
in Phoenix.
This library provides three main functionalities:
* HTML safety
* Form handling (with CSRF protection)
* A tiny JavaScript library to enhance applications
## HTML safety
One of the main responsibilities of this package is to
provide convenience functions for escaping and marking
HTML code as safe.
By default, data output in templates is not considered
safe:
<%= "<hello>" %>
will be shown as:
<hello>
User data or data coming from the database is almost never
considered safe. However, in some cases, you may want to tag
it as safe and show its "raw" contents:
<%= raw "<hello>" %>
Keep in mind most helpers will automatically escape your data
and return safe content:
<%= content_tag :p, "<hello>" %>
will properly output:
<p><hello></p>
## Form handling
See `Phoenix.HTML.Form`.
## JavaScript library
This project ships with a tiny bit of JavaScript that listens
to all click events to:
* Support `data-confirm="message"` attributes, which shows
a confirmation modal with the given message
* Support `data-method="patch|post|put|delete"` attributes,
which sends the current click as a PATCH/POST/PUT/DELETE
HTTP request. You will need to add `data-to` with the URL
and `data-csrf` with the CSRF token value. See
`link_attributes/2` for a function that wraps it all up
for you
* Dispatch a "phoenix.link.click" event. You can listen to this
event to customize the behaviour above. Returning false from
this event will disable `data-method`. Stopping propagation
will disable `data-confirm`
To use the functionality above, you must load `priv/static/phoenix_html.js`
into your build tool.
### Overriding the default confirm behaviour
You can override the default confirmation behaviour by hooking
into `phoenix.link.click`. Here is an example:
```javascript
// listen on document.body, so it's executed before the default of
// phoenix_html, which is listening on the window object
document.body.addEventListener('phoenix.link.click', function (e) {
// Prevent default implementation
e.stopPropagation();
// Introduce alternative implementation
var message = e.target.getAttribute("data-confirm");
if(!message){ return true; }
vex.dialog.confirm({
message: message,
callback: function (value) {
if (value == false) { e.preventDefault(); }
}
})
}, false);
```
"""
@doc false
defmacro __using__(_) do
quote do
import Phoenix.HTML
import Phoenix.HTML.Form
import Phoenix.HTML.Link
import Phoenix.HTML.Tag, except: [attributes_escape: 1]
import Phoenix.HTML.Format
end
end
@typedoc "Guaranteed to be safe"
@type safe :: {:safe, iodata}
@typedoc "May be safe or unsafe (i.e. it needs to be converted)"
@type unsafe :: Phoenix.HTML.Safe.t()
@doc false
@deprecated "use the ~H sigil instead"
defmacro sigil_e(expr, opts) do
handle_sigil(expr, opts, __CALLER__)
end
@doc false
@deprecated "use the ~H sigil instead"
defmacro sigil_E(expr, opts) do
handle_sigil(expr, opts, __CALLER__)
end
defp handle_sigil({:<<>>, meta, [expr]}, [], caller) do
options = [
engine: Phoenix.HTML.Engine,
file: caller.file,
line: caller.line + 1,
indentation: meta[:indentation] || 0
]
EEx.compile_string(expr, options)
end
defp handle_sigil(_, _, _) do
raise ArgumentError,
"interpolation not allowed in ~e sigil. " <>
"Remove the interpolation, use <%= %> to insert values, " <>
"or use ~E to show the interpolation literally"
end
@doc """
Marks the given content as raw.
This means any HTML code inside the given
string won't be escaped.
iex> raw("<hello>")
{:safe, "<hello>"}
iex> raw({:safe, "<hello>"})
{:safe, "<hello>"}
iex> raw(nil)
{:safe, ""}
"""
@spec raw(iodata | safe | nil) :: safe
def raw({:safe, value}), do: {:safe, value}
def raw(nil), do: {:safe, ""}
def raw(value) when is_binary(value) or is_list(value), do: {:safe, value}
@doc """
Escapes the HTML entities in the given term, returning safe iodata.
iex> html_escape("<hello>")
{:safe, [[[] | "<"], "hello" | ">"]}
iex> html_escape('<hello>')
{:safe, ["<", 104, 101, 108, 108, 111, ">"]}
iex> html_escape(1)
{:safe, "1"}
iex> html_escape({:safe, "<hello>"})
{:safe, "<hello>"}
"""
@spec html_escape(unsafe) :: safe
def html_escape({:safe, _} = safe), do: safe
def html_escape(other), do: {:safe, Phoenix.HTML.Engine.encode_to_iodata!(other)}
@doc """
Converts a safe result into a string.
Fails if the result is not safe. In such cases, you can
invoke `html_escape/1` or `raw/1` accordingly before.
You can combine `html_escape/1` and `safe_to_string/1`
to convert a data structure to a escaped string:
data |> html_escape() |> safe_to_string()
"""
@spec safe_to_string(safe) :: String.t()
def safe_to_string({:safe, iodata}) do
IO.iodata_to_binary(iodata)
end
@doc ~S"""
Escapes an enumerable of attributes, returning iodata.
The attributes are rendered in the given order. Note if
a map is given, the key ordering is not guaranteed.
The keys and values can be of any shape, as long as they
implement the `Phoenix.HTML.Safe` protocol. In addition,
if the key is an atom, it will be "dasherized". In other
words, `:phx_value_id` will be converted to `phx-value-id`.
Furthermore, the following attributes provide behaviour:
* `:aria`, `:data`, and `:phx` - they accept a keyword list as
value. `data: [confirm: "are you sure?"]` is converted to
`data-confirm="are you sure?"`.
* `:class` - it accepts a list of classes as argument. Each
element in the list is separated by space. `nil` and `false`
elements are discarded. `class: ["foo", nil, "bar"]` then
becomes `class="foo bar"`.
* `:id` - it is validated and an error is raised if a number is given as the ID,
which is not allowed by the HTML spec and leads to unpredictable
behaviour.
## Examples
iex> safe_to_string attributes_escape(title: "the title", id: "the id", selected: true)
" title=\"the title\" id=\"the id\" selected"
iex> safe_to_string attributes_escape(%{data: [confirm: "Are you sure?"], class: "foo"})
" class=\"foo\" data-confirm=\"Are you sure?\""
iex> safe_to_string attributes_escape(%{phx: [value: [foo: "bar"]], class: "foo"})
" class=\"foo\" phx-value-foo=\"bar\""
"""
def attributes_escape(attrs) when is_list(attrs) do
{:safe, build_attrs(attrs)}
end
def attributes_escape(attrs) do
{:safe, attrs |> Enum.to_list() |> build_attrs()}
end
defp build_attrs([{k, true} | t]),
do: [?\s, key_escape(k) | build_attrs(t)]
defp build_attrs([{_, false} | t]),
do: build_attrs(t)
defp build_attrs([{_, nil} | t]),
do: build_attrs(t)
defp build_attrs([{:id, v} | t]),
do: [" id=\"", id_value(v), ?" | build_attrs(t)]
defp build_attrs([{:class, v} | t]),
do: [" class=\"", class_value(v), ?" | build_attrs(t)]
defp build_attrs([{:aria, v} | t]) when is_list(v),
do: nested_attrs(v, " aria", t)
defp build_attrs([{:data, v} | t]) when is_list(v),
do: nested_attrs(v, " data", t)
defp build_attrs([{:phx, v} | t]) when is_list(v),
do: nested_attrs(v, " phx", t)
defp build_attrs([{"id", v} | t]),
do: [" id=\"", id_value(v), ?" | build_attrs(t)]
defp build_attrs([{"class", v} | t]),
do: [" class=\"", class_value(v), ?" | build_attrs(t)]
defp build_attrs([{"aria", v} | t]) when is_list(v),
do: nested_attrs(v, " aria", t)
defp build_attrs([{"data", v} | t]) when is_list(v),
do: nested_attrs(v, " data", t)
defp build_attrs([{"phx", v} | t]) when is_list(v),
do: nested_attrs(v, " phx", t)
defp build_attrs([{k, v} | t]),
do: [?\s, key_escape(k), ?=, ?", attr_escape(v), ?" | build_attrs(t)]
defp build_attrs([]), do: []
defp nested_attrs([{k, v} | kv], attr, t) when is_list(v),
do: [nested_attrs(v, "#{attr}-#{key_escape(k)}", []) | nested_attrs(kv, attr, t)]
defp nested_attrs([{k, v} | kv], attr, t),
do: [attr, ?-, key_escape(k), ?=, ?", attr_escape(v), ?" | nested_attrs(kv, attr, t)]
defp nested_attrs([], _attr, t),
do: build_attrs(t)
defp id_value(value) when is_number(value) do
raise ArgumentError,
"attempting to set id attribute to #{value}, " <>
"but setting the DOM ID to a number can lead to unpredictable behaviour. " <>
"Instead consider prefixing the id with a string, such as \"user-#{value}\" or similar"
end
defp id_value(value) do
attr_escape(value)
end
defp class_value(value) when is_list(value) do
value
|> Enum.filter(& &1)
|> Enum.join(" ")
|> attr_escape()
end
defp class_value(value) do
attr_escape(value)
end
defp key_escape(value) when is_atom(value), do: String.replace(Atom.to_string(value), "_", "-")
defp key_escape(value), do: attr_escape(value)
defp attr_escape({:safe, data}), do: data
defp attr_escape(nil), do: []
defp attr_escape(other) when is_binary(other), do: Phoenix.HTML.Engine.encode_to_iodata!(other)
defp attr_escape(other), do: Phoenix.HTML.Safe.to_iodata(other)
@doc """
Escapes HTML content to be inserted into a JavaScript string.
This function is useful in JavaScript responses when there is a need
to escape HTML rendered from other templates, like in the following:
$("#container").append("<%= javascript_escape(render("post.html", post: @post)) %>");
It escapes quotes (double and single), double backslashes and others.
"""
@spec javascript_escape(binary) :: binary
@spec javascript_escape(safe) :: safe
def javascript_escape({:safe, data}),
do: {:safe, data |> IO.iodata_to_binary() |> javascript_escape("")}
def javascript_escape(data) when is_binary(data),
do: javascript_escape(data, "")
defp javascript_escape(<<0x2028::utf8, t::binary>>, acc),
do: javascript_escape(t, <<acc::binary, "\\u2028">>)
defp javascript_escape(<<0x2029::utf8, t::binary>>, acc),
do: javascript_escape(t, <<acc::binary, "\\u2029">>)
defp javascript_escape(<<0::utf8, t::binary>>, acc),
do: javascript_escape(t, <<acc::binary, "\\u0000">>)
defp javascript_escape(<<"</", t::binary>>, acc),
do: javascript_escape(t, <<acc::binary, ?<, ?\\, ?/>>)
defp javascript_escape(<<"\r\n", t::binary>>, acc),
do: javascript_escape(t, <<acc::binary, ?\\, ?n>>)
defp javascript_escape(<<h, t::binary>>, acc) when h in [?", ?', ?\\, ?`],
do: javascript_escape(t, <<acc::binary, ?\\, h>>)
defp javascript_escape(<<h, t::binary>>, acc) when h in [?\r, ?\n],
do: javascript_escape(t, <<acc::binary, ?\\, ?n>>)
defp javascript_escape(<<h, t::binary>>, acc),
do: javascript_escape(t, <<acc::binary, h>>)
defp javascript_escape(<<>>, acc), do: acc
@doc """
Returns a list of attributes that make an element behave like a link.
For example, to make a button work like a link:
<button {link_attributes("/home")}>
Go back to home
</button>
However, this function is more often used to create buttons that
must invoke an action on the server, such as deleting an entity,
using the relevant HTTP protocol:
<button data-confirm="Are you sure?" {link_attributes("/product/1", method: :delete)}>
Delete product
</button>
The `to` argument may be a string, a URI, or a tuple `{scheme, value}`.
See the examples below.
Note: using this function requires loading the JavaScript library
at `priv/static/phoenix_html.js`. See the `Phoenix.HTML` module
documentation for more information.
## Options
* `:method` - the HTTP method for the link. Defaults to `:get`.
* `:csrf_token` - a custom token to use when method is not `:get`.
This is used to ensure the request was sent by the user who
rendered the page. By default, CSRF tokens are generated through
`Plug.CSRFProtection`. You can set this option to `false`, to
disable token generation, or set it to your own token.
When the `:method` is set to `:get` and the `:to` URL contains query
parameters the generated form element will strip the parameters in
accordance with the [W3C](https://www.w3.org/TR/html401/interact/forms.html#h-17.13.3.4)
form specification.
## Data attributes
The following data attributes can also be manually set in the element:
* `data-confirm` - shows a confirmation prompt before generating and
submitting the form.
## Examples
iex> link_attributes("/world")
[data: [method: :get, to: "/world"]]
iex> link_attributes(URI.parse("https://elixir-lang.org"))
[data: [method: :get, to: "https://elixir-lang.org"]]
iex> link_attributes("/product/1", method: :delete)
[data: [csrf: Plug.CSRFProtection.get_csrf_token(), method: :delete, to: "/product/1"]]
If the URL is absolute, only certain schemes are allowed to
avoid JavaScript injection. For example, the following will fail:
iex> link_attributes("javascript:alert('hacked!')")
** (ArgumentError) unsupported scheme given as link. In case you want to link to an
unknown or unsafe scheme, such as javascript, use a tuple: {:javascript, rest}
You can however explicitly render those unsafe schemes by using a tuple:
iex> link_attributes({:javascript, "alert('my alert!')"})
[data: [method: :get, to: ["javascript", 58, "alert('my alert!')"]]]
"""
def link_attributes(to, opts \\ []) do
to = valid_destination!(to)
method = Keyword.get(opts, :method, :get)
data = [method: method, to: to]
data =
if method == :get do
data
else
case Keyword.get(opts, :csrf_token, true) do
true -> [csrf: Phoenix.HTML.Tag.csrf_token_value(to)] ++ data
false -> data
csrf when is_binary(csrf) -> [csrf: csrf] ++ data
end
end
[data: data]
end
defp valid_destination!(%URI{} = uri) do
valid_destination!(URI.to_string(uri))
end
defp valid_destination!({:safe, to}) do
{:safe, valid_string_destination!(IO.iodata_to_binary(to))}
end
defp valid_destination!({other, to}) when is_atom(other) do
[Atom.to_string(other), ?:, to]
end
defp valid_destination!(to) do
valid_string_destination!(IO.iodata_to_binary(to))
end
@valid_uri_schemes ~w(http: https: ftp: ftps: mailto: news: irc: gopher:) ++
~w(nntp: feed: telnet: mms: rtsp: svn: tel: fax: xmpp:)
for scheme <- @valid_uri_schemes do
defp valid_string_destination!(unquote(scheme) <> _ = string), do: string
end
defp valid_string_destination!(to) do
if not match?("/" <> _, to) and String.contains?(to, ":") do
raise ArgumentError, """
unsupported scheme given as link. In case you want to link to an
unknown or unsafe scheme, such as javascript, use a tuple: {:javascript, rest}\
"""
else
to
end
end
end
|
lib/phoenix_html.ex
| 0.861115
| 0.726935
|
phoenix_html.ex
|
starcoder
|
defmodule NoWayJose do
@moduledoc """
Provides functions for signing a map of "claims" into a JWT using
a signing key.
"""
require Logger
@typedoc """
A map containing the claims to be encoded. Map keys must be strings.
"""
@type claims :: %{binary() => term()}
@typedoc """
Algorithm used in JWT signing.
"""
@type alg :: :rs512
@typedoc """
The format of the provided key.
"""
@type key_format :: :der | :pem
@typedoc """
Key Identifier – Acts as an alias for the key
"""
@type kid :: nil | binary()
@type signing_option ::
{:alg, alg()}
| {:format, key_format()}
| {:key, key()}
| {:kid, kid()}
@type signing_options :: [signing_option()]
@typedoc """
RSA private key.
The key can be either DER or PEM encoded.
## Generating a key
der = NoWayJose.generate_rsa(4096, :der)
pem = NoWayJose.generate_rsa(4096, :pem)
Optionally, you can extract the DER data from a PEM encoded private key in code
using the following:
{:ok, key} = File.read("private.pem")
[{:RSAPrivateKey, der, _}] = :public_key.pem_decode(key)
"""
@type key :: binary()
@typedoc """
JSON Web Token
"""
@type token :: binary()
@doc """
Generates a signed JWT from the given claims and key.
Returns a JWT on success and raises an error on error.
"""
@spec sign!(claims(), key() | signing_options()) :: token() | no_return()
def sign!(claims, opts) do
case sign(claims, opts) do
{:ok, token} -> token
{:error, error} -> raise error
end
end
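# Illustrative use of sign!/2 (an assumption, mirroring the sign/2 examples below,
# with `claims` and `key` built as shown there): it returns the token directly and
# raises on failure instead of returning an error tuple.
#
#   token = NoWayJose.sign!(claims, alg: :rs512, key: key, format: :pem, kid: "1")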
@doc """
Generates a signed JWT from the given claims and key.
## Example
# Get the private signing key
{:ok, key} = File.read("private.der")
# Build your claims
claims = %{
"exp" => 1571065163,
"iat" => 1571061563,
"iss" => "example.com",
"jti" => "a3a31258-2450-490b-86ed-2b8e67f91e20",
"nbf" => 1571061563,
"scopes" => [
"posts.r+w",
"comments.r+w"
],
"sub" => "4d3796ca-19e0-40e6-97fe-060c0b7e3ce3"
}
# Sign the claims into a JWT
{:ok, token} = NoWayJose.sign(claims, key)
"""
@spec sign(claims(), key()) :: {:ok, token()} | {:error, term()}
def sign(claims, key) when is_binary(key) do
Logger.warn(
"Passing a binary key to sign/2 is deprecated. Please pass a list of signing options."
)
opts = [alg: :rs512, format: :der, key: key]
NoWayJose.Native.sign(claims, struct(NoWayJose.Signer, opts))
end
@doc """
Generates a signed JWT from the given claims and signing options.
## Example
# Get the private signing key
{:ok, key} = File.read("private.pem")
# Build your claims
claims = %{
"exp" => 1571065163,
"iat" => 1571061563,
"iss" => "example.com",
"jti" => "a3a31258-2450-490b-86ed-2b8e67f91e20",
"nbf" => 1571061563,
"scopes" => [
"posts.r+w",
"comments.r+w"
],
"sub" => "4d3796ca-19e0-40e6-97fe-060c0b7e3ce3"
}
# Sign the claims into a JWT
{:ok, token} = NoWayJose.sign(claims, alg: :rs512, key: key, format: :pem, kid: "1")
"""
@spec sign(claims(), signing_options()) :: {:ok, token()} | {:error, term()}
def sign(claims, opts) when is_list(opts) do
NoWayJose.Native.sign(claims, struct(NoWayJose.Signer, opts))
end
@doc """
Generates an RSA private key based on the given bit size and format.
"""
@spec generate_rsa(integer(), key_format()) :: binary()
def generate_rsa(bits, format) do
NoWayJose.Native.generate_rsa(bits, format)
end
end
|
lib/no_way_jose.ex
| 0.841256
| 0.58602
|
no_way_jose.ex
|
starcoder
|
defmodule Zaryn.TransactionChain.Transaction.CrossValidationStamp do
@moduledoc """
Represents a cross validation stamp, which validates a validation stamp.
"""
defstruct [:node_public_key, :signature, inconsistencies: []]
alias Zaryn.Crypto
alias Zaryn.TransactionChain.Transaction.ValidationStamp
@type inconsistency() ::
:timestamp
| :signature
| :proof_of_work
| :proof_of_integrity
| :proof_of_election
| :transaction_fee
| :transaction_movements
| :unspent_outputs
| :node_movements
| :errors
@typedoc """
A cross validation stamp is composed from:
- Public key: identity of the node signer
- Signature: built from the validation stamp and the inconsistencies found
- Inconsistencies: a list of errors from the validation stamp
"""
@type t :: %__MODULE__{
node_public_key: nil | Crypto.key(),
signature: nil | binary(),
inconsistencies: list(inconsistency())
}
@doc """
Sign the cross validation stamp using the validation stamp and inconsistencies list
"""
@spec sign(t(), ValidationStamp.t()) :: t()
def sign(
cross_stamp = %__MODULE__{inconsistencies: inconsistencies},
validation_stamp = %ValidationStamp{}
) do
signature =
[ValidationStamp.serialize(validation_stamp), marshal_inconsistencies(inconsistencies)]
|> Crypto.sign_with_last_node_key()
%{cross_stamp | node_public_key: Crypto.last_node_public_key(), signature: signature}
end
@doc """
Determines if the cross validation stamp signature is valid for a given validation stamp
"""
@spec valid_signature?(
t(),
ValidationStamp.t()
) :: boolean()
def valid_signature?(
%__MODULE__{
signature: signature,
inconsistencies: inconsistencies,
node_public_key: node_public_key
},
stamp = %ValidationStamp{}
) do
data = [ValidationStamp.serialize(stamp), marshal_inconsistencies(inconsistencies)]
Crypto.verify?(signature, data, node_public_key)
end
defp marshal_inconsistencies(inconsistencies) do
inconsistencies
|> Enum.map(&serialize_inconsistency/1)
|> :erlang.list_to_binary()
end
@doc """
Serialize a cross validation stamp into binary format
## Examples
iex> %CrossValidationStamp{
...> node_public_key: <<0, 0, 32, 44, 135, 146, 55, 226, 199, 234, 83, 141, 249, 46, 64, 213, 172, 218, 137,
...> 35, 16, 193, 228, 78, 130, 36, 204, 242, 96, 90, 230, 5, 193, 137>>,
...> signature: <<70, 102, 163, 198, 192, 91, 177, 10, 201, 156, 10, 109, 165, 39, 226, 156, 72,
...> 169, 219, 71, 63, 236, 35, 228, 182, 45, 13, 166, 165, 102, 216, 23, 183, 46,
...> 195, 74, 85, 242, 164, 44, 225, 204, 233, 91, 217, 177, 243, 234, 229, 72,
...> 149, 17, 40, 182, 207, 127, 193, 3, 194, 156, 105, 209, 43, 161>>,
...> inconsistencies: [:signature, :proof_of_work, :proof_of_integrity]
...> }
...> |> CrossValidationStamp.serialize()
<<
# Public key
0, 0, 32, 44, 135, 146, 55, 226, 199, 234, 83, 141, 249, 46, 64, 213, 172, 218, 137,
35, 16, 193, 228, 78, 130, 36, 204, 242, 96, 90, 230, 5, 193, 137,
# Signature size
64,
# Signature
70, 102, 163, 198, 192, 91, 177, 10, 201, 156, 10, 109, 165, 39, 226, 156, 72,
169, 219, 71, 63, 236, 35, 228, 182, 45, 13, 166, 165, 102, 216, 23, 183, 46,
195, 74, 85, 242, 164, 44, 225, 204, 233, 91, 217, 177, 243, 234, 229, 72,
149, 17, 40, 182, 207, 127, 193, 3, 194, 156, 105, 209, 43, 161,
# Number of inconsistencies
3,
# Inconsistencies
1, 2, 3
>>
"""
@spec serialize(t()) :: binary()
def serialize(%__MODULE__{
node_public_key: node_public_key,
signature: signature,
inconsistencies: inconsistencies
}) do
inconsistencies_bin =
inconsistencies
|> Enum.map(&serialize_inconsistency(&1))
|> :erlang.list_to_binary()
<<node_public_key::binary, byte_size(signature)::8, signature::binary,
length(inconsistencies)::8, inconsistencies_bin::binary>>
end
defp serialize_inconsistency(:timestamp), do: 0
defp serialize_inconsistency(:signature), do: 1
defp serialize_inconsistency(:proof_of_work), do: 2
defp serialize_inconsistency(:proof_of_integrity), do: 3
defp serialize_inconsistency(:proof_of_election), do: 4
defp serialize_inconsistency(:transaction_fee), do: 5
defp serialize_inconsistency(:transaction_movements), do: 6
defp serialize_inconsistency(:unspent_outputs), do: 7
defp serialize_inconsistency(:node_movements), do: 8
defp serialize_inconsistency(:errors), do: 9
@doc """
Deserialize an encoded cross validation stamp
## Examples
iex> <<0, 0, 32, 44, 135, 146, 55, 226, 199, 234, 83, 141, 249, 46, 64, 213, 172, 218, 137,
...> 35, 16, 193, 228, 78, 130, 36, 204, 242, 96, 90, 230, 5, 193, 137,
...> 64, 70, 102, 163, 198, 192, 91, 177, 10, 201, 156, 10, 109, 165, 39, 226, 156, 72,
...> 169, 219, 71, 63, 236, 35, 228, 182, 45, 13, 166, 165, 102, 216, 23, 183, 46,
...> 195, 74, 85, 242, 164, 44, 225, 204, 233, 91, 217, 177, 243, 234, 229, 72,
...> 149, 17, 40, 182, 207, 127, 193, 3, 194, 156, 105, 209, 43, 161,
...> 3, 1, 2, 3>>
...> |> CrossValidationStamp.deserialize()
{
%CrossValidationStamp{
node_public_key: <<0, 0, 32, 44, 135, 146, 55, 226, 199, 234, 83, 141, 249, 46, 64, 213, 172, 218, 137,
35, 16, 193, 228, 78, 130, 36, 204, 242, 96, 90, 230, 5, 193, 137>>,
signature: <<70, 102, 163, 198, 192, 91, 177, 10, 201, 156, 10, 109, 165, 39, 226, 156, 72,
169, 219, 71, 63, 236, 35, 228, 182, 45, 13, 166, 165, 102, 216, 23, 183, 46,
195, 74, 85, 242, 164, 44, 225, 204, 233, 91, 217, 177, 243, 234, 229, 72,
149, 17, 40, 182, 207, 127, 193, 3, 194, 156, 105, 209, 43, 161>>,
inconsistencies: [:signature, :proof_of_work, :proof_of_integrity]
},
""
}
"""
@spec deserialize(bitstring()) :: {t(), bitstring()}
def deserialize(<<curve_id::8, origin_id::8, rest::bitstring>>) do
key_size = Crypto.key_size(curve_id)
<<key::binary-size(key_size), signature_size::8, signature::binary-size(signature_size),
nb_inconsistencies::8, rest::bitstring>> = rest
{inconsistencies, rest} = reduce_inconsistencies(rest, nb_inconsistencies, [])
{
%__MODULE__{
node_public_key: <<curve_id::8, origin_id::8, key::binary>>,
signature: signature,
inconsistencies: inconsistencies
},
rest
}
end
defp reduce_inconsistencies(rest, nb_inconsistencies, acc)
when nb_inconsistencies == length(acc) do
{Enum.reverse(acc), rest}
end
defp reduce_inconsistencies(rest, nb_inconsistencies, acc) do
{inconsistency, rest} = do_reduce_inconsistencies(rest)
reduce_inconsistencies(rest, nb_inconsistencies, [inconsistency | acc])
end
defp do_reduce_inconsistencies(<<0::8, rest::bitstring>>), do: {:timestamp, rest}
defp do_reduce_inconsistencies(<<1::8, rest::bitstring>>), do: {:signature, rest}
defp do_reduce_inconsistencies(<<2::8, rest::bitstring>>), do: {:proof_of_work, rest}
defp do_reduce_inconsistencies(<<3::8, rest::bitstring>>), do: {:proof_of_integrity, rest}
defp do_reduce_inconsistencies(<<4::8, rest::bitstring>>), do: {:proof_of_election, rest}
defp do_reduce_inconsistencies(<<5::8, rest::bitstring>>), do: {:transaction_fee, rest}
defp do_reduce_inconsistencies(<<6::8, rest::bitstring>>), do: {:transaction_movements, rest}
defp do_reduce_inconsistencies(<<7::8, rest::bitstring>>), do: {:unspent_outputs, rest}
defp do_reduce_inconsistencies(<<8::8, rest::bitstring>>), do: {:node_movements, rest}
defp do_reduce_inconsistencies(<<9::8, rest::bitstring>>), do: {:errors, rest}
@spec from_map(map()) :: t()
def from_map(stamp = %{}) do
%__MODULE__{
node_public_key: Map.get(stamp, :node_public_key),
signature: Map.get(stamp, :signature),
inconsistencies: []
}
end
@spec to_map(t()) :: map()
def to_map(%__MODULE__{signature: signature, node_public_key: public_key}) do
%{
node_public_key: public_key,
signature: signature,
inconsistencies: []
}
end
end
|
lib/zaryn/transaction_chain/transaction/cross_validation_stamp.ex
| 0.908385
| 0.433442
|
cross_validation_stamp.ex
|
starcoder
|
defmodule ExTorch.Utils do
alias ExTorch.Utils.Types, as: Types
defmodule ListWrapper do
@typedoc """
This struct wraps a list of elements, or a list of lists of elements, into
a representation suitable for conversion into an ExTorch.Tensor
"""
@type t :: %__MODULE__{
list: [number()] | [boolean()],
size: [integer()],
dtype: ExTorch.DType.base_type()
}
@moduledoc """
Struct used to represent a list with elements or lists of elements.
"""
defstruct list: [],
size: [],
dtype: nil
end
@doc """
Given a `ExTorch.Utils.ListWrapper` structure, return a list with elements.
"""
  @spec from_list_wrapper(__MODULE__.ListWrapper.t()) :: list()
  def from_list_wrapper(%__MODULE__.ListWrapper{
        list: list,
        size: size
      }) do
    # `size` may arrive as a tuple (when provided externally) or as a list of dimensions
    # (as produced by `to_list_wrapper/1`); normalize it before rebuilding the nested list.
    dims = if is_tuple(size), do: Tuple.to_list(size), else: size

    dims
    |> Enum.reverse()
    |> Enum.reduce(list, fn dim, acc -> Enum.chunk_every(acc, dim) end)
    |> Enum.at(0)
  end
@doc """
Given a list of elements or a list with lists with elements, this function
returns a ExTorch.Utils.ListWrapper structure.
"""
@spec to_list_wrapper(list() | number()) :: __MODULE__.ListWrapper.t()
def to_list_wrapper([]) do
%__MODULE__.ListWrapper{
list: [],
size: [],
dtype: :float32
}
end
def to_list_wrapper([_ | _] = input) do
all_types = Types.collect_types(input, MapSet.new())
input_size = size(input)
coerce_type =
all_types
|> Enum.to_list()
|> Enum.reduce(&Types.compare_types/2)
    new_list =
      input
      |> convert_list(coerce_type, [])
      |> flat_list()
%__MODULE__.ListWrapper{
list: new_list,
size: input_size,
dtype: coerce_type
}
end
def to_list_wrapper(input) when is_tuple(input) do
to_list_wrapper(Tuple.to_list(input))
end
def to_list_wrapper(input) do
to_list_wrapper([input])
end
defp size(x) do
size(x, [0])
end
defp size([_ | _] = x, []) do
size(x, [0])
end
defp size([h | t], [size | rest]) when is_list(h) do
head_size = size(h, [0])
case rest do
[] -> size(t, [size + 1 | head_size])
^head_size -> size(t, [size + 1 | rest])
_ -> {:error, :size_mismatch}
end
end
defp size([h | t], size) when is_tuple(h) do
size([Tuple.to_list(h) | t], size)
end
defp size([_h | t], [size]) do
size(t, [size + 1])
end
defp size([], size) do
size
end
defp size(tup, size) when is_tuple(tup) do
size(Tuple.to_list(tup), size)
end
defp size(_x, _) do
[]
end
defp convert_list([], _type, acc) do
Enum.reverse(acc)
end
defp convert_list([h | t], type, acc) do
head_converted = convert_list(h, type, [])
convert_list(t, type, [head_converted | acc])
end
defp convert_list(tup, type, acc) when is_tuple(tup) do
convert_list(Tuple.to_list(tup), type, acc)
end
defp convert_list(bool, type, _) when is_boolean(bool) and type in [:float32, :float64] do
case bool do
true -> 1.0
false -> 0.0
end
end
defp convert_list(bool, type, _) when is_boolean(bool) and type in [:uint8, :int32, :int64] do
case bool do
true -> 1
false -> 0
end
end
defp convert_list(integer, type, _) when is_integer(integer) and type in [:float32, :float64] do
integer / 1
end
defp convert_list(value, _, _) do
value
end
defp flat_list(list) do
flat = flat_list(list, [])
Enum.reverse(flat)
end
defp flat_list([], acc) do
acc
end
defp flat_list([h | t], acc) when is_list(h) do
flat_list(t, flat_list(h, acc))
end
defp flat_list([h | t], acc) do
flat_list(t, [h | acc])
end
end
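# Illustrative sketch of to_list_wrapper/1 and from_list_wrapper/1 (hypothetical values).
# Mixing integers and a float should coerce every element to a floating-point dtype; the
# exact dtype depends on ExTorch.Utils.Types.compare_types/2.
wrapper = ExTorch.Utils.to_list_wrapper([[1, 2.0], [3, 4]])
# wrapper.size => [2, 2]
nested = ExTorch.Utils.from_list_wrapper(wrapper)
# nested => [[1.0, 2.0], [3.0, 4.0]] (assuming float coercion)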
# Source file: lib/extorch/utils.ex
defmodule Animu.Media.Anime do
@moduledoc """
Stores and builds Anime data
"""
use Animu.Ecto.Schema
alias Animu.Ecto.Image
alias __MODULE__
alias Animu.Media
alias Media.Franchise
alias Anime.{Bag, Options}
alias Anime.{Episode, Season, Genre}
schema "anime" do
## Meta Data
field :name, :string
field :titles, {:map, :string}
field :synopsis, :string
field :slug, :string # CI
field :directory, :string
field :cover_image, Image
field :poster_image, Image
field :gallery, {:map, :string}
field :trailers, {:array, :string}
field :tags, {:array, :string}
many_to_many :genres, Genre,
join_through: "anime_genre",
on_replace: :delete,
defaults: []
field :nsfw, :boolean
field :age_rating, :string
field :age_guide, :string
## External Data
field :kitsu_rating, :float
field :kitsu_id, :string
field :mal_id, :string
field :tvdb_id, :string
field :anidb_id, :string
## Franchise Data
belongs_to :franchise, Franchise
field :subtitle, :string
field :subtype, :string # CI
field :number, :integer
## Episode Data
has_many :episodes, Episode,
on_replace: :delete,
defaults: []
field :episode_count, :integer
field :episode_length, :integer
## Augur Data
field :augur, :boolean
field :augured_at, :utc_datetime
field :regex, :string
field :rss_feed, :string
field :subgroup, :string
field :quality, :string
## Time Data
many_to_many :season, Season,
join_through: "anime_season",
defaults: []
field :airing, :boolean
field :airing_at, :map
field :start_date, :date
field :end_date, :date
timestamps()
end
def build(%Anime{} = anime, params, opt \\ []) do
with {:ok, opt} <- Options.parse(opt),
{:ok, bag} <- Bag.new(anime, params, opt),
bag <- Bag.invoke(bag),
{:ok, attrs} <- Bag.compile(bag),
{:ok, ch} <- valid_changeset(bag.anime, attrs)
do
ch =
ch
|> Bag.add_todos(bag)
|> put_assoc(:episodes, attrs.episodes)
{:ok, ch, bag.golems}
else
{:error, msg} -> {:error, msg}
error ->
{:error, "Unexpected Error: #{inspect(error)}"}
end
end
defp valid_changeset(%Anime{} = anime, attrs) do
case changeset(anime, attrs) do
%Changeset{valid?: true} = ch ->
{:ok, ch}
ch ->
errors = Animu.Util.format_errors(ch)
{:error, errors}
end
end
def changeset(%Anime{} = anime, attrs) do
anime
|> cast(attrs, all_fields(Anime))
|> validate_required([:name, :slug, :directory])
|> unique_constraint(:slug)
end
def bake_golems(anime, jobs) do
jobs
|> Enum.dedup
|> Enum.map(fn {module, params} ->
params = params ++ [anime: anime]
Kiln.bake(module, params)
end)
end
end
defimpl Inspect, for: Animu.Media.Anime do
import Inspect.Algebra
@fields [
## Meta
:name,
:titles,
:synopsis,
:slug,
:directory,
:cover_image,
:poster_image,
:gallery,
#:trailers,
:tags,
:genres,
:nsfw,
:age_rating,
:age_guide,
## External Data
:kitsu_rating,
:kitsu_id,
:mal_id,
:tvdb_id,
:anidb_id,
## Franchise Data
:franchise,
:subtitle,
:subtype,
:number,
## Episode Data
:episodes,
:episode_count,
:episode_length,
## Augur Data
:augur,
:augured_at,
:regex,
:rss_feed,
:subgroup,
:quality,
## Time Data
:season,
:airing,
:airing_at,
:start_date,
:end_date,
]
def inspect(changeset, opts) do
list = for attr <- @fields do
{attr, Map.get(changeset, attr)}
end
container_doc("#Anime<", list, ">", opts, fn
      {field, %Ecto.Association.NotLoaded{}}, opts ->
concat(color("#{field}: ", :atom, opts), to_doc(:not_loaded, opts))
{field, value}, opts -> concat(color("#{field}: ", :atom, opts), to_doc(value, opts))
end)
end
defp to_struct(%{__struct__: struct}, _opts), do: "#" <> Kernel.inspect(struct) <> "<>"
defp to_struct(other, opts), do: to_doc(other, opts)
end
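# Hypothetical changeset sketch for Animu.Media.Anime: only :name, :slug, and :directory
# are required, so a minimal attrs map like the one below should produce a valid changeset
# (the attribute values are made up).
attrs = %{name: "Cowboy Bebop", slug: "cowboy-bebop", directory: "/anime/cowboy-bebop"}
changeset = Animu.Media.Anime.changeset(%Animu.Media.Anime{}, attrs)
true = changeset.valid?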
# Source file: lib/animu/media/anime.ex
defmodule ESpec.DatesTimes.Comparator do
@moduledoc false
alias ESpec.DatesTimes.Delegator
alias ESpec.DatesTimes.Types
@units [
:year,
:month,
:week,
:day,
:hour,
:minute,
:second,
:millisecond,
:microsecond
]
  @spec diff(non_neg_integer, non_neg_integer, Types.time_units()) :: integer
  def diff(a, a, granularity) when is_integer(a), do: zero(granularity)
  def diff(a, b, granularity) when is_integer(a) and is_integer(b) and is_atom(granularity) do
    do_diff(a, b, granularity)
  end
  def diff(a, b, granularity) do
case {Delegator.to_comparison_units(a), Delegator.to_comparison_units(b)} do
{{:error, _} = err, _} -> err
{_, {:error, _} = err} -> err
{au, bu} when is_integer(au) and is_integer(bu) -> diff(au, bu, granularity)
end
end
defp do_diff(a, a, type), do: zero(type)
defp do_diff(a, b, :microsecond), do: a - b
defp do_diff(a, b, :millisecond), do: div(a - b, 1_000)
defp do_diff(a, b, :second), do: div(a - b, 1_000 * 1_000)
defp do_diff(a, b, :minute), do: div(a - b, 1_000 * 1_000 * 60)
defp do_diff(a, b, :hour), do: div(a - b, 1_000 * 1_000 * 60 * 60)
defp do_diff(a, b, :day), do: div(a - b, 1_000 * 1_000 * 60 * 60 * 24)
defp do_diff(a, b, :week), do: div(a - b, 1_000 * 1_000 * 60 * 60 * 24 * 7)
defp do_diff(a, b, :month), do: diff_months(a, b)
defp do_diff(a, b, :year), do: diff_years(a, b)
defp do_diff(_, _, granularity) when not (granularity in @units),
do: {:error, {:invalid_granularity, granularity}}
defp diff_years(a, b) do
{start_date, _} = :calendar.gregorian_seconds_to_datetime(div(a, 1_000 * 1_000))
{end_date, _} = :calendar.gregorian_seconds_to_datetime(div(b, 1_000 * 1_000))
if a > b do
do_diff_years(end_date, start_date, 0)
else
do_diff_years(start_date, end_date, 0) * -1
end
end
defp do_diff_years({y, _, _}, {y, _, _}, acc) do
acc
end
defp do_diff_years({y1, m, d}, {y2, _, _} = ed, acc) when y1 < y2 do
sd2 = {y1 + 1, m, d}
if :calendar.valid_date(sd2) do
sd2_secs = :calendar.datetime_to_gregorian_seconds({sd2, {0, 0, 0}})
ed_secs = :calendar.datetime_to_gregorian_seconds({ed, {0, 0, 0}})
if sd2_secs <= ed_secs do
do_diff_years(sd2, ed, acc + 1)
else
acc
end
else
# This date is a leap day, so subtract a day and try again
do_diff_years({y1, m, d - 1}, ed, acc)
end
end
defp diff_months(a, b) do
{start_date, _} = :calendar.gregorian_seconds_to_datetime(div(a, 1_000 * 1_000))
{end_date, _} = :calendar.gregorian_seconds_to_datetime(div(b, 1_000 * 1_000))
if a > b do
do_diff_months(end_date, start_date)
else
do_diff_months(start_date, end_date) * -1
end
end
defp do_diff_months({y, m, _}, {y, m, _}), do: 0
defp do_diff_months({y1, m1, d1}, {y2, m2, d2}) when y1 <= y2 and m1 < m2 do
year_diff = y2 - y1
month_diff = if d2 >= d1, do: m2 - m1, else: m2 - 1 - m1
year_diff * 12 + month_diff
end
defp do_diff_months({y1, m1, d1}, {y2, m2, d2}) when y1 < y2 and m1 > m2 do
year_diff = y2 - (y1 + 1)
month_diff =
cond do
d2 == d1 ->
12 - (m1 - m2)
d2 > d1 ->
12 - (m1 - 1 - m2)
d2 < d1 ->
12 - (m1 - m2)
end
year_diff * 12 + month_diff
end
defp do_diff_months({y1, m, d1}, {y2, m, d2}) when y1 < y2 do
year_diff = y2 - (y1 + 1)
month_diff = if d2 > d1, do: 11, else: 12
year_diff * 12 + month_diff
end
defp zero(_type), do: 0
end
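# Sketch of diff/3 on raw gregorian-microsecond integers (hypothetical values): the result
# is positive when the first argument is later and negative when it is earlier.
one_hour = 1_000 * 1_000 * 60 * 60
3600 = ESpec.DatesTimes.Comparator.diff(one_hour, 0, :second)
-1 = ESpec.DatesTimes.Comparator.diff(0, one_hour, :hour)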
# Source file: lib/espec/dates_times/comparator.ex
defmodule Cashtrail.Banking.Account do
@moduledoc """
This is an `Ecto.Schema` struct that represents a banking account of the entity.
## Definition
According to [Investopedia](https://www.investopedia.com/terms/a/account.asp),
the term account generally refers to a record-keeping or ledger activity. This
can be any system of money in common use by people.
  You should first create an account to track the money or assets. An account could be your
  wallet, your savings account, your checking account, or your brokerage account, for example.
  Then you can create transactions to move the account's money or assets.
  ## Fields
  * `:id` - The unique id of the account.
  * `:description` - The description of the account.
  * `:type` - The type of account. Can be:
    * `:cash` - Used to track physical money, like wallets or a cashier.
    * `:checking` - Used to track checking accounts.
    * `:saving` - Used to track savings accounts.
    * `:digital` - Used to track digital accounts.
    * `:credit` - Used to track loans, financings, or credit cards.
    * `:investment` - Used to track investments, like a brokerage account.
    * `:other` - Used to track any other kind of account not listed above.
  * `:initial_balance_amount` - The initial balance of the account.
  * `:initial_balance_date` - The date of the initial balance of the account. This cannot be
    changed after creation.
  * `:avatar_url` - An icon or image that represents this account.
  * `:restricted_transaction_types` - The transaction types this account can be used with.
    If the list is empty, all transaction types are allowed. Cannot be changed after creation.
  * `:identifier` - The data that identifies the account. See `Cashtrail.Banking.AccountIdentifier`
    for more information.
  * `:currency` - The ISO code of the currency used by the account. This cannot be changed after
    account creation.
  * `:institution` - The institution of the account. See `Cashtrail.Banking.Institution` for
    more information.
* `:predicted_account` - If this account is a credit card or a loan, the predicted_account is
where the transaction will be created.
* `:archived_at` - When the account was archived.
* `:inserted_at` - When the account was inserted at the first time.
* `:updated_at` - When the account was updated at the last time.
See `Cashtrail.Banking` to know how to list, get, insert, update, and delete accounts.
"""
use Ecto.Schema
import Ecto.Changeset
alias Cashtrail.Banking
@derive Cashtrail.Statuses.WithStatus
@type account_type :: :cash | :checking | :saving | :digital | :credit | :investment | :other
@type transaction_type :: :income | :expense | :tax | :transfer | :exchange | :refund
@type t :: %Cashtrail.Banking.Account{
id: Ecto.UUID.t() | nil,
description: String.t() | nil,
type: account_type() | nil,
initial_balance_amount: number() | Decimal.t() | nil,
initial_balance_date: Date.t() | nil,
avatar_url: String.t() | nil,
restricted_transaction_types: list() | nil,
identifier: Banking.AccountIdentifier.t() | nil,
currency: String.t() | nil,
institution: Banking.Institution.t() | Ecto.Association.NotLoaded.t() | nil,
institution_id: Ecto.UUID.t() | nil,
predicted_account: Banking.Account.t() | Ecto.Association.NotLoaded.t() | nil,
predicted_account_id: Ecto.UUID.t() | nil,
archived_at: NaiveDateTime.t() | nil,
updated_at: NaiveDateTime.t() | nil,
inserted_at: NaiveDateTime.t() | nil,
__meta__: Ecto.Schema.Metadata.t()
}
@primary_key {:id, :binary_id, autogenerate: true}
@foreign_key_type :binary_id
schema "accounts" do
field :description, :string
field :currency, :string
field :type, Ecto.Enum,
values: [:cash, :checking, :saving, :digital, :credit, :investment, :other],
default: :cash
field :initial_balance_amount, :decimal, default: 0
field :initial_balance_date, :date
field :avatar_url, :string
field :restricted_transaction_types, {:array, Ecto.Enum},
values: [:income, :expense, :tax, :transfer, :exchange, :refund],
default: []
embeds_one :identifier, Banking.AccountIdentifier, on_replace: :update
belongs_to :institution, Banking.Institution
belongs_to :predicted_account, Banking.Account
field :archived_at, :naive_datetime
timestamps()
end
@doc false
@spec changeset(t(), map()) :: Ecto.Changeset.t()
def changeset(account, attrs) do
account
|> cast(attrs, [
:description,
:currency,
:type,
:initial_balance_amount,
:initial_balance_date,
:restricted_transaction_types,
:avatar_url,
:institution_id,
:predicted_account_id
])
|> validate_required([:description])
|> cast_embed(:identifier)
|> foreign_key_constraint(:institution_id)
|> foreign_key_constraint(:predicted_account_id)
end
@doc false
@spec update_changeset(t | Ecto.Changeset.t(), map()) :: Ecto.Changeset.t()
def update_changeset(account, attrs) do
account
|> cast(attrs, [
:description,
:initial_balance_amount,
:avatar_url,
:institution_id,
:predicted_account_id
])
|> validate_required([:description])
|> cast_embed(:identifier)
|> foreign_key_constraint(:institution_id)
|> foreign_key_constraint(:predicted_account_id)
end
@spec archive_changeset(t | Ecto.Changeset.t()) :: Ecto.Changeset.t()
def archive_changeset(account) do
change(account, %{archived_at: NaiveDateTime.utc_now() |> NaiveDateTime.truncate(:second)})
end
@spec unarchive_changeset(t | Ecto.Changeset.t()) :: Ecto.Changeset.t()
def unarchive_changeset(account) do
change(account, %{archived_at: nil})
end
end
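# Hypothetical sketch: building a changeset for a checking account. The field values are
# illustrative; changeset/2 only requires :description, so this should be valid.
attrs = %{description: "Main checking account", type: :checking, currency: "USD"}
changeset = Cashtrail.Banking.Account.changeset(%Cashtrail.Banking.Account{}, attrs)
true = changeset.valid?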
# Source file: apps/cashtrail/lib/cashtrail/banking/account.ex
defmodule Mix.Tasks.EnableGmailInboxSync do
use Mix.Task
@shortdoc "Script to enable the Gmail inbox sync feature for the given account"
@moduledoc """
Example:
```
$ mix enable_gmail_inbox_sync [ACCOUNT_ID]
$ mix enable_gmail_inbox_sync [ACCOUNT_ID] [HISTORY_ID]
```
On Heroku:
```
$ heroku run "POOL_SIZE=2 mix enable_gmail_inbox_sync"
  $ heroku run "POOL_SIZE=2 mix enable_gmail_inbox_sync [ACCOUNT_ID]"
```
"""
require Logger
alias ChatApi.Google
alias ChatApi.Google.{Gmail, GoogleAuthorization}
@spec run([binary()]) :: :ok
def run(args) do
Application.ensure_all_started(:chat_api)
result =
case args do
[account_id] ->
enable_gmail_sync(account_id)
[account_id, history_id] ->
enable_gmail_sync(account_id, history_id)
_ ->
{:error, "An account ID is required as the initial arg"}
end
case result do
{:ok, authorization} ->
Logger.info(
"Successfully updated authorization metadata for account #{
inspect(authorization.account_id)
}: #{inspect(authorization.metadata)}"
)
{:error, reason} ->
Logger.error("Failed to enable account: #{inspect(reason)}")
end
end
@spec enable_gmail_sync(binary(), binary()) ::
{:error, binary() | Ecto.Changeset.t()}
| {:ok, ChatApi.Google.GoogleAuthorization.t()}
def enable_gmail_sync(account_id, history_id) do
case Google.get_authorization_by_account(account_id, %{client: "gmail"}) do
%GoogleAuthorization{} = authorization ->
Google.update_google_authorization(authorization, %{
metadata: %{next_history_id: history_id}
})
_ ->
{:error, "Gmail authorization not found for account"}
end
end
@spec enable_gmail_sync(binary()) ::
{:error, binary() | Ecto.Changeset.t()}
| {:ok, ChatApi.Google.GoogleAuthorization.t()}
def enable_gmail_sync(account_id) do
case Google.get_authorization_by_account(account_id, %{client: "gmail"}) do
%GoogleAuthorization{refresh_token: _, metadata: %{"next_history_id" => next_history_id}}
when is_binary(next_history_id) ->
{:error, "Gmail syncing is already enabled for this account"}
%GoogleAuthorization{refresh_token: token} = authorization ->
history_id =
token
|> Gmail.list_threads()
|> Map.get("threads", [])
|> List.first()
|> Map.get("historyId")
case Gmail.list_history(token, start_history_id: history_id) do
%{"historyId" => next_history_id} ->
Google.update_google_authorization(authorization, %{
metadata: %{next_history_id: next_history_id}
})
_ ->
{:error, "Unable to find valid history ID"}
end
authorization ->
{:error, "Invalid authorization #{inspect(authorization)}"}
end
end
end
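# Sketch: besides the shell invocations shown in the moduledoc, the task can be run
# programmatically (the account id below is a placeholder).
Mix.Task.run("enable_gmail_inbox_sync", ["<account-id>"])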
# Source file: lib/mix/tasks/enable_gmail_inbox_sync.ex
defmodule Geometry.PointM do
@moduledoc """
A point struct, representing a 2D point with a measurement.
"""
import Geometry.Guards
alias Geometry.{GeoJson, Hex, PointM, WKB, WKT}
defstruct [:coordinate]
@blank " "
@empty %{
{:ndr, :hex} => "000000000000F87F000000000000F87F000000000000F87F",
{:xdr, :hex} => "7FF80000000000007FF80000000000007FF8000000000000",
{:ndr, :binary} => Hex.to_binary("000000000000F87F000000000000F87F000000000000F87F"),
{:xdr, :binary} => Hex.to_binary("7FF80000000000007FF80000000000007FF8000000000000")
}
@type t :: %PointM{coordinate: Geometry.coordinate() | nil}
@doc """
Creates an empty `PointM`.
## Examples
iex> PointM.new()
%PointM{coordinate: nil}
"""
@spec new :: t()
def new, do: %PointM{}
@doc """
Creates a `PointM` from the given `coordinate`.
## Examples
iex> PointM.new([1.5, -2.1, 4])
%PointM{coordinate: [1.5, -2.1, 4]}
"""
@spec new(Geometry.coordinate()) :: t()
def new([x, y, m] = coordinate) when is_coordinate(x, y, m) do
%PointM{coordinate: coordinate}
end
@doc """
Creates a `PointM` from the given `x`, `y`, and `m`.
## Examples
iex> PointM.new(-1.1, 2.2, 4)
%PointM{coordinate: [-1.1, 2.2, 4]}
"""
@spec new(number(), number(), number()) :: t()
def new(x, y, m) when is_coordinate(x, y, m) do
%PointM{coordinate: [x, y, m]}
end
@doc """
Returns `true` if the given `PointM` is empty.
## Examples
iex> PointM.empty?(PointM.new())
true
iex> PointM.empty?(PointM.new(1, 2, 4))
false
"""
@spec empty?(t()) :: boolean
def empty?(%PointM{coordinate: coordinate}), do: is_nil(coordinate)
@doc """
Creates a `PointM` from the given coordinate.
## Examples
iex> PointM.from_coordinates([[-1, 1, 1]])
%PointM{coordinate: [-1, 1, 1]}
"""
@spec from_coordinates(Geometry.coordinate() | [nil, ...]) :: t()
def from_coordinates([[x, y, m] = coordinate]) when is_coordinate(x, y, m) do
%PointM{coordinate: coordinate}
end
def from_coordinates([x, y, m] = coordinate) when is_coordinate(x, y, m) do
%PointM{coordinate: coordinate}
end
def from_coordinates([nil, nil, nil]) do
%PointM{}
end
@doc """
Returns the WKT representation for a `PointM`. With option `:srid` an EWKT
representation with the SRID is returned.
## Examples
iex> PointM.to_wkt(PointM.new())
"Point M EMPTY"
iex> PointM.to_wkt(PointM.new(1.1, 2.2, 4.4))
"Point M (1.1 2.2 4.4)"
iex> PointM.to_wkt(PointM.new(1.1, 2.2, 4.4), srid: 4711)
"SRID=4711;Point M (1.1 2.2 4.4)"
"""
@spec to_wkt(t(), opts) :: Geometry.wkt()
when opts: [srid: Geometry.srid()]
def to_wkt(%PointM{coordinate: coordinate}, opts \\ []) do
WKT.to_ewkt(<<"Point M ", to_wkt_point(coordinate)::binary()>>, opts)
end
@doc """
Returns an `:ok` tuple with the `PointM` from the given WKT string. Otherwise
returns an `:error` tuple.
If the geometry contains an SRID the id is added to the tuple.
## Examples
iex> PointM.from_wkt("Point M (-5.1 7.8 12)")
{:ok, %PointM{coordinate: [-5.1, 7.8, 12]}}
iex> PointM.from_wkt("SRID=7219;Point M (-5.1 7.8 12)")
{:ok, {%PointM{coordinate: [-5.1, 7.8, 12]}, 7219}}
iex> PointM.from_wkt("Point M EMPTY")
{:ok, %PointM{}}
"""
@spec from_wkt(Geometry.wkt()) ::
{:ok, t()} | {t(), Geometry.srid()} | Geometry.wkt_error()
def from_wkt(wkt), do: WKT.to_geometry(wkt, PointM)
@doc """
The same as `from_wkt/1`, but raises a `Geometry.Error` exception if it fails.
"""
@spec from_wkt!(Geometry.wkt()) :: t() | {t(), Geometry.srid()}
def from_wkt!(wkt) do
case WKT.to_geometry(wkt, PointM) do
{:ok, geometry} -> geometry
error -> raise Geometry.Error, error
end
end
@doc """
Returns the GeoJSON term of a `PointM`.
## Examples
iex> PointM.to_geo_json(PointM.new(1, 2, 4))
%{"type" => "Point", "coordinates" => [1, 2, 4]}
"""
@spec to_geo_json(t()) :: Geometry.geo_json_term()
def to_geo_json(%PointM{coordinate: coordinate}) when not is_nil(coordinate) do
%{
"type" => "Point",
"coordinates" => coordinate
}
end
@doc """
Returns an `:ok` tuple with the `PointM` from the given GeoJSON term.
Otherwise returns an `:error` tuple.
## Examples
iex> ~s({"type": "Point", "coordinates": [1.1, 2.2, 4.4]})
iex> |> Jason.decode!()
iex> |> PointM.from_geo_json()
{:ok, %PointM{coordinate: [1.1, 2.2, 4.4]}}
"""
@spec from_geo_json(Geometry.geo_json_term()) :: {:ok, t()} | Geometry.geo_json_error()
def from_geo_json(json), do: GeoJson.to_point(json, PointM)
@doc """
The same as `from_geo_json/1`, but raises a `Geometry.Error` exception if it
fails.
"""
@spec from_geo_json!(Geometry.geo_json_term()) :: t()
def from_geo_json!(json) do
case GeoJson.to_point(json, PointM) do
{:ok, geometry} -> geometry
error -> raise Geometry.Error, error
end
end
@doc """
Returns the WKB representation for a `PointM`.
With option `:srid` an EWKB representation with the SRID is returned.
The option `:endian` indicates whether `:xdr` big endian or `:ndr` little
endian is returned. The default is `:xdr`.
The `:mode` determines whether a hex-string or binary is returned. The default
is `:binary`.
## Examples
iex> PointM.to_wkb(PointM.new(), mode: :hex)
"00400000017FF80000000000007FF80000000000007FF8000000000000"
iex> PointM.to_wkb(PointM.new(), endian: :ndr, mode: :hex)
"0101000040000000000000F87F000000000000F87F000000000000F87F"
iex> PointM.to_wkb(PointM.new(1.1, 2.2, 4.4), endian: :xdr, mode: :hex)
"00400000013FF199999999999A400199999999999A401199999999999A"
iex> PointM.to_wkb(PointM.new(1.1, 2.2, 4.4), endian: :ndr, mode: :hex)
"01010000409A9999999999F13F9A999999999901409A99999999991140"
iex> PointM.to_wkb(PointM.new(1.1, 2.2, 4.4), srid: 4711, endian: :xdr, mode: :hex)
"0060000001000012673FF199999999999A400199999999999A401199999999999A"
"""
@spec to_wkb(t(), opts) :: Geometry.wkb()
when opts: [endian: Geometry.endian(), srid: Geometry.srid(), mode: Geometry.mode()]
def to_wkb(%PointM{coordinate: coordinate}, opts \\ []) do
endian = Keyword.get(opts, :endian, Geometry.default_endian())
srid = Keyword.get(opts, :srid)
mode = Keyword.get(opts, :mode, Geometry.default_mode())
to_wkb(coordinate, srid, endian, mode)
end
@doc """
Returns an `:ok` tuple with the `PointM` from the given WKB string. Otherwise
returns an `:error` tuple.
If the geometry contains a SRID the id is added to the tuple.
The optional second argument determines if a `:hex`-string or a `:binary`
input is expected. The default is `:binary`.
## Examples
iex> PointM.from_wkb(
...> "00400000017FF80000000000007FF80000000000007FF8000000000000",
...> :hex
...> )
{:ok, %PointM{coordinate: nil}}
iex> PointM.from_wkb(
...> "00400000013FF199999999999A400199999999999A401199999999999A",
...> :hex
...> )
{:ok, %PointM{coordinate: [1.1, 2.2, 4.4]}}
iex> PointM.from_wkb(
...> "01010000409A9999999999F13F9A999999999901409A99999999991140",
...> :hex
...> )
{:ok, %PointM{coordinate: [1.1, 2.2, 4.4]}}
iex> PointM.from_wkb(
...> "0060000001000012673FF199999999999A400199999999999A401199999999999A",
...> :hex
...> )
{:ok, {%PointM{coordinate: [1.1, 2.2, 4.4]}, 4711}}
"""
@spec from_wkb(Geometry.wkb(), Geometry.mode()) ::
{:ok, t() | {t(), Geometry.srid()}}
| Geometry.wkb_error()
def from_wkb(wkb, mode \\ :binary), do: WKB.to_geometry(wkb, mode, PointM)
@doc """
The same as `from_wkb/2`, but raises a `Geometry.Error` exception if it fails.
"""
@spec from_wkb!(Geometry.wkb(), Geometry.mode()) :: t() | {t(), Geometry.srid()}
def from_wkb!(wkb, mode \\ :binary) do
case WKB.to_geometry(wkb, mode, PointM) do
{:ok, geometry} -> geometry
error -> raise Geometry.Error, error
end
end
@doc false
@compile {:inline, to_wkt_coordinate: 1}
@spec to_wkt_coordinate(Geometry.coordinate()) :: String.t()
def to_wkt_coordinate([x, y, m]) do
<<
to_wkt_number(x)::binary(),
@blank,
to_wkt_number(y)::binary(),
@blank,
to_wkt_number(m)::binary()
>>
end
@compile {:inline, to_wkt_point: 1}
defp to_wkt_point(nil), do: "EMPTY"
defp to_wkt_point(coordinate), do: <<"(", to_wkt_coordinate(coordinate)::binary(), ")">>
@compile {:inline, to_wkt_number: 1}
defp to_wkt_number(num) when is_integer(num), do: Integer.to_string(num)
defp to_wkt_number(num) when is_float(num), do: Float.to_string(num)
@doc false
@compile {:inline, to_wkb: 4}
@spec to_wkb(
Geometry.coordinate() | nil,
Geometry.srid() | nil,
Geometry.endian(),
Geometry.mode()
) ::
binary()
def to_wkb(coordinate, srid, endian, mode) do
<<
WKB.byte_order(endian, mode)::binary(),
wkb_code(endian, not is_nil(srid), mode)::binary,
WKB.srid(srid, endian, mode)::binary(),
to_wkb_coordinate(coordinate, endian, mode)::binary
>>
end
@doc false
@compile {:inline, to_wkb_coordinate: 3}
@spec to_wkb_coordinate(coordinate, endian, mode) :: wkb
when coordinate: Geometry.coordinate() | nil,
endian: Geometry.endian(),
mode: Geometry.mode(),
wkb: Geometry.wkb()
def to_wkb_coordinate(nil, endian, mode), do: Map.fetch!(@empty, {endian, mode})
def to_wkb_coordinate([x, y, m], endian, mode) do
<<
to_wkb_number(x, endian, mode)::binary(),
to_wkb_number(y, endian, mode)::binary(),
to_wkb_number(m, endian, mode)::binary()
>>
end
@compile {:inline, to_wkb_number: 3}
defp to_wkb_number(num, endian, :hex), do: Hex.to_float_string(num, endian)
defp to_wkb_number(num, :xdr, :binary), do: <<num::big-float-size(64)>>
defp to_wkb_number(num, :ndr, :binary), do: <<num::little-float-size(64)>>
@compile {:inline, wkb_code: 3}
defp wkb_code(endian, srid?, :hex) do
case {endian, srid?} do
{:xdr, false} -> "40000001"
{:ndr, false} -> "01000040"
{:xdr, true} -> "60000001"
{:ndr, true} -> "01000060"
end
end
defp wkb_code(endian, srid?, :binary) do
case {endian, srid?} do
{:xdr, false} -> <<0x40000001::big-integer-size(32)>>
{:ndr, false} -> <<0x40000001::little-integer-size(32)>>
{:xdr, true} -> <<0x60000001::big-integer-size(32)>>
{:ndr, true} -> <<0x60000001::little-integer-size(32)>>
end
end
end
# Source file: lib/geometry/point_m.ex
defmodule AWS.Rekognition do
@moduledoc """
This is the Amazon Rekognition API reference.
"""
@doc """
Compares a face in the *source* input image with each face detected in the
*target* input image.
<note> If the source image contains multiple faces, the service detects the
largest face and uses it to compare with each face detected in the target
image.
</note> In response, the operation returns an array of face matches ordered
by similarity score with the highest similarity scores first. For each face
match, the response provides a bounding box of the face and `confidence`
value (indicating the level of confidence that the bounding box contains a
face). The response also provides a `similarity` score, which indicates how
closely the faces match.
<note> By default, only faces with the similarity score of greater than or
equal to 80% are returned in the response. You can change this value.
</note> In addition to the face matches, the response returns information
about the face in the source image, including the bounding box of the face
and confidence value.
<note> This is a stateless API operation. That is, the operation does not
persist any data.
</note> For an example, see `get-started-exercise-compare-faces`
This operation requires permissions to perform the
`rekognition:CompareFaces` action.
"""
def compare_faces(client, input, options \\ []) do
request(client, "CompareFaces", input, options)
end
@doc """
Creates a collection in an AWS Region. You can add faces to the collection
using the operation.
For example, you might create collections, one for each of your application
users. A user can then index faces using the `IndexFaces` operation and
persist results in a specific collection. Then, a user can search the
collection for faces in the user-specific container.
For an example, see `example1`.
This operation requires permissions to perform the
`rekognition:CreateCollection` action.
"""
def create_collection(client, input, options \\ []) do
request(client, "CreateCollection", input, options)
end
@doc """
Deletes the specified collection. Note that this operation removes all
faces in the collection. For an example, see `example1`.
This operation requires permissions to perform the
`rekognition:DeleteCollection` action.
"""
def delete_collection(client, input, options \\ []) do
request(client, "DeleteCollection", input, options)
end
@doc """
Deletes faces from a collection. You specify a collection ID and an array
of face IDs to remove from the collection.
This operation requires permissions to perform the
`rekognition:DeleteFaces` action.
"""
def delete_faces(client, input, options \\ []) do
request(client, "DeleteFaces", input, options)
end
@doc """
Detects faces within an image (JPEG or PNG) that is provided as input.
For each face detected, the operation returns face details including a
bounding box of the face, a confidence value (that the bounding box
contains a face), and a fixed set of attributes such as facial landmarks
(for example, coordinates of eye and mouth), gender, presence of beard,
sunglasses, etc.
The face-detection algorithm is most effective on frontal faces. For
non-frontal or obscured faces, the algorithm may not detect the faces or
might detect faces with lower confidence.
<note> This is a stateless API operation. That is, the operation does not
persist any data.
</note> For an example, see `get-started-exercise-detect-faces`.
This operation requires permissions to perform the
`rekognition:DetectFaces` action.
"""
def detect_faces(client, input, options \\ []) do
request(client, "DetectFaces", input, options)
end
@doc """
Detects instances of real-world labels within an image (JPEG or PNG)
provided as input. This includes objects like flower, tree, and table;
events like wedding, graduation, and birthday party; and concepts like
landscape, evening, and nature. For an example, see
`get-started-exercise-detect-labels`.
For each object, scene, and concept the API returns one or more labels.
Each label provides the object name, and the level of confidence that the
image contains the object. For example, suppose the input image has a
lighthouse, the sea, and a rock. The response will include all three
labels, one for each object.
`{Name: lighthouse, Confidence: 98.4629}`
`{Name: rock,Confidence: 79.2097}`
` {Name: sea,Confidence: 75.061}`
In the preceding example, the operation returns one label for each of the
three objects. The operation can also return multiple labels for the same
object in the image. For example, if the input image shows a flower (for
example, a tulip), the operation might return the following three labels.
`{Name: flower,Confidence: 99.0562}`
`{Name: plant,Confidence: 99.0562}`
`{Name: tulip,Confidence: 99.0562}`
In this example, the detection algorithm more precisely identifies the
flower as a tulip.
You can provide the input image as an S3 object or as base64-encoded bytes.
In response, the API returns an array of labels. In addition, the response
also includes the orientation correction. Optionally, you can specify
`MinConfidence` to control the confidence threshold for the labels
returned. The default is 50%. You can also add the `MaxLabels` parameter to
limit the number of labels returned.
<note> If the object detected is a person, the operation doesn't provide
the same facial details that the `DetectFaces` operation provides.
</note> This is a stateless API operation. That is, the operation does not
persist any data.
This operation requires permissions to perform the
`rekognition:DetectLabels` action.
"""
def detect_labels(client, input, options \\ []) do
request(client, "DetectLabels", input, options)
end
@doc """
Detects faces in the input image and adds them to the specified collection.
Amazon Rekognition does not save the actual faces detected. Instead, the
underlying detection algorithm first detects the faces in the input image,
and for each face extracts facial features into a feature vector, and
stores it in the back-end database. Amazon Rekognition uses feature vectors
when performing face match and search operations using the and operations.
If you provide the optional `externalImageID` for the input image you
provided, Amazon Rekognition associates this ID with all faces that it
detects. When you call the operation, the response returns the external ID.
You can use this external image ID to create a client-side index to
associate the faces with each image. You can then use the index to find all
faces in an image.
  In response, the operation returns an array of metadata for all detected
  faces. This includes the bounding box of the detected face, a confidence
  value (indicating the bounding box contains a face), a face ID assigned by
  the service for each face that is detected and stored, and an image ID
  assigned by the service for the input image. If you request all facial
  attributes (using the `detectionAttributes` parameter), Amazon Rekognition
  returns detailed facial attributes such as facial landmarks (for example,
  the location of eyes and mouth) and other facial attributes such as gender.
  If you provide the same image, specify the same collection, and use the same
  external ID in the `IndexFaces` operation, Amazon Rekognition doesn't save
  duplicate face metadata.
For an example, see `example2`.
This operation requires permissions to perform the `rekognition:IndexFaces`
action.
"""
def index_faces(client, input, options \\ []) do
request(client, "IndexFaces", input, options)
end
@doc """
Returns list of collection IDs in your account. If the result is truncated,
the response also provides a `NextToken` that you can use in the subsequent
request to fetch the next set of collection IDs.
For an example, see `example1`.
This operation requires permissions to perform the
`rekognition:ListCollections` action.
"""
def list_collections(client, input, options \\ []) do
request(client, "ListCollections", input, options)
end
@doc """
Returns metadata for faces in the specified collection. This metadata
includes information such as the bounding box coordinates, the confidence
(that the bounding box contains a face), and face ID. For an example, see
`example3`.
This operation requires permissions to perform the `rekognition:ListFaces`
action.
"""
def list_faces(client, input, options \\ []) do
request(client, "ListFaces", input, options)
end
@doc """
For a given input face ID, searches for matching faces in the collection
the face belongs to. You get a face ID when you add a face to the
collection using the `IndexFaces` operation. The operation compares the
features of the input face with faces in the specified collection.
<note> You can also search faces without indexing faces by using the
`SearchFacesByImage` operation.
</note> The operation response returns an array of faces that match,
ordered by similarity score with the highest similarity first. More
specifically, it is an array of metadata for each face match that is found.
Along with the metadata, the response also includes a `confidence` value
for each face match, indicating the confidence that the specific face
matches the input face.
For an example, see `example3`.
This operation requires permissions to perform the
`rekognition:SearchFaces` action.
"""
def search_faces(client, input, options \\ []) do
request(client, "SearchFaces", input, options)
end
@doc """
For a given input image, first detects the largest face in the image, and
then searches the specified collection for matching faces. The operation
compares the features of the input face with faces in the specified
collection.
<note> To search for all faces in an input image, you might first call the
operation, and then use the face IDs returned in subsequent calls to the
operation.
You can also call the `DetectFaces` operation and use the bounding boxes in
the response to make face crops, which then you can pass in to the
`SearchFacesByImage` operation.
</note> The response returns an array of faces that match, ordered by
similarity score with the highest similarity first. More specifically, it
is an array of metadata for each face match found. Along with the metadata,
the response also includes a `similarity` indicating how similar the face
is to the input face. In the response, the operation also returns the
bounding box (and a confidence level that the bounding box contains a face)
of the face that Amazon Rekognition used for the input image.
For an example, see `example3`.
This operation requires permissions to perform the
`rekognition:SearchFacesByImage` action.
"""
def search_faces_by_image(client, input, options \\ []) do
request(client, "SearchFacesByImage", input, options)
end
@spec request(map(), binary(), map(), list()) ::
{:ok, Poison.Parser.t | nil, Poison.Response.t} |
{:error, Poison.Parser.t} |
{:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
client = %{client | service: "rekognition"}
host = get_host("rekognition", client)
url = get_url(host, client)
headers = [{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "RekognitionService.#{action}"}]
payload = Poison.Encoder.encode(input, [])
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body)
exception = error["__type"]
message = error["message"]
{:error, {exception, message}}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
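# Hypothetical usage sketch for detect_labels/3. The exact client shape depends on the
# aws-elixir version in use; here we assume a plain map carrying the keys that request/4
# reads (:service must already be present for the map-update syntax) plus credentials for
# AWS.Request.sign_v4/5. Bucket and object names are placeholders.
client = %{
  access_key_id: System.get_env("AWS_ACCESS_KEY_ID"),
  secret_access_key: System.get_env("AWS_SECRET_ACCESS_KEY"),
  region: "us-east-1",
  endpoint: "amazonaws.com",
  proto: "https",
  port: "443",
  service: "rekognition"
}
input = %{
  "Image" => %{"S3Object" => %{"Bucket" => "my-bucket", "Name" => "photo.jpg"}},
  "MaxLabels" => 10,
  "MinConfidence" => 75
}
{:ok, labels, _raw_response} = AWS.Rekognition.detect_labels(client, input)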
# Source file: lib/aws/rekognition.ex
defmodule Vega.Card do
@moduledoc """
This module describes a card struct. A card is a basic data structure, which contains a lot of interesting details
of a card.
"""
use Yildun.Collection
alias Vega.Card
alias Vega.Comment
@collection "cards"
collection "cards" do
attribute :id, String.t(), derived: true ## the ObjectId as string
attribute :created, DateTime.t(), default: &DateTime.utc_now/0 ## creation date
attribute :modified, DateTime.t(), default: &DateTime.utc_now/0 ## last modification date
attribute :title, String.t() ## the title of the card
attribute :description, String.t() ## optional: a description as Markdown
attribute :pos, float() ## current position for ordering
attribute :board, BSON.ObjectId.t() ## the id of the board
attribute :list, BSON.ObjectId.t() ## the id of the list
attribute :archived, DateTime.t() ## date of the archiving
embeds_many :comments, Comment, default: [] ## list of comments
after_load &Card.after_load/1
end
@doc """
Create a new card with a title `title` and position `pos`.
"""
def new(board, list, title, pos) do
%Card{_id: id} = card = new()
%Card{card | id: BSON.ObjectId.encode!(id), title: title, board: board._id, list: list._id, pos: pos}
end
@doc """
  Create a new card with a title `title`, position `pos` and creation time `time`. This function is used when a
  sequence of cards is created, to preserve the order of the creation times.
"""
def new(board, list, title, pos, time) do
%Card{_id: id} = card = new()
%Card{card | id: BSON.ObjectId.encode!(id),
title: title,
board: board._id,
list: list._id,
pos: pos,
created: time,
modified: time}
end
@doc """
Deep copy of the card
"""
def clone(board, list, card) do
id = Mongo.object_id()
%Card{card | _id: id,
id: BSON.ObjectId.encode!(id),
                board: board._id,
list: list._id}
end
@doc """
Fetch all cards of the list with id `id`.
"""
def fetch_all_in_list(id) do
Mongo.find(:mongo, @collection, %{list: id, archived: %{"$exists": false}}) |> Enum.map(fn card -> load(card) end)
end
@doc """
Post-Processing after loading the struct from the database.
"""
def after_load(%Card{_id: id, comments: comments} = card) when comments == nil do
%Card{card | id: BSON.ObjectId.encode!(id), comments: []}
end
def after_load(%Card{_id: id, comments: comments} = card) do
%Card{card | id: BSON.ObjectId.encode!(id), comments: Enum.reverse(comments)}
end
def is_archived(%Card{archived: date}) do
date != nil
end
#@spec compare(Calendar.date(), Calendar.date()) :: :lt | :eq | :gt
@doc """
Used for sorting: compare the pos value.
"""
def compare(a, b) do
case a.pos - b.pos do
x when x < 0 -> :lt
x when x > 0 -> :gt
_ -> :eq
end
end
end
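# Illustrative sketch: the board and list here are bare maps carrying only the _id field
# that new/4 reads; in the real application they would be proper board/list structs.
board = %{_id: Mongo.object_id()}
list = %{_id: Mongo.object_id()}
a = Vega.Card.new(board, list, "Write docs", 1.0)
b = Vega.Card.new(board, list, "Ship release", 2.0)
:lt = Vega.Card.compare(a, b)
false = Vega.Card.is_archived(a)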
# Source file: lib/vega/card.ex
defmodule I18nHelpers.Ecto.Translator do
@doc ~S"""
  Translates an Ecto struct, a list of Ecto structs, or a map containing translations.
  Translating an Ecto struct for a given locale consists of the following steps:
  1. Get the list of the fields that need to be translated from the Schema.
  The Schema must contain a `get_translatable_fields/0` function returning
  a list of those fields.
  2. Get the text for the given locale and store it into a virtual field.
  The Schema must provide, for each translatable field, a corresponding
  virtual field in order to store the translation.
  3. Get the list of the associations that also need to be translated from
  the Schema. The Schema must contain a `get_translatable_assocs/0` function
  returning a list of those associations.
  4. Repeat steps 1-3 for each associated Ecto struct.
"""
@spec translate(list | struct | map, String.t() | atom, keyword) ::
list | struct | String.t() | nil
def translate(data_structure, locale \\ Gettext.get_locale(), opts \\ [])
def translate([], _locale, _opts), do: []
def translate([head | tail], locale, opts) do
[
translate(head, locale, opts)
| translate(tail, locale, opts)
]
end
def translate(%{__struct__: _struct_name} = entity, locale, opts) do
fields_to_translate = entity.__struct__.get_translatable_fields()
assocs_to_translate = entity.__struct__.get_translatable_assocs()
entity =
Enum.reduce(fields_to_translate, entity, fn field, updated_entity ->
virtual_translated_field = String.to_atom("translated_" <> Atom.to_string(field))
%{^field => translations} = entity
struct(updated_entity, [
{virtual_translated_field, translate(translations, locale, opts)}
])
end)
entity =
Enum.reduce(assocs_to_translate, entity, fn field, updated_entity ->
%{^field => assoc} = entity
case Ecto.assoc_loaded?(assoc) do
true ->
struct(updated_entity, [{field, translate(assoc, locale, opts)}])
_ ->
updated_entity
end
end)
entity
end
def translate(map, _locale, _opts) when map == %{}, do: nil
def translate(%{} = translations_map, locale, opts) do
locale = to_string(locale)
fallback_locale =
Keyword.get(opts, :fallback_locale, Gettext.get_locale())
|> to_string()
handle_missing_translation =
Keyword.get(opts, :handle_missing_translation, fn _, _ -> true end)
cond do
has_translation?(translations_map, locale) ->
translations_map[locale]
has_translation?(translations_map, fallback_locale) ->
translation = translations_map[fallback_locale]
handle_missing_translation.(translations_map, locale)
translation
true ->
handle_missing_translation.(translations_map, locale)
nil
end
end
defp has_translation?(translations_map, locale),
do: Map.has_key?(translations_map, locale) && String.trim(locale) != ""
  # @doc ~S"""
  # Returns a closure that remembers the given options for `translate/3`.
  # """
def set_opts(opts) do
fn data_structure, overriding_opts ->
opts = Keyword.merge(opts, overriding_opts)
locale = Keyword.get(opts, :locale, Gettext.get_locale())
translate(data_structure, locale, opts)
end
end
end
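# Sketch: translating a bare translations map (no Ecto schema involved). The values are
# hypothetical and the fallback locale is passed explicitly instead of relying on Gettext.
translations = %{"en" => "hello", "fr" => "bonjour"}
"bonjour" = I18nHelpers.Ecto.Translator.translate(translations, "fr")
"hello" = I18nHelpers.Ecto.Translator.translate(translations, "es", fallback_locale: "en")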
# Source file: lib/ecto/translator.ex
defmodule DealerReviews.Scraper do
@moduledoc """
Contains functions to request data from the site
and convert the html document to review structs.
"""
@doc """
Takes an integer range of pages then calls
`get_reviews_page` for each page and combines
the outputs.
"""
def get_reviews_pages(pages) do
HTTPoison.start()
pages
|> Enum.map(fn p -> get_reviews_page(p) end)
|> Enum.concat()
end
@doc """
Gets the data from the site then parses the output to
review structs.
"""
def get_reviews_page(page) do
page |> scrape |> parse
end
@doc """
Sends a request to the url and gets the body of the
successful response. Prints errors to console for
troubleshooting.
"""
def scrape(page) do
url =
"https://www.dealerrater.com/dealer/McKaig-Chevrolet-Buick-A-Dealer-For-The-People-dealer-reviews-23685/page#{page}/"
case HTTPoison.get(url) do
{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
body
{:ok, %HTTPoison.Response{status_code: 404}} ->
IO.puts("Not found :(")
{:error, %HTTPoison.Error{reason: reason}} ->
IO.inspect(reason)
end
end
@doc """
Converts the response body to review structs.
"""
def parse(body) do
{:ok, document} = Floki.parse_document(body)
review_dates = get_review_dates(document)
titles = get_titles(document)
bodies = get_bodies(document)
employees = get_employees(document)
ratings = get_ratings(document)
review_dates
|> Enum.zip(titles)
|> Enum.map(fn {r, t} ->
%{date: date, overall: overall, visit_reason: visit_reason} = r
%{customer: customer, title: title} = t
%{
title: title,
customer: customer,
date: date,
overall_rating: overall,
visit_reason: visit_reason
}
end)
|> Enum.zip(bodies)
|> Enum.map(fn {r, b} ->
Map.put(r, :body, b)
end)
|> Enum.zip(employees)
|> Enum.map(fn {r, e} ->
Map.put(r, :employees, e)
end)
|> Enum.zip(ratings)
|> Enum.map(fn {r, rt} ->
Map.put(r, :ratings, rt)
end)
|> Enum.map(fn r -> struct(DealerReviews.Review, r) end)
end
@doc """
Takes a Floki document and parses out review bodies.
"""
def get_bodies(document) do
find_body_sections(document)
|> Enum.map(fn b ->
parse_body_section(b)
end)
end
@doc """
Takes a Floki document and parses out titles.
"""
def get_titles(document) do
find_title_sections(document)
|> Enum.map(fn t ->
parse_title_section(t)
end)
end
@doc """
Takes a Floki document and parses out review date maps.
"""
def get_review_dates(document) do
find_review_date_sections(document)
|> Enum.map(fn r ->
parse_review_date_section(r)
end)
end
@doc """
Takes a Floki document and parses out ratings structs.
"""
def get_ratings(document) do
find_ratings_section(document)
|> Enum.map(fn rating ->
rating_list =
parse_ratings_section(rating)
|> Enum.map(fn r -> parse_rating_section(r) end)
|> Enum.filter(fn r -> r != nil end)
merge_ratings(%{}, rating_list)
end)
end
@doc """
Takes a Floki document and parses out employee struct lists.
"""
def get_employees(document) do
find_employees_sections(document)
|> Enum.map(fn e ->
parse_employees_section(e)
end)
end
# review date sections
defp find_review_date_sections(document) do
document |> Floki.find("#reviews .review-entry .review-date")
end
defp parse_review_date_section(section) do
{"div", _,
[
{"div", _, [date]},
{"div", _,
[
{"div",
[
{"class",
"rating-static visible-xs pad-none margin-none rating-" <>
<<overall::binary-size(2)>> <> " pull-right"}
], _},
_,
{"div", _, [visit_reason]}
]}
]} = section
%{date: date, overall: String.to_integer(overall) / 10, visit_reason: visit_reason}
end
# title sections
defp find_title_sections(document) do
document |> Floki.find("#reviews .review-entry .review-wrapper > div:first-of-type")
end
defp parse_title_section(section) do
{"div", _,
[
{"h3", _, [title]},
{"span", _, ["- " <> customer]}
]} = section
%{title: title |> String.replace("\"", ""), customer: customer}
end
# body sections
defp find_body_sections(document) do
document |> Floki.find("#reviews .review-entry .review-wrapper > div:nth-of-type(2)")
end
defp parse_body_section(section) do
{"div", _,
[
{"div", _,
[
{"p", _, [body]},
_
]}
]} = section
body
end
# employees sections
defp find_employees_sections(document) do
document |> Floki.find("#reviews .review-entry .review-wrapper .employees-wrapper")
end
defp parse_employee_section(section) do
case section do
{"div", _,
[
{"div", [{"class", "table"}],
[
_,
{"div", _,
[
{"a", _, [employee]},
{"div", _,
[
{"div", _,
[
{"div", _,
[
{"span", _, [rating]},
_
]}
]}
]}
]}
]}
]} ->
employee_cleaned = employee |> String.replace("\r\n", "") |> String.trim()
{rating_integer, _} = rating |> Integer.parse()
%DealerReviews.Review.EmployeeReview{name: employee_cleaned, rating: rating_integer}
_ ->
nil
end
end
defp parse_employees_section(section) do
{"div", [{"class", "col-xs-12 lt-grey pad-left-none employees-wrapper"}], [_ | employees]} =
section
employees
|> Enum.map(fn e -> parse_employee_section(e) end)
|> Enum.filter(fn e -> e != nil end)
end
# ratings sections
defp find_ratings_section(document) do
document |> Floki.find("#reviews .review-entry .review-wrapper .review-ratings-all")
end
defp parse_recommend(recommend) do
case recommend do
"Yes" -> true
"No" -> false
_ -> raise "Invalid recommend #{recommend}"
end
end
defp parse_rating_section(section) do
case section do
{"div", _,
[
{"div", _, [label]},
{"div",
[
{"class",
"rating-static-indv rating-" <> <<rating::binary-size(1)>> <> "0 margin-top-none td"}
], []}
]} ->
%{label: label, rating: rating |> String.to_integer()}
{"div", _,
[
{"div", _, [label]},
{"div", [{"class", "td small-text boldest"}], [recommend]}
]} ->
%{
label: label,
recommend: recommend |> String.replace("\r\n", "") |> String.trim() |> parse_recommend
}
_ ->
nil
end
end
defp parse_ratings_section(section) do
{"div", _,
[
_,
{"div", _, ratings}
]} = section
ratings
end
defp merge_ratings(ratings_map, ratings) do
case ratings do
[h | t] ->
case h do
%{label: "Customer Service", rating: r} -> Map.put(ratings_map, :customer_service, r)
%{label: "Quality of Work", rating: r} -> Map.put(ratings_map, :quality, r)
%{label: "Friendliness", rating: r} -> Map.put(ratings_map, :friendliness, r)
%{label: "Pricing", rating: r} -> Map.put(ratings_map, :pricing, r)
%{label: "Overall Experience", rating: r} -> Map.put(ratings_map, :overall, r)
%{label: "Recommend Dealer", recommend: r} -> Map.put(ratings_map, :recommend, r)
end
|> merge_ratings(t)
[] ->
struct(DealerReviews.Review.Ratings, ratings_map)
end
end
end
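# Usage sketch: fetch and parse the first three review pages. This hits the live site, so
# it requires network access and the page markup still matching the parsers above.
reviews = DealerReviews.Scraper.get_reviews_pages(1..3)
Enum.each(reviews, fn review -> IO.puts(review.title) end)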
# Source file: lib/scraper.ex
defmodule MrRoboto.Rules do
@moduledoc """
The Rules module defines the rules for a user-agent.
The Rules module defines the MrRoboto.Rules struct which provides a convenient
way to track the directives for a user agent
The Rules module also provides functionality for checking whether a path is
legal for a rule set
"""
@default_delay Application.get_env(:mr_roboto, :default_delay) || 1000
defstruct user_agent: "", allow: [], disallow: [], crawl_delay: @default_delay
@doc """
Sets the *Crawl Delay* for the `user-agent`
Returns an updated `MrRoboto.Rules` struct
## Examples
```
iex> rule = %MrRoboto.Rules{user_agent: "*", allow: ["/"], disallow: [], crawl_delay: 1000}
...> MrRoboto.Rules.set_delay(rule, 2000)
%MrRoboto.Rules{user_agent: "*", allow: ["/"], disallow: [], crawl_delay: 2000}
```
"""
def set_delay(rule, frequency) do
struct(rule, crawl_delay: frequency)
end
@doc """
Returns the default `Crawl-delay`
"""
def default_delay do
@default_delay
end
@doc """
Determines whether the specified `path` is allowed by the given `rule`
Returns `true` or `false`
## Examples
When checking a path and a rule the determination is made based on the directive
with the longest match. For example if `"/foo"` is allowed but `"/foo/bar"` is
disallowed a __path__ value of `"/foo/bar/baz"` would not be permitted.
```
iex> rule = %Rules{user_agent: "*", allow: ["/foo"], disallow: ["/foo/bar"]}
...> Rules.permitted? rule, "/foo/bar/baz"
false
```
Wildcard matches are counted as if they were normal directives. So for example,
`"/foo*bar"` would have an equal weight as `"/foo/bar"`. In this case the
response will be `:ambiguous` and it is up to the caller to decide how to
proceed.
```
iex> rule = %Rules{user_agent: "*", allow: ["/foo*bar"], disallow: ["/foo/bar"]}
...> Rules.permitted? rule, "/foo/bar"
:ambiguous
```
`$` terminated directives are supported as well. When matching against a `$`
terminated directive the dollar sign is ignored. However when considering
match length it is not.
```
iex> rule = %Rules{user_agent: "*", allow: ["/foo"], disallow: ["/*.php$"]}
...> Rules.permitted? rule, "/hello/world.php"
false
```
"""
def permitted?(rule, path) do
allow_check = Task.async(__MODULE__, :matching_allow, [rule, path])
disallow = matching_disallow(rule, path)
allow = Task.await(allow_check)
disallow
|> byte_size
|> case do
0 ->
true
b_size when b_size < byte_size(allow) ->
true
b_size when b_size > byte_size(allow) ->
false
_ ->
:ambiguous
end
end
def matching_allow(rule, path) do
longest_match rule.allow, path, ""
end
def matching_disallow(rule, path) do
longest_match rule.disallow, path, ""
end
@doc """
Determines the direction to walk the path for the given directive
Returns `:forwards` or `:backwards`
## Examples
  As per Google's documentation [here](https://developers.google.com/webmasters/control-crawl-index/docs/robots_txt#example-path-matches), a directive ending with `$` indicates that the pattern is matched against the end of the path.
```
iex> MrRoboto.Rules.match_direction "/*.php$"
:backwards
```
```
iex> MrRoboto.Rules.match_direction "/foo/"
:forwards
```
"""
def match_direction(directive) do
case :binary.last(directive) do
?$ ->
:backwards
_ ->
:forwards
end
end
@doc """
Finds the longest directive which matches the given path.
Returns a single directive `binary`.
## Examples
In the case of multiple patterns which match. Especially when those matches belong to both `Allow` and `Disallow` directives. It is necessary to pick a winner. This is done by finding the longest (most specific) directive that matches the path.
```
iex> directives = ["/", "/foo/bar", "/foo"]
...> path = "/foo/bar"
...> MrRoboto.Rules.longest_match directives, path, ""
"/foo/bar"
```
"""
def longest_match(directives, path, longest)
def longest_match([], _path, longest), do: longest
def longest_match([directive | rest], path, longest) do
{norm_dir, norm_path} = normalize(directive, path)
matches = directive_applies? norm_dir, norm_path
if matches && (byte_size(directive) > byte_size(longest)) do
longest_match rest, path, directive
else
longest_match rest, path, longest
end
end
defp normalize(directive, path) do
directive
|> match_direction
|> case do
:forwards ->
{directive, path}
:backwards ->
<<_ :: size(8), rev_dir :: binary>> = String.reverse(directive)
{rev_dir, String.reverse(path)}
end
end
@doc """
Determines whether the given directive applies to the given path.
Returns `true` or `false`
## Examples
The most straightforward case involves vanilla paths. As illustrated below the directive is matched character by character until there is a discrepency or the directive is exhausted. This means that in the case of a directive ending with `$` the directive and path must be reversed before being checked.
```
iex> MrRoboto.Rules.directive_applies? "/foo", "/foo/bar"
true
iex> MrRoboto.Rules.directive_applies? "/foo/bar", "/hello/bar"
false
```
It is also possible to check a directive containing one or more wildcards
```
iex> MrRoboto.Rules.directive_applies? "/foo*bar", "/foo/hello/world/bar"
true
```
```
iex> MrRoboto.Rules.directive_applies? "/f*b*", "/foo/bar.html"
true
```
"""
def directive_applies?(remaining_directive, remaining_path)
def directive_applies?("", _remaining_path), do: true
def directive_applies?(_remaining_directive, ""), do: false
def directive_applies?(<<d :: size(8), d_rest :: binary>>, <<p :: size(8), p_rest :: binary>>) do
case d do
?* ->
case handle_wildcard(d_rest, p_rest) do
{:ok, directive, path} ->
directive_applies? directive, path
{:error, :exhausted} ->
false
end
^p ->
directive_applies? d_rest, p_rest
_ ->
false
end
end
defp handle_wildcard("", _remaining_path), do: {:ok, "", ""}
defp handle_wildcard(<<target :: size(8), remaining_directive :: binary>>, path) do
case consume_until(path, target) do
{:ok, remaining_path} ->
{:ok, remaining_directive, remaining_path}
_ ->
{:error, :exhausted}
end
end
defp consume_until("", _target), do: {:error, :exhausted}
defp consume_until(<<char :: size(8), rest :: binary>>, target) do
case char do
^target ->
{:ok, rest}
_ ->
consume_until rest, target
end
end
end
|
lib/mr_roboto/rules.ex
| 0.906047
| 0.850903
|
rules.ex
|
starcoder
|
defmodule PropCheck.Test.PingPongMaster do
@moduledoc """
This is the ping pong master from Proper's Process Interaction Tutorial,
translated from Erlang to Elixir.
From the tutorial introduction:
In this tutorial, we will use PropEr to test a group of interacting processes.
The system under test consists of one master and multiple slave processes.
The main concept is that the master plays ping-pong (i.e. exchanges ping and
pong messages) with all slave processes, which do not interact with each other.
For the rest of this tutorial, we will refer to the slave processes as
the ping-pong players.
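A minimal usage sketch (not part of the original tutorial; it assumes the
master is started under its registered module name):
```elixir
{:ok, _pid} = PropCheck.Test.PingPongMaster.start_link()
:ok = PropCheck.Test.PingPongMaster.add_player(:alice)
:ok = PropCheck.Test.PingPongMaster.play_ping_pong(:alice)
PropCheck.Test.PingPongMaster.get_score(:alice)
```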
"""
use GenServer
require Logger
# -------------------------------------------------------------------
# Master's API
# -------------------------------------------------------------------
def start_link() do
GenServer.start_link(__MODULE__, [], [name: __MODULE__])
end
def stop() do
ref = Process.monitor(__MODULE__)
try do
GenServer.cast(__MODULE__, :stop)
catch
:error, :badarg -> Logger.error "already_dead_master: #{__MODULE__}"
end
receive do
{:DOWN, ^ref, :process, _object, _reason} -> :ok
end
end
def add_player(name) do
GenServer.call(__MODULE__, {:add_player, name})
end
def remove_player(name) do
GenServer.call(__MODULE__, {:remove_player, name})
end
def ping(from_name) do
#Logger.debug "Ping Pong Game for #{inspect from_name}"
r = GenServer.call(__MODULE__, {:ping, from_name})
#Logger.debug "Ping Pong result: #{inspect r}"
r
end
def get_score(name) do
GenServer.call(__MODULE__, {:get_score, name})
end
# -------------------------------------------------------------------
# Player's internal loop
# -------------------------------------------------------------------
@doc "Process loop for the ping pong player process"
def ping_pong_player(name, counter \\ 1) do
#Logger.debug "Player #{inspect name} is waiting round #{counter}"
receive do
:ping_pong -> # Logger.debug "Player #{inspect name} got a request for a ping-pong game"
ping(name)
{:tennis, from} -> send(from, :maybe_later)
{:football, from} -> send(from, :no_way)
msg -> Logger.error "Player #{inspect name} got invalid message #{inspect msg}"
exit(:kill)
end
# Logger.debug "Player #{inspect name} is recursive"
ping_pong_player(name, counter + 1)
end
# -------------------------------------------------------------------
# Player's API
# -------------------------------------------------------------------
@doc "Start playing ping pong"
@spec play_ping_pong(atom) :: :ok | {:dead_player, atom}
def play_ping_pong(player) do
robust_send(player, :ping_pong)
end
@doc "Start playing football"
def play_football(player) do
case robust_send(player, {:football, self()}) do
:ok ->
receive do
reply -> reply
after 500 -> "Football timeout!"
end
return -> return
end
end
@doc "Start playing football"
def play_football_eager(player) do
send(player, {:football, self()})
receive do
reply -> reply
after 500 -> "Football timeout!"
end
end
@doc "Start playing tennis"
def play_tennis(player) do
case robust_send(player, {:tennis, self()}) do
:ok ->
receive do
reply -> reply
after 500 -> "Tennis timeout!"
end
return -> return
end
end
defp robust_send(name, msg) do
try do
send(name, msg)
:ok
catch
:error, :badarg -> {:dead_player, name}
end
end
# -------------------------------------------------------------------
# Callbacks
# -------------------------------------------------------------------
def init([]) do
{:ok, %{}}
end
def handle_cast(:stop, scores) do
{:stop, :normal, scores}
end
def handle_call({:add_player, name}, _from, scores) do
case Map.fetch(scores, name) do
:error ->
pid = spawn(fn() -> ping_pong_player(name) end)
true = Process.register(pid, name)
{:reply, :ok, scores |> Map.put(name, 0)}
{:ok, _} ->
Logger.debug "add_player: player #{name} already exists!"
{:reply, :ok, scores}
end
end
def handle_call({:remove_player, name}, _from, scores) do
case Process.whereis(name) do
nil -> Logger.debug("Process #{name} is unknown / not running")
pid -> kill_process(pid)
end
# Process.whereis(name) |> Process.exit(:kill)
{:reply, {:removed, name}, scores |> Map.delete(name)}
end
def handle_call({:ping, from_name}, _from, scores) do
# Logger.debug "Master: Ping Pong Game for #{inspect from_name}"
if (scores |> Map.has_key?(from_name)) do
{:reply, :pong, scores |> Map.update!(from_name, &(&1 + 1))}
else
{:reply, {:removed, from_name}, scores}
end
end
def handle_call({:get_score, name}, _from, scores) do
{:reply, scores |> Map.fetch!(name), scores}
end
@doc "Terminates all clients"
def terminate(_reason, scores) do
# Logger.info "Terminate Master with scores #{inspect scores}"
scores
|> Map.keys
|> Enum.each(&kill_process(&1))
end
defp kill_process(pid) when is_pid(pid) do
# monitoring works for already killed processes
ref = Process.monitor(pid)
Process.exit(pid, :kill)
# ... and wait for the DOWN message.
receive do
{:DOWN, ^ref, :process, _object, _reason} -> :ok
end
end
defp kill_process(name) do
kill_process(Process.whereis(name))
end
end
|
test/support/ping_pong_master.ex
| 0.685844
| 0.407805
|
ping_pong_master.ex
|
starcoder
|
defmodule ExPlasma.Output.Position do
@moduledoc """
Generates an Output position given:
`blknum` - The block number for this output
`txindex` - The index of the Transaction in the block.
`oindex` - The index of the Output in the Transaction.
"""
@behaviour ExPlasma.Output
alias ExPlasma.Output
@type position() :: pos_integer()
@type t() :: %{
position: position(),
blknum: non_neg_integer(),
txindex: non_neg_integer(),
oindex: non_neg_integer()
}
@type validation_responses() ::
{:ok, t() | Output.t()}
| {:error, {:blknum, :cannot_be_nil}}
| {:error, {:blknum, :exceeds_maximum_value}}
| {:error, {:oindex, :cannot_be_nil}}
| {:error, {:txindex, :cannot_be_nil}}
| {:error, {:txindex, :exceeds_maximum_value}}
# Contract settings
# These are hard-coded to match the values used in the contracts.
# See: https://github.com/omisego/plasma-contracts/blob/master/plasma_framework/contracts/src/utils/PosLib.sol#L16-L23
@block_offset 1_000_000_000
@transaction_offset 10_000
@max_txindex :math.pow(2, 16) - 1
@max_blknum (:math.pow(2, 54) - 1 - @max_txindex) / (@block_offset / @transaction_offset)
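# Worked example: blknum 2, txindex 3, oindex 1 encodes to
# 2 * 1_000_000_000 + 3 * 10_000 + 1 = 2_000_030_001.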
@doc """
Encodes the blknum, txindex, and oindex into a single integer.
## Example
iex> pos = %{blknum: 1, txindex: 0, oindex: 0}
iex> ExPlasma.Output.Position.pos(pos)
1_000_000_000
"""
@spec pos(t()) :: number()
def pos(%{blknum: blknum, txindex: txindex, oindex: oindex}) do
blknum * @block_offset + txindex * @transaction_offset + oindex
end
@doc """
Encodes the output position into an RLP encodeable object.
## Example
iex> pos = %{blknum: 1, txindex: 0, oindex: 0}
iex> ExPlasma.Output.Position.to_rlp(pos)
<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 59, 154, 202, 0>>
"""
@impl Output
@spec to_rlp(t()) :: binary()
def to_rlp(%{blknum: _, txindex: _, oindex: _} = id) do
id |> pos() |> :binary.encode_unsigned(:big) |> pad_binary()
end
@doc """
Returns a map of the decoded position.
## Example
iex> pos = 1_000_000_000
iex> ExPlasma.Output.Position.to_map(pos)
%{position: 1_000_000_000, blknum: 1, txindex: 0, oindex: 0}
"""
@impl Output
@spec to_map(position()) :: t()
def to_map(pos) do
blknum = div(pos, @block_offset)
txindex = pos |> rem(@block_offset) |> div(@transaction_offset)
oindex = rem(pos, @transaction_offset)
%{position: pos, blknum: blknum, txindex: txindex, oindex: oindex}
end
@doc """
Validates that values can give a valid position.
## Example
iex> pos = %{blknum: 1, txindex: 0, oindex: 0}
iex> {:ok, resp} = ExPlasma.Output.Position.validate(pos)
{:ok, %{blknum: 1, txindex: 0, oindex: 0}}
"""
@impl Output
@spec validate(t()) :: validation_responses()
def validate(%{blknum: blknum, txindex: txindex, oindex: oindex} = pos) do
case do_validate({blknum, txindex, oindex}) do
{field, value} -> {:error, {field, value}}
nil -> {:ok, pos}
end
end
defp do_validate({nil, _, _}), do: {:blknum, :cannot_be_nil}
defp do_validate({_, nil, _}), do: {:txindex, :cannot_be_nil}
defp do_validate({_, _, nil}), do: {:oindex, :cannot_be_nil}
defp do_validate({blknum, _, _}) when is_integer(blknum) and blknum > @max_blknum,
do: {:blknum, :exceeds_maximum_value}
defp do_validate({_, txindex, _}) when is_integer(txindex) and txindex > @max_txindex,
do: {:txindex, :exceeds_maximum_value}
defp do_validate({_, _, _}), do: nil
defp pad_binary(unpadded) do
pad_size = (32 - byte_size(unpadded)) * 8
<<0::size(pad_size)>> <> unpadded
end
end
|
lib/ex_plasma/output/position.ex
| 0.918242
| 0.727951
|
position.ex
|
starcoder
|
defmodule Boids.Motion do
require Logger
alias Boids.Physics.Vector
@max_speed 10
def applyforce(boid, force) do
%Boids.Boid{
position: boid.position,
velocity: boid.velocity,
accleration: Vector.add(boid.accleration, force)
}
end
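# Applies the three classic boids steering rules (separation, alignment,
# cohesion) as forces on the boid, then advances it one step.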
def move_boid(boid, others) do
sep = separate(boid, others)
aln = align(boid, others)
coh = coh(boid, others)
Logger.debug(
"Sep Align and Cohesion vectors are #{inspect(sep)} #{inspect(aln)} and #{inspect(coh)}"
)
boid
|> applyforce(sep)
|> applyforce(aln)
|> applyforce(coh)
|> move
end
def separate(%Boids.Boid{} = boid, others) do
# Logger.info("Size of boids in separate is #{length(others)}")
min_space = 10
{steer_vec, count} =
others
|> Enum.filter(fn neighbour ->
distance = Vector.distance(boid.position, neighbour.position)
distance > 0 && distance < min_space
end)
|> Enum.map(fn neighbour ->
Vector.diff(boid.position, neighbour.position)
|> Vector.normalize()
|> Vector.vec_div(Vector.distance(boid.position, neighbour.position))
end)
|> Enum.reduce({Vector.new(0, 0), 0}, fn diff, {s, c} -> {Vector.add(s, diff), c + 1} end)
Logger.debug("Steer and count are #{inspect(steer_vec)} and #{inspect(count)}")
steer =
case count do
0 -> steer_vec
_ -> Vector.vec_div(steer_vec, count)
end
case Vector.magnitude(steer) do
0 ->
steer
_ ->
steer
|> Vector.normalize()
|> Vector.mult(@max_speed)
|> Vector.diff(boid.velocity)
end
end
def align(%Boids.Boid{} = boid, others) do
# Logger.info("Size of boids in align is #{length(others)}")
neighbour_dist = 10
{vec, count} =
others
|> Enum.reduce({Vector.new(), 0}, fn neighbour, {s, c} ->
d = Vector.distance(boid.position, neighbour.position)
if d > 0 && d < neighbour_dist do
{Vector.add(s, neighbour.velocity), c + 1}
else
{s, c}
end
end)
Logger.debug("Align prevec and count are #{inspect({vec, count})}")
case count do
0 ->
Vector.new()
_ ->
vec
|> Vector.vec_div(count)
|> Vector.normalize()
|> Vector.mult(@max_speed)
|> Vector.diff(boid.velocity)
end
end
def coh(%Boids.Boid{} = boid, others) do
# Logger.info("Size of boids in coh is #{length(others)}")
neighbour_dist = 50
{vec, count} =
others
|> Enum.reduce({Vector.new(), 0}, fn neighbour, {s, c} ->
d = Vector.distance(boid.position, neighbour.position)
if d > 0 && d < neighbour_dist do
{Vector.add(s, neighbour.velocity), c + 1}
else
{s, c}
end
end)
case count do
0 ->
Vector.new()
_ ->
vec
|> Vector.vec_div(count)
|> seek(boid)
end
end
def seek(target, %Boids.Boid{} = boid) do
target
|> Vector.diff(boid.position)
|> Vector.normalize()
|> Vector.mult(@max_speed)
|> Vector.diff(boid.velocity)
end
defp move(%Boids.Boid{} = boid) do
future_velocity = Vector.add(boid.velocity, boid.accleration)
Logger.debug("Future velocity is #{inspect(future_velocity)}")
%Boids.Boid{
velocity: future_velocity,
position: Vector.add(boid.position, boid.velocity),
# reset to 0
accleration: Vector.new()
}
end
end
|
lib/boids/motion.ex
| 0.586286
| 0.706038
|
motion.ex
|
starcoder
|
defmodule Vivid.Bounds do
alias Vivid.{Bounds, Point, Shape, Bounds.Of}
defstruct ~w(min max)a
@moduledoc """
Provides information about the bounds of a box and pixel positions within it.
## Example
iex> use Vivid
...> Box.init(Point.init(5,10),Point.init(15,20))
...> |> Bounds.bounds()
#Vivid.Bounds<[min: #Vivid.Point<{5, 10}>, max: #Vivid.Point<{15, 20}>]>
"""
@opaque t :: %Bounds{min: Point.t(), max: Point.t()}
@doc """
Initialise arbitrary bounds.
* `x0` - The x coordinate of the bottom-left pixel.
* `y0` - The y coordinate of the bottom-left pixel.
* `x1` - The x coordinate of the top-right pixel.
* `y1` - The y coordinate of the top-right pixel.
## Example
iex> Vivid.Bounds.init(0, 0, 5, 5)
#Vivid.Bounds<[min: #Vivid.Point<{0, 0}>, max: #Vivid.Point<{5, 5}>]>
"""
@spec init(number, number, number, number) :: Bounds.t()
def init(x0, y0, x1, y1), do: %Bounds{min: Point.init(x0, y0), max: Point.init(x1, y1)}
@doc """
Return the bounding box required to encapsulate the shape.
* `shape` - A shape whose bounds you want to measure.
## Example
iex> Vivid.Circle.init(Vivid.Point.init(10,10), 10)
...> |> Vivid.Bounds.bounds
#Vivid.Bounds<[min: #Vivid.Point<{0.0, 0.0}>, max: #Vivid.Point<{20.0, 20.0}>]>
"""
@spec bounds(Shape.t()) :: Bounds.t()
def bounds(%Bounds{} = shape), do: shape
def bounds(shape) do
{min, max} = Of.bounds(shape)
%Bounds{min: min, max: max}
end
@doc """
Returns the width of a shape.
* `shape` - The shape whose width you want to measure.
## Example
iex> Vivid.Circle.init(Vivid.Point.init(10,10), 10)
...> |> Vivid.Bounds.width
20.0
"""
@spec width(Shape.t()) :: number
def width(%Bounds{min: %Point{x: x0}, max: %Point{x: x1}} = _shape), do: abs(x1 - x0)
def width(shape), do: shape |> bounds |> width
@doc """
Returns the height of a shape.
* `shape` - The shape whose height you want to measure.
## Example
iex> Vivid.Circle.init(Vivid.Point.init(10,10), 10)
...> |> Vivid.Bounds.height
20.0
"""
@spec height(Shape.t()) :: number
def height(%Bounds{min: %Point{y: y0}, max: %Point{y: y1}}), do: abs(y1 - y0)
def height(shape), do: shape |> bounds |> height
@doc """
Returns the bottom-left point of the bounds.
* `shape` - The shape whose bottom-left pixel you want to find.
## Example
iex> Vivid.Circle.init(Vivid.Point.init(10,10), 10)
...> |> Vivid.Bounds.min
#Vivid.Point<{0.0, 0.0}>
"""
@spec min(Shape.t()) :: Point.t()
def min(%Bounds{min: min} = _shape), do: min
def min(shape), do: shape |> bounds |> min
@doc """
Returns the top-right point of the bounds.
* `shape` - The shape whose top-right pixel you want to find.
## Example
iex> Vivid.Circle.init(Vivid.Point.init(10,10), 10)
...> |> Vivid.Bounds.max
#Vivid.Point<{20.0, 20.0}>
"""
@spec max(Shape.t()) :: Point.t()
def max(%Bounds{max: max}), do: max
def max(shape), do: shape |> bounds |> max
@doc """
Returns the center point of the bounds.
* `shape` - The shape whose center-most pixel you want to find.
## Example
iex> Vivid.Circle.init(Vivid.Point.init(10,10), 10)
...> |> Vivid.Circle.to_polygon
...> |> Vivid.Bounds.center_of
#Vivid.Point<{10.0, 10.0}>
"""
@spec center_of(Shape.t()) :: Point.t()
def center_of(%Bounds{min: %Point{x: x0, y: y0}, max: %Point{x: x1, y: y1}}) do
x = x0 + (x1 - x0) / 2
y = y0 + (y1 - y0) / 2
Point.init(x, y)
end
def center_of(shape), do: shape |> bounds |> center_of
@doc """
Returns true if the point is within the bounds.
* `shape` - A shape you wish to test.
* `point` - The point you wish to test.
## Examples
iex> Vivid.Bounds.init(0, 0, 10, 10)
...> |> Vivid.Bounds.contains?(Vivid.Point.init(0, 0))
true
iex> Vivid.Bounds.init(0, 0, 10, 10)
...> |> Vivid.Bounds.contains?(Vivid.Point.init(5, 5))
true
iex> Vivid.Bounds.init(0, 0, 10, 10)
...> |> Vivid.Bounds.contains?(Vivid.Point.init(-1, -1))
false
iex> Vivid.Bounds.init(0, 0, 10, 10)
...> |> Vivid.Bounds.contains?(Vivid.Point.init(-10, -10))
false
iex> Vivid.Bounds.init(0, 0, 10, 10)
...> |> Vivid.Bounds.contains?(Vivid.Point.init(10, 10))
true
iex> Vivid.Bounds.init(0, 0, 10, 10)
...> |> Vivid.Bounds.contains?(Vivid.Point.init(11, 11))
false
"""
@spec contains?(Shape.t(), Point.t()) :: boolean
def contains?(
%Bounds{min: %Point{x: x0, y: y0}, max: %Point{x: x1, y: y1}} = _shape,
%Point{x: x, y: y} = _point
)
when x0 <= x and x <= x1 and y0 <= y and y <= y1,
do: true
def contains?(_shape, _point), do: false
end
|
lib/vivid/bounds.ex
| 0.95264
| 0.776369
|
bounds.ex
|
starcoder
|
defmodule Lexdee do
@moduledoc """
Documentation for Lexdee.
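A minimal usage sketch (the URL is a placeholder; the certificate and key
arguments of `create_client/3` default to `nil`):
```elixir
client = Lexdee.create_client("https://example.com:8443")
Lexdee.list_instances(client)
```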
"""
@doc """
Hello world.
## Examples
iex> Lexdee.hello()
:world
"""
alias Lexdee.Client
defdelegate create_client(base_url, cert \\ nil, key \\ nil),
to: Client,
as: :new
alias Lexdee.Profiles
defdelegate list_profiles(client), to: Profiles, as: :index
defdelegate get_profile(client, id), to: Profiles, as: :show
defdelegate update_profile(client, id, params), to: Profiles, as: :update
defdelegate create_profile(client, params), to: Profiles, as: :create
alias Lexdee.Certificates
defdelegate list_certificates(client), to: Certificates, as: :index
defdelegate get_certificate(client, id), to: Certificates, as: :show
defdelegate create_certificate(client, params), to: Certificates, as: :create
defdelegate update_certificate(client, id, params),
to: Certificates,
as: :update
alias Lexdee.Cluster
defdelegate get_cluster(client), to: Cluster, as: :show
alias Cluster.Members
defdelegate get_cluster_members(client), to: Members, as: :index
defdelegate get_cluster_member(client, id), to: Members, as: :show
alias Lexdee.Instances
defdelegate execute_command(client, id, commands, opts \\ []),
to: Instances.Exec,
as: :perform
defdelegate list_instances(client), to: Instances, as: :index
defdelegate get_instance(client, id), to: Instances, as: :show
defdelegate delete_instance(client, id), to: Instances, as: :remove
defdelegate create_instance(client, params, opts \\ []),
to: Instances,
as: :create
defdelegate show_instance_log(client, instance, file_name, opts \\ []),
to: Instances.Logs,
as: :show
defdelegate get_state(client, id), to: Instances.State, as: :show
defdelegate start_instance(client, id, options \\ []),
to: Instances.State,
as: :start
defdelegate stop_instance(client, id, options \\ []),
to: Instances.State,
as: :stop
defdelegate restart_instance(client, id, options \\ []),
to: Instances.State,
as: :restart
defdelegate create_file(client, id, path, content),
to: Instances.Files,
as: :create
alias Lexdee.Operations
defdelegate get_operation(client, id), to: Operations, as: :show
defdelegate wait_for_operation(client, id, options \\ []),
to: Operations,
as: :wait
end
|
lib/lexdee.ex
| 0.684475
| 0.457379
|
lexdee.ex
|
starcoder
|
defmodule Chameleon.HSV do
@enforce_keys [:h, :s, :v]
defstruct @enforce_keys
@moduledoc """
HSV (hue, saturation, value) represents colors using a cylinder where colors
are sorted by angles and then adjusted via the saturation and value
parameters.
See Chameleon.HSL for a related, but different colorspace.
"""
@type degrees() :: 0..360
@type percent() :: 0..100
@type t() :: %__MODULE__{h: degrees(), s: percent(), v: percent()}
@doc """
Create a new HSV color.
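## Example
iex> Chameleon.HSV.new(120, 100, 100)
%Chameleon.HSV{h: 120, s: 100, v: 100}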
"""
def new(h, s, v), do: %__MODULE__{h: h, s: s, v: v}
end
defmodule Chameleon.HSV.Chameleon.RGB do
defstruct [:from]
alias Chameleon.RGB
@moduledoc false
defimpl Chameleon.Color do
def convert(%{from: hsv}) do
hsv_to_rgb(hsv.h, hsv.s, hsv.v)
end
defp hsv_to_rgb(_h, s, v) when s <= 0 do
a = round(255 * v / 100)
RGB.new(a, a, a)
end
defp hsv_to_rgb(h, s, v) do
h_sixths = h / 60
h_sector = round(:math.floor(h_sixths))
h_offset = h_sixths - h_sector
s = s / 100
v = v / 100
x = round(255 * v * (1 - s))
y = round(255 * v * (1 - s * h_offset))
z = round(255 * v * (1 - s * (1 - h_offset)))
w = round(255 * v)
hsv_sector_to_rgb(h_sector, x, y, z, w)
end
defp hsv_sector_to_rgb(0, x, _y, z, w), do: RGB.new(w, z, x)
defp hsv_sector_to_rgb(1, x, y, _z, w), do: RGB.new(y, w, x)
defp hsv_sector_to_rgb(2, x, _y, z, w), do: RGB.new(x, w, z)
defp hsv_sector_to_rgb(3, x, y, _z, w), do: RGB.new(x, y, w)
defp hsv_sector_to_rgb(4, x, _y, z, w), do: RGB.new(z, x, w)
defp hsv_sector_to_rgb(5, x, y, _z, w), do: RGB.new(w, x, y)
end
end
defmodule Chameleon.RGB.Chameleon.HSV do
defstruct [:from]
alias Chameleon.HSV
@moduledoc false
defimpl Chameleon.Color do
def convert(%{from: rgb}) do
r = rgb.r / 255
g = rgb.g / 255
b = rgb.b / 255
c_max = max(r, max(g, b))
c_min = min(r, min(g, b))
delta = c_max - c_min
h = hue(delta, c_max, r, g, b) |> normalize_degrees() |> round()
s = round(saturation(delta, c_max) * 100)
v = round(c_max * 100)
HSV.new(h, s, v)
end
defp hue(delta, _, _, _, _) when delta <= 0, do: 0
defp hue(delta, r, r, g, b), do: 60 * rem(round((g - b) / delta), 6)
defp hue(delta, g, r, g, b), do: 60 * ((b - r) / delta + 2)
defp hue(delta, b, r, g, b), do: 60 * ((r - g) / delta + 4)
defp saturation(_delta, c_max) when c_max <= 0, do: 0
defp saturation(delta, c_max), do: delta / c_max
defp normalize_degrees(degrees) when degrees < 0, do: degrees + 360
defp normalize_degrees(degrees), do: degrees
end
end
|
lib/chameleon/hsv.ex
| 0.891233
| 0.439988
|
hsv.ex
|
starcoder
|
defmodule Day6 do
def from_file(path) do
File.stream!(path)
|> Enum.map(
&(
String.split(&1, ",")
|> Enum.map(
fn x ->
x
|> String.trim
|> String.to_integer end
)
|> List.to_tuple)
)
end
def largest_area(points) do
areas(points)
|> Enum.map(fn {_, area} -> length(area) end)
|> Enum.max
end
def areas(points) do
bounds = bounds(points)
bounded_coordinates(bounds)
|> Enum.reduce(
%{},
fn coordinate, map ->
map
|> Map.put(
coordinate,
nearest_neighbour(points, coordinate)
)
end
)
|> Enum.filter(fn {_, v} -> v != nil end)
|> Enum.group_by(fn {_, v} -> v end)
|> Enum.map(fn {k, v} -> {k, Enum.map(v, fn {p, _} -> p end)} end)
|> Enum.filter(fn {_, v} -> finite?(v, bounds) end)
end
def finite?(points, bounds) do
!infinite?(points, bounds)
end
def infinite?(points, {min_x, max_x, min_y, max_y}) do
points |> Enum.find(fn {x, y} -> x <= min_x || x >= max_x || y <= min_y || y >= max_y end)
end
def bounded_coordinates({min_x, max_x, min_y, max_y}) do
for y <- min_y..max_y, x <- min_x..max_x, do: {x, y}
end
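# Returns the nearest of `points` to `point`, or nil when the two closest
# points are tied (equidistant).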
def nearest_neighbour(points, point) do
[{{x, y}, d1}, {_, d2}] = points
|> Enum.map(fn p -> {p, distance(p, point)} end)
|> Enum.sort(fn {_, d1}, {_, d2} -> d1 <= d2 end)
|> Enum.take(2)
if d1 == d2 do
nil
else
{x, y}
end
end
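# Manhattan (taxicab) distance between two points.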
def distance({x1, y1}, {x2, y2}) do
abs(x1 - x2) + abs(y1 - y2)
end
def bounds(points) do
min_x = points |> Enum.map(&elem(&1, 0)) |> Enum.min
max_x = points |> Enum.map(&elem(&1, 0)) |> Enum.max
min_y = points |> Enum.map(&elem(&1, 1)) |> Enum.min
max_y = points |> Enum.map(&elem(&1, 1)) |> Enum.max
{min_x, max_x, min_y, max_y}
end
def largest_close(points, max_total_distance) do
bounded_coordinates(bounds(points))
|> Enum.reduce(
%{},
fn coordinate, map ->
map
|> Map.put(
coordinate,
total_distance(points, coordinate)
)
end
)
|> Enum.filter(fn {_, v} -> v < max_total_distance end)
|> length
end
def total_distance(points, coordinate) do
points
|> Enum.map(fn point -> distance(point, coordinate) end)
|> Enum.sum
end
def solution do
IO.puts("#{from_file("day6_input.txt") |> largest_area}")
IO.puts("#{from_file("day6_input.txt") |> largest_close(10000)}")
end
end
|
lib/day6.ex
| 0.689933
| 0.560433
|
day6.ex
|
starcoder
|
defmodule Cocktail.Validation.DayOfMonth do
@moduledoc false
import Cocktail.Validation.Shift
import Cocktail.Util, only: [next_gte: 2]
# It is assumed that DST cannot "take" more than 4 hours between any 2 consecutive days
@min_dst_resultant_hours 20
@type t :: %__MODULE__{days: [Cocktail.day_of_month()]}
@enforce_keys [:days]
defstruct days: []
@spec new([Cocktail.day()]) :: t
def new(days), do: %__MODULE__{days: days}
@spec next_time(t, Cocktail.time(), Cocktail.time()) :: Cocktail.Validation.Shift.result()
def next_time(%__MODULE__{days: days}, time, _) do
current_day_of_month = time.day
normalized_days =
days
|> Enum.sort()
|> Enum.map(&normalize_day_of_month(&1, time))
diff =
case next_gte(normalized_days, current_day_of_month) do
# go to next month
nil ->
next_month_time =
time
|> Timex.shift(months: 1)
next_month_normalized_days = Enum.map(days, &normalize_day_of_month(&1, next_month_time))
next_month_earliest_day = Timex.set(next_month_time, day: hd(Enum.sort(next_month_normalized_days)))
dst_accounted_days_diff(next_month_earliest_day, time)
next_earliest_day_of_month ->
next_earliest_day_of_month - current_day_of_month
end
shift_by(diff, :days, time, :beginning_of_day)
end
defp normalize_day_of_month(day_of_month, current_time) do
do_normalize_day_of_month(day_of_month, Timex.days_in_month(current_time))
end
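# Days past the end of the month clamp to the last day; negative values count
# back from the end of the month (-1 is the last day), clamped to day 1.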
defp do_normalize_day_of_month(day_of_month, days_in_month) when day_of_month > days_in_month do
days_in_month
end
defp do_normalize_day_of_month(day_of_month, _days_in_month) when day_of_month > 0 do
day_of_month
end
defp do_normalize_day_of_month(day_of_month, days_in_month) when -day_of_month > days_in_month do
1
end
defp do_normalize_day_of_month(day_of_month, days_in_month) when day_of_month < 0 do
days_in_month + day_of_month + 1
end
defp dst_accounted_days_diff(next_month_earliest_day, time) do
case Timex.diff(next_month_earliest_day, time, :days) do
0 ->
# get the hours diff to ensure we are not falling short because of DST
if Timex.diff(next_month_earliest_day, time, :hours) > @min_dst_resultant_hours do
1
else
0
end
days_diff ->
days_diff
end
end
end
|
lib/cocktail/validation/day_of_month.ex
| 0.718199
| 0.48932
|
day_of_month.ex
|
starcoder
|
defmodule Coxir.Gateway.Worker do
@moduledoc false
use WebSockex
alias Coxir.Stage.Producer
def start_link(state) do
state = state
|> Map.merge(
%{
beat: nil,
session: nil,
sequence: nil
}
)
WebSockex.start_link(state.gateway, __MODULE__, state)
end
def handle_disconnect(%{reason: _reason}, state) do
:timer.cancel(state.beat)
{:reconnect, %{state | beat: nil}}
end
def handle_info(:heartbeat, state) do
data = state.sequence
|> payload(1)
{:reply, {:text, data}, state}
end
def handle_info({:send, opcode, data}, state) do
data = data
|> payload(opcode)
{:reply, {:text, data}, state}
end
def handle_info(_event, state), do: {:ok, state}
def handle_frame({:text, data}, state) do
data
|> parse
|> dispatch(state)
end
def handle_frame({:binary, data}, state) do
data
|> :zlib.uncompress
|> parse
|> dispatch(state)
end
def handle_frame(_frame, state), do: {:ok, state}
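# Opcode 10 (gateway hello): start the heartbeat timer, then identify
# (no session yet) or resume the existing session.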
def dispatch(%{op: 10, d: data}, state) do
state = \
case state.beat do
nil ->
beat = :timer.send_interval(
data.heartbeat_interval,
:heartbeat
)
%{state | beat: beat}
_other ->
state
end
data = \
case state.session do
nil ->
{family, _name} = :os.type
%{
token: state.token,
properties: %{
"$os": family,
"$device": "coxir",
"$browser": "coxir"
},
compress: true,
large_threshold: 250,
shard: state.shard
}
|> payload(2)
session ->
%{
token: state.token,
session_id: session,
seq: state.sequence
}
|> payload(6)
end
{:reply, {:text, data}, state}
end
def dispatch(%{op: 7}, state) do
{:close, state}
end
def dispatch(%{op: 9}, state) do
{:close, %{state | session: nil}}
end
def dispatch(%{op: 1}, state) do
handle_info(:heartbeat, state)
end
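# Opcode 0 (dispatch): forward the event to the producer stage and record the
# sequence number used for heartbeats and resumes.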
def dispatch(%{op: 0, t: name, d: data, s: sequence}, state) do
Producer.notify %{
t: String.to_atom(name),
d: data
}
{:ok, %{state | sequence: sequence}}
end
def dispatch(_data, state), do: {:ok, state}
defp parse(term) do
term
|> Jason.decode!(keys: :atoms)
end
defp encode(term) do
term
|> Jason.encode!
end
defp payload(data, op) do
%{op: op, d: data}
|> encode
end
end
|
lib/coxir/gateway/worker.ex
| 0.6705
| 0.483222
|
worker.ex
|
starcoder
|
defmodule LdGraph2.Graph do
@moduledoc """
A directed graph.
"""
defstruct nodes: %{}
### =========================================================================
### Putting new nodes and edges
### =========================================================================
@spec put_node(%LdGraph2.Graph{}, any) :: %LdGraph2.Graph{}
@doc """
Creates a node with the specified key, not connected to any other nodes.
## Examples
```elixir
iex> %LdGraph2.Graph{} |> LdGraph2.Graph.put_node(0) ===
...> %LdGraph2.Graph{nodes: %{0 => []}}
true
```
"""
def put_node(graph = %{nodes: nodes}, at) do
%LdGraph2.Graph{graph | nodes: nodes |> Map.put_new(at, [])}
end
@spec put_edge(%LdGraph2.Graph{}, any, any) :: %LdGraph2.Graph{}
@doc """
Creates an edge pointing from the specified key to another specified key.
Note that each edge points to and from nodes that already exist, and each
edge must be unique.
## Examples
```elixir
iex> %LdGraph2.Graph{}
...> |> LdGraph2.Graph.put_node(0)
...> |> LdGraph2.Graph.put_node(1)
...> |> LdGraph2.Graph.put_edge(0, 1)
%LdGraph2.Graph{nodes: %{0 => [1], 1 => []}}
```
"""
def put_edge(graph = %{nodes: nodes}, from, to) when is_map_key(nodes, from) do
node_at = nodes[from]
%LdGraph2.Graph{
graph
| nodes: %{nodes | from => node_at |> MapSet.new() |> MapSet.put(to) |> MapSet.to_list()}
}
end
### =========================================================================
### Deleting old nodes and edges
### =========================================================================
@spec del_node(%LdGraph2.Graph{}, any) :: %LdGraph2.Graph{}
@doc """
Deletes the specified node if it exists. Otherwise, the graph is left as is.
## Examples
```elixir
iex> %LdGraph2.Graph{}
...> |> LdGraph2.Graph.put_node(0)
...> |> LdGraph2.Graph.del_node(0)
%LdGraph2.Graph{nodes: %{}}
```
"""
def del_node(graph = %{nodes: nodes}, at) do
%LdGraph2.Graph{graph | nodes: nodes |> Map.delete(at)}
end
@spec del_edge(%LdGraph2.Graph{}, any, any) :: %LdGraph2.Graph{}
@doc """
Deletes the specified edge connection, if the connection to the destination
exists. Otherwise, the graph is left as is.
If the node we are connecting from does not exist, an error will be thrown.
## Examples
```elixir
iex> %LdGraph2.Graph{}
...> |> LdGraph2.Graph.put_node(0)
...> |> LdGraph2.Graph.put_node(1)
...> |> LdGraph2.Graph.put_edge(0, 1)
...> |> LdGraph2.Graph.del_edge(0, 1)
%LdGraph2.Graph{nodes: %{0 => [], 1 => []}}
```
"""
def del_edge(graph = %{nodes: nodes}, from, to) when is_map_key(nodes, from) do
node_at = nodes[from]
%LdGraph2.Graph{
graph
| nodes: %{
nodes
| from =>
node_at
|> MapSet.new()
|> MapSet.delete(to)
|> MapSet.to_list()
}
}
end
end
|
apps/ld_graph2/lib/ld_graph2/graph.ex
| 0.889211
| 0.910823
|
graph.ex
|
starcoder
|
defmodule Canvas.Resources.Users do
@moduledoc """
Provides functions to interact with the
[user endpoints](https://canvas.instructure.com/doc/api/users).
"""
alias Canvas.{Client, Listing, Response}
alias Canvas.Resources.User
@doc """
A paginated list of users associated with this account.
See:
- https://canvas.instructure.com/doc/api/users#method.users.index
## Examples:
client = %Canvas.Client{access_token: "<KEY>", base_url: "https://instructure.test"}
{:ok, response} = Canvas.Resources.Users.list_users_in_account(client, account_id = 1)
"""
@spec list_users_in_account(Client.t(), String.t() | integer, Keyword.t()) ::
{:ok | :error, Response.t()}
def list_users_in_account(client, account_id, options \\ []) do
url = Client.versioned("/accounts/#{account_id}/users")
Listing.get(client, url, options)
|> Response.parse([%User{}])
end
@doc """
Lists all users associated with this account, automatically following pagination.
See:
- https://canvas.instructure.com/doc/api/users#method.users.index
## Examples:
client = %Canvas.Client{access_token: "<PASSWORD>c3d4", base_url: "https://instructure.test"}
{:ok, response} = Canvas.Resources.Users.all_users_in_account(client, account_id = 1)
"""
@spec all_users_in_account(Client.t(), String.t() | integer, Keyword.t()) ::
{:ok, list(%User{})} | {:error, Response.t()}
def all_users_in_account(client, account_id, options \\ []) do
Listing.get_all(__MODULE__, :list_users_in_account, [client, account_id, options])
end
def list_the_activity_stream() do
end
def activity_stream_summary() do
end
def list_the_todo_items() do
end
def list_counts_for_todo_items() do
end
def list_upcoming_assignments_calendar_events() do
end
def list_missing_submissions() do
end
def hide_a_stream_item() do
end
def hide_all_stream_items() do
end
def upload_a_file() do
end
def show_user_details() do
end
def create_a_user() do
end
def self_register_a_user() do
end
def update_user_settings() do
end
def get_custom_colors() do
end
def get_custom_color() do
end
def update_custom_color() do
end
def get_dashboard_positions() do
end
def update_dashboard_positions() do
end
def edit_a_user() do
end
def merge_user_into_another_user() do
end
def split_merged_users_into_separate_users() do
end
def get_a_pandata_events_jwt_token_and_its_expiration_date() do
end
def get_a_users_most_recently_graded_submissions() do
end
def get_user_profilelist_avatar_options() do
end
def list_user_page_views() do
end
end
|
lib/canvas/resources/users.ex
| 0.754553
| 0.414928
|
users.ex
|
starcoder
|
defmodule Calcy.Evaluator do
@moduledoc """
Documentation for the Evaluator.
"""
import IO.ANSI
@doc """
Returns the value of an addition operation between the left and right hand sides
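## Example
iex> Calcy.Evaluator.eval(%{}, {:plus, {:int, 1}, {:int, 2}})
3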
"""
def eval(env, {:plus, right, left}) do
eval(env, right) + eval(env, left)
end
@doc """
Returns the value of a subtraction operation between the left and right hand sides
"""
def eval(env, {:minus, right, left}) do
eval(env, right) - eval(env, left)
end
@doc """
Returns the value of a multiplication operation between the left and right hand sides
"""
def eval(env, {:mult, right, left}) do
eval(env, right) * eval(env, left)
end
@doc """
Returns the value of a division operation between the left and right hand sides
"""
def eval(env, {:divi, right, left}) do
case eval(env, left) do
0 -> "Cannot divide by zero"
val -> eval(env, right) / val
end
end
@doc """
Returns the value of a power operation between the left and right hand sides
"""
def eval(env, {:pow, right, left}) do
:math.pow(eval(env, right), eval(env, left))
end
@doc """
Returns the value of the integer
"""
def eval(_env, {:int, int}) do
int
end
@doc """
Returns the value of the float
"""
def eval(_env, {:float, float}) do
float
end
@doc """
Returns the value of the identifier
"""
def eval(env, {:ident, name}) do
lookup(name, env)
end
@doc """
Assigns the value of expr to name and saves it in the environment
"""
def eval(env, {:assign, name, expr}) do
val = eval(env, expr)
IO.inspect name
if name == :pi, do: raise "Cannot change pi"
if name == :e, do: raise "Cannot change e"
{Map.put(env, name, val), val}
end
@doc """
Prints the environment
"""
def eval(env, {:stack}) do
Enum.each env, fn {k, v} ->
IO.puts blue() <> "#{k}: " <> reset() <> "\t#{v}"
end
nil
end
@doc """
Returns an exit atom if the current node is exit
"""
def eval(_env, {:exit}) do
:exit
end
@doc """
Looks up the variable in the environment
Throws an error if the variable is not defined
"""
def lookup(name, env) do
case Map.get(env, name) do
nil -> raise "#{name} has not been declared"
val -> val
end
end
end
|
lib/calcy/evaluator.ex
| 0.845656
| 0.821796
|
evaluator.ex
|
starcoder
|
defmodule MPEGAudioFrameParser.Frame do
defstruct version_id: nil,
crc_protection: false,
bitrate: nil,
layer: nil,
sample_rate: nil,
padding: 0,
channel_mode: nil,
data: <<>>,
valid: false,
complete: false
alias MPEGAudioFrameParser.Frame
require Logger
@sync_word 0b11111111111
@header_length 32
def from_header(header)
when is_binary(header)
and bit_size(header) == @header_length
do
frame = %Frame{data: header}
|> Map.put(:version_id, parse_version(header))
|> Map.put(:layer, parse_layer(header))
|> Map.put(:crc_protection, parse_crc_protection(header))
|> Map.put(:bitrate, parse_bitrate(header))
|> Map.put(:sample_rate, parse_sample_rate(header))
|> Map.put(:padding, parse_padding(header))
%{frame | valid: header_valid?(frame)}
end
def header_valid?(%Frame{version_id: version_id, layer: layer, bitrate: bitrate, sample_rate: sample_rate})
when version_id != :reserved
and layer != :reserved
and bitrate != :bad
and sample_rate != :bad
do
true
end
def header_valid?(%Frame{}), do: false
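# Frame length in bytes: (samples per frame / 8) * bitrate / sample rate, plus padding.
# For example, MPEG1 Layer3 at 128 kbps and 44100 Hz: 144 * 128_000 / 44_100, truncated to 417 bytes.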
def frame_length(%Frame{bitrate: bitrate, sample_rate: sample_rate} = frame)
when is_integer(bitrate)
and is_integer(sample_rate)
do
bits_per_frame = samples_per_frame(frame) / 8
(bits_per_frame * (frame.bitrate * 1000) / frame.sample_rate + frame.padding)
|> trunc
end
def frame_length(%Frame{}), do: 0
def add_bytes(frame, packet) do
limit = bytes_missing(frame)
{:ok, bytes, rest, complete} = split_packet(packet, limit)
{:ok, %{frame | data: frame.data <> bytes, complete: complete}, rest}
end
def bytes_missing(frame) do
(frame_length(frame) - byte_size(frame.data))
|> max(0)
end
# Private Functions
defp split_packet(packet, limit) do
bytes_available = byte_size(packet)
bytes_to_take = min(bytes_available, limit)
bytes_to_leave = bytes_available - bytes_to_take
part1 = :binary.part(packet, {0, bytes_to_take})
part2 = :binary.part(packet, {bytes_available, -bytes_to_leave})
{:ok, part1, part2, bytes_to_take == limit}
end
defp parse_version(<<@sync_word::size(11), bits::size(2), _::bits>>), do: version_atom(bits)
defp version_atom(0b11), do: :version1
defp version_atom(0b10), do: :version2
defp version_atom(0b00), do: :"version2.5"
defp version_atom(0b01), do: :reserved
defp parse_layer(<<@sync_word::size(11), _::size(2), bits::size(2), _::bits>>), do: layer_atom(bits)
defp layer_atom(0b11), do: :layer1
defp layer_atom(0b10), do: :layer2
defp layer_atom(0b01), do: :layer3
defp layer_atom(0b00), do: :reserved
defp parse_crc_protection(<<@sync_word::size(11), _::size(4), 0b0::size(1), _::bits>>), do: false
defp parse_crc_protection(<<@sync_word::size(11), _::size(4), 0b1::size(1), _::bits>>), do: true
defp parse_bitrate(<<@sync_word::size(11), version_bits::size(2), layer_bits::size(2), _::size(1), bitrate_bits::size(4), _::bits>>) do
version_atom = version_atom(version_bits)
layer_atom = layer_atom(layer_bits)
case {version_atom, layer_atom, bitrate_bits} do
# V1, L1
{:version1, :layer1, 0b0001} -> 32
{:version1, :layer1, 0b0010} -> 64
{:version1, :layer1, 0b0011} -> 96
{:version1, :layer1, 0b0100} -> 128
{:version1, :layer1, 0b0101} -> 160
{:version1, :layer1, 0b0110} -> 192
{:version1, :layer1, 0b0111} -> 224
{:version1, :layer1, 0b1000} -> 256
{:version1, :layer1, 0b1001} -> 288
{:version1, :layer1, 0b1010} -> 320
{:version1, :layer1, 0b1011} -> 352
{:version1, :layer1, 0b1100} -> 384
{:version1, :layer1, 0b1101} -> 416
{:version1, :layer1, 0b1110} -> 448
# V1, L2
{:version1, :layer2, 0b0001} -> 32
{:version1, :layer2, 0b0010} -> 48
{:version1, :layer2, 0b0011} -> 56
{:version1, :layer2, 0b0100} -> 64
{:version1, :layer2, 0b0101} -> 80
{:version1, :layer2, 0b0110} -> 96
{:version1, :layer2, 0b0111} -> 112
{:version1, :layer2, 0b1000} -> 128
{:version1, :layer2, 0b1001} -> 160
{:version1, :layer2, 0b1010} -> 192
{:version1, :layer2, 0b1011} -> 224
{:version1, :layer2, 0b1100} -> 256
{:version1, :layer2, 0b1101} -> 320
{:version1, :layer2, 0b1110} -> 384
# V1, L3
{:version1, :layer3, 0b0001} -> 32
{:version1, :layer3, 0b0010} -> 40
{:version1, :layer3, 0b0011} -> 48
{:version1, :layer3, 0b0100} -> 56
{:version1, :layer3, 0b0101} -> 64
{:version1, :layer3, 0b0110} -> 80
{:version1, :layer3, 0b0111} -> 96
{:version1, :layer3, 0b1000} -> 112
{:version1, :layer3, 0b1001} -> 128
{:version1, :layer3, 0b1010} -> 160
{:version1, :layer3, 0b1011} -> 192
{:version1, :layer3, 0b1100} -> 224
{:version1, :layer3, 0b1101} -> 256
{:version1, :layer3, 0b1110} -> 320
# V2, L1
{version, :layer1, 0b0001} when version in [:version2, :"version2.5"] -> 32
{version, :layer1, 0b0010} when version in [:version2, :"version2.5"] -> 48
{version, :layer1, 0b0011} when version in [:version2, :"version2.5"] -> 56
{version, :layer1, 0b0100} when version in [:version2, :"version2.5"] -> 64
{version, :layer1, 0b0101} when version in [:version2, :"version2.5"] -> 80
{version, :layer1, 0b0110} when version in [:version2, :"version2.5"] -> 96
{version, :layer1, 0b0111} when version in [:version2, :"version2.5"] -> 112
{version, :layer1, 0b1000} when version in [:version2, :"version2.5"] -> 128
{version, :layer1, 0b1001} when version in [:version2, :"version2.5"] -> 144
{version, :layer1, 0b1010} when version in [:version2, :"version2.5"] -> 160
{version, :layer1, 0b1011} when version in [:version2, :"version2.5"] -> 176
{version, :layer1, 0b1100} when version in [:version2, :"version2.5"] -> 192
{version, :layer1, 0b1101} when version in [:version2, :"version2.5"] -> 224
{version, :layer1, 0b1110} when version in [:version2, :"version2.5"] -> 256
# V2, L2/L3
{version, _, 0b0001} when version in [:version2, :"version2.5"] -> 8
{version, _, 0b0010} when version in [:version2, :"version2.5"] -> 16
{version, _, 0b0011} when version in [:version2, :"version2.5"] -> 24
{version, _, 0b0100} when version in [:version2, :"version2.5"] -> 32
{version, _, 0b0101} when version in [:version2, :"version2.5"] -> 40
{version, _, 0b0110} when version in [:version2, :"version2.5"] -> 48
{version, _, 0b0111} when version in [:version2, :"version2.5"] -> 56
{version, _, 0b1000} when version in [:version2, :"version2.5"] -> 64
{version, _, 0b1001} when version in [:version2, :"version2.5"] -> 80
{version, _, 0b1010} when version in [:version2, :"version2.5"] -> 96
{version, _, 0b1011} when version in [:version2, :"version2.5"] -> 112
{version, _, 0b1100} when version in [:version2, :"version2.5"] -> 128
{version, _, 0b1101} when version in [:version2, :"version2.5"] -> 144
{version, _, 0b1110} when version in [:version2, :"version2.5"] -> 160
_ -> :bad
end
end
defp parse_sample_rate(<<@sync_word::size(11), version_bits::size(2), _::size(7), sample_rate_bits::size(2), _::bits>>) do
case {version_bits, sample_rate_bits} do
{0b11, 0b00} -> 44100
{0b11, 0b01} -> 48000
{0b11, 0b10} -> 32000
{0b10, 0b00} -> 22050
{0b10, 0b01} -> 24000
{0b10, 0b10} -> 16000
{0b00, 0b00} -> 11025
{0b00, 0b01} -> 12000
{0b00, 0b10} -> 8000
_ -> :bad
end
end
defp parse_padding(<<@sync_word::size(11), _::size(11), 0b0::size(1), _::bits>>), do: 0
defp parse_padding(<<@sync_word::size(11), _::size(11), 0b1::size(1), _::bits>>), do: 1
defp samples_per_frame(%Frame{layer: :layer1}), do: 384
defp samples_per_frame(%Frame{layer: :layer2}), do: 1152
defp samples_per_frame(%Frame{layer: :layer3, version_id: :version1}), do: 1152
defp samples_per_frame(%Frame{layer: :layer3, version_id: _}), do: 576
defp samples_per_frame(%Frame{}), do: 0
end
|
lib/mpeg_audio_frame_parser/frame.ex
| 0.566378
| 0.554048
|
frame.ex
|
starcoder
|
defmodule Swiss.Enum do
@moduledoc """
Helper functions for dealing with Enumerables.
"""
alias Swiss.Enum.KeyValueError
@doc """
Finds the first element in `enumerable` where its `key` equals `value`.
Returns `default` if not found.
### Examples
iex> Swiss.Enum.find_by([%{life: 11}, %{life: 42}], :life, 42)
%{life: 42}
iex> Swiss.Enum.find_by([%{life: 11}, %{life: 42}], :wat, 42)
nil
iex> Swiss.Enum.find_by([%{life: 11}, %{life: 42}], 42, :wat, 42)
42
iex> Swiss.Enum.find_by([%Swiss.TestStruct{life: 42}], :life, 42)
%Swiss.TestStruct{life: 42}
"""
@spec find_by(Enumerable.t(), any(), any(), any()) :: any()
def find_by(enumerable, default \\ nil, key, value) do
Enum.find(enumerable, default, fn
%_{} = el -> Map.get(el, key) == value
el -> el[key] == value
end)
end
@doc """
Finds the first element in `enumerable` where its `key` equals `value`.
Raises if not found.
### Examples
iex> Swiss.Enum.find_by!([%{life: 11}, %{life: 42}], :life, 42)
%{life: 42}
iex> Swiss.Enum.find_by!([%{life: 11}, %{life: 42}], :wat, 42)
** (Swiss.Enum.KeyValueError) key :wat with value 42 not found in: [%{life: 11}, %{life: 42}]
"""
@spec find_by!(Enumerable.t(), any(), any()) :: any()
def find_by!(enumerable, key, value) do
case Swiss.Enum.find_by(enumerable, :not_found, key, value) do
:not_found -> raise %KeyValueError{key: key, value: value, term: enumerable}
el -> el
end
end
@doc """
Finds an element and its index in `enumerable` for which `fun` returns true.
### Examples
iex> Swiss.Enum.find_both([42, 44, 46], fn num -> num == 44 end)
{44, 1}
iex> Swiss.Enum.find_both([42, 44, 46], fn num -> num == 45 end)
{nil, nil}
"""
def find_both(enumerable, fun) do
enumerable
|> Stream.with_index()
|> Enum.reduce_while({nil, nil}, fn {el, idx}, {nil, nil} ->
if fun.(el),
do: {:halt, {el, idx}},
else: {:cont, {nil, nil}}
end)
end
@doc """
Applies `cb` to all elements in `enum`, ignores the return and returns `enum`.
## Examples
iex> Swiss.Enum.thru([1, 2, 3], fn a -> a + 1 end)
[1, 2, 3]
"""
@spec thru(Enumerable.t(), function) :: Enumerable.t()
def thru(enum, cb) do
:ok = Enum.each(enum, cb)
enum
end
@doc """
Same as `Enum.group_by/3` but expects each group to have a single element, and
therefore returns only that element per key, instead of a list.
## Examples
iex> Swiss.Enum.group_by_single(
...> [%{k: "life", v: 42}, %{k: "death", v: 13}, %{k: "ooo", v: 0}],
...> & &1.k,
...> & &1.v
...> )
%{"life" => 42, "death" => 13, "ooo" => 0}
"""
@spec group_by_single(Enumerable.t(), (any() -> any()), (any() -> any())) :: map()
def group_by_single(enum, key_fn, value_fn \\ fn x -> x end) do
enum
|> Enum.group_by(key_fn, value_fn)
|> Enum.reduce(%{}, fn {key, [value]}, acc -> Map.put(acc, key, value) end)
end
@doc """
Calculates the average of values in an enumerable. Currently supports maps and
lists only.
## Examples
iex> Swiss.Enum.avg([1, 2, 3, 4])
2.5
iex> Swiss.Enum.avg([%{key: 1}, %{key: 2}, %{key: 3}, %{key: 4}], & &1.key)
2.5
iex> Swiss.Enum.avg(%{a: 1, b: 2, c: 3, d: 4}, &elem(&1, 1))
2.5
iex> Swiss.Enum.avg(%{})
0
iex> Swiss.Enum.avg([])
0
"""
@spec avg(list() | map(), (any() -> number())) :: number()
def avg(enum, mapper \\ & &1)
def avg([], _),
do: 0
def avg(list, mapper) when is_list(list) do
Enum.reduce(list, 0, &(mapper.(&1) + &2)) / length(list)
end
def avg(map, _) when is_map(map) and map_size(map) == 0,
do: 0
def avg(map, mapper) when is_map(map) do
Enum.reduce(map, 0, &(mapper.(&1) + &2)) / map_size(map)
end
@doc """
Finds the index of a value inside an enumerable.
## Examples
iex> Swiss.Enum.index_of([1, 2, 3, 4], 3)
2
iex> Swiss.Enum.index_of([1, 2, 3, 4], 1)
0
iex> Swiss.Enum.index_of([1, 2, 3, 4], 5)
nil
"""
@spec index_of(Enumerable.t(), any()) :: non_neg_integer() | nil
def index_of(enum, value) do
Enum.find_index(enum, &(&1 == value))
end
end
|
lib/swiss/enum.ex
| 0.875168
| 0.578746
|
enum.ex
|
starcoder
|
defmodule NervesTime.RTC.Abracon do
@moduledoc """
Abracon RTC implementation for NervesTime
To configure NervesTime to use this module, update the `:nerves_time` application
environment like this:
```elixir
config :nerves_time, rtc: NervesTime.RTC.Abracon
```
If not using `"i2c-1"` or the default I2C bus address, specify them like this:
```elixir
config :nerves_time, rtc: {NervesTime.RTC.Abracon, [bus_name: "i2c-2", address: 0x69]}
```
Check the logs for error messages if the RTC doesn't appear to work.
See https://abracon.com/Support/AppsManuals/Precisiontiming/Application%20Manual%20AB-RTCMC-32.768kHz-IBO5-S3.pdf
for implementation details.
"""
@behaviour NervesTime.RealTimeClock
require Logger
alias Circuits.I2C
alias NervesTime.RTC.Abracon.{Date, ID}
@default_bus_name "i2c-1"
@default_address 0x69
@typedoc false
@type state :: %{
i2c: I2C.bus(),
bus_name: String.t(),
address: I2C.address()
}
@impl NervesTime.RealTimeClock
def init(args) do
bus_name = Keyword.get(args, :bus_name, @default_bus_name)
address = Keyword.get(args, :address, @default_address)
with {:ok, i2c} <- I2C.open(bus_name),
:ok <- probe(i2c, address) do
{:ok, %{i2c: i2c, bus_name: bus_name, address: address}}
end
end
@impl NervesTime.RealTimeClock
def terminate(_state), do: :ok
@impl NervesTime.RealTimeClock
def set_time(state, now) do
with {:ok, registers} <- Date.encode(now),
:ok <- I2C.write(state.i2c, state.address, [0, registers]) do
state
else
error ->
_ = Logger.error("Error setting Abracon RTC to #{inspect(now)}: #{inspect(error)}")
state
end
end
@impl NervesTime.RealTimeClock
def get_time(state) do
with {:ok, registers} <- I2C.write_read(state.i2c, state.address, <<0>>, 7),
{:ok, time} <- Date.decode(registers) do
{:ok, time, state}
else
any_error ->
_ = Logger.error("Abracon RTC not set or has an error: #{inspect(any_error)}")
{:unset, state}
end
end
@spec probe(I2C.bus(), I2C.address()) :: :ok | {:error, String.t()}
defp probe(i2c, address) do
case I2C.write_read(i2c, address, <<0x28>>, 7) do
{:ok, id_info} ->
check_id(ID.decode(id_info))
{:error, :i2c_nak} ->
{:error, "RTC not found at #{address}"}
end
end
defp check_id({:ok, %{id: :ab_rtcmc_32768khz_ibo5_s3}}), do: :ok
defp check_id(other), do: {:error, "Unexpected response when probing RTC: #{inspect(other)}"}
end
|
lib/nerves_time/rtc/abracon.ex
| 0.781872
| 0.771456
|
abracon.ex
|
starcoder
|
defmodule Ambi do
@moduledoc """
Ambi keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
import Ecto.Query, warn: false
use Timex
require Logger
alias Ambi.Repo
alias Phoenix.PubSub
def add_reading(attrs \\ %{}) do
result =
%Ambi.Reading{}
|> Ambi.Reading.changeset(attrs)
|> Repo.insert()
# broadcast_change(:added)
# Logger.debug "Added new reading and called broadcast_change()"
Logger.debug("Added new reading to the DB")
result
end
# This function looks for a single row in the reading_metadata table and
# updates it with the values passed in from the sensor client
def set_reading_metadata(attrs \\ %{}) do
Logger.debug("reading_metadata: #{inspect(attrs)}")
id = 1
result =
case Repo.get(Ambi.ReadingMetadata, id) do
# ReadingMetadata instance not found, we build one
nil -> %Ambi.ReadingMetadata{id: id}
# ReadingMetadata instance exists, let's use it
reading_metadata -> reading_metadata
end
|> Ambi.ReadingMetadata.changeset(attrs)
|> Repo.insert_or_update()
Logger.debug("result: #{inspect(result)}")
# case result do
# {:ok, _struct} -> Logger.debug "** Set reading metadata successfully"
# {:error, _changeset} -> Logger.error "** Failed to update the ReadingMetadata table"
# end
end
def get_reading_metadata() do
# Get the first entry, id: 1
Repo.get(Ambi.ReadingMetadata, 1)
end
def get_timestamp_resolution_seconds() do
md = get_reading_metadata()
md.timestamp_resolution_seconds
end
def reset_readings() do
Repo.reset_readings(Ambi.Reading)
end
def get_reading() do
%{
temperature: get_temp(),
humidity: get_humidity(),
pressure: get_pressure(),
dust_concentration: get_dust_concentration(),
air_purity: get_air_purity(),
average_temp: get_average_temp(),
average_temp_24hrs: get_average_temp_24hrs(),
max_temp: get_max_temp(),
min_temp: get_min_temp(),
average_humidity: get_average_humidity(),
average_humidity_24hrs: get_average_humidity_24hrs(),
max_humidity: get_max_humidity(),
min_humidity: get_min_humidity(),
readings_count: get_readings_count(),
last_inserted_at: get_last_inserted_at(),
first_inserted_at: get_first_inserted_at()
}
end
def get_temp() do
get_last_row().temperature
end
def get_humidity() do
get_last_row().humidity
end
def get_pressure() do
get_last_row().pressure
end
def get_dust_concentration() do
get_last_row().dust_concentration
end
def get_air_purity() do
get_last_row().air_purity
end
# Gets the average temperature from the DB over all sensor readings
def get_average_temp() do
Repo.aggregate(Ambi.Reading, :avg, :temperature)
end
# Gets the average temperature from the DB over the last 24 hour period
def get_average_temp_24hrs() do
one_day_ago = Timex.shift(Timex.now(), hours: -24, minutes: 0)
query =
from r in Ambi.Reading,
order_by: [desc: :inserted_at],
where: r.inserted_at >= ^one_day_ago
Repo.aggregate(query, :avg, :temperature)
end
def get_max_temp() do
Repo.aggregate(Ambi.Reading, :max, :temperature)
end
def get_min_temp() do
Repo.aggregate(Ambi.Reading, :min, :temperature)
end
def get_temperatures_over_120s() do
two_mins_ago = Timex.shift(Timex.now(), hours: 0, minutes: -4)
query =
from r in Ambi.Reading,
order_by: [desc: :inserted_at],
where: r.inserted_at >= ^two_mins_ago,
select: {r.temperature},
# temporary
limit: 25
Repo.all(query)
end
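# `hour` is a negative offset in hours from now, e.g. -24 averages readings in
# the window from 24 to 23 hours ago.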
def get_avg_temperature_over_1hr(hour) do
n_hours_ago = Timex.shift(Timex.now(), hours: hour, minutes: 0)
nm_hours_ago = Timex.shift(Timex.now(), hours: hour + 1, minutes: 0)
query =
from r in Ambi.Reading,
order_by: [desc: :inserted_at],
where: r.inserted_at >= ^n_hours_ago and r.inserted_at < ^nm_hours_ago,
select: {r.temperature}
Repo.aggregate(query, :avg, :temperature)
end
def get_avg_temperatures_over_24hrs() do
# Iterate over the last 24 hours and get the average temperature over each
# 1 hour segment
Enum.map_every(-24..-1, 1, fn hour ->
get_avg_temperature_over_1hr(hour)
end)
end
def get_average_humidity() do
Repo.aggregate(Ambi.Reading, :avg, :humidity)
end
# Gets the average humidity from the DB over the last 24 hour period
def get_average_humidity_24hrs() do
one_day_ago = Timex.shift(Timex.now(), hours: -24, minutes: 0)
query =
from r in Ambi.Reading,
order_by: [desc: :inserted_at],
where: r.inserted_at >= ^one_day_ago
Repo.aggregate(query, :avg, :humidity)
end
def get_max_humidity() do
Repo.aggregate(Ambi.Reading, :max, :humidity)
end
def get_min_humidity() do
Repo.aggregate(Ambi.Reading, :min, :humidity)
end
def get_humidities_over_24hrs() do
twenty_four_hours_ago = Timex.shift(Timex.now(), hours: -24, minutes: 0)
query =
from r in Ambi.Reading,
order_by: [desc: :inserted_at],
where: r.inserted_at >= ^twenty_four_hours_ago,
select: {r.humidity}
Repo.all(query)
end
def get_avg_humidity_over_1hr(hour) do
n_hours_ago = Timex.shift(Timex.now(), hours: hour, minutes: 0)
nm_hours_ago = Timex.shift(Timex.now(), hours: hour + 1, minutes: 0)
query =
from r in Ambi.Reading,
order_by: [desc: :inserted_at],
where: r.inserted_at >= ^n_hours_ago and r.inserted_at < ^nm_hours_ago,
select: {r.humidity}
Repo.aggregate(query, :avg, :humidity)
end
def get_avg_humidities_over_24hrs() do
# Iterate over the last 24 hours and get the average humidity over each
# 1 hour segment
Enum.map_every(-24..-1, 1, fn hour ->
get_avg_humidity_over_1hr(hour)
end)
end
def get_last_inserted_at() do
Timezone.convert(get_last_row().inserted_at, "America/Chicago")
end
def get_first_inserted_at() do
Timezone.convert(get_first_row().inserted_at, "America/Chicago")
end
defp get_last_row() do
Repo.one(from s in Ambi.Reading, order_by: [desc: s.id], limit: 1)
end
defp get_first_row() do
Repo.one(from s in Ambi.Reading, order_by: [asc: s.id], limit: 1)
end
defp get_readings_count() do
Repo.aggregate(Ambi.Reading, :count, :id)
end
@topic inspect(__MODULE__)
def subscribe() do
PubSub.subscribe(Ambi.PubSub, @topic)
Logger.debug("""
Subscribe details:
topic: #{inspect(@topic)}
""")
end
def broadcast_change(event) do
PubSub.broadcast(Ambi.PubSub, @topic, event)
Logger.debug("""
Broadcast details:
topic: #{inspect(@topic)}
module: #{inspect(__MODULE__)}
event: #{inspect(event)}
""")
:ok
end
end
|
lib/ambi.ex
| 0.651022
| 0.414069
|
ambi.ex
|
starcoder
|
defmodule Calixir.SampleDatesTableMaker do
@moduledoc """
This module generates Elixir data from the Calixir-4.0 sample data.
"""
@doc """
Transforms the sample files into one set with sample dates.
"""
def sample_dict_from_files(files) do
files
|> check_path
|> datasets_from_files
|> sample_set_from_datasets
|> caldates_from_dataset
end
defp sample_set_from_datasets([head | tail] = _datasets) do
# Combines all sample datasets into one `sample_set`.
Enum.reduce(tail, head, fn set, acc -> join_lines([], acc, set) end)
end
defp join_lines(acc, [], []), do: acc
defp join_lines(acc, [h1 | t1] = _set1, [[_ | th2] | t2] = _set2) do
join_lines(acc ++ [h1 ++ th2], t1, t2)
end
# === year_dates ===================================================
@doc """
Transforms the year files into one set with year dates.
"""
def year_dict_from_files(files) do
files
|> check_path
|> datasets_from_files
|> year_set_from_datasets
|> caldates_from_dataset
end
defp year_set_from_datasets([head | _] = datasets) do
# Combines all year datasets into a single dataset.
# The dataset has two headers, calendars and fields,
# followed by 365/366 data lines per year.
Enum.reduce(datasets, head, fn [_, _ | data], acc -> acc ++ data end)
end
defp datasets_from_files(files) do
# Creates a list of lists (= a list of datasets).
# Each element list contains the data of one file.
files
|> Enum.map(&(csv_from_file(&1)))
|> Enum.map(&(dataset_from_csv(&1)))
|> Enum.map(&(check_dataset(&1)))
end
defp csv_from_file(path) do
# Reads the csv file and splits it into lines
# and returns a list of csv_strings.
path
|> File.read!
|> String.trim
|> String.split("\n")
end
defp dataset_from_csv([csv_calendars, csv_fields | csv_values] = _csv) do
# Transforms csv table data into an equivalent Elixir data structure.
calendars = calendars_from_csv(csv_calendars)
fields = fields_from_csv(csv_fields)
data = Enum.map(csv_values, &(values_from_csv(&1)))
[calendars, fields] ++ data
end
defp calendars_from_csv(csv_calendars) do
# Transforms the first csv header string into Elixir data.
csv_calendars
|> String.trim_trailing
|> String.downcase
|> String.replace(~r/^rd,/, "fixed,")
|> String.replace(~r/mid day,/, "midday,")
|> String.replace(~r/ day,/, " weekday,")
|> String.split(~r/, */)
|> fill([])
|> Enum.map(&(String.replace(&1, " ", "_")))
end
# fill-in empty placeholders after calendars, i.e.:
# "..., Unix, Gregorian, , , Julian, , , Roman, , , , , Olympiad, ..."
defp fill(["" | []], acc), do: acc ++ [List.last(acc)]
defp fill([head | []], acc), do: acc ++ [head]
defp fill(["" | tail], acc), do: fill(tail, acc ++ [List.last(acc)])
defp fill([head | tail], acc), do: fill(tail, acc ++ [head])
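# For example:
#
#     fill(["gregorian", "", "", "julian", ""], [])
#     #=> ["gregorian", "gregorian", "gregorian", "julian", "julian"]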
defp fields_from_csv(csv_fields) do
# Transforms the second csv header string into Elixir data.
# In some calendars 'leap' isn't used uniquely, so there is a special fix.
csv_fields
|> String.trim_trailing
|> String.downcase
|> String.replace("month, leap, day, leap", "month, leap_month, day, leap_day")
|> String.split(",")
|> Enum.map(&(String.trim(&1)))
|> Enum.map(&(String.replace(&1, " ", "_")))
end
defp values_from_csv(csv_values) do
# Transforms csv values into Elixir values.
csv_values
|> String.trim_trailing
|> String.trim_trailing(",")
|> String.split(~r/, */)
|> Enum.map(&(value_from_csv(&1)))
end
defp value_from_csv(csv_value) do
# Transforms a single csv value into an Elixir value.
s = String.trim(csv_value)
cond do
String.match?(s, ~r/^[0-9+-]+$/) -> String.to_integer(s)
String.match?(s, ~r/^[0-9.+-]+$/) -> String.to_float(s)
s == "f" -> false
s == "t" -> true
s == "none" -> nil
s == "" -> nil
s == "bogus" -> "bogus"
true -> s |> String.downcase |> String.to_atom
end
end
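# For example: "12" -> 12, "3.5" -> 3.5, "t" -> true, "none" -> nil,
# and "Monday" -> :monday.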
defp check_dataset([calendars, fields | data] = dataset) do
# Checks dataset for consistent line lengths.
# Aborts on error, else returns the dataset.
len = length(calendars)
b = Enum.reduce(dataset, true, fn l, acc -> acc and length(l) == len end)
if b == false do
IO.inspect(calendars, label: "calendars ")
IO.inspect(length(calendars), label: "length(calendars)")
IO.inspect(fields, label: "fields ")
IO.inspect(length(fields), label: "length(fields) ")
IO.inspect(length(hd(data)), label: "length(data) ")
IO.inspect(dataset)
raise "error: different line lengths in dataset"
end
dataset
end
defp check_path(files) when is_list(files) do
# Checks if all the files in the list exist.
# Aborts on error, else return a list with the full paths.
Enum.map(files, &(check_path(&1)))
end
defp check_path(file) do
# Checks if the file exists.
# Aborts on error, else returns the path.
path = Path.expand("./assets/#{file}")
if not File.exists?(path) do
raise "error: file <#{path}> does not exist"
end
path
end
defp caldates_from_dataset([calendars, fields | data] = _dataset) do
headers = headers_from_calendars_and_fields(calendars, fields)
calendars = Enum.map(calendars, &(String.to_atom(&1)))
caldate_structs = caldate_structs_from_calendars_and_headers(calendars, headers)
data = Enum.map(data, &(Enum.zip(headers, &1)))
dates = Enum.map(data, &(caldates_from_data(&1, caldate_structs)))
%{calendars: calendars, caldate_structs: caldate_structs, dates: dates}
end
# === headers ======================================================
defp headers_from_calendars_and_fields(calendars, fields) do
# Returns a list of headers (= atoms) that combine calendar and fields:
# calendars [..., "unix", "gregorian", "gregorian", "gregorian",...]
# + fields [..., "", "year", "month", "day",...]
# -> headers [..., :unix, :gregorian_year, :gregorian_month, :gregorian_day,...]
calendars
|> Enum.zip(fields)
|> Enum.map(&(header_from_calendar_and_field(&1)))
|> Enum.map(&(String.to_atom(&1)))
end
# Headers of empty fields get the name of the calendar.
defp header_from_calendar_and_field({calendar, ""}), do: calendar
# Combines calendar and field into a single header.
defp header_from_calendar_and_field({calendar, field}), do: "#{calendar}_#{field}"
# === caldate_structs =================================================
defp caldate_structs_from_calendars_and_headers(calendars, headers) do
# Returns a list of caldate_structs. A caldate_struct is a list of the form:
# {:calendar, {:header1, :header2, :header3, ...}}
# Its first element is the calendar, the other elements its fields.
# Two examples for caldate_structs:
# {:fixed, :fixed}
# {:gregorian, {:gregorian_year, :gregorian_month, :gregorian_day}}
calendars
|> Enum.zip(headers)
|> Enum.chunk_by(&(elem(&1, 0)))
|> Enum.map(&(caldate_struct_from_chunk(&1)))
end
defp caldate_struct_from_chunk(chunk) do
# Reduces a chunk (= list of tuples) to a single tuple:
# [
# {:calendar, :header1},
# {:calendar, :header2}, -> {:calendar, {:header1, :header2, :header3, ...}}
# {:calendar, :header3},
# ...
# ]
calendar = chunk |> hd |> elem(0)
fields = Enum.map(chunk, fn {_, header} -> header end) |> List.to_tuple
if tuple_size(fields) == 1 do
{calendar, elem(fields, 0)}
else
{calendar, fields}
end
end
# === dates ========================================================
defp caldates_from_data(data, caldate_structs) do
# Returns a list of caldates of the form
# {:calendar, {field_value1, field_value2, field_value3, ...}}
Enum.map(caldate_structs, fn {calendar, fields} ->
{calendar, date_from_data(fields, data)}
end)
end
defp date_from_data(fields, data) when is_tuple(fields) do
fields
|> Tuple.to_list
|> Enum.map(&(Keyword.get(data, &1)))
|> List.to_tuple
end
defp date_from_data(fields, data) do
Keyword.get(data, fields)
end
end
|
lib/calixir/sample_dates_table_maker.ex
| 0.83901
| 0.640601
|
sample_dates_table_maker.ex
|
starcoder
|
defmodule ExXirr do
@moduledoc """
Library to calculate XIRR and absolute rate of return
through the Newton Raphson method.
"""
@max_error 1.0e-3
@days_in_a_year 365
# Public API
@doc """
Function to calculate the rate of return for a given array of
dates and values.
## Examples
iex> d = [{1985, 1, 1}, {1990, 1, 1}, {1995, 1, 1}]
iex> v = [1000, -600, -200]
iex> ExXirr.xirr(d,v)
{:ok, -0.034592}
"""
@spec xirr([:calendar.date()], [number()]) :: {:ok, float()} | {:error, any()}
def xirr(dates, values) when length(dates) != length(values) do
{:error, "Date and Value collections must have the same size"}
end
def xirr(dates, values) when length(dates) < 10 do
LegacyFinance.xirr(dates, values)
end
def xirr(dates, values) do
dates = Enum.map(dates, &Date.from_erl!(&1))
min_date = dates |> List.first()
{dates, values, dates_values} = compact_flow(Enum.zip(dates, values), min_date)
cond do
!verify_flow(values) ->
{:error, "Values should have at least one positive or negative value."}
length(dates) - length(values) == 0 && verify_flow(values) ->
calculate(:xirr, dates_values, [], guess_rate(dates, values), 0)
true ->
{:error, "Uncaught error"}
end
rescue
_ ->
{:error, 0.0}
end
@doc """
Function to calculate the absolute rate of return for a given array
of dates and values.
## Examples
iex> d = [{1985, 1, 1}, {1990, 1, 1}, {1995, 1, 1}]
iex> v = [1000, -600, -200]
iex> {:ok, rate} = ExXirr.xirr(d,v)
iex> ExXirr.absolute_rate(rate, 50)
{:ok, -0.48}
"""
@spec absolute_rate(float(), integer()) :: {:ok, float()} | {:error, String.t()}
def absolute_rate(0, _), do: {:error, "Rate is 0"}
def absolute_rate(rate, days) do
try do
if days < @days_in_a_year do
{:ok, ((:math.pow(1 + rate, days / @days_in_a_year) - 1) * 100) |> Float.round(2)}
else
{:ok, (rate * 100) |> Float.round(2)}
end
rescue
_ ->
{:error, 0.0}
end
end
# Private API
@spec pmap(list(tuple()), fun()) :: Enum.t()
defp pmap(collection, function) do
me = self()
collection
|> Enum.map(fn element -> spawn_link(fn -> send(me, {self(), function.(element)}) end) end)
|> Enum.map(fn pid ->
receive do
{^pid, result} -> result
end
end)
end
@spec power_of(float(), Fraction.t()) :: float()
defp power_of(rate, fraction) when rate < 0 do
:math.pow(-rate, Fraction.to_float(fraction)) * :math.pow(-1, fraction.num)
end
defp power_of(rate, fraction) do
:math.pow(rate, Fraction.to_float(fraction))
end
@spec xirr_reduction({Fraction.t(), float(), float()}) :: float()
defp xirr_reduction({fraction, value, rate}) do
value / power_of(1.0 + rate, fraction)
end
@spec dxirr_reduction({Fraction.t(), float(), float()}) :: float()
defp dxirr_reduction({fraction, value, rate}) do
-value * Fraction.to_float(fraction) * power_of(1.0 + rate, Fraction.negative(fraction)) *
:math.pow(1.0 + rate, -1)
end
@spec compact_flow(list(), Date.t()) :: tuple()
defp compact_flow(dates_values, min_date) do
flow = Enum.reduce(dates_values, %{}, &organize_value(&1, &2, min_date))
{Map.keys(flow), Map.values(flow), Enum.filter(flow, &(elem(&1, 1) != 0))}
end
@spec organize_value(tuple(), map(), Date.t()) :: map()
defp organize_value(date_value, dict, min_date) do
{date, value} = date_value
fraction = %Fraction{
num: Date.diff(date, min_date),
den: 365.0
}
Map.update(dict, fraction, value, &(value + &1))
end
@spec verify_flow(list(float())) :: boolean()
defp verify_flow(values) do
{min, max} = Enum.min_max(values)
min < 0 && max > 0
end
@spec guess_rate([Date.t()], [number]) :: float
defp guess_rate(dates, values) do
{min_value, max_value} = Enum.min_max(values)
period = 1 / (length(dates) - 1)
multiple = 1 + abs(max_value / min_value)
rate = :math.pow(multiple, period) - 1
Float.round(rate, 6)
end
@spec reduce_date_values(list(), float()) :: tuple()
defp reduce_date_values(dates_values, rate) do
calculated_xirr =
dates_values
|> pmap(fn x ->
{
elem(x, 0),
elem(x, 1),
rate
}
end)
|> pmap(&xirr_reduction/1)
|> Enum.sum()
|> Float.round(6)
calculated_dxirr =
dates_values
|> pmap(fn x ->
{
elem(x, 0),
elem(x, 1),
rate
}
end)
|> pmap(&dxirr_reduction/1)
|> Enum.sum()
|> Float.round(6)
{calculated_xirr, calculated_dxirr}
end
@spec calculate(atom(), list(), float(), float(), integer()) ::
{:ok, float()} | {:error, String.t()}
defp calculate(:xirr, _, 0.0, rate, _), do: {:ok, Float.round(rate, 6)}
defp calculate(:xirr, _, _, -1.0, _), do: {:error, "Could not converge"}
defp calculate(:xirr, _, _, _, 300), do: {:error, "I give up"}
defp calculate(:xirr, dates_values, _, rate, tries) do
{xirr, dxirr} = reduce_date_values(dates_values, rate)
new_rate =
if dxirr < 0.0 do
rate
else
rate - xirr / dxirr
end
diff = Kernel.abs(new_rate - rate)
diff = if diff < @max_error, do: 0.0, else: diff
tries = tries + 1
calculate(:xirr, dates_values, diff, new_rate, tries)
end
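# Illustration of the update performed above (a note, not additional API):
# each Newton-Raphson step refines the estimate with
#
#     new_rate = rate - f(rate) / f'(rate)
#
# where f(rate) is the sum of discounted flows (xirr_reduction/1) and
# f'(rate) its derivative (dxirr_reduction/1); the step is skipped when the
# derivative is negative, and iteration stops once the change in rate falls
# below @max_error or after 300 tries.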
end
|
lib/ex_xirr.ex
| 0.861727
| 0.727661
|
ex_xirr.ex
|
starcoder
|
defmodule Game.Dice.Face do
@moduledoc """
A face determines the action of its dice: when not `disabled`, it performs
its `stance` (`:attack`, `:block`, or `:steal`) of the given `type` `count`
times with `amount`, and `intersects` tracks how many of those intersect
with opposing faces.
"""
alias Game.Dice
alias Game.Dice.Face
@type t :: %Face{
count: integer(),
amount: integer(),
intersects: integer(),
disabled: boolean(),
type: :melee | :ranged | :token,
stance: :attack | :block | :steal
}
@derive Jason.Encoder
defstruct count: 1,
amount: 1,
intersects: 0,
disabled: false,
type: :melee,
stance: :block
@spec intersects?(Face.t(), Face.t()) :: boolean()
def intersects?(face, other) do
!face.disabled &&
!other.disabled &&
face.count - face.intersects > 0 &&
other.count - other.intersects > 0 &&
face.type == other.type &&
[:block, :attack] -- [face.stance, other.stance] == []
end
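# Minimal sketch using the struct defaults above (illustrative only):
#
#     attack = %Face{type: :melee, stance: :attack}
#     block = %Face{type: :melee, stance: :block}
#     Face.intersects?(attack, block) #=> true
#     Face.intersects?(block, block)  #=> false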
@spec update(Dice.t(), map()) :: Dice.t()
def update(%Dice{face: face} = dice, attrs) do
%{dice | face: update(face, attrs)}
end
@spec update(Face.t(), map()) :: Face.t()
def update(face, attrs), do: Map.merge(face, attrs)
@spec increase(Dice.t(), atom(), integer()) :: Dice.t()
def increase(%Dice{face: face} = dice, key, amount) do
%{dice | face: increase(face, key, amount)}
end
@spec increase(Face.t(), atom(), integer()) :: Face.t()
def increase(face, key, amount) do
Map.put(face, key, Map.get(face, key) + amount)
end
@spec multiply(Dice.t(), atom(), integer()) :: Dice.t()
def multiply(%Dice{face: face} = dice, key, amount) do
%{dice | face: multiply(face, key, amount)}
end
@spec multiply(Face.t(), atom(), integer()) :: Face.t()
def multiply(face, key, amount) do
Map.put(face, key, Map.get(face, key) * amount)
end
@spec hits(Dice.t() | Face.t()) :: integer()
def hits(%Dice{face: face}), do: hits(face)
def hits(%{disabled: true}), do: 0
def hits(%{stance: :block} = face), do: face.intersects
def hits(face), do: face.count - face.intersects
@spec hit_amount(Dice.t()) :: integer()
def hit_amount(%Dice{face: face}), do: hit_amount(face)
@spec hit_amount(Face.t()) :: integer()
def hit_amount(face), do: hits(face) * face.amount
@spec resolve(map(), map()) :: map()
def resolve(faces, other) do
Enum.reduce(other, faces, &resolve_face(&2, elem(&1, 1)))
end
@spec resolve_face(map(), Face.t()) :: map()
def resolve_face(faces, %{count: count} = face) when count > 1 do
faces
|> find_intersect(face)
|> resolve_face(%{face | count: face.count - 1})
end
def resolve_face(faces, face), do: find_intersect(faces, face)
defp find_intersect(faces, face) do
{index, face} = Enum.find(faces, {nil, nil}, &intersects?(face, elem(&1, 1)))
if index do
faces
|> Map.put(index, %{face | intersects: face.intersects + 1})
else
faces
end
end
end
|
src/server/lib/game/dice/face.ex
| 0.890245
| 0.637003
|
face.ex
|
starcoder
|
defmodule ExUnitFixtures.AutoImport do
@moduledoc """
A mechanism for automatically importing fixtures into the current test module.
In a relatively large test suite you'll most likely need to declare some
fixtures that are to be shared between all tests in a project or module.
`ExUnitFixtures.AutoImport` provides a method for automatically importing
fixtures into a test module based on the module's path.
When a module uses `ExUnitFixtures.AutoImport` it will automatically lookup
`fixtures.exs` files in the current and parent directories and import the
fixtures they contain into the current test file for use.
Note: By default, any call to `use ExUnitFixtures` or
`use ExUnitFixtures.FixtureModule` will automatically
`use ExUnitFixtures.AutoImport` so this module should not need to be manually
used most of the time. This can be controlled by the `auto_import` setting as
described in `ExUnitFixtures.start/1`
For example, we could use the following directory structure & tests:
tests/
fixtures.exs
defmodule GlobalFixtures do
use ExUnitFixtures.FixtureModule
deffixture db do
create_db_conn()
end
end
model_tests/
fixtures.exs
defmodule ModelFixtures do
use ExUnitFixtures.FixtureModule
deffixture user(db) do
user = %User{name: "Graeme"}
insert(db, user)
user
end
end
user_tests.exs
defmodule UserTests do
use ExUnitFixtures
@fixtures [:user]
test "user has name", context do
assert context.user.name == "Graeme"
end
end
Here we declare a fixtures.exs file at our top-level that contains a database
fixture that any of our tests can access. We then define some fixtures for all
of our model tests, and use the user fixture inside one of those tests.
"""
defmacro __using__(_opts) do
quote do
modules = __DIR__
|> ExUnitFixtures.AutoImport.relevant_fixture_files(~r/^fixtures.exs$/i)
|> Enum.flat_map(&ExUnitFixtures.Imp.ModuleStore.find_file/1)
for module <- modules, module != __MODULE__ do
ExUnitFixtures.AutoImport.require_fixture_module(module)
end
end
end
@doc """
Imports the fixture module `module` into the calling module.
"""
defmacro require_fixture_module(module) do
ExUnitFixtures.FixtureModule.register_fixtures(module)
end
@doc """
Finds any fixture files in the current directory or parent directories.
Uses fixture_regex to match fixture files. Returns the results in descending
directory hierarchy order, but files of the same level are not in a guaranteed
order.
"""
@spec relevant_fixture_files(String.t, Regex.t) :: [String.t]
def relevant_fixture_files(directory, fixture_regex) do
directory
|> find_mix_root
|> directories_between(directory)
|> Enum.flat_map(&matching_files(&1, fixture_regex))
|> Enum.into([])
end
# Finds a parent directory with a mix.exs in it.
@spec find_mix_root(String.t) :: String.t | no_return
defp find_mix_root("/") do
raise "Could not find directory with mix.exs"
end
defp find_mix_root(directory) do
if File.exists?(Path.join(directory, "mix.exs")) do
directory
else
directory |> Path.join("..") |> Path.expand |> find_mix_root
end
end
# Returns a list of directories between parent & child.
@spec directories_between(String.t, String.t) :: [String.t]
defp directories_between(parent, child) do
child
|> Path.relative_to(parent)
|> Path.split
|> Enum.scan(parent, &(Path.join &2, &1))
end
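# For example (hypothetical paths):
#
#     directories_between("/app", "/app/test/model_tests")
#     #=> ["/app/test", "/app/test/model_tests"]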
# Returns a list of files in directory that match fixture_regex.
# Returns the full path to the files, but the regex only needs to match the
# file name.
@spec matching_files(String.t, Regex.t) :: [String.t]
defp matching_files(directory, fixture_regex) do
if File.dir?(directory) do
directory
|> File.ls!
|> Enum.filter(&Regex.match?(fixture_regex, &1))
|> Enum.map(&Path.join(directory, &1))
else
[]
end
end
end
|
lib/ex_unit_fixtures/auto_import.ex
| 0.887787
| 0.70724
|
auto_import.ex
|
starcoder
|
defmodule Ecto.Adapters.SQL do
@moduledoc """
Behaviour and implementation for SQL adapters.
The implementation for SQL adapters provides a
pool-based implementation of SQL and also exposes
a query function to developers.
Developers that use `Ecto.Adapters.SQL` should implement
a connection module with specifics on how to connect
to the database and also how to translate the queries
to SQL.
See `Ecto.Adapters.Connection` for connection processes and
`Ecto.Adapters.SQL.Query` for the query semantics.
"""
@doc false
defmacro __using__(adapter) do
quote do
@behaviour Ecto.Adapter
@behaviour Ecto.Adapter.Migration
@behaviour Ecto.Adapter.Transaction
@conn __MODULE__.Connection
@adapter unquote(adapter)
## Worker
@doc false
defmacro __before_compile__(_env) do
:ok
end
@doc false
def start_link(repo, opts) do
{:ok, _} = Application.ensure_all_started(@adapter)
Ecto.Adapters.SQL.start_link(@conn, @adapter, repo, opts)
end
## Types
def embed_id(_), do: Ecto.UUID.generate
def load(type, value), do: Ecto.Adapters.SQL.load(type, value, &load/2)
def dump(type, value), do: Ecto.Adapters.SQL.dump(type, value, &dump/2)
## Query
@doc false
def prepare(:all, query), do: {:cache, @conn.all(query)}
def prepare(:update_all, query), do: {:cache, @conn.update_all(query)}
def prepare(:delete_all, query), do: {:cache, @conn.delete_all(query)}
@doc false
def execute(repo, meta, prepared, params, preprocess, opts) do
Ecto.Adapters.SQL.execute(repo, meta, prepared, params, preprocess, opts)
end
@doc false
# Nil ids are generated in the database.
def insert(repo, model_meta, params, {key, :id, nil}, returning, opts) do
insert(repo, model_meta, params, nil, [key|returning], opts)
end
# Nil binary_ids are generated in the adapter.
def insert(repo, model_meta, params, {key, :binary_id, nil}, returning, opts) do
{req, resp} = Ecto.Adapters.SQL.bingenerate(key)
case insert(repo, model_meta, req ++ params, nil, returning, opts) do
{:ok, values} -> {:ok, resp ++ values}
{:error, _} = err -> err
end
end
def insert(repo, %{source: {prefix, source}}, params, _autogenerate, returning, opts) do
{fields, values} = :lists.unzip(params)
sql = @conn.insert(prefix, source, fields, returning)
Ecto.Adapters.SQL.model(repo, @conn, sql, values, returning, opts)
end
@doc false
def update(repo, %{source: {prefix, source}}, fields, filter, _autogenerate, returning, opts) do
{fields, values1} = :lists.unzip(fields)
{filter, values2} = :lists.unzip(filter)
sql = @conn.update(prefix, source, fields, filter, returning)
Ecto.Adapters.SQL.model(repo, @conn, sql, values1 ++ values2, returning, opts)
end
@doc false
def delete(repo, %{source: {prefix, source}}, filter, _autogenerate, opts) do
{filter, values} = :lists.unzip(filter)
sql = @conn.delete(prefix, source, filter, [])
Ecto.Adapters.SQL.model(repo, @conn, sql, values, [], opts)
end
## Transaction
@doc false
def transaction(repo, opts, fun) do
Ecto.Adapters.SQL.transaction(repo, opts, fun)
end
@doc false
def rollback(repo, value) do
Ecto.Adapters.SQL.rollback(repo, value)
end
## Migration
@doc false
def execute_ddl(repo, definition, opts) do
sqls = @conn.execute_ddl(definition)
for sql <- List.wrap(sqls) do
Ecto.Adapters.SQL.query!(repo, sql, [], opts)
end
:ok
end
defoverridable [prepare: 2, execute: 6,
insert: 6, update: 7, delete: 5,
execute_ddl: 3, embed_id: 1,
load: 2, dump: 2]
end
end
alias Ecto.Pool
alias Ecto.Adapters.SQL.Sandbox
@doc """
Converts the given query to SQL according to its kind and the
adapter in the given repository.
## Examples
The examples below are meant for reference. Each adapter will
return a different result:
Ecto.Adapters.SQL.to_sql(:all, repo, Post)
{"SELECT p.id, p.title, p.inserted_at, p.created_at FROM posts as p", []}
Ecto.Adapters.SQL.to_sql(:update_all, repo,
from(p in Post, update: [set: [title: ^"hello"]]))
{"UPDATE posts AS p SET title = $1", ["hello"]}
"""
@spec to_sql(:all | :update_all | :delete_all, Ecto.Repo.t, Ecto.Queryable.t) ::
{String.t, [term]}
def to_sql(kind, repo, queryable) do
adapter = repo.__adapter__
{_meta, prepared, params} =
Ecto.Queryable.to_query(queryable)
|> Ecto.Query.Planner.query(kind, repo, adapter)
{prepared, params}
end
@doc """
Same as `query/4` but raises on invalid queries.
"""
@spec query!(Ecto.Repo.t, String.t, [term], Keyword.t) ::
%{rows: nil | [tuple], num_rows: non_neg_integer} | no_return
def query!(repo, sql, params, opts \\ []) do
query!(repo, sql, params, nil, opts)
end
defp query!(repo, sql, params, mapper, opts) do
case query(repo, sql, params, mapper, opts) do
{:ok, result} -> result
{:error, err} -> raise err
end
end
@doc """
Runs custom SQL query on given repo.
In case of success, it must return an `:ok` tuple containing
a map with at least two keys:
* `:num_rows` - the number of rows affected
* `:rows` - the result set as a list. `nil` may be returned
instead of the list if the command does not yield any row
as result (but still yields the number of affected rows,
like a `delete` command without returning would)
## Options
* `:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000)
* `:log` - When false, does not log the query
## Examples
iex> Ecto.Adapters.SQL.query(MyRepo, "SELECT $1::integer + $2", [40, 2])
{:ok, %{rows: [{42}], num_rows: 1}}
"""
@spec query(Ecto.Repo.t, String.t, [term], Keyword.t) ::
{:ok, %{rows: nil | [tuple], num_rows: non_neg_integer}} | {:error, Exception.t}
def query(repo, sql, params, opts \\ []) do
query(repo, sql, params, nil, opts)
end
defp query(repo, sql, params, mapper, opts) do
case query(repo, sql, params, nil, mapper, opts) do
{result, entry} ->
log(repo, entry)
result
:noconnect ->
# :noconnect can never be the reason a call fails because
# it is converted to {:nodedown, node}. This means the exit
# reason can be easily identified.
exit({:noconnect, {__MODULE__, :query, [repo, sql, params, opts]}})
end
end
defp query(repo, sql, params, outer_queue_time, mapper, opts) do
{pool_mod, pool, timeout} = repo.__pool__
opts = Keyword.put_new(opts, :timeout, timeout)
timeout = Keyword.fetch!(opts, :timeout)
log? = Keyword.get(opts, :log, true)
query_fun = fn({mod, conn}, inner_queue_time) ->
query(mod, conn, inner_queue_time || outer_queue_time, sql, params, log?, opts)
end
case Pool.run(pool_mod, pool, timeout, query_fun) do
{:ok, {result, entry}} ->
decode(result, entry, mapper)
{:error, :noconnect} ->
:noconnect
{:error, :noproc} ->
raise ArgumentError, "repo #{inspect repo} is not started, " <>
"please ensure it is part of your supervision tree"
end
end
defp query(mod, conn, _queue_time, sql, params, false, opts) do
{mod.query(conn, sql, params, opts), nil}
end
defp query(mod, conn, queue_time, sql, params, true, opts) do
{query_time, result} = :timer.tc(mod, :query, [conn, sql, params, opts])
entry = %Ecto.LogEntry{query: sql, params: params, connection_pid: conn,
query_time: query_time, queue_time: queue_time}
{result, entry}
end
defp decode(result, nil, nil) do
{result, nil}
end
defp decode(result, nil, mapper) do
{decode(result, mapper), nil}
end
defp decode(result, entry, nil) do
{result, %{entry | result: result}}
end
defp decode(result, %{query_time: query_time} = entry, mapper) do
{decode_time, decoded} = :timer.tc(fn -> decode(result, mapper) end)
{decoded, %{entry | result: decoded, query_time: query_time + decode_time}}
end
defp decode({:ok, %{rows: rows} = res}, mapper) when is_list(rows) do
{:ok, %{res | rows: Enum.map(rows, mapper)}}
end
defp decode(other, _mapper) do
other
end
defp log(_repo, nil), do: :ok
defp log(repo, entry), do: repo.log(entry)
@doc ~S"""
Starts a transaction for test.
This function work by starting a transaction and storing the connection
back in the pool with an open transaction. On every test, we restart
the test transaction rolling back to the appropriate savepoint.
**IMPORTANT:** Test transactions only work if the connection pool is
`Ecto.Adapters.SQL.Sandbox`
## Example
The first step is to configure your database to use the
`Ecto.Adapters.SQL.Sandbox` pool. You set those options in your
`config/config.exs`:
config :my_app, Repo,
pool: Ecto.Adapters.SQL.Sandbox
Since you don't want those options in your production database, we
typically recommend creating a `config/test.exs` and adding the
following to the bottom of your `config/config.exs` file:
import_config "#{Mix.env}.exs"
Now with the test database properly configured, you can write
transactional tests:
# At the end of your test_helper.exs
# From now, all tests happen inside a transaction
Ecto.Adapters.SQL.begin_test_transaction(TestRepo)
defmodule PostTest do
# Tests that use the shared repository cannot be async
use ExUnit.Case
setup do
# Go back to a clean slate at the beginning of every test
Ecto.Adapters.SQL.restart_test_transaction(TestRepo)
:ok
end
test "create comment" do
assert %Post{} = TestRepo.insert!(%Post{})
end
end
In some cases, you may want to start the test transaction only
for specific tests and then roll it back. You can do it as:
defmodule PostTest do
# Tests that use the shared repository cannot be async
use ExUnit.Case
setup_all do
# Wrap this case in a transaction
Ecto.Adapters.SQL.begin_test_transaction(TestRepo)
# Roll it back once we are done
on_exit fn ->
Ecto.Adapters.SQL.rollback_test_transaction(TestRepo)
end
:ok
end
setup do
# Go back to a clean slate at the beginning of every test
Ecto.Adapters.SQL.restart_test_transaction(TestRepo)
:ok
end
test "create comment" do
assert %Post{} = TestRepo.insert!(%Post{})
end
end
"""
@spec begin_test_transaction(Ecto.Repo.t, Keyword.t) :: :ok
def begin_test_transaction(repo, opts \\ []) do
test_transaction(:begin, repo, opts)
end
@doc """
Restarts a test transaction, see `begin_test_transaction/2`.
"""
@spec restart_test_transaction(Ecto.Repo.t, Keyword.t) :: :ok
def restart_test_transaction(repo, opts \\ []) do
test_transaction(:restart, repo, opts)
end
@spec rollback_test_transaction(Ecto.Repo.t, Keyword.t) :: :ok
def rollback_test_transaction(repo, opts \\ []) do
test_transaction(:rollback, repo, opts)
end
defp test_transaction(fun, repo, opts) do
case repo.__pool__ do
{Sandbox, pool, timeout} ->
opts = Keyword.put_new(opts, :timeout, timeout)
test_transaction(pool, fun, &repo.log/1, opts)
{pool_mod, _, _} ->
raise """
cannot #{fun} test transaction with pool #{inspect pool_mod}.
In order to use test transactions with Ecto SQL, you need to
configure your repository to use #{inspect Sandbox}:
pool: #{inspect Sandbox}
"""
end
end
defp test_transaction(pool, fun, log, opts) do
timeout = Keyword.fetch!(opts, :timeout)
case apply(Sandbox, fun, [pool, log, opts, timeout]) do
:ok ->
:ok
{:error, :sandbox} when fun == :begin ->
raise "cannot begin test transaction because we are already inside one"
end
end
## Worker
@doc false
def start_link(connection, adapter, _repo, opts) do
unless Code.ensure_loaded?(connection) do
raise """
could not find #{inspect connection}.
Please verify you have added #{inspect adapter} as a dependency:
{#{inspect adapter}, ">= 0.0.0"}
And remember to recompile Ecto afterwards by cleaning the current build:
mix deps.clean ecto
"""
end
{pool, opts} = Keyword.pop(opts, :pool)
pool.start_link(connection, opts)
end
## Types
@doc false
def load({:embed, _} = type, data, loader),
do: Ecto.Type.load(type, data, fn
{:embed, _} = type, value -> loader.(type, value)
type, value -> Ecto.Type.cast(type, value)
end)
def load(:binary_id, data, loader),
do: Ecto.Type.load(Ecto.UUID, data, loader)
def load(type, data, loader),
do: Ecto.Type.load(type, data, loader)
@doc false
def dump({:embed, _} = type, data, dumper),
do: Ecto.Type.dump(type, data, fn
{:embed, _} = type, value -> dumper.(type, value)
_type, value -> {:ok, value}
end)
def dump(:binary_id, data, dumper),
do: Ecto.Type.dump(Ecto.UUID, data, dumper)
def dump(type, data, dumper),
do: Ecto.Type.dump(type, data, dumper)
@doc false
def bingenerate(key) do
{:ok, value} = Ecto.UUID.dump(Ecto.UUID.generate)
{[{key, value}], [{key, unwrap(value)}]}
end
defp unwrap(%Ecto.Query.Tagged{value: value}), do: value
defp unwrap(value), do: value
## Query
@doc false
def execute(repo, _meta, prepared, params, nil, opts) do
%{rows: rows, num_rows: num} = query!(repo, prepared, params, nil, opts)
{num, rows}
end
def execute(repo, meta, prepared, params, preprocess, opts) do
fields = count_fields(meta.select.fields, meta.sources)
mapper = &process_row(&1, preprocess, fields)
%{rows: rows, num_rows: num} = query!(repo, prepared, params, mapper, opts)
{num, rows}
end
@doc false
def model(repo, conn, sql, values, returning, opts) do
case query(repo, sql, values, nil, opts) do
{:ok, %{rows: nil, num_rows: 1}} ->
{:ok, []}
{:ok, %{rows: [values], num_rows: 1}} ->
{:ok, Enum.zip(returning, values)}
{:ok, %{num_rows: 0}} ->
{:error, :stale}
{:error, err} ->
case conn.to_constraints(err) do
[] -> raise err
constraints -> {:invalid, constraints}
end
end
end
defp count_fields(fields, sources) do
Enum.map fields, fn
{:&, _, [idx]} = field ->
{_source, model} = elem(sources, idx)
{field, length(model.__schema__(:fields))}
field ->
{field, 0}
end
end
defp process_row(row, preprocess, fields) do
Enum.map_reduce(fields, row, fn
{field, 0}, [h|t] ->
{preprocess.(field, h, nil), t}
{field, count}, acc ->
case split_and_not_nil(acc, count, true, []) do
{nil, rest} -> {nil, rest}
{val, rest} -> {preprocess.(field, val, nil), rest}
end
end) |> elem(0)
end
defp split_and_not_nil(rest, 0, true, _acc), do: {nil, rest}
defp split_and_not_nil(rest, 0, false, acc), do: {:lists.reverse(acc), rest}
defp split_and_not_nil([nil|t], count, all_nil?, acc) do
split_and_not_nil(t, count - 1, all_nil?, [nil|acc])
end
defp split_and_not_nil([h|t], count, _all_nil?, acc) do
split_and_not_nil(t, count - 1, false, [h|acc])
end
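# Worked example of the helper above: split_and_not_nil([1, nil, 2 | rest], 3, true, [])
# returns {[1, nil, 2], rest}, while split_and_not_nil([nil, nil, nil | rest], 3, true, [])
# returns {nil, rest}, signalling that the association columns were all NULL.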
## Transactions
@doc false
def transaction(repo, opts, fun) do
{pool_mod, pool, timeout} = repo.__pool__
opts = Keyword.put_new(opts, :timeout, timeout)
timeout = Keyword.fetch!(opts, :timeout)
transaction = fn
:opened, ref, {mod, _conn}, queue_time ->
mode = transaction_mode(pool_mod, pool, timeout)
transaction(repo, ref, mod, mode, queue_time, timeout, opts, fun)
:already_open, _, _, _ ->
{{:return, {:ok, fun.()}}, nil}
end
case Pool.transaction(pool_mod, pool, timeout, transaction) do
{{:return, result}, entry} ->
log(repo, entry)
result
{{:raise, class, reason, stack}, entry} ->
log(repo, entry)
:erlang.raise(class, reason, stack)
{{:error, err}, entry} ->
log(repo, entry)
raise err
{:error, :noconnect} ->
exit({:noconnect, {__MODULE__, :transaction, [repo, opts, fun]}})
{:error, :noproc} ->
raise ArgumentError, "repo #{inspect repo} is not started, " <>
"please ensure it is part of your supervision tree"
end
end
@doc false
def rollback(repo, value) do
{pool_mod, pool, _timeout} = repo.__pool__
Pool.rollback(pool_mod, pool, value)
end
defp transaction_mode(Sandbox, pool, timeout), do: Sandbox.mode(pool, timeout)
defp transaction_mode(_, _, _), do: :raw
defp transaction(repo, ref, mod, mode, queue_time, timeout, opts, fun) do
case begin(repo, mod, mode, queue_time, opts) do
{{:ok, _}, entry} ->
safe = fn -> log(repo, entry); fun.() end
case Pool.with_rollback(ref, safe) do
{:ok, _} = ok ->
commit(repo, ref, mod, mode, timeout, opts, {:return, ok})
{:error, _} = error ->
rollback(repo, ref, mod, mode, timeout, opts, {:return, error})
{:raise, _kind, _reason, _stack} = to_raise ->
rollback(repo, ref, mod, mode, timeout, opts, to_raise)
end
{{:error, _err}, _entry} = error ->
Pool.break(ref, timeout)
error
:noconnect ->
{:error, :noconnect}
end
end
defp begin(repo, mod, mode, queue_time, opts) do
sql = begin_sql(mod, mode)
query(repo, sql, [], queue_time, nil, opts)
end
defp begin_sql(mod, :raw), do: mod.begin_transaction
defp begin_sql(mod, :sandbox), do: mod.savepoint "ecto_trans"
defp commit(repo, ref, mod, :raw, timeout, opts, result) do
case query(repo, mod.commit, [], nil, nil, opts) do
{{:ok, _}, entry} ->
{result, entry}
{{:error, _}, _entry} = error ->
Pool.break(ref, timeout)
error
:noconnect ->
{result, nil}
end
end
defp commit(_repo, _ref, _mod, _mode, _timeout, _opts, result) do
{result, nil}
end
defp rollback(repo, ref, mod, mode, timeout, opts, result) do
sql = rollback_sql(mod, mode)
case query(repo, sql, [], nil, nil, opts) do
{{:ok, _}, entry} ->
{result, entry}
{{:error, _}, _entry} = error ->
Pool.break(ref, timeout)
error
:noconnect ->
{result, nil}
end
end
defp rollback_sql(mod, :raw), do: mod.rollback
defp rollback_sql(mod, :sandbox) do
mod.rollback_to_savepoint "ecto_trans"
end
end
|
deps/ecto/lib/ecto/adapters/sql.ex
| 0.892328
| 0.517022
|
sql.ex
|
starcoder
|
defmodule Mix.Releases.Checks do
@moduledoc """
This module defines a behavior for, and orchestrator of, static analysis checks
to be performed at release-time. These checks are intended to operate on the fully
reified release configuration and metadata, and return warnings, errors, or ok for
the release assembler to react to.
In most cases, warnings will be printed but assembly will continue; errors will be
printed but will terminate assembly, and a successful check will be printed only if
verbose logging is enabled.
"""
alias Mix.Releases.Release
@callback run(Release.t()) :: :ok | {:ok, warning :: String.t()} | {:error, term}
defmacro __using__(_) do
quote do
@behaviour unquote(__MODULE__)
alias unquote(__MODULE__)
end
end
@type warning :: String.t()
# The default set of checks to run when executing validate_configuration/1
@default_checks [
__MODULE__.Erts,
__MODULE__.Cookie,
__MODULE__.MissingApps,
__MODULE__.LoadedOrphanedApps
]
@doc """
Returns a list of all checks available to be applied.
"""
def list() do
extra = Application.get_env(:distillery, :extra_checks, [])
Enum.concat(@default_checks, extra)
end
@doc """
Runs all default and configured checks against the given release.
"""
@spec run(Release.t()) :: :ok | {:ok, warning} | {:error, term}
def run(%Release{} = release) do
Mix.Releases.Shell.debug("Running validation checks..")
run(list(), release)
end
@doc """
Runs all of the given checks, in the given order.
"""
@spec run([module], Release.t()) :: :ok | {:ok, warning} | {:error, term}
def run([], _release),
do: :ok
def run([check | checks], %Release{} = release) do
Mix.Releases.Shell.debugf(" > #{Enum.join(Module.split(check), ".")}")
case check.run(release) do
:ok ->
Mix.Releases.Shell.debugf(" * PASS\n", :green)
run(checks, release)
{:ok, warning} = warn when is_binary(warning) ->
Mix.Releases.Shell.debugf(" * WARN\n\n", :yellow)
warn
{:error, _} = err ->
Mix.Releases.Shell.debugf(" * FAILED\n", :red)
err
other ->
{:error,
"The check #{inspect(check)} returned #{inspect(other)} " <>
"when :ok, {:ok, String.t}, or {:error, term} were expected"}
end
end
end
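# A minimal custom check, sketched against the behaviour above (the module
# name and warning text are hypothetical; a real check would inspect the
# release metadata):
defmodule MyApp.ReleaseChecks.AlwaysWarn do
  use Mix.Releases.Checks

  def run(%Mix.Releases.Release{} = _release) do
    # Returning {:ok, msg} surfaces a warning, :ok passes silently,
    # and {:error, term} aborts release assembly.
    {:ok, "AlwaysWarn is a placeholder check"}
  end
end
# Such a check could be registered through the `:extra_checks` application
# environment read by `list/0` above.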
|
lib/mix/lib/releases/checks.ex
| 0.763484
| 0.435601
|
checks.ex
|
starcoder
|
defmodule Norm.Spec.Selection do
@moduledoc false
# Provides the definition for selections
defstruct subset: nil
alias Norm.Schema
alias Norm.SpecError
def new(schema, path) do
select(schema, path, %{})
end
defp select(_, [], selection), do: %__MODULE__{subset: selection}
defp select(schema, [selector | rest], selection) do
case selector do
{key, inner} ->
case Schema.spec(schema, key) do
nil ->
raise SpecError, {:selection, key, schema}
inner_schema ->
selection = Map.put(selection, key, select(inner_schema, inner, %{}))
select(schema, rest, selection)
end
key ->
case Schema.spec(schema, key) do
nil ->
raise SpecError, {:selection, key, schema}
spec ->
new_selection = Map.put(selection, key, spec)
select(schema, rest, new_selection)
end
end
end
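# Sketch of how a selection is built (the schema and keys are hypothetical):
# new(user_schema, [:name, address: [:city]]) copies the spec for :name,
# recursively selects :city inside the :address schema, and raises
# Norm.SpecError if any selected key is missing from the schema.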
if Code.ensure_loaded?(StreamData) do
defimpl Norm.Generatable do
alias Norm.Generatable
def gen(%{subset: specs}) do
case Enum.reduce(specs, %{}, &to_gen/2) do
{:error, error} ->
{:error, error}
gen ->
{:ok, StreamData.fixed_map(gen)}
end
end
defp to_gen(_, {:error, error}), do: {:error, error}
defp to_gen({key, spec}, generator) do
case Generatable.gen(spec) do
{:ok, g} ->
Map.put(generator, key, g)
{:error, error} ->
{:error, error}
end
end
end
end
defimpl Norm.Conformer.Conformable do
alias Norm.Conformer
alias Norm.Conformer.Conformable
def conform(%{subset: subset}, input, path) do
results =
subset
|> Enum.map(fn {key, spec} ->
val = Map.get(input, key)
if val do
{key, Conformable.conform(spec, val, path ++ [key])}
else
{key, {:error, [Conformer.error(path ++ [key], input, ":required")]}}
end
end)
|> Enum.reduce(%{ok: [], error: []}, fn {key, {result, r}}, acc ->
Map.put(acc, result, [{key, r} | acc[result]])
end)
if Enum.any?(results.error) do
errors =
results.error
|> Enum.flat_map(fn {_, errors} -> errors end)
{:error, errors}
else
{:ok, Enum.into(results.ok, %{})}
end
end
end
end
|
lib/norm/spec/selection.ex
| 0.738575
| 0.437643
|
selection.ex
|
starcoder
|
defmodule Credo.CLI.Output do
@moduledoc """
This module provides helper functions regarding command line output.
"""
@category_tag_map %{"refactor" => "F"}
@priority_values_map [
higher: [color: :red, arrow: "\u2191"],
high: [color: :red, arrow: "\u2197"],
normal: [color: :yellow, arrow: "\u2192"],
low: [color: :blue, arrow: "\u2198"],
ignore: [color: :magenta, arrow: "\u2193"]
]
alias Credo.CLI.Output.UI
alias Credo.Execution
alias Credo.Issue
alias Credo.Priority
def check_tag(category, in_parens \\ true)
def check_tag(category, in_parens) when is_binary(category) do
default_tag =
category
|> String.at(0)
|> String.upcase()
tag = Map.get(@category_tag_map, category, default_tag)
if in_parens do
"[#{tag}]"
else
tag
end
end
def check_tag(category, in_parens) when is_atom(category) do
category
|> to_string
|> check_tag(in_parens)
end
def check_tag(check_mod, in_parens) do
check_mod.category
|> to_string
|> check_tag(in_parens)
end
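# For example, check_tag("refactor") returns "[F]" (via @category_tag_map)
# and check_tag("warning", false) returns "W" (the upcased first letter).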
def check_color(category) when is_binary(category) do
case category do
"consistency" -> :cyan
"readability" -> :blue
"design" -> :olive
"refactor" -> :yellow
"warning" -> :red
_ -> :magenta
end
end
def check_color(category) when is_atom(category) do
category
|> to_string
|> check_color
end
def check_color(check_mod) do
check_mod.category
|> to_string
|> check_color
end
@doc """
Returns a suitable color for a given priority.
iex> Credo.CLI.Output.issue_color(%Credo.Issue{priority: :higher})
:red
iex> Credo.CLI.Output.issue_color(%Credo.Issue{priority: 20})
:red
"""
def issue_color(%Issue{priority: priority} = issue) when is_number(priority) do
%Issue{issue | priority: Priority.to_atom(priority)} |> issue_color()
end
def issue_color(%Issue{priority: priority}) when is_atom(priority) do
get_in(@priority_values_map, [priority, :color]) || "?"
end
def issue_color(_), do: "?"
@doc """
Returns a suitable arrow for a given priority.
iex> Credo.CLI.Output.priority_arrow(:high)
"↗"
iex> Credo.CLI.Output.priority_arrow(10)
"↗"
"""
def priority_arrow(priority) when is_number(priority) do
priority |> Priority.to_atom() |> priority_arrow()
end
def priority_arrow(priority) when is_atom(priority) do
get_in(@priority_values_map, [priority, :arrow]) || "?"
end
def priority_arrow(_), do: "?"
@doc """
Returns a suitable name for a given priority.
iex> Credo.CLI.Output.priority_name(:normal)
"normal"
iex> Credo.CLI.Output.priority_name(1)
"normal"
"""
def priority_name(priority) when is_number(priority) do
priority |> Priority.to_atom() |> priority_name()
end
def priority_name(priority) when is_atom(priority) do
if Keyword.has_key?(@priority_values_map, priority), do: priority |> to_string(), else: "?"
end
def priority_name(_), do: "?"
@doc """
Returns a suitable foreground color for a given `background_color`.
iex> Credo.CLI.Output.foreground_color(:yellow)
:black
iex> Credo.CLI.Output.foreground_color(:blue)
:white
"""
def foreground_color(background_color)
def foreground_color(:cyan), do: :black
def foreground_color(:yellow), do: :black
def foreground_color(_), do: :white
def term_columns(default \\ 80) do
case :io.columns() do
{:ok, columns} ->
columns
_ ->
default
end
end
def complain_about_invalid_source_files([]), do: nil
def complain_about_invalid_source_files(invalid_source_files) do
invalid_source_filenames = Enum.map(invalid_source_files, & &1.filename)
output = [
:reset,
:bright,
:orange,
"info: ",
:red,
"Some source files could not be parsed correctly and are excluded:\n"
]
UI.warn(output)
print_numbered_list(invalid_source_filenames)
end
def complain_about_timed_out_source_files([]), do: nil
def complain_about_timed_out_source_files(large_source_files) do
large_source_filenames = Enum.map(large_source_files, & &1.filename)
output = [
:reset,
:bright,
:orange,
"info: ",
:red,
"Some source files were not parsed in the time allotted:\n"
]
UI.warn(output)
print_numbered_list(large_source_filenames)
end
def print_skipped_checks(%Execution{skipped_checks: []}), do: nil
def print_skipped_checks(%Execution{skipped_checks: skipped_checks}) do
msg = [
:reset,
:bright,
:orange,
"info: ",
:reset,
:faint,
"some checks were skipped because they're not compatible with\n",
:reset,
:faint,
"your version of Elixir (#{System.version()}).\n\n",
"You can deactivate these checks by adding this to the `checks` list in your config:\n"
]
UI.puts("")
UI.puts(msg)
skipped_checks
|> Enum.map(&check_name/1)
|> print_disabled_check_config
end
defp check_name({check, _check_info}), do: check_name({check})
defp check_name({check}) do
check
|> to_string
|> String.replace(~r/^Elixir\./, "")
end
defp print_numbered_list(list) do
list
|> Enum.with_index()
|> Enum.flat_map(fn {string, index} ->
[
:reset,
String.pad_leading("#{index + 1})", 5),
:faint,
" #{string}\n"
]
end)
|> UI.warn()
end
defp print_disabled_check_config(list) do
list
|> Enum.flat_map(fn string ->
[
:reset,
String.pad_leading(" ", 4),
:faint,
"{#{string}, false},\n"
]
end)
|> UI.puts()
end
end
|
lib/credo/cli/output.ex
| 0.756762
| 0.418964
|
output.ex
|
starcoder
|
defmodule ExampleFiles.File do
@moduledoc """
A `GenServer` that provides access to a project file that serves as an example
or a template for a project file.
The state of an `ExampleFiles.File` process is its filesystem path.
"""
use GenServer
@typedoc """
An error encountered while processing an example file.
To transform the second term of the tuple into user-friendly form, pass it to
[`:file.format_error/1`](http://erlang.org/doc/man/file.html#format_error-1).
"""
@type error :: {:error, :eacces | :eperm | :enotdir}
@typedoc "The status of an example file."
@type status :: :missing | :identical | :out_of_date
@basename_as_delimited_left_regex ~r/(\.|_|-)example(\.|_|-|$)/i
@basename_as_delimited_right_regex ~r/(^|\.|_|-)example(\.|_|-)/i
@basename_as_camelcase_start_regex ~r/^[Ee]xample([[:upper:]]|\d|$)/
@basename_as_camelcase_middle_regex ~r/([[:lower:]]|\d)Example([[:upper:]]|\d|$)/
# Client API
# TODO: Update this spec using the new `keyword` type under Elixir v1.3
# @spec start_link(binary | [binary], keyword) :: GenServer.on_start
@spec start_link(binary | [binary], [{atom, any}]) :: GenServer.on_start
@doc """
Starts an `ExampleFiles.File` process, linked to the current process, with the
specified `path`.
The process exits if `path` is not an example file (see `example_file?/1`).
"""
def start_link(arguments, options \\ [])
def start_link(arguments, options) when is_list(arguments) do
__MODULE__ |> GenServer.start_link(arguments, options)
end
def start_link(path, options), do: [path] |> start_link(options)
@spec clean(pid, timeout) :: {:ok, :deleted | :enoent} | error
@doc """
Deletes the file at the example file’s path-when-pulled (see
`path_when_pulled/1`).
## Examples
iex> path = Path.join(System.tmp_dir!, String.slice(to_string(:rand.uniform), 2..-1)) <> ".example"
...> {:ok, file} = ExampleFiles.File.start_link([path])
...> false = file |> ExampleFiles.File.path_when_pulled |> File.exists?
...> {:ok, :enoent} = file |> ExampleFiles.File.clean
...> file |> ExampleFiles.File.path_when_pulled |> File.exists?
false
iex> path = Path.join(System.tmp_dir!, String.slice(to_string(:rand.uniform), 2..-1)) <> ".example"
...> {:ok, file} = ExampleFiles.File.start_link([path])
...> file |> ExampleFiles.File.path_when_pulled |> File.touch!
...> true = file |> ExampleFiles.File.path_when_pulled |> File.exists?
...> {:ok, :deleted} = file |> ExampleFiles.File.clean
...> file |> ExampleFiles.File.path_when_pulled |> File.exists?
false
"""
def clean(file, timeout \\ 5000), do: file |> GenServer.call({:clean}, timeout)
@spec example_file?(binary) :: boolean
@doc """
Returns `true` if the specified `path` qualifies as an example file.
## Examples
iex> "foo" |> ExampleFiles.File.example_file?
false
iex> "example" |> ExampleFiles.File.example_file?
false
iex> "fooexample" |> ExampleFiles.File.example_file?
false
iex> "examplefoo" |> ExampleFiles.File.example_file?
false
iex> "foo.example/bar" |> ExampleFiles.File.example_file?
false
iex> "foo.example" |> ExampleFiles.File.example_file?
true
iex> "foo/bar-example" |> ExampleFiles.File.example_file?
true
iex> "fooExample" |> ExampleFiles.File.example_file?
true
iex> "example_foo" |> ExampleFiles.File.example_file?
true
iex> "exampleFoo" |> ExampleFiles.File.example_file?
true
iex> "foo.Example" |> ExampleFiles.File.example_file?
true
iex> "Example.foo" |> ExampleFiles.File.example_file?
true
iex> "123Example" |> ExampleFiles.File.example_file?
true
iex> "Example123" |> ExampleFiles.File.example_file?
true
iex> "foo.EXAMPLE" |> ExampleFiles.File.example_file?
true
iex> "EXAMPLE.foo" |> ExampleFiles.File.example_file?
true
iex> "foo.example.bar" |> ExampleFiles.File.example_file?
true
iex> "fooExampleBar" |> ExampleFiles.File.example_file?
true
iex> "123Example456" |> ExampleFiles.File.example_file?
true
"""
def example_file?(path) do
basename = path |> Path.basename
!String.match?(basename, ~r/^example$/i) &&
(String.match?(basename, @basename_as_delimited_left_regex) ||
String.match?(basename, @basename_as_delimited_right_regex) ||
String.match?(basename, @basename_as_camelcase_start_regex) ||
String.match?(basename, @basename_as_camelcase_middle_regex))
end
@spec path(pid, timeout) :: binary
@doc """
Returns the example file’s path.
## Examples
iex> {:ok, file} = ExampleFiles.File.start_link("foo.example")
...> file |> ExampleFiles.File.path
"foo.example"
"""
def path(file, timeout \\ 5000), do: file |> GenServer.call({:path}, timeout)
@spec path_when_pulled(pid, timeout) :: binary
@doc """
Computes the path of the example file when it is pulled (see `pull/1`).
## Examples
iex> {:ok, file} = ExampleFiles.File.start_link("foo.example")
...> file |> ExampleFiles.File.path_when_pulled
"foo"
iex> {:ok, file} = ExampleFiles.File.start_link("foo/bar-example")
...> file |> ExampleFiles.File.path_when_pulled
"foo/bar"
iex> {:ok, file} = ExampleFiles.File.start_link("fooExample")
...> file |> ExampleFiles.File.path_when_pulled
"foo"
iex> {:ok, file} = ExampleFiles.File.start_link("example_foo")
...> file |> ExampleFiles.File.path_when_pulled
"foo"
iex> {:ok, file} = ExampleFiles.File.start_link("exampleFoo")
...> file |> ExampleFiles.File.path_when_pulled
"Foo"
iex> {:ok, file} = ExampleFiles.File.start_link("foo.Example")
...> file |> ExampleFiles.File.path_when_pulled
"foo"
iex> {:ok, file} = ExampleFiles.File.start_link("Example.foo")
...> file |> ExampleFiles.File.path_when_pulled
"foo"
iex> {:ok, file} = ExampleFiles.File.start_link("123Example")
...> file |> ExampleFiles.File.path_when_pulled
"123"
iex> {:ok, file} = ExampleFiles.File.start_link("Example123")
...> file |> ExampleFiles.File.path_when_pulled
"123"
iex> {:ok, file} = ExampleFiles.File.start_link("foo.EXAMPLE")
...> file |> ExampleFiles.File.path_when_pulled
"foo"
iex> {:ok, file} = ExampleFiles.File.start_link("EXAMPLE.foo")
...> file |> ExampleFiles.File.path_when_pulled
"foo"
iex> {:ok, file} = ExampleFiles.File.start_link("foo.example.bar")
...> file |> ExampleFiles.File.path_when_pulled
"foo.bar"
iex> {:ok, file} = ExampleFiles.File.start_link("fooExampleBar")
...> file |> ExampleFiles.File.path_when_pulled
"fooBar"
iex> {:ok, file} = ExampleFiles.File.start_link("123Example456")
...> file |> ExampleFiles.File.path_when_pulled
"123456"
"""
def path_when_pulled(file, timeout \\ 5000) do
file |> GenServer.call({:path_when_pulled}, timeout)
end
@spec pull(pid, timeout) :: {:ok, :copied | :identical} | error
@doc """
Copies the example file to its path-when-pulled (see `path_when_pulled/1`).
"""
def pull(file, timeout \\ 5000), do: file |> GenServer.call({:pull}, timeout)
@spec status(pid, timeout) :: status
@doc """
Computes the status of the example file.
## Examples
iex> path = Path.join(System.tmp_dir!, String.slice(to_string(:rand.uniform), 2..-1)) <> ".example"
...> {:ok, file} = ExampleFiles.File.start_link([path])
...> file |> ExampleFiles.File.status
:missing
"""
def status(file, timeout \\ 5000) do
file |> GenServer.call({:status}, timeout)
end
@spec identical?(binary, binary) :: boolean
defp identical?(path1, path2) do
case File.read(path1) do
{:ok, content1} ->
case File.read(path2) do
{:ok, content2} -> content1 == content2
{:error, :enoent} -> false
{:error, reason} -> reason |> :file.format_error |> List.to_string |> raise
end
{:error, :enoent} ->
! File.exists?(path2)
{:error, reason} ->
reason |> :file.format_error |> List.to_string |> raise
end
end
@spec pulled(binary) :: binary
defp pulled(path) do
basename = Path.basename(path)
pulled_basename = basename |> String.replace(@basename_as_delimited_left_regex, "\\2")
|> String.replace(@basename_as_delimited_right_regex, "\\1")
|> String.replace(@basename_as_camelcase_start_regex, "\\1")
|> String.replace(@basename_as_camelcase_middle_regex, "\\1\\2")
if (dirname = Path.dirname(path)) == "." do
pulled_basename
else
Path.join dirname, pulled_basename
end
end
# Server callbacks
def init([path]) when is_binary(path) do
if path |> example_file? do
{:ok, path}
else
init false
end
end
def init(_), do: {:stop, "Not an example file"}
def handle_call({:clean}, _from, path) do
path_when_pulled = path |> pulled
result = case path_when_pulled |> File.rm do
:ok -> {:ok, :deleted}
{:error, :enoent} -> {:ok, :enoent}
other -> other
end
{:reply, result, path}
end
def handle_call({:path}, _from, path), do: {:reply, path, path}
def handle_call({:path_when_pulled}, _from, path) do
path_when_pulled = path |> pulled
{:reply, path_when_pulled, path}
end
def handle_call({:pull}, _from, path) do
path_when_pulled = path |> pulled
result = if File.exists?(path) && identical?(path, path_when_pulled) do
{:ok, :identical}
else
case path |> File.cp(path_when_pulled) do
:ok -> {:ok, :copied}
other -> other
end
end
{:reply, result, path}
end
def handle_call({:status}, _from, path) do
path_when_pulled = path |> pulled
status = if File.exists?(path) && File.exists?(path_when_pulled) do
if identical?(path, path_when_pulled) do
:identical
else
:out_of_date
end
else
:missing
end
{:reply, status, path}
end
end
|
lib/example_files/file.ex
| 0.691185
| 0.583856
|
file.ex
|
starcoder
|
defmodule AWS.AppMesh do
@moduledoc """
AWS App Mesh is a service mesh based on the Envoy proxy that makes it easy
to monitor and
control microservices. App Mesh standardizes how your microservices
communicate, giving you
end-to-end visibility and helping to ensure high availability for your
applications.
App Mesh gives you consistent visibility and network traffic controls for
every
microservice in an application. You can use App Mesh with AWS Fargate,
Amazon ECS, Amazon EKS,
Kubernetes on AWS, and Amazon EC2.
Note: App Mesh supports microservice applications that use service discovery
naming for their components. For more information about service discovery on
Amazon ECS, see [Service
Discovery](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/service-discovery.html)
in the *Amazon Elastic Container Service Developer Guide*. Kubernetes
`kube-dns` and `coredns` are supported. For more information, see
[DNS for Services and Pods](https://kubernetes.io/docs/concepts/services-networking/dns-pod-service/)
in the Kubernetes documentation.
"""
@doc """
Creates a gateway route.
A gateway route is attached to a virtual gateway and routes traffic to an
existing
virtual service. If a route matches a request, it can distribute traffic to
a target virtual service.
For more information about gateway routes, see [Gateway
routes](https://docs.aws.amazon.com/app-mesh/latest/userguide/gateway-routes.html).
"""
def create_gateway_route(client, mesh_name, virtual_gateway_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualGateway/#{URI.encode(virtual_gateway_name)}/gatewayRoutes"
headers = []
{query_, input} =
[
{"meshOwner", "meshOwner"},
]
|> AWS.Request.build_params(input)
request(client, :put, path_, query_, headers, input, options, 200)
end
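# Hypothetical usage sketch (the mesh, gateway, and route names are
# illustrative, `client` is assumed to be an already-configured aws-elixir
# client, and the input keys mirror the App Mesh REST API):
#
#     AWS.AppMesh.create_gateway_route(client, "my-mesh", "my-gateway", %{
#       "gatewayRouteName" => "web-route",
#       "spec" => %{}
#     })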
@doc """
Creates a service mesh.
A service mesh is a logical boundary for network traffic between services
that are
represented by resources within the mesh. After you create your service
mesh, you can
create virtual services, virtual nodes, virtual routers, and routes to
distribute traffic
between the applications in your mesh.
For more information about service meshes, see [Service
meshes](https://docs.aws.amazon.com/app-mesh/latest/userguide/meshes.html).
"""
def create_mesh(client, input, options \\ []) do
path_ = "/v20190125/meshes"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Creates a route that is associated with a virtual router.
You can route several different protocols and define a retry policy for a
route.
Traffic can be routed to one or more virtual nodes.
For more information about routes, see
[Routes](https://docs.aws.amazon.com/app-mesh/latest/userguide/routes.html).
"""
def create_route(client, mesh_name, virtual_router_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualRouter/#{URI.encode(virtual_router_name)}/routes"
headers = []
{query_, input} =
[
{"meshOwner", "meshOwner"},
]
|> AWS.Request.build_params(input)
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Creates a virtual gateway.
A virtual gateway allows resources outside your mesh to communicate to
resources that
are inside your mesh. The virtual gateway represents an Envoy proxy running
in an Amazon ECS
task, in a Kubernetes service, or on an Amazon EC2 instance. Unlike a
virtual node, which
represents an Envoy running with an application, a virtual gateway
represents Envoy deployed by itself.
For more information about virtual gateways, see [Virtual
gateways](https://docs.aws.amazon.com/app-mesh/latest/userguide/virtual_gateways.html).
"""
def create_virtual_gateway(client, mesh_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualGateways"
headers = []
{query_, input} =
[
{"meshOwner", "meshOwner"},
]
|> AWS.Request.build_params(input)
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Creates a virtual node within a service mesh.

A virtual node acts as a logical pointer to a particular task group, such as an
Amazon ECS service or a Kubernetes deployment. When you create a virtual node,
you can specify the service discovery information for your task group, and
whether the proxy running in a task group will communicate with other proxies
using Transport Layer Security (TLS).

You define a `listener` for any inbound traffic that your virtual node expects.
Any virtual service that your virtual node expects to communicate to is
specified as a `backend`.

The response metadata for your new virtual node contains the `arn` that is
associated with the virtual node. Set this value (either the full ARN or the
truncated resource name: for example, `mesh/default/virtualNode/simpleapp`) as
the `APPMESH_VIRTUAL_NODE_NAME` environment variable for your task group's
Envoy proxy container in your task definition or pod spec. This is then mapped
to the `node.id` and `node.cluster` Envoy parameters.

Note: if you require your Envoy stats or tracing to use a different name, you
can override the `node.cluster` value that is set by `APPMESH_VIRTUAL_NODE_NAME`
with the `APPMESH_VIRTUAL_NODE_CLUSTER` environment variable.

For more information about virtual nodes, see
[Virtual nodes](https://docs.aws.amazon.com/app-mesh/latest/userguide/virtual_nodes.html).
"""
def create_virtual_node(client, mesh_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualNodes"
headers = []
{query_, input} =
[
{"meshOwner", "meshOwner"},
]
|> AWS.Request.build_params(input)
request(client, :put, path_, query_, headers, input, options, 200)
end
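# Illustrative sketch (same assumptions as above). On success, the response
# metadata `arn` is the value the docs above say to export as
# `APPMESH_VIRTUAL_NODE_NAME` for the task's Envoy container:
#
#     node_spec = %{
#       "listeners" => [%{"portMapping" => %{"port" => 8080, "protocol" => "http"}}],
#       "serviceDiscovery" => %{"dns" => %{"hostname" => "my-service.local"}}
#     }
#
#     {:ok, %{"virtualNode" => %{"metadata" => %{"arn" => _arn}}}, _resp} =
#       AWS.AppMesh.create_virtual_node(client, "my-mesh",
#         %{"virtualNodeName" => "my-node", "spec" => node_spec})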
@doc """
Creates a virtual router within a service mesh.

Specify a `listener` for any inbound traffic that your virtual router receives.
Create a virtual router for each protocol and port that you need to route.
Virtual routers handle traffic for one or more virtual services within your
mesh. After you create your virtual router, create and associate routes for
your virtual router that direct incoming requests to different virtual nodes.

For more information about virtual routers, see
[Virtual routers](https://docs.aws.amazon.com/app-mesh/latest/userguide/virtual_routers.html).
"""
def create_virtual_router(client, mesh_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualRouters"
headers = []
{query_, input} =
[
{"meshOwner", "meshOwner"},
]
|> AWS.Request.build_params(input)
request(client, :put, path_, query_, headers, input, options, 200)
end
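# Illustrative sketch (same assumptions as above):
#
#     AWS.AppMesh.create_virtual_router(client, "my-mesh", %{
#       "virtualRouterName" => "my-virtual-router",
#       "spec" => %{"listeners" => [%{"portMapping" => %{"port" => 8080, "protocol" => "http"}}]}
#     })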
@doc """
Creates a virtual service within a service mesh.

A virtual service is an abstraction of a real service that is provided by a
virtual node directly or indirectly by means of a virtual router. Dependent
services call your virtual service by its `virtualServiceName`, and those
requests are routed to the virtual node or virtual router that is specified as
the provider for the virtual service.

For more information about virtual services, see
[Virtual services](https://docs.aws.amazon.com/app-mesh/latest/userguide/virtual_services.html).
"""
def create_virtual_service(client, mesh_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualServices"
headers = []
{query_, input} =
[
{"meshOwner", "meshOwner"},
]
|> AWS.Request.build_params(input)
request(client, :put, path_, query_, headers, input, options, 200)
end
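# Illustrative sketch (same assumptions as above); here the virtual router
# created earlier is used as the provider:
#
#     AWS.AppMesh.create_virtual_service(client, "my-mesh", %{
#       "virtualServiceName" => "my-svc.local",
#       "spec" => %{"provider" => %{"virtualRouter" => %{"virtualRouterName" => "my-virtual-router"}}}
#     })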
@doc """
Deletes an existing gateway route.
"""
def delete_gateway_route(client, gateway_route_name, mesh_name, virtual_gateway_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualGateway/#{URI.encode(virtual_gateway_name)}/gatewayRoutes/#{URI.encode(gateway_route_name)}"
headers = []
{query_, input} =
[
{"meshOwner", "meshOwner"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, 200)
end
@doc """
Deletes an existing service mesh.

You must delete all resources (virtual services, routes, virtual routers, and
virtual nodes) in the service mesh before you can delete the mesh itself.
"""
def delete_mesh(client, mesh_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, 200)
end
@doc """
Deletes an existing route.
"""
def delete_route(client, mesh_name, route_name, virtual_router_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualRouter/#{URI.encode(virtual_router_name)}/routes/#{URI.encode(route_name)}"
headers = []
{query_, input} =
[
{"meshOwner", "meshOwner"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, 200)
end
@doc """
Deletes an existing virtual gateway.

You cannot delete a virtual gateway if any gateway routes are associated to it.
"""
def delete_virtual_gateway(client, mesh_name, virtual_gateway_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualGateways/#{URI.encode(virtual_gateway_name)}"
headers = []
{query_, input} =
[
{"meshOwner", "meshOwner"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, 200)
end
@doc """
Deletes an existing virtual node.

You must delete any virtual services that list a virtual node as a service
provider before you can delete the virtual node itself.
"""
def delete_virtual_node(client, mesh_name, virtual_node_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualNodes/#{URI.encode(virtual_node_name)}"
headers = []
{query_, input} =
[
{"meshOwner", "meshOwner"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, 200)
end
@doc """
Deletes an existing virtual router.

You must delete any routes associated with the virtual router before you can
delete the router itself.
"""
def delete_virtual_router(client, mesh_name, virtual_router_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualRouters/#{URI.encode(virtual_router_name)}"
headers = []
{query_, input} =
[
{"meshOwner", "meshOwner"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, 200)
end
@doc """
Deletes an existing virtual service.
"""
def delete_virtual_service(client, mesh_name, virtual_service_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualServices/#{URI.encode(virtual_service_name)}"
headers = []
{query_, input} =
[
{"meshOwner", "meshOwner"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, 200)
end
@doc """
Describes an existing gateway route.
"""
def describe_gateway_route(client, gateway_route_name, mesh_name, virtual_gateway_name, mesh_owner \\ nil, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualGateway/#{URI.encode(virtual_gateway_name)}/gatewayRoutes/#{URI.encode(gateway_route_name)}"
headers = []
query_ = []
query_ = if !is_nil(mesh_owner) do
[{"meshOwner", mesh_owner} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Describes an existing service mesh.
"""
def describe_mesh(client, mesh_name, mesh_owner \\ nil, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}"
headers = []
query_ = []
query_ = if !is_nil(mesh_owner) do
[{"meshOwner", mesh_owner} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
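# Illustrative sketch (same assumptions as above); `mesh_owner` is only needed
# when describing a mesh shared from another account:
#
#     {:ok, %{"mesh" => _mesh}, _resp} = AWS.AppMesh.describe_mesh(client, "my-mesh")
#     AWS.AppMesh.describe_mesh(client, "shared-mesh", "111122223333")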
@doc """
Describes an existing route.
"""
def describe_route(client, mesh_name, route_name, virtual_router_name, mesh_owner \\ nil, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualRouter/#{URI.encode(virtual_router_name)}/routes/#{URI.encode(route_name)}"
headers = []
query_ = []
query_ = if !is_nil(mesh_owner) do
[{"meshOwner", mesh_owner} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Describes an existing virtual gateway.
"""
def describe_virtual_gateway(client, mesh_name, virtual_gateway_name, mesh_owner \\ nil, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualGateways/#{URI.encode(virtual_gateway_name)}"
headers = []
query_ = []
query_ = if !is_nil(mesh_owner) do
[{"meshOwner", mesh_owner} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Describes an existing virtual node.
"""
def describe_virtual_node(client, mesh_name, virtual_node_name, mesh_owner \\ nil, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualNodes/#{URI.encode(virtual_node_name)}"
headers = []
query_ = []
query_ = if !is_nil(mesh_owner) do
[{"meshOwner", mesh_owner} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Describes an existing virtual router.
"""
def describe_virtual_router(client, mesh_name, virtual_router_name, mesh_owner \\ nil, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualRouters/#{URI.encode(virtual_router_name)}"
headers = []
query_ = []
query_ = if !is_nil(mesh_owner) do
[{"meshOwner", mesh_owner} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Describes an existing virtual service.
"""
def describe_virtual_service(client, mesh_name, virtual_service_name, mesh_owner \\ nil, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualServices/#{URI.encode(virtual_service_name)}"
headers = []
query_ = []
query_ = if !is_nil(mesh_owner) do
[{"meshOwner", mesh_owner} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns a list of existing gateway routes that are associated to a virtual
gateway.
"""
def list_gateway_routes(client, mesh_name, virtual_gateway_name, limit \\ nil, mesh_owner \\ nil, next_token \\ nil, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualGateway/#{URI.encode(virtual_gateway_name)}/gatewayRoutes"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(mesh_owner) do
[{"meshOwner", mesh_owner} | query_]
else
query_
end
query_ = if !is_nil(limit) do
[{"limit", limit} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns a list of existing service meshes.
"""
def list_meshes(client, limit \\ nil, next_token \\ nil, options \\ []) do
path_ = "/v20190125/meshes"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(limit) do
[{"limit", limit} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
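# Illustrative pagination sketch (same assumptions as above); the parsed body
# is expected to carry "meshes" and, when more results remain, a "nextToken":
#
#     defmodule ListAllMeshes do
#       # Follows nextToken until the full list of meshes has been accumulated.
#       def run(client, token \\ nil, acc \\ []) do
#         {:ok, body, _resp} = AWS.AppMesh.list_meshes(client, 50, token)
#
#         acc = acc ++ Map.get(body, "meshes", [])
#
#         case Map.get(body, "nextToken") do
#           nil -> acc
#           next -> run(client, next, acc)
#         end
#       end
#     end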
@doc """
Returns a list of existing routes in a service mesh.
"""
def list_routes(client, mesh_name, virtual_router_name, limit \\ nil, mesh_owner \\ nil, next_token \\ nil, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualRouter/#{URI.encode(virtual_router_name)}/routes"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(mesh_owner) do
[{"meshOwner", mesh_owner} | query_]
else
query_
end
query_ = if !is_nil(limit) do
[{"limit", limit} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
List the tags for an App Mesh resource.
"""
def list_tags_for_resource(client, limit \\ nil, next_token \\ nil, resource_arn, options \\ []) do
path_ = "/v20190125/tags"
headers = []
query_ = []
query_ = if !is_nil(resource_arn) do
[{"resourceArn", resource_arn} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(limit) do
[{"limit", limit} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns a list of existing virtual gateways in a service mesh.
"""
def list_virtual_gateways(client, mesh_name, limit \\ nil, mesh_owner \\ nil, next_token \\ nil, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualGateways"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(mesh_owner) do
[{"meshOwner", mesh_owner} | query_]
else
query_
end
query_ = if !is_nil(limit) do
[{"limit", limit} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns a list of existing virtual nodes.
"""
def list_virtual_nodes(client, mesh_name, limit \\ nil, mesh_owner \\ nil, next_token \\ nil, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualNodes"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(mesh_owner) do
[{"meshOwner", mesh_owner} | query_]
else
query_
end
query_ = if !is_nil(limit) do
[{"limit", limit} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns a list of existing virtual routers in a service mesh.
"""
def list_virtual_routers(client, mesh_name, limit \\ nil, mesh_owner \\ nil, next_token \\ nil, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualRouters"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(mesh_owner) do
[{"meshOwner", mesh_owner} | query_]
else
query_
end
query_ = if !is_nil(limit) do
[{"limit", limit} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Returns a list of existing virtual services in a service mesh.
"""
def list_virtual_services(client, mesh_name, limit \\ nil, mesh_owner \\ nil, next_token \\ nil, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualServices"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(mesh_owner) do
[{"meshOwner", mesh_owner} | query_]
else
query_
end
query_ = if !is_nil(limit) do
[{"limit", limit} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Associates the specified tags to a resource with the specified `resourceArn`.

If existing tags on a resource aren't specified in the request parameters, they
aren't changed. When a resource is deleted, the tags associated with that
resource are also deleted.
"""
def tag_resource(client, input, options \\ []) do
path_ = "/v20190125/tag"
headers = []
{query_, input} =
[
{"resourceArn", "resourceArn"},
]
|> AWS.Request.build_params(input)
request(client, :put, path_, query_, headers, input, options, 200)
end
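# Illustrative sketch (same assumptions as above); `resourceArn` is lifted out
# of the input and into the query string by `AWS.Request.build_params/2`, while
# the remaining keys are sent in the request body:
#
#     AWS.AppMesh.tag_resource(client, %{
#       "resourceArn" => "arn:aws:appmesh:us-east-1:111122223333:mesh/my-mesh",
#       "tags" => [%{"key" => "environment", "value" => "production"}]
#     })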
@doc """
Deletes specified tags from a resource.
"""
def untag_resource(client, input, options \\ []) do
path_ = "/v20190125/untag"
headers = []
{query_, input} =
[
{"resourceArn", "resourceArn"},
]
|> AWS.Request.build_params(input)
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Updates an existing gateway route that is associated to a specified virtual
gateway in a service mesh.
"""
def update_gateway_route(client, gateway_route_name, mesh_name, virtual_gateway_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualGateway/#{URI.encode(virtual_gateway_name)}/gatewayRoutes/#{URI.encode(gateway_route_name)}"
headers = []
{query_, input} =
[
{"meshOwner", "meshOwner"},
]
|> AWS.Request.build_params(input)
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Updates an existing service mesh.
"""
def update_mesh(client, mesh_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Updates an existing route for a specified service mesh and virtual router.
"""
def update_route(client, mesh_name, route_name, virtual_router_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualRouter/#{URI.encode(virtual_router_name)}/routes/#{URI.encode(route_name)}"
headers = []
{query_, input} =
[
{"meshOwner", "meshOwner"},
]
|> AWS.Request.build_params(input)
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Updates an existing virtual gateway in a specified service mesh.
"""
def update_virtual_gateway(client, mesh_name, virtual_gateway_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualGateways/#{URI.encode(virtual_gateway_name)}"
headers = []
{query_, input} =
[
{"meshOwner", "meshOwner"},
]
|> AWS.Request.build_params(input)
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Updates an existing virtual node in a specified service mesh.
"""
def update_virtual_node(client, mesh_name, virtual_node_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualNodes/#{URI.encode(virtual_node_name)}"
headers = []
{query_, input} =
[
{"meshOwner", "meshOwner"},
]
|> AWS.Request.build_params(input)
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Updates an existing virtual router in a specified service mesh.
"""
def update_virtual_router(client, mesh_name, virtual_router_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualRouters/#{URI.encode(virtual_router_name)}"
headers = []
{query_, input} =
[
{"meshOwner", "meshOwner"},
]
|> AWS.Request.build_params(input)
request(client, :put, path_, query_, headers, input, options, 200)
end
@doc """
Updates an existing virtual service in a specified service mesh.
"""
def update_virtual_service(client, mesh_name, virtual_service_name, input, options \\ []) do
path_ = "/v20190125/meshes/#{URI.encode(mesh_name)}/virtualServices/#{URI.encode(virtual_service_name)}"
headers = []
{query_, input} =
[
{"meshOwner", "meshOwner"},
]
|> AWS.Request.build_params(input)
request(client, :put, path_, query_, headers, input, options, 200)
end
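# Shared request helper: builds the request URL, signs it with AWS Signature
# Version 4 via `AWS.Request.sign_v4/5`, performs it with HTTPoison, and decodes
# the JSON response body with Poison on success.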
@spec request(AWS.Client.t(), atom(), binary(), list(), list(), map() | nil, list(), pos_integer()) ::
{:ok, Poison.Parser.t(), Poison.Response.t()}
| {:error, Poison.Parser.t()}
| {:error, HTTPoison.Error.t()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
client = %{client | service: "appmesh"}
host = build_host("appmesh", client)
url = host
|> build_url(path, client)
|> add_query(query)
additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
headers = AWS.Request.add_headers(additional_headers, headers)
payload = encode_payload(input)
headers = AWS.Request.sign_v4(client, method, url, headers, payload)
perform_request(method, url, payload, headers, options, success_status_code)
end
defp perform_request(method, url, payload, headers, options, nil) do
case HTTPoison.request(method, url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
{:ok, response}
{:ok, %HTTPoison.Response{status_code: status_code, body: body} = response}
when status_code == 200 or status_code == 202 or status_code == 204 ->
{:ok, Poison.Parser.parse!(body, %{}), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body, %{})
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp perform_request(method, url, payload, headers, options, success_status_code) do
case HTTPoison.request(method, url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: ^success_status_code, body: ""} = response} ->
{:ok, %{}, response}
{:ok, %HTTPoison.Response{status_code: ^success_status_code, body: body} = response} ->
{:ok, Poison.Parser.parse!(body, %{}), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body, %{})
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, path, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}#{path}"
end
defp add_query(url, []) do
url
end
defp add_query(url, query) do
querystring = AWS.Util.encode_query(query)
"#{url}?#{querystring}"
end
defp encode_payload(input) do
if input != nil, do: Poison.Encoder.encode(input, %{}), else: ""
end
end
# ---- source file: lib/aws/app_mesh.ex ----
defmodule Benchmarks.GoogleMessage3.Message10576 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{}
defstruct []
end
defmodule Benchmarks.GoogleMessage3.Message10154 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field10192: binary,
field10193: integer
}
defstruct [:field10192, :field10193]
field :field10192, 1, optional: true, type: :bytes
field :field10193, 2, optional: true, type: :int32
end
defmodule Benchmarks.GoogleMessage3.Message8944 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field9045: String.t(),
field9046: String.t(),
field9047: String.t(),
field9048: String.t(),
field9049: integer,
field9050: integer,
field9051: float | :infinity | :negative_infinity | :nan,
field9052: float | :infinity | :negative_infinity | :nan,
field9053: String.t(),
field9054: integer,
field9055: boolean,
field9056: integer,
field9057: integer,
field9058: integer,
field9059: float | :infinity | :negative_infinity | :nan,
field9060: float | :infinity | :negative_infinity | :nan,
field9061: float | :infinity | :negative_infinity | :nan,
field9062: float | :infinity | :negative_infinity | :nan,
field9063: float | :infinity | :negative_infinity | :nan,
field9064: boolean,
field9065: float | :infinity | :negative_infinity | :nan,
field9066: integer,
field9067: Benchmarks.GoogleMessage3.Enum8945.t(),
field9068: integer,
field9069: integer,
field9070: float | :infinity | :negative_infinity | :nan,
field9071: float | :infinity | :negative_infinity | :nan,
field9072: integer,
field9073: integer,
field9074: float | :infinity | :negative_infinity | :nan,
field9075: float | :infinity | :negative_infinity | :nan,
field9076: integer,
field9077: integer,
field9078: Benchmarks.GoogleMessage3.Enum8951.t(),
field9079: String.t(),
field9080: String.t(),
field9081: String.t(),
field9082: float | :infinity | :negative_infinity | :nan,
field9083: float | :infinity | :negative_infinity | :nan,
field9084: float | :infinity | :negative_infinity | :nan,
field9085: float | :infinity | :negative_infinity | :nan,
field9086: Benchmarks.GoogleMessage3.UnusedEnum.t(),
field9087: float | :infinity | :negative_infinity | :nan,
field9088: float | :infinity | :negative_infinity | :nan,
field9089: float | :infinity | :negative_infinity | :nan,
field9090: float | :infinity | :negative_infinity | :nan,
field9091: float | :infinity | :negative_infinity | :nan,
field9092: float | :infinity | :negative_infinity | :nan,
field9093: Benchmarks.GoogleMessage3.UnusedEnum.t(),
field9094: Benchmarks.GoogleMessage3.UnusedEnum.t(),
field9095: String.t(),
field9096: String.t(),
field9097: String.t(),
field9098: String.t(),
field9099: String.t(),
field9100: String.t(),
field9101: String.t(),
field9102: String.t(),
field9103: String.t(),
field9104: String.t(),
field9105: Benchmarks.GoogleMessage3.Message8939.t() | nil,
field9106: integer
}
defstruct [
:field9045,
:field9046,
:field9047,
:field9048,
:field9049,
:field9050,
:field9051,
:field9052,
:field9053,
:field9054,
:field9055,
:field9056,
:field9057,
:field9058,
:field9059,
:field9060,
:field9061,
:field9062,
:field9063,
:field9064,
:field9065,
:field9066,
:field9067,
:field9068,
:field9069,
:field9070,
:field9071,
:field9072,
:field9073,
:field9074,
:field9075,
:field9076,
:field9077,
:field9078,
:field9079,
:field9080,
:field9081,
:field9082,
:field9083,
:field9084,
:field9085,
:field9086,
:field9087,
:field9088,
:field9089,
:field9090,
:field9091,
:field9092,
:field9093,
:field9094,
:field9095,
:field9096,
:field9097,
:field9098,
:field9099,
:field9100,
:field9101,
:field9102,
:field9103,
:field9104,
:field9105,
:field9106
]
field :field9045, 2, optional: true, type: :string
field :field9046, 3, optional: true, type: :string
field :field9047, 23, optional: true, type: :string
field :field9048, 52, optional: true, type: :string
field :field9049, 53, optional: true, type: :int32
field :field9050, 54, optional: true, type: :int32
field :field9051, 55, optional: true, type: :float
field :field9052, 56, optional: true, type: :float
field :field9053, 57, optional: true, type: :string
field :field9054, 1, optional: true, type: :int64
field :field9055, 4, optional: true, type: :bool
field :field9056, 5, optional: true, type: :int32
field :field9057, 6, optional: true, type: :int32
field :field9058, 7, optional: true, type: :int32
field :field9059, 8, optional: true, type: :float
field :field9060, 11, optional: true, type: :float
field :field9061, 9, optional: true, type: :float
field :field9062, 10, optional: true, type: :float
field :field9063, 13, optional: true, type: :float
field :field9064, 14, optional: true, type: :bool
field :field9065, 70, optional: true, type: :float
field :field9066, 71, optional: true, type: :int32
field :field9067, 15, optional: true, type: Benchmarks.GoogleMessage3.Enum8945, enum: true
field :field9068, 16, optional: true, type: :int32
field :field9069, 17, optional: true, type: :int32
field :field9070, 18, optional: true, type: :float
field :field9071, 19, optional: true, type: :float
field :field9072, 28, optional: true, type: :int32
field :field9073, 29, optional: true, type: :int32
field :field9074, 60, optional: true, type: :float
field :field9075, 61, optional: true, type: :float
field :field9076, 72, optional: true, type: :int32
field :field9077, 73, optional: true, type: :int32
field :field9078, 62, optional: true, type: Benchmarks.GoogleMessage3.Enum8951, enum: true
field :field9079, 20, optional: true, type: :string
field :field9080, 21, optional: true, type: :string
field :field9081, 22, optional: true, type: :string
field :field9082, 31, optional: true, type: :double
field :field9083, 32, optional: true, type: :double
field :field9084, 33, optional: true, type: :double
field :field9085, 36, optional: true, type: :double
field :field9086, 37, optional: true, type: Benchmarks.GoogleMessage3.UnusedEnum, enum: true
field :field9087, 38, optional: true, type: :double
field :field9088, 39, optional: true, type: :double
field :field9089, 63, optional: true, type: :double
field :field9090, 64, optional: true, type: :double
field :field9091, 65, optional: true, type: :double
field :field9092, 34, optional: true, type: :double
field :field9093, 35, optional: true, type: Benchmarks.GoogleMessage3.UnusedEnum, enum: true
field :field9094, 66, optional: true, type: Benchmarks.GoogleMessage3.UnusedEnum, enum: true
field :field9095, 40, optional: true, type: :string
field :field9096, 41, optional: true, type: :string
field :field9097, 42, optional: true, type: :string
field :field9098, 43, optional: true, type: :string
field :field9099, 44, optional: true, type: :string
field :field9100, 45, optional: true, type: :string
field :field9101, 46, optional: true, type: :string
field :field9102, 47, optional: true, type: :string
field :field9103, 48, optional: true, type: :string
field :field9104, 49, optional: true, type: :string
field :field9105, 100, optional: true, type: Benchmarks.GoogleMessage3.Message8939
field :field9106, 101, optional: true, type: :int64
end
defmodule Benchmarks.GoogleMessage3.Message9182 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field9205: String.t(),
field9206: String.t(),
field9207: float | :infinity | :negative_infinity | :nan,
field9208: integer,
field9209: integer,
field9210: integer,
field9211: integer,
field9212: float | :infinity | :negative_infinity | :nan,
field9213: float | :infinity | :negative_infinity | :nan,
field9214: boolean,
field9215: [Benchmarks.GoogleMessage3.UnusedEmptyMessage.t()],
field9216: [Benchmarks.GoogleMessage3.UnusedEmptyMessage.t()],
field9217: [Benchmarks.GoogleMessage3.Message9181.t()],
field9218: boolean,
field9219: boolean,
field9220: boolean,
field9221: Benchmarks.GoogleMessage3.Message9164.t() | nil,
field9222: Benchmarks.GoogleMessage3.Message9165.t() | nil,
field9223: Benchmarks.GoogleMessage3.Message9166.t() | nil,
field9224: float | :infinity | :negative_infinity | :nan,
field9225: Benchmarks.GoogleMessage3.Message9151.t() | nil,
field9226: float | :infinity | :negative_infinity | :nan,
field9227: float | :infinity | :negative_infinity | :nan,
field9228: float | :infinity | :negative_infinity | :nan,
field9229: float | :infinity | :negative_infinity | :nan,
field9230: float | :infinity | :negative_infinity | :nan,
__pb_extensions__: map
}
defstruct [
:field9205,
:field9206,
:field9207,
:field9208,
:field9209,
:field9210,
:field9211,
:field9212,
:field9213,
:field9214,
:field9215,
:field9216,
:field9217,
:field9218,
:field9219,
:field9220,
:field9221,
:field9222,
:field9223,
:field9224,
:field9225,
:field9226,
:field9227,
:field9228,
:field9229,
:field9230,
:__pb_extensions__
]
field :field9205, 1, optional: true, type: :string
field :field9206, 2, optional: true, type: :string
field :field9207, 16, optional: true, type: :float
field :field9208, 17, optional: true, type: :int32
field :field9209, 27, optional: true, type: :int32
field :field9210, 7, optional: true, type: :int32
field :field9211, 8, optional: true, type: :int32
field :field9212, 26, optional: true, type: :float
field :field9213, 22, optional: true, type: :float
field :field9214, 28, optional: true, type: :bool
field :field9215, 21, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field9216, 25, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field9217, 29, repeated: true, type: Benchmarks.GoogleMessage3.Message9181
field :field9218, 18, optional: true, type: :bool
field :field9219, 19, optional: true, type: :bool
field :field9220, 20, optional: true, type: :bool
field :field9221, 30, optional: true, type: Benchmarks.GoogleMessage3.Message9164
field :field9222, 31, optional: true, type: Benchmarks.GoogleMessage3.Message9165
field :field9223, 32, optional: true, type: Benchmarks.GoogleMessage3.Message9166
field :field9224, 33, optional: true, type: :float
field :field9225, 34, optional: true, type: Benchmarks.GoogleMessage3.Message9151
field :field9226, 35, optional: true, type: :float
field :field9227, 36, optional: true, type: :float
field :field9228, 37, optional: true, type: :float
field :field9229, 38, optional: true, type: :float
field :field9230, 39, optional: true, type: :float
extensions [{3, 7}, {9, 16}, {23, 24}, {24, 25}, {1000, 536_870_912}]
end
defmodule Benchmarks.GoogleMessage3.Message9160 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field9161: integer,
field9162: binary
}
defstruct [:field9161, :field9162]
field :field9161, 1, optional: true, type: :int32
field :field9162, 2, optional: true, type: :bytes
end
defmodule Benchmarks.GoogleMessage3.Message9242 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field9327: [Benchmarks.GoogleMessage3.Enum9243.t()]
}
defstruct [:field9327]
field :field9327, 1, repeated: true, type: Benchmarks.GoogleMessage3.Enum9243, enum: true
end
defmodule Benchmarks.GoogleMessage3.Message8890 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field8916: [Benchmarks.GoogleMessage3.Message8888.t()]
}
defstruct [:field8916]
field :field8916, 1, repeated: true, type: Benchmarks.GoogleMessage3.Message8888
end
defmodule Benchmarks.GoogleMessage3.Message9123 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field9135: float | :infinity | :negative_infinity | :nan
}
defstruct [:field9135]
field :field9135, 1, optional: true, type: :float
end
defmodule Benchmarks.GoogleMessage3.Message9628 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field9673: Benchmarks.GoogleMessage3.Message9627.t() | nil,
field9674: String.t(),
field9675: [integer],
field9676: integer
}
defstruct [:field9673, :field9674, :field9675, :field9676]
field :field9673, 1, optional: true, type: Benchmarks.GoogleMessage3.Message9627
field :field9674, 2, optional: true, type: :string
field :field9675, 3, repeated: true, type: :int32
field :field9676, 4, optional: true, type: :int32
end
defmodule Benchmarks.GoogleMessage3.Message11014 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field11780: integer,
field11781: String.t(),
field11782: boolean,
field11783: Benchmarks.GoogleMessage3.Enum11107.t(),
field11784: integer,
field11785: float | :infinity | :negative_infinity | :nan,
field11786: integer,
field11787: integer,
field11788: float | :infinity | :negative_infinity | :nan,
field11789: float | :infinity | :negative_infinity | :nan,
field11790: integer,
field11791: boolean,
field11792: integer,
field11793: boolean,
field11794: Benchmarks.GoogleMessage3.Enum11541.t(),
field11795: float | :infinity | :negative_infinity | :nan,
field11796: float | :infinity | :negative_infinity | :nan,
field11797: integer,
field11798: integer,
field11799: Benchmarks.GoogleMessage3.UnusedEnum.t(),
field11800: Benchmarks.GoogleMessage3.Enum11468.t(),
field11801: integer,
field11802: Benchmarks.GoogleMessage3.UnusedEnum.t(),
field11803: integer,
field11804: integer,
field11805: integer,
field11806: Benchmarks.GoogleMessage3.UnusedEmptyMessage.t() | nil,
field11807: [Benchmarks.GoogleMessage3.Message11018.t()],
field11808: boolean,
field11809: boolean,
field11810: boolean,
field11811: boolean,
field11812: boolean,
field11813: boolean,
field11814: boolean,
field11815: Benchmarks.GoogleMessage3.Enum11107.t(),
field11816: integer,
field11817: float | :infinity | :negative_infinity | :nan,
field11818: integer,
field11819: integer,
field11820: integer,
field11821: integer,
field11822: integer,
field11823: integer,
field11824: integer,
field11825: float | :infinity | :negative_infinity | :nan,
field11826: [Benchmarks.GoogleMessage3.Message11020.t()],
field11827: [Benchmarks.GoogleMessage3.UnusedEmptyMessage.t()],
field11828: float | :infinity | :negative_infinity | :nan,
field11829: String.t(),
field11830: integer,
field11831: integer,
field11832: non_neg_integer,
field11833: boolean,
field11834: boolean,
field11835: String.t(),
field11836: integer,
field11837: integer,
field11838: integer,
field11839: integer,
field11840: Benchmarks.GoogleMessage3.Enum11022.t(),
field11841: Benchmarks.GoogleMessage3.Message11013.t() | nil,
field11842: float | :infinity | :negative_infinity | :nan,
field11843: integer,
field11844: boolean
}
defstruct [
:field11780,
:field11781,
:field11782,
:field11783,
:field11784,
:field11785,
:field11786,
:field11787,
:field11788,
:field11789,
:field11790,
:field11791,
:field11792,
:field11793,
:field11794,
:field11795,
:field11796,
:field11797,
:field11798,
:field11799,
:field11800,
:field11801,
:field11802,
:field11803,
:field11804,
:field11805,
:field11806,
:field11807,
:field11808,
:field11809,
:field11810,
:field11811,
:field11812,
:field11813,
:field11814,
:field11815,
:field11816,
:field11817,
:field11818,
:field11819,
:field11820,
:field11821,
:field11822,
:field11823,
:field11824,
:field11825,
:field11826,
:field11827,
:field11828,
:field11829,
:field11830,
:field11831,
:field11832,
:field11833,
:field11834,
:field11835,
:field11836,
:field11837,
:field11838,
:field11839,
:field11840,
:field11841,
:field11842,
:field11843,
:field11844
]
field :field11780, 40, optional: true, type: :int32
field :field11781, 46, optional: true, type: :string
field :field11782, 47, optional: true, type: :bool
field :field11783, 1, optional: true, type: Benchmarks.GoogleMessage3.Enum11107, enum: true
field :field11784, 2, optional: true, type: :int32
field :field11785, 4, optional: true, type: :double
field :field11786, 5, optional: true, type: :int32
field :field11787, 6, optional: true, type: :int32
field :field11788, 7, optional: true, type: :double
field :field11789, 8, optional: true, type: :double
field :field11790, 9, optional: true, type: :int64
field :field11791, 10, optional: true, type: :bool
field :field11792, 28, optional: true, type: :int64
field :field11793, 37, optional: true, type: :bool
field :field11794, 44, optional: true, type: Benchmarks.GoogleMessage3.Enum11541, enum: true
field :field11795, 49, optional: true, type: :double
field :field11796, 51, optional: true, type: :double
field :field11797, 54, optional: true, type: :int64
field :field11798, 55, optional: true, type: :int64
field :field11799, 57, optional: true, type: Benchmarks.GoogleMessage3.UnusedEnum, enum: true
field :field11800, 58, optional: true, type: Benchmarks.GoogleMessage3.Enum11468, enum: true
field :field11801, 59, optional: true, type: :int32
field :field11802, 60, optional: true, type: Benchmarks.GoogleMessage3.UnusedEnum, enum: true
field :field11803, 61, optional: true, type: :int32
field :field11804, 62, optional: true, type: :int32
field :field11805, 69, optional: true, type: :int32
field :field11806, 68, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field11807, 71, repeated: true, type: Benchmarks.GoogleMessage3.Message11018
field :field11808, 50, optional: true, type: :bool
field :field11809, 56, optional: true, type: :bool
field :field11810, 66, optional: true, type: :bool
field :field11811, 63, optional: true, type: :bool
field :field11812, 64, optional: true, type: :bool
field :field11813, 65, optional: true, type: :bool
field :field11814, 67, optional: true, type: :bool
field :field11815, 15, optional: true, type: Benchmarks.GoogleMessage3.Enum11107, enum: true
field :field11816, 16, optional: true, type: :int64
field :field11817, 17, optional: true, type: :double
field :field11818, 18, optional: true, type: :int64
field :field11819, 19, optional: true, type: :int32
field :field11820, 20, optional: true, type: :int64
field :field11821, 42, optional: true, type: :int32
field :field11822, 52, optional: true, type: :int64
field :field11823, 53, optional: true, type: :int64
field :field11824, 41, optional: true, type: :int64
field :field11825, 48, optional: true, type: :double
field :field11826, 70, repeated: true, type: Benchmarks.GoogleMessage3.Message11020
field :field11827, 72, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field11828, 25, optional: true, type: :double
field :field11829, 26, optional: true, type: :string
field :field11830, 27, optional: true, type: :int64
field :field11831, 32, optional: true, type: :int64
field :field11832, 33, optional: true, type: :uint64
field :field11833, 29, optional: true, type: :bool
field :field11834, 34, optional: true, type: :bool
field :field11835, 30, optional: true, type: :string
field :field11836, 3, optional: true, type: :int32
field :field11837, 31, optional: true, type: :int32
field :field11838, 73, optional: true, type: :int32
field :field11839, 35, optional: true, type: :int32
field :field11840, 36, optional: true, type: Benchmarks.GoogleMessage3.Enum11022, enum: true
field :field11841, 38, optional: true, type: Benchmarks.GoogleMessage3.Message11013
field :field11842, 39, optional: true, type: :double
field :field11843, 45, optional: true, type: :int32
field :field11844, 74, optional: true, type: :bool
end
defmodule Benchmarks.GoogleMessage3.Message10801 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field10812: Benchmarks.GoogleMessage3.Message10800.t() | nil,
field10813: [Benchmarks.GoogleMessage3.Message10802.t()],
field10814: integer
}
defstruct [:field10812, :field10813, :field10814]
field :field10812, 1, optional: true, type: Benchmarks.GoogleMessage3.Message10800
field :field10813, 2, repeated: true, type: Benchmarks.GoogleMessage3.Message10802
field :field10814, 3, optional: true, type: :int32
end
defmodule Benchmarks.GoogleMessage3.Message10749 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field10754: [Benchmarks.GoogleMessage3.Message10748.t()]
}
defstruct [:field10754]
field :field10754, 1, repeated: true, type: Benchmarks.GoogleMessage3.Message10748
end
defmodule Benchmarks.GoogleMessage3.Message8298 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field8321: Benchmarks.GoogleMessage3.Message7966.t() | nil,
field8322: integer,
field8323: String.t()
}
defstruct [:field8321, :field8322, :field8323]
field :field8321, 1, optional: true, type: Benchmarks.GoogleMessage3.Message7966
field :field8322, 2, optional: true, type: :int64
field :field8323, 3, optional: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message8300 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field8326: String.t(),
field8327: Benchmarks.GoogleMessage3.Message7966.t() | nil
}
defstruct [:field8326, :field8327]
field :field8326, 1, optional: true, type: :string
field :field8327, 2, optional: true, type: Benchmarks.GoogleMessage3.Message7966
end
defmodule Benchmarks.GoogleMessage3.Message8291 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field8306: String.t(),
field8307: integer,
field8308: String.t(),
field8309: String.t(),
field8310: Benchmarks.GoogleMessage3.Enum8292.t()
}
defstruct [:field8306, :field8307, :field8308, :field8309, :field8310]
field :field8306, 1, optional: true, type: :string
field :field8307, 2, optional: true, type: :int32
field :field8308, 3, optional: true, type: :string
field :field8309, 4, optional: true, type: :string
field :field8310, 5, optional: true, type: Benchmarks.GoogleMessage3.Enum8292, enum: true
end
defmodule Benchmarks.GoogleMessage3.Message8296 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field8311: Benchmarks.GoogleMessage3.Message7966.t() | nil,
field8312: String.t(),
field8313: Benchmarks.GoogleMessage3.Message7966.t() | nil,
field8314: integer,
field8315: integer,
field8316: String.t()
}
defstruct [:field8311, :field8312, :field8313, :field8314, :field8315, :field8316]
field :field8311, 1, optional: true, type: Benchmarks.GoogleMessage3.Message7966
field :field8312, 2, optional: true, type: :string
field :field8313, 3, optional: true, type: Benchmarks.GoogleMessage3.Message7966
field :field8314, 4, optional: true, type: :int32
field :field8315, 5, optional: true, type: :int32
field :field8316, 6, optional: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message7965 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field7967: integer,
field7968: integer
}
defstruct [:field7967, :field7968]
field :field7967, 1, optional: true, type: :int32
field :field7968, 2, optional: true, type: :int32
end
defmodule Benchmarks.GoogleMessage3.Message8290 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field8304: String.t(),
field8305: String.t()
}
defstruct [:field8304, :field8305]
field :field8304, 1, optional: true, type: :string
field :field8305, 2, optional: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message717 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field876: [String.t()],
field877: float | :infinity | :negative_infinity | :nan
}
defstruct [:field876, :field877]
field :field876, 1, repeated: true, type: :string
field :field877, 2, optional: true, type: :double
end
defmodule Benchmarks.GoogleMessage3.Message713 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field852: Benchmarks.GoogleMessage3.Message708.t() | nil,
field853: [String.t()]
}
defstruct [:field852, :field853]
field :field852, 1, required: true, type: Benchmarks.GoogleMessage3.Message708
field :field853, 2, repeated: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message705 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field807: String.t(),
field808: String.t(),
field809: String.t(),
field810: boolean,
field811: String.t(),
field812: String.t(),
field813: [String.t()]
}
defstruct [:field807, :field808, :field809, :field810, :field811, :field812, :field813]
field :field807, 1, required: true, type: :string
field :field808, 2, optional: true, type: :string
field :field809, 3, optional: true, type: :string
field :field810, 4, optional: true, type: :bool
field :field811, 5, optional: true, type: :string
field :field812, 6, optional: true, type: :string
field :field813, 7, repeated: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message709 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field829: [String.t()],
field830: [String.t()],
field831: [String.t()],
field832: [String.t()],
field833: [String.t()]
}
defstruct [:field829, :field830, :field831, :field832, :field833]
field :field829, 1, repeated: true, type: :string
field :field830, 2, repeated: true, type: :string
field :field831, 3, repeated: true, type: :string
field :field832, 4, repeated: true, type: :string
field :field833, 5, repeated: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message702 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field793: String.t(),
field794: String.t()
}
defstruct [:field793, :field794]
field :field793, 1, optional: true, type: :string
field :field794, 2, optional: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message714 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field854: String.t(),
field855: String.t(),
field856: String.t(),
field857: String.t(),
field858: non_neg_integer
}
defstruct [:field854, :field855, :field856, :field857, :field858]
field :field854, 1, optional: true, type: :string
field :field855, 2, optional: true, type: :string
field :field856, 3, optional: true, type: :string
field :field857, 4, optional: true, type: :string
field :field858, 5, optional: true, type: :uint32
end
defmodule Benchmarks.GoogleMessage3.Message710 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field834: [String.t()],
field835: String.t(),
field836: String.t(),
field837: [String.t()],
field838: [String.t()]
}
defstruct [:field834, :field835, :field836, :field837, :field838]
field :field834, 1, repeated: true, type: :string
field :field835, 2, optional: true, type: :string
field :field836, 3, optional: true, type: :string
field :field837, 4, repeated: true, type: :string
field :field838, 5, repeated: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message706 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field814: [String.t()],
field815: String.t(),
field816: [String.t()],
field817: [String.t()]
}
defstruct [:field814, :field815, :field816, :field817]
field :field814, 1, repeated: true, type: :string
field :field815, 2, optional: true, type: :string
field :field816, 3, repeated: true, type: :string
field :field817, 4, repeated: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message707 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field818: String.t(),
field819: String.t(),
field820: String.t(),
field821: boolean,
field822: [String.t()]
}
defstruct [:field818, :field819, :field820, :field821, :field822]
field :field818, 1, required: true, type: :string
field :field819, 2, required: true, type: :string
field :field820, 3, required: true, type: :string
field :field821, 4, optional: true, type: :bool
field :field822, 5, repeated: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message711 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field839: Benchmarks.GoogleMessage3.UnusedEmptyMessage.t() | nil,
field840: [String.t()],
field841: [String.t()],
field842: [String.t()]
}
defstruct [:field839, :field840, :field841, :field842]
field :field839, 1, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field840, 4, repeated: true, type: :string
field :field841, 2, repeated: true, type: :string
field :field842, 3, repeated: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message712 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field843: [String.t()],
field844: String.t(),
field845: String.t(),
field846: [String.t()],
field847: [String.t()],
field848: String.t(),
field849: [String.t()],
field850: String.t(),
field851: String.t()
}
defstruct [
:field843,
:field844,
:field845,
:field846,
:field847,
:field848,
:field849,
:field850,
:field851
]
field :field843, 1, repeated: true, type: :string
field :field844, 2, required: true, type: :string
field :field845, 3, optional: true, type: :string
field :field846, 4, repeated: true, type: :string
field :field847, 5, repeated: true, type: :string
field :field848, 6, optional: true, type: :string
field :field849, 7, repeated: true, type: :string
field :field850, 8, optional: true, type: :string
field :field851, 9, optional: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message8939.Message8940 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{}
defstruct []
end
defmodule Benchmarks.GoogleMessage3.Message8939.Message8941 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field9033: String.t(),
field9034: String.t(),
field9035: String.t(),
field9036: String.t(),
field9037: String.t(),
field9038: String.t()
}
defstruct [:field9033, :field9034, :field9035, :field9036, :field9037, :field9038]
field :field9033, 32, optional: true, type: :string
field :field9034, 33, optional: true, type: :string
field :field9035, 34, optional: true, type: :string
field :field9036, 35, optional: true, type: :string
field :field9037, 36, optional: true, type: :string
field :field9038, 37, optional: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message8939.Message8943 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field9039: String.t(),
field9040: String.t(),
field9041: String.t(),
field9042: String.t(),
field9043: String.t(),
field9044: String.t()
}
defstruct [:field9039, :field9040, :field9041, :field9042, :field9043, :field9044]
field :field9039, 1, optional: true, type: :string
field :field9040, 2, optional: true, type: :string
field :field9041, 3, optional: true, type: :string
field :field9042, 4, optional: true, type: :string
field :field9043, 5, optional: true, type: :string
field :field9044, 6, optional: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message8939 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field9010: String.t(),
field9011: String.t(),
field9012: String.t(),
field9013: [String.t()],
field9014: String.t(),
message8940: [any],
field9016: integer,
field9017: integer,
field9018: integer,
message8941: any,
field9020: Benchmarks.GoogleMessage3.Message8942.t() | nil,
field9021: [Benchmarks.GoogleMessage3.UnusedEmptyMessage.t()],
field9022: [String.t()],
field9023: String.t(),
field9024: String.t(),
field9025: String.t(),
field9026: String.t(),
field9027: String.t(),
field9028: String.t(),
field9029: Benchmarks.GoogleMessage3.UnusedEnum.t(),
field9030: Benchmarks.GoogleMessage3.UnusedEnum.t(),
message8943: any
}
defstruct [
:field9010,
:field9011,
:field9012,
:field9013,
:field9014,
:message8940,
:field9016,
:field9017,
:field9018,
:message8941,
:field9020,
:field9021,
:field9022,
:field9023,
:field9024,
:field9025,
:field9026,
:field9027,
:field9028,
:field9029,
:field9030,
:message8943
]
field :field9010, 1, optional: true, type: :string
field :field9011, 2, optional: true, type: :string
field :field9012, 3, optional: true, type: :string
field :field9013, 4, repeated: true, type: :string
field :field9014, 5, optional: true, type: :string
field :message8940, 11, repeated: true, type: :group
field :field9016, 21, optional: true, type: :int64
field :field9017, 22, optional: true, type: :int64
field :field9018, 23, optional: true, type: :int64
field :message8941, 31, optional: true, type: :group
field :field9020, 38, optional: true, type: Benchmarks.GoogleMessage3.Message8942
field :field9021, 39, repeated: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field9022, 41, repeated: true, type: :string
field :field9023, 42, optional: true, type: :string
field :field9024, 43, optional: true, type: :string
field :field9025, 44, optional: true, type: :string
field :field9026, 45, optional: true, type: :string
field :field9027, 46, optional: true, type: :string
field :field9028, 47, optional: true, type: :string
field :field9029, 48, optional: true, type: Benchmarks.GoogleMessage3.UnusedEnum, enum: true
field :field9030, 49, optional: true, type: Benchmarks.GoogleMessage3.UnusedEnum, enum: true
field :message8943, 51, optional: true, type: :group
end
defmodule Benchmarks.GoogleMessage3.Message9181 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field9204: String.t()
}
defstruct [:field9204]
field :field9204, 1, optional: true, type: :string
end
defmodule Benchmarks.GoogleMessage3.Message9164 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field9168: integer,
field9169: integer,
field9170: integer
}
defstruct [:field9168, :field9169, :field9170]
field :field9168, 1, optional: true, type: :int32
field :field9169, 2, optional: true, type: :int32
field :field9170, 3, optional: true, type: :int32
end
defmodule Benchmarks.GoogleMessage3.Message9165 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field9171: float | :infinity | :negative_infinity | :nan,
field9172: float | :infinity | :negative_infinity | :nan
}
defstruct [:field9171, :field9172]
field :field9171, 1, optional: true, type: :float
field :field9172, 2, optional: true, type: :float
end
defmodule Benchmarks.GoogleMessage3.Message9166 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field9173: float | :infinity | :negative_infinity | :nan,
field9174: integer
}
defstruct [:field9173, :field9174]
field :field9173, 1, optional: true, type: :float
field :field9174, 2, optional: true, type: :int32
end
defmodule Benchmarks.GoogleMessage3.Message9151 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field9152: float | :infinity | :negative_infinity | :nan,
field9153: float | :infinity | :negative_infinity | :nan,
field9154: float | :infinity | :negative_infinity | :nan,
field9155: float | :infinity | :negative_infinity | :nan,
field9156: float | :infinity | :negative_infinity | :nan,
field9157: float | :infinity | :negative_infinity | :nan,
field9158: float | :infinity | :negative_infinity | :nan,
field9159: float | :infinity | :negative_infinity | :nan
}
defstruct [
:field9152,
:field9153,
:field9154,
:field9155,
:field9156,
:field9157,
:field9158,
:field9159
]
field :field9152, 1, optional: true, type: :double
field :field9153, 2, optional: true, type: :double
field :field9154, 3, optional: true, type: :float
field :field9155, 4, optional: true, type: :float
field :field9156, 5, optional: true, type: :float
field :field9157, 6, optional: true, type: :float
field :field9158, 7, optional: true, type: :float
field :field9159, 8, optional: true, type: :float
end
defmodule Benchmarks.GoogleMessage3.Message8888 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field8908: integer,
field8909: Benchmarks.GoogleMessage3.Enum8900.t(),
field8910: [integer],
field8911: binary
}
defstruct [:field8908, :field8909, :field8910, :field8911]
field :field8908, 1, optional: true, type: :int32
field :field8909, 4, optional: true, type: Benchmarks.GoogleMessage3.Enum8900, enum: true
field :field8910, 2, repeated: true, type: :int32, packed: true
field :field8911, 3, optional: true, type: :bytes
end
defmodule Benchmarks.GoogleMessage3.Message9627 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field9668: integer,
field9669: integer,
field9670: integer,
field9671: integer,
field9672: float | :infinity | :negative_infinity | :nan
}
defstruct [:field9668, :field9669, :field9670, :field9671, :field9672]
field :field9668, 1, required: true, type: :int32
field :field9669, 2, required: true, type: :int32
field :field9670, 3, required: true, type: :int32
field :field9671, 4, required: true, type: :int32
field :field9672, 5, optional: true, type: :float
end
defmodule Benchmarks.GoogleMessage3.Message11020 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{}
defstruct []
end
defmodule Benchmarks.GoogleMessage3.Message11013 do
@moduledoc false
use Protobuf, syntax: :proto2
@type t :: %__MODULE__{
field11757: binary,
field11758: binary,
field11759: binary,
field11760: binary,
field11761: binary,
field11762: binary,
field11763: binary,
field11764: binary,
field11765: binary,
field11766: binary,
field11767: binary,
field11768: binary,
field11769: binary,
field11770: binary,
field11771: binary,
field11772: binary,
field11773: binary,
field11774: binary,
field11775: binary,
field11776: binary,
field11777: binary,
field11778: Benchmarks.GoogleMessage3.UnusedEmptyMessage.t() | nil,
field11779: [Benchmarks.GoogleMessage3.Message11011.t()]
}
defstruct [
:field11757,
:field11758,
:field11759,
:field11760,
:field11761,
:field11762,
:field11763,
:field11764,
:field11765,
:field11766,
:field11767,
:field11768,
:field11769,
:field11770,
:field11771,
:field11772,
:field11773,
:field11774,
:field11775,
:field11776,
:field11777,
:field11778,
:field11779
]
field :field11757, 19, optional: true, type: :bytes
field :field11758, 1, optional: true, type: :bytes
field :field11759, 2, optional: true, type: :bytes
field :field11760, 3, optional: true, type: :bytes
field :field11761, 4, optional: true, type: :bytes
field :field11762, 5, optional: true, type: :bytes
field :field11763, 6, optional: true, type: :bytes
field :field11764, 7, optional: true, type: :bytes
field :field11765, 8, optional: true, type: :bytes
field :field11766, 9, optional: true, type: :bytes
field :field11767, 10, optional: true, type: :bytes
field :field11768, 11, optional: true, type: :bytes
field :field11769, 12, optional: true, type: :bytes
field :field11770, 13, optional: true, type: :bytes
field :field11771, 14, optional: true, type: :bytes
field :field11772, 15, optional: true, type: :bytes
field :field11773, 16, optional: true, type: :bytes
field :field11774, 17, optional: true, type: :bytes
field :field11775, 18, optional: true, type: :bytes
field :field11776, 20, optional: true, type: :bytes
field :field11777, 21, optional: true, type: :bytes
field :field11778, 23, optional: true, type: Benchmarks.GoogleMessage3.UnusedEmptyMessage
field :field11779, 22, repeated: true, type: Benchmarks.GoogleMessage3.Message11011
end
|
bench/lib/datasets/google_message3/benchmark_message3_6.pb.ex
| 0.823115
| 0.565899
|
benchmark_message3_6.pb.ex
|
starcoder
|
defmodule VendingMachine.MakeCurrency do
alias VendingMachine.CurrencyNote
alias VendingMachine.CurrencyCoin
def make_currency(currency_specification) do
case currency_specification do
100.00 ->
%CurrencyNote{
background_color: "Teal",
fluorescent_strip_color: "Pink",
obverse: "<NAME>",
reverse: "Independence Hall"
}
50.00 ->
%CurrencyNote{
background_color: "Pink",
fluorescent_strip_color: "Yellow",
obverse: "<NAME>",
reverse: "United States Capitol"
}
20.00 ->
%CurrencyNote{
background_color: "Green",
fluorescent_strip_color: "Green",
obverse: "<NAME>",
reverse: "White House"
}
10.00 ->
%CurrencyNote{
background_color: "Orange",
fluorescent_strip_color: "Orange",
obverse: "<NAME>",
reverse: "Treasury Building"
}
5.00 ->
%CurrencyNote{
background_color: "Purple",
fluorescent_strip_color: "Blue",
obverse: "<NAME>",
reverse: "Lincoln Memorial"
}
2.00 ->
%CurrencyNote{
background_color: "Green",
fluorescent_strip_color: "None",
obverse: "<NAME>",
reverse: "Declaration of Independence by <NAME>"
}
1.00 ->
%CurrencyNote{
background_color: "Green",
fluorescent_strip_color: "None",
obverse: "<NAME>",
reverse: "Great Seal of the United States"
}
0.50 ->
%CurrencyCoin{diameter: 1.205, thickness: 2.15, mass: 11.340}
0.25 ->
%CurrencyCoin{diameter: 0.955, thickness: 1.75, mass: 5.670}
0.10 ->
%CurrencyCoin{diameter: 0.705, thickness: 1.35, mass: 2.268}
0.05 ->
%CurrencyCoin{diameter: 0.835, thickness: 1.95, mass: 5.000}
0.01 ->
%CurrencyCoin{diameter: 0.750, thickness: 1.55, mass: 2.500}
# 1.01 ->
# %CurrencyCoin{diameter: 1.043, thickness: 2.00, mass: 8.100}
_ ->
0.00
end
end
end
|
lib/vending_machine/currency/make_currency.ex
| 0.687105
| 0.402011
|
make_currency.ex
|
starcoder
|
defmodule AWS.SFN do
@moduledoc """
AWS Step Functions
AWS Step Functions is a service that lets you coordinate the components of
distributed applications and microservices using visual workflows.
You can use Step Functions to build applications from individual components,
each of which performs a discrete function, or *task*, allowing you to scale and
change applications quickly. Step Functions provides a console that helps
visualize the components of your application as a series of steps. Step
Functions automatically triggers and tracks each step, and retries steps when
there are errors, so your application executes predictably and in the right
order every time. Step Functions logs the state of each step, so you can quickly
diagnose and debug any issues.
Step Functions manages operations and underlying infrastructure to ensure your
application is available at any scale. You can run tasks on AWS, your own
servers, or any system that has access to AWS. You can access and use Step
Functions using the console, the AWS SDKs, or an HTTP API. For more information
about Step Functions, see the *[AWS Step Functions Developer Guide](https://docs.aws.amazon.com/step-functions/latest/dg/welcome.html)*.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: "AWS SFN",
api_version: "2016-11-23",
content_type: "application/x-amz-json-1.0",
credential_scope: nil,
endpoint_prefix: "states",
global?: false,
protocol: "json",
service_id: "SFN",
signature_version: "v4",
signing_name: "states",
target_prefix: "AWSStepFunctions"
}
end
@doc """
Creates an activity.
An activity is a task that you write in any programming language and host on any
machine that has access to AWS Step Functions. Activities must poll Step
Functions using the `GetActivityTask` API action and respond using `SendTask*`
API actions. This function lets Step Functions know the existence of your
activity and returns an identifier for use in a state machine and when polling
from the activity.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
`CreateActivity` is an idempotent API. Subsequent requests won’t create a
duplicate resource if it was already created. `CreateActivity`'s idempotency
check is based on the activity `name`. If a following request has different
`tags` values, Step Functions will ignore these differences and treat it as an
idempotent request of the previous call. In this case, `tags` will not be updated,
even if they are different.
"""
def create_activity(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateActivity", input, options)
end
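# Illustrative call (not part of the generated module); assumes a client
# built with aws-elixir's `AWS.Client.create/3` and a placeholder activity name:
#
#     AWS.SFN.create_activity(client, %{"name" => "ship-order"})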
@doc """
Creates a state machine.
A state machine consists of a collection of states that can do work (`Task`
states), determine to which states to transition next (`Choice` states), stop an
execution with an error (`Fail` states), and so on. State machines are specified
using a JSON-based, structured language. For more information, see [Amazon States
Language](https://docs.aws.amazon.com/step-functions/latest/dg/concepts-amazon-states-language.html)
in the AWS Step Functions User Guide.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
`CreateStateMachine` is an idempotent API. Subsequent requests won’t create a
duplicate resource if it was already created. `CreateStateMachine`'s idempotency
check is based on the state machine `name`, `definition`, `type`,
`LoggingConfiguration` and `TracingConfiguration`. If a following request has a
different `roleArn` or `tags`, Step Functions will ignore these differences and
treat it as an idempotent request of the previous call. In this case, `roleArn` and
`tags` will not be updated, even if they are different.
"""
def create_state_machine(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateStateMachine", input, options)
end
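# Illustrative call (placeholder name and ARN; the definition is a minimal
# Amazon States Language document with a single Pass state):
#
#     AWS.SFN.create_state_machine(client, %{
#       "name" => "HelloWorld",
#       "roleArn" => "arn:aws:iam::123456789012:role/StepFunctionsRole",
#       "definition" => ~s({"StartAt": "Hello", "States": {"Hello": {"Type": "Pass", "End": true}}})
#     })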
@doc """
Deletes an activity.
"""
def delete_activity(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteActivity", input, options)
end
@doc """
Deletes a state machine.
This is an asynchronous operation: it sets the state machine's status to
`DELETING` and begins the deletion process.
For `EXPRESS` state machines, the deletion will happen eventually (usually less
than a minute). Running executions may emit logs after the `DeleteStateMachine` API
is called.
"""
def delete_state_machine(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteStateMachine", input, options)
end
@doc """
Describes an activity.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
"""
def describe_activity(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeActivity", input, options)
end
@doc """
Describes an execution.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
This API action is not supported by `EXPRESS` state machines.
"""
def describe_execution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeExecution", input, options)
end
@doc """
Describes a state machine.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
"""
def describe_state_machine(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeStateMachine", input, options)
end
@doc """
Describes the state machine associated with a specific execution.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
This API action is not supported by `EXPRESS` state machines.
"""
def describe_state_machine_for_execution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeStateMachineForExecution", input, options)
end
@doc """
Used by workers to retrieve a task (with the specified activity ARN) which has
been scheduled for execution by a running state machine.
This initiates a long poll, where the service holds the HTTP connection open and
responds as soon as a task becomes available (i.e. an execution of a task of
this type is needed.) The maximum time the service holds on to the request
before responding is 60 seconds. If no task is available within 60 seconds, the
poll returns a `taskToken` with a null string.
Workers should set their client side socket timeout to at least 65 seconds (5
seconds higher than the maximum time the service may hold the poll request).
Polling with `GetActivityTask` can cause latency in some implementations. See
[Avoid Latency When Polling for Activity Tasks](https://docs.aws.amazon.com/step-functions/latest/dg/bp-activity-pollers.html)
in the Step Functions Developer Guide.
"""
def get_activity_task(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetActivityTask", input, options)
end
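# Illustrative long poll (placeholder ARN; assumes the usual
# `{:ok, parsed_body, http_response}` return shape of these generated calls).
# Per the documentation above, an empty `taskToken` means no task became
# available within the 60-second poll:
#
#     {:ok, %{"taskToken" => token} = task, _http} =
#       AWS.SFN.get_activity_task(client, %{"activityArn" => activity_arn})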
@doc """
Returns the history of the specified execution as a list of events.
By default, the results are returned in ascending order of the `timeStamp` of
the events. Use the `reverseOrder` parameter to get the latest events first.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other arguments
unchanged. Each pagination token expires after 24 hours. Using an expired
pagination token will return an *HTTP 400 InvalidToken* error.
This API action is not supported by `EXPRESS` state machines.
"""
def get_execution_history(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetExecutionHistory", input, options)
end
@doc """
Lists the existing activities.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other arguments
unchanged. Each pagination token expires after 24 hours. Using an expired
pagination token will return an *HTTP 400 InvalidToken* error.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
"""
def list_activities(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListActivities", input, options)
end
@doc """
Lists the executions of a state machine that meet the filtering criteria.
Results are sorted by time, with the most recent execution first.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other arguments
unchanged. Each pagination token expires after 24 hours. Using an expired
pagination token will return an *HTTP 400 InvalidToken* error.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
This API action is not supported by `EXPRESS` state machines.
"""
def list_executions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListExecutions", input, options)
end
@doc """
Lists the existing state machines.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other arguments
unchanged. Each pagination token expires after 24 hours. Using an expired
pagination token will return an *HTTP 400 InvalidToken* error.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
"""
def list_state_machines(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListStateMachines", input, options)
end
@doc """
List tags for a given resource.
Tags may only contain Unicode letters, digits, white space, or these symbols: `_
. : / = + - @`.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Used by activity workers and task states using the
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern to report that the task identified by the `taskToken` failed.
"""
def send_task_failure(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SendTaskFailure", input, options)
end
@doc """
Used by activity workers and task states using the
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern to report to Step Functions that the task represented by the specified
`taskToken` is still making progress.
This action resets the `Heartbeat` clock. The `Heartbeat` threshold is specified
in the state machine's Amazon States Language definition (`HeartbeatSeconds`).
This action does not in itself create an event in the execution history.
However, if the task times out, the execution history contains an
`ActivityTimedOut` entry for activities, or a `TaskTimedOut` entry for tasks
using the [job
run](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-sync)
or
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern.
The `Timeout` of a task, defined in the state machine's Amazon States Language
definition, is its maximum allowed duration, regardless of the number of
`SendTaskHeartbeat` requests received. Use `HeartbeatSeconds` to configure the
timeout interval for heartbeats.
"""
def send_task_heartbeat(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SendTaskHeartbeat", input, options)
end
@doc """
Used by activity workers and task states using the
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern to report that the task identified by the `taskToken` completed
successfully.
"""
def send_task_success(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SendTaskSuccess", input, options)
end
@doc """
Starts a state machine execution.
`StartExecution` is idempotent. If `StartExecution` is called with the same name
and input as a running execution, the call will succeed and return the same
response as the original request. If the execution is closed or if the input is
different, it will return a 400 `ExecutionAlreadyExists` error. Names can be
reused after 90 days.
"""
def start_execution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartExecution", input, options)
end
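# Illustrative call (placeholder ARN; `input` must be a JSON-encoded string):
#
#     AWS.SFN.start_execution(client, %{
#       "stateMachineArn" => "arn:aws:states:us-east-1:123456789012:stateMachine:HelloWorld",
#       "input" => ~s({"orderId": "42"})
#     })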
@doc """
Starts a Synchronous Express state machine execution.
"""
def start_sync_execution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartSyncExecution", input, options)
end
@doc """
Stops an execution.
This API action is not supported by `EXPRESS` state machines.
"""
def stop_execution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StopExecution", input, options)
end
@doc """
Add a tag to a Step Functions resource.
An array of key-value pairs. For more information, see [Using Cost Allocation Tags](https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/cost-alloc-tags.html)
in the *AWS Billing and Cost Management User Guide*, and [Controlling Access Using IAM
Tags](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_iam-tags.html).
Tags may only contain Unicode letters, digits, white space, or these symbols: `_
. : / = + - @`.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Remove a tag from a Step Functions resource.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Updates an existing state machine by modifying its `definition`, `roleArn`, or
`loggingConfiguration`.
Running executions will continue to use the previous `definition` and `roleArn`.
You must include at least one of `definition` or `roleArn` or you will receive a
`MissingRequiredParameter` error.
All `StartExecution` calls within a few seconds will use the updated
`definition` and `roleArn`. Executions started immediately after calling
`UpdateStateMachine` may use the previous state machine `definition` and
`roleArn`.
"""
def update_state_machine(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateStateMachine", input, options)
end
end
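# --- Usage sketch ----------------------------------------------------------
# A minimal activity worker built on the module above. This is illustrative
# only: the client construction via aws-elixir's `AWS.Client.create/3`, the
# ARN, and the `{:ok, parsed_body, http_response}` return shape are
# assumptions, and `do_work` is a hypothetical 1-arity callback that returns
# `{:ok, json_output}` or `{:error, reason}`.
defmodule SFNActivityWorkerSketch do
  @activity_arn "arn:aws:states:us-east-1:123456789012:activity:ship-order"

  def run(do_work) do
    client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
    poll_forever(client, do_work)
  end

  defp poll_forever(client, do_work) do
    case AWS.SFN.get_activity_task(client, %{"activityArn" => @activity_arn}) do
      {:ok, %{"taskToken" => token, "input" => input}, _http} when token not in [nil, ""] ->
        report(client, token, do_work.(input))

      _no_task_or_error ->
        # Empty token (the 60-second poll timed out) or a transport error:
        # simply poll again.
        :ok
    end

    poll_forever(client, do_work)
  end

  defp report(client, token, {:ok, output}) do
    AWS.SFN.send_task_success(client, %{"taskToken" => token, "output" => output})
  end

  defp report(client, token, {:error, reason}) do
    AWS.SFN.send_task_failure(client, %{
      "taskToken" => token,
      "error" => "WorkerError",
      "cause" => inspect(reason)
    })
  end
end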
|
lib/aws/generated/sfn.ex
| 0.909523
| 0.707405
|
sfn.ex
|
starcoder
|
defmodule Sqlite.Ecto2 do
@moduledoc ~S"""
Ecto Adapter module for SQLite.
It uses Sqlitex and Esqlite for accessing the SQLite database.
## Configuration Options
When creating an `Ecto.Repo` that uses a SQLite database, you should configure
it as follows:
```elixir
# In your config/config.exs file
config :my_app, Repo,
adapter: Sqlite.Ecto2,
database: "ecto_simple.sqlite3"
# In your application code
defmodule Repo do
use Ecto.Repo,
otp_app: :my_app,
adapter: Sqlite.Ecto2
end
```
You may use other options as specified in the `Ecto.Repo` documentation.
Note that the `:database` option is passed as the `filename` argument to
[`sqlite3_open_v2`](http://sqlite.org/c3ref/open.html). This implies that you
may use `:memory:` to create a private, temporary in-memory database.
See also [SQLite's interpretation of URI "filenames"](https://sqlite.org/uri.html)
for more options such as shared memory caches.
"""
# Inherit all behaviour from Ecto.Adapters.SQL
use Ecto.Adapters.SQL, :sqlitex
import String, only: [to_integer: 1]
# And provide a custom storage implementation
@behaviour Ecto.Adapter.Storage
## Custom SQLite Types
def loaders(:boolean, type), do: [&bool_decode/1, type]
def loaders(:binary_id, type), do: [Ecto.UUID, type]
def loaders(:utc_datetime, type), do: [&date_decode/1, type]
def loaders(:naive_datetime, type), do: [&date_decode/1, type]
def loaders({:embed, _} = type, _),
do: [&json_decode/1, &Ecto.Adapters.SQL.load_embed(type, &1)]
def loaders(:map, type), do: [&json_decode/1, type]
def loaders({:map, _}, type), do: [&json_decode/1, type]
def loaders(:float, type), do: [&float_decode/1, type]
def loaders(_primitive, type), do: [type]
defp bool_decode(0), do: {:ok, false}
defp bool_decode(1), do: {:ok, true}
defp bool_decode(x), do: {:ok, x}
defp date_decode(<<year :: binary-size(4), "-",
month :: binary-size(2), "-",
day :: binary-size(2)>>)
do
{:ok, {to_integer(year), to_integer(month), to_integer(day)}}
end
defp date_decode(<<year :: binary-size(4), "-",
month :: binary-size(2), "-",
day :: binary-size(2), " ",
hour :: binary-size(2), ":",
minute :: binary-size(2), ":",
second :: binary-size(2), ".",
microsecond :: binary-size(6)>>)
do
{:ok, {{to_integer(year), to_integer(month), to_integer(day)},
{to_integer(hour), to_integer(minute), to_integer(second), to_integer(microsecond)}}}
end
defp date_decode(x), do: {:ok, x}
defp json_decode(x) when is_binary(x),
do: {:ok, Application.get_env(:ecto, :json_library).decode!(x)}
defp json_decode(x),
do: {:ok, x}
defp float_decode(x) when is_integer(x), do: {:ok, x / 1}
defp float_decode(x), do: {:ok, x}
def dumpers(:binary, type), do: [type, &blob_encode/1]
def dumpers(:binary_id, type), do: [type, Ecto.UUID]
def dumpers(:boolean, type), do: [type, &bool_encode/1]
def dumpers({:embed, _} = type, _), do: [&Ecto.Adapters.SQL.dump_embed(type, &1)]
def dumpers(:time, type), do: [type, &time_encode/1]
def dumpers(_primitive, type), do: [type]
defp blob_encode(value), do: {:ok, {:blob, value}}
defp bool_encode(false), do: {:ok, 0}
defp bool_encode(true), do: {:ok, 1}
defp time_encode(value) do
{:ok, value}
end
## Storage API
@doc false
def storage_up(opts) do
storage_up_with_path(Keyword.get(opts, :database), opts)
end
defp storage_up_with_path(nil, opts) do
raise ArgumentError,
"""
No SQLite database path specified. Please check the configuration for your Repo.
Your config/*.exs file should have something like this in it:
config :my_app, MyApp.Repo,
adapter: Sqlite.Ecto2,
database: "/path/to/sqlite/database"
Options provided were:
#{inspect opts, pretty: true}
"""
end
defp storage_up_with_path(database, _opts) do
if File.exists?(database) do
{:error, :already_up}
else
database |> Path.dirname |> File.mkdir_p!
{:ok, db} = Sqlitex.open(database)
:ok = Sqlitex.exec(db, "PRAGMA journal_mode = WAL")
{:ok, [[journal_mode: "wal"]]} = Sqlitex.query(db, "PRAGMA journal_mode")
Sqlitex.close(db)
:ok
end
end
@doc false
def storage_down(opts) do
database = Keyword.get(opts, :database)
case File.rm(database) do
{:error, :enoent} ->
{:error, :already_down}
result ->
File.rm(database <> "-shm") # ignore results for these files
File.rm(database <> "-wal")
result
end
end
@doc false
def supports_ddl_transaction?, do: true
end
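# --- Usage sketch ----------------------------------------------------------
# Illustrative only: exercises the Storage API implemented above with a
# placeholder database path. The calls are wrapped in functions so nothing
# runs at compile time.
defmodule SqliteStorageSketch do
  @opts [database: "/tmp/example_app_dev.sqlite3"]

  # Creates the file (and any missing parent directory) and enables WAL mode;
  # returns {:error, :already_up} if the file already exists.
  def create, do: Sqlite.Ecto2.storage_up(@opts)

  # Removes the database file together with its -shm and -wal companions;
  # returns {:error, :already_down} if the file is missing.
  def drop, do: Sqlite.Ecto2.storage_down(@opts)
end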
|
lib/sqlite_ecto.ex
| 0.76555
| 0.739986
|
sqlite_ecto.ex
|
starcoder
|
defmodule Mechanize.Query do
@moduledoc """
Provides easy support for querying elements in many Mechanize modules.
This module is not primarily designed to be used by client code; instead, you should use it
indirectly through `Mechanize.Page.search/2` and `Mechanize.Page.filter_out/2`. Many other
functions that accept a `Mechanize.Query.t()` also use this module under the hood.
Therefore it's important to understand how this module works to unlock all the capabilities of
functions that use queries.
## Examples
For example, `Mechanize.Page.click_link!/2` is one function of the Mechanize API that accepts a
query as its second parameter. You can click a link based on its attributes:
```
alias Mechanize.Page
Page.click_link!(page, href: "/home/about")
```
When you call `Mechanize.Page.click_link!/2`, a call to `Mechanize.Query.elements_with/3` is
made under the hood to fetch all links matching the given `[href: "/home/about"]` query, and then
Mechanize "clicks" on the first link.
You can also query elements by their inner text, which is the visible text in the case of text links:
```
Page.click_link!(page, text: "About")
```
Or you can use a shorter approach for doing the same:
```
Page.click_link!(page, "About")
```
Query powered functions also accepts regular expressions:
```
Page.click_link!(page, href: ~r/about/)
```
You can combine different types of queries at once. The following example returns a list of
links matched by their href and title attributes and their inner text.
```
Page.links_with(page, href: "/home/about", title: "About page", text: "About page")
```
Use a boolean to query whether an element attribute exists. In the example below, we fetch all
checked and unchecked checkboxes from a given form:
```
alias Mechanize.Form
Form.checkboxes_with(form, checked: true) # => checkboxes with checked attribute present
Form.checkboxes_with(form, checked: false) # => checkboxes with checked attribute not present
```
For elements that have a logical order, as is the case for a select element in an HTML form,
you can query them by index. Note that this index is zero-based. In the example below,
we select the first option from a select list with attribute `name="selectlist1"`:
```
Form.select(form, name: "selectlist1", option: 0)
```
Finally, you can also query elements by matching any of several attribute values. In the example below,
Mechanize "clicks" on the first link found with an href equal to "/company" or "/about":
```
Page.click_link!(page, href: ["/company", "/about"])
```
## Page fragments
Many queries work both on a `Mechanize.Page` and on page fragments. A page fragment is nothing
but a list of data whose type implements the `Mechanize.Page.Elementable` protocol.
For example, the function `Mechanize.Page.search/2`, which is also powered by this module,
returns a page fragment. This mechanism enables client code to chain queries, like in the example:
```
page
|> Page.search(".planetmap")
|> Page.click_link!("Sun")
```
When you chain functions like that, `Mechanize.Page.click_link!/2` will only work on the page fragment
returned by the `Mechanize.Page.search/2` function. That means Mechanize will click on a link with
attribute `alt="Sun"` only if it is a child of a `.planetmap` element, ignoring all others that are
not.
But there's another use case: you can also click on a link when the link is the page fragment
itself, like in the example below:
```
page
|> Page.search(".planetmap a")
|> Page.click_link!("Sun")
```
"""
alias Mechanize.Page.{Element, Elementable}
alias Mechanize.Page
defmodule BadQueryError do
@moduledoc """
Raised when an error occurs while searching for an element using a query.
"""
defexception [:message]
end
@type t :: keyword() | integer() | String.t()
@doc """
See `Mechanize.Page.search/2`.
"""
def search(nil, _selector), do: raise(ArgumentError, "page_or_fragment is nil")
def search(_page_or_fragment, nil), do: raise(ArgumentError, "selector is nil")
def search(%Page{} = page, selector), do: page.parser.search(page, selector)
def search(fragment, selector) when is_list(fragment) do
fragment
|> Enum.map(&Elementable.element/1)
|> Enum.flat_map(fn el -> el.parser.search(el, selector) end)
end
def search(fragment, selector) do
search([fragment], selector)
end
@doc """
See `Mechanize.Page.filter_out/2`.
"""
def filter_out(nil, _selector), do: raise(ArgumentError, "page_or_fragment is nil")
def filter_out(_page_or_fragment, nil), do: raise(ArgumentError, "selector is nil")
def filter_out(%Page{} = page, selector), do: page.parser.filter_out(page, selector)
def filter_out(fragments, selector) when is_list(fragments) do
fragments
|> Enum.map(&Elementable.element/1)
|> Enum.flat_map(fn el -> el.parser.filter_out(el, selector) end)
end
def filter_out(fragment, selector), do: filter_out([fragment], selector)
@doc """
See `Mechanize.Page.elements_with/3`.
"""
def elements_with(page_or_fragment, selector, query \\ []) do
page_or_fragment
|> search(selector)
|> Enum.filter(&match_query?(&1, query))
end
@doc false
def match?(nil, _types, _query) do
raise ArgumentError, "element is nil"
end
def match?(_element, nil, _query) do
raise ArgumentError, "types is nil"
end
def match?(_element, _types, nil) do
raise ArgumentError, "query is nil"
end
def match?(element, types, query) do
match_type?(element, types) and match_query?(element, query)
end
@doc false
def match_type?(element, types) when is_list(types) do
element.__struct__ in types
end
def match_type?(element, type) do
match_type?(element, [type])
end
@doc false
def match_query?(nil, _query), do: raise(ArgumentError, "element is nil")
def match_query?(_element, nil), do: raise(ArgumentError, "query is nil")
def match_query?(_element, []), do: true
def match_query?(element, text) when is_binary(text) do
match_query?(element, [{:text, text}])
end
def match_query?(element, index) when is_integer(index) do
case Map.get(element, :index) do
^index ->
true
_ ->
false
end
end
def match_query?(element, [attributes | query]) do
match_attribute?(element, attributes) and match_query?(element, query)
end
defp match_attribute?(_element, {:text, nil}) do
raise ArgumentError, "query :text is nil"
end
defp match_attribute?(element, {:text, value}) when is_list(value) do
Element.text(element) in value
end
defp match_attribute?(element, {:text, value}) when is_binary(value) do
Element.text(element) == value
end
defp match_attribute?(element, {:text, value}) do
Element.text(element) =~ value
end
defp match_attribute?(_element, {attr_name, nil}) do
raise ArgumentError, "query :#{attr_name} is nil"
end
defp match_attribute?(element, {attr_name, value}) when is_list(value) do
Element.attr(element, attr_name) in value
end
defp match_attribute?(element, {attr_name, boolean}) when is_boolean(boolean) do
Element.attr_present?(element, attr_name) == boolean
end
defp match_attribute?(element, {attr_name, value}) when is_binary(value) do
Element.attr(element, attr_name) == value
end
defp match_attribute?(element, {attr_name, value}) do
case Element.attr(element, attr_name) do
nil -> false
attr_value -> attr_value =~ value
end
end
end
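# --- Usage sketch ----------------------------------------------------------
# Illustrative only: `page` is assumed to be a %Mechanize.Page{} (or a page
# fragment); the selector and attribute values are placeholders.
defmodule MechanizeQuerySketch do
  alias Mechanize.Query

  # All <a> elements whose href matches the regex and that carry
  # rel="nofollow" -- a CSS selector combined with an attribute query.
  def nofollow_links(page) do
    Query.elements_with(page, "a", href: ~r{^https?://}, rel: "nofollow")
  end

  # The boolean-style query shown in the @moduledoc: anchors that have a
  # target attribute present, regardless of its value.
  def targeted_links(page) do
    Query.elements_with(page, "a", target: true)
  end
end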
|
lib/mechanize/query.ex
| 0.945801
| 0.856872
|
query.ex
|
starcoder
|
defmodule Money do
@moduledoc """
Money implements a set of functions to store, retrieve, convert and perform
arithmetic on a `Money.t` type that is composed of a currency code and
a decimal currency amount.
Money is very opinionated in the interests of serving as a dependable library
that can underpin accounting and financial applications.
This opinion is expressed by ensuring that:
1. Money must always have both an amount and a currency code.
2. The currency code must always be valid.
3. Money arithmetic can only be performed when both operands are of the
same currency.
4. Money amounts are represented as a `Decimal`.
5. Money is serialised to the database as a custom Postgres composite type
that includes both the amount and the currency. Therefore for Ecto
serialization Postgres is assumed as the data store. Serialization is
entirely optional and Ecto is not a package dependency.
6. All arithmetic functions work in fixed point decimal. No rounding
occurs automatically (unless expressly called out for a function).
7. Explicit rounding obeys the rounding rules for a given currency. The
rounding rules are defined by the Unicode consortium in its CLDR
repository as implemented by the hex package `ex_cldr`. These rules
define the number of fractional digits for a currency and the rounding
increment where appropriate.
"""
import Kernel, except: [round: 1, abs: 1]
require Cldr.Macros
alias Cldr.Config
@typedoc """
Money is composed of an atom representation of an ISO4217 currency code and
a `Decimal` representation of an amount.
"""
@type t :: %Money{currency: atom(), amount: Decimal.t(), format_options: Keyword.t()}
@type currency_code :: atom() | String.t()
@type amount :: float() | integer() | Decimal.t() | String.t()
@enforce_keys [:currency, :amount]
defstruct currency: nil, amount: nil, format_options: []
@doc false
def cldr_backend_provider(config) do
Money.Backend.define_money_module(config)
end
@json_library Application.get_env(:ex_money, :json_library, Config.json_library())
unless Code.ensure_loaded?(@json_library) do
IO.puts("""
The json_library '#{inspect(@json_library)}' does not appear
to be available. A json library is required
for Money to operate. Is it configured as a
dependency in mix.exs?
In config.exs your explicit or implicit configuration is:
config ex_money,
json_library: #{inspect(@json_library)}
In mix.exs you will need something like:
def deps() do
[
...
{:#{String.downcase(inspect(@json_library))}, version_string}
]
end
""")
raise ArgumentError,
"Json library #{String.downcase(inspect(@json_library))} does " <>
"not appear to be a dependency"
end
# Default mode for rounding is :half_even, also known
# as bankers rounding
@default_rounding_mode :half_even
alias Money.Currency
alias Money.ExchangeRates
defdelegate validate_currency(currency_code), to: Cldr
defdelegate known_currencies, to: Cldr
defdelegate known_current_currencies, to: Money.Currency
defdelegate known_historic_currencies, to: Money.Currency
defdelegate known_tender_currencies, to: Money.Currency
@doc """
Returns a %Money{} struct from a currency code and a currency amount or
an error tuple of the form `{:error, {exception, message}}`.
## Arguments
* `currency_code` is an ISO4217 three-character upcased binary or atom
* `amount` is an integer, string or Decimal
* `options` is a keyword list of options
## Options
* `:locale` is any known locale. The locale is used to normalize any
binary (String) amounts to a form that can be consumed by `Decimal.new/1`.
This consists of removing any localised grouping characters and replacing
the localised decimal separator with a ".".
The default is `Cldr.get_locale/0`.
* `:backend` is any module() that includes `use Cldr` and therefore
is a `Cldr` backend module(). The default is `Money.default_backend/0`.
* Any other options are considered as formatting options to
be applied by default when calling `Money.to_string/2`.
Note that the `currency_code` and `amount` arguments can be supplied in
either order.
## Examples
iex> Money.new(:USD, 100)
#Money<:USD, 100>
iex> Money.new(100, :USD)
#Money<:USD, 100>
iex> Money.new("USD", 100)
#Money<:USD, 100>
iex> Money.new("thb", 500)
#Money<:THB, 500>
iex> Money.new("EUR", Decimal.new(100))
#Money<:EUR, 100>
iex> Money.new(:EUR, "100.30")
#Money<:EUR, 100.30>
iex> Money.new(:EUR, "100.30", fractional_digits: 4)
#Money<:EUR, 100.30>
iex> Money.new(:XYZZ, 100)
{:error, {Money.UnknownCurrencyError, "The currency :XYZZ is invalid"}}
iex> Money.new("1.000,99", :EUR, locale: "de")
#Money<:EUR, 1000.99>
iex> Money.new 123.445, :USD
{:error,
{Money.InvalidAmountError,
"Float amounts are not supported in new/2 due to potenial " <>
"rounding and precision issues. If absolutely required, " <>
"use Money.from_float/2"}}
"""
@spec new(amount | currency_code, amount | currency_code, Keyword.t()) ::
Money.t() | {:error, {module(), String.t()}}
def new(currency_code, amount, options \\ [])
def new(currency_code, amount, options) when is_binary(currency_code) and is_integer(amount) do
case validate_currency(currency_code) do
{:error, {_exception, message}} -> {:error, {Money.UnknownCurrencyError, message}}
{:ok, code} -> new(code, amount, options)
end
end
def new(amount, currency_code, options) when is_binary(currency_code) and is_integer(amount) do
new(currency_code, amount, options)
end
def new(currency_code, amount, options) when is_atom(currency_code) and is_integer(amount) do
with {:ok, code} <- validate_currency(currency_code) do
format_options = extract_format_options(options)
%Money{amount: Decimal.new(amount), currency: code, format_options: format_options}
else
{:error, {Cldr.UnknownCurrencyError, message}} ->
{:error, {Money.UnknownCurrencyError, message}}
end
end
def new(amount, currency_code, options) when is_integer(amount) and is_atom(currency_code) do
new(currency_code, amount, options)
end
def new(currency_code, %Decimal{} = amount, options)
when is_atom(currency_code) or is_binary(currency_code) do
case validate_currency(currency_code) do
{:error, {_exception, message}} ->
{:error, {Money.UnknownCurrencyError, message}}
{:ok, code} ->
format_options = extract_format_options(options)
%Money{amount: amount, currency: code, format_options: format_options}
end
end
def new(%Decimal{} = amount, currency_code, options)
when is_atom(currency_code) or is_binary(currency_code) do
new(currency_code, amount, options)
end
def new(currency_code, amount, options) when is_atom(currency_code) and is_binary(amount) do
with {:ok, decimal} <- parse_decimal(amount, options[:locale], options[:backend]) do
new(currency_code, decimal, options)
end
rescue
Decimal.Error ->
{
:error,
{Money.InvalidAmountError, "Amount cannot be converted to a number: #{inspect(amount)}"}
}
end
def new(amount, currency_code, options) when is_atom(currency_code) and is_binary(amount) do
new(currency_code, amount, options)
end
def new(_currency_code, amount, _options) when is_float(amount) do
{:error,
{Money.InvalidAmountError,
"Float amounts are not supported in new/2 due to potenial rounding " <>
"and precision issues. If absolutely required, use Money.from_float/2"}}
end
def new(amount, _currency_code, _options) when is_float(amount) do
{:error,
{Money.InvalidAmountError,
"Float amounts are not supported in new/2 due to potenial rounding " <>
"and precision issues. If absolutely required, use Money.from_float/2"}}
end
def new(param_a, param_b, options) when is_binary(param_a) and is_binary(param_b) do
with {:ok, currency_code} <- validate_currency(param_a) do
new(currency_code, param_b, options)
else
{:error, _} ->
with {:ok, currency_code} <- validate_currency(param_b) do
new(currency_code, param_a, options)
else
{:error, _} ->
{:error,
{Money.Invalid,
"Unable to create money from #{inspect(param_a)} " <> "and #{inspect(param_b)}"}}
end
end
end
defp extract_format_options(options) do
options
|> Keyword.delete(:locale)
|> Keyword.delete(:backend)
|> Keyword.delete(:default_currency)
end
@doc """
Returns a %Money{} struct from a currency code and a currency amount. Raises an
exception if the currency code is invalid.
## Arguments
* `currency_code` is an ISO4217 three-character upcased binary or atom
* `amount` is an integer, float or Decimal
## Examples
Money.new!(:XYZZ, 100)
** (Money.UnknownCurrencyError) The currency :XYZZ is invalid
(ex_money) lib/money.ex:177: Money.new!/2
"""
@spec new!(amount | currency_code, amount | currency_code, Keyword.t()) :: Money.t() | no_return()
def new!(currency_code, amount, options \\ [])
def new!(currency_code, amount, options)
when is_binary(currency_code) or is_atom(currency_code) do
case money = new(currency_code, amount, options) do
{:error, {exception, message}} -> raise exception, message
_ -> money
end
end
def new!(amount, currency_code, options)
when (is_binary(currency_code) or is_atom(currency_code)) and is_number(amount) do
new!(currency_code, amount, options)
end
def new!(%Decimal{} = amount, currency_code, options)
when is_binary(currency_code) or is_atom(currency_code) do
new!(currency_code, amount, options)
end
def new!(currency_code, %Decimal{} = amount, options)
when is_binary(currency_code) or is_atom(currency_code) do
new!(currency_code, amount, options)
end
@doc """
Returns a %Money{} struct from a currency code and a float amount, or
an error tuple of the form `{:error, {exception, message}}`.
Floats are fraught with danger in computer arithmetic due to the
unexpected loss of precision during rounding. The IEEE754 standard
indicates that a number with a precision of 16 digits should
round-trip convert without loss of fidelity. This function supports
numbers with a precision up to 15 digits and will error if the
provided amount is outside that range.
**Note** that `Money` cannot detect lack of precision or rounding errors
introduced upstream. This function therefore should be used with
great care and its use should be considered potentially harmful.
## Arguments
* `currency_code` is an ISO4217 three-character upcased binary or atom
* `amount` is a float
* `options` is a keyword list of options passed
to `Money.new/3`. The default is `[]`.
## Examples
iex> Money.from_float 1.23456, :USD
#Money<:USD, 1.23456>
iex> Money.from_float 1.234567890987656, :USD
{:error,
{Money.InvalidAmountError,
"The precision of the float 1.234567890987656 is " <>
"greater than 15 which could lead to unexpected results. " <>
"Reduce the precision or call Money.new/2 with a Decimal or String amount"}}
"""
Cldr.Macros.doc_since("2.0.0")
@max_precision_allowed 15
@spec from_float(float | currency_code, float | currency_code, Keyword.t()) ::
Money.t() | {:error, {module(), String.t()}}
def from_float(currency_code, amount, options \\ [])
def from_float(currency_code, amount, options)
when (is_binary(currency_code) or is_atom(currency_code)) and is_float(amount) do
if Cldr.Number.precision(amount) <= @max_precision_allowed do
new(currency_code, Decimal.from_float(amount), options)
else
{:error,
{Money.InvalidAmountError,
"The precision of the float #{inspect(amount)} " <>
"is greater than #{inspect(@max_precision_allowed)} " <>
"which could lead to unexpected results. Reduce the " <>
"precision or call Money.new/2 with a Decimal or String amount"}}
end
end
def from_float(amount, currency_code, options)
when (is_binary(currency_code) or is_atom(currency_code)) and is_float(amount) do
from_float(currency_code, amount, options)
end
@doc """
Returns a %Money{} struct from a currency code and a float amount, or
raises an exception if the currency code is invalid.
See `Money.from_float/2` for further information.
**Note** that `Money` cannot detect lack of precision or rounding errors
introduced upstream. This function therefore should be used with
great care and its use should be considered potentially harmful.
## Arguments
* `currency_code` is an ISO4217 three-character upcased binary or atom
* `amount` is a float
* `options` is a keyword list of options passed
to `Money.new/3`. The default is `[]`.
## Examples
iex> Money.from_float!(:USD, 1.234)
#Money<:USD, 1.234>
Money.from_float!(:USD, 1.234567890987654)
#=> ** (Money.InvalidAmountError) The precision of the float 1.234567890987654 is greater than 15 which could lead to unexpected results. Reduce the precision or call Money.new/2 with a Decimal or String amount
(ex_money) lib/money.ex:293: Money.from_float!/2
"""
Cldr.Macros.doc_since("2.0.0")
@spec from_float!(currency_code, float, Keyword.t()) :: Money.t() | no_return()
def from_float!(currency_code, amount, options \\ []) do
case from_float(currency_code, amount, options) do
{:error, {exception, reason}} -> raise exception, reason
money -> money
end
end
@doc """
Add format options to a `t:Money`.
## Arguments
* `money` is any valid `t:Money` type returned
by `Money.new/2`
* `options` is a keyword list of options. These
options are used when calling `Money.to_string/2`.
The default is `[]`
"""
Cldr.Macros.doc_since("5.5.0")
@spec put_format_options(Money.t(), Keyword.t()) :: Money.t()
def put_format_options(%Money{} = money, options) when is_list(options) do
%{money | format_options: options}
end
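# Illustrative example (not a doctest):
#
#     Money.new!(:USD, 100)
#     |> Money.put_format_options(format: :long)
#     |> Money.to_string!()
#     #=> "100 US dollars"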
@doc """
Parse a string and return a `Money.t` or an error.
The string to be parsed is required to have a currency
code and an amount. The currency code may be placed
before the amount or after, but not both.
Parsing is strict. Additional text surrounding the
currency code and amount will cause the parse to
fail.
## Arguments
* `string` is a string to be parsed
* `options` is a keyword list of options that is
passed to `Money.new/3` with the exception of
the options listed below
## Options
* `:backend` is any module() that includes `use Cldr` and therefore
is a `Cldr` backend module(). The default is `Money.default_backend()`
* `:locale` is any valid locale returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct returned by `Cldr.Locale.new!/2`
The default is `<backend>.get_locale()`
* `:only` is an `atom` or list of `atoms` representing the
currencies or currency types to be considered for a match.
This equates to a list of acceptable currencies for parsing.
See the notes below for currency types.
* `:except` is an `atom` or list of `atoms` representing the
currencies or currency types not to be considered for a match.
This equates to a list of unacceptable currencies for parsing.
See the notes below for currency types.
* `:fuzzy` is a float greater than `0.0` and less than or
equal to `1.0` which is used as input to
`String.jaro_distance/2` to determine if the provided
currency string is *close enough* to a known currency
string to identify a currency code definitively.
It is recommended to use numbers greater than `0.8` in
order to reduce false positives.
* `:default_currency` is any valid currency code or `false`
that will be used if no currency code, symbol or description is
identified in the parsed string. The default is `nil`
which means that the default currency associated with
the `:locale` option will be used. If `false` then the
currency associated with the `:locale` option will not be
used and an error will be returned if there is no currency
in the string being parsed.
## Returns
* a `Money.t` if parsing is successful or
* `{:error, {exception, reason}}` if an error is
detected.
## Notes
The `:only` and `:except` options accept a list of
currency codes and/or currency types. The following
types are recognised.
If both `:only` and `:except` are specified,
the `:except` entries take priority - that means
any entries in `:except` are removed from the `:only`
entries.
* `:all`, the default, considers all currencies
* `:current` considers those currencies that have a `:to`
date of nil and which also is a known ISO4217 currency
* `:historic` is the opposite of `:current`
* `:tender` considers currencies that are legal tender
* `:unannotated` considers currencies that don't have
"(some string)" in their names. These are usually
financial instruments.
## Examples
iex> Money.parse("USD 100")
#Money<:USD, 100>
iex> Money.parse "USD 100,00", locale: "de"
#Money<:USD, 100.00>
iex> Money.parse("100 USD")
#Money<:USD, 100>
iex> Money.parse("100 eurosports", fuzzy: 0.8)
#Money<:EUR, 100>
iex> Money.parse("100", default_currency: :EUR)
#Money<:EUR, 100>
iex> Money.parse("100 eurosports", fuzzy: 0.9)
{:error, {Money.UnknownCurrencyError, "The currency \\"eurosports\\" is unknown or not supported"}}
iex> Money.parse("100 afghan afghanis")
#Money<:AFN, 100>
iex> Money.parse("100", default_currency: false)
{:error, {Money.Invalid,
"A currency code, symbol or description must be specified but was not found in \\"100\\""}}
iex> Money.parse("USD 100 with trailing text")
{:error, {Money.ParseError, "Could not parse \\"USD 100 with trailing text\\"."}}
"""
Cldr.Macros.doc_since("3.2.0")
@spec parse(String.t(), Keyword.t()) :: Money.t() | {:error, {module(), String.t()}}
def parse(string, options \\ []) do
with {:ok, result, "", _, _, _} <- Money.Parser.money_parser(String.trim(string)) do
result
|> Enum.map(fn {k, v} -> {k, String.trim_trailing(v)} end)
|> Keyword.put_new(:currency, Keyword.get(options, :default_currency))
|> Map.new()
|> maybe_create_money(string, options)
else
_ ->
{:error, {Money.ParseError, "Could not parse #{inspect(string)}."}}
end
end
# No currency was in the string and options[:default_currency] == false
# meaning don't derive it from the locale
defp maybe_create_money(%{currency: false}, string, _options) do
{:error,
{Money.Invalid,
"A currency code, symbol or description must be specified but was not found in #{
inspect(string)
}"}}
end
# No currency was in the string so we'll derive it from
# the locale
defp maybe_create_money(%{currency: nil} = money_map, string, options) do
backend = Keyword.get_lazy(options, :backend, &Money.default_backend/0)
locale = Keyword.get(options, :locale, backend.get_locale)
with {:ok, backend} <- Cldr.validate_backend(backend),
{:ok, locale} <- Cldr.validate_locale(locale, backend) do
currency = Cldr.Currency.currency_from_locale(locale)
money_map
|> Map.put(:currency, currency)
|> maybe_create_money(string, options)
end
end
defp maybe_create_money(%{currency: currency, amount: amount}, _string, options) do
backend = Keyword.get_lazy(options, :backend, &Money.default_backend/0)
locale = Keyword.get(options, :locale, backend.get_locale)
currency = Kernel.to_string(currency)
{only_filter, options} =
Keyword.pop(options, :only, Keyword.get(options, :currency_filter, [:all]))
{except_filter, options} = Keyword.pop(options, :except, [])
{fuzzy, options} = Keyword.pop(options, :fuzzy, nil)
with {:ok, locale} <- backend.validate_locale(locale),
{:ok, currency_strings} <-
Cldr.Currency.currency_strings(locale, backend, only_filter, except_filter),
{:ok, currency} <-
find_currency(currency_strings, currency, fuzzy) do
Money.new(currency, amount, options)
end
end
defp find_currency(currency_strings, currency, nil) do
canonical_currency =
currency
|> String.downcase()
|> String.trim_trailing(".")
case Map.get(currency_strings, canonical_currency) do
nil ->
{:error, unknown_currency_error(currency)}
currency ->
{:ok, currency}
end
end
defp find_currency(currency_strings, currency, fuzzy)
when is_float(fuzzy) and fuzzy > 0.0 and fuzzy <= 1.0 do
canonical_currency = String.downcase(currency)
{distance, currency_code} =
currency_strings
|> Enum.map(fn {k, v} -> {String.jaro_distance(k, canonical_currency), v} end)
|> Enum.sort(fn {k1, _v1}, {k2, _v2} -> k1 > k2 end)
|> hd
if distance >= fuzzy do
{:ok, currency_code}
else
{:error, unknown_currency_error(currency)}
end
end
defp find_currency(_currency_strings, _currency, fuzzy) do
{:error,
{
ArgumentError,
"option :fuzzy must be a number > 0.0 and <= 1.0. Found #{inspect(fuzzy)}"
}}
end
defp unknown_currency_error(currency) do
{Money.UnknownCurrencyError, "The currency #{inspect(currency)} is unknown or not supported"}
end
@doc """
Returns a formatted string representation of a `Money{}`.
Formatting is performed according to the rules defined by CLDR. See
`Cldr.Number.to_string/2` for formatting options. The default is to format
as a currency which applies the appropriate rounding and fractional digits
for the currency.
## Arguments
* `money` is any valid `Money.t` type returned
by `Money.new/2`
* `options` is a keyword list of options or a `%Cldr.Number.Format.Options{}` struct
## Returns
* `{:ok, string}` or
* `{:error, reason}`
## Options
* `:backend` is any CLDR backend module. The default is
`Money.default_backend()`.
* Any other options are passed to `Cldr.Number.to_string/3`
## Examples
iex> Money.to_string Money.new(:USD, 1234)
{:ok, "$1,234.00"}
iex> Money.to_string Money.new(:JPY, 1234)
{:ok, "¥1,234"}
iex> Money.to_string Money.new(:THB, 1234)
{:ok, "THB 1,234.00"}
iex> Money.to_string Money.new(:THB, 1234, fractional_digits: 4)
{:ok, "THB 1,234.0000"}
iex> Money.to_string Money.new(:USD, 1234), format: :long
{:ok, "1,234 US dollars"}
"""
@spec to_string(Money.t(), Keyword.t() | Cldr.Number.Format.Options.t()) ::
{:ok, String.t()} | {:error, {atom, String.t()}}
def to_string(money, options \\ [])
def to_string(%Money{} = money, options) when is_list(options) do
default_options = [backend: Money.default_backend(), currency: money.currency]
format_options = Map.get(money, :format_options, [])
options =
default_options
|> Keyword.merge(format_options)
|> Keyword.merge(options)
backend = options[:backend]
Cldr.Number.to_string(money.amount, backend, options)
end
def to_string(%Money{} = money, %Cldr.Number.Format.Options{} = options) do
format_options = Map.get(money, :format_options, [])
options =
format_options
|> Map.new()
|> Map.merge(options)
|> Map.put(:currency, money.currency)
backend = Map.get(options, :backend, Money.default_backend())
Cldr.Number.to_string(money.amount, backend, options)
end
@doc """
Returns a formatted string representation of a `Money.t` or raises if
there is an error.
Formatting is performed according to the rules defined by CLDR. See
`Cldr.Number.to_string!/2` for formatting options. The default is to format
as a currency which applies the appropriate rounding and fractional digits
for the currency.
## Arguments
* `money` is any valid `Money.t` type returned
by `Money.new/2`
* `options` is a keyword list of options or a `%Cldr.Number.Format.Options{}` struct
## Options
* `:backend` is any CLDR backend module. The default is
`Money.default_backend()`.
* Any other options are passed to `Cldr.Number.to_string/3`
## Examples
iex> Money.to_string! Money.new(:USD, 1234)
"$1,234.00"
iex> Money.to_string! Money.new(:JPY, 1234)
"¥1,234"
iex> Money.to_string! Money.new(:THB, 1234)
"THB 1,234.00"
iex> Money.to_string! Money.new(:USD, 1234), format: :long
"1,234 US dollars"
"""
@spec to_string!(Money.t(), Keyword.t() | Cldr.Number.Format.Options.t()) ::
String.t() | no_return()
def to_string!(%Money{} = money, options \\ []) do
case to_string(money, options) do
{:ok, string} -> string
{:error, {exception, reason}} -> raise exception, reason
end
end
@doc """
Returns the amount part of a `Money` type as a `Decimal`
## Arguments
* `money` is any valid `Money.t` type returned
by `Money.new/2`
## Returns
* a `Decimal.t`
## Example
iex> m = Money.new("USD", 100)
iex> Money.to_decimal(m)
#Decimal<100>
"""
@spec to_decimal(money :: Money.t()) :: Decimal.t()
def to_decimal(%Money{amount: amount}) do
amount
end
@doc """
Returns the currency code of a `Money` type
as an `atom`.
## Arguments
* `money` is any valid `Money.t` type returned
by `Money.new/2`
## Returns
* the currency code as an `t:atom`
## Example
iex> m = Money.new("USD", 100)
iex> Money.to_currency_code(m)
:USD
"""
@doc since: "5.6.0"
@spec to_currency_code(money :: Money.t()) :: atom()
def to_currency_code(%Money{currency: currency_code}) do
currency_code
end
@doc """
The absolute value of a `Money` amount.
Returns a `Money` type with a positive sign for the amount.
## Arguments
* `money` is any valid `Money.t` type returned
by `Money.new/2`
## Returns
* a `Money.t`
## Example
iex> m = Money.new("USD", -100)
iex> Money.abs(m)
#Money<:USD, 100>
"""
@spec abs(money :: Money.t()) :: Money.t()
def abs(%Money{amount: amount} = money) do
%{money | amount: Decimal.abs(amount)}
end
@doc """
Add two `Money` values.
## Arguments
* `money_1` and `money_2` are any valid `Money.t` types returned
by `Money.new/2`
## Returns
* `{:ok, money}` or
* `{:error, reason}`
## Example
iex> Money.add Money.new(:USD, 200), Money.new(:USD, 100)
{:ok, Money.new(:USD, 300)}
iex> Money.add Money.new(:USD, 200), Money.new(:AUD, 100)
{:error, {ArgumentError, "Cannot add monies with different currencies. " <>
"Received :USD and :AUD."}}
"""
@spec add(money_1 :: Money.t(), money_2 :: Money.t()) ::
{:ok, Money.t()} | {:error, {module(), String.t()}}
def add(
%Money{currency: same_currency, amount: amount_a},
%Money{currency: same_currency, amount: amount_b} = money_b
) do
{:ok, %{money_b | amount: Decimal.add(amount_a, amount_b)}}
end
def add(%Money{currency: code_a}, %Money{currency: code_b}) do
{
:error,
{
ArgumentError,
"Cannot add monies with different currencies. " <>
"Received #{inspect(code_a)} and #{inspect(code_b)}."
}
}
end
@doc """
Add two `Money` values and raise on error.
## Arguments
* `money_1` and `money_2` are any valid `Money.t` types returned
by `Money.new/2`
## Returns
* a `Money.t` struct or
* raises an exception
## Examples
iex> Money.add! Money.new(:USD, 200), Money.new(:USD, 100)
#Money<:USD, 300>
Money.add! Money.new(:USD, 200), Money.new(:CAD, 500)
** (ArgumentError) Cannot add monies with different currencies. Received :USD and :CAD.
"""
@spec add!(money_1 :: Money.t(), money_2 :: Money.t()) :: t() | no_return()
def add!(%Money{} = money_1, %Money{} = money_2) do
case add(money_1, money_2) do
{:ok, result} -> result
{:error, {exception, message}} -> raise exception, message
end
end
@doc """
Subtract one `Money` value struct from another.
## Options
* `money_1` and `money_2` are any valid `Money.t` types returned
by `Money.new/2`
## Returns
* `{:ok, money}` or
* `{:error, reason}`
## Example
iex> Money.sub Money.new(:USD, 200), Money.new(:USD, 100)
{:ok, Money.new(:USD, 100)}
"""
@spec sub(money_1 :: Money.t(), money_2 :: Money.t()) ::
{:ok, Money.t()} | {:error, {module(), String.t()}}
def sub(%Money{currency: same_currency, amount: amount_a},
%Money{currency: same_currency, amount: amount_b} = money_b
) do
{:ok, %{money_b | amount: Decimal.sub(amount_a, amount_b)}}
end
def sub(%Money{currency: code_a}, %Money{currency: code_b}) do
{:error,
{ArgumentError,
"Cannot subtract two monies with different currencies. " <>
"Received #{inspect(code_a)} and #{inspect(code_b)}."}}
end
@doc """
Subtract one `Money` value struct from another and raise on error.
## Arguments
* `money_1` and `money_2` are any valid `Money.t` types returned
by `Money.new/2`
## Returns
* a `Money.t` struct or
* raises an exception
## Examples
iex> Money.sub! Money.new(:USD, 200), Money.new(:USD, 100)
#Money<:USD, 100>
Money.sub! Money.new(:USD, 200), Money.new(:CAD, 500)
** (ArgumentError) Cannot subtract two monies with different currencies. Received :USD and :CAD.
"""
@spec sub!(money_1 :: Money.t(), money_2 :: Money.t()) :: Money.t() | none()
def sub!(%Money{} = a, %Money{} = b) do
case sub(a, b) do
{:ok, result} -> result
{:error, {exception, message}} -> raise exception, message
end
end
@doc """
Multiply a `Money` value by a number.
## Arguments
* `money` is any valid `Money.t` type returned
by `Money.new/2`
* `number` is an integer, float or `Decimal.t`
> Note that multiplying one %Money{} by another is not supported.
## Returns
* `{:ok, money}` or
* `{:error, reason}`
## Example
iex> Money.mult(Money.new(:USD, 200), 2)
{:ok, Money.new(:USD, 400)}
iex> Money.mult(Money.new(:USD, 200), "xx")
{:error, {ArgumentError, "Cannot multiply money by \\"xx\\""}}
"""
@spec mult(Money.t(), Cldr.Math.number_or_decimal()) ::
{:ok, Money.t()} | {:error, {module(), String.t()}}
def mult(%Money{amount: amount} = money, number) when is_integer(number) do
{:ok, %{money | amount: Decimal.mult(amount, Decimal.new(number))}}
end
def mult(%Money{amount: amount} = money, number) when is_float(number) do
{:ok, %{money | amount: Decimal.mult(amount, Decimal.from_float(number))}}
end
def mult(%Money{amount: amount} = money, %Decimal{} = number) do
{:ok, %{money | amount: Decimal.mult(amount, number)}}
end
def mult(%Money{}, other) do
{:error, {ArgumentError, "Cannot multiply money by #{inspect(other)}"}}
end
@doc """
Multiply a `Money` value by a number and raise on error.
## Arguments
* `money` is any valid `Money.t` type returned
by `Money.new/2`
* `number` is an integer, float or `Decimal.t`
## Returns
* a `Money.t` or
* raises an exception
## Examples
iex> Money.mult!(Money.new(:USD, 200), 2)
#Money<:USD, 400>
Money.mult!(Money.new(:USD, 200), :invalid)
** (ArgumentError) Cannot multiply money by :invalid
"""
@spec mult!(Money.t(), Cldr.Math.number_or_decimal()) :: Money.t() | none()
def mult!(%Money{} = money, number) do
case mult(money, number) do
{:ok, result} -> result
{:error, {exception, message}} -> raise exception, message
end
end
@doc """
Divide a `Money` value by a number.
## Arguments
* `money` is any valid `Money.t` type returned
by `Money.new/2`
* `number` is an integer, float or `Decimal.t`
> Note that dividing one %Money{} by another is not supported.
## Returns
* `{:ok, money}` or
* `{:error, reason}`
## Example
iex> Money.div Money.new(:USD, 200), 2
{:ok, Money.new(:USD, 100)}
iex> Money.div(Money.new(:USD, 200), "xx")
{:error, {ArgumentError, "Cannot divide money by \\"xx\\""}}
"""
@spec div(Money.t(), Cldr.Math.number_or_decimal()) ::
{:ok, Money.t()} | {:error, {module(), String.t()}}
def div(%Money{amount: amount} = money, number) when is_integer(number) do
{:ok, %{money | amount: Decimal.div(amount, Decimal.new(number))}}
end
def div(%Money{amount: amount} = money, number) when is_float(number) do
{:ok, %{money | amount: Decimal.div(amount, Decimal.from_float(number))}}
end
def div(%Money{amount: amount} = money, %Decimal{} = number) do
{:ok, %{money | amount: Decimal.div(amount, number)}}
end
def div(%Money{}, other) do
{:error, {ArgumentError, "Cannot divide money by #{inspect(other)}"}}
end
@doc """
Divide a `Money` value by a number and raise on error.
## Arguments
* `money` is any valid `Money.t` type returned
by `Money.new/2`
* `number` is an integer, float or `Decimal.t`
## Returns
* a `Money.t` struct or
* raises an exception
## Examples
iex> Money.div!(Money.new(:USD, 200), 2)
#Money<:USD, 100>
iex> Money.div!(Money.new(:USD, 200), "xx")
** (ArgumentError) Cannot divide money by "xx"
"""
@spec div!(Money.t(), Cldr.Math.number_or_decimal()) :: Money.t() | none()
def div!(%Money{} = money, number) do
case Money.div(money, number) do
{:ok, result} -> result
{:error, {exception, message}} -> raise exception, message
end
end
@doc """
Returns a boolean indicating if two `Money` values are equal
## Arguments
* `money_1` and `money_2` are any valid `Money.t` types returned
by `Money.new/2`
## Returns
* `true` or `false`
## Example
iex> Money.equal?(Money.new(:USD, 200), Money.new(:USD, 200))
true
iex> Money.equal?(Money.new(:USD, 200), Money.new(:USD, 100))
false
"""
@spec equal?(money_1 :: Money.t(), money_2 :: Money.t()) :: boolean
def equal?(%Money{currency: same_currency, amount: amount_a}, %Money{
currency: same_currency,
amount: amount_b
}) do
Decimal.equal?(amount_a, amount_b)
end
def equal?(_, _) do
false
end
@doc """
Sum a list of monies that may be in different
currencies.
## Arguments
* `money_list` is a list of any valid `Money.t` types returned
by `Money.new/2`
* `rates` is a map of exchange rates. The default is `%{}`.
`Money.ExchangeRates.latest_rates/0` can be used to return
the latest known exchange rates which can then applied as
the `rates` parameter.
## Returns
* `{:ok, money}` representing the sum of the money amounts,
converted where necessary. The currency of the sum is
the currency of the first `Money` in the `money_list`.
* `{:error, {exception, reason}}` describing an error.
## Examples
iex> Money.sum [Money.new(:USD, 100), Money.new(:USD, 200), Money.new(:USD, 50)]
{:ok, Money.new(:USD, 350)}
iex> Money.sum [Money.new(:USD, 100), Money.new(:USD, 200), Money.new(:AUD, 50)]
{:error,
{Money.ExchangeRateError, "No exchange rate is available for currency :AUD"}}
iex> rates = %{AUD: Decimal.new(2), USD: Decimal.new(1)}
iex> Money.sum [Money.new(:USD, 100), Money.new(:USD, 200), Money.new(:AUD, 50)], rates
{:ok, Money.from_float(:USD, 325.0)}
"""
@doc since: "5.3.0"
@spec sum([t(), ...], ExchangeRates.t()) :: {:ok, t} | {:error, {module(), String.t()}}
def sum([%Money{} = first | rest] = money_list, rates \\ %{}) when is_list(money_list) do
%Money{currency: target_currency} = first
Enum.reduce_while(rest, {:ok, first}, fn money, {:ok, acc} ->
case to_currency(money, target_currency, rates) do
{:ok, increment} -> {:cont, Money.add(acc, increment)}
error -> {:halt, error}
end
end)
end
@doc """
Compares two `Money` values numerically.
Returns `:gt` if the first value is greater than the second, `:lt` if it is
less, and `:eq` if the two values are equal.
## Arguments
* `money_1` and `money_2` are any valid `Money.t` types returned
by `Money.new/2`
## Returns
* `:gt` | `:eq` | `:lt` or
* `{:error, {module(), String.t}}`
## Examples
iex> Money.compare Money.new(:USD, 200), Money.new(:USD, 100)
:gt
iex> Money.compare Money.new(:USD, 200), Money.new(:USD, 200)
:eq
iex> Money.compare Money.new(:USD, 200), Money.new(:USD, 500)
:lt
iex> Money.compare Money.new(:USD, 200), Money.new(:CAD, 500)
{:error,
{ArgumentError,
"Cannot compare monies with different currencies. Received :USD and :CAD."}}
"""
@spec compare(money_1 :: Money.t(), money_2 :: Money.t()) ::
:gt | :eq | :lt | {:error, {module(), String.t()}}
def compare(%Money{currency: same_currency, amount: amount_a}, %Money{
currency: same_currency,
amount: amount_b
}) do
Cldr.Decimal.compare(amount_a, amount_b)
end
def compare(%Money{currency: code_a}, %Money{currency: code_b}) do
{:error,
{ArgumentError,
"Cannot compare monies with different currencies. " <>
"Received #{inspect(code_a)} and #{inspect(code_b)}."}}
end
@doc """
Compares two `Money` values numerically and raises on error.
## Arguments
* `money_1` and `money_2` are any valid `Money.t` types returned
by `Money.new/2`
## Returns
* `:gt` | `:eq` | `:lt` or
* raises an exception
## Examples
Money.compare! Money.new(:USD, 200), Money.new(:CAD, 500)
** (ArgumentError) Cannot compare monies with different currencies. Received :USD and :CAD.
"""
def compare!(%Money{} = money_1, %Money{} = money_2) do
case compare(money_1, money_2) do
{:error, {exception, reason}} -> raise exception, reason
result -> result
end
end
@doc """
Compares two `Money` values numerically.
Returns `1` if the first value is greater than the second, `-1` if it is
less, and `0` if the two values are equal.
## Arguments
* `money_1` and `money_2` are any valid `Money.t` types returned
by `Money.new/2`
## Returns
* `-1` | `0` | `1` or
* `{:error, {module(), String.t}}`
## Examples
iex> Money.cmp Money.new(:USD, 200), Money.new(:USD, 100)
1
iex> Money.cmp Money.new(:USD, 200), Money.new(:USD, 200)
0
iex> Money.cmp Money.new(:USD, 200), Money.new(:USD, 500)
-1
iex> Money.cmp Money.new(:USD, 200), Money.new(:CAD, 500)
{:error,
{ArgumentError,
"Cannot compare monies with different currencies. Received :USD and :CAD."}}
"""
@spec cmp(money_1 :: Money.t(), money_2 :: Money.t()) ::
-1 | 0 | 1 | {:error, {module(), String.t()}}
def cmp(%Money{currency: same_currency} = money_1, %Money{currency: same_currency} = money_2) do
case compare(money_1, money_2) do
:lt -> -1
:eq -> 0
:gt -> 1
end
end
def cmp(%Money{currency: code_a}, %Money{currency: code_b}) do
{:error,
{ArgumentError,
"Cannot compare monies with different currencies. " <>
"Received #{inspect(code_a)} and #{inspect(code_b)}."}}
end
@doc """
Compares two `Money` values numerically and raises on error.
## Arguments
* `money_1` and `money_2` are any valid `Money.t` types returned
by `Money.new/2`
## Returns
* `-1` | `0` | `1` or
* raises an exception
## Examples
Money.cmp! Money.new(:USD, 200), Money.new(:CAD, 500)
** (ArgumentError) Cannot compare monies with different currencies. Received :USD and :CAD.
"""
def cmp!(%Money{} = money_1, %Money{} = money_2) do
case cmp(money_1, money_2) do
{:error, {exception, reason}} -> raise exception, reason
result -> result
end
end
@doc """
Split a `Money` value into a number of parts maintaining the currency's
precision and rounding and ensuring that the parts sum to the original
amount.
## Arguments
* `money` is a `%Money{}` struct
* `parts` is an integer number of parts into which the `money` is split
Returns a tuple `{dividend, remainder}` as the function result
derived as follows:
1. Round the money amount to the required currency precision using
`Money.round/1`
2. Divide the result of step 1 by the integer divisor
3. Round the result of the division to the precision of the currency
using `Money.round/1`
4. Return two numbers: the result of the division and any remainder
that could not be applied given the precision of the currency.
## Examples
Money.split Money.new(123.5, :JPY), 3
{¥41, ¥1}
Money.split Money.new(123.4, :JPY), 3
{¥41, ¥0}
Money.split Money.new(123.7, :USD), 9
{$13.74, $0.04}
"""
@spec split(Money.t(), non_neg_integer) :: {Money.t(), Money.t()}
def split(%Money{} = money, parts) when is_integer(parts) do
rounded_money = Money.round(money)
div =
rounded_money
|> Money.div!(parts)
|> round
remainder = sub!(money, mult!(div, parts))
{div, remainder}
end
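  # A worked sketch of the steps documented above, for
  # `Money.split(Money.new(:USD, "123.70"), 9)` (USD has 2 fractional digits):
  #
  #     Money.round(Money.new(:USD, "123.70"))  #=> #Money<:USD, 123.70>
  #     Money.div!(#Money<:USD, 123.70>, 9)     #=> roughly 13.7444...
  #     Money.round/1                           #=> #Money<:USD, 13.74>
  #     remainder = 123.70 - (13.74 * 9)        #=> #Money<:USD, 0.04>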
@doc """
Round a `Money` value into the acceptable range for the requested currency.
## Arguments
* `money` is a `%Money{}` struct
* `opts` is a keyword list of options
## Options
* `:rounding_mode` that defines how the number will be rounded. See
`Decimal.Context`. The default is `:half_even` which is also known
as "banker's rounding"
* `:currency_digits` which determines the rounding increment.
The valid options are `:cash`, `:accounting` and `:iso` or
an integer value representing the rounding factor. The
default is `:iso`.
## Notes
There are two kinds of rounding applied:
1. Round to the appropriate number of fractional digits
2. Apply an appropriate rounding increment. Most currencies
round to the same precision as the number of decimal digits, but some,
such as `:CHF`, round to a minimum increment such as `0.05` when it's a cash
amount. The rounding increment is applied when the option
`:currency_digits` is set to `:cash`
## Examples
iex> Money.round Money.new("123.73", :CHF), currency_digits: :cash
#Money<:CHF, 123.75>
iex> Money.round Money.new("123.73", :CHF), currency_digits: 0
#Money<:CHF, 124>
iex> Money.round Money.new("123.7456", :CHF)
#Money<:CHF, 123.75>
iex> Money.round Money.new("123.7456", :JPY)
#Money<:JPY, 124>
"""
@spec round(Money.t(), Keyword.t()) :: Money.t()
def round(%Money{} = money, opts \\ []) do
money
|> round_to_decimal_digits(opts)
|> round_to_nearest(opts)
end
defp round_to_decimal_digits(%Money{currency: code, amount: amount}, opts) do
with {:ok, currency} <- Currency.currency_for_code(code) do
rounding_mode = Keyword.get(opts, :rounding_mode, @default_rounding_mode)
rounding = digits_from_opts(currency, opts[:currency_digits])
rounded_amount = Decimal.round(amount, rounding, rounding_mode)
%Money{currency: code, amount: rounded_amount}
end
end
defp digits_from_opts(currency, nil) do
currency.iso_digits
end
defp digits_from_opts(currency, :iso) do
currency.iso_digits
end
defp digits_from_opts(currency, :accounting) do
currency.digits
end
defp digits_from_opts(currency, :cash) do
currency.cash_digits
end
defp digits_from_opts(_currency, digits) when is_integer(digits) do
digits
end
defp round_to_nearest(%Money{currency: code} = money, opts) do
with {:ok, currency} <- Currency.currency_for_code(code) do
digits = digits_from_opts(currency, opts[:currency_digits])
increment = increment_from_opts(currency, opts[:currency_digits])
do_round_to_nearest(money, digits, increment, opts)
end
end
defp round_to_nearest({:error, _} = error, _opts) do
error
end
defp do_round_to_nearest(money, _digits, 0, _opts) do
money
end
defp do_round_to_nearest(money, digits, increment, opts) do
rounding_mode = Keyword.get(opts, :rounding_mode, @default_rounding_mode)
rounding =
-digits
|> Cldr.Math.power_of_10()
|> Kernel.*(increment)
|> Decimal.from_float()
rounded_amount =
money.amount
|> Decimal.div(rounding)
|> Decimal.round(0, rounding_mode)
|> Decimal.mult(rounding)
%Money{currency: money.currency, amount: rounded_amount}
end
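  # Sketch of the increment rounding above, assuming CLDR cash data for :CHF of
  # 2 cash digits and a cash rounding increment of 5 (illustrative values; the
  # actual data comes from `Currency.currency_for_code/1`):
  #
  #     rounding = Cldr.Math.power_of_10(-2) * 5   #=> 0.05
  #     Decimal.div(123.73, 0.05)                  #=> 2474.6
  #     Decimal.round(2474.6, 0, :half_even)       #=> 2475
  #     Decimal.mult(2475, 0.05)                   #=> 123.75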
defp increment_from_opts(currency, :cash) do
currency.cash_rounding
end
defp increment_from_opts(currency, _) do
currency.rounding
end
@doc """
Set the fractional part of a `Money`.
## Arguments
* `money` is a `%Money{}` struct
* `fraction` is an integer amount that will be set
as the fraction of the `money`
## Notes
The fraction can only be set if it matches the number of
decimal digits for the currency associated with the `money`.
Therefore, for a currency with 2 decimal digits, the
maximum for `fraction` is `99`.
## Examples
iex> Money.put_fraction Money.new(:USD, "2.49"), 99
#Money<:USD, 2.99>
iex> Money.put_fraction Money.new(:USD, "2.49"), 0
#Money<:USD, 2.0>
iex> Money.put_fraction Money.new(:USD, "2.49"), 999
{:error,
{Money.InvalidAmountError, "Rounding up to 999 is invalid for currency :USD"}}
"""
def put_fraction(money, fraction \\ 0)
@one Decimal.new(1)
@zero Decimal.new(0)
def put_fraction(%Money{amount: amount} = money, upto) when is_integer(upto) do
with {:ok, currency} <- Currency.currency_for_code(money.currency) do
digits = currency.digits
diff = Decimal.from_float((100 - upto) * :math.pow(10, -digits))
if Cldr.Decimal.compare(diff, @one) in [:lt, :eq] &&
Cldr.Decimal.compare(@zero, diff) in [:lt, :eq] do
new_amount =
Decimal.round(amount, 0)
|> Decimal.add(@one)
|> Decimal.sub(diff)
%{money | amount: new_amount}
else
{:error,
{Money.InvalidAmountError,
"Rounding up to #{inspect(upto)} is invalid for currency #{inspect(money.currency)}"}}
end
end
end
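  # Sketch of the arithmetic above for `put_fraction(Money.new(:USD, "2.49"), 99)`,
  # where USD has 2 fractional digits:
  #
  #     diff = (100 - 99) * :math.pow(10, -2)   #=> 0.01
  #     Decimal.round(2.49, 0)                  #=> 2
  #     2 + 1 - 0.01                            #=> 2.99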
@doc """
Convert `money` from one currency to another.
## Arguments
* `money` is any `Money.t` struct returned by `Money.new/2`
* `to_currency` is a valid currency code into which the `money` is converted
* `rates` is a `Map` of currency rates where the map key is an upcased
atom or string and the value is a Decimal conversion factor. The default is the
latest available exchange rates returned from `Money.ExchangeRates.latest_rates()`
## Converting to a currency defined in a locale
To convert a `Money` to a currency defined by a locale,
`Cldr.Currency.currency_from_locale/1` can be called with
a `t:Cldr.LanguageTag.t()` parameter. It will return
the currency configured for that locale.
## Examples
iex> Money.to_currency(Money.new(:USD, 100), :AUD,
...> %{USD: Decimal.new(1), AUD: Decimal.from_float(0.7345)})
{:ok, Money.new(:AUD, "73.4500")}
iex> Money.to_currency(Money.new("USD", 100), "AUD",
...> %{"USD" => Decimal.new(1), "AUD" => Decimal.from_float(0.7345)})
{:ok, Money.new(:AUD, "73.4500")}
iex> Money.to_currency(Money.new(:USD, 100), :AUDD,
...> %{USD: Decimal.new(1), AUD: Decimal.from_float(0.7345)})
{:error, {Cldr.UnknownCurrencyError, "The currency :AUDD is invalid"}}
iex> Money.to_currency(Money.new(:USD, 100), :CHF,
...> %{USD: Decimal.new(1), AUD: Decimal.from_float(0.7345)})
{:error, {Money.ExchangeRateError,
"No exchange rate is available for currency :CHF"}}
"""
@spec to_currency(
Money.t(),
currency_code(),
ExchangeRates.t() | {:ok, ExchangeRates.t()} | {:error, {module(), String.t()}}
) :: {:ok, Money.t()} | {:error, {module(), String.t()}}
def to_currency(money, to_currency, rates \\ Money.ExchangeRates.latest_rates())
def to_currency(%Money{} = money, currency, {:ok, %{} = rates}) do
to_currency(money, currency, rates)
end
def to_currency(_money, _to_currency, {:error, reason}) do
{:error, reason}
end
def to_currency(%Money{currency: currency} = money, currency, _rates) do
{:ok, money}
end
def to_currency(%Money{} = money, to_currency, %{} = rates)
when is_binary(to_currency) do
with {:ok, currency_code} <- validate_currency(to_currency) do
to_currency(money, currency_code, rates)
end
end
def to_currency(%Money{currency: from_currency, amount: amount} = money, to_currency, rates)
when is_atom(to_currency) and is_map(rates) do
with {:ok, to_currency_code} <- validate_currency(to_currency),
{:ok, cross_rate} <- cross_rate(from_currency, to_currency_code, rates) do
converted_amount = Decimal.mult(amount, cross_rate)
{:ok, %{money | currency: to_currency, amount: converted_amount}}
end
end
@doc """
Convert `money` from one currency to another and raises on error
## Arguments
* `money` is any `t:Money.t()` struct returned by `Money.new/2`
* `to_currency` is a valid currency code into which the `money` is converted
* `rates` is a `Map` of currency rates where the map key is an upcased
atom or string and the value is a Decimal conversion factor. The default is the
latest available exchange rates returned from `Money.ExchangeRates.latest_rates()`
## Examples
iex> Money.to_currency! Money.new(:USD, 100), :AUD,
...> %{USD: Decimal.new(1), AUD: Decimal.from_float(0.7345)}
#Money<:AUD, 73.4500>
iex> Money.to_currency! Money.new("USD", 100), "AUD",
...> %{"USD" => Decimal.new(1), "AUD" => Decimal.from_float(0.7345)}
#Money<:AUD, 73.4500>
Money.to_currency! Money.new(:USD, 100), :ZZZ,
%{USD: Decimal.new(1), AUD: Decimal.from_float(0.7345)}
** (Cldr.UnknownCurrencyError) Currency :ZZZ is not known
"""
@spec to_currency!(
Money.t(),
currency_code(),
ExchangeRates.t() | {:ok, ExchangeRates.t()} | {:error, {module(), String.t()}}
) :: Money.t() | no_return
def to_currency!(money, to_currency, rates \\ Money.ExchangeRates.latest_rates())
def to_currency!(%Money{} = money, currency, rates) do
case to_currency(money, currency, rates) do
{:ok, money} -> money
{:error, {exception, reason}} -> raise exception, reason
end
end
@doc """
Returns the effective cross-rate to convert from one currency
to another.
## Arguments
* `from` is any `Money.t` struct returned by `Money.new/2` or a valid
currency code
* `to_currency` is a valid currency code into which the `money` is converted
* `rates` is a `Map` of currency rates where the map key is an upcased
atom or string and the value is a Decimal conversion factor. The default is the
latest available exchange rates returned from `Money.ExchangeRates.latest_rates()`
## Examples
Money.cross_rate(Money.new(:USD, 100), :AUD, %{USD: Decimal.new(1), AUD: Decimal.new("0.7345")})
{:ok, #Decimal<0.7345>}
Money.cross_rate Money.new(:USD, 100), :ZZZ, %{USD: Decimal.new(1), AUD: Decimal.new(0.7345)}
** (Cldr.UnknownCurrencyError) Currency :ZZZ is not known
"""
@spec cross_rate(
Money.t() | currency_code,
currency_code,
ExchangeRates.t() | {:ok, ExchangeRates.t()}
) :: {:ok, Decimal.t()} | {:error, {module(), String.t()}}
def cross_rate(from, to, rates \\ Money.ExchangeRates.latest_rates())
def cross_rate(from, to, {:ok, rates}) do
cross_rate(from, to, rates)
end
def cross_rate(%Money{currency: from_currency}, to_currency, %{} = rates) do
cross_rate(from_currency, to_currency, rates)
end
def cross_rate(from_currency, to_currency, %{} = rates) do
with {:ok, from_code} <- validate_currency(from_currency),
{:ok, to_code} <- validate_currency(to_currency),
{:ok, from_rate} <- get_rate(from_code, rates),
{:ok, to_rate} <- get_rate(to_code, rates) do
{:ok, Decimal.div(to_rate, from_rate)}
end
end
@doc """
Returns the effective cross-rate to convert from one currency
to another.
## Arguments
* `from` is any `Money.t` struct returned by `Money.new/2` or a valid
currency code
* `to_currency` is a valid currency code into which the `money` is converted
* `rates` is a `Map` of currency rates where the map key is an upcased
atom or string and the value is a Decimal conversion factor. The default is the
latest available exchange rates returned from `Money.ExchangeRates.latest_rates()`
## Examples
iex> Money.cross_rate!(Money.new(:USD, 100), :AUD, %{USD: Decimal.new(1), AUD: Decimal.new("0.7345")})
#Decimal<0.7345>
iex> Money.cross_rate!(:USD, :AUD, %{USD: Decimal.new(1), AUD: Decimal.new("0.7345")})
#Decimal<0.7345>
Money.cross_rate Money.new(:USD, 100), :ZZZ, %{USD: Decimal.new(1), AUD: Decimal.new("0.7345")}
** (Cldr.UnknownCurrencyError) Currency :ZZZ is not known
"""
@spec cross_rate!(
Money.t() | currency_code,
currency_code,
ExchangeRates.t() | {:ok, ExchangeRates.t()}
) :: Decimal.t() | no_return
def cross_rate!(from, to_currency, rates \\ Money.ExchangeRates.latest_rates())
def cross_rate!(from, to_currency, rates) do
cross_rate(from, to_currency, rates)
|> do_cross_rate!
end
defp do_cross_rate!({:ok, rate}) do
rate
end
defp do_cross_rate!({:error, {exception, reason}}) do
raise exception, reason
end
@doc """
Normalizes the underlying decimal amount in a
given `Money.t()`
This will normalize the coefficient and exponent of the
decimal amount in a standard way that may aid in
native comparison of `%Money.t()` items.
## Example
iex> x = %Money{currency: :USD, amount: %Decimal{sign: 1, coef: 42, exp: 0}}
#Money<:USD, 42>
iex> y = %Money{currency: :USD, amount: %Decimal{sign: 1, coef: 4200000000, exp: -8}}
#Money<:USD, 42.00000000>
iex> x == y
false
iex> y = Money.normalize(y)
#Money<:USD, 42>
iex> x == y
true
"""
@spec normalize(Money.t()) :: Money.t()
Cldr.Macros.doc_since("5.0.0")
if Code.ensure_loaded?(Decimal) and function_exported?(Decimal, :normalize, 1) do
def normalize(%Money{amount: amount} = money) do
%{money | amount: Decimal.normalize(amount)}
end
else
def normalize(%Money{amount: amount} = money) do
%{money | amount: Decimal.reduce(amount)}
end
end
@deprecated "Use Money.normalize/1 instead."
def reduce(money) do
normalize(money)
end
@doc """
Returns a tuple comprising the currency code, integer amount,
exponent and remainder
Some services require submission of money items as an integer
with an implied exponent that is appropriate to the currency.
Rather than return only the integer, `Money.to_integer_exp`
returns the currency code, integer, exponent and remainder.
The remainder is included because, in order to return an integer
with an implied exponent, the `Money` has to be rounded,
potentially leaving a remainder.
## Arguments
* `money` is any `Money.t` struct returned by `Money.new/2`
## Notes
* Since the returned integer is expected to include the implied fractional
digits, the `Money` needs to be rounded, which is what this function does.
## Example
iex> m = Money.new(:USD, "200.012356")
#Money<:USD, 200.012356>
iex> Money.to_integer_exp(m)
{:USD, 20001, -2, Money.new(:USD, "0.002356")}
iex> m = Money.new(:USD, "200.00")
#Money<:USD, 200.00>
iex> Money.to_integer_exp(m)
{:USD, 20000, -2, Money.new(:USD, "0.00")}
"""
def to_integer_exp(%Money{} = money, opts \\ []) do
new_money =
money
|> Money.round(opts)
|> Money.normalize()
{:ok, remainder} = Money.sub(money, new_money)
{:ok, currency} = Currency.currency_for_code(money.currency)
digits = digits_from_opts(currency, opts[:currency_digits])
exponent = -digits
exponent_adjustment = Kernel.abs(exponent - new_money.amount.exp)
integer = Cldr.Math.power_of_10(exponent_adjustment) * new_money.amount.coef * new_money.amount.sign
{money.currency, integer, exponent, remainder}
end
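  # Sketch of the exponent adjustment above for `Money.new(:USD, "200.00")`
  # (USD implies an exponent of -2):
  #
  #     normalized amount      #=> coef 2, exp 2 (i.e. 2E+2)
  #     exponent_adjustment    #=> abs(-2 - 2) = 4
  #     integer                #=> 10^4 * 2 * 1 = 20000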
@doc """
Convert an integer representation of money into a `Money` struct.
## Arguments
* `integer` is an integer representation of a money amount including
any decimal digits. ie. `20000` would be interpreted to mean `$200.00`
if the `currency` is `:USD` and no `:fractional_digits` option
was provided.
* `currency` is the currency code for the `integer`. The assumed
decimal precision is derived from the currency code if no `fractional_digits`
option is specified.
* `options` is a keyword list of options.
## Options
* `:fractional_digits` which determines the currency precision implied
by the `integer`. The valid options are `:cash`, `:accounting`,
`:iso` or a non-negative integer. The default is `:iso` which uses the
[ISO 4217](https://en.wikipedia.org/wiki/ISO_4217) definition of
currency digits.
All other options are passed to `Money.new/3`.
## Returns
* A `t:Money` struct or
* `{:error, {exception, message}}`
## Notes
Some currencies, like the [Iraqi Dinar](https://en.wikipedia.org/wiki/Iraqi_dinar)
have a difference in the decimal digits defined by CLDR versus
those defined by [ISO 4217](https://en.wikipedia.org/wiki/ISO_4217). CLDR
defines the decimal digits for `IQD` as `0` whereas ISO 4217 defines
`3` decimal digits.
Since converting an integer to a money amount is very
sensitive to the number of fractional digits specified, it is
important to be clear about the precision of the data used
with this function and to take care when specifying the
`:fractional_digits` parameter.
## Examples
iex> Money.from_integer(20000, :USD)
#Money<:USD, 200.00>
iex> Money.from_integer(200, :JPY)
#Money<:JPY, 200>
iex> Money.from_integer(20012, :USD)
#Money<:USD, 200.12>
iex> Money.from_integer(20012, :USD, fractional_digits: 3)
#Money<:USD, 20.012>
iex> Money.from_integer(20012, :IQD)
#Money<:IQD, 20.012>
"""
@spec from_integer(integer, currency_code, Keyword.t()) ::
Money.t() | {:error, {module(), String.t()}}
def from_integer(amount, currency, options \\ []) when is_integer(amount) and is_list(options) do
with {:ok, currency} <- validate_currency(currency),
{:ok, currency_data} <- Currency.currency_for_code(currency),
{:ok, digits, options} <- digits_from_options(currency_data, options) do
sign = if amount < 0, do: -1, else: 1
sign
|> Decimal.new(Kernel.abs(amount), digits)
|> Money.new(currency, options)
end
end
defp digits_from_options(currency_data, options) when is_list(options) do
{fractional_digits, options} = Keyword.pop(options, :fractional_digits)
with {:ok, digits} <- digits_from_options(currency_data, fractional_digits) do
{:ok, -digits, options}
end
end
defp digits_from_options(currency_data, :iso), do: Map.fetch(currency_data, :iso_digits)
defp digits_from_options(currency_data, nil), do: Map.fetch(currency_data, :iso_digits)
defp digits_from_options(currency_data, :cash), do: Map.fetch(currency_data, :cash_digits)
defp digits_from_options(currency_data, :accounting), do: Map.fetch(currency_data, :digits)
defp digits_from_options(_currency_data, integer) when is_integer(integer) and integer >= 0, do: {:ok, integer}
defp digits_from_options(_currency_data, other),
do: {:error, {Money.InvalidDigitsError, "Unknown or invalid :fractional_digits option found: #{inspect other}"}}
@doc """
Return a zero amount `t:Money` in the given currency.
## Arguments
* `money_or_currency` is either a `t:Money` or
a currency code
* `options` is a keyword list of options passed
to `Money.new/3`. The default is `[]`.
## Example
iex> Money.zero(:USD)
#Money<:USD, 0>
iex> money = Money.new(:USD, 200)
iex> Money.zero(money)
#Money<:USD, 0>
iex> Money.zero :ZZZ
{:error, {Cldr.UnknownCurrencyError, "The currency :ZZZ is invalid"}}
"""
@spec zero(currency_code | Money.t()) :: Money.t()
def zero(money_or_currency, options \\ [])
def zero(%Money{currency: currency}, options) do
zero(currency, options)
end
def zero(currency, options) do
with {:ok, currency} <- validate_currency(currency) do
Money.new(currency, 0, options)
end
end
@doc false
def from_integer({currency, integer, _exponent, _remainder}) do
from_integer(integer, currency)
end
## Helpers
@doc false
def get_env(key, default \\ nil) do
case env = Application.get_env(:ex_money, key, default) do
{:system, env_key} ->
System.get_env(env_key) || default
_ ->
env
end
end
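  # Hypothetical config sketch showing the `{:system, var}` form resolved above.
  # The key and environment variable names are illustrative assumptions only:
  #
  #     # config/config.exs
  #     config :ex_money, api_key: {:system, "EX_MONEY_API_KEY"}
  #
  #     Money.get_env(:api_key)
  #     #=> the value of the EX_MONEY_API_KEY environment variable, or nil if unset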
def get_env(key, default, :integer) do
key
|> get_env(default)
|> to_integer
end
def get_env(key, default, :maybe_integer) do
key
|> get_env(default)
|> to_maybe_integer
end
def get_env(key, default, :module) do
key
|> get_env(default)
|> to_module()
end
def get_env(key, default, :boolean) do
case get_env(key, default) do
true ->
true
false ->
false
other ->
raise RuntimeError,
"[ex_money] The configuration key " <>
"#{inspect(key)} must be either true or false. #{inspect(other)} was provided."
end
end
defp to_integer(nil), do: nil
defp to_integer(n) when is_integer(n), do: n
defp to_integer(n) when is_binary(n), do: String.to_integer(n)
defp to_maybe_integer(nil), do: nil
defp to_maybe_integer(n) when is_integer(n), do: n
defp to_maybe_integer(n) when is_atom(n), do: n
defp to_maybe_integer(n) when is_binary(n), do: String.to_integer(n)
defp to_module(nil), do: nil
defp to_module(module_name) when is_atom(module_name), do: module_name
defp to_module(module_name) when is_binary(module_name) do
Module.concat([module_name])
end
defp get_rate(currency, rates) do
rates
|> Map.take([currency, Atom.to_string(currency)])
|> Map.values()
|> case do
[rate] ->
{:ok, rate}
_ ->
{:error,
{Money.ExchangeRateError,
"No exchange rate is available for currency #{inspect(currency)}"}}
end
end
@doc false
def json_library do
@json_library
end
defp parse_decimal(string, nil, nil) do
parse_decimal(string, default_backend().get_locale, default_backend())
end
defp parse_decimal(string, nil, backend) do
parse_decimal(string, backend.get_locale, backend)
end
defp parse_decimal(string, locale, nil) do
parse_decimal(string, locale, default_backend())
end
defp parse_decimal(string, locale, backend) do
with {:ok, locale} <- Cldr.validate_locale(locale, backend),
{:ok, symbols} <- Cldr.Number.Symbol.number_symbols_for(locale, backend) do
decimal =
string
|> String.replace(symbols.latn.group, "")
|> String.replace(symbols.latn.decimal, ".")
|> Decimal.new()
{:ok, decimal}
end
end
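  # Illustrative sketch: for a locale whose group separator is "." and whose
  # decimal separator is "," (as in many European locales, per CLDR data), the
  # string is normalised before `Decimal.new/1`:
  #
  #     "1.234,56"
  #     |> String.replace(".", "")
  #     |> String.replace(",", ".")
  #     |> Decimal.new()
  #     #=> #Decimal<1234.56>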
@doc false
@app_name Money.Mixfile.project() |> Keyword.get(:app)
def app_name do
@app_name
end
@doc """
Returns the default `ex_cldr` backend configured
for `Money`, if any. If no default backend is
configured, an exception is raised.
"""
def default_backend() do
cldr_default_backend = Application.get_env(Cldr.Config.app_name(), :default_backend)
Application.get_env(@app_name, :default_cldr_backend) || cldr_default_backend ||
raise """
A default backend must be configured in config.exs as either:
config :ex_cldr, default_backend: MyApp.Cldr
or
config :ex_money, default_cldr_backend: MyApp.Cldr
"""
end
end
|
lib/money.ex
| 0.924437
| 0.759426
|
money.ex
|
starcoder
|
defmodule WeePub.Subscriber.Filter do
@moduledoc false
@doc """
Captures a pattern and turns it into a filter function
"""
defmacro filter(pattern) do
quote do
fn (message) ->
case message do
unquote(pattern) -> true
_ -> false
end
end
end
end
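  # Usage sketch (the message shape is a hypothetical example):
  #
  #     ping? = filter(%{type: :ping})
  #     ping?.(%{type: :ping})  #=> true
  #     ping?.(%{type: :pong})  #=> false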
end
defmodule WeePub.Subscriber do
@moduledoc """
Creates a `GenServer` that registers subscriptions with `WeePub.Broadcaster`
"""
import __MODULE__.Filter
alias WeePub.Broadcaster
@doc false
defmacro __using__(_options) do
quote do
@module __MODULE__
import unquote(__MODULE__)
Module.register_attribute @module, :subscriptions, accumulate: true
@before_compile unquote(__MODULE__)
use GenServer
def child_spec(options) do
%{
id: @module,
start: {@module, :start, [options]},
type: :worker,
}
end
def start(options \\ []) do
GenServer.start_link(@module, nil, name: @module)
end
def init(state \\ nil) do
register_subscriptions()
{:ok, state}
end
end
end
@doc false
defmacro __before_compile__(_env) do
quote do
def register_subscriptions do
for subscription <- @subscriptions do
case subscription do
[pattern: pattern, where: where] ->
Broadcaster.subscribe filter: filter(pattern when where)
[pattern: pattern] ->
Broadcaster.subscribe filter: filter(pattern)
end
end
end
end
end
@doc """
Creates a `handle_cast` function that will accept messages matching the
pattern and the `where:` clause if present.
**Note:** The GenServer state is implicitly set to the result of the
body of the macro.
```
subscribe %{id: id} = message, where: id == 42 do
... processes the message
end
```
will be transformed to
```
def handle_cast(%{id: id} = message, state) when id == 42 do
state = ... process the message
{:noreply, state}
end
```
The pattern and Module will be registered with `WeePub.Broadcaster`
as part of the generated `init` function.
The `where:` clause is optional but when included needs to obey the
same restrictions as a `when` guard clause.
"""
defmacro subscribe(pattern, [where: where], do: block) do
quote do
@subscriptions [pattern: unquote(Macro.escape(pattern)), where: unquote(Macro.escape(where))]
def handle_cast(unquote(pattern), state) when unquote(where) do
state = (unquote(block))
{:noreply, state}
end
end
end
@doc false
defmacro subscribe(pattern, do: block) do
quote do
@subscriptions [pattern: unquote(Macro.escape(pattern))]
def handle_cast(unquote(pattern), state) do
state = (unquote(block))
{:noreply, state}
end
end
end
end
|
lib/wee_pub/subscriber.ex
| 0.874359
| 0.748007
|
subscriber.ex
|
starcoder
|
defmodule Cog.Pipeline.OutputSink do
alias Experimental.GenStage
alias Cog.Chat.Adapter, as: ChatAdapter
alias Cog.Events.PipelineEvent
alias Cog.Pipeline
alias Cog.Pipeline.{Destination, DataSignal, DoneSignal}
alias Cog.Template
alias Cog.Template.Evaluator
@moduledoc ~s"""
Specialized `GenStage` consumer to handle pipeline output. Accumulates
`Cog.Pipeline.DataSignal`s until it receives a non-error `Cog.Pipeline.DoneSignal`.
Then it generates appropriate output (executes Greenbar templates, etc) and routes it
to the correct destinations.
If a `DoneSignal` is received and no `DataSignal`s have been accumulated then
`OutputSink` will use the early exit template to generate a response.
"""
use GenStage
require Logger
@early_exit_template "early-exit"
@type t :: %__MODULE__{
all_events: [] | [DoneSignal.t],
conn: Carrier.Messaging.Connection.t,
destinations: Cog.Pipeline.Destination.destination_map,
owner: pid,
policy: Cog.Pipeline.output_policy,
request: Cog.Messages.ProviderRequest.t,
pipeline: pid,
started: DateTime.t
}
defstruct [:request, :pipeline, :owner, :policy, :destinations, :all_events, :conn, :started]
@doc ~s"""
Starts a new `OutputSink` process and attaches it to the parent pipeline.
## Options
* `:conn` - Pipeline's shared MQTT connection. Required.
* `:destinations` - Map of output destinations grouped by type. Required.
* `:policy` - Pipeline output policy. Required.
* `:owner` - Pid of the pipeline's owner process. Required.
* `:pipeline` - Pid of the parent pipeline. Required.
* `:started` - Pipeline start timestamp. Required.
* `:upstream` - Pid of the preceding pipeline stage. Required.
"""
@spec start_link(Keyword.t) :: {:ok, pid} | {:error, any}
def start_link(opts) do
GenStage.start_link(__MODULE__, opts)
end
def init(opts) do
try do
pipeline = Keyword.fetch!(opts, :pipeline)
Process.monitor(pipeline)
upstream = Keyword.fetch!(opts, :upstream)
{:consumer, %__MODULE__{pipeline: pipeline,
request: Keyword.fetch!(opts, :request),
owner: Keyword.fetch!(opts, :owner),
destinations: Keyword.get(opts, :destinations, []),
all_events: [],
conn: Keyword.fetch!(opts, :conn),
started: Keyword.fetch!(opts, :started),
policy: Keyword.fetch!(opts, :policy)}, [subscribe_to: [upstream]]}
rescue
e in KeyError ->
{:stop, {:error, Exception.message(e)}}
end
end
def handle_events(events, _from, state) do
errors_present = Enum.any?(events, &(DoneSignal.done?(&1) and DoneSignal.error?(&1)))
filtered_events = Enum.filter(events, &want_signal?/1) |> Enum.reduce([], &combine_events/2)
state = state
|> Map.update(:all_events, filtered_events, &(&1 ++ filtered_events))
|> process_output(errors_present)
{:noreply, [], state}
end
def handle_info({:DOWN, _mref, _, pipeline, _}, %__MODULE__{pipeline: pipeline}=state) do
{:stop, :normal, state}
end
def handle_info({:pipeline_complete, pipeline}, %__MODULE__{pipeline: pipeline}=state) do
{:stop, :normal, state}
end
def handle_info(_msg, state) do
{:noreply, state}
end
def terminate(_reason, state) do
Logger.debug("Output sink for pipeline #{state.request.id} shutting down")
end
defp want_signal?(%DataSignal{}), do: true
defp want_signal?(%DoneSignal{}=done) do
DoneSignal.error?(done) == false
end
defp want_signal?(_), do: false
def process_output(%__MODULE__{all_events: []}=state, _) do
state
end
# Early exit
def process_output(%__MODULE__{all_events: [%DoneSignal{}=done], policy: policy}=state, false) do
done = %{done | template: @early_exit_template}
send_to_owner(state)
if policy in [:adapter, :adapter_owner] do
early_exit_response(done, state)
end
success_event(state)
Pipeline.teardown(state.pipeline)
%{state | all_events: []}
end
def process_output(%__MODULE__{all_events: events, policy: policy}=state, _) do
if DoneSignal.done?(List.last(events)) do
send_to_owner(state)
if policy in [:adapter, :adapter_owner] do
Enum.each(events, &send_to_adapter(&1, state))
end
success_event(state)
Pipeline.teardown(state.pipeline)
%{state | all_events: []}
else
state
end
end
defp combine_events(%DoneSignal{}=done, accum) do
accum ++ [done]
end
defp combine_events(%DataSignal{}=data, []), do: [data]
defp combine_events(%DataSignal{}=next_data, [%DataSignal{}=last_data]) do
[%{last_data | data: List.wrap(last_data.data) ++ List.wrap(next_data.data),
bundle_version_id: next_data.bundle_version_id, template: next_data.template,
invocation: next_data.invocation}]
end
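  # Sketch of the fold above: successive data signals collapse into a single
  # signal whose payload is the concatenation of the individual payloads
  # (field values here are illustrative only):
  #
  #     combine_events(%DataSignal{data: [2, 3]}, [%DataSignal{data: [1]}])
  #     #=> [%DataSignal{data: [1, 2, 3], ...}]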
defp send_to_owner(%__MODULE__{all_events: events, policy: policy, owner: owner}=state) when policy in [:owner, :adapter_owner] do
Process.send(owner, {:pipeline, state.request.id, {:output, events}}, [])
end
defp send_to_owner(_state), do: :ok
defp send_to_adapter(%DoneSignal{}, state), do: state
defp send_to_adapter(%DataSignal{}=signal, state) do
Enum.each(state.destinations, &(send_to_adapter(&1, signal, state)))
end
defp send_to_adapter({type, targets}, signal, state) do
output = output_for(type, signal, nil)
Enum.each(targets, &ChatAdapter.send(state.conn, &1.provider, &1.room, output, state.request.metadata))
end
defp early_exit_response(%DoneSignal{}=signal, state) do
# Synthesize a DataSignal from a DoneSignal so we can render templates
data_signal = %DataSignal{template: signal.template,
data: [],
bundle_version_id: "common"}
destinations = Destination.here(state.request)
Enum.each(destinations, fn({type, destinations}) ->
output = output_for(type, data_signal, "Terminated early")
Enum.each(destinations, &ChatAdapter.send(&1.provider, &1.room, output, state.request.metadata))
end)
end
defp output_for(:chat, %DataSignal{}=signal, _message) do
output = signal.data
bundle_vsn = signal.bundle_version_id
template_name = signal.template
if bundle_vsn == "common" do
if template_name in ["error", "unregistered-user"] do
# No "envelope" for these templates right now
Evaluator.evaluate(template_name, output)
else
Evaluator.evaluate(template_name, Template.with_envelope(output))
end
else
Evaluator.evaluate(bundle_vsn, template_name, Template.with_envelope(output))
end
end
defp output_for(:trigger, signal, message) do
envelope = %{status: "success",
pipeline_output: List.wrap(signal.data)}
if message do
Map.put(envelope, :message, message)
else
envelope
end
end
defp output_for(:status_only, _signal, _message) do
%{status: "success"}
end
defp success_event(state) do
output = Enum.flat_map(state.all_events, fn(%DoneSignal{}) -> [];
(%DataSignal{data: data}) -> [data] end)
PipelineEvent.succeeded(state.request.id, state.started, output) |> Probe.notify
end
end
|
lib/cog/pipeline/output_sink.ex
| 0.772574
| 0.462048
|
output_sink.ex
|
starcoder
|
defmodule Ecto.Repo do
@moduledoc """
This module is used to define a repository. A repository maps to a data
store, for example an SQL database. A repository must implement `conf/0` and
set an adapter (see `Ecto.Adapter`) to be used for the repository.
When used, the following options are allowed:
* `:adapter` - the adapter to be used for the repository
* `:env` - configures the repository to support environments
## Example
defmodule MyRepo do
use Ecto.Repo, adapter: Ecto.Adapters.Postgres
def conf do
parse_url "ecto://postgres:postgres@localhost/postgres"
end
end
Most of the time, we want the repository to work with different
environments. In such cases, we can pass an `:env` option:
defmodule MyRepo do
use Ecto.Repo, adapter: Ecto.Adapters.Postgres, env: Mix.env
def conf(env), do: parse_url url(env)
defp url(:dev), do: "ecto://postgres:postgres@localhost/postgres_dev"
defp url(:test), do: "ecto://postgres:postgres@localhost/postgres_test?size=1"
defp url(:prod), do: "ecto://postgres:postgres@localhost/postgres_prod"
end
Notice that, when using the environment, developers should implement
`conf/1` which automatically passes the environment instead of `conf/0`.
Note the environment is only used at compilation time. That said, don't
forget to set the `:build_per_environment` option to true in your Mix
project definition.
"""
use Behaviour
@type t :: module
@doc false
defmacro __using__(opts) do
adapter = Keyword.fetch!(opts, :adapter)
env = Keyword.get(opts, :env)
quote do
use unquote(adapter)
@behaviour Ecto.Repo
@env unquote(env)
import Ecto.Utils, only: [app_dir: 2]
if @env do
def conf do
conf(@env)
end
defoverridable conf: 0
end
def start_link do
Ecto.Repo.Backend.start_link(__MODULE__, unquote(adapter))
end
def stop do
Ecto.Repo.Backend.stop(__MODULE__, unquote(adapter))
end
def storage_up do
Ecto.Repo.Backend.storage_up(__MODULE__, unquote(adapter))
end
def storage_down do
Ecto.Repo.Backend.storage_down(__MODULE__, unquote(adapter))
end
def get(queryable, id, opts \\ []) do
Ecto.Repo.Backend.get(__MODULE__, unquote(adapter), queryable, id, opts)
end
def all(queryable, opts \\ []) do
Ecto.Repo.Backend.all(__MODULE__, unquote(adapter), queryable, opts)
end
def insert(model, opts \\ []) do
Ecto.Repo.Backend.insert(__MODULE__, unquote(adapter), model, opts)
end
def update(model, opts \\ []) do
Ecto.Repo.Backend.update(__MODULE__, unquote(adapter), model, opts)
end
defmacro update_all(queryable, values, opts \\ []) do
Ecto.Repo.Backend.update_all(__MODULE__, unquote(adapter), queryable,
values, opts)
end
def delete(model, opts \\ []) do
Ecto.Repo.Backend.delete(__MODULE__, unquote(adapter), model, opts)
end
def delete_all(queryable, opts \\ []) do
Ecto.Repo.Backend.delete_all(__MODULE__, unquote(adapter), queryable, opts)
end
def transaction(opts \\ [], fun) do
Ecto.Repo.Backend.transaction(__MODULE__, unquote(adapter), opts, fun)
end
def rollback(value \\ nil) do
Ecto.Repo.Backend.rollback(__MODULE__, unquote(adapter), value)
end
def parse_url(url) do
Ecto.Repo.Backend.parse_url(url)
end
def adapter do
unquote(adapter)
end
def __repo__ do
true
end
def log(arg, fun) do
fun.()
end
def query_apis do
[Ecto.Query.API]
end
defoverridable [log: 2, query_apis: 0]
end
end
@doc """
Should return the database options that will be given to the adapter. Often
used in conjunction with `parse_url/1`. This function must be implemented by
the user.
"""
defcallback conf() :: Keyword.t
@doc """
Parses an Ecto URL of the following format:
`ecto://username:password@hostname:port/database?opts=123` where the
`password`, `port` and `options` are optional.
"""
defcallback parse_url(String.t) :: Keyword.t
@doc """
Starts any connection pooling or supervision and return `{:ok, pid}`
or just `:ok` if nothing needs to be done.
Returns `{:error, {:already_started, pid}}` if the repo already
started or `{:error, term}` in case anything else goes wrong.
"""
defcallback start_link() :: {:ok, pid} | :ok |
{:error, {:already_started, pid}} |
{:error, term}
@doc """
Stops any connection pooling or supervision started with `start_link/1`.
"""
defcallback stop() :: :ok
@doc """
Create the storage in the data store and return `:ok` if it was created
successfully.
Returns `{:error, :already_up}` if the storage has already been created or
`{:error, term}` in case anything else goes wrong.
"""
defcallback storage_up() :: :ok | {:error, :already_up} | {:error, term}
@doc """
Drop the storage in the data store and return `:ok` if it was dropped
successfully.
Returns `{:error, :already_down}` if the storage has already been dropped or
`{:error, term}` in case anything else goes wrong.
"""
defcallback storage_down() :: :ok | {:error, :already_down} | {:error, term}
@doc """
Fetches a single model from the data store where the primary key matches the
given id. Returns `nil` if no result was found. If the model in the queryable
has no primary key `Ecto.NoPrimaryKey` will be raised. `Ecto.AdapterError`
will be raised if there is an adapter error.
## Options
`:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
"""
defcallback get(Ecto.Queryable.t, term, Keyword.t) :: Ecto.Model.t | nil | no_return
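  # Usage sketch (assuming a `Post` model with an integer primary key):
  #
  #     post = MyRepo.get(Post, 42)
  #     MyRepo.get(Post, -1)  #=> nil when no row matches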
@doc """
Fetches all results from the data store based on the given query. May raise
`Ecto.QueryError` if query validation fails. `Ecto.AdapterError` will be
raised if there is an adapter error.
## Options
`:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
## Example
# Fetch all post titles
query = from p in Post,
select: p.title
MyRepo.all(query)
"""
defcallback all(Ecto.Query.t, Keyword.t) :: [Ecto.Model.t] | no_return
@doc """
Stores a single new model in the data store and returns its stored
representation. May raise `Ecto.AdapterError` if there is an adapter error.
## Options
`:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
## Example
post = %Post{title: "Ecto is great", text: "really, it is"}
|> MyRepo.insert
"""
defcallback insert(Ecto.Model.t, Keyword.t) :: Ecto.Model.t | no_return
@doc """
Updates a model using the primary key as key. If the model has no primary
key `Ecto.NoPrimaryKey` will be raised. `Ecto.AdapterError` will be raised if
there is an adapter error.
## Options
`:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
## Example
[post] = from p in Post, where: p.id == 42
post = post.title("New title")
MyRepo.update(post)
"""
defcallback update(Ecto.Model.t, Keyword.t) :: :ok | no_return
@doc """
Updates all entities matching the given query with the given values.
`Ecto.AdapterError` will be raised if there is an adapter error.
## Options
`:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
## Examples
MyRepo.update_all(Post, title: "New title")
MyRepo.update_all(p in Post, visits: p.visits + 1)
from(p in Post, where: p.id < 10)
|> MyRepo.update_all(title: "New title")
"""
defmacrocallback update_all(Macro.t, Keyword.t, Keyword.t) :: integer | no_return
@doc """
Deletes a model using the primary key as key. If the model has no primary
key `Ecto.NoPrimaryKey` will be raised. `Ecto.AdapterError` will be raised if
there is an adapter error.
## Options
`:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
## Example
[post] = MyRepo.all(from(p in Post, where: p.id == 42))
MyRepo.delete(post)
"""
defcallback delete(Ecto.Model.t, Keyword.t) :: :ok | no_return
@doc """
Deletes all entities matching the given query with the given values.
`Ecto.AdapterError` will be raised if there is an adapter error.
## Options
`:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
## Examples
MyRepo.delete_all(Post)
from(p in Post, where: p.id < 10) |> MyRepo.delete_all
"""
defcallback delete_all(Ecto.Queryable.t, Keyword.t) :: integer | no_return
@doc """
Runs the given function inside a transaction. If an unhandled error occurs the
transaction will be rolled back. If no error occurred the transaction will be
committed when the function returns. A transaction can be explicitly rolled
back by calling `rollback/1`; this will immediately leave the function and
return the value given to `rollback/1` as `{:error, value}`. A successful
transaction returns the value returned by the function wrapped in a tuple as
`{:ok, value}`. Transactions can be nested.
## Options
`:timeout` - The time in milliseconds to wait for the call to finish,
`:infinity` will wait indefinitely (default: 5000);
## Examples
MyRepo.transaction(fn ->
MyRepo.update(alice.update_balance(&(&1 - 10)))
MyRepo.update(bob.update_balance(&(&1 + 10)))
end)
# In the following example only the comment will be rolled back
MyRepo.transaction(fn ->
MyRepo.insert(%Post{})
MyRepo.transaction(fn ->
MyRepo.insert(%Comment{})
raise "error"
end)
end)
# Roll back a transaction explicitly
MyRepo.transaction(fn ->
p = MyRepo.insert(%Post{})
if not Editor.post_allowed?(p) do
MyRepo.rollback
end
end)
"""
defcallback transaction(Keyword.t, fun) :: {:ok, any} | {:error, any}
@doc """
Rolls back the current transaction. See `rollback/1`.
"""
defcallback rollback() :: no_return
@doc """
Rolls back the current transaction. The transaction will return the value
given as `{:error, value}`.
"""
defcallback rollback(any) :: no_return
@doc """
Returns the adapter tied to the repository.
"""
defcallback adapter() :: Ecto.Adapter.t
@doc """
Enables logging and debugging of adapter actions such as sending queries to
the database. Should be overridden to customize behaviour.
## Examples
def log({:query, sql}, fun) do
{time, result} = :timer.tc(fun)
Logger.log({sql, time})
result
end
def log(_arg, fun), do: fun.()
"""
defcallback log(any, (() -> any)) :: any
@doc """
Returns the supported query APIs. Should be overridden to customize.
"""
defcallback query_apis() :: [module]
end
|
lib/ecto/repo.ex
| 0.869063
| 0.601945
|
repo.ex
|
starcoder
|
defmodule ExDiceRoller.Filters do
@moduledoc """
Filters are used to filter the final value of an evaluated dice roll using
either a provided comparator and comparison number, such as `>=: 3`, or
dropping highest or lowest value, such as `drop_highest: true`. Possible
comparators include:
* numerical: `>=`, `<=`, `=`, `!=`, `<`, and `>` in the format `<comparator>:
<number>`.
* boolean: `drop_highest`, `drop_lowest`, `drop_highest_lowest` in the format
`<comparator>: true | false`.
Note that boolean filters require a list of values, such as adding the
separator (`,`) comparator or using the `:keep` option.
Examples:
iex> ExDiceRoller.roll("1d4", >=: 5)
[]
iex> ExDiceRoller.roll("6d6", <=: 4, opts: :keep)
[3, 2, 4, 2]
iex> ExDiceRoller.roll("xd6", x: [1, 2, 3, 2], >=: 4, opts: :keep)
[6, 4, 5, 4, 4]
iex> ExDiceRoller.roll("4d10", drop_highest: true, opts: :keep)
[9, 6, 4]
iex> ExDiceRoller.roll("4d10", drop_highest_lowest: true, opts: :keep)
[6, 9]
"""
alias ExDiceRoller.Compiler
@doc """
Filter the calculated value using the list of provided filters.
iex> ExDiceRoller.Filters.filter([1, 2, 3, 4, 5, 6], [>=: 3])
[3, 4, 5, 6]
iex> ExDiceRoller.Filters.filter([1, 2, 3, 4, 5, 6], [drop_lowest: true])
[2, 3, 4, 5, 6]
"""
@spec filter(Compiler.calculated_val(), list(tuple)) :: Compiler.calculated_val()
def filter(val, []), do: val
def filter(val, filters) when is_number(val), do: filter([val], filters)
def filter(val, filters) when length(filters) > 0 do
Enum.reduce(filters, val, &do_filter(&2, &1))
end
@doc """
Extract all filters from an argument list and return them as well as the
updated argument list.
"""
@spec get_filters(Keyword.t()) :: {list(any), Keyword.t()}
def get_filters(args) do
filters = do_get_filter(args, [])
{filters,
Enum.filter(args, fn {k, _} ->
k not in [:>=, :!=, :<=, :=, :>, :<, :drop_lowest, :drop_highest, :drop_highest_lowest]
end)}
end
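  # Illustrative sketch of the split performed above; filters are accumulated
  # in reverse order of appearance:
  #
  #     get_filters([>=: 3, drop_lowest: true, opts: :keep])
  #     #=> {[drop_lowest: true, >=: 3], [opts: :keep]}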
@spec do_filter(list(Compiler.calculated_val()), tuple) :: list(Compiler.calculated_val())
defp do_filter(val, {:>=, num}), do: Enum.filter(val, &(&1 >= num))
defp do_filter(val, {:<=, num}), do: Enum.filter(val, &(&1 <= num))
defp do_filter(val, {:=, num}), do: Enum.filter(val, &(&1 == num))
defp do_filter(val, {:!=, num}), do: Enum.filter(val, &(&1 != num))
defp do_filter(val, {:>, num}), do: Enum.filter(val, &(&1 > num))
defp do_filter(val, {:<, num}), do: Enum.filter(val, &(&1 < num))
defp do_filter(val, {:drop_lowest, true}), do: val |> Enum.sort() |> Enum.drop(1)
defp do_filter(val, {:drop_highest, true}),
do: val |> Enum.sort() |> Enum.reverse() |> Enum.drop(1)
defp do_filter(val, {:drop_highest_lowest, true}) do
val |> Enum.sort() |> Enum.drop(1) |> Enum.drop(-1)
end
@spec do_get_filter(Keyword.t(), list(any)) :: list(any)
defp do_get_filter([], acc), do: acc
defp do_get_filter([{:>=, _} = f | rest], acc), do: do_get_filter(rest, [f] ++ acc)
defp do_get_filter([{:<=, _} = f | rest], acc), do: do_get_filter(rest, [f] ++ acc)
defp do_get_filter([{:=, _} = f | rest], acc), do: do_get_filter(rest, [f] ++ acc)
defp do_get_filter([{:!=, _} = f | rest], acc), do: do_get_filter(rest, [f] ++ acc)
defp do_get_filter([{:>, _} = f | rest], acc), do: do_get_filter(rest, [f] ++ acc)
defp do_get_filter([{:<, _} = f | rest], acc), do: do_get_filter(rest, [f] ++ acc)
defp do_get_filter([{:drop_lowest, true} = f | rest], acc), do: do_get_filter(rest, [f] ++ acc)
defp do_get_filter([{:drop_highest, true} = f | rest], acc), do: do_get_filter(rest, [f] ++ acc)
defp do_get_filter([{:drop_highest_lowest, true} = f | rest], acc),
do: do_get_filter(rest, [f] ++ acc)
defp do_get_filter([_ | rest], acc), do: do_get_filter(rest, acc)
end
|
lib/filters.ex
| 0.787441
| 0.787237
|
filters.ex
|
starcoder
|
defmodule Snitch.Data.Model.LineItem do
@moduledoc """
LineItem API and utilities.
"""
use Snitch.Data.Model
import Ecto.Changeset, only: [change: 1]
alias Snitch.Data.Model.{Variant, Product}
alias Snitch.Data.Schema.LineItem
alias Snitch.Domain.Order
alias Snitch.Tools.Money, as: MoneyTools
alias Snitch.Domain.Stock.Quantifier
@doc """
Creates a new `line_item` for an existing order referenced by `params.order_id`.
This may also update some associated entities, such as `Order`, `Package`,
etc., in the same DB transaction.
Returns the newly inserted `line_item` with the order, and all line items preloaded.
Other updated associations may or may not be preloaded.
"""
@spec create(map) :: LineItem.t()
def create(params) do
%LineItem{}
|> LineItem.create_changeset(params)
|> Order.validate_change()
|> Quantifier.validate_in_stock()
|> Repo.insert()
end
@doc """
Updates `line_item`, and possibly other associations in the same DB transaction.
Returns the newly inserted `line_item` with the order and all line items preloaded.
Other updated associations may or may not be preloaded.
"""
@spec update(LineItem.t(), map) :: LineItem.t()
def update(%LineItem{} = line_item, params) do
line_item
|> LineItem.update_changeset(params)
|> Order.validate_change()
|> Quantifier.validate_in_stock()
|> Repo.update()
end
@doc """
Deletes `line_item`, and possibly updates other associations in the same DB transaction.
Returns the deleted `line_item` with the order and all line items preloaded.
> The deleted line item will not be in the assoc list.
Other updated associations may or may not be preloaded.
"""
@spec delete(LineItem.t()) :: LineItem.t()
def delete(%LineItem{} = line_item) do
line_item
|> change()
|> Order.validate_change()
|> Repo.delete()
end
@spec get(map) :: {:ok, LineItem.t()} | {:error, atom}
def get(query_fields) do
QH.get(LineItem, query_fields, Repo)
end
@spec get_all() :: [LineItem.t()]
def get_all, do: Repo.all(LineItem)
@doc """
Set `:unit_price` for many `LineItem` `params`.
`params` from external sources might not include `unit_price`; this function
_can_ compute it and return the updated `params`.
Since it accepts any list of maps rather than validated changesets, the price
might not be computable for every item. Such items are returned as-is in the list.
## Note
Selling prices of all `LineItem`s are fetched from the DB in a single query.
## Example
When `variant_id` is `nil` or does not exist, no update is made.
```
iex> Model.LineItem.update_unit_price([%{product_id: -1, quantity: 2}])
[%{product_id: -1, quantity: 2}]
```
```
iex> product = Snitch.Core.Tools.MultiTenancy.Repo.one(Snitch.Data.Schema.Product)
iex> product.selling_price
#Money<:USD, 12.99000000>
iex> [priced_item] = Model.LineItem.update_unit_price(
...> [%{product_id: product.id, quantity: 2}]
...> )
iex> priced_item.unit_price
#Money<:USD, 12.99000000>
```
"""
@spec update_unit_price([map]) :: [map]
def update_unit_price([]), do: []
def update_unit_price(line_items) do
unit_selling_prices =
line_items
|> Stream.map(&Map.get(&1, :product_id))
|> Enum.reject(fn x -> is_nil(x) end)
|> Product.get_selling_prices()
Enum.map(line_items, &set_price_and_total(&1, unit_selling_prices))
end
@doc """
Returns the item total for given `line_items`.
If the list is empty, the call is delegated to `MoneyTools.zero!/1`.
"""
@spec compute_total([LineItem.t()]) :: Money.t()
def compute_total([]), do: MoneyTools.zero!()
def compute_total(line_items) when is_list(line_items) do
line_items
|> Stream.map(fn %{quantity: q, unit_price: price} ->
Money.mult!(price, q)
end)
|> Enum.reduce(&Money.add!/2)
|> Money.reduce()
end
@spec set_price_and_total(map, %{optional(non_neg_integer) => Money.t()}) :: map
defp set_price_and_total(line_item, unit_selling_prices) do
with {:ok, product_id} <- Map.fetch(line_item, :product_id),
{:ok, unit_price} <- Map.fetch(unit_selling_prices, product_id) do
Map.put(line_item, :unit_price, unit_price)
else
_ -> line_item
end
end
end
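# A minimal sketch of the price/total flow above, using plain integers instead
# of `Money` structs (purely illustrative; the real module uses `Money.mult!/2`
# and `Money.add!/2`, and fetches selling prices from the DB):
line_items = [%{quantity: 2, unit_price: 1299}, %{quantity: 1, unit_price: 499}]

item_total =
  line_items
  |> Enum.map(fn %{quantity: q, unit_price: price} -> price * q end)
  |> Enum.sum()

# item_total == 3_097 (amounts in cents, for the sake of the sketch)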
apps/snitch_core/lib/core/data/model/line_item.ex
defmodule Infer.Archive do
@moduledoc """
Archive type matchers based on the [magic number](https://en.wikipedia.org/wiki/Magic_number_(programming))
"""
defdelegate epub?(binary), to: Infer.Book
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a zip archive.
See: https://en.wikipedia.org/wiki/List_of_file_signatures
## Examples
iex> binary = File.read!("test/archives/sample.zip")
iex> Infer.Archive.zip?(binary)
true
"""
@spec zip?(binary()) :: boolean()
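# The three signatures below are the standard ZIP magic numbers:
# "PK\x03\x04" (local file header), "PK\x05\x06" (empty archive / end of
# central directory only), and "PK\x07\x08" (spanned archive).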
def zip?(<<0x50, 0x4B, 0x3, 0x4, _rest::binary>>), do: true
def zip?(<<0x50, 0x4B, 0x5, 0x6, _rest::binary>>), do: true
def zip?(<<0x50, 0x4B, 0x7, 0x8, _rest::binary>>), do: true
def zip?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a tar archive.
"""
@spec tar?(binary()) :: boolean()
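# Classic (ustar) tar has no magic at offset 0; "ustar" (0x75 0x73 0x74 0x61
# 0x72) appears at byte offset 257 of the first header block.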
def tar?(<<_data::binary-size(257), 0x75, 0x73, 0x74, 0x61, 0x72, _rest::binary>>), do: true
def tar?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a rar archive.
"""
@spec rar?(binary()) :: boolean()
def rar?(<<0x52, 0x61, 0x72, 0x21, 0x1A, 0x7, 0x0, _rest::binary>>), do: true
def rar?(<<0x52, 0x61, 0x72, 0x21, 0x1A, 0x7, 0x1, _rest::binary>>), do: true
def rar?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a gzip archive.
"""
@spec gz?(binary()) :: boolean()
def gz?(<<0x1F, 0x8B, 0x8, _rest::binary>>), do: true
def gz?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a bzip2 archive.
"""
@spec bz2?(binary()) :: boolean()
def bz2?(<<0x42, 0x5A, 0x68, _rest::binary>>), do: true
def bz2?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a 7z archive.
"""
@spec sevenz?(binary()) :: boolean()
def sevenz?(<<0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C, _rest::binary>>), do: true
def sevenz?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a pdf.
## Examples
iex> binary = File.read!("test/archives/sample.pdf")
iex> Infer.Archive.pdf?(binary)
true
"""
@spec pdf?(binary()) :: boolean()
def pdf?(<<0x25, 0x50, 0x44, 0x46, _rest::binary>>), do: true
def pdf?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a swf.
"""
@spec swf?(binary()) :: boolean()
def swf?(<<0x43, 0x57, 0x53, _rest::binary>>), do: true
def swf?(<<0x46, 0x57, 0x53, _rest::binary>>), do: true
def swf?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's an RTF document.
"""
@spec rtf?(binary()) :: boolean()
def rtf?(<<0x7B, 0x5C, 0x72, 0x74, 0x66, _rest::binary>>), do: true
def rtf?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a Nintendo NES ROM.
"""
@spec nes?(binary()) :: boolean()
def nes?(<<0x4E, 0x45, 0x53, 0x1A, _rest::binary>>), do: true
def nes?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a Google Chrome Extension.
"""
@spec crx?(binary()) :: boolean()
def crx?(<<0x43, 0x72, 0x32, 0x34, _rest::binary>>), do: true
def crx?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a CAB.
"""
@spec cab?(binary()) :: boolean()
def cab?(<<0x4D, 0x53, 0x43, 0x46, _rest::binary>>), do: true
def cab?(<<0x49, 0x53, 0x63, 0x28, _rest::binary>>), do: true
def cab?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's an EOT (Embedded OpenType) font.
"""
@spec eot?(binary()) :: boolean()
def eot?(<<_header::binary-size(8), 0x01, 0x00, 0x00, _data::binary-size(24), 0x4C, 0x50, _rest::binary>>), do: true
def eot?(<<_header::binary-size(8), 0x02, 0x00, 0x02, _data::binary-size(24), 0x4C, 0x50, _rest::binary>>), do: true
def eot?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a PostScript file.
"""
@spec ps?(binary()) :: boolean()
def ps?(<<0x25, 0x21, _rest::binary>>), do: true
def ps?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's an xz archive.
"""
@spec xz?(binary()) :: boolean()
def xz?(<<0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00, _rest::binary>>), do: true
def xz?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a sqlite3 database.
## Examples
iex> binary = File.read!("test/archives/sample.db")
iex> Infer.Archive.sqlite?(binary)
true
"""
@spec sqlite?(binary()) :: boolean()
def sqlite?(<<0x53, 0x51, 0x4C, 0x69, _rest::binary>>), do: true
def sqlite?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a deb archive.
"""
@spec deb?(binary()) :: boolean()
def deb?(
<<0x21, 0x3C, 0x61, 0x72, 0x63, 0x68, 0x3E, 0x0A, 0x64, 0x65, 0x62, 0x69, 0x61, 0x6E, 0x2D, 0x62, 0x69, 0x6E, 0x61, 0x72, 0x79,
_rest::binary>>
),
do: true
def deb?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's an ar archive.
"""
@spec ar?(binary()) :: boolean()
def ar?(<<0x21, 0x3C, 0x61, 0x72, 0x63, 0x68, 0x3E, _rest::binary>>), do: true
def ar?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a z archive.
"""
@spec z?(binary()) :: boolean()
def z?(<<0x1F, 0xA0, _rest::binary>>), do: true
def z?(<<0x1F, 0x9D, _rest::binary>>), do: true
def z?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a lzip archive.
"""
@spec lz?(binary()) :: boolean()
def lz?(<<0x4C, 0x5A, 0x49, 0x50, _rest::binary>>), do: true
def lz?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's an RPM package.
"""
@spec rpm?(binary()) :: boolean()
def rpm?(<<0xED, 0xAB, 0xEE, 0xDB, _rest::binary>> = binary) when byte_size(binary) > 96, do: true
def rpm?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a DICOM (dcm) file.
"""
@spec dcm?(binary()) :: boolean()
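# DICOM files begin with a 128-byte preamble followed by the "DICM" magic.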
def dcm?(<<_data::binary-size(128), 0x44, 0x49, 0x43, 0x4D, _rest::binary>>), do: true
def dcm?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a Zstd archive.
## Examples
iex> binary = File.read!("test/archives/sample.tar.zst")
iex> Infer.Archive.zst?(binary)
true
"""
@spec zst?(binary()) :: boolean()
def zst?(<<0x28, 0xB5, 0x2F, 0xFD, _rest::binary>>), do: true
def zst?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's an MSI Windows installer archive.
"""
@spec msi?(binary()) :: boolean()
def msi?(<<0xD0, 0xCF, 0x11, 0xE0, 0xA1, 0xB1, 0x1A, 0xE1, _rest::binary>>), do: true
def msi?(_binary), do: false
end
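# A small sketch of how a few of the matchers above might be combined into a
# single lookup. The module name, function name, and label atoms are
# illustrative assumptions; only the `Infer.Archive` predicates come from the
# module above, and the table can be extended with any of the other matchers.
defmodule ArchiveKind do
  @moduledoc false

  defp matchers do
    [
      zip: &Infer.Archive.zip?/1,
      tar: &Infer.Archive.tar?/1,
      gz: &Infer.Archive.gz?/1,
      xz: &Infer.Archive.xz?/1,
      zst: &Infer.Archive.zst?/1
    ]
  end

  @doc false
  def detect(binary) when is_binary(binary) do
    Enum.find_value(matchers(), fn {kind, matcher?} ->
      if matcher?.(binary), do: kind
    end)
  end
end

# ArchiveKind.detect(File.read!("some/file"))  #=> :gz, :zip, ..., or nil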
lib/matchers/archive.ex
defmodule Timex.Helpers do
@moduledoc false
use Timex.Constants
import Timex.Macros
alias Timex.Types
@doc """
Given a {year, day} tuple where the day is the iso day of that year, returns
the date tuple of format {year, month, day}.
## Examples
iex> Timex.Helpers.iso_day_to_date_tuple(1988, 240)
{1988, 8, 27}
If the given day or year is invalid, a tuple of the form `{:error, reason}`
is returned. For example:
iex> Timex.Helpers.iso_day_to_date_tuple(-50, 20)
{:error, :invalid_year}
iex> Timex.Helpers.iso_day_to_date_tuple(50, 400)
{:error, :invalid_day}
iex> Timex.Helpers.iso_day_to_date_tuple(-50, 400)
{:error, :invalid_year_and_day}
Days that are only valid in leap years (such as day 366) are invalid in
non-leap years. For example:
iex> Timex.Helpers.iso_day_to_date_tuple(2028, 366)
{2028, 12, 31}
iex> Timex.Helpers.iso_day_to_date_tuple(2027, 366)
{:error, :invalid_day}
"""
@spec iso_day_to_date_tuple(Types.year, Types.day) :: Types.valid_datetime | {:error, term}
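# Walks the (leap-aware) ordinal table to find the last month whose first
# ordinal day-of-year is <= `day`, then converts the remainder into the day of
# that month.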
def iso_day_to_date_tuple(year, day) when is_year(year) and is_iso_day_of_year(year, day) do
{month, first_of_month} = cond do
:calendar.is_leap_year(year) ->
List.last(Enum.take_while(@ordinals_leap, fn {_m, odom} -> odom <= day end))
:else ->
List.last(Enum.take_while(@ordinals, fn {_m, odom} -> odom <= day end))
end
{year, month, day - (first_of_month-1)}
end
def iso_day_to_date_tuple(year, _) when is_year(year), do: {:error, :invalid_day}
def iso_day_to_date_tuple(year, day) when is_iso_day_of_year(year, day) do
{:error, :invalid_year}
end
def iso_day_to_date_tuple(_, _), do: {:error, :invalid_year_and_day}
def days_in_month(year, month) when is_year(year) and is_month(month) do
:calendar.last_day_of_the_month(year, month)
end
def days_in_month(year, month) do
valid_year? = year > 0
valid_month? = month in @valid_months
cond do
!valid_year? && valid_month? ->
{:error, :invalid_year}
valid_year? && !valid_month? ->
{:error, :invalid_month}
true ->
{:error, :invalid_year_and_month}
end
end
@doc """
Given a {year, month, day} tuple, normalizes it so
that the day does not exceed the maximum valid days in that month
"""
def normalize_date_tuple({year, month, day}) do
# Check if we got past the last day of the month
max_day = days_in_month(year, month)
{year, month, min(day, max_day)}
end
def round_month(m) do
case mod(m, 12) do
0 -> 12
other -> other
end
end
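# Floored modulo: unlike `rem/2`, the result is always in 0..(b - 1) for
# positive `b`, so negative month offsets wrap around correctly.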
defp mod(a, b), do: rem(rem(a, b) + b, b)
end
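# Hedged examples for the helpers above that have no doctests (this is an
# internal Timex module, so treat these as illustrative only):
#
#     iex> Timex.Helpers.normalize_date_tuple({2021, 2, 31})
#     {2021, 2, 28}
#
#     iex> Timex.Helpers.round_month(14)
#     2
#     iex> Timex.Helpers.round_month(0)
#     12
#     iex> Timex.Helpers.round_month(-1)
#     11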
lib/timex/helpers.ex
defmodule AWS.OpsWorks do
@moduledoc """
AWS OpsWorks
Welcome to the *AWS OpsWorks Stacks API Reference*.
This guide provides descriptions, syntax, and usage examples for AWS OpsWorks
Stacks actions and data types, including common parameters and error codes.
AWS OpsWorks Stacks is an application management service that provides an
integrated experience for overseeing the complete application lifecycle. For
information about this product, go to the [AWS OpsWorks](http://aws.amazon.com/opsworks/) details page.
## SDKs and CLI
The most common way to use the AWS OpsWorks Stacks API is by using the AWS
Command Line Interface (CLI) or by using one of the AWS SDKs to implement
applications in your preferred language. For more information, see:
* [AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-welcome.html)
* [AWS SDK for Java](https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/opsworks/AWSOpsWorksClient.html)
* [AWS SDK for .NET](https://docs.aws.amazon.com/sdkfornet/latest/apidocs/html/N_Amazon_OpsWorks.htm)
* [AWS SDK for PHP 2](https://docs.aws.amazon.com/aws-sdk-php-2/latest/class-Aws.OpsWorks.OpsWorksClient.html)
* [AWS SDK for Ruby](http://docs.aws.amazon.com/sdkforruby/api/)
* [AWS SDK for Node.js](http://aws.amazon.com/documentation/sdkforjavascript/)
* [AWS SDK for Python(Boto)](http://docs.pythonboto.org/en/latest/ref/opsworks.html)
## Endpoints
AWS OpsWorks Stacks supports the following endpoints, all HTTPS. You must
connect to one of the following endpoints. Stacks can only be accessed or
managed within the endpoint in which they are created.
* opsworks.us-east-1.amazonaws.com
* opsworks.us-east-2.amazonaws.com
* opsworks.us-west-1.amazonaws.com
* opsworks.us-west-2.amazonaws.com
* opsworks.ca-central-1.amazonaws.com (API only; not available in
the AWS console)
* opsworks.eu-west-1.amazonaws.com
* opsworks.eu-west-2.amazonaws.com
* opsworks.eu-west-3.amazonaws.com
* opsworks.eu-central-1.amazonaws.com
* opsworks.ap-northeast-1.amazonaws.com
* opsworks.ap-northeast-2.amazonaws.com
* opsworks.ap-south-1.amazonaws.com
* opsworks.ap-southeast-1.amazonaws.com
* opsworks.ap-southeast-2.amazonaws.com
* opsworks.sa-east-1.amazonaws.com
## Chef Versions
When you call `CreateStack`, `CloneStack`, or `UpdateStack` we recommend you use
the `ConfigurationManager` parameter to specify the Chef version. The
recommended and default value for Linux stacks is currently 12. Windows stacks
use Chef 12.2. For more information, see [Chef Versions](https://docs.aws.amazon.com/opsworks/latest/userguide/workingcookbook-chef11.html).
You can specify Chef 12, 11.10, or 11.4 for your Linux stack. We recommend
migrating your existing Linux stacks to Chef 12 as soon as possible.
"""
@doc """
Assign a registered instance to a layer.
* You can assign registered on-premises instances to any layer type.
* You can assign registered Amazon EC2 instances only to custom
layers.
* You cannot use this action with instances that were created with
AWS OpsWorks Stacks.
**Required Permissions**: To use this action, an AWS Identity and Access
Management (IAM) user must have a Manage permissions level for the stack or an
attached policy that explicitly grants permissions. For more information on user
permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def assign_instance(client, input, options \\ []) do
request(client, "AssignInstance", input, options)
end
@doc """
Assigns one of the stack's registered Amazon EBS volumes to a specified
instance.
The volume must first be registered with the stack by calling `RegisterVolume`.
After you register the volume, you must call `UpdateVolume` to specify a mount
point before calling `AssignVolume`. For more information, see [Resource Management](https://docs.aws.amazon.com/opsworks/latest/userguide/resources.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def assign_volume(client, input, options \\ []) do
request(client, "AssignVolume", input, options)
end
@doc """
Associates one of the stack's registered Elastic IP addresses with a specified
instance.
The address must first be registered with the stack by calling
`RegisterElasticIp`. For more information, see [Resource Management](https://docs.aws.amazon.com/opsworks/latest/userguide/resources.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def associate_elastic_ip(client, input, options \\ []) do
request(client, "AssociateElasticIp", input, options)
end
@doc """
Attaches an Elastic Load Balancing load balancer to a specified layer.
AWS OpsWorks Stacks does not support Application Load Balancer. You can only use
Classic Load Balancer with AWS OpsWorks Stacks. For more information, see
[Elastic Load Balancing](https://docs.aws.amazon.com/opsworks/latest/userguide/layers-elb.html).
You must create the Elastic Load Balancing instance separately, by using the
Elastic Load Balancing console, API, or CLI. For more information, see [ Elastic Load Balancing Developer
Guide](https://docs.aws.amazon.com/ElasticLoadBalancing/latest/DeveloperGuide/Welcome.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def attach_elastic_load_balancer(client, input, options \\ []) do
request(client, "AttachElasticLoadBalancer", input, options)
end
@doc """
Creates a clone of a specified stack.
For more information, see [Clone a Stack](https://docs.aws.amazon.com/opsworks/latest/userguide/workingstacks-cloning.html).
By default, all parameters are set to the values used by the parent stack.
**Required Permissions**: To use this action, an IAM user must have an attached
policy that explicitly grants permissions. For more information about user
permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def clone_stack(client, input, options \\ []) do
request(client, "CloneStack", input, options)
end
@doc """
Creates an app for a specified stack.
For more information, see [Creating Apps](https://docs.aws.amazon.com/opsworks/latest/userguide/workingapps-creating.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def create_app(client, input, options \\ []) do
request(client, "CreateApp", input, options)
end
@doc """
Runs deployment or stack commands.
For more information, see [Deploying Apps](https://docs.aws.amazon.com/opsworks/latest/userguide/workingapps-deploying.html)
and [Run Stack Commands](https://docs.aws.amazon.com/opsworks/latest/userguide/workingstacks-commands.html).
**Required Permissions**: To use this action, an IAM user must have a Deploy or
Manage permissions level for the stack, or an attached policy that explicitly
grants permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def create_deployment(client, input, options \\ []) do
request(client, "CreateDeployment", input, options)
end
@doc """
Creates an instance in a specified stack.
For more information, see [Adding an Instance to a Layer](https://docs.aws.amazon.com/opsworks/latest/userguide/workinginstances-add.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def create_instance(client, input, options \\ []) do
request(client, "CreateInstance", input, options)
end
@doc """
Creates a layer.
For more information, see [How to Create a Layer](https://docs.aws.amazon.com/opsworks/latest/userguide/workinglayers-basics-create.html).
You should use **CreateLayer** for noncustom layer types such as PHP App Server
only if the stack does not have an existing layer of that type. A stack can have
at most one instance of each noncustom layer; if you attempt to create a second
instance, **CreateLayer** fails. A stack can have an arbitrary number of custom
layers, so you can call **CreateLayer** as many times as you like for that layer
type.
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def create_layer(client, input, options \\ []) do
request(client, "CreateLayer", input, options)
end
@doc """
Creates a new stack.
For more information, see [Create a New Stack](https://docs.aws.amazon.com/opsworks/latest/userguide/workingstacks-edit.html).
**Required Permissions**: To use this action, an IAM user must have an attached
policy that explicitly grants permissions. For more information about user
permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def create_stack(client, input, options \\ []) do
request(client, "CreateStack", input, options)
end
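# Example input shape for `create_stack/3` (the required keys follow the
# OpsWorks CreateStack API; the values and ARNs below are placeholders):
#
#     AWS.OpsWorks.create_stack(client, %{
#       "Name" => "my-stack",
#       "Region" => "us-east-1",
#       "ServiceRoleArn" => "arn:aws:iam::111122223333:role/aws-opsworks-service-role",
#       "DefaultInstanceProfileArn" =>
#         "arn:aws:iam::111122223333:instance-profile/aws-opsworks-ec2-role"
#     })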
@doc """
Creates a new user profile.
**Required Permissions**: To use this action, an IAM user must have an attached
policy that explicitly grants permissions. For more information about user
permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def create_user_profile(client, input, options \\ []) do
request(client, "CreateUserProfile", input, options)
end
@doc """
Deletes a specified app.
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def delete_app(client, input, options \\ []) do
request(client, "DeleteApp", input, options)
end
@doc """
Deletes a specified instance, which terminates the associated Amazon EC2
instance.
You must stop an instance before you can delete it.
For more information, see [Deleting Instances](https://docs.aws.amazon.com/opsworks/latest/userguide/workinginstances-delete.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def delete_instance(client, input, options \\ []) do
request(client, "DeleteInstance", input, options)
end
@doc """
Deletes a specified layer.
You must first stop and then delete all associated instances or unassign
registered instances. For more information, see [How to Delete a Layer](https://docs.aws.amazon.com/opsworks/latest/userguide/workinglayers-basics-delete.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def delete_layer(client, input, options \\ []) do
request(client, "DeleteLayer", input, options)
end
@doc """
Deletes a specified stack.
You must first delete all instances, layers, and apps or deregister registered
instances. For more information, see [Shut Down a Stack](https://docs.aws.amazon.com/opsworks/latest/userguide/workingstacks-shutting.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def delete_stack(client, input, options \\ []) do
request(client, "DeleteStack", input, options)
end
@doc """
Deletes a user profile.
**Required Permissions**: To use this action, an IAM user must have an attached
policy that explicitly grants permissions. For more information about user
permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def delete_user_profile(client, input, options \\ []) do
request(client, "DeleteUserProfile", input, options)
end
@doc """
Deregisters a specified Amazon ECS cluster from a stack.
For more information, see [ Resource Management](https://docs.aws.amazon.com/opsworks/latest/userguide/workinglayers-ecscluster.html#workinglayers-ecscluster-delete).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack or an attached policy that explicitly grants
permissions. For more information on user permissions, see
[https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def deregister_ecs_cluster(client, input, options \\ []) do
request(client, "DeregisterEcsCluster", input, options)
end
@doc """
Deregisters a specified Elastic IP address.
The address can then be registered by another stack. For more information, see
[Resource Management](https://docs.aws.amazon.com/opsworks/latest/userguide/resources.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def deregister_elastic_ip(client, input, options \\ []) do
request(client, "DeregisterElasticIp", input, options)
end
@doc """
Deregister a registered Amazon EC2 or on-premises instance.
This action removes the instance from the stack and returns it to your control.
This action cannot be used with instances that were created with AWS OpsWorks
Stacks.
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def deregister_instance(client, input, options \\ []) do
request(client, "DeregisterInstance", input, options)
end
@doc """
Deregisters an Amazon RDS instance.
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def deregister_rds_db_instance(client, input, options \\ []) do
request(client, "DeregisterRdsDbInstance", input, options)
end
@doc """
Deregisters an Amazon EBS volume.
The volume can then be registered by another stack. For more information, see
[Resource Management](https://docs.aws.amazon.com/opsworks/latest/userguide/resources.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def deregister_volume(client, input, options \\ []) do
request(client, "DeregisterVolume", input, options)
end
@doc """
Describes the available AWS OpsWorks Stacks agent versions.
You must specify a stack ID or a configuration manager. `DescribeAgentVersions`
returns a list of available agent versions for the specified stack or
configuration manager.
"""
def describe_agent_versions(client, input, options \\ []) do
request(client, "DescribeAgentVersions", input, options)
end
@doc """
Requests a description of a specified set of apps.
This call accepts only one resource-identifying parameter.
**Required Permissions**: To use this action, an IAM user must have a Show,
Deploy, or Manage permissions level for the stack, or an attached policy that
explicitly grants permissions. For more information about user permissions, see
[Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def describe_apps(client, input, options \\ []) do
request(client, "DescribeApps", input, options)
end
@doc """
Describes the results of specified commands.
This call accepts only one resource-identifying parameter.
**Required Permissions**: To use this action, an IAM user must have a Show,
Deploy, or Manage permissions level for the stack, or an attached policy that
explicitly grants permissions. For more information about user permissions, see
[Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def describe_commands(client, input, options \\ []) do
request(client, "DescribeCommands", input, options)
end
@doc """
Requests a description of a specified set of deployments.
This call accepts only one resource-identifying parameter.
**Required Permissions**: To use this action, an IAM user must have a Show,
Deploy, or Manage permissions level for the stack, or an attached policy that
explicitly grants permissions. For more information about user permissions, see
[Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def describe_deployments(client, input, options \\ []) do
request(client, "DescribeDeployments", input, options)
end
@doc """
Describes Amazon ECS clusters that are registered with a stack.
If you specify only a stack ID, you can use the `MaxResults` and `NextToken`
parameters to paginate the response. However, AWS OpsWorks Stacks currently
supports only one cluster per layer, so the result set has a maximum of one
element.
**Required Permissions**: To use this action, an IAM user must have a Show,
Deploy, or Manage permissions level for the stack or an attached policy that
explicitly grants permission. For more information about user permissions, see
[Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
This call accepts only one resource-identifying parameter.
"""
def describe_ecs_clusters(client, input, options \\ []) do
request(client, "DescribeEcsClusters", input, options)
end
@doc """
Describes [Elastic IP addresses](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/elastic-ip-addresses-eip.html).
This call accepts only one resource-identifying parameter.
**Required Permissions**: To use this action, an IAM user must have a Show,
Deploy, or Manage permissions level for the stack, or an attached policy that
explicitly grants permissions. For more information about user permissions, see
[Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def describe_elastic_ips(client, input, options \\ []) do
request(client, "DescribeElasticIps", input, options)
end
@doc """
Describes a stack's Elastic Load Balancing instances.
This call accepts only one resource-identifying parameter.
**Required Permissions**: To use this action, an IAM user must have a Show,
Deploy, or Manage permissions level for the stack, or an attached policy that
explicitly grants permissions. For more information about user permissions, see
[Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def describe_elastic_load_balancers(client, input, options \\ []) do
request(client, "DescribeElasticLoadBalancers", input, options)
end
@doc """
Requests a description of a set of instances.
This call accepts only one resource-identifying parameter.
**Required Permissions**: To use this action, an IAM user must have a Show,
Deploy, or Manage permissions level for the stack, or an attached policy that
explicitly grants permissions. For more information about user permissions, see
[Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def describe_instances(client, input, options \\ []) do
request(client, "DescribeInstances", input, options)
end
@doc """
Requests a description of one or more layers in a specified stack.
This call accepts only one resource-identifying parameter.
**Required Permissions**: To use this action, an IAM user must have a Show,
Deploy, or Manage permissions level for the stack, or an attached policy that
explicitly grants permissions. For more information about user permissions, see
[Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def describe_layers(client, input, options \\ []) do
request(client, "DescribeLayers", input, options)
end
@doc """
Describes load-based auto scaling configurations for specified layers.
You must specify at least one of the parameters.
**Required Permissions**: To use this action, an IAM user must have a Show,
Deploy, or Manage permissions level for the stack, or an attached policy that
explicitly grants permissions. For more information about user permissions, see
[Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def describe_load_based_auto_scaling(client, input, options \\ []) do
request(client, "DescribeLoadBasedAutoScaling", input, options)
end
@doc """
Describes a user's SSH information.
**Required Permissions**: To use this action, an IAM user must have
self-management enabled or an attached policy that explicitly grants
permissions. For more information about user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def describe_my_user_profile(client, input, options \\ []) do
request(client, "DescribeMyUserProfile", input, options)
end
@doc """
Describes the operating systems that are supported by AWS OpsWorks Stacks.
"""
def describe_operating_systems(client, input, options \\ []) do
request(client, "DescribeOperatingSystems", input, options)
end
@doc """
Describes the permissions for a specified stack.
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def describe_permissions(client, input, options \\ []) do
request(client, "DescribePermissions", input, options)
end
@doc """
Describe an instance's RAID arrays.
This call accepts only one resource-identifying parameter.
**Required Permissions**: To use this action, an IAM user must have a Show,
Deploy, or Manage permissions level for the stack, or an attached policy that
explicitly grants permissions. For more information about user permissions, see
[Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def describe_raid_arrays(client, input, options \\ []) do
request(client, "DescribeRaidArrays", input, options)
end
@doc """
Describes Amazon RDS instances.
**Required Permissions**: To use this action, an IAM user must have a Show,
Deploy, or Manage permissions level for the stack, or an attached policy that
explicitly grants permissions. For more information about user permissions, see
[Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
This call accepts only one resource-identifying parameter.
"""
def describe_rds_db_instances(client, input, options \\ []) do
request(client, "DescribeRdsDbInstances", input, options)
end
@doc """
Describes AWS OpsWorks Stacks service errors.
**Required Permissions**: To use this action, an IAM user must have a Show,
Deploy, or Manage permissions level for the stack, or an attached policy that
explicitly grants permissions. For more information about user permissions, see
[Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
This call accepts only one resource-identifying parameter.
"""
def describe_service_errors(client, input, options \\ []) do
request(client, "DescribeServiceErrors", input, options)
end
@doc """
Requests a description of a stack's provisioning parameters.
**Required Permissions**: To use this action, an IAM user must have a Show,
Deploy, or Manage permissions level for the stack or an attached policy that
explicitly grants permissions. For more information about user permissions, see
[Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def describe_stack_provisioning_parameters(client, input, options \\ []) do
request(client, "DescribeStackProvisioningParameters", input, options)
end
@doc """
Describes the number of layers and apps in a specified stack, and the number of
instances in each state, such as `running_setup` or `online`.
**Required Permissions**: To use this action, an IAM user must have a Show,
Deploy, or Manage permissions level for the stack, or an attached policy that
explicitly grants permissions. For more information about user permissions, see
[Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def describe_stack_summary(client, input, options \\ []) do
request(client, "DescribeStackSummary", input, options)
end
@doc """
Requests a description of one or more stacks.
**Required Permissions**: To use this action, an IAM user must have a Show,
Deploy, or Manage permissions level for the stack, or an attached policy that
explicitly grants permissions. For more information about user permissions, see
[Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def describe_stacks(client, input, options \\ []) do
request(client, "DescribeStacks", input, options)
end
@doc """
Describes time-based auto scaling configurations for specified instances.
You must specify at least one of the parameters.
**Required Permissions**: To use this action, an IAM user must have a Show,
Deploy, or Manage permissions level for the stack, or an attached policy that
explicitly grants permissions. For more information about user permissions, see
[Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def describe_time_based_auto_scaling(client, input, options \\ []) do
request(client, "DescribeTimeBasedAutoScaling", input, options)
end
@doc """
Describe specified users.
**Required Permissions**: To use this action, an IAM user must have an attached
policy that explicitly grants permissions. For more information about user
permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def describe_user_profiles(client, input, options \\ []) do
request(client, "DescribeUserProfiles", input, options)
end
@doc """
Describes an instance's Amazon EBS volumes.
This call accepts only one resource-identifying parameter.
**Required Permissions**: To use this action, an IAM user must have a Show,
Deploy, or Manage permissions level for the stack, or an attached policy that
explicitly grants permissions. For more information about user permissions, see
[Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def describe_volumes(client, input, options \\ []) do
request(client, "DescribeVolumes", input, options)
end
@doc """
Detaches a specified Elastic Load Balancing instance from its layer.
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def detach_elastic_load_balancer(client, input, options \\ []) do
request(client, "DetachElasticLoadBalancer", input, options)
end
@doc """
Disassociates an Elastic IP address from its instance.
The address remains registered with the stack. For more information, see
[Resource Management](https://docs.aws.amazon.com/opsworks/latest/userguide/resources.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def disassociate_elastic_ip(client, input, options \\ []) do
request(client, "DisassociateElasticIp", input, options)
end
@doc """
Gets a generated host name for the specified layer, based on the current host
name theme.
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def get_hostname_suggestion(client, input, options \\ []) do
request(client, "GetHostnameSuggestion", input, options)
end
@doc """
This action can be used only with Windows stacks.
Grants RDP access to a Windows instance for a specified time period.
"""
def grant_access(client, input, options \\ []) do
request(client, "GrantAccess", input, options)
end
@doc """
Returns a list of tags that are applied to the specified stack or layer.
"""
def list_tags(client, input, options \\ []) do
request(client, "ListTags", input, options)
end
@doc """
Reboots a specified instance.
For more information, see [Starting, Stopping, and Rebooting Instances](https://docs.aws.amazon.com/opsworks/latest/userguide/workinginstances-starting.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def reboot_instance(client, input, options \\ []) do
request(client, "RebootInstance", input, options)
end
@doc """
Registers a specified Amazon ECS cluster with a stack.
You can register only one cluster with a stack. A cluster can be registered with
only one stack. For more information, see [ Resource Management](https://docs.aws.amazon.com/opsworks/latest/userguide/workinglayers-ecscluster.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack or an attached policy that explicitly grants
permissions. For more information on user permissions, see [ Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def register_ecs_cluster(client, input, options \\ []) do
request(client, "RegisterEcsCluster", input, options)
end
@doc """
Registers an Elastic IP address with a specified stack.
An address can be registered with only one stack at a time. If the address is
already registered, you must first deregister it by calling
`DeregisterElasticIp`. For more information, see [Resource Management](https://docs.aws.amazon.com/opsworks/latest/userguide/resources.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def register_elastic_ip(client, input, options \\ []) do
request(client, "RegisterElasticIp", input, options)
end
@doc """
Registers instances that were created outside of AWS OpsWorks Stacks with a
specified stack.
We do not recommend using this action to register instances. The complete
registration operation includes two tasks: installing the AWS OpsWorks Stacks
agent on the instance, and registering the instance with the stack.
`RegisterInstance` handles only the second step. You should instead use the AWS
CLI `register` command, which performs the entire registration operation. For
more information, see [ Registering an Instance with an AWS OpsWorks Stacks Stack](https://docs.aws.amazon.com/opsworks/latest/userguide/registered-instances-register.html).
Registered instances have the same requirements as instances that are created by
using the `CreateInstance` API. For example, registered instances must be
running a supported Linux-based operating system, and they must have a supported
instance type. For more information about requirements for instances that you
want to register, see [ Preparing the Instance](https://docs.aws.amazon.com/opsworks/latest/userguide/registered-instances-register-registering-preparer.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def register_instance(client, input, options \\ []) do
request(client, "RegisterInstance", input, options)
end
@doc """
Registers an Amazon RDS instance with a stack.
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def register_rds_db_instance(client, input, options \\ []) do
request(client, "RegisterRdsDbInstance", input, options)
end
@doc """
Registers an Amazon EBS volume with a specified stack.
A volume can be registered with only one stack at a time. If the volume is
already registered, you must first deregister it by calling `DeregisterVolume`.
For more information, see [Resource Management](https://docs.aws.amazon.com/opsworks/latest/userguide/resources.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def register_volume(client, input, options \\ []) do
request(client, "RegisterVolume", input, options)
end
@doc """
Specify the load-based auto scaling configuration for a specified layer.
For more information, see [Managing Load with Time-based and Load-based Instances](https://docs.aws.amazon.com/opsworks/latest/userguide/workinginstances-autoscaling.html).
To use load-based auto scaling, you must create a set of load-based auto scaling
instances. Load-based auto scaling operates only on the instances from that set,
so you must ensure that you have created enough instances to handle the maximum
anticipated load.
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def set_load_based_auto_scaling(client, input, options \\ []) do
request(client, "SetLoadBasedAutoScaling", input, options)
end
@doc """
Specifies a user's permissions.
For more information, see [Security and Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/workingsecurity.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def set_permission(client, input, options \\ []) do
request(client, "SetPermission", input, options)
end
@doc """
Specify the time-based auto scaling configuration for a specified instance.
For more information, see [Managing Load with Time-based and Load-based Instances](https://docs.aws.amazon.com/opsworks/latest/userguide/workinginstances-autoscaling.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def set_time_based_auto_scaling(client, input, options \\ []) do
request(client, "SetTimeBasedAutoScaling", input, options)
end
@doc """
Starts a specified instance.
For more information, see [Starting, Stopping, and Rebooting Instances](https://docs.aws.amazon.com/opsworks/latest/userguide/workinginstances-starting.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def start_instance(client, input, options \\ []) do
request(client, "StartInstance", input, options)
end
@doc """
Starts a stack's instances.
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def start_stack(client, input, options \\ []) do
request(client, "StartStack", input, options)
end
@doc """
Stops a specified instance.
When you stop a standard instance, the data disappears and must be reinstalled
when you restart the instance. You can stop an Amazon EBS-backed instance
without losing data. For more information, see [Starting, Stopping, and Rebooting
Instances](https://docs.aws.amazon.com/opsworks/latest/userguide/workinginstances-starting.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def stop_instance(client, input, options \\ []) do
request(client, "StopInstance", input, options)
end
@doc """
Stops a specified stack.
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def stop_stack(client, input, options \\ []) do
request(client, "StopStack", input, options)
end
@doc """
Apply cost-allocation tags to a specified stack or layer in AWS OpsWorks Stacks.
For more information about how tagging works, see
[Tags](https://docs.aws.amazon.com/opsworks/latest/userguide/tagging.html) in
the AWS OpsWorks User Guide.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Unassigns a registered instance from all layers that are using the instance.
The instance remains in the stack as an unassigned instance, and can be assigned
to another layer as needed. You cannot use this action with instances that were
created with AWS OpsWorks Stacks.
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack or an attached policy that explicitly grants
permissions. For more information about user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def unassign_instance(client, input, options \\ []) do
request(client, "UnassignInstance", input, options)
end
@doc """
Unassigns an assigned Amazon EBS volume.
The volume remains registered with the stack. For more information, see
[Resource Management](https://docs.aws.amazon.com/opsworks/latest/userguide/resources.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def unassign_volume(client, input, options \\ []) do
request(client, "UnassignVolume", input, options)
end
@doc """
Removes tags from a specified stack or layer.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Updates a specified app.
**Required Permissions**: To use this action, an IAM user must have a Deploy or
Manage permissions level for the stack, or an attached policy that explicitly
grants permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def update_app(client, input, options \\ []) do
request(client, "UpdateApp", input, options)
end
@doc """
Updates a registered Elastic IP address's name.
For more information, see [Resource Management](https://docs.aws.amazon.com/opsworks/latest/userguide/resources.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def update_elastic_ip(client, input, options \\ []) do
request(client, "UpdateElasticIp", input, options)
end
@doc """
Updates a specified instance.
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def update_instance(client, input, options \\ []) do
request(client, "UpdateInstance", input, options)
end
@doc """
Updates a specified layer.
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def update_layer(client, input, options \\ []) do
request(client, "UpdateLayer", input, options)
end
@doc """
Updates a user's SSH public key.
**Required Permissions**: To use this action, an IAM user must have
self-management enabled or an attached policy that explicitly grants
permissions. For more information about user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def update_my_user_profile(client, input, options \\ []) do
request(client, "UpdateMyUserProfile", input, options)
end
@doc """
Updates an Amazon RDS instance.
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def update_rds_db_instance(client, input, options \\ []) do
request(client, "UpdateRdsDbInstance", input, options)
end
@doc """
Updates a specified stack.
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def update_stack(client, input, options \\ []) do
request(client, "UpdateStack", input, options)
end
@doc """
Updates a specified user profile.
**Required Permissions**: To use this action, an IAM user must have an attached
policy that explicitly grants permissions. For more information about user
permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def update_user_profile(client, input, options \\ []) do
request(client, "UpdateUserProfile", input, options)
end
@doc """
Updates an Amazon EBS volume's name or mount point.
For more information, see [Resource Management](https://docs.aws.amazon.com/opsworks/latest/userguide/resources.html).
**Required Permissions**: To use this action, an IAM user must have a Manage
permissions level for the stack, or an attached policy that explicitly grants
permissions. For more information on user permissions, see [Managing User Permissions](https://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html).
"""
def update_volume(client, input, options \\ []) do
request(client, "UpdateVolume", input, options)
end
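  # Illustrative usage sketch (not part of the generated API). The client
  # construction below is an assumption about the surrounding aws-elixir
  # library: only the fields read by `build_host/2` and `build_url/2`, plus
  # the credentials consumed by `AWS.Request.sign_v4/5`, are shown.
  #
  #     client = %AWS.Client{
  #       access_key_id: "AKIA...",
  #       secret_access_key: "...",
  #       region: "us-east-1",
  #       endpoint: "amazonaws.com",
  #       proto: "https",
  #       port: 443
  #     }
  #
  #     {:ok, _body, _response} =
  #       __MODULE__.update_app(client, %{"AppId" => "app-id", "Name" => "my-app"})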
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "opsworks"}
host = build_host("opsworks", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "OpsWorks_20130218.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
| lib/aws/generated/ops_works.ex | 0.887869 | 0.558688 | ops_works.ex | starcoder |
defmodule DBConnection.Sojourn.Broker do
@moduledoc """
A `:sbroker` callback module using a timeout strategy for the client queue and
a CoDel strategy for the connection queue.
### Queue options
* `:queue_timeout` - The time to wait for control of the connection's
state (default: `5_000`)
* `:queue_out` - Either `:out` for a FIFO queue or `:out_r` for a
LIFO queue (default: `:out`)
* `:queue_drop` - Either `:drop` for head drop on max size or
`:drop_r` for tail drop (default: `:drop`)
* `:queue_size` - The maximum size of the queue (default: `1024`)
* `:queue_min` - The minimum number of requests in the queue before the
queue will timeout requests (default: `0`)
### Queue protector options
  * `:protector` - Either `true` to enable the queue protector or `false` not
    to (default: `true`)
* `:protector_interval` - The time interval in milliseconds before the
    protector will start dropping requests, ideally the 95th-99th percentile
    response time for a failure response to an end user, e.g. the 95th
    percentile response time for an HTTP 500 response to a mobile user
    (default: `5_000`)
* `:protector_target` - The target queue time in milliseconds, ideally
    5%-10% of the `:protector_interval` for the queue to feel responsive
(default: `div(protector_interval, 10)`)
  * `:protector_size` - The maximum number of requests the protector will
    try to allow in the message queue (default: `128`)
* `:protector_min` - The minimum number of requests in the message queue
before the protector will drop requests (default: `0`)
### Queue overload alarm options
* `:overload_alarm` - Either `true` to set an alarm on overload or `false`
not to - a protector should prevent overload so defaults to `false` when
using a protector, otherwise `true` (default: `not protector`)
* `:overload_target` - The target time in milliseconds for messages in the
message queue (default: `500`)
* `:overload_interval` - The interval in milliseconds for an alarm to be set
when messages are spending longer than the target time in the message queue
(default: `5_000`)
### Idle connection options
* `:idle_out` - Either `:out` for a FIFO queue or `:out_r` for a LIFO queue
(default: if `pool_overflow == 0` `:out`, otherwise `:out_r`)
* `:idle_interval` - The time interval in milliseconds before the pool will
    start pinging or dropping connections, ideally the 95th-99th percentile of
    the connection and handshake time for a database connection (default: `100`)
  * `:idle_size` - The minimum number of idle connections before the pool
    will ping or drop idle connections (default: `div(pool_size, 4)`)
### Regulator options
  * `:regulator_update` - The average interval in milliseconds to update the
    regulator (default: `50`)
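  ### Example
  An illustrative sketch only: the connection module name below is a
  placeholder, and `pool: DBConnection.Sojourn` reflects how a db_connection
  1.x pool is typically selected, so treat the exact keys as assumptions
  rather than a definitive configuration.

      DBConnection.start_link(MyApp.Connection,
        pool: DBConnection.Sojourn,
        pool_size: 10,
        queue_timeout: 5_000,
        protector: true,
        idle_interval: 100)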
"""
if Code.ensure_loaded?(:sbroker) do
@behaviour :sbroker
end
@pool {__MODULE__, :pool_sup}
@regulator {__MODULE__, :regulator_pid}
@doc false
def init({broker, mod, opts}) do
opts = Keyword.put(opts, :broker_pid, self())
pool = ensure_pool(mod, opts)
reg = lookup_regulator(pool)
opts = [pool_pid: pool, regulator_pid: reg] ++ opts
    case broker do
      __MODULE__ ->
        do_init(opts)
      broker_mod ->
        apply(broker_mod, :init, [opts])
    end
end
## Helpers
defp ensure_pool(mod, opts) do
Process.get(@pool) || start_pool(mod, opts)
end
defp start_pool(mod, opts) do
{:ok, pid} = DBConnection.Sojourn.Supervisor.start_pool(mod, opts)
_ = Process.put(@pool, pid)
pid
end
defp lookup_regulator(pool) do
Process.get(@regulator) || do_lookup_regulator(pool)
end
defp do_lookup_regulator(pool) do
pid = DBConnection.Sojourn.Pool.lookup_regulator(pool)
_ = Process.put(@regulator, pid)
pid
end
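  # `do_init/1` returns the three elements the :sbroker behaviour expects:
  # a client queue spec, a connection queue spec, and a list of meter specs.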
defp do_init(opts) do
{:ok, {client_queue(opts), conn_queue(opts), meters(opts)}}
end
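  # Client (request) queue: an :sbroker_timeout_queue built from the
  # `:queue_*` options as {out, timeout, drop, min, size}.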
defp client_queue(opts) do
out = Keyword.get(opts, :queue_out, :out)
timeout = Keyword.get(opts, :queue_timeout, 5_000)
drop = Keyword.get(opts, :queue_drop, :drop)
min = Keyword.get(opts, :queue_min, 0)
size = Keyword.get(opts, :queue_size, 1024)
{:sbroker_timeout_queue, {out, timeout, drop, min, size}}
end
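  # Connection (idle) queue: an :sbroker_codel_queue built from the `:idle_*`
  # and `:pool_*` options described in the moduledoc.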
defp conn_queue(opts) do
pool_overflow = Keyword.get(opts, :pool_overflow, 0)
out_default = if pool_overflow === 0, do: :out, else: :out_r
out = Keyword.get(opts, :idle_out, out_default)
timeout = Keyword.get(opts, :idle_timeout, 1_000)
interval = Keyword.get(opts, :idle_interval, 100)
min_default = div(Keyword.get(opts, :pool_size, 10), 4)
min = Keyword.get(opts, :idle_size, min_default)
{:sbroker_codel_queue, {out, timeout, interval, :drop, min, :infinity}}
end
defp meters(opts) do
update_meters(opts) ++ protector_meters(opts) ++ overload_meters(opts)
end
defp update_meters(opts) do
update = Keyword.get(opts, :regulator_update, 50)
pid = Keyword.fetch!(opts, :regulator_pid)
[{:sregulator_update_meter, [{pid, :ask_r, update}]}]
end
defp protector_meters(opts) do
case Keyword.get(opts, :protector, true) do
true ->
interval = Keyword.get(opts, :protector_interval, 5_000)
target = Keyword.get(opts, :protector_target, div(interval, 10))
update = Keyword.get(opts, :protector_update, 100)
min = Keyword.get(opts, :protector_min, 0)
max = Keyword.get(opts, :protector_size, 128)
idle_interval = Keyword.get(opts, :idle_interval, 100)
idle_target = Keyword.get(opts, :idle_target, div(idle_interval, 20))
[{:sprotector_pie_meter,
{target, interval, idle_interval, idle_target, update, min, max}}]
false ->
[]
end
end
defp overload_meters(opts) do
protector? = Keyword.get(opts, :protector, true)
case Keyword.get(opts, :overload_alarm, not protector?) do
true ->
target = Keyword.get(opts, :overload_target, 500)
interval = Keyword.get(opts, :overload_interval, 5_000)
[{:sbroker_overload_meter, {target, interval, alarm_id(opts)}}]
false ->
[]
end
end
defp alarm_id(opts) do
case Keyword.get(opts, :name) do
nil -> {:overload, self()}
name -> {:overload, name}
end
end
end
| deps/db_connection/lib/db_connection/sojourn/broker.ex | 0.841109 | 0.641478 | broker.ex | starcoder |
defmodule SurveyTool.RatingQuestion do
@moduledoc """
Module representing a rating question type question in a survey.
"""
alias __MODULE__, as: RatingQuestion
defstruct scores: [], text: nil, theme: nil
@typedoc "An optional list of integer scores"
@type scores() :: [integer] | []
@typedoc "Optional string"
@type optional_string() :: String.t() | nil
@typedoc "Rating question struct type."
@type t() :: %RatingQuestion{
scores: scores(),
text: optional_string(),
theme: optional_string()
}
@max_score 5
@min_score 1
@doc """
Adds an answer to a given `question`.
## Parameters
- `question`: The `RatingQuestion` to add the score to.
- `score`: The score value to add.
## Validations
An answer will only be added to the question under the
following conditions:
- the answer is not `nil`
- the answer is an integer
  - the answer falls within the accepted numerical range (1 to 5)
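  ## Examples
  The examples below are illustrative only; the values are arbitrary samples.

      iex> q = %SurveyTool.RatingQuestion{text: "Rate the product"}
      iex> SurveyTool.RatingQuestion.add_answer(q, "4").scores
      [4]

      iex> SurveyTool.RatingQuestion.add_answer(%SurveyTool.RatingQuestion{}, "9").scores
      []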
"""
@spec add_answer(RatingQuestion.t(), String.t()) :: RatingQuestion.t()
def add_answer(question = %RatingQuestion{}, ""), do: question
def add_answer(question = %RatingQuestion{scores: scores}, score) do
    with {score, ""} <- Integer.parse(score),
true <- score in @min_score..@max_score do
%RatingQuestion{question | scores: [score | scores]}
else
_invalid_score ->
question
end
end
@doc """
Calculates the average score for a given question.
No score is calculated for questions that have no scores.
## Parameters
- `question`: The question from which to get the scores to calculate
the average score.
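  ## Examples
  Illustrative only (not doctested, since `Decimal`'s printed form can vary
  across versions):

      average_score(%RatingQuestion{scores: [4, 5]}) #=> a Decimal equal to 4.5
      average_score(%RatingQuestion{scores: []})     #=> nil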
"""
  @spec average_score(RatingQuestion.t()) :: Decimal.t() | nil
def average_score(_question = %RatingQuestion{scores: []}), do: nil
def average_score(_question = %RatingQuestion{scores: scores}) do
size =
scores
|> length()
|> Decimal.new()
scores
|> Enum.sum()
|> Decimal.new()
|> Decimal.div(size)
end
end
| lib/survey_tool/rating_question.ex | 0.904677 | 0.555676 | rating_question.ex | starcoder |
defmodule Day6 do
@moduledoc """
A debugger program here is having an issue: it is trying to repair a memory reallocation routine, but it keeps getting
stuck in an infinite loop.
In this area, there are sixteen memory banks; each memory bank can hold any number of blocks. The goal of the
reallocation routine is to balance the blocks between the memory banks.
The reallocation routine operates in cycles. In each cycle, it finds the memory bank with the most blocks (ties won by
the lowest-numbered memory bank) and redistributes those blocks among the banks. To do this, it removes all of the
blocks from the selected bank, then moves to the next (by index) memory bank and inserts one of the blocks. It
continues doing this until it runs out of blocks; if it reaches the last memory bank, it wraps around to the first
one.
The debugger would like to know how many redistributions can be done before a blocks-in-banks configuration is
produced that has been seen before.
For example, imagine a scenario with only four memory banks:
The banks start with 0, 2, 7, and 0 blocks. The third bank has the most blocks, so it is chosen for redistribution.
Starting with the next bank (the fourth bank) and then continuing to the first bank, the second bank, and so on, the
7 blocks are spread out over the memory banks. The fourth, first, and second banks get two blocks each, and the third
bank gets one back. The final result looks like this: 2 4 1 2.
Next, the second bank is chosen because it contains the most blocks (four). Because there are four memory banks, each
gets one block. The result is: 3 1 2 3.
Now, there is a tie between the first and fourth memory banks, both of which have three blocks. The first bank wins
the tie, and its three blocks are distributed evenly over the other three banks, leaving it with none: 0 2 3 4.
The fourth bank is chosen, and its four blocks are distributed such that each of the four banks receives one: 1 3 4 1.
The third bank is chosen, and the same thing happens: 2 4 1 2.
At this point, we've reached a state we've seen before: 2 4 1 2 was already seen. The infinite loop is detected after
the fifth block redistribution cycle, and so the answer in this example is 5.
Given the initial block counts in your puzzle input, how many redistribution cycles must be completed before a
configuration is produced that has been seen before?
"""
@doc """
Part A for Day 4
"""
def part_a do
common_part("res/day6.input") |>
process_memory(MapSet.new(), :a)
end
@doc """
Part B for Day 4
"""
def part_b do
common_part("res/day6.input") |>
process_memory(MapSet.new(), :b)
end
  def common_part(file) do
    instruction_list =
      File.read!(file)
      |> String.trim()
      |> String.split("\t")
      |> Enum.map(&String.to_integer/1)
    instruction_length = length(instruction_list)
    0..(instruction_length - 1)
    |> Enum.zip(instruction_list)
    |> Map.new()
  end
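  # The memory banks are represented as a map of bank index => block count,
  # e.g. the moduledoc example "0 2 7 0" becomes %{0 => 0, 1 => 2, 2 => 7, 3 => 0}.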
  def test_a do
    common_part("res/day6_test.input")
    |> process_memory(MapSet.new(), :a)
  end
  def test_b do
    common_part("res/day6_test.input")
    |> process_memory(MapSet.new(), :b)
  end
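  # Redistributes blocks until a previously seen configuration recurs.
  # Part :a returns the number of cycles until the first repeat; part :b
  # restarts the history from the repeated state, so it returns the length
  # of the loop itself.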
  def process_memory(mem_map, mem_history_set, part) do
    case {MapSet.member?(mem_history_set, mem_map), part} do
      {true, :a} ->
        MapSet.size(mem_history_set)
      {true, :b} ->
        process_memory(mem_map, MapSet.new(), :a)
      {false, part} ->
        mem_list = Map.to_list(mem_map)
        {max_pos, max_val} = mem_list |> Enum.max_by(fn {_pos, val} -> val end)
        mem_list
        |> List.keyreplace(max_pos, 0, {max_pos, 0})
        |> distribute(max_val, max_pos + 1)
        |> Map.new()
        |> process_memory(MapSet.put(mem_history_set, mem_map), part)
    end
  end
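  # Hands out `max_val` blocks one at a time. The third argument counts down
  # to the bank after the emptied one; once it reaches 0, every bank visited
  # (wrapping via list rotation) receives a single block until none remain.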
  def distribute(list, 0, _) do
    list
  end
  def distribute([{k, v} | t], max_val, 0) do
    distribute(t ++ [{k, v + 1}], max_val - 1, 0)
  end
  def distribute([{k, v} | t], max_val, pos) do
    distribute(t ++ [{k, v}], max_val, pos - 1)
  end
end
| lib/day6.ex | 0.661923 | 0.707582 | day6.ex | starcoder |
defmodule Day16 do
@moduledoc """
  Solves the Day 16 ticket-translation puzzle: part 1 sums the values on
  nearby tickets that satisfy no field rule; part 2 discards invalid tickets,
  works out which column corresponds to each field, and multiplies the values
  of the `departure` fields on your own ticket.
"""
def part1 do
[rules, _ticket, others] =
read_data("input.txt")
|> Enum.to_list()
rules = parse_rules(rules)
    extract_invalid_values(parse_tickets(others), rules) |> Enum.sum() |> IO.puts()
end
def part2 do
[rules, ticket, others] =
read_data("input.txt")
|> Enum.to_list()
rules = parse_rules(rules)
others =
others
|> parse_tickets()
|> filter_invalid_tickets(rules)
ticket = ticket |> parse_tickets() |> List.first()
0..(length(others |> List.first()) - 1)
|> Enum.map(fn column ->
get_column(others, column) |> find_matching_rule(rules)
end)
|> build_column_map()
|> Enum.filter(fn {key, _v} -> String.starts_with?(key, "departure") end)
|> Enum.map(fn {field, column} -> {field, Enum.at(ticket, column)} end)
|> Map.new()
|> Map.values()
|> Enum.reduce(fn x, acc -> x * acc end)
|> IO.inspect()
end
def read_data(filename) do
File.stream!(filename)
|> Stream.map(&String.trim/1)
|> Stream.chunk_by(fn line -> line == "" end)
|> Stream.reject(fn chunk -> chunk == [""] end)
end
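  # The input splits into three chunks: the field rules, the "your ticket"
  # section, and the "nearby tickets" section; chunking on blank lines and
  # dropping the blank-line chunks themselves yields exactly those three.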
def parse_rules(rules) do
rules
|> Enum.map(fn rule ->
[name, range_data] = String.split(rule, ~r/:\s*/)
ranges =
range_data
|> String.split(~r/\s*or\s*/i)
|> Enum.map(fn range_string ->
String.split(range_string, "-") |> Enum.map(&String.to_integer/1) |> List.to_tuple()
end)
{name, ranges}
end)
|> Map.new()
end
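  # Rules are stored as a map of field name => list of inclusive ranges,
  # e.g. "row: 6-11 or 33-44" becomes {"row", [{6, 11}, {33, 44}]}.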
def is_valid(num, rule) do
rule |> Enum.map(fn {lb, ub} -> Enum.member?(lb..ub, num) end) |> Enum.any?()
end
def matches_rules(num, rules) do
rules |> Map.values() |> Enum.map(fn rule -> is_valid(num, rule) end) |> Enum.any?()
end
def extract_invalid_values(tickets, rules) do
tickets
|> Enum.map(fn values -> Enum.reject(values, fn value -> matches_rules(value, rules) end) end)
|> List.flatten()
end
def filter_invalid_tickets(tickets, rules) do
tickets
|> Enum.filter(fn values -> Enum.all?(values, fn x -> matches_rules(x, rules) end) end)
end
def parse_tickets(ticket_lines) do
ticket_lines
|> Enum.slice(1..-1)
|> Enum.map(fn ticket_line ->
String.split(ticket_line, ",") |> Enum.map(&String.to_integer/1)
end)
end
def get_column(tickets, column) do
tickets
|> Enum.map(fn values -> Enum.at(values, column) end)
end
def find_matching_rule(col_vals, rules) do
rules
|> Enum.filter(fn {_key, rule} ->
col_vals |> Enum.map(fn num -> is_valid(num, rule) end) |> Enum.all?()
end)
|> Enum.map(fn match -> elem(match, 0) end)
end
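  # Assigns rules to columns by elimination: columns with exactly one
  # candidate rule are fixed first; that rule is then removed from the other
  # columns' candidate lists, and the remainder are regrouped by candidate
  # count and reduced again until every column is resolved.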
def reduce_col_counts({1, col_map}, result_map) do
col_map
|> Enum.reduce(result_map, fn {rule_list, col}, map ->
Map.put(map, rule_list |> List.first(), col)
end)
end
def reduce_col_counts({_count, col_map}, result_map) do
col_map
|> Enum.map(fn {rule_list, column} ->
{rule_list |> Enum.reject(fn rule -> Map.has_key?(result_map, rule) end), column}
end)
|> Enum.group_by(fn {matches, _column} -> Enum.count(matches) end)
|> Enum.reduce(result_map, &reduce_col_counts/2)
end
def build_column_map(matches) do
matches
|> Enum.with_index()
|> Enum.group_by(fn {matches, _column} -> Enum.count(matches) end)
|> Enum.reduce(%{}, &reduce_col_counts/2)
end
end
| day16/lib/day16.ex | 0.569254 | 0.435721 | day16.ex | starcoder |