code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule Quantity.Math do
  @moduledoc """
  Functions for doing math with Quantities
  """

  # This module defines its own div/2, so hide Kernel's.
  import Kernel, except: [div: 2]

  @doc """
  Add two Quantities, keeping the unit
  iex> add(~Q[1.34 MWh], ~Q[3.49 MWh])
  {:ok, ~Q[4.83 MWh]}
  iex> add(~Q[1.234567 days], ~Q[3.5 days])
  {:ok, ~Q[4.734567 days]}
  iex> add(~Q[10 goats], ~Q[40 sheep])
  :error
  """
  @spec add(Quantity.t(), Quantity.t()) :: {:ok, Quantity.t()} | :error
  # The `unit` variable appears in both heads, so this clause only matches
  # when the two quantities share the same unit.
  def add(%{unit: unit} = a, %{unit: unit} = b) do
    {:ok, Quantity.new(Decimal.add(a.value, b.value), unit)}
  end

  def add(_, _) do
    :error
  end

  @doc """
  Add two Quantities, but raise an ArgumentError on error
  iex> add!(~Q[50.94 kWh], ~Q[49.40 kWh])
  ~Q[100.34 kWh]
  """
  @spec add!(Quantity.t(), Quantity.t()) :: Quantity.t()
  def add!(a, b) do
    case add(a, b) do
      {:ok, result} -> result
      :error -> raise(ArgumentError)
    end
  end

  @doc """
  Subtract two Quantities, keeping the unit
  iex> sub(~Q[99 bottles of beer], ~Q[2 bottles of beer])
  {:ok, ~Q[97 bottles of beer]}
  iex> sub(~Q[2 bananas], ~Q[1 apple])
  :error
  """
  @spec sub(Quantity.t(), Quantity.t()) :: {:ok, Quantity.t()} | :error
  def sub(%{unit: unit} = a, %{unit: unit} = b) do
    {:ok, Quantity.new(Decimal.sub(a.value, b.value), unit)}
  end

  def sub(_, _) do
    :error
  end

  @doc """
  Subtract two Quantities, but raise ArgumentError on error
  iex> sub!(~Q[99 problems], ~Q[2 problems])
  ~Q[97 problems]
  """
  @spec sub!(Quantity.t(), Quantity.t()) :: Quantity.t()
  def sub!(a, b) do
    case sub(a, b) do
      {:ok, result} -> result
      :error -> raise(ArgumentError)
    end
  end

  @doc """
  Sum a list of Quantities with identical units. Errors when addition fails or when the list is empty.
  iex> sum([~Q[11.11 DKK], ~Q[22.22 DKK], ~Q[33.33 DKK]])
  {:ok, ~Q[66.66 DKK]}
  iex> sum([~Q[1 EUR], ~Q[2 DKK]])
  :error
  iex> sum([])
  :error
  """
  @spec sum([Quantity.t()]) :: {:ok, Quantity.t()} | :error
  def sum([]), do: :error

  def sum(quantities) do
    # The empty list is handled by the clause above, so this match cannot fail.
    [first | remaining] = quantities

    remaining
    |> Enum.reduce_while(first, fn quantity, acc ->
      case add(quantity, acc) do
        {:ok, result} -> {:cont, result}
        :error -> {:halt, :error}
      end
    end)
    |> case do
      :error -> :error
      result -> {:ok, result}
    end
  end

  @doc """
  Sum a list of Quantities with identical units. Includes a fallback value.
  The exp and unit will be used to create a Quantity with value 0 if the list is empty.
  iex> sum([~Q[0.11 DKK], ~Q[0.22 DKK], ~Q[0.33 DKK]], -2, "DKK")
  {:ok, ~Q[0.66 DKK]}
  iex> sum([], 0, "DKK")
  {:ok, ~Q[0 DKK]}
  iex> sum([], -2, "DKK")
  {:ok, ~Q[0.00 DKK]}
  iex> sum([~Q[1 EUR], ~Q[2 EUR]], -1, "DKK")
  {:ok, ~Q[3 EUR]}
  iex> sum([~Q[1 EUR], ~Q[2 DKK]], -2, "EUR")
  :error
  """
  @spec sum([Quantity.t()], integer, String.t()) :: {:ok, Quantity.t()} | :error
  def sum([], exp, unit), do: {:ok, Quantity.new(0, exp, unit)}
  # Note: exp/unit are only a fallback for the empty list; a non-empty list
  # keeps its own unit (see the EUR/DKK doctest above).
  def sum(quantities, _exp, _unit), do: sum(quantities)

  @doc """
  Sum a list of Quantities with identical units, raises ArgumentError on error
  iex> sum!([~Q[123 DKK], ~Q[10 DKK], ~Q[39 DKK]])
  ~Q[172 DKK]
  """
  @spec sum!([Quantity.t()]) :: Quantity.t()
  def sum!(quantities) do
    case sum(quantities) do
      {:ok, result} -> result
      :error -> raise(ArgumentError)
    end
  end

  @doc """
  Sum a list of Quantities with identical units. Includes a fallback value.
  The exp and unit will be used to create a Quantity with value 0 if the list is empty.
  Raises ArgumentError on error.
  iex> sum!([~Q[123 apples], ~Q[10 apples]], 0, "apples")
  ~Q[133 apples]
  iex> sum!([], -2, "DKK")
  ~Q[0.00 DKK]
  iex> sum!([~Q[1 apples], ~Q[2 apples]], -2, "pears")
  ~Q[3 apples]
  """
  @spec sum!([Quantity.t()], integer, String.t()) :: Quantity.t()
  def sum!(quantities, exp, unit) do
    case sum(quantities, exp, unit) do
      {:ok, result} -> result
      :error -> raise(ArgumentError)
    end
  end

  @doc """
  Divide a Quantity by a scalar or another Quantity
  iex> Quantity.div(~Q[15 $], ~Q[10 banana])
  ~Q[1.5 $/banana]
  iex> Quantity.div(~Q[15 $], ~d[7.5])
  ~Q[2 $]
  iex> Quantity.div(~Q[15 $], 10)
  ~Q[1.5 $]
  iex> Quantity.div(~Q[15 $], ~Q[10 $])
  ~Q[1.5]
  """
  @spec div(Quantity.t(), Quantity.t() | Decimal.t() | integer) :: Quantity.t()
  def div(%Quantity{} = quantity, scalar) when is_integer(scalar) do
    # Wrap the bare integer as a unitless (unit = 1) quantity.
    div(quantity, Quantity.new(scalar, 0, 1))
  end

  def div(%Quantity{} = quantity, %Decimal{} = scalar) do
    div(quantity, Quantity.new(scalar, 1))
  end

  def div(%Quantity{} = q1, %Quantity{} = q2) do
    Quantity.new(Decimal.div(q1.value, q2.value), {:div, q1.unit, q2.unit})
  end

  @doc """
  Inverse a Quantity, similar to 1/quantity
  iex> Quantity.inverse(~Q[10 DKK/m³])
  ~Q[0.1 m³/DKK]
  """
  @spec inverse(Quantity.t()) :: Quantity.t()
  def inverse(%Quantity{} = quantity) do
    div(Quantity.new(Decimal.new(1), 1), quantity)
  end

  @doc """
  Multiply a quantity by a scalar or another quantity
  iex> Quantity.mult(~Q[15 $], ~d[4.5])
  ~Q[67.5 $]
  iex> Quantity.mult(~Q[15 $], 4)
  ~Q[60 $]
  iex> Quantity.mult(~Q[15 $], ~Q[4 banana])
  ~Q[60 $*banana]
  iex> Quantity.mult(~Q[15 $/banana], ~Q[4 banana])
  ~Q[60 $]
  """
  @spec mult(Quantity.t(), Quantity.t() | Decimal.t() | integer) :: Quantity.t()
  def mult(%Quantity{} = quantity, scalar) when is_integer(scalar) do
    mult(quantity, Quantity.new(scalar, 0, 1))
  end

  def mult(%Quantity{} = quantity, %Decimal{} = scalar) do
    mult(quantity, Quantity.new(scalar, 1))
  end

  def mult(%Quantity{} = q1, %Quantity{} = q2) do
    Quantity.new(Decimal.mult(q1.value, q2.value), {:mult, q1.unit, q2.unit})
  end

  @doc """
  Round a Quantity to match a precision using the :half_up strategy
  iex> Quantity.round(~Q[1.49 DKK], 1)
  ~Q[1.5 DKK]
  iex> Quantity.round(~Q[0.5 DKK], 2)
  ~Q[0.50 DKK]
  """
  @spec round(Quantity.t(), non_neg_integer) :: Quantity.t()
  def round(quantity, decimal_count) do
    Quantity.new(Decimal.round(quantity.value, decimal_count, :half_up), quantity.unit)
  end
end
|
lib/quantity/math.ex
| 0.877831
| 0.585812
|
math.ex
|
starcoder
|
defmodule Mix.Tasks.ExampleFiles do
  @dialyzer :no_undefined_callbacks

  use Mix.Task

  @moduledoc """
  Lists example files in your project and shows the status of each.
  This task traverses the current working directory, looking for files that are
  intended to serve as illustrative samples of files provided by a project
  contributor or user.
  ```console
  $ mix example_files
  Using fileglob **/*{example,Example,EXAMPLE}*
  Missing: spec/fixtures/collisions/file2
  Missing: spec/fixtures/no_collisions/file
  2 example files
  Collision detected! spec/fixtures/collisions/file1
  • spec/fixtures/collisions/EXAMPLE-file1
  • spec/fixtures/collisions/file1.example
  ```
  ## Individual file status
  This task displays the current status of each of the example files it finds.
  The status of a copy is one of three values:
  * Missing — not present
  * Identical — present and identical in content to the example
  * Out-of-date — present, but currently different in content from the example
  ## Fileglobs
  The pattern `**/*{example,Example,EXAMPLE}*` is used by default to search for
  example files. You can further restrict the search in `mix example_files` and
  its subtasks by specifying one or paths or patterns that will be combined with
  the example-files pattern:
  ```console
  $ mix example_files doc log
  Using fileglob {doc,log}/**/*{example,Example,EXAMPLE}*
  ```
  ## Ignored paths
  The following paths are ignored by default in searching for example files:
  * _.git/_
  * _\_build/_
  * _deps/_
  * _node_modules/_
  * _tmp/_
  You can override this default in `mix example_files` and its subtasks by
  specifying one or more `--ignore` or `-i` options:
  ```console
  $ mix example_files --ignore spec --ignore log
  Using fileglob **/*{example,Example,EXAMPLE}*
  ```
  ## Collisions
  An example file may be “pulled,” which means that it is copied into the same
  directory, using a file name that lacks the “example” nomenclature. A project
  may contain two or more example files that, if they were both pulled, would use
  the same resulting file name. This constitutes a “collision,” which is always
  prevented; colliding example files are never operated on, but are displayed on
  _stderr_.
  ## Verbose output
  You can get more information about what `mix example_files` and its subtasks
  are doing by specifying the `--verbose` or `-v` option.
  """ |> String.replace(~r/\s+$/, "")
  # TODO: Use String.trim_trailing/1 when targeting Elixir >= v1.3

  @shortdoc "Lists example files in your project"

  # TODO: Use `alias ExampleFiles.{English,Options,UI}` when targeting Elixir >= v1.2
  alias ExampleFiles.English
  alias ExampleFiles.Options
  alias ExampleFiles.UI
  alias IO.ANSI

  @spec run([binary]) :: [pid]
  @doc false
  def run(arguments) do
    :example_files |> Application.ensure_all_started
    {:ok, options_pid} = Options.start_link(arguments)
    options_pid |> Options.fileglobs |> display_fileglobs
    {:ok, example_files_pid} = ExampleFiles.start_link(options: options_pid)
    noncollisions = example_files_pid |> display_noncollisions
    example_files_pid |> display_collisions
    noncollisions
  end

  @spec display_collisions(pid) :: [pid]
  @doc false
  def display_collisions(example_files_pid) do
    for example_files <- example_files_pid |> ExampleFiles.collisions do
      UI |> UI.error
      path_when_pulled = example_files |> List.first
                                       |> ExampleFiles.File.path_when_pulled
                                       |> UI.underline
      UI |> UI.error([UI.red("Collision detected!"), " ", path_when_pulled])
      for example_file <- example_files do
        path = example_file |> ExampleFiles.File.path
        UI |> UI.error(["• ", UI.underline(path)])
      end
      example_files
    end
  end

  @spec display_fileglobs([binary]) :: ANSI.ansidata
  @doc false
  # Single fileglob: print it in the singular form.
  def display_fileglobs([fileglob]) do
    UI |> UI.info(["Using fileglob ", UI.underline(fileglob)])
  end

  # Multiple fileglobs: print them as an English-style list. Note: a second
  # `@doc false` here would trigger a "redefining @doc" compiler warning, so
  # the attribute appears only once per function, before the first clause.
  def display_fileglobs(fileglobs) do
    list = fileglobs |> Enum.map(&(&1 |> UI.underline
                                      |> ANSI.format_fragment
                                      |> IO.chardata_to_string))
                     |> English.list
    UI |> UI.info(["Using fileglobs ", list])
  end

  @spec display_noncollisions(pid) :: [pid]
  defp display_noncollisions(example_files_pid) do
    noncollisions = example_files_pid |> ExampleFiles.noncollisions
    if 0 < length(noncollisions), do: UI |> UI.info
    for file <- noncollisions do
      message = case file |> ExampleFiles.File.status do
        :identical   -> "Identical:  " |> UI.green
        :out_of_date -> "Out of date:" |> UI.yellow
        :missing     -> "Missing:    " |> UI.yellow
      end
      UI |> UI.info([message,
                     " ",
                     UI.underline(ExampleFiles.File.path_when_pulled(file))])
    end
    UI |> UI.info
    example_file_or_files = noncollisions |> length
                                          |> English.pluralize("example file")
                                          |> String.capitalize
    UI |> UI.info(example_file_or_files)
    noncollisions
  end
end
|
lib/mix/tasks/example_files.ex
| 0.694717
| 0.869382
|
example_files.ex
|
starcoder
|
defmodule PacketAnalyzer do
  @moduledoc """
  PacketAnalyzer keeps the contexts that define your domain
  and business logic.
  Contexts are also responsible for managing your data, regardless
  if it comes from the database, an external API or others.

  The functions below slice fields out of a hexadecimal string that
  represents an IPv4 packet header.
  """

  @doc """
  Validates and returns the input data.
  ## Parameters
  - input_data: the input string (hexadecimal).
  ## try-rescue
  Returns "Error" when `input_data` contains characters that are not
  valid hexadecimal.
  Note: hexadecimal strings that are too short still pass through, and
  the integer round-trip drops any leading zeros.
  """
  def analyze(input_data) do
    try do
      input_data
      |> String.to_integer(16)
      |> Integer.to_string(16)
    rescue
      # The exception value is unused; `_` avoids an unused-variable warning.
      _ -> "Error"
    end
  end

  @doc """
  Returns the IP version.
  ## Parameters
  - analyze: the string returned by `analyze/1`.
  Takes the first character with String.first and maps it to a version
  name via a case expression.
  """
  def version(analyze) do
    version = String.first(analyze)

    case version do
      "4" -> version <> " -> IP"
      "5" -> version <> " -> ST"
      "6" -> version <> " -> IPv6"
      "7" -> version <> " -> TP/IX"
      "8" -> version <> " -> PIP"
      "9" -> version <> " -> TUBA"
      _ -> "Not Match version"
    end
  end

  @doc """
  Returns the header length.
  ## Parameters
  - analyze: the string returned by `analyze/1`.
  Uses String.at to fetch the character at index 1.
  """
  def header(analyze) do
    # Return the value directly; binding it to a variable first caused an
    # unused-variable warning.
    String.at(analyze, 1)
  end

  @doc """
  Returns the service type.
  ## Parameters
  - analyze: the string returned by `analyze/1`.
  Uses String.slice to take a range of characters starting at a given
  index. For example, with input "ABCDEFG", `data |> String.slice(2, 2)`
  yields "CD".
  """
  def service(analyze) do
    analyze
    |> String.slice(2, 2)
  end

  @doc """
  Returns the packet length (4 hex digits at offset 4).
  ## Parameters
  - analyze: the string returned by `analyze/1`.
  """
  def packet_length(analyze) do
    analyze
    |> String.slice(4, 4)
  end

  @doc """
  Returns the identifier (4 hex digits at offset 8).
  ## Parameters
  - analyze: the string returned by `analyze/1`.
  """
  def identifier(analyze) do
    analyze
    |> String.slice(8, 4)
  end

  @doc """
  Returns the flags/fragment field (4 hex digits at offset 12).
  ## Parameters
  - analyze: the string returned by `analyze/1`.
  """
  def flag(analyze) do
    analyze
    |> String.slice(12, 4)
  end

  @doc """
  Returns the TTL (2 hex digits at offset 16).
  ## Parameters
  - analyze: the string returned by `analyze/1`.
  """
  def ttl(analyze) do
    analyze
    |> String.slice(16, 2)
  end

  @doc """
  Returns the protocol number (2 hex digits at offset 18).
  ## Parameters
  - analyze: the string returned by `analyze/1`.
  """
  def protocol(analyze) do
    analyze
    |> String.slice(18, 2)
  end

  @doc """
  Returns the protocol number.
  Kept under its original (misspelled) name for backward compatibility;
  delegates to `protocol/1`.
  """
  def procotol(analyze) do
    protocol(analyze)
  end

  @doc """
  Returns the header checksum (4 hex digits at offset 20).
  ## Parameters
  - analyze: the string returned by `analyze/1`.
  """
  def header_check(analyze) do
    analyze
    |> String.slice(20, 4)
  end

  @doc """
  Returns the source IP address.
  ## Parameters
  - analyze: the string returned by `analyze/1`.
  Slices 8 hex digits at offset 24 and formats them as dotted decimal.
  """
  def source_ip_addr(analyze) do
    analyze
    |> String.slice(24, 8)
    |> format_ip()
  end

  @doc """
  Returns the destination IP address.
  ## Parameters
  - analyze: the string returned by `analyze/1`.
  Slices 8 hex digits at offset 32 and formats them as dotted decimal.
  """
  def destination_ip_addr(analyze) do
    analyze
    |> String.slice(32, 8)
    |> format_ip()
  end

  # Formats an 8-hex-digit address as "<hex> -> a.b.c.d" by converting each
  # pair of hex digits to its decimal value. Shared by source_ip_addr/1 and
  # destination_ip_addr/1, which previously duplicated this code.
  defp format_ip(ip) do
    octets =
      for i <- 0..3 do
        ip |> String.slice(i * 2, 2) |> String.to_integer(16) |> Integer.to_string()
      end

    ip <> " -> " <> Enum.join(octets, ".")
  end

  @doc """
  Returns the options field.
  ## Parameters
  - analyze: the string returned by `analyze/1`.
  - n: the number of 32-bit option words to take.
  """
  def option(analyze, n) do
    analyze
    |> String.slice(40, 8 * n)
  end

  @doc """
  Returns the data (payload) following the header and options.
  ## Parameters
  - analyze: the string returned by `analyze/1`.
  - n: the number of 32-bit option words preceding the payload.
  """
  def data(analyze, n) do
    analyze
    |> String.slice(40 + 8 * n..-1)
  end
end
|
packet_analyzer/lib/packet_analyzer.ex
| 0.589362
| 0.53443
|
packet_analyzer.ex
|
starcoder
|
defmodule Ririsu.Interpreter do
  @moduledoc """
  The evaluator for the Ririsu language.
  """

  @doc """
  Runs a Ririsu source code.
  The runner takes a character list, and an initial state, and it
  returns a new state. The state is a Tuple in the form:
  { Mode :: Number, Environment :: Dict, Stack :: List }
  `run/1` will call `run/2` with an empty initial state.
  """
  def run(source) do
    run(source, {0, :dict.new(), []})
  end

  def run(source, initial) do
    List.foldl(source, initial, &op/2)
  end

  # -- Some private helpers --------------------------------------------

  # Custom modulo used by the `%` primitive.
  defp mod(a, b) do
    rem(rem(a, b + b), b)
  end

  # Anamorphism: repeatedly applies `f` to the seed, collecting values
  # until `f` returns :stop.
  defp unfold(a, f) do
    unfold([], a, f)
  end

  defp unfold(xs, a, f) do
    case f.(a) do
      :stop -> xs
      {:ok, x} -> [x | xs] ++ unfold(xs, x, f)
    end
  end

  # -- Evaluates the Ririsu primitive operations -----------------------

  @doc """
  Evaluates a single Ririsu operation (one codepoint) against a state,
  returning the new `{mode, environment, stack}` state. Unknown codepoints
  are looked up in the environment and otherwise pushed onto the stack.
  """
  # Newlines are ignored
  def op(?\n, {mode, env, stack}) do
    {mode, env, stack}
  end

  # Enters quoting mode
  def op(?[, {mode, env, stack}) do
    if mode == 0 do
      {1, env, [[] | stack]}
    else
      [head | tail] = stack
      {mode + 1, env, [[?[ | head] | tail]}
    end
  end

  # Exits quoting mode
  def op(?], {mode, env, stack}) do
    cond do
      mode == 0 -> :erlang.error(:unquote_outside_data_mode)
      mode == 1 ->
        # Quoted items were accumulated by prepending, so reverse them.
        [head | tail] = stack
        {mode - 1, env, [:lists.reverse(head) | tail]}
      true ->
        [head | tail] = stack
        {mode - 1, env, [[?] | head] | tail]}
    end
  end

  # -- Basic combinators ---------------------------------------------
  # NOTE: per-clause `@doc` attributes on op/2 produced "redefining @doc"
  # warnings, so each primitive is documented with a plain comment instead.

  # duplicate (↠) [A _] → [A A _]
  def op(?↠, {0, env, [a | stack]}) do
    {0, env, [a, a | stack]}
  end

  # swap (⇄) [A B _] → [B A _]
  def op(?⇄, {0, env, [a, b | stack]}) do
    {0, env, [b, a | stack]}
  end

  # drop (↓) [A _] → [_]
  def op(?↓, {0, env, [_ | stack]}) do
    {0, env, stack}
  end

  # concatenate (⊕) [[A] [B] _] → [[A B] _]
  def op(?⊕, {0, env, [a, b | stack]}) do
    {0, env, [Enum.concat(a, b) | stack]}
  end

  # cons (×) [A [B] _] → [[A B] _]
  def op(?×, {0, env, [a, b | stack]}) do
    {0, env, [[a | b] | stack]}
  end

  # unit (∘) [A _] → [[A] _]
  def op(?∘, {0, env, [a | stack]}) do
    {0, env, [[a] | stack]}
  end

  # i (▶) [[A] _] → [A _]
  def op(?▶, {0, env, [a | stack]}) do
    run(a, {0, env, stack})
  end

  # dip (↝) [[A] B _] → [B A _]
  def op(?↝, {0, env, [a, b | stack]}) do
    {_, _, [c | _]} = run(a, {0, env, stack})
    {0, env, [b, c | stack]}
  end

  # -- Bindings and dynamic evaluation ---------------------------------

  # define (@) [A B _] → [_]
  def op(?@, {0, env, [name, code | stack]}) do
    {0, :dict.store(name, code, env), stack}
  end

  # -- Arithmetic ------------------------------------------------------

  # addition (+) [A B _] → [C _]
  def op(?+, {0, env, [a, b | stack]}) do
    {0, env, [a + b | stack]}
  end

  # subtraction (-) [A B _] → [C _]
  def op(?-, {0, env, [a, b | stack]}) do
    {0, env, [a - b | stack]}
  end

  # division (/) [A B _] → [C _]
  def op(?/, {0, env, [a, b | stack]}) do
    {0, env, [a / b | stack]}
  end

  # multiplication (*) [A B _] → [C _]
  def op(?*, {0, env, [a, b | stack]}) do
    {0, env, [a * b | stack]}
  end

  # modulo (%) [A B _] → [C _]
  def op(?%, {0, env, [a, b | stack]}) do
    {0, env, [mod(a, b) | stack]}
  end

  # square-root (√) [A _] → [B _]
  def op(?√, {0, env, [a | stack]}) do
    {0, env, [:math.sqrt(a) | stack]}
  end

  # round (⎨) [A _] → [B _]
  def op(?⎨, {0, env, [a | stack]}) do
    {0, env, [:erlang.round(a) | stack]}
  end

  # -- Conversions -----------------------------------------------------

  # list→integer (i) [[A] _] → [B _]
  def op(?i, {0, env, [a | stack]}) do
    # `list_to_integer/1` was removed from Kernel; use List.to_integer/1.
    {0, env, [List.to_integer(a) | stack]}
  end

  # integer→list (a) [A _] → [[B] _]
  def op(?a, {0, env, [a | stack]}) do
    # `integer_to_list/1` was removed from Kernel; use Integer.to_charlist/1.
    {0, env, [Integer.to_charlist(a) | stack]}
  end

  # -- Logic -----------------------------------------------------------

  # equals (=) [A B _] → [bool _]
  def op(?=, {0, env, [a, b | stack]}) do
    {0, env, [a == b | stack]}
  end

  # greater-than (>) [A B _] → [bool _]
  def op(?>, {0, env, [a, b | stack]}) do
    {0, env, [a > b | stack]}
  end

  # negate (¬) [A _] → [bool _]
  def op(?¬, {0, env, [a | stack]}) do
    {0, env, [!a | stack]}
  end

  # bottom (⊥) [_] → [bool _]
  def op(?⊥, {0, env, stack}) do
    {0, env, [false | stack]}
  end

  # branch (⌥) [[A] [B=>C] [D=>E] _] → [(C | E) _]
  def op(?⌥, {0, env, [test, consequent, alternate | stack]}) do
    if test, do: run(consequent, {0, env, stack}),
    else: run(alternate, {0, env, stack})
  end

  # -- Lists -----------------------------------------------------------

  # flatten (⇲) [[[A]] _] → [[A] _]
  def op(?⇲, {0, env, [xs | stack]}) do
    {0, env, [List.flatten(xs) | stack]}
  end

  # head (⊤) [[A B] _] → [A B _]
  def op(?⊤, {0, env, [[head | tail] | stack]}) do
    {0, env, [head, tail | stack]}
  end

  # -- Folds -----------------------------------------------------------

  # map (→) [[A=>B] [A] _] → [[B] _]
  def op(?→, {0, env, [f, xs | stack]}) do
    r = Enum.map(xs,
          fn (x) -> {_, _, [h | _]} = run(f, {0, env, [x | stack]})
                    h
          end)
    {0, env, [r | stack]}
  end

  # filter (⋈) [[A=>bool] [A] _] → [[A] _]
  def op(?⋈, {0, env, [f, xs | stack]}) do
    r = Enum.filter(xs,
          fn (x) -> {_, _, [h | _]} = run(f, {0, env, [x | stack]})
                    h
          end)
    {0, env, [r | stack]}
  end

  # fold (⚺) [[A B=>C] B [A] _] → [[C] _]
  def op(?⚺, {0, env, [f, initial, xs | stack]}) do
    r = List.foldr(xs, initial,
          fn (a, b) -> {_, _, [h | _]} = run(f, {0, env, [a, b | stack]})
                       h
          end)
    {0, env, [r | stack]}
  end

  # unfold (⚻) [[A=>maybe B] A _] → [[B] _]
  def op(?⚻, {0, env, [f, initial | stack]}) do
    r = unfold(initial,
          fn (x) -> {_, _, [h | _]} = run(f, {0, env, [x | stack]})
                    if h == false, do: :stop,
                    else: {:ok, h}
          end)
    {0, env, [r | stack]}
  end

  # -- Base cases ------------------------------------------------------

  # maybe-run (_) [_] → [A _]: codepoints bound in the environment run
  # their definition; anything else is pushed as data.
  def op(x, {0, env, stack}) do
    case :dict.find(x, env) do
      {:ok, source} -> run(source, {0, env, stack})
      :error -> {0, env, [x | stack]}
    end
  end

  # id (_) [_] → [A _]: in quoting mode, accumulate the codepoint.
  def op(x, {n, env, [as | stack]}) when n > 0 do
    {n, env, [[x | as] | stack]}
  end
end
|
lib/interpreter.ex
| 0.586641
| 0.654853
|
interpreter.ex
|
starcoder
|
defmodule Stripe.Cards do
  @moduledoc """
  Functions for working with cards at Stripe. Through this API you can:
  * create a card,
  * update a card,
  * get a card,
  * delete a card,
  * delete all cards,
  * list cards,
  * list all cards,
  * count cards.
  All requests require `owner_type` and `owner_id` parameters to be specified.
  `owner_type` must be one of the following:
  * `customer`,
  * `account`,
  * `recipient`.
  `owner_id` must be the ID of the owning object.
  Stripe API reference: https://stripe.com/docs/api/curl#card_object
  """

  @doc """
  Returns the Stripe endpoint path for cards owned by the given entity.
  """
  def endpoint_for_entity(entity_type, entity_id) do
    case entity_type do
      :customer -> "customers/#{entity_id}/sources"
      :account -> "accounts/#{entity_id}/external_accounts"
      :recipient -> "recipients/#{entity_id}/cards"
    end
  end

  @doc """
  Create a card.
  Creates a card for given owner type, owner ID using params.
  `params` must contain a "source" object. Inside the "source" object, the following parameters are required:
  * object,
  * number,
  * cvs,
  * exp_month,
  * exp_year.
  Returns a `{:ok, card}` tuple.
  ## Examples
  params = [
  source: [
  object: "card",
  number: "4111111111111111",
  cvc: 123,
  exp_month: 12,
  exp_year: 2020,
  metadata: [
  test_field: "test val"
  ]
  ]
  ]
  {:ok, card} = Stripe.Cards.create(:customer, customer_id, params)
  """
  def create(owner_type, owner_id, params) do
    create owner_type, owner_id, params, Stripe.config_or_env_key
  end

  @doc """
  Create a card. Accepts Stripe API key.
  Creates a card for given owner using params.
  `params` must contain a "source" object. Inside the "source" object, the following parameters are required:
  * object,
  * number,
  * cvs,
  * exp_month,
  * exp_year.
  Returns a `{:ok, card}` tuple.
  ## Examples
  {:ok, card} = Stripe.Cards.create(:customer, customer_id, params, key)
  """
  def create(owner_type, owner_id, params, key) do
    Stripe.make_request_with_key(:post, endpoint_for_entity(owner_type, owner_id), key, params)
    |> Stripe.Util.handle_stripe_response
  end

  @doc """
  Update a card.
  Updates a card for given owner using card ID and params.
  * `owner_type` must be one of the following:
  * `customer`,
  * `account`,
  * `recipient`.
  * `owner_id` must be the ID of the owning object.
  Returns a `{:ok, card}` tuple.
  ## Examples
  {:ok, card} = Stripe.Cards.update(:customer, customer_id, card_id, params)
  """
  def update(owner_type, owner_id, id, params) do
    update(owner_type, owner_id, id, params, Stripe.config_or_env_key)
  end

  @doc """
  Update a card. Accepts Stripe API key.
  Updates a card for given owner using card ID and params.
  Returns a `{:ok, card}` tuple.
  ## Examples
  {:ok, card} = Stripe.Cards.update(:customer, customer_id, card_id, params, key)
  """
  def update(owner_type, owner_id, id, params, key) do
    Stripe.make_request_with_key(:post, "#{endpoint_for_entity(owner_type, owner_id)}/#{id}", key, params)
    |> Stripe.Util.handle_stripe_response
  end

  @doc """
  Get a card.
  Gets a card for given owner using card ID.
  Returns a `{:ok, card}` tuple.
  ## Examples
  {:ok, card} = Stripe.Cards.get(:customer, customer_id, card_id)
  """
  def get(owner_type, owner_id, id) do
    get owner_type, owner_id, id, Stripe.config_or_env_key
  end

  @doc """
  Get a card. Accepts Stripe API key.
  Gets a card for given owner using card ID.
  Returns a `{:ok, card}` tuple.
  ## Examples
  {:ok, card} = Stripe.Cards.get(:customer, customer_id, card_id, key)
  """
  def get(owner_type, owner_id, id, key) do
    Stripe.make_request_with_key(:get, "#{endpoint_for_entity(owner_type, owner_id)}/#{id}", key)
    |> Stripe.Util.handle_stripe_response
  end

  @doc """
  Get a list of cards.
  Gets a list of cards for given owner.
  Accepts the following parameters:
  * `starting_after` - an offset (optional),
  * `limit` - a limit of items to be returned (optional; defaults to 10).
  Returns a `{:ok, cards}` tuple, where `cards` is a list of cards.
  ## Examples
  {:ok, cards} = Stripe.Cards.list(:customer, customer_id, 5) # Get a list of up to 10 cards, skipping first 5 cards
  {:ok, cards} = Stripe.Cards.list(:customer, customer_id, 5, 20) # Get a list of up to 20 cards, skipping first 5 cards
  """
  def list(owner_type, owner_id, starting_after, limit \\ 10) do
    # BUG FIX: the caller's `starting_after` was previously discarded and ""
    # passed in its place, so the offset never took effect.
    list owner_type, owner_id, Stripe.config_or_env_key, starting_after, limit
  end

  @doc """
  Get a list of cards. Accepts Stripe API key.
  Gets a list of cards for a given owner.
  Accepts the following parameters:
  * `starting_after` - an offset (optional),
  * `limit` - a limit of items to be returned (optional; defaults to 10).
  Returns a `{:ok, cards}` tuple, where `cards` is a list of cards.
  ## Examples
  {:ok, cards} = Stripe.Cards.list(:customer, customer_id, key, 5) # Get a list of up to 10 cards, skipping first 5 cards
  {:ok, cards} = Stripe.Cards.list(:customer, customer_id, key, 5, 20) # Get a list of up to 20 cards, skipping first 5 cards
  """
  def list(owner_type, owner_id, key, starting_after, limit) do
    Stripe.Util.list endpoint_for_entity(owner_type, owner_id), key, starting_after, limit
  end

  @doc """
  Delete a card.
  Deletes a card for given owner using card ID.
  Returns a `{:ok, card}` tuple.
  ## Examples
  {:ok, deleted_card} = Stripe.Cards.delete("card_id")
  """
  def delete(owner_type, owner_id, id) do
    delete owner_type, owner_id, id, Stripe.config_or_env_key
  end

  @doc """
  Delete a card. Accepts Stripe API key.
  Deletes a card for given owner using card ID.
  Returns a `{:ok, card}` tuple.
  ## Examples
  {:ok, deleted_card} = Stripe.Cards.delete("card_id", key)
  """
  def delete(owner_type, owner_id, id, key) do
    Stripe.make_request_with_key(:delete, "#{endpoint_for_entity(owner_type, owner_id)}/#{id}", key)
    |> Stripe.Util.handle_stripe_response
  end

  @doc """
  Delete all cards.
  Deletes all cards from given owner.
  Returns `:ok` atom.
  ## Examples
  :ok = Stripe.Cards.delete_all(:customer, customer_id)
  """
  def delete_all(owner_type, owner_id) do
    case all(owner_type, owner_id) do
      {:ok, cards} ->
        Enum.each cards, fn c -> delete(owner_type, owner_id, c["id"]) end
      {:error, err} -> raise err
    end
  end

  @doc """
  Delete all cards. Accepts Stripe API key.
  Deletes all cards from given owner.
  Returns `:ok` atom.
  ## Examples
  :ok = Stripe.Cards.delete_all(:customer, customer_id, key)
  """
  def delete_all(owner_type, owner_id, key) do
    # BUG FIX: previously listed cards with the default key via all/2,
    # ignoring the `key` argument; now the supplied key is used throughout.
    case all(owner_type, owner_id, key, [], "") do
      {:ok, customers} ->
        Enum.each customers, fn c -> delete(owner_type, owner_id, c["id"], key) end
      {:error, err} -> raise err
    end
  end

  # Page size used when walking the full card list.
  @max_fetch_size 100

  @doc """
  List all cards.
  Lists all cards for a given owner.
  Accepts the following parameters:
  * `accum` - a list to start accumulating cards to (optional; defaults to `[]`).,
  * `starting_after` - an offset (optional; defaults to `""`).
  Returns `{:ok, cards}` tuple.
  ## Examples
  {:ok, cards} = Stripe.Cards.all(:customer, customer_id, accum, starting_after)
  """
  def all(owner_type, owner_id, accum \\ [], starting_after \\ "") do
    all owner_type, owner_id, Stripe.config_or_env_key, accum, starting_after
  end

  @doc """
  List all cards. Accepts Stripe API key.
  Lists all cards for a given owner.
  Accepts the following parameters:
  * `accum` - a list to start accumulating cards to (optional; defaults to `[]`).,
  * `starting_after` - an offset (optional; defaults to `""`).
  Returns `{:ok, cards}` tuple.
  ## Examples
  {:ok, cards} = Stripe.Cards.all(:customer, customer_id, accum, starting_after, key)
  """
  def all(owner_type, owner_id, key, accum, starting_after) do
    case Stripe.Util.list_raw("#{endpoint_for_entity(owner_type, owner_id)}", key, @max_fetch_size, starting_after) do
      {:ok, resp} ->
        case resp[:has_more] do
          true ->
            # Recurse with the last card's id as the pagination cursor.
            last_sub = List.last(resp[:data])
            all(owner_type, owner_id, key, resp[:data] ++ accum, last_sub["id"])
          false ->
            result = resp[:data] ++ accum
            {:ok, result}
        end
      {:error, err} -> raise err
    end
  end

  @doc """
  Get total number of cards.
  Gets total number of cards for a given owner.
  Returns `{:ok, count}` tuple.
  ## Examples
  {:ok, count} = Stripe.Cards.count(:customer, customer_id)
  """
  def count(owner_type, owner_id) do
    count owner_type, owner_id, Stripe.config_or_env_key
  end

  @doc """
  Get total number of cards. Accepts Stripe API key.
  Gets total number of cards for a given owner.
  Returns `{:ok, count}` tuple.
  ## Examples
  {:ok, count} = Stripe.Cards.count(:customer, customer_id, key)
  """
  def count(owner_type, owner_id, key) do
    Stripe.Util.count "#{endpoint_for_entity(owner_type, owner_id)}", key
  end
end
|
lib/stripe/cards.ex
| 0.917001
| 0.493836
|
cards.ex
|
starcoder
|
defmodule TypeCheck.TypeError.Formatter do
  @moduledoc """
  Behaviour to format your own type errors
  """
  @typedoc """
  A `problem tuple` contains four fields:
  1. the module of the type for which a check did not pass
  2. an atom describing the exact error;
  for many types there are multiple checks
  3. a map with fields containing extra information about the error.
  in the cases of a compound type, this often contains information
  about the deeper problem that happened as well.
  4. the datastructure that did not pass the check
  See the module documentation of all `TypeCheck.Builtin.*` modules
  for more information about the checks that they perform and the problem tuples they might return.
  """
  # Earlier, plainer spellings of the type, kept for reference:
  # @type problem_tuple :: {module(), atom(), map(), any()}
  # use TypeCheck
  # type problem_tuple :: {module, atom, map, any}
  use TypeCheck
  # `@type!` is TypeCheck's macro for defining a runtime-checkable type.
  # The type is the union of the problem tuples of every built-in type module.
  # NOTE(review): Lazy and OneOf are commented out below — presumably they do
  # not (yet) export a problem_tuple type; confirm before re-enabling.
  @type! problem_tuple ::
  TypeCheck.Builtin.Any.problem_tuple()
  | TypeCheck.Builtin.Atom.problem_tuple()
  | TypeCheck.Builtin.Binary.problem_tuple()
  | TypeCheck.Builtin.Bitstring.problem_tuple()
  | TypeCheck.Builtin.Boolean.problem_tuple()
  | TypeCheck.Builtin.FixedList.problem_tuple()
  | TypeCheck.Builtin.FixedMap.problem_tuple()
  | TypeCheck.Builtin.FixedTuple.problem_tuple()
  | TypeCheck.Builtin.Float.problem_tuple()
  | TypeCheck.Builtin.Integer.problem_tuple()
  # | TypeCheck.Builtin.Lazy.problem_tuple
  | TypeCheck.Builtin.List.problem_tuple()
  | TypeCheck.Builtin.Literal.problem_tuple()
  | TypeCheck.Builtin.Map.problem_tuple()
  | TypeCheck.Builtin.NamedType.problem_tuple()
  | TypeCheck.Builtin.NegInteger.problem_tuple()
  | TypeCheck.Builtin.NonNegInteger.problem_tuple()
  | TypeCheck.Builtin.None.problem_tuple()
  | TypeCheck.Builtin.Number.problem_tuple()
  # | TypeCheck.Builtin.OneOf.problem_tuple
  | TypeCheck.Builtin.PosInteger.problem_tuple()
  | TypeCheck.Builtin.Range.problem_tuple()
  | TypeCheck.Builtin.Tuple.problem_tuple()
  @doc """
  A formatter is expected to turn a `problem_tuple` into a string
  that can be used as `:message` of the TypeCheck.TypeError exception.
  """
  @callback format(problem_tuple) :: String.t()
end
|
lib/type_check/type_error/formatter.ex
| 0.817356
| 0.552691
|
formatter.ex
|
starcoder
|
defmodule Mnemonex.Config do
alias TheFuzz.Phonetic.MetaphoneAlgorithm, as: MPA
@moduledoc """
word list configuration state
"""
defstruct words: {},
word_indices: %{},
words_version: nil,
metaphone_map: %{},
as_list: false,
total_words: nil,
base_words: nil,
rem_words: nil,
words_per_group: 0,
groups_per_line: 0,
word_sep: "",
group_sep: "",
line_prefix: "",
line_suffix: ""
@type t :: %__MODULE__{}
# Which word list is included
@words_version "0.7"
# For encoding 24-bit remainders
@short_words {"ego", "fax", "jet", "job", "rio", "ski", "yes"}
# Standard word list
@base_words {
"academy",
"acrobat",
"active",
"actor",
"adam",
"admiral",
"adrian",
"africa",
"agenda",
"agent",
"airline",
"airport",
"aladdin",
"alarm",
"alaska",
"albert",
"albino",
"album",
"alcohol",
"alex",
"algebra",
"alibi",
"alice",
"alien",
"alpha",
"alpine",
"amadeus",
"amanda",
"amazon",
"amber",
"america",
"amigo",
"analog",
"anatomy",
"angel",
"animal",
"antenna",
"antonio",
"apollo",
"april",
"archive",
"arctic",
"arizona",
"arnold",
"aroma",
"arthur",
"artist",
"asia",
"aspect",
"aspirin",
"athena",
"athlete",
"atlas",
"audio",
"august",
"austria",
"axiom",
"aztec",
"balance",
"ballad",
"banana",
"bandit",
"banjo",
"barcode",
"baron",
"basic",
"battery",
"belgium",
"berlin",
"bermuda",
"bernard",
"bikini",
"binary",
"bingo",
"biology",
"block",
"blonde",
"bonus",
"boris",
"boston",
"boxer",
"brandy",
"bravo",
"brazil",
"bronze",
"brown",
"bruce",
"bruno",
"burger",
"burma",
"cabinet",
"cactus",
"cafe",
"cairo",
"cake",
"calypso",
"camel",
"camera",
"campus",
"canada",
"canal",
"cannon",
"canoe",
"cantina",
"canvas",
"canyon",
"capital",
"caramel",
"caravan",
"carbon",
"cargo",
"carlo",
"carol",
"carpet",
"cartel",
"casino",
"castle",
"castro",
"catalog",
"caviar",
"cecilia",
"cement",
"center",
"century",
"ceramic",
"chamber",
"chance",
"change",
"chaos",
"charlie",
"charm",
"charter",
"chef",
"chemist",
"cherry",
"chess",
"chicago",
"chicken",
"chief",
"china",
"cigar",
"cinema",
"circus",
"citizen",
"city",
"clara",
"classic",
"claudia",
"clean",
"client",
"climax",
"clinic",
"clock",
"club",
"cobra",
"coconut",
"cola",
"collect",
"colombo",
"colony",
"color",
"combat",
"comedy",
"comet",
"command",
"compact",
"company",
"complex",
"concept",
"concert",
"connect",
"consul",
"contact",
"context",
"contour",
"control",
"convert",
"copy",
"corner",
"corona",
"correct",
"cosmos",
"couple",
"courage",
"cowboy",
"craft",
"crash",
"credit",
"cricket",
"critic",
"crown",
"crystal",
"cuba",
"culture",
"dallas",
"dance",
"daniel",
"david",
"decade",
"decimal",
"deliver",
"delta",
"deluxe",
"demand",
"demo",
"denmark",
"derby",
"design",
"detect",
"develop",
"diagram",
"dialog",
"diamond",
"diana",
"diego",
"diesel",
"diet",
"digital",
"dilemma",
"diploma",
"direct",
"disco",
"disney",
"distant",
"doctor",
"dollar",
"dominic",
"domino",
"donald",
"dragon",
"drama",
"dublin",
"duet",
"dynamic",
"east",
"ecology",
"economy",
"edgar",
"egypt",
"elastic",
"elegant",
"element",
"elite",
"elvis",
"email",
"energy",
"engine",
"english",
"episode",
"equator",
"escort",
"ethnic",
"europe",
"everest",
"evident",
"exact",
"example",
"exit",
"exotic",
"export",
"express",
"extra",
"fabric",
"factor",
"falcon",
"family",
"fantasy",
"fashion",
"fiber",
"fiction",
"fidel",
"fiesta",
"figure",
"film",
"filter",
"final",
"finance",
"finish",
"finland",
"flash",
"florida",
"flower",
"fluid",
"flute",
"focus",
"ford",
"forest",
"formal",
"format",
"formula",
"fortune",
"forum",
"fragile",
"france",
"frank",
"friend",
"frozen",
"future",
"gabriel",
"galaxy",
"gallery",
"gamma",
"garage",
"garden",
"garlic",
"gemini",
"general",
"genetic",
"genius",
"germany",
"global",
"gloria",
"golf",
"gondola",
"gong",
"good",
"gordon",
"gorilla",
"grand",
"granite",
"graph",
"green",
"group",
"guide",
"guitar",
"guru",
"hand",
"happy",
"harbor",
"harmony",
"harvard",
"havana",
"hawaii",
"helena",
"hello",
"henry",
"hilton",
"history",
"horizon",
"hotel",
"human",
"humor",
"icon",
"idea",
"igloo",
"igor",
"image",
"impact",
"import",
"index",
"india",
"indigo",
"input",
"insect",
"instant",
"iris",
"italian",
"jacket",
"jacob",
"jaguar",
"janet",
"japan",
"jargon",
"jazz",
"jeep",
"john",
"joker",
"jordan",
"jumbo",
"june",
"jungle",
"junior",
"jupiter",
"karate",
"karma",
"kayak",
"kermit",
"kilo",
"king",
"koala",
"korea",
"labor",
"lady",
"lagoon",
"laptop",
"laser",
"latin",
"lava",
"lecture",
"left",
"legal",
"lemon",
"level",
"lexicon",
"liberal",
"libra",
"limbo",
"limit",
"linda",
"linear",
"lion",
"liquid",
"liter",
"little",
"llama",
"lobby",
"lobster",
"local",
"logic",
"logo",
"lola",
"london",
"lotus",
"lucas",
"lunar",
"machine",
"macro",
"madam",
"madonna",
"madrid",
"maestro",
"magic",
"magnet",
"magnum",
"major",
"mama",
"mambo",
"manager",
"mango",
"manila",
"marco",
"marina",
"market",
"mars",
"martin",
"marvin",
"master",
"matrix",
"maximum",
"media",
"medical",
"mega",
"melody",
"melon",
"memo",
"mental",
"mentor",
"menu",
"mercury",
"message",
"metal",
"meteor",
"meter",
"method",
"metro",
"mexico",
"miami",
"micro",
"million",
"mineral",
"minimum",
"minus",
"minute",
"miracle",
"mirage",
"miranda",
"mister",
"mixer",
"mobile",
"model",
"modem",
"modern",
"modular",
"moment",
"monaco",
"monica",
"monitor",
"mono",
"monster",
"montana",
"morgan",
"motel",
"motif",
"motor",
"mozart",
"multi",
"museum",
"music",
"mustang",
"natural",
"neon",
"nepal",
"neptune",
"nerve",
"neutral",
"nevada",
"news",
"ninja",
"nirvana",
"normal",
"nova",
"novel",
"nuclear",
"numeric",
"nylon",
"oasis",
"object",
"observe",
"ocean",
"octopus",
"olivia",
"olympic",
"omega",
"opera",
"optic",
"optimal",
"orange",
"orbit",
"organic",
"orient",
"origin",
"orlando",
"oscar",
"oxford",
"oxygen",
"ozone",
"pablo",
"pacific",
"pagoda",
"palace",
"pamela",
"panama",
"panda",
"panel",
"panic",
"paradox",
"pardon",
"paris",
"parker",
"parking",
"parody",
"partner",
"passage",
"passive",
"pasta",
"pastel",
"patent",
"patriot",
"patrol",
"patron",
"pegasus",
"pelican",
"penguin",
"pepper",
"percent",
"perfect",
"perfume",
"period",
"permit",
"person",
"peru",
"phone",
"photo",
"piano",
"picasso",
"picnic",
"picture",
"pigment",
"pilgrim",
"pilot",
"pirate",
"pixel",
"pizza",
"planet",
"plasma",
"plaster",
"plastic",
"plaza",
"pocket",
"poem",
"poetic",
"poker",
"polaris",
"police",
"politic",
"polo",
"polygon",
"pony",
"popcorn",
"popular",
"postage",
"postal",
"precise",
"prefix",
"premium",
"present",
"price",
"prince",
"printer",
"prism",
"private",
"product",
"profile",
"program",
"project",
"protect",
"proton",
"public",
"pulse",
"puma",
"pyramid",
"queen",
"radar",
"radio",
"random",
"rapid",
"rebel",
"record",
"recycle",
"reflex",
"reform",
"regard",
"regular",
"relax",
"report",
"reptile",
"reverse",
"ricardo",
"ringo",
"ritual",
"robert",
"robot",
"rocket",
"rodeo",
"romeo",
"royal",
"russian",
"safari",
"salad",
"salami",
"salmon",
"salon",
"salute",
"samba",
"sandra",
"santana",
"sardine",
"school",
"screen",
"script",
"second",
"secret",
"section",
"segment",
"select",
"seminar",
"senator",
"senior",
"sensor",
"serial",
"service",
"sheriff",
"shock",
"sierra",
"signal",
"silicon",
"silver",
"similar",
"simon",
"single",
"siren",
"slogan",
"social",
"soda",
"solar",
"solid",
"solo",
"sonic",
"soviet",
"special",
"speed",
"spiral",
"spirit",
"sport",
"static",
"station",
"status",
"stereo",
"stone",
"stop",
"street",
"strong",
"student",
"studio",
"style",
"subject",
"sultan",
"super",
"susan",
"sushi",
"suzuki",
"switch",
"symbol",
"system",
"tactic",
"tahiti",
"talent",
"tango",
"tarzan",
"taxi",
"telex",
"tempo",
"tennis",
"texas",
"textile",
"theory",
"thermos",
"tiger",
"titanic",
"tokyo",
"tomato",
"topic",
"tornado",
"toronto",
"torpedo",
"total",
"totem",
"tourist",
"tractor",
"traffic",
"transit",
"trapeze",
"travel",
"tribal",
"trick",
"trident",
"trilogy",
"tripod",
"tropic",
"trumpet",
"tulip",
"tuna",
"turbo",
"twist",
"ultra",
"uniform",
"union",
"uranium",
"vacuum",
"valid",
"vampire",
"vanilla",
"vatican",
"velvet",
"ventura",
"venus",
"vertigo",
"veteran",
"victor",
"video",
"vienna",
"viking",
"village",
"vincent",
"violet",
"violin",
"virtual",
"virus",
"visa",
"vision",
"visitor",
"visual",
"vitamin",
"viva",
"vocal",
"vodka",
"volcano",
"voltage",
"volume",
"voyage",
"water",
"weekend",
"welcome",
"western",
"window",
"winter",
"wizard",
"wolf",
"world",
"xray",
"yankee",
"yoga",
"yogurt",
"yoyo",
"zebra",
"zero",
"zigzag",
"zipper",
"zodiac",
"zoom",
"abraham",
"action",
"address",
"alabama",
"alfred",
"almond",
"ammonia",
"analyze",
"annual",
"answer",
"apple",
"arena",
"armada",
"arsenal",
"atlanta",
"atomic",
"avenue",
"average",
"bagel",
"baker",
"ballet",
"bambino",
"bamboo",
"barbara",
"basket",
"bazaar",
"benefit",
"bicycle",
"bishop",
"blitz",
"bonjour",
"bottle",
"bridge",
"british",
"brother",
"brush",
"budget",
"cabaret",
"cadet",
"candle",
"capitan",
"capsule",
"career",
"cartoon",
"channel",
"chapter",
"cheese",
"circle",
"cobalt",
"cockpit",
"college",
"compass",
"comrade",
"condor",
"crimson",
"cyclone",
"darwin",
"declare",
"degree",
"delete",
"delphi",
"denver",
"desert",
"divide",
"dolby",
"domain",
"domingo",
"double",
"drink",
"driver",
"eagle",
"earth",
"echo",
"eclipse",
"editor",
"educate",
"edward",
"effect",
"electra",
"emerald",
"emotion",
"empire",
"empty",
"escape",
"eternal",
"evening",
"exhibit",
"expand",
"explore",
"extreme",
"ferrari",
"first",
"flag",
"folio",
"forget",
"forward",
"freedom",
"fresh",
"friday",
"fuji",
"galileo",
"garcia",
"genesis",
"gold",
"gravity",
"habitat",
"hamlet",
"harlem",
"helium",
"holiday",
"house",
"hunter",
"ibiza",
"iceberg",
"imagine",
"infant",
"isotope",
"jackson",
"jamaica",
"jasmine",
"java",
"jessica",
"judo",
"kitchen",
"lazarus",
"letter",
"license",
"lithium",
"loyal",
"lucky",
"magenta",
"mailbox",
"manual",
"marble",
"mary",
"maxwell",
"mayor",
"milk",
"monarch",
"monday",
"money",
"morning",
"mother",
"mystery",
"native",
"nectar",
"nelson",
"network",
"next",
"nikita",
"nobel",
"nobody",
"nominal",
"norway",
"nothing",
"number",
"october",
"office",
"oliver",
"opinion",
"option",
"order",
"outside",
"package",
"pancake",
"pandora",
"panther",
"papa",
"patient",
"pattern",
"pedro",
"pencil",
"people",
"phantom",
"philips",
"pioneer",
"pluto",
"podium",
"portal",
"potato",
"prize",
"process",
"protein",
"proxy",
"pump",
"pupil",
"python",
"quality",
"quarter",
"quiet",
"rabbit",
"radical",
"radius",
"rainbow",
"ralph",
"ramirez",
"ravioli",
"raymond",
"respect",
"respond",
"result",
"resume",
"retro",
"richard",
"right",
"risk",
"river",
"roger",
"roman",
"rondo",
"sabrina",
"salary",
"salsa",
"sample",
"samuel",
"saturn",
"savage",
"scarlet",
"scoop",
"scorpio",
"scratch",
"scroll",
"sector",
"serpent",
"shadow",
"shampoo",
"sharon",
"sharp",
"short",
"shrink",
"silence",
"silk",
"simple",
"slang",
"smart",
"smoke",
"snake",
"society",
"sonar",
"sonata",
"soprano",
"source",
"sparta",
"sphere",
"spider",
"sponsor",
"spring",
"acid",
"adios",
"agatha",
"alamo",
"alert",
"almanac",
"aloha",
"andrea",
"anita",
"arcade",
"aurora",
"avalon",
"baby",
"baggage",
"balloon",
"bank",
"basil",
"begin",
"biscuit",
"blue",
"bombay",
"brain",
"brenda",
"brigade",
"cable",
"carmen",
"cello",
"celtic",
"chariot",
"chrome",
"citrus",
"civil",
"cloud",
"common",
"compare",
"cool",
"copper",
"coral",
"crater",
"cubic",
"cupid",
"cycle",
"depend",
"door",
"dream",
"dynasty",
"edison",
"edition",
"enigma",
"equal",
"eric",
"event",
"evita",
"exodus",
"extend",
"famous",
"farmer",
"food",
"fossil",
"frog",
"fruit",
"geneva",
"gentle",
"george",
"giant",
"gilbert",
"gossip",
"gram",
"greek",
"grille",
"hammer",
"harvest",
"hazard",
"heaven",
"herbert",
"heroic",
"hexagon",
"husband",
"immune",
"inca",
"inch",
"initial",
"isabel",
"ivory",
"jason",
"jerome",
"joel",
"joshua",
"journal",
"judge",
"juliet",
"jump",
"justice",
"kimono",
"kinetic",
"leonid",
"lima",
"maze",
"medusa",
"member",
"memphis",
"michael",
"miguel",
"milan",
"mile",
"miller",
"mimic",
"mimosa",
"mission",
"monkey",
"moral",
"moses",
"mouse",
"nancy",
"natasha",
"nebula",
"nickel",
"nina",
"noise",
"orchid",
"oregano",
"origami",
"orinoco",
"orion",
"othello",
"paper",
"paprika",
"prelude",
"prepare",
"pretend",
"profit",
"promise",
"provide",
"puzzle",
"remote",
"repair",
"reply",
"rival",
"riviera",
"robin",
"rose",
"rover",
"rudolf",
"saga",
"sahara",
"scholar",
"shelter",
"ship",
"shoe",
"sigma",
"sister",
"sleep",
"smile",
"spain",
"spark",
"split",
"spray",
"square",
"stadium",
"star",
"storm",
"story",
"strange",
"stretch",
"stuart",
"subway",
"sugar",
"sulfur",
"summer",
"survive",
"sweet",
"swim",
"table",
"taboo",
"target",
"teacher",
"telecom",
"temple",
"tibet",
"ticket",
"tina",
"today",
"toga",
"tommy",
"tower",
"trivial",
"tunnel",
"turtle",
"twin",
"uncle",
"unicorn",
"unique",
"update",
"valery",
"vega",
"version",
"voodoo",
"warning",
"william",
"wonder",
"year",
"yellow",
"young",
"absent",
"absorb",
"accent",
"alfonso",
"alias",
"ambient",
"andy",
"anvil",
"appear",
"apropos",
"archer",
"ariel",
"armor",
"arrow",
"austin",
"avatar",
"axis",
"baboon",
"bahama",
"bali",
"balsa",
"bazooka",
"beach",
"beast",
"beatles",
"beauty",
"before",
"benny",
"betty",
"between",
"beyond",
"billy",
"bison",
"blast",
"bless",
"bogart",
"bonanza",
"book",
"border",
"brave",
"bread",
"break",
"broken",
"bucket",
"buenos",
"buffalo",
"bundle",
"button",
"buzzer",
"byte",
"caesar",
"camilla",
"canary",
"candid",
"carrot",
"cave",
"chant",
"child",
"choice",
"chris",
"cipher",
"clarion",
"clark",
"clever",
"cliff",
"clone",
"conan",
"conduct",
"congo",
"content",
"costume",
"cotton",
"cover",
"crack",
"current",
"danube",
"data",
"decide",
"desire",
"detail",
"dexter",
"dinner",
"dispute",
"donor",
"druid",
"drum",
"easy",
"eddie",
"enjoy",
"enrico",
"epoxy",
"erosion",
"except",
"exile",
"explain",
"fame",
"fast",
"father",
"felix",
"field",
"fiona",
"fire",
"fish",
"flame",
"flex",
"flipper",
"float",
"flood",
"floor",
"forbid",
"forever",
"fractal",
"frame",
"freddie",
"front",
"fuel",
"gallop",
"game",
"garbo",
"gate",
"gibson",
"ginger",
"giraffe",
"gizmo",
"glass",
"goblin",
"gopher",
"grace",
"gray",
"gregory",
"grid",
"griffin",
"ground",
"guest",
"gustav",
"gyro",
"hair",
"halt",
"harris",
"heart",
"heavy",
"herman",
"hippie",
"hobby",
"honey",
"hope",
"horse",
"hostel",
"hydro",
"imitate",
"info",
"ingrid",
"inside",
"invent",
"invest",
"invite",
"iron",
"ivan",
"james",
"jester",
"jimmy",
"join",
"joseph",
"juice",
"julius",
"july",
"justin",
"kansas",
"karl",
"kevin",
"kiwi",
"ladder",
"lake",
"laura",
"learn",
"legacy",
"legend",
"lesson",
"life",
"light",
"list",
"locate",
"lopez",
"lorenzo",
"love",
"lunch",
"malta",
"mammal",
"margo",
"marion",
"mask",
"match",
"mayday",
"meaning",
"mercy",
"middle",
"mike",
"mirror",
"modest",
"morph",
"morris",
"nadia",
"nato",
"navy",
"needle",
"neuron",
"never",
"newton",
"nice",
"night",
"nissan",
"nitro",
"nixon",
"north",
"oberon",
"octavia",
"ohio",
"olga",
"open",
"opus",
"orca",
"oval",
"owner",
"page",
"paint",
"palma",
"parade",
"parent",
"parole",
"paul",
"peace",
"pearl",
"perform",
"phoenix",
"phrase",
"pierre",
"pinball",
"place",
"plate",
"plato",
"plume",
"pogo",
"point",
"polite",
"polka",
"poncho",
"powder",
"prague",
"press",
"presto",
"pretty",
"prime",
"promo",
"quasi",
"quest",
"quick",
"quiz",
"quota",
"race",
"rachel",
"raja",
"ranger",
"region",
"remark",
"rent",
"reward",
"rhino",
"ribbon",
"rider",
"road",
"rodent",
"round",
"rubber",
"ruby",
"rufus",
"sabine",
"saddle",
"sailor",
"saint",
"salt",
"satire",
"scale",
"scuba",
"season",
"secure",
"shake",
"shallow",
"shannon",
"shave",
"shelf",
"sherman",
"shine",
"shirt",
"side",
"sinatra",
"sincere",
"size",
"slalom",
"slow",
"small",
"snow",
"sofia",
"song",
"sound",
"south",
"speech",
"spell",
"spend",
"spoon",
"stage",
"stamp",
"stand",
"state",
"stella",
"stick",
"sting",
"stock",
"store",
"sunday",
"sunset",
"support",
"sweden",
"swing",
"tape",
"think",
"thomas",
"tictac",
"time",
"toast",
"tobacco",
"tonight",
"torch",
"torso",
"touch",
"toyota",
"trade",
"tribune",
"trinity",
"triton",
"truck",
"trust",
"type",
"under",
"unit",
"urban",
"urgent",
"user",
"value",
"vendor",
"venice",
"verona",
"vibrate",
"virgo",
"visible",
"vista",
"vital",
"voice",
"vortex",
"waiter",
"watch",
"wave",
"weather",
"wedding",
"wheel",
"whiskey",
"wisdom",
"deal",
"null",
"nurse",
"quebec",
"reserve",
"reunion",
"roof",
"singer",
"verbal",
"amen"
}
# Update base/remainder if changed with word list change.
@doc """
initialize the word list state
"""
@doc """
Build the word-list configuration struct from the compiled-in word tuples
and the caller-supplied formatting options.
"""
def init(opts) do
  # Base words first, then the short remainder words appended after them.
  all_words = combine_tuples(@base_words, @short_words)

  %__MODULE__{
    words: all_words,
    total_words: tuple_size(all_words),
    base_words: tuple_size(@base_words),
    rem_words: tuple_size(@short_words),
    word_indices: word_map(all_words),
    metaphone_map: metaphone_map(all_words),
    words_version: @words_version,
    as_list: opts[:as_list],
    words_per_group: opts[:words_per_group],
    word_sep: opts[:word_separator],
    groups_per_line: opts[:groups_per_line],
    group_sep: opts[:group_separator],
    line_prefix: opts[:line_prefix],
    line_suffix: opts[:line_suffix]
  }
end
# Concatenate two tuples into one, flattening any nested lists among the
# elements exactly as the original list-based implementation did.
defp combine_tuples(first, second) do
  combined = Tuple.to_list(first) ++ Tuple.to_list(second)

  combined
  |> List.flatten()
  |> List.to_tuple()
end
# Build a map from each word to its zero-based index in the `words` tuple.
# Idiomatic rewrite: Enum.with_index/1 + Map.new/1 replaces the manual
# zip-with-range and reduce/Map.put loop, and also avoids constructing the
# decreasing range 0..-1 when the tuple is empty.
defp word_map(words) do
  words
  |> Tuple.to_list()
  |> Enum.with_index()
  |> Map.new()
end
# Reducer that keeps two accumulators: `uniq` holds keys seen exactly once
# (a repeat evicts the key), `all` holds every key ever seen.
#
# Bug fix: the original wrote `all = Map.put(uniq, k, v)`, rebuilding `all`
# from `uniq` — so a key evicted from `uniq` was also forgotten from `all`
# as soon as a different key arrived, and a later repeat of that key was
# wrongly re-admitted to `uniq` (e.g. the sequence a, a, b, a left `a` in
# `uniq`). `all` must accumulate into itself.
defp map_uniqs({k, v}, {uniq, all}) do
  uniq =
    case Map.fetch(all, k) do
      {:ok, _previous} -> Map.delete(uniq, k)
      :error -> Map.put(uniq, k, v)
    end

  {uniq, Map.put(all, k, v)}
end
# Map each word's metaphone code to the word itself, keeping only codes
# that correspond to exactly one word (ambiguous codes are dropped).
defp metaphone_map(words) do
  pairs =
    words
    |> Tuple.to_list()
    |> Enum.map(fn word -> {MPA.compute(word), word} end)

  {uniq, _seen} = Enum.reduce(pairs, {%{}, %{}}, &map_uniqs/2)
  uniq
end
end
|
lib/mnemonex/config.ex
| 0.654232
| 0.426202
|
config.ex
|
starcoder
|
defmodule Data.Parser.KV do
@moduledoc """
Creates parsers that accept KeyValue-style `Enum`s as input.
In particular, KV parsers work with:
- maps (e.g. `%{"hello" => "world"}`)
- `Keyword.t`s (e.g. `[hello: "world"]`)
- Lists of pairs (e.g. `[{"hello", "world"}]`)
KV parsers are higher-order parsers, and operate in roughly the same way as
`Data.Parser.list/1` or `Data.Parser.set/1`, but their definition is slightly
more involved. A KV parser is created with a list of `field_spec`s, where
each `field_spec` defines what fields of the input to look at, and what
parsers to run on them.
Here are some examples of `field_spec`s and their parsing behavior:
### `{:username, Data.Parser.BuiltIn.string()}`
This spec says that the input must contain a `:username` field, and the value
of that field must satisfy `Data.Parser.BuiltIn.string/0`. The output map
will contain the key-value pair `username: "some string"`.
If the field cannot be parsed successfully, the entire KV parser will return
`{:error, domain_error_with_details_on_parse_failure}`.
If the field is not present, the entire KV parser will return
`{:error, domain_error_with_details_about_field_not_found}`
### `{:birthday, Data.Parser.BuiltIn.date(), optional: true}`
This spec says that the input *may* contain a `:birthday` field. If the field
does exist, it must satisfy `Data.Parser.BuiltIn.date/0`.
If the field exists and parses successfully, the output map will contain the
key-value pair `birthday: {:just, ~D[1983-07-18]}`.
If the field does not exist, the output map will contain the key-value pair
`birthday: :nothing`.
If the field cannot be parsed successfully, the entire KV parser will return
`{:error, domain_error_with_parse_failure_details}`.
### `{:country, MyApp.country_parser(), default: "Canada"}`
This spec says that the input *may* contain a `:country` field, and if so, the
value of that field must parse successfully with `MyApp.country_parser/0`.
If the field exists and parses successfully, the output map will contain a
key-value pair such as: `country: "Indonesia"`.
If the field cannot be parsed successfully, the entire Constructor will return
`{:error, domain_error_with_details_on_parse_failure}`.
If the field does *not* exist, the `default` value will be used. In this
case, the output will contain the key-value pair: `country: "Canada"`
### `{:country, MyApp.country_parser(), from: :countryName}`
This spec says that the parser will use the data from `:countryName` in the
input map. If the value under this key satisfies the
`MyApp.country_parser()`, then the resulting value will be placed under the
`:country` field.
Note that the `from` keyname MUST always be specified as an atom, but it will
be applied automatically to string keys. If the input contains *both* a
string key and an atom key, the atom key will take priority.
### `{:point, MyApp.point_parser(), recurse: true}`
Sometimes you want to run several different parsers on the same input
map. For example, let's say your input looks like this:
```
%{x: 12,
y: -10,
value: 34,
name: "exploding_barrel"}
```
But the data structure you want after parsing looks like this:
```
%{point: %{x: 12, y: -10},
value: 34,
name: "exploding_barrel"}
```
And you have MyApp.point_parser() which accepts a map with `:x` and `:y`
integer keys and constructs `%{x: integer(), y: integer()}`.
You can define a field_spec with `recurse: true` and have that particular
parser get run on its *parent input map*, not on the value of a field.
"""
alias Data.Parser
alias FE.{Maybe, Result}
defmodule Field do
  @moduledoc false
  # Internal representation of one validated field spec:
  #   :name     - key under which the parsed value appears in the output map
  #   :from     - key read from the input (defaults to :name; see parse_field_spec)
  #   :parser   - Data.Parser.t run against the field's value
  #   :optional - when true, a missing field yields a Maybe instead of an error
  #   :default  - Maybe.t fallback used when the field is absent
  #   :recurse  - when true, the parser runs on the whole parent input map
  defstruct [:name, :from, :parser, :optional, :default, :recurse]
end
@typedoc """
KV parsers accept either a map or a `Keyword.t` as input.
"""
@type input :: map | Keyword.t()
@typedoc """
KV parsers accept `atom()`s as key names, but will work on inputs where
the keys are `String.t()`s as well.
"""
@type field_name :: atom()
@typedoc """
Options to relax requirements on the fields.
This is a list that consists of zero or one of the below options:
`{:optional, bool()}`
`{:default, any}`
"""
@type field_opts(a) :: [{:optional, bool()} | {:default, a}]
@typedoc """
A 2-tuple or 3-tuple describing the field to parse and parsing semantics.
`{field_name, parser}`
`{field_name, parser, opts}`
"""
@type field_spec(a, b) ::
{field_name(), Parser.t(a, b)} | {field_name(), Parser.t(a, b), field_opts(b)}
@typedoc """
A structure representing a `Data.Parser.t(a,b)` lifted to operate on a KV.
"""
@opaque field(a, b) :: %Field{
name: field_name(),
from: field_name(),
parser: Parser.t(a, b),
optional: boolean(),
default: Maybe.t(b),
recurse: boolean()
}
@doc """
Given a list of `field_spec`s, verify that all specs are well-formed and
return `{:ok, parser}`, where `parser` will accept a `map` or `Keyword` input
and apply the appropriate parsers to their corresponding fields.
If the `field_spec`s are not well-formed, return `{:error, Error.t}` with details
about the invalid `field_spec`s.
## Examples
iex> {:ok, p} = Data.Parser.KV.new([{:username, Data.Parser.BuiltIn.string()}])
...> p.(username: "johndoe")
{:ok, %{username: "johndoe"}}
iex> {:ok, p} = Data.Parser.KV.new([{:username, Data.Parser.BuiltIn.string()}])
...> p.(%{"username" => "johndoe"})
{:ok, %{username: "johndoe"}}
iex> {:error, e} = Data.Parser.KV.new(["not a spec"])
...> e.reason
:invalid_field_spec
...> e.details
%{spec: "not a spec"}
iex> {:ok, p} = Data.Parser.KV.new([{:a, Data.Parser.BuiltIn.integer(), optional: true}])
...> p.(a: 1)
{:ok, %{a: {:just, 1}}}
iex> {:ok, p} = Data.Parser.KV.new([{:a, Data.Parser.BuiltIn.integer(), optional: true}])
...> p.([])
{:ok, %{a: :nothing}}
iex> {:ok, p} = Data.Parser.KV.new([{:b, Data.Parser.BuiltIn.integer(), default: 0}])
...> p.([])
{:ok, %{b: 0}}
iex> {:ok, p} = Data.Parser.KV.new([{:b, Data.Parser.BuiltIn.integer(), default: 0}])
...> p.(b: 10)
{:ok, %{b: 10}}
iex> {:ok, p} = Data.Parser.KV.new([{:b, Data.Parser.BuiltIn.integer(), default: 0}])
...> {:error, e} = p.(b: "i am of the wrong type")
...> Error.reason(e)
:failed_to_parse_field
...> {:just, inner_error} = Error.caused_by(e)
...> Error.reason(inner_error)
:not_an_integer
iex> {:ok, p} = Data.Parser.KV.new([{:a, Data.Parser.BuiltIn.integer(), from: :theAValue}])
...> p.(%{theAValue: 123})
{:ok, %{a: 123}}
iex> {:ok, p} = Data.Parser.KV.new([{:a, Data.Parser.BuiltIn.integer(), from: :aStringKey}])
...> p.(%{"aStringKey" => 1234})
{:ok, %{a: 1234}}
iex> {:ok, point} = Data.Parser.KV.new([{:x, Data.Parser.BuiltIn.integer()}, {:y, Data.Parser.BuiltIn.integer()}])
...> {:ok, item} = Data.Parser.KV.new([{:point, point, recurse: true}, {:value, Data.Parser.BuiltIn.integer()}])
...> item.(%{x: 1, y: -1, value: 34})
{:ok, %{value: 34, point: %{x: 1, y: -1}}}
iex> {:ok, point} = Data.Parser.KV.new([{:x, Data.Parser.BuiltIn.integer()}, {:y, Data.Parser.BuiltIn.integer()}])
...> {:ok, item} = Data.Parser.KV.new([{:point, point, recurse: true}, {:value, Data.Parser.BuiltIn.integer()}])
...> {:error, e} = item.(%{x: "wrong", y: -1, value: 34})
...> {:just, e2} = e.caused_by
...> e2.reason
:failed_to_parse_field
iex> {:ok, point} = Data.Parser.KV.new([{:x, Data.Parser.BuiltIn.integer()}, {:y, Data.Parser.BuiltIn.integer()}])
...> {:ok, item} = Data.Parser.KV.new([{:point, point, recurse: true}, {:value, Data.Parser.BuiltIn.integer()}])
...> {:error, e} = item.(%{y: -1, value: 34})
...> {:just, e2} = e.caused_by
...> e2.reason
:field_not_found_in_input
"""
@spec new([field_spec(a, b)]) :: Result.t(Parser.t(a, b), Error.t()) when a: var, b: var
def new(field_specs) when is_list(field_specs) do
  # Validate every spec up front via parse_field_spec/2; any malformed spec
  # short-circuits the fold with its error. On success, return a closure
  # that runs all validated fields against a given input.
  Result.ok([])
  |> Result.fold(field_specs, &parse_field_spec/2)
  |> Result.map(fn fields -> fn input -> run(fields, input) end end)
end

def new(_other) do
  # Anything that isn't a list of field specs is rejected outright.
  Error.domain(:not_a_list) |> Result.error()
end
@spec run([field(any, any)], input) :: Result.t(map, Error.t())
# Lists are only acceptable input when they are proper keyword lists;
# those are converted to a map and re-dispatched.
defp run(fields, input) when is_list(input) do
  if Keyword.keyword?(input) do
    run(fields, Map.new(input))
  else
    Error.domain(:invalid_input, %{input: input}) |> Result.error()
  end
end

# Apply every field to the input map, collecting {name, value} pairs and
# folding them into the output map; the first field error aborts the run.
defp run(fields, input) when is_map(input) do
  Result.ok([])
  |> Result.fold(fields, &run_for_field(&1, &2, input))
  |> Result.map(&Map.new/1)
end

defp run(_fields, other) do
  Error.domain(:invalid_input, %{input: other}) |> Result.error()
end
# Normalize one raw field spec into a %Field{} and prepend it to `acc`.
# A bare {name, parser} pair is just a 3-tuple spec with no options.
defp parse_field_spec({field_name, parser}, acc) do
  parse_field_spec({field_name, parser, []}, acc)
end

defp parse_field_spec({field_name, parser, opts} = spec, acc) do
  optional = Keyword.get(opts, :optional, false)
  recurse = Keyword.get(opts, :recurse, false)
  from = Keyword.get(opts, :from, field_name)

  default =
    case Keyword.fetch(opts, :default) do
      {:ok, value} -> Maybe.just(value)
      :error -> Maybe.nothing()
    end

  # :optional and :default are mutually exclusive — a field cannot both
  # fall back to :nothing and fall back to a concrete default value.
  if optional == true and match?({:just, _}, default) do
    Error.domain(:invalid_field_spec, %{spec: spec}) |> Result.error()
  else
    field = %Field{
      name: field_name,
      from: from,
      parser: parser,
      optional: optional,
      default: default,
      recurse: recurse
    }

    Result.ok([field | acc])
  end
end

defp parse_field_spec(other, _acc) do
  # Not a 2- or 3-tuple: report the malformed spec.
  Error.domain(:invalid_field_spec, %{spec: other}) |> Result.error()
end
# Recursive fields parse the entire parent input rather than a single key.
defp run_for_field(%Field{recurse: true} = field, acc, input) do
  existing_field(field, acc, input, input)
end

# Ordinary fields look up their source key; presence decides whether we
# parse the value or fall back to optional/default handling.
defp run_for_field(%Field{from: from, recurse: false} = field, acc, input) do
  case fetch_key(input, from) do
    {:ok, value} -> existing_field(field, acc, value, input)
    :error -> missing_field(field, acc, input)
  end
end
# Look `key` up under its atom form first, falling back to its string form.
# Atom keys deliberately shadow string keys when both are present.
defp fetch_key(%{} = input, key) when is_atom(key) do
  with :error <- Map.fetch(input, key) do
    Map.fetch(input, Atom.to_string(key))
  end
end
# Run the field's parser on the value found in the input. On success,
# prepend {name, parsed} to `acc` (wrapped in a Maybe for optional fields);
# on failure, wrap the parser error in a :failed_to_parse_field domain error.
defp existing_field(%Field{name: name, parser: parser, optional: optional}, acc, value, input) do
  wrap_failure = fn error ->
    Error.wrap(
      error,
      Error.domain(:failed_to_parse_field, %{field: name, input: input})
    )
  end

  value
  |> parser.()
  |> Result.map(fn parsed ->
    entry =
      case optional do
        true -> {name, Maybe.just(parsed)}
        false -> {name, parsed}
      end

    [entry | acc]
  end)
  |> Result.map_error(wrap_failure)
end
# The field was absent from the input. Optional fields yield :nothing,
# defaulted fields yield their default, and required fields without a
# default are a hard error. (optional + default is rejected by new/1,
# so that combination never reaches this function.)
defp missing_field(%Field{name: name, optional: true, default: :nothing}, acc, _input) do
  Result.ok([{name, Maybe.nothing()} | acc])
end

defp missing_field(%Field{name: name, optional: false, default: {:just, default_value}}, acc, _input) do
  Result.ok([{name, default_value} | acc])
end

defp missing_field(%Field{name: name, optional: false, default: :nothing}, _acc, input) do
  Error.domain(:field_not_found_in_input, %{field: name, input: input}) |> Result.error()
end
end
|
lib/data/parser/kv.ex
| 0.933287
| 0.934455
|
kv.ex
|
starcoder
|
defmodule Grizzly.ZWave.Commands.ExtendedNodeAddStatus do
@moduledoc """
ExtendedNodeAddStatus
This command is used to report the result of a node inclusion of a node
with an extended node id, normally a long range node.
Params:
* `:seq_number` - the sequence number of the inclusion command
* `:status` - the status of the inclusion
* `:node_id` - the new id of the new Z-Wave node
* `:listening?` - if the node is a listening node or not
* `:basic_device_class` - the Z-Wave basic device class
* `:generic_device_class` - the Z-Wave generic device class
* `:specific_device_class` - the Z-Wave specific device class
* `:command_classes` - a list of the command class the device supports, tagged by their security level
used only if the device was included securely
* `:granted_keys` - the security keys granted during S2 inclusion
* `:kex_fail_type` - the error that occurred in the S2 bootstrapping
"""
@behaviour Grizzly.ZWave.Command
alias Grizzly.ZWave.{Command, CommandClasses, Security}
alias Grizzly.ZWave.CommandClasses.NetworkManagementInclusion
@type tagged_command_classes() ::
{:non_secure_supported, [CommandClasses.command_class()]}
| {:non_secure_controlled, [CommandClasses.command_class()]}
| {:secure_supported, [CommandClasses.command_class()]}
| {:secure_controlled, [CommandClasses.command_class()]}
@type param() ::
{:node_id, Grizzly.node_id()}
| {:status, NetworkManagementInclusion.node_add_status()}
| {:seq_number, Grizzly.seq_number()}
| {:listening?, boolean()}
| {:basic_device_class, byte()}
| {:generic_device_class, byte()}
| {:specific_device_class, byte()}
| {:command_classes, [tagged_command_classes()]}
| {:granted_keys, [Security.key()]}
| {:kex_fail_type, Security.key_exchange_fail_type()}
@impl Grizzly.ZWave.Command
@spec new([param()]) :: {:ok, Command.t()}
def new(params \\ []) do
  # TODO: validate params
  {:ok,
   %Command{
     name: :extended_node_add_status,
     command_byte: 0x16,
     command_class: NetworkManagementInclusion,
     params: params,
     impl: __MODULE__
   }}
end
@impl Grizzly.ZWave.Command
# Encode the command params into the Z-Wave binary frame. A :failed status
# carries no node info (just a node-info length of 0x01); otherwise the
# frame is: seq, status, node_id (16 bits), node-info length, listening bit
# + 7 reserved bits, one reserved byte, the three device classes, the
# command class list, then the granted-keys / kex-fail bytes.
def encode_params(command) do
  node_id = Command.param!(command, :node_id)
  status = Command.param!(command, :status)
  seq_number = Command.param!(command, :seq_number)
  status_byte = NetworkManagementInclusion.node_add_status_to_byte(status)

  if status == :failed do
    # Failed inclusion: no node info follows, only a node-info length of 1.
    <<seq_number, status_byte, node_id::16, 0x01>>
  else
    listening? = Command.param!(command, :listening?)
    basic_device_class = Command.param!(command, :basic_device_class)
    generic_device_class = Command.param!(command, :generic_device_class)
    specific_device_class = Command.param!(command, :specific_device_class)
    command_classes = Command.param!(command, :command_classes)
    # We add 6 to the length of the command classes to account for the 3 device
    # classes 2 Z-Wave protocol bytes and the node info length byte.
    # Also add the number of command classes plus 4 bytes for the separators
    # See SDS13784 4.4.8.2 for more details
    node_info_length = 6 + cc_count(command_classes)
    listening_bit = if listening?, do: 1, else: 0
    # TODO: fix opt func bit (after the listening bit)
    <<seq_number, status_byte, node_id::16, node_info_length, listening_bit::size(1),
      0x00::size(7), 0x00, basic_device_class, generic_device_class,
      specific_device_class>> <>
      CommandClasses.command_class_list_to_binary(command_classes) <>
      add_keys_granted_and_fail_type(command)
  end
end
@impl Grizzly.ZWave.Command
# Decode a status report frame back into params.
#
# Bug fix: the specific 5-byte "failed" clause must come BEFORE the general
# clause. Previously the general clause (with `node_info_bin::binary`) was
# listed first, so it matched every frame — the failed clause was
# unreachable dead code, and a failed report fed <<0x01>> to
# parse_node_info/1 instead of yielding the documented defaults.
def decode_params(<<seq_number, status_byte, node_id::16, 0x01>>) do
  # A lone node-info length of 0x01 means no node info was reported
  # (the inclusion failed), so fill in neutral defaults.
  {:ok,
   [
     status: NetworkManagementInclusion.parse_node_add_status(status_byte),
     seq_number: seq_number,
     node_id: node_id,
     listening?: false,
     basic_device_class: :unknown,
     generic_device_class: :unknown,
     specific_device_class: :unknown,
     command_classes: []
   ]}
end

def decode_params(<<seq_number, status_byte, node_id::16, node_info_bin::binary>>) do
  node_info = NetworkManagementInclusion.parse_node_info(node_info_bin)

  params =
    %{
      status: NetworkManagementInclusion.parse_node_add_status(status_byte),
      seq_number: seq_number,
      node_id: node_id
    }
    |> Map.merge(node_info)
    |> Enum.into([])

  {:ok, params}
end
# Trailing two bytes of the non-failed encoding: the granted keys byte and
# the key-exchange failure type byte.
defp add_keys_granted_and_fail_type(command) do
  granted_keys_byte =
    command
    |> Command.param!(:keys_granted)
    |> Security.keys_to_byte()

  fail_type_byte =
    command
    |> Command.param!(:kex_fail_type)
    |> Security.failed_type_to_byte()

  <<granted_keys_byte, fail_type_byte>>
end
# Total byte count of the encoded command class list: one byte per command
# class identifier plus the per-group padding bytes (see get_padding/1).
defp cc_count(tagged_command_classes) do
  identifier_count =
    tagged_command_classes
    |> Keyword.values()
    |> List.flatten()
    |> length()

  identifier_count + get_padding(tagged_command_classes)
end
# Extra padding bytes contributed by each non-empty command class group:
# 2 for :secure_supported, 1 for the controlled groups, 0 otherwise
# (empty groups contribute nothing regardless of their tag).
defp get_padding(tagged_command_classes) do
  tagged_command_classes
  |> Enum.map(fn
    {_tag, []} -> 0
    {:secure_supported, _ccs} -> 2
    {tag, _ccs} when tag in [:non_secure_controlled, :secure_controlled] -> 1
    {_tag, _ccs} -> 0
  end)
  |> Enum.sum()
end
end
|
lib/grizzly/zwave/commands/extended_node_add_status.ex
| 0.7413
| 0.527621
|
extended_node_add_status.ex
|
starcoder
|
defmodule Advent2019Web.Day05Controller do
  use Advent2019Web, :controller

  @doc """
  Given a list of operations, a position, an input list and the history of
  previous operations and output, runs the program until completion. It returns
  the final position, list of outputs and history of operations including
  the given ones.
  It returns as a last element :finished if it finished or :hanging if it
  is hanging waiting for more input
  """
  def run_intcode(op_data_map, position, input, output, history) do
    # Opcode cell is zero-padded to 5 chars: 3 mode digits + 2 op digits.
    op_str = String.pad_leading("#{op_data_map[position]}", 5, "0")
    # the three modes (m3 is unused: write targets are always positional)
    [_m3, m2, m1 | _] = String.codepoints(op_str)
    # the op itself
    op = String.slice(op_str, 3, 2) |> String.to_integer()
    # the original "immediate" arguments
    arg1_imm = op_data_map[position + 1]
    arg2_imm = op_data_map[position + 2]
    # the three arguments, can be nil if they are not actually used by the op
    # but it's OK, it will be ignored then
    # the second element for the tuple is for representing the computation in the FE
    {arg1_use, pos1} =
      if m1 == "1" do
        {arg1_imm, position + 1}
      else
        {op_data_map[arg1_imm], arg1_imm}
      end

    {arg2_use, pos2} =
      if m2 == "1" do
        {arg2_imm, position + 2}
      else
        {op_data_map[arg2_imm], arg2_imm}
      end

    # this cannot be in immediate mode...
    arg3_imm = op_data_map[position + 3]

    case op do
      1 ->
        # sum
        run_intcode(
          Map.replace!(
            op_data_map,
            arg3_imm,
            arg1_use + arg2_use
          ),
          position + 4,
          input,
          output,
          history ++
            [
              %{
                op: "add",
                target_pos: arg3_imm,
                input_pos: [pos1, pos2],
                current_state: op_data_map,
                position: position
              }
            ]
        )

      2 ->
        # mul
        run_intcode(
          Map.replace!(
            op_data_map,
            arg3_imm,
            arg1_use * arg2_use
          ),
          position + 4,
          input,
          output,
          history ++
            [
              %{
                op: "mul",
                target_pos: arg3_imm,
                input_pos: [pos1, pos2],
                current_state: op_data_map,
                position: position
              }
            ]
        )

      3 when input != [] ->
        [consumed_input | remaining_input] = input
        # input
        run_intcode(
          Map.replace!(
            op_data_map,
            arg1_imm,
            consumed_input
          ),
          position + 2,
          remaining_input,
          output,
          history ++
            [
              %{
                op: "input",
                target_pos: arg1_imm,
                input_pos: [nil, nil],
                current_state: op_data_map,
                position: position
              }
            ]
        )

      3 when input == [] ->
        # Cannot consume input that isn't there: suspend, caller may resume.
        {op_data_map, position, output, history, :hanging}

      4 ->
        # output
        # (was a stray no-op string literal "output"; a comment was intended)
        run_intcode(
          op_data_map,
          position + 2,
          input,
          output ++ [arg1_use],
          history ++
            [
              %{
                op: "output",
                # NOTE(review): input_pos uses arg1_imm rather than pos1, so in
                # immediate mode the FE is pointed at the literal — confirm intended.
                target_pos: nil,
                input_pos: [arg1_imm, nil],
                current_state: op_data_map,
                position: position
              }
            ]
        )

      5 ->
        # jump-if-true
        next_position =
          if arg1_use != 0 do
            arg2_use
          else
            position + 3
          end

        run_intcode(
          op_data_map,
          next_position,
          input,
          output,
          history ++
            [
              %{
                op: "jump-if-true",
                target_pos: nil,
                input_pos: [pos1, pos2],
                current_state: op_data_map,
                position: position
              }
            ]
        )

      6 ->
        # jump-if-false
        next_position =
          if arg1_use == 0 do
            arg2_use
          else
            position + 3
          end

        run_intcode(
          op_data_map,
          next_position,
          input,
          output,
          history ++
            [
              %{
                op: "jump-if-false",
                target_pos: nil,
                input_pos: [pos1, pos2],
                current_state: op_data_map,
                position: position
              }
            ]
        )

      7 ->
        # less than
        to_store =
          if arg1_use < arg2_use do
            1
          else
            0
          end

        run_intcode(
          Map.replace!(
            op_data_map,
            arg3_imm,
            to_store
          ),
          position + 4,
          input,
          output,
          history ++
            [
              %{
                op: "less than",
                target_pos: arg3_imm,
                input_pos: [pos1, pos2],
                current_state: op_data_map,
                position: position
              }
            ]
        )

      8 ->
        # equals
        to_store =
          if arg1_use == arg2_use do
            1
          else
            0
          end

        run_intcode(
          Map.replace!(
            op_data_map,
            arg3_imm,
            to_store
          ),
          position + 4,
          input,
          output,
          history ++
            [
              %{
                # Fixed: was mislabeled "less than" (copy-paste from op 7).
                op: "equals",
                target_pos: arg3_imm,
                input_pos: [pos1, pos2],
                current_state: op_data_map,
                position: position
              }
            ]
        )

      99 ->
        {op_data_map, position, output, history, :finished}
    end
  end

  @doc """
  Converts a list into a map keyed by the 0-based index of each element,
  which is the memory representation `run_intcode/5` operates on.
  """
  def list_to_map(l) do
    l
    |> Enum.with_index()
    |> Enum.map(fn {v, k} -> {k, v} end)
    |> Map.new()
  end

  @doc """
  Controller action for part 1: runs the posted program with input [1]
  and renders the last output plus the full execution trace as JSON.
  """
  def solve1(conn, params) do
    {processed_map, _, output, history, :finished} =
      run_intcode(list_to_map(params["_json"]), 0, [1], [], [])

    IO.puts("Day 05.1 result: #{processed_map[0]}")

    json(conn, %{
      result: List.last(output),
      final_map: processed_map,
      history: history,
      output: output
    })
  end

  @doc """
  Controller action for part 2: same as `solve1/2` but with input [5].
  """
  def solve2(conn, params) do
    {processed_map, _, output, history, :finished} =
      run_intcode(list_to_map(params["_json"]), 0, [5], [], [])

    IO.puts("Day 05.2 result: #{processed_map[0]}")

    json(conn, %{
      result: List.last(output),
      final_map: processed_map,
      history: history,
      output: output
    })
  end
end
|
lib/advent2019_web/controllers/day05_controller.ex
| 0.614741
| 0.470615
|
day05_controller.ex
|
starcoder
|
defmodule Recurly.XML.Schema do
  @moduledoc """
  Module responsible for handling schemas of the resources. Schemas are used for building
  the resource structs as well as marshalling and unmarshalling the xml. The syntax for
  defining schemas is similar to Ecto:
  ```
  defmodule MyResource do
    use Recurly.XML.Schema
    schema :my_resource do
      field :a_string, :string
      field :an_integer, :integer
      field :a_float, :float, read_only: true
    end
  end
  ```
  Calling `use Recurly.XML.Schema` adds two macros, `schema` and `field`. It also defines
  two functions on the module:
  - `__resource_name__/0`
  - `__schema__/0`
  The first returns the name of the resource, e.g. `:my_resource` above.
  The second returns the fields and their options.
  It also calls `defstruct` passing in the names of the fields to define the type.
  """
  defstruct [:fields, :resource_type]
  alias Recurly.XML.Field
  @doc false
  defmacro __using__(_) do
    quote do
      import Recurly.XML.Schema, only: :macros
      # accumulate: true — each field/3 call prepends to @schema_fields
      Module.register_attribute(__MODULE__, :schema_fields, accumulate: true)
    end
  end
  @doc """
  Defines the schema for this resource
  """
  defmacro schema(resource_name, do: block) do
    quote do
      # try/after runs the block (the field/3 calls) purely for its side
      # effects on @schema_fields, discarding its return value.
      try do
        unquote(block)
      after
        :ok
      end
      # Accumulated attributes are in reverse order; restore declaration order.
      fields = @schema_fields |> Enum.reverse
      Module.eval_quoted __ENV__, [
        Recurly.XML.Schema.__defstruct__(fields),
        Recurly.XML.Schema.__defschema__(fields),
        Recurly.XML.Schema.__defname__(unquote(resource_name))
      ]
    end
  end
  @doc """
  Defines a field in the schema
  """
  defmacro field(name, type, opts \\ []) do
    quote do
      Recurly.XML.Schema.__field__(__MODULE__, unquote(name), unquote(type), unquote(opts))
    end
  end
  # Quoted definition of __resource_name__/0 for the using module.
  def __defname__(name) do
    quote do
      @doc false
      def __resource_name__ do
        unquote(name)
      end
    end
  end
  # Quoted defstruct for the using module: all field names plus :__meta__.
  def __defstruct__(fields) do
    quote do
      keys = Enum.map(unquote(fields), fn kwl ->
        Keyword.get(kwl, :name)
      end)
      defstruct keys ++ [:__meta__]
    end
  end
  # TODO probably a more efficient way to do this?
  # Quoted definition of __schema__/0 for the using module.
  def __defschema__(fields) do
    quote do
      @doc false
      def __schema__ do
        # Convert the raw keyword lists to Fields
        fields =
          unquote(fields)
          |> Enum.map(fn field ->
            %Field{
              name: Keyword.get(field, :name),
              type: Keyword.get(field, :type),
              opts: Keyword.get(field, :opts)
            }
          end)
        %Recurly.XML.Schema{
          fields: fields,
          resource_type: __MODULE__
        }
      end
    end
  end
  # Records one field declaration on the using module's @schema_fields.
  def __field__(mod, name, type, opts) do
    Module.put_attribute(mod, :schema_fields, [name: name, type: type, opts: opts])
  end
  # Public Interface
  # Returns the %Recurly.XML.Schema{} for a resource module.
  def get(resource_type) do
    resource_type.__schema__
  end
  # Returns the schema's fields with read_only fields removed.
  def fields(schema, :writeable) do
    schema
    |> Map.get(:fields, [])
    |> Enum.reject(fn field ->
      Keyword.get(field.opts, :read_only)
    end)
  end
  # Finds a field by name, or nil if not declared.
  def find_field(schema, name) do
    schema
    |> Map.get(:fields)
    |> Enum.find(fn field -> field.name == name end)
  end
end
|
lib/recurly/xml/schema.ex
| 0.645679
| 0.756605
|
schema.ex
|
starcoder
|
defmodule CommerceCure.Year do
  @moduledoc """
  Assumes years are from 1000 - 9999
  """
  alias __MODULE__

  @type year :: integer
  @type t :: %Year{year: year}

  @enforce_keys [:year]
  defstruct [:year]

  @doc """
  iex> Year.new(17)
  {:ok, %Year{year: 2017}}
  iex> Year.new("114")
  {:ok, %Year{year: 2114}}
  iex> Year.new("12whs")
  {:ok, %Year{year: 2012}}
  iex> Year.new("whs12")
  {:error, :not_integer}

  underpinned with Integer.parse/1 for string parsing
  years larger than 9999 or negative will raise
  """
  @spec new(String.t | integer) :: {:ok, t} | {:error, atom}
  def new(year)

  def new(term) when is_integer(term) do
    {:ok, %__MODULE__{year: prepend_millenium(term)}}
  end

  def new(term) when is_binary(term) do
    case Integer.parse(term) do
      {int, _} ->
        new(int)

      _ ->
        {:error, :not_integer}
    end
  end

  @doc """
  iex> Year.to_two_digits(%{year: 4242})
  "42"
  """
  @spec to_two_digits(t) :: String.t
  def to_two_digits(%{year: year}) do
    year
    |> Integer.digits()
    |> Enum.slice(-2..-1)
    |> Enum.join()
  end

  @doc """
  iex> Year.to_string(%{year: 2017})
  "2017"
  """
  @spec to_string(t) :: String.t
  def to_string(%{year: year}), do: "#{year}"

  # Completes a 1-3 digit year with the leading digits of the current year,
  # e.g. 17 -> 2017 while today is in 2000-2099. Passes 4-digit years through
  # and raises outside 0..9999.
  defp prepend_millenium(year, prepend_year \\ Date.utc_today())

  # Fixed guard gap: `year > 0` left 0 unmatched, so new(0) crashed with a
  # FunctionClauseError; 0 is now completed like any other short year.
  defp prepend_millenium(year, %{year: prepend_year}) when year >= 0 and year <= 999 do
    year = Integer.digits(year)

    # Keep the leading digits of the current year, append the given digits.
    prepend_year
    |> Integer.digits()
    |> Enum.slice(0..-length(year)-1)
    |> Kernel.++(year)
    |> Integer.undigits()
  end

  defp prepend_millenium(year, _) when year > 999 and year <= 9999, do: year

  defp prepend_millenium(year, _) when year > 9999 do
    raise ArgumentError, "#{inspect year} cannot be larger than 9999"
  end

  defp prepend_millenium(year, _) when year < 0 do
    raise ArgumentError, "#{inspect year} cannot be negative"
  end

  ## Helpers
  defimpl String.Chars do
    @doc """
    Fixed doctest: `Year.new/1` returns an `{:ok, %Year{}}` tuple, and tuples
    do not implement `String.Chars`, so the previous example
    (`Year.new(1562) |> to_string()`) raised `Protocol.UndefinedError`.

    iex> {:ok, year} = Year.new(1562)
    iex> to_string(year)
    "1562"
    """
    def to_string(%{year: year}) do
      Year.to_string(%{year: year})
    end
  end
end
|
lib/commerce_cure/data_type/year.ex
| 0.721547
| 0.469155
|
year.ex
|
starcoder
|
defmodule Trike.Proxy do
  @moduledoc """
  A Ranch protocol that receives TCP packets, extracts OCS messages from them,
  generates a CloudEvent for each message, and forwards the event to Amazon
  Kinesis.
  """
  use GenServer
  require Logger
  alias ExAws.Kinesis
  alias Trike.CloudEvent
  @behaviour :ranch_protocol
  @type t() :: %__MODULE__{
          socket: :gen_tcp.socket() | nil,
          stream: String.t(),
          partition_key: String.t() | nil,
          buffer: binary(),
          received: integer(),
          put_record_fn:
            (Kinesis.stream_name(), binary(), binary() -> {:ok, term()} | {:error, term()}),
          clock: module()
        }
  @enforce_keys [:stream, :put_record_fn, :clock]
  defstruct @enforce_keys ++ [:socket, :partition_key, buffer: "", received: 0]
  # ASCII EOT (0x04): delimiter between OCS messages in the TCP stream.
  @eot <<4>>
  # NOTE(review): compile_env freezes this interval at compile time; confirm
  # that runtime reconfiguration is not expected here.
  @staleness_check_interval_ms Application.compile_env(:trike, :staleness_check_interval_ms)
  @impl :ranch_protocol
  def start_link(ref, transport, opts) do
    GenServer.start_link(__MODULE__, {
      ref,
      transport,
      opts[:stream],
      opts[:kinesis_client],
      opts[:clock]
    })
  end
  @impl GenServer
  def init({ref, transport, stream, kinesis_client, clock}) do
    :timer.send_interval(@staleness_check_interval_ms, :staleness_check)
    # The Ranch handshake must happen after init returns, hence handle_continue.
    {:ok,
     %__MODULE__{
       stream: stream,
       put_record_fn: &kinesis_client.put_record/3,
       clock: clock
     }, {:continue, {ref, transport}}}
  end
  @impl GenServer
  def handle_continue({ref, transport}, state) do
    {:ok, socket} = :ranch.handshake(ref)
    :ok = transport.setopts(socket, active: true)
    # The local->peer address pair doubles as the Kinesis partition key.
    connection_string = format_socket(socket)
    Logger.info(["Accepted socket: ", connection_string])
    {:noreply,
     %{
       state
       | socket: socket,
         partition_key: connection_string
     }}
  end
  @impl GenServer
  # Incoming TCP data: append to the buffer, emit one CloudEvent per complete
  # EOT-delimited message, and keep any trailing partial message buffered.
  def handle_info(
        {:tcp, _socket, data},
        %{
          buffer: buffer,
          partition_key: partition_key,
          clock: clock,
          stream: stream
        } = state
      ) do
    {messages, rest} = extract(buffer <> data)
    current_time = clock.utc_now()
    Enum.each(messages, fn msg ->
      with {:ok, event} <- CloudEvent.from_ocs_message(msg, current_time, partition_key),
           {:ok, event_json} <- Jason.encode(event) do
        # A Kinesis put failure crashes this proxy process (let it crash).
        {:ok, _result} = state.put_record_fn.(stream, partition_key, event_json)
      else
        error ->
          Logger.info(["Failed to parse message: ", inspect(error)])
      end
    end)
    {:noreply, %{state | buffer: rest, received: state.received + 1}}
  end
  def handle_info({:tcp_closed, socket}, state) do
    Logger.info(["Socket closed: ", inspect(socket)])
    {:stop, :normal, state}
  end
  # Periodic liveness log; resets the per-interval packet counter.
  def handle_info(:staleness_check, %{received: received} = state) do
    Logger.info("Stale Proxy pid=#{inspect(self())}, received=#{received}")
    {:noreply, %{state | received: 0}}
  end
  def handle_info(msg, state) do
    Logger.info(["Proxy received unknown message: ", inspect(msg)])
    {:noreply, state}
  end
  # Splits the buffer on EOT. The final split element (after the last EOT, or
  # the whole buffer when no EOT is present) is returned as the leftover to
  # carry into the next packet.
  @spec extract(binary()) :: {[binary()], binary()}
  defp extract(buffer) do
    statements = String.split(buffer, @eot)
    {messages, [rest]} = Enum.split(statements, -1)
    {messages, rest}
  end
  # Renders "{local_ip:port -> peer_ip:port}"; falls back to inspecting the
  # first :inet error tuple if either lookup fails.
  @spec format_socket(:gen_tcp.socket()) :: String.t()
  defp format_socket(sock) do
    with {:ok, {local_ip, local_port}} <- :inet.sockname(sock),
         {:ok, {peer_ip, peer_port}} <- :inet.peername(sock) do
      "{#{:inet.ntoa(local_ip)}:#{local_port} -> #{:inet.ntoa(peer_ip)}:#{peer_port}}"
    else
      unexpected -> inspect(unexpected)
    end
  end
end
|
lib/trike/proxy.ex
| 0.792344
| 0.425605
|
proxy.ex
|
starcoder
|
defmodule Chex.Move do
  @moduledoc false
  alias Chex.{Piece, Square}
  alias Chex.Move.{SAN, Smith}
  # Character-code ranges/atoms used to normalize parser output (?a == 97, ?1 == 49).
  @files ?a..?h
  @file_atoms [:a, :b, :c, :d, :e, :f, :g, :h]
  @ranks ?1..?8
  @rank_ints 1..8
  @piece_atoms [:rook, :knight, :bishop, :queen, :king]
  defstruct destination: nil,
            origin: nil,
            piece: :pawn,
            capture: false,
            check: false,
            checkmate: false,
            en_passant: false,
            castle: nil,
            promote: nil
  @doc """
  Parses a move string within the context of a `t:Chex.game()` and return a `t:Chex.move()` or an error tuple.
  """
  @spec parse(String.t(), Chex.game()) :: Chex.move() | {:error, term}
  def parse(move, game) when is_binary(move) do
    # NOTE(review): when neither notation matches, the cond raises
    # CondClauseError, which the rescue converts to {:error, :invalid_move} —
    # exception-as-control-flow, but preserved as-is here.
    try do
      cond do
        Smith.valid?(move) -> Smith.parse_move(move)
        SAN.valid?(move) -> SAN.parse_move(move)
      end
      |> to_map()
      |> to_move(game)
      |> maybe_error()
    rescue
      _e in CondClauseError -> {:error, :invalid_move}
    end
  end
  @doc """
  Takes a NimbleParsec returned expression and returns a `%Move{}`
  """
  @spec to_map({:ok, keyword()} | {:error, term()}) :: %__MODULE__{} | {:error, term()}
  def to_map({:ok, list}) do
    move = Map.merge(%__MODULE__{}, Enum.into(list, %{}))
    # Normalize raw parser charlists into piece atoms and square tuples.
    %{
      move
      | piece: parse_piece(move.piece),
        destination: parse_square(move.destination),
        origin: parse_square(move.origin),
        promote: parse_piece(move.promote)
    }
  end
  def to_map({:error, _} = error), do: error
  # Charlist like 'Q' -> piece atom; anything already parsed passes through.
  defp parse_piece(list) when is_list(list) do
    list
    |> to_string()
    |> Piece.from_string()
    |> Tuple.to_list()
    |> List.first()
  end
  defp parse_piece(piece), do: piece
  # Two chars = full square; a lone rank digit or file letter is a partial
  # disambiguation hint (?1 is 49, so - 48 yields the integer rank).
  def parse_square(list) when length(list) == 2, do: Square.from_charlist(list)
  def parse_square([char]) when char in @ranks, do: char - 48
  def parse_square([char]) when char in @files, do: List.to_existing_atom([char])
  def parse_square(_), do: nil
  # Castling resolves to fixed king moves for the side to move.
  def to_move(%{castle: :kingside}, %{active_color: :white}), do: {{:e, 1}, {:g, 1}}
  def to_move(%{castle: :kingside}, %{active_color: :black}), do: {{:e, 8}, {:g, 8}}
  def to_move(%{castle: :queenside}, %{active_color: :white}), do: {{:e, 1}, {:c, 1}}
  def to_move(%{castle: :queenside}, %{active_color: :black}), do: {{:e, 8}, {:c, 8}}
  # No origin given: derive it from which piece can reach the destination.
  def to_move(%{destination: to, origin: nil} = san_map, game) do
    from = find_origin(game, {san_map.piece, game.active_color}, to)
    move_tuple(from, to, san_map.promote)
  end
  # Fully specified origin square: nothing to derive.
  def to_move(%{destination: {_df, _dr} = to, origin: {_of, _or} = from} = san_map, _game) do
    from = find_origin(game, {san_map.piece, game.active_color}, to, from)
    move_tuple(from, to, san_map.promote)
  end
  def to_move(%{destination: to, origin: from} = san_map, game) do
    from = find_origin(game, {san_map.piece, game.active_color}, to, from)
    move_tuple(from, to, san_map.promote)
  end
  @spec find_origin(Chex.game(), Chex.piece(), Chex.square()) :: Chex.square() | nil
  defp find_origin(game, piece, dest) do
    Chex.Board.find_pieces(game, piece)
    |> Enum.find(fn sq -> dest in Chex.Piece.possible_moves(game, sq) end)
  end
  # The following clauses cascade: raw char codes are first converted to
  # file atoms / integer ranks, then the atom/integer clauses do the lookup.
  defp find_origin(game, piece, dest, from_file) when from_file in @files do
    from_file = List.to_existing_atom([from_file])
    find_origin(game, piece, dest, from_file)
  end
  defp find_origin(game, piece, dest, from_rank) when from_rank in @ranks do
    from_rank = from_rank - 48
    find_origin(game, piece, dest, from_rank)
  end
  defp find_origin(game, piece, dest, {ff, fr}) when fr in @ranks do
    fr = fr - 48
    find_origin(game, piece, dest, {ff, fr})
  end
  defp find_origin(game, piece, dest, {ff, fr}) when ff in @files do
    ff = List.to_existing_atom([ff])
    find_origin(game, piece, dest, {ff, fr})
  end
  defp find_origin(game, piece, dest, from_file) when from_file in @file_atoms do
    Chex.Board.find_pieces(game, piece)
    |> Enum.filter(fn sq -> dest in Chex.Piece.possible_moves(game, sq) end)
    |> Enum.filter(fn {f, _} -> f == from_file end)
    |> List.first()
  end
  defp find_origin(game, piece, dest, from_rank) when from_rank in @rank_ints do
    Chex.Board.find_pieces(game, piece)
    |> Enum.filter(fn sq -> dest in Chex.Piece.possible_moves(game, sq) end)
    |> Enum.filter(fn {_, r} -> r == from_rank end)
    |> List.first()
  end
  defp find_origin(game, piece, dest, {ff, fr}) when fr in @rank_ints do
    Chex.Board.find_pieces(game, piece)
    |> Enum.filter(fn sq -> dest in Chex.Piece.possible_moves(game, sq) end)
    |> Enum.filter(fn
      {^ff, ^fr} -> true
      _ -> false
    end)
    |> List.first()
  end
  defp move_tuple(from, to, nil), do: {from, to}
  defp move_tuple(from, to, promote), do: {from, to, promote}
  # Validates the final tuple shape; anything else is an invalid move.
  defp maybe_error({{tf, tr}, {ff, fr}} = move)
       when tf in @file_atoms and ff in @file_atoms and
              tr in @rank_ints and fr in @rank_ints,
       do: move
  defp maybe_error({{tf, tr}, {ff, fr}, piece} = move)
       when tf in @file_atoms and ff in @file_atoms and
              tr in @rank_ints and fr in @rank_ints and
              piece in @piece_atoms,
       do: move
  defp maybe_error(_move), do: {:error, :invalid_move}
end
|
lib/chex/move.ex
| 0.785638
| 0.552902
|
move.ex
|
starcoder
|
defmodule Oracledbex do
  require Logger
  @moduledoc """
  Interface for interacting with Oracle Database via an ODBC driver for Elixir.
  It implements `DBConnection` behaviour, using `:odbc` to connect to the
  system's ODBC driver. Requires Oracle Database ODBC driver, see
  [ODBCREADME](readme.html) for installation instructions.
  """
  alias Oracle.Query
  alias Oracle.Type
  @doc """
  Connect to a Oracle Database using ODBC.
  `opts` expects a keyword list with zero or more of:
  * `:dsn` - name of driver the adapter will use
  * `:username` - Username
  * `:password` - The user's password
  `Oracledbex` uses the `DBConnection` framework and supports all `DBConnection`
  options like `:idle`, `:after_connect` etc.
  See `DBConnection.start_link/2` for more information.
  ## Examples
      {:ok, pid} = Oracledbex.start_link([{"DSN","ORACLEODBC"}])
      {:ok, #PID<0.268.0>}
  """
  @spec start_link(Keyword.t) :: {:ok, pid}
  def start_link(opts) do
    DBConnection.start_link(Oracle.Protocol, opts)
  end
  @doc """
  Executes a query against an Oracle Database with ODBC.
  `conn` expects a `Oracledbex` process identifier.
  `statement` expects a SQL query string.
  `params` expects a list of values in one of the following formats:
  * Strings with only valid ASCII characters, which will be sent to the
    database as strings.
  * `Decimal` structs, which will be encoded as strings so they can be
    sent to the database with arbitrary precision.
  * Integers, which will be sent as-is if under 10 digits or encoded
    as strings for larger numbers.
  `opts` expects a keyword list with zero or more of:
  * `:mode` - set to `:savepoint` to use a savepoint to rollback to before the
    query on error, otherwise set to `:transaction` (default: `:transaction`);
  Result values will be encoded according to the following conversions:
  * char and varchar: strings.
  * nchar and nvarchar: strings unless `:preserve_encoding` is set to `true`
    in which case they will be returned as UTF16 Little Endian binaries.
  * int, smallint, tinyint, decimal and numeric when precision < 10 and
    scale = 0 (i.e. effectively integers): integers.
  * float, real, double precision, decimal and numeric when precision between
    10 and 15 and/or scale between 1 and 15: `Decimal` structs.
  * bigint, money, decimal and numeric when precision > 15: strings.
  * date: `{year, month, day}`
  * uniqueidentifier, time, binary, varbinary, rowversion: not currently
    supported due to adapter limitations. Select statements for columns
    of these types must convert them to supported types (e.g. varchar).
  """
  @spec query(pid(), binary(), [Type.param()], Keyword.t) ::
    {:ok, iodata(), Oracle.Result.t}
  def query(conn, statement, params, opts \\ []) do
    # Anonymous (unnamed) query: prepared and executed in one round trip.
    DBConnection.prepare_execute(
      conn, %Query{name: "", statement: statement}, params, opts)
  end
  @doc """
  Executes a query against an Oracle Database with ODBC.
  Raises an error on failure. See `query/4` for details.
  """
  @spec query!(pid(), binary(), [Type.param()], Keyword.t) ::
    {iodata(), Oracle.Result.t}
  def query!(conn, statement, params, opts \\ []) do
    DBConnection.prepare_execute!(
      conn, %Query{name: "", statement: statement}, params, opts)
  end
end
|
lib/oracledbex.ex
| 0.926868
| 0.524029
|
oracledbex.ex
|
starcoder
|
defmodule Fxnk.Math do
  @moduledoc """
  `Fxnk.Math` are functions dealing with math.
  """
  import Fxnk.Functions, only: [curry: 1]

  @doc """
  Find the maximum of a list.

  ## Example
      iex> Fxnk.Math.max([1337, 42, 23])
      1337
  """
  @spec max([any(), ...]) :: any()
  def max([hd | _] = args) do
    find_max(args, hd)
  end

  defp find_max([hd | []], max) when hd < max, do: max
  defp find_max([hd | []], max) when hd > max, do: hd
  defp find_max([hd | []], _), do: hd
  defp find_max([hd | tail], max) when hd < max, do: find_max(tail, max)
  defp find_max([hd | tail], max) when hd > max, do: find_max(tail, hd)
  defp find_max([hd | tail], _), do: find_max(tail, hd)

  @doc """
  Find the minimum of a list

  ## Example
      iex> Fxnk.Math.min([1337, 42, 23])
      23
  """
  @spec min([...]) :: any()
  def min([hd | _] = args) do
    find_min(args, hd)
  end

  defp find_min([hd | []], min) when hd > min, do: min
  defp find_min([hd | []], min) when hd < min, do: hd
  # Fixed: this catch-all (mirroring find_max/2) was missing, so a
  # single-element tail equal to the running minimum — e.g. min([5]) or
  # min([3, 3]) — fell through to the recursive clauses and crashed with a
  # FunctionClauseError on find_min([], _).
  defp find_min([hd | []], _), do: hd
  defp find_min([hd | tail], min) when hd > min, do: find_min(tail, min)
  defp find_min([hd | tail], min) when hd < min, do: find_min(tail, hd)
  defp find_min([hd | tail], _), do: find_min(tail, hd)

  @doc """
  Add two numbers together

  ## Example
      iex> Fxnk.Math.add(1, 2)
      3
  """
  @spec add(number(), number()) :: number()
  def add(a, b) when is_number(a) and is_number(b) do
    a + b
  end

  @doc """
  Curried `Add/2`

  ## Example
      iex> addOne = Fxnk.Math.add(1)
      iex> addOne.(2)
      3
  """
  @spec add(number()) :: (number() -> number())
  def add(n) when is_number(n) do
    curry(fn arg -> arg + n end)
  end

  @doc """
  Subtract the second argument from the first.

  ## Examples
      iex> Fxnk.Math.subtract(5, 1)
      4
  """
  @spec subtract(number(), number()) :: number()
  def subtract(a, b) when is_number(a) and is_number(b) do
    a - b
  end

  @doc """
  Curried `subtract/2`

  ## Examples
      iex> minusOne = Fxnk.Math.subtract(1)
      iex> minusOne.(5)
      4
  """
  @spec subtract(number()) :: (number() -> number())
  def subtract(n) when is_number(n) do
    curry(fn arg -> arg - n end)
  end

  @doc """
  Division.
  divide(a, b) == a / b

  ## Examples
      iex(1)> Fxnk.Math.divide(1, 4)
      0.25
  """
  @spec divide(number(), number()) :: float()
  def divide(a, b) when is_number(a) and is_number(b) do
    a / b
  end

  @doc """
  Curried `divide`

  ## Examples
      iex> recip = Fxnk.Math.divide(1)
      iex> recip.(4)
      0.25
  """
  @spec divide(number()) :: (number() -> number())
  def divide(n) when is_number(n) do
    curry(fn arg -> n / arg end)
  end

  @doc """
  Multiplication
  multiply(a, b) == a * b

  ## Examples
      iex> Fxnk.Math.multiply(10, 10)
      100
  """
  @spec multiply(number(), number()) :: number()
  def multiply(a, b) when is_number(a) and is_number(b) do
    a * b
  end

  @doc """
  Curried `multiply/2`

  ## Examples
      iex> timesTen = Fxnk.Math.multiply(10)
      iex> timesTen.(10)
      100
  """
  @spec multiply(number()) :: (number() -> number())
  def multiply(n) when is_number(n) do
    curry(fn arg -> n * arg end)
  end

  @doc """
  Averages a list of numbers, returns a float.

  ## Examples
      iex> Fxnk.Math.avg([1,4,3,2,5])
      3.0
  """
  @spec avg([number(), ...]) :: float()
  def avg([hd | tail] = list) when is_list(list) do
    avg(tail, hd, 1)
  end

  # Fixed: single-element lists — avg([5]) calls avg([], 5, 1), which
  # previously had no matching clause and crashed. The accumulator already
  # holds the full sum and count here, so just divide.
  defp avg([], n, len), do: n / len
  defp avg([hd | []], n, len), do: (hd + n) / inc(len)
  defp avg([hd | tl], n, len), do: avg(tl, hd + n, inc(len))

  @doc """
  Multiply a number times -1.

  ## Examples
      iex> Fxnk.Math.negate(100)
      -100
      iex> Fxnk.Math.negate(-100)
      100
  """
  @spec negate(number()) :: number()
  def negate(n) do
    n * -1
  end

  @doc """
  Increment a number

  ## Example
      iex> Fxnk.Math.inc(1)
      2
  """
  @spec inc(number()) :: number()
  def inc(n) when is_number(n) do
    n + 1
  end

  @doc """
  Decrement a number

  ## Example
      iex> Fxnk.Math.dec(1)
      0
  """
  @spec dec(number()) :: number()
  def dec(n) when is_number(n) do
    n - 1
  end

  @doc """
  Curried `clamp/3`. Restrict a number to be between a range of numbers.

  ### Example
      iex> between1And10 = Fxnk.Math.clamp(1, 10)
      iex> between1And10.(-5)
      1
      iex> between1And10.(15)
      10
  """
  @spec clamp(number(), number()) :: (number() -> number())
  def clamp(from, to) do
    curry(fn n -> clamp(n, from, to) end)
  end

  @doc """
  Restrict a number to be between a range of numbers.

  ### Example
      iex> Fxnk.Math.clamp(13, 15, 20)
      15
      iex> Fxnk.Math.clamp(21, 15, 20)
      20
      iex> Fxnk.Math.clamp(17, 15, 20)
      17
  """
  @spec clamp(number(), number(), number()) :: number()
  def clamp(n, from, to) do
    cond do
      n <= from -> from
      n >= to -> to
      true -> n
    end
  end
end
|
lib/fxnk/Math.ex
| 0.882826
| 0.494324
|
Math.ex
|
starcoder
|
defmodule DiscoveryApi.Data.Model do
  @moduledoc """
  utilities to persist and load discovery data models
  """
  alias DiscoveryApi.Data.Persistence
  @behaviour Access
  @derive Jason.Encoder
  defstruct [
    :accessLevel,
    :categories,
    :completeness,
    :conformsToUri,
    :contactEmail,
    :contactName,
    :describedByMimeType,
    :describedByUrl,
    :description,
    :downloads,
    :fileTypes,
    :homepage,
    :id,
    :issuedDate,
    :keywords,
    :language,
    :lastUpdatedDate,
    :license,
    :modifiedDate,
    :name,
    :organization,
    :organizationDetails,
    :parentDataset,
    :private,
    :publishFrequency,
    :queries,
    :referenceUrls,
    :rights,
    :schema,
    :sourceFormat,
    :sourceType,
    :sourceUrl,
    :spatial,
    :systemName,
    :temporal,
    :title
  ]
  # Builds a %Model{} from raw data, normalizing organizationDetails into an
  # OrganizationDetails struct (atom keys) and the schema into atom-keyed maps.
  def new(data) do
    model = struct(DiscoveryApi.Data.Model, data)
    org_with_atom_keys =
      model.organizationDetails
      |> from_struct()
      |> Enum.map(&string_to_atom/1)
      |> Map.new()
    org_details = struct(DiscoveryApi.Data.OrganizationDetails, org_with_atom_keys)
    Map.put(model, :organizationDetails, org_details)
    |> Map.put(:schema, SmartCity.Helpers.to_atom_keys(model.schema))
  end
  # NOTE(review): String.to_atom on data keys can grow the atom table —
  # assumed these keys come from a bounded internal schema; confirm.
  defp string_to_atom({k, v}) when is_atom(k) do
    {k, v}
  end
  defp string_to_atom({k, v}) when is_binary(k) do
    {String.to_atom(k), v}
  end
  defp from_struct(%_type{} = data) do
    Map.from_struct(data)
  end
  defp from_struct(data), do: data
  # Loads one model by id from the Brook view state, enriched with
  # completeness/downloads/queries stats from Redis. Returns nil when absent.
  @spec get(any) :: any
  def get(id) do
    {:ok, model} = Brook.ViewState.get(DiscoveryApi.instance(), :models, id)
    model
    |> ensure_struct()
    |> add_system_attributes()
  end
  # Loads every stored model, enriched with system attributes.
  def get_all() do
    {:ok, models} = Brook.ViewState.get_all(DiscoveryApi.instance(), :models)
    models
    |> Map.values()
    |> add_system_attributes()
  end
  # Loads only the models whose ids appear in `ids`.
  def get_all(ids) do
    {:ok, models} = Brook.ViewState.get_all(DiscoveryApi.instance(), :models)
    models
    |> Enum.filter(fn {k, _v} -> k in ids end)
    |> Enum.map(fn {_k, v} -> v end)
    |> add_system_attributes()
  end
  # Extracts the "completeness" score from a stored JSON blob; nil-safe.
  def get_completeness({id, completeness}) do
    processed_completeness =
      case completeness do
        nil -> nil
        score -> score |> Jason.decode!() |> Map.get("completeness", nil)
      end
    {id, processed_completeness}
  end
  # sobelow_skip ["DOS.StringToAtom"]
  # Returns a map of count-type (e.g. downloads/queries, taken from the Redis
  # key's second segment) to its stored count for the given dataset.
  def get_count_maps(dataset_id) do
    case Persistence.get_keys("smart_registry:*:count:" <> dataset_id) do
      [] ->
        %{}
      all_keys ->
        friendly_keys = Enum.map(all_keys, fn key -> String.to_atom(Enum.at(String.split(key, ":"), 1)) end)
        all_values = Persistence.get_many(all_keys)
        Enum.into(0..(Enum.count(friendly_keys) - 1), %{}, fn friendly_key ->
          {Enum.at(friendly_keys, friendly_key), Enum.at(all_values, friendly_key)}
        end)
    end
  end
  # True when the model's data lives in a remote system.
  def remote?(model) do
    model.sourceType == "remote"
  end
  @impl Access
  def fetch(term, key), do: Map.fetch(term, key)
  @impl Access
  def get_and_update(data, key, func) do
    Map.get_and_update(data, key, func)
  end
  @impl Access
  def pop(data, key), do: Map.pop(data, key)
  defp add_system_attributes(nil), do: nil
  # Single-model variant: wrap, enrich via the list variant, unwrap.
  defp add_system_attributes(%__MODULE__{} = model) do
    model
    |> List.wrap()
    |> add_system_attributes()
    |> List.first()
  end
  # Batch-fetches all Redis stat keys for the models in one round trip, then
  # merges completeness/downloads/queries into each model.
  defp add_system_attributes(models) do
    redis_kv_results =
      Enum.map(models, &Map.get(&1, :id))
      |> get_all_keys()
      |> Persistence.get_many_with_keys()
    Enum.map(models, fn model ->
      completeness = redis_kv_results["discovery-api:stats:#{model.id}"]
      downloads = redis_kv_results["smart_registry:downloads:count:#{model.id}"]
      queries = redis_kv_results["smart_registry:queries:count:#{model.id}"]
      model
      |> ensure_struct()
      |> Map.put(:completeness, completeness)
      |> Map.put(:downloads, downloads)
      |> Map.put(:queries, queries)
    end)
  end
  # The three stat keys tracked per model id.
  defp get_all_keys(ids) do
    ids
    |> Enum.map(fn id ->
      [
        "smart_registry:downloads:count:#{id}",
        "smart_registry:queries:count:#{id}",
        "discovery-api:stats:#{id}"
      ]
    end)
    |> List.flatten()
  end
  defp ensure_struct(nil), do: nil
  defp ensure_struct(%__MODULE__{} = model), do: model
  defp ensure_struct(%{} = model), do: struct(__MODULE__, model)
end
|
apps/discovery_api/lib/discovery_api/data/model.ex
| 0.812904
| 0.431045
|
model.ex
|
starcoder
|
defmodule Optimus.ColumnFormatter do
@type align :: :left | :center | :right
@type column_spec :: pos_integer | {pos_integer, align}
@spec format([column_spec], [String.t()]) :: {:ok, [[String.t()]]} | {:error, String.t()}
def format(column_specs, strings) do
with :ok <- validate(column_specs, strings), do: {:ok, format_valid(column_specs, strings)}
end
defp format_valid(column_specs, strings) do
column_specs
|> Enum.zip(strings)
|> Enum.map(fn {spec, string} -> {spec, split(string, width(spec))} end)
|> to_lines
end
defp validate(column_specs, strings) do
with :ok <- validate_lengths(column_specs, strings),
:ok <- validate_strings(strings),
:ok <- validate_specs(column_specs),
do: :ok
end
defp validate_lengths(column_specs, strings) when is_list(column_specs) and is_list(strings) do
if length(column_specs) == length(strings) do
:ok
else
{:error, "arguments should have equal lengths"}
end
end
defp validate_lengths(_, _), do: {:error, "arguments should be lists"}
defp validate_strings([]), do: :ok
defp validate_strings([string | strings]) do
if is_binary(string) do
validate_strings(strings)
else
{:error, "second argument is expected to be a list of strings"}
end
end
defp validate_specs([]), do: :ok
defp validate_specs([spec | specs]) do
case spec do
val when is_integer(val) and val > 0 ->
validate_specs(specs)
{val, align}
when is_integer(val) and val > 0 and (align == :left or align == :center or align == :right) ->
validate_specs(specs)
_ ->
{:error,
"first argument is expected to be a list of width specs, where width spec is a positive integer or a tuple {width, align} where width is a positive integer and align is one of: :left, :center or :right"}
end
end
defp split(string, max_width) do
string
|> String.graphemes()
|> split_graphemes(max_width, [])
end
defp to_lines(split_strings, converted \\ []) do
{heads, rests} = heads_and_rests(split_strings, [], [])
if have_nonempty?(heads) do
line = to_line(heads, [])
to_lines(rests, [line | converted])
else
Enum.reverse(converted)
end
end
defp to_line([], converted) do
converted
|> Enum.reverse()
end
defp to_line([head | heads], converted) do
{spec, line_part} = head
padded_line_part = pad(spec, line_part)
to_line(heads, [padded_line_part | converted])
end
defp pad(spec, nil), do: spec |> width |> spaces
defp pad(spec, {string_width, string}) do
case align(spec) do
:left ->
padding_string = spaces(width(spec) - string_width)
string <> padding_string
:right ->
padding_string = spaces(width(spec) - string_width)
padding_string <> string
:center ->
padding_count = width(spec) - string_width
left = div(padding_count, 2)
right = padding_count - left
spaces(left) <> string <> spaces(right)
end
end
defp spaces(len) when is_integer(len) and len < 0, do: ""
defp spaces(len) when is_integer(len) and len >= 0, do: String.duplicate(" ", len)
defp heads_and_rests([], heads, rests), do: {Enum.reverse(heads), Enum.reverse(rests)}
defp heads_and_rests([{spec, [string_head | string_rest]} | rest], heads, rests) do
heads_and_rests(rest, [{spec, string_head} | heads], [{spec, string_rest} | rests])
end
defp heads_and_rests([{spec, []} | rest], heads, rests) do
heads_and_rests(rest, [{spec, nil} | heads], [{spec, []} | rests])
end
defp width({w, _}), do: w
defp width(w) when is_integer(w), do: w
defp align({_, a}), do: a
defp align(w) when is_integer(w), do: :left
# Greedily splits a grapheme list into chunks of at most `max_width`
# graphemes, preferring to break at the last space inside each window.
# Each produced chunk is `{width, string}` (see `join_and_keep_width/1`),
# returned in original order.
def split_graphemes([], _, already_split), do: Enum.reverse(already_split)
def split_graphemes(graphemes, max_width, already_split) do
# Take one full-width window; `rest` is everything past it.
{max_graphemes, rest} = Enum.split(graphemes, max_width)
[head, tail] =
if space_first?(rest) do
# The window ends exactly on a word boundary (or input is exhausted,
# since space_first?([]) is true): keep the whole window.
[max_graphemes, []]
else
split_by_last_space_grapheme(max_graphemes)
end
{new_rest, current} =
case head do
# failed to split: no space inside the window, so hard-break it —
# emit the window (`tail`) as-is and continue with `rest`
[] ->
{rest, tail}
_ ->
# broke at a space: emit up to the space, push the remainder back
{tail ++ rest, head}
end
formatted_current = current |> trim |> join_and_keep_width
split_graphemes(new_rest, max_width, [formatted_current | already_split])
end
# Splits `list` at its LAST space grapheme, returning `[prefix, suffix]`
# where `prefix` ends with that space and `suffix` is everything after it.
# If the list contains no space, `prefix` is `[]` and `suffix` is the list.
defp split_by_last_space_grapheme(list) do
  {after_space, rest} =
    list
    |> Enum.reverse()
    |> Enum.split_while(&not_space?/1)

  [Enum.reverse(rest), Enum.reverse(after_space)]
end
# Drops leading and trailing space graphemes (interior spaces are kept).
defp trim(graphemes) do
  without_leading = Enum.drop_while(graphemes, &space?/1)
  without_trailing = without_leading |> Enum.reverse() |> Enum.drop_while(&space?/1)
  Enum.reverse(without_trailing)
end
# Joins graphemes back into a string, keeping the grapheme count alongside
# so padding never has to re-measure: returns `{width, string}`.
def join_and_keep_width(graphemes) do
  {Enum.count(graphemes), Enum.join(graphemes)}
end
# Matches graphemes consisting entirely of whitespace.
@space ~r/\A\s+\z/

# True when the grapheme is whitespace.
defp space?(grapheme), do: grapheme =~ @space
# Negation of `space?/1`, handy as a capture for `Enum.split_while/2`.
defp not_space?(grapheme), do: not space?(grapheme)
# True when the list starts with a space grapheme; an empty list counts as
# true so an exhausted remainder is treated as a word boundary.
defp space_first?([]), do: true
defp space_first?([first | _rest]), do: space?(first)
# True while at least one column still has content to emit on this round.
defp have_nonempty?(heads) do
  Enum.any?(heads, fn {_spec, value} -> not is_nil(value) end)
end
end
|
lib/optimus/column_formatter.ex
| 0.773302
| 0.725892
|
column_formatter.ex
|
starcoder
|
defmodule Driver do
@moduledoc"""
GenServer wrapper around the elevator hardware server's TCP protocol.
All credits to Jostein for implementing this.
## Description
You must start the driver with `start_link()` or `start_link(ip_address, port)` before any of the other functions will work.
## API:
```
{:ok, driver_pid} = Driver.start_link
set_motor_direction( driver_pid, motor_direction )
set_order_button_light( driver_pid, button_direction ,floor, on_or_off )
set_floor_indicator( driver_pid, floor )
set_stop_button_light( driver_pid, on_or_off )
set_door_open_light( driver_pid, on_or_off )
get_order_button_state( driver_pid,floor, button_direction )
get_floor_sensor_state( driver_pid )
get_stop_button_state( driver_pid )
get_obstruction_switch_state( driver_pid )
```
## Further reading
GenServers are a really neat way to make servers without having to rewrite the same code all the time. It works *exactly* the same in Erlang as well, but it is called gen_server instead. The Erlang documentation is kind of hard to understand, so use the Elixir video and "translate" it to Erlang (gen_server:call(...) instead of GenServer.call(...)).
Short version: a GenServer implements the basic parts of a server, and the code in this file is the "blanks you have to fill in".
### A youtube-video that explains GenServers and Supervisors
https://www.youtube.com/watch?v=3EjRvaCOl94
"""
use GenServer
# Milliseconds to wait for the hardware server's 4-byte TCP reply.
@call_timeout 1000
# Wire encodings: API atoms mapped to the protocol's byte values.
@button_map %{:hall_up => 0, :hall_down => 1, :cab => 2}
@state_map %{:on => 1, :off => 0}
# NOTE(review): :down is encoded as 255 — presumably -1 as an unsigned
# byte; verify against the hardware-server protocol spec.
@direction_map %{:up => 1, :down => 255, :stop => 0}
# Define Types used by dialyzer
@type button :: :hall_up | :hall_down | :cab
@type motor :: :up | :down | :stop
@type state :: :on | :off
@type ip_address :: {integer(), integer(), integer(), integer()}
@doc"""
Starts the driver against the default local simulator (127.0.0.1:15657).
Returns: `{:ok, driver_pid}` or `{:error, reason}`
If an error is returned, the process was not started properly.
"""
@spec start_link :: :ignore | {:error, any} | {:ok, pid}
def start_link do
start_link {127,0,0,1}, 15657
end
@doc"""
Arguments: IP-address (tuple of 4 ints) and port (integer)
Returns: `{:ok, driver_pid}` or `{:error, reason}`
If an error is returned, the process was not started properly.
"""
@spec start_link(ip_address, integer()) :: :ignore | {:error, any} | {:ok, pid}
def start_link address, port do
GenServer.start_link(__MODULE__, [address, port], [])
end
# Stops the driver process (closes it normally via GenServer.stop/1).
def stop pid do
GenServer.stop pid
end
# GenServer callback: opens the TCP connection to the hardware server.
# `{:active, false}` means replies are read explicitly with :gen_tcp.recv/3.
# The socket itself is the whole server state.
def init [address, port] do
{:ok, socket} =:gen_tcp.connect(address, port, [{:active, false}])
{:ok, socket}
end
# User API ----------------------------------------------
# direction can be :up/:down/:stop
@doc"""
Arguments: driver_pid, (:up/:down/:stop)
Returns nothing.
## Examples
{:ok, pid} = Driver.start_link
Driver.set_motor_direction( pid,:up )
"""
@spec set_motor_direction(pid, motor) :: :ok
def set_motor_direction pid, direction do
GenServer.cast pid, {:set_motor_direction, direction}
end
# button_type can be :hall_up/:hall_down/:cab
# state can be :on/:off
@doc"""
Arguments: driver_pid, (:hall_up/:hall_down/:cab), floor (integer), (:on/:off)
Returns nothing.
## Examples
{:ok, driver_pid} = Driver.start_link
Driver.set_order_button_light( driver_pid, :hall_up,2, :on )
"""
@spec set_order_button_light(pid, button, integer, state) :: :ok
def set_order_button_light pid, button_type, floor, state do
GenServer.cast pid, {:set_order_button_light, button_type, floor, state}
end
@doc"""
Arguments: driver_pid, (0/1/.../number_of_floors -1)
Returns nothing.
## Examples
{:ok, driver_pid} = Driver.start_link
set_floor_indicator(driver_pid, 2)
"""
@spec set_floor_indicator(pid, integer()) :: :ok
def set_floor_indicator pid, floor do
GenServer.cast pid, {:set_floor_indicator, floor}
end
# state can be :on/:off
@doc"""
Arguments: driver_pid, (:on/:off)
Returns nothing.
## Examples
{:ok, driver_pid} = Driver.start_link
set_stop_button_light(driver_pid, :on)
"""
@spec set_stop_button_light(pid, any) :: :ok
def set_stop_button_light pid, state do
GenServer.cast pid, {:set_stop_button_light, state}
end
@doc"""
Arguments: driver_pid, (:on/:off)
Returns nothing.
## Examples
{:ok, driver_pid} = Driver.start_link
set_door_open_light(driver_pid, :on)
"""
@spec set_door_open_light(pid , state()) :: :ok
def set_door_open_light pid, state do
GenServer.cast pid, {:set_door_open_light, state}
end
@doc"""
Arguments: driver_pid, (0/1/.../number_of_floors -1), (:hall_up/:hall_down/:cab)
Returns (0/1)
0 means not pressed
## Examples
{:ok, driver_pid} = Driver.start_link
Driver.get_order_button_state( driver_pid,1, :hall_up )
"""
@spec get_order_button_state(pid, integer(), button) :: any
def get_order_button_state pid, floor, button_type do
GenServer.call pid, {:get_order_button_state, floor, button_type}
end
@doc"""
Arguments: driver_pid
Returns the current floor (integer) or `:between_floors`.
## Examples
{:ok, driver_pid} = Driver.start_link
Driver.get_floor_sensor_state( driver_pid )
"""
def get_floor_sensor_state pid do
GenServer.call pid, :get_floor_sensor_state
end
@doc"""
Arguments: driver_pid
Returns `:active` or `:inactive`.
`:inactive` means not pressed.
## Examples
{:ok, driver_pid} = Driver.start_link
Driver.get_stop_button_state( driver_pid )
"""
def get_stop_button_state pid do
GenServer.call pid, :get_stop_button_state
end
@doc"""
Arguments: driver_pid
Returns `:active` or `:inactive`.
`:inactive` means not obstructed.
## Examples
{:ok, driver_pid} = Driver.start_link
Driver.get_obstruction_switch_state( driver_pid )
"""
def get_obstruction_switch_state pid do
GenServer.call pid, :get_obstruction_switch_state
end
# Casts ----------------------------------------------
# Each cast writes one 4-byte command frame: [opcode, arg1, arg2, arg3].
def handle_cast {:set_motor_direction, direction}, socket do
:gen_tcp.send(socket, [1, @direction_map[direction], 0, 0])
{:noreply, socket}
end
def handle_cast {:set_order_button_light, button_type, floor, state}, socket do
:gen_tcp.send socket, [2, @button_map[button_type], floor, @state_map[state]]
{:noreply, socket}
end
def handle_cast {:set_floor_indicator, floor}, socket do
:gen_tcp.send socket, [3, floor, 0, 0]
{:noreply, socket}
end
def handle_cast {:set_door_open_light, state}, socket do
:gen_tcp.send socket, [4, @state_map[state], 0, 0]
{:noreply, socket}
end
def handle_cast {:set_stop_button_light, state}, socket do
:gen_tcp.send socket, [5, @state_map[state], 0, 0]
{:noreply, socket}
end
# Calls ----------------------------------------------
# Each call writes a 4-byte request and then blocks on the 4-byte reply.
# A timeout or unexpected reply fails the pattern match and crashes the
# driver process deliberately ("let it crash").
def handle_call {:get_order_button_state, floor, order_type}, _from, socket do
:gen_tcp.send socket, [6, @button_map[order_type], floor, 0]
{:ok, [6, state, 0, 0]} = :gen_tcp.recv(socket, 4, @call_timeout)
{:reply, state, socket}
end
def handle_call :get_floor_sensor_state, _from, socket do
:gen_tcp.send socket, [7, 0, 0, 0]
# Second reply byte signals whether the cab is at a floor at all.
button_state = case :gen_tcp.recv(socket, 4, @call_timeout) do
{:ok, [7, 0, _, 0]} -> :between_floors
{:ok, [7, 1, floor, 0]} -> floor
end
{:reply, button_state, socket}
end
def handle_call :get_stop_button_state, _from, socket do
:gen_tcp.send socket, [8, 0, 0, 0]
button_state = case :gen_tcp.recv(socket, 4, @call_timeout) do
{:ok, [8, 0, 0, 0]} -> :inactive
{:ok, [8, 1, 0, 0]} -> :active
end
{:reply, button_state, socket}
end
def handle_call :get_obstruction_switch_state, _from, socket do
:gen_tcp.send socket, [9, 0, 0, 0]
button_state = case :gen_tcp.recv(socket, 4, @call_timeout) do
{:ok, [9, 0, 0, 0]} -> :inactive
{:ok, [9, 1, 0, 0]} -> :active
end
{:reply, button_state, socket}
end
end
|
heis_driver/lib/heis_driver.ex
| 0.860999
| 0.705335
|
heis_driver.ex
|
starcoder
|
defmodule Intro do
  @moduledoc """
  Introductory exercises: numeric helpers plus hand-rolled list and string
  operations implemented with recursion and pattern matching.

  Function names keep their original camelCase spelling (`listLength`,
  `startsWith`, ...) because existing callers depend on them, even though
  snake_case is the Elixir convention.
  """

  @doc "Returns the smaller of two values."
  def smallest(n1, n2) when n1 < n2, do: n1
  def smallest(_n1, n2), do: n2

  @doc "Returns the smallest of four values."
  def smallest(n1, n2, n3, n4), do: smallest(smallest(n1, n2), smallest(n3, n4))

  @doc "Returns the larger of two values."
  def largest(n1, n2) when n1 > n2, do: n1
  def largest(_n1, n2), do: n2

  @doc "Returns the largest of three values."
  def largest(n1, n2, n3), do: largest(largest(n1, n2), n3)

  @doc "Ticket price by age: under 18 -> 10, 18..64 -> 20, 65 and over -> 15."
  def price(age) when age < 18, do: 10
  def price(age) when age <= 64, do: 20
  def price(_age), do: 15

  @doc "Returns the successor of `input`."
  def next(input), do: input + 1

  @doc "Factorial of a non-negative integer."
  def factorial(0), do: 1
  def factorial(input) when input > 0, do: input * factorial(input - 1)

  @doc "The n-th Fibonacci number (naive double recursion, exponential time)."
  def fibonacci(0), do: 0
  def fibonacci(1), do: 1
  def fibonacci(input), do: fibonacci(input - 1) + fibonacci(input - 2)

  @doc "True when the list has no elements."
  # Fixed: `length(list) == 0` walked the whole list (O(n)); comparing with
  # the empty list gives the same boolean in O(1).
  def empty(list), do: list == []

  @doc "First element of a list; `[]` for the empty list."
  def first([]), do: []
  def first([head | _tail]), do: head

  @doc "Last element of a list; `[]` for the empty list."
  def last([]), do: []
  def last([head | tail]) when tail === [], do: head
  def last([_head | tail]), do: last(tail)

  @doc "Prepends `input` to `list` (O(1))."
  def prepend(input, list), do: [input | list]

  @doc "Appends `input` to the end of `list` (O(n))."
  def append([], input), do: [input]
  def append([head | tail], input), do: [head | append(tail, input)]

  @doc "Number of elements in a list."
  def listLength([]), do: 0
  def listLength([_head | tail]), do: listLength(tail) + 1

  @doc "Sum of a list of numbers."
  def sum([]), do: 0
  def sum([head | tail]), do: head + sum(tail)

  @doc "Arithmetic mean of a list; 0 for the empty list."
  def average([]), do: 0
  def average(list), do: sum(list) / listLength(list)

  @doc "Concatenates two lists."
  # Fixed: the previous version called `append/2` once per element of the
  # second list, re-walking the first list every time (O(n*m)); recursing on
  # the first list yields the same result in O(n).
  def concat([], list2), do: list2
  def concat([head | tail], list2), do: [head | concat(tail, list2)]

  @doc "True when `text` starts with the grapheme `first`."
  # Fixed: previously crashed (`hd([])`) when `text` was empty; an empty
  # string starts with nothing.
  def startsWith(_first, ""), do: false
  def startsWith(first, text), do: first === hd(String.graphemes(text))

  @doc "True when `text` ends with the grapheme `last`."
  def endsWith(last, text), do: last == last(String.graphemes(text))

  @doc "Removes a single trailing newline from `text`, if present."
  def chomp(text) do
    cond do
      endsWith("\n", text) == true -> removeLast(text)
      true -> text
    end
  end

  @doc "Returns `input` (a string) without its final grapheme."
  def removeLast(input, acc \\ 0)

  def removeLast(input, _acc),
    do: removeLast(String.graphemes(input), [], listLength(String.graphemes(input)) - 1)

  # Fixed: the guard was `acc == 0`, so the empty string (acc == -1) matched
  # no clause and raised FunctionClauseError; `acc <= 0` makes
  # removeLast("") return "".
  def removeLast(_list, text, acc) when acc <= 0, do: List.to_string(text)

  def removeLast([head | tail], text, acc) when acc > 0,
    do: removeLast(tail, append(text, head), acc - 1)
end
|
lib/intro.ex
| 0.524882
| 0.721007
|
intro.ex
|
starcoder
|
defmodule Timex.Date do
# Compatibility shim: Elixir 1.11 added Date.new!/4, Date.beginning_of_week/2,
# Date.end_of_week/2, Date.end_of_month/1 and Date.day_of_week/2 with a
# `starting_on` argument. On older Elixir versions this module provides
# equivalent implementations; on 1.11+ it simply delegates to the stdlib.
# The version branch is resolved at compile time.
@moduledoc false
if Version.compare(System.version(), "1.11.0") == :lt do
@doc false
# Builds a Date or raises ArgumentError, mirroring Date.new!/4.
def new!(year, month, day, calendar \\ Calendar.ISO) do
case Date.new(year, month, day, calendar) do
{:ok, value} ->
value
{:error, reason} ->
raise ArgumentError, "cannot build date, reason: #{inspect(reason)}"
end
end
@doc false
# First day of the week containing `date`, for the given week start.
def beginning_of_week(date, starting_on \\ :default)
def beginning_of_week(%{calendar: Calendar.ISO} = date, starting_on) do
%{year: year, month: month, day: day} = date
iso_days = Calendar.ISO.date_to_iso_days(year, month, day)
{year, month, day} =
case iso_days_to_day_of_week(iso_days, starting_on) do
# Already the first day of the week.
1 ->
{year, month, day}
day_of_week ->
Calendar.ISO.date_from_iso_days(iso_days - day_of_week + 1)
end
%Date{calendar: Calendar.ISO, year: year, month: month, day: day}
end
def beginning_of_week(%{calendar: calendar} = date, starting_on) do
%{year: year, month: month, day: day} = date
# Non-ISO calendars: rely on the calendar's own 3-tuple
# {day_of_week, first_day_of_week, last_day_of_week} contract.
case calendar.day_of_week(year, month, day, starting_on) do
{day_of_week, day_of_week, _} ->
%Date{calendar: calendar, year: year, month: month, day: day}
{day_of_week, first_day_of_week, _} ->
Date.add(date, -(day_of_week - first_day_of_week))
end
end
@doc false
# Last day of the week containing `date`, for the given week start.
def end_of_week(date, starting_on \\ :default)
def end_of_week(%{calendar: Calendar.ISO} = date, starting_on) do
%{year: year, month: month, day: day} = date
iso_days = Calendar.ISO.date_to_iso_days(year, month, day)
{year, month, day} =
case iso_days_to_day_of_week(iso_days, starting_on) do
# Already the last day of the week.
7 ->
{year, month, day}
day_of_week ->
Calendar.ISO.date_from_iso_days(iso_days + 7 - day_of_week)
end
%Date{calendar: Calendar.ISO, year: year, month: month, day: day}
end
def end_of_week(%{calendar: calendar} = date, starting_on) do
%{year: year, month: month, day: day} = date
case calendar.day_of_week(year, month, day, starting_on) do
{day_of_week, _, day_of_week} ->
%Date{calendar: calendar, year: year, month: month, day: day}
{day_of_week, _, last_day_of_week} ->
Date.add(date, last_day_of_week - day_of_week)
end
end
@doc false
# Last day of the month containing `date`.
def end_of_month(%{year: year, month: month, calendar: calendar} = date) do
day = Date.days_in_month(date)
%Date{year: year, month: month, day: day, calendar: calendar}
end
@doc false
# Day of week (1..7, relative to `starting_on`) for a date struct.
def day_of_week(%{year: y, month: m, day: d}, starting_on \\ :default) do
with {dow, _, _} <- day_of_week(y, m, d, starting_on), do: dow
end
@doc false
# Day of week as a {day_of_week, 1, 7} tuple (matches the Calendar contract).
def day_of_week(year, month, day, starting_on) do
iso_days = Calendar.ISO.date_to_iso_days(year, month, day)
{iso_days_to_day_of_week(iso_days, starting_on), 1, 7}
end
@doc false
# Maps an ISO-days count to 1..7, where 1 is the `starting_on` weekday.
def iso_days_to_day_of_week(iso_days, starting_on) do
Integer.mod(iso_days + day_of_week_offset(starting_on), 7) + 1
end
# Per-weekday offsets chosen so `iso_days_to_day_of_week/2` returns 1 on
# the requested week start; :default is the same as :monday (both 5).
defp day_of_week_offset(:default), do: 5
defp day_of_week_offset(:wednesday), do: 3
defp day_of_week_offset(:thursday), do: 2
defp day_of_week_offset(:friday), do: 1
defp day_of_week_offset(:saturday), do: 0
defp day_of_week_offset(:sunday), do: 6
defp day_of_week_offset(:monday), do: 5
defp day_of_week_offset(:tuesday), do: 4
else
# Elixir >= 1.11: the standard library provides all of these directly.
@doc false
defdelegate new!(year, month, day, calendar \\ Calendar.ISO), to: Date
@doc false
defdelegate beginning_of_week(date, starting_on \\ :default), to: Date
@doc false
defdelegate end_of_week(date, starting_on \\ :default), to: Date
@doc false
defdelegate end_of_month(date), to: Date
@doc false
defdelegate day_of_week(date, starting_on \\ :default), to: Date
@doc false
defdelegate day_of_week(year, month, day, starting_on), to: Calendar.ISO
end
end
|
lib/timex/date.ex
| 0.776326
| 0.689077
|
date.ex
|
starcoder
|
defmodule AWS.AppConfig do
@moduledoc """
AWS AppConfig
Use AWS AppConfig, a capability of AWS Systems Manager, to create, manage, and
quickly deploy application configurations.
AppConfig supports controlled deployments to applications of any size and
includes built-in validation checks and monitoring. You can use AppConfig with
applications hosted on Amazon EC2 instances, AWS Lambda, containers, mobile
applications, or IoT devices.
To prevent errors when deploying application configurations, especially for
production systems where a simple typo could cause an unexpected outage,
AppConfig includes validators. A validator provides a syntactic or semantic
check to ensure that the configuration you want to deploy works as intended. To
validate your application configuration data, you provide a schema or a Lambda
function that runs against the configuration. The configuration deployment or
update can only proceed when the configuration data is valid.
During a configuration deployment, AppConfig monitors the application to ensure
that the deployment is successful. If the system encounters an error, AppConfig
rolls back the change to minimize impact for your application users. You can
configure a deployment strategy for each application or environment that
includes deployment criteria, including velocity, bake time, and alarms to
monitor. Similar to error monitoring, if a deployment triggers an alarm,
AppConfig automatically rolls back to the previous version.
AppConfig supports multiple use cases. Here are some examples.
* **Application tuning**: Use AppConfig to carefully introduce
changes to your application that can only be tested with production traffic.
* **Feature toggle**: Use AppConfig to turn on new features that
require a timely deployment, such as a product launch or announcement.
* **Allow list**: Use AppConfig to allow premium subscribers to
access paid content.
* **Operational issues**: Use AppConfig to reduce stress on your
application when a dependency or other external factor impacts the system.
This reference is intended to be used with the [AWS AppConfig User Guide](http://docs.aws.amazon.com/systems-manager/latest/userguide/appconfig.html).
"""
alias AWS.Client
alias AWS.Request
# Static service descriptor consumed by `AWS.Request` to sign (SigV4,
# signing name "appconfig") and route calls for this service.
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2019-10-09",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "appconfig",
global?: false,
protocol: "rest-json",
service_id: "AppConfig",
signature_version: "v4",
signing_name: "appconfig",
target_prefix: nil
}
end
@doc """
An application in AppConfig is a logical unit of code that provides capabilities
for your customers.
For example, an application can be a microservice that runs on Amazon EC2
instances, a mobile application installed by your users, a serverless
application using Amazon API Gateway and AWS Lambda, or any system you run on
behalf of others.
"""
def create_application(%Client{} = client, input, options \\ []) do
url_path = "/applications"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
Information that enables AppConfig to access the configuration source.
Valid configuration sources include Systems Manager (SSM) documents, SSM
Parameter Store parameters, and Amazon S3 objects. A configuration profile
includes the following information.
* The Uri location of the configuration data.
* The AWS Identity and Access Management (IAM) role that provides
access to the configuration data.
* A validator for the configuration data. Available validators
include either a JSON Schema or an AWS Lambda function.
For more information, see [Create a Configuration and a Configuration Profile](http://docs.aws.amazon.com/systems-manager/latest/userguide/appconfig-creating-configuration-and-profile.html)
in the *AWS AppConfig User Guide*.
"""
def create_configuration_profile(%Client{} = client, application_id, input, options \\ []) do
url_path = "/applications/#{AWS.Util.encode_uri(application_id)}/configurationprofiles"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
A deployment strategy defines important criteria for rolling out your
configuration to the designated targets.
A deployment strategy includes: the overall duration required, a percentage of
targets to receive the deployment during each interval, an algorithm that
defines how percentage grows, and bake time.
"""
def create_deployment_strategy(%Client{} = client, input, options \\ []) do
url_path = "/deploymentstrategies"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
For each application, you define one or more environments.
An environment is a logical deployment group of AppConfig targets, such as
applications in a `Beta` or `Production` environment. You can also define
environments for application subcomponents such as the `Web`, `Mobile` and
`Back-end` components for your application. You can configure Amazon CloudWatch
alarms for each environment. The system monitors alarms during a configuration
deployment. If an alarm is triggered, the system rolls back the configuration.
"""
def create_environment(%Client{} = client, application_id, input, options \\ []) do
url_path = "/applications/#{AWS.Util.encode_uri(application_id)}/environments"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
Create a new configuration in the AppConfig configuration store.
"""
def create_hosted_configuration_version(
%Client{} = client,
application_id,
configuration_profile_id,
input,
options \\ []
) do
url_path =
"/applications/#{AWS.Util.encode_uri(application_id)}/configurationprofiles/#{AWS.Util.encode_uri(configuration_profile_id)}/hostedconfigurationversions"
{headers, input} =
[
{"ContentType", "Content-Type"},
{"Description", "Description"},
{"LatestVersionNumber", "Latest-Version-Number"}
]
|> Request.build_params(input)
query_params = []
options =
Keyword.put(
options,
:response_header_parameters,
[
{"Application-Id", "ApplicationId"},
{"Configuration-Profile-Id", "ConfigurationProfileId"},
{"Content-Type", "ContentType"},
{"Description", "Description"},
{"Version-Number", "VersionNumber"}
]
)
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
Delete an application.
Deleting an application does not delete a configuration from a host.
"""
def delete_application(%Client{} = client, application_id, input, options \\ []) do
url_path = "/applications/#{AWS.Util.encode_uri(application_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Delete a configuration profile.
Deleting a configuration profile does not delete a configuration from a host.
"""
def delete_configuration_profile(
%Client{} = client,
application_id,
configuration_profile_id,
input,
options \\ []
) do
url_path =
"/applications/#{AWS.Util.encode_uri(application_id)}/configurationprofiles/#{AWS.Util.encode_uri(configuration_profile_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Delete a deployment strategy.
Deleting a deployment strategy does not delete a configuration from a host.
"""
def delete_deployment_strategy(%Client{} = client, deployment_strategy_id, input, options \\ []) do
url_path = "/deployementstrategies/#{AWS.Util.encode_uri(deployment_strategy_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Delete an environment.
Deleting an environment does not delete a configuration from a host.
"""
def delete_environment(%Client{} = client, application_id, environment_id, input, options \\ []) do
url_path =
"/applications/#{AWS.Util.encode_uri(application_id)}/environments/#{AWS.Util.encode_uri(environment_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Delete a version of a configuration from the AppConfig configuration store.
"""
def delete_hosted_configuration_version(
%Client{} = client,
application_id,
configuration_profile_id,
version_number,
input,
options \\ []
) do
url_path =
"/applications/#{AWS.Util.encode_uri(application_id)}/configurationprofiles/#{AWS.Util.encode_uri(configuration_profile_id)}/hostedconfigurationversions/#{AWS.Util.encode_uri(version_number)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Retrieve information about an application.
"""
def get_application(%Client{} = client, application_id, options \\ []) do
url_path = "/applications/#{AWS.Util.encode_uri(application_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Receive information about a configuration.
AWS AppConfig uses the value of the `ClientConfigurationVersion` parameter to
identify the configuration version on your clients. If you don’t send
`ClientConfigurationVersion` with each call to `GetConfiguration`, your clients
receive the current configuration. You are charged each time your clients
receive a configuration.
To avoid excess charges, we recommend that you include the
`ClientConfigurationVersion` value with every call to `GetConfiguration`. This
value must be saved on your client. Subsequent calls to `GetConfiguration` must
pass this value by using the `ClientConfigurationVersion` parameter.
"""
def get_configuration(
%Client{} = client,
application,
configuration,
environment,
client_configuration_version \\ nil,
client_id,
options \\ []
) do
url_path =
"/applications/#{AWS.Util.encode_uri(application)}/environments/#{AWS.Util.encode_uri(environment)}/configurations/#{AWS.Util.encode_uri(configuration)}"
headers = []
query_params = []
query_params =
if !is_nil(client_id) do
[{"client_id", client_id} | query_params]
else
query_params
end
query_params =
if !is_nil(client_configuration_version) do
[{"client_configuration_version", client_configuration_version} | query_params]
else
query_params
end
options =
Keyword.put(
options,
:response_header_parameters,
[
{"Configuration-Version", "ConfigurationVersion"},
{"Content-Type", "ContentType"}
]
)
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Retrieve information about a configuration profile.
"""
def get_configuration_profile(
%Client{} = client,
application_id,
configuration_profile_id,
options \\ []
) do
url_path =
"/applications/#{AWS.Util.encode_uri(application_id)}/configurationprofiles/#{AWS.Util.encode_uri(configuration_profile_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Retrieve information about a configuration deployment.
"""
def get_deployment(
%Client{} = client,
application_id,
deployment_number,
environment_id,
options \\ []
) do
url_path =
"/applications/#{AWS.Util.encode_uri(application_id)}/environments/#{AWS.Util.encode_uri(environment_id)}/deployments/#{AWS.Util.encode_uri(deployment_number)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Retrieve information about a deployment strategy.
A deployment strategy defines important criteria for rolling out your
configuration to the designated targets. A deployment strategy includes: the
overall duration required, a percentage of targets to receive the deployment
during each interval, an algorithm that defines how percentage grows, and bake
time.
"""
def get_deployment_strategy(%Client{} = client, deployment_strategy_id, options \\ []) do
url_path = "/deploymentstrategies/#{AWS.Util.encode_uri(deployment_strategy_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Retrieve information about an environment.
An environment is a logical deployment group of AppConfig applications, such as
applications in a `Production` environment or in an `EU_Region` environment.
Each configuration deployment targets an environment. You can enable one or more
Amazon CloudWatch alarms for an environment. If an alarm is triggered during a
deployment, AppConfig roles back the configuration.
"""
def get_environment(%Client{} = client, application_id, environment_id, options \\ []) do
url_path =
"/applications/#{AWS.Util.encode_uri(application_id)}/environments/#{AWS.Util.encode_uri(environment_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Get information about a specific configuration version.
"""
def get_hosted_configuration_version(
%Client{} = client,
application_id,
configuration_profile_id,
version_number,
options \\ []
) do
url_path =
"/applications/#{AWS.Util.encode_uri(application_id)}/configurationprofiles/#{AWS.Util.encode_uri(configuration_profile_id)}/hostedconfigurationversions/#{AWS.Util.encode_uri(version_number)}"
headers = []
query_params = []
options =
Keyword.put(
options,
:response_header_parameters,
[
{"Application-Id", "ApplicationId"},
{"Configuration-Profile-Id", "ConfigurationProfileId"},
{"Content-Type", "ContentType"},
{"Description", "Description"},
{"Version-Number", "VersionNumber"}
]
)
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
List all applications in your AWS account.
"""
def list_applications(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
url_path = "/applications"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"next_token", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"max_results", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Lists the configuration profiles for an application.
"""
def list_configuration_profiles(
%Client{} = client,
application_id,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/applications/#{AWS.Util.encode_uri(application_id)}/configurationprofiles"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"next_token", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"max_results", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
List deployment strategies.
"""
def list_deployment_strategies(
%Client{} = client,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/deploymentstrategies"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"next_token", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"max_results", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Lists the deployments for an environment.
"""
def list_deployments(
%Client{} = client,
application_id,
environment_id,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path =
"/applications/#{AWS.Util.encode_uri(application_id)}/environments/#{AWS.Util.encode_uri(environment_id)}/deployments"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"next_token", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"max_results", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
List the environments for an application.
"""
def list_environments(
%Client{} = client,
application_id,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/applications/#{AWS.Util.encode_uri(application_id)}/environments"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"next_token", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"max_results", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
View a list of configurations stored in the AppConfig configuration store by
version.
"""
def list_hosted_configuration_versions(
%Client{} = client,
application_id,
configuration_profile_id,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path =
"/applications/#{AWS.Util.encode_uri(application_id)}/configurationprofiles/#{AWS.Util.encode_uri(configuration_profile_id)}/hostedconfigurationversions"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"next_token", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"max_results", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Retrieves the list of key-value tags assigned to the resource.
"""
def list_tags_for_resource(%Client{} = client, resource_arn, options \\ []) do
  # Plain GET with no query parameters, custom headers, or body.
  Request.request_rest(
    client,
    metadata(),
    :get,
    "/tags/#{AWS.Util.encode_uri(resource_arn)}",
    [],
    [],
    nil,
    options,
    200
  )
end
@doc """
Starts a deployment.
"""
def start_deployment(%Client{} = client, application_id, environment_id, input, options \\ []) do
  path =
    "/applications/#{AWS.Util.encode_uri(application_id)}/environments/#{AWS.Util.encode_uri(environment_id)}/deployments"

  # 201 (created) is the expected success status for a new deployment.
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, 201)
end
@doc """
Stops a deployment.
This API action works only on deployments that have a status of `DEPLOYING`.
This action moves the deployment to a status of `ROLLED_BACK`.
"""
def stop_deployment(
      %Client{} = client,
      application_id,
      deployment_number,
      environment_id,
      input,
      options \\ []
    ) do
  path =
    "/applications/#{AWS.Util.encode_uri(application_id)}/environments/#{AWS.Util.encode_uri(environment_id)}/deployments/#{AWS.Util.encode_uri(deployment_number)}"

  # AppConfig models "stop" as a DELETE on the deployment resource; a 202
  # (accepted) indicates the rollback was initiated.
  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, 202)
end
@doc """
Metadata to assign to an AppConfig resource.
Tags help organize and categorize your AppConfig resources. Each tag consists of
a key and an optional value, both of which you define. You can specify a maximum
of 50 tags for a resource.
"""
def tag_resource(%Client{} = client, resource_arn, input, options \\ []) do
  # POST the tag map in the request body; 204 (no content) signals success.
  Request.request_rest(
    client,
    metadata(),
    :post,
    "/tags/#{AWS.Util.encode_uri(resource_arn)}",
    [],
    [],
    input,
    options,
    204
  )
end
@doc """
Deletes a tag key and value from an AppConfig resource.
"""
def untag_resource(%Client{} = client, resource_arn, input, options \\ []) do
  # "TagKeys" is lifted out of the input map and sent as the "tagKeys"
  # query-string parameter; the remainder of the input becomes the body.
  {query_params, body} = Request.build_params([{"TagKeys", "tagKeys"}], input)

  Request.request_rest(
    client,
    metadata(),
    :delete,
    "/tags/#{AWS.Util.encode_uri(resource_arn)}",
    query_params,
    [],
    body,
    options,
    204
  )
end
@doc """
Updates an application.
"""
def update_application(%Client{} = client, application_id, input, options \\ []) do
  # Partial update via PATCH; the changed fields travel in `input`.
  Request.request_rest(
    client,
    metadata(),
    :patch,
    "/applications/#{AWS.Util.encode_uri(application_id)}",
    [],
    [],
    input,
    options,
    200
  )
end
@doc """
Updates a configuration profile.
"""
def update_configuration_profile(
      %Client{} = client,
      application_id,
      configuration_profile_id,
      input,
      options \\ []
    ) do
  path =
    "/applications/#{AWS.Util.encode_uri(application_id)}/configurationprofiles/#{AWS.Util.encode_uri(configuration_profile_id)}"

  # Partial update via PATCH; the changed fields travel in `input`.
  Request.request_rest(client, metadata(), :patch, path, [], [], input, options, 200)
end
@doc """
Updates a deployment strategy.
"""
def update_deployment_strategy(%Client{} = client, deployment_strategy_id, input, options \\ []) do
  path = "/deploymentstrategies/#{AWS.Util.encode_uri(deployment_strategy_id)}"

  # Partial update via PATCH; the changed fields travel in `input`.
  Request.request_rest(client, metadata(), :patch, path, [], [], input, options, 200)
end
@doc """
Updates an environment.
"""
def update_environment(%Client{} = client, application_id, environment_id, input, options \\ []) do
  path =
    "/applications/#{AWS.Util.encode_uri(application_id)}/environments/#{AWS.Util.encode_uri(environment_id)}"

  # Partial update via PATCH; the changed fields travel in `input`.
  Request.request_rest(client, metadata(), :patch, path, [], [], input, options, 200)
end
@doc """
Uses the validators in a configuration profile to validate a configuration.
"""
def validate_configuration(
      %Client{} = client,
      application_id,
      configuration_profile_id,
      input,
      options \\ []
    ) do
  path =
    "/applications/#{AWS.Util.encode_uri(application_id)}/configurationprofiles/#{AWS.Util.encode_uri(configuration_profile_id)}/validators"

  # "ConfigurationVersion" is moved from the input map to the
  # "configuration_version" query-string parameter; the rest is the body.
  {query_params, body} =
    Request.build_params([{"ConfigurationVersion", "configuration_version"}], input)

  Request.request_rest(client, metadata(), :post, path, query_params, [], body, options, 204)
end
end
|
lib/aws/generated/app_config.ex
| 0.883274
| 0.463019
|
app_config.ex
|
starcoder
|
defmodule Oban.Config do
  @moduledoc """
  The Config struct validates and encapsulates Oban instance state.
  Options passed to `Oban.start_link/1` are validated and stored in a config struct. Internal
  modules and plugins are always passed the config with a `:conf` key.
  """

  @type t :: %__MODULE__{
          dispatch_cooldown: pos_integer(),
          engine: module(),
          notifier: module(),
          name: Oban.name(),
          node: binary(),
          plugins: [module() | {module() | Keyword.t()}],
          prefix: binary(),
          queues: [{atom(), Keyword.t()}],
          repo: module(),
          shutdown_grace_period: timeout(),
          log: false | Logger.level(),
          get_dynamic_repo: nil | (() -> pid() | atom())
        }

  @type option :: {:name, module()} | {:conf, t()}

  @enforce_keys [:node, :repo]
  defstruct dispatch_cooldown: 5,
            engine: Oban.Queue.BasicEngine,
            notifier: Oban.PostgresNotifier,
            name: Oban,
            node: nil,
            plugins: [],
            prefix: "public",
            queues: [],
            repo: nil,
            shutdown_grace_period: :timer.seconds(15),
            log: false,
            get_dynamic_repo: nil

  defguardp is_pos_integer(interval) when is_integer(interval) and interval > 0

  @doc false
  # Builds a validated Config struct from start_link options.
  #
  # Normalization happens in three phases, and the order matters:
  #   1. legacy options (:crontab, :poll_interval) are rewritten into plugin
  #      entries, defaults are filled in, and nil plugin/queue lists are
  #      coerced to [];
  #   2. every remaining option is validated (raises ArgumentError on the
  #      first invalid one);
  #   3. queues and plugins are normalized into their canonical shapes.
  @spec new(Keyword.t()) :: t()
  def new(opts) when is_list(opts) do
    opts =
      opts
      |> crontab_to_plugin()
      |> poll_interval_to_plugin()
      |> Keyword.put_new(:node, node_name())
      |> Keyword.update(:plugins, [], &(&1 || []))
      |> Keyword.update(:queues, [], &(&1 || []))
      |> Keyword.delete(:circuit_backoff)

    Enum.each(opts, &validate_opt!/1)

    opts =
      opts
      |> Keyword.update!(:queues, &parse_queues/1)
      |> Keyword.update!(:plugins, &normalize_plugins/1)

    struct!(__MODULE__, opts)
  end

  @doc false
  # Picks a stable node identifier: the distributed-Erlang node name when the
  # node is alive, the Heroku DYNO name when present, or the hostname.
  @spec node_name(%{optional(binary()) => binary()}) :: binary()
  def node_name(env \\ System.get_env()) do
    cond do
      Node.alive?() ->
        to_string(node())

      Map.has_key?(env, "DYNO") ->
        Map.get(env, "DYNO")

      true ->
        :inet.gethostname()
        |> elem(1)
        |> to_string()
    end
  end

  @doc false
  # "InspectedName.node" identifier used to tell instances apart.
  @spec to_ident(t()) :: binary()
  def to_ident(%__MODULE__{name: name, node: node}) do
    inspect(name) <> "." <> to_string(node)
  end

  @doc false
  @spec match_ident?(t(), binary()) :: boolean()
  def match_ident?(%__MODULE__{} = conf, ident) when is_binary(ident) do
    to_ident(conf) == ident
  end

  # Helpers

  @cron_keys [:crontab, :timezone]

  # Rewrites top-level :crontab/:timezone options into an Oban.Plugins.Cron
  # plugin entry. When :crontab is absent (or plugins are user-managed in an
  # unexpected shape) the cron keys are simply dropped.
  defp crontab_to_plugin(opts) do
    case {opts[:plugins], opts[:crontab]} do
      {plugins, [_ | _]} when is_list(plugins) or is_nil(plugins) ->
        {cron_opts, base_opts} = Keyword.split(opts, @cron_keys)

        plugin = {Oban.Plugins.Cron, cron_opts}

        Keyword.update(base_opts, :plugins, [plugin], &[plugin | &1])

      _ ->
        Keyword.drop(opts, @cron_keys)
    end
  end

  # Rewrites the legacy :poll_interval option into an Oban.Plugins.Stager
  # plugin entry; without an interval the Stager is added with defaults.
  defp poll_interval_to_plugin(opts) do
    case {opts[:plugins], opts[:poll_interval]} do
      {plugins, interval} when (is_list(plugins) or is_nil(plugins)) and is_integer(interval) ->
        plugin = {Oban.Plugins.Stager, interval: interval}

        opts
        |> Keyword.delete(:poll_interval)
        |> Keyword.update(:plugins, [plugin], &[plugin | &1])

      {plugins, nil} when is_list(plugins) or is_nil(plugins) ->
        plugin = Oban.Plugins.Stager

        Keyword.update(opts, :plugins, [plugin], &[plugin | &1])

      _ ->
        Keyword.drop(opts, [:poll_interval])
    end
  end

  # One validate_opt!/1 clause per known option; the catch-all clause at the
  # bottom rejects unknown options. Each clause raises ArgumentError with a
  # descriptive message when the value is invalid.

  defp validate_opt!({:dispatch_cooldown, cooldown}) do
    unless is_pos_integer(cooldown) do
      raise ArgumentError,
            "expected :dispatch_cooldown to be a positive integer, got: #{inspect(cooldown)}"
    end
  end

  defp validate_opt!({:engine, engine}) do
    unless Code.ensure_loaded?(engine) and function_exported?(engine, :init, 2) do
      raise ArgumentError,
            "expected :engine to be an Oban.Queue.Engine, got: #{inspect(engine)}"
    end
  end

  defp validate_opt!({:notifier, notifier}) do
    unless Code.ensure_loaded?(notifier) and function_exported?(notifier, :listen, 2) do
      raise ArgumentError,
            "expected :notifier to be an Oban.Notifier, got: #{inspect(notifier)}"
    end
  end

  defp validate_opt!({:name, _}), do: :ok

  defp validate_opt!({:node, node}) do
    unless is_binary(node) and String.trim(node) != "" do
      raise ArgumentError,
            "expected :node to be a non-empty binary, got: #{inspect(node)}"
    end
  end

  defp validate_opt!({:plugins, plugins}) do
    unless is_list(plugins) and Enum.all?(plugins, &valid_plugin?/1) do
      raise ArgumentError,
            "expected :plugins to be a list of modules or {module, keyword} tuples " <>
              ", got: #{inspect(plugins)}"
    end
  end

  defp validate_opt!({:prefix, prefix}) do
    unless is_binary(prefix) and Regex.match?(~r/^[a-z0-9_]+$/i, prefix) do
      raise ArgumentError,
            "expected :prefix to be a binary with alphanumeric characters, got: #{inspect(prefix)}"
    end
  end

  defp validate_opt!({:queues, queues}) do
    unless Keyword.keyword?(queues) and Enum.all?(queues, &valid_queue?/1) do
      raise ArgumentError,
            "expected :queues to be a keyword list of {atom, integer} pairs or " <>
              "a list of {atom, keyword} pairs, got: #{inspect(queues)}"
    end
  end

  defp validate_opt!({:repo, repo}) do
    unless Code.ensure_loaded?(repo) and function_exported?(repo, :__adapter__, 0) do
      raise ArgumentError,
            "expected :repo to be an Ecto.Repo, got: #{inspect(repo)}"
    end
  end

  defp validate_opt!({:shutdown_grace_period, period}) do
    unless is_pos_integer(period) do
      raise ArgumentError,
            "expected :shutdown_grace_period to be a positive integer, got: #{inspect(period)}"
    end
  end

  @log_levels ~w(false emergency alert critical error warning warn notice info debug)a

  defp validate_opt!({:log, log}) do
    unless log in @log_levels do
      raise ArgumentError,
            "expected :log to be one of #{inspect(@log_levels)}, got: #{inspect(log)}"
    end
  end

  defp validate_opt!({:get_dynamic_repo, fun}) do
    unless is_nil(fun) or is_function(fun, 0) do
      raise ArgumentError,
            "expected :get_dynamic_repo to be nil or a zero arity function, got: #{inspect(fun)}"
    end
  end

  defp validate_opt!(option) do
    raise ArgumentError, "unknown option provided #{inspect(option)}"
  end

  # A queue entry is either {name, limit :: pos_integer} or {name, keyword}.
  defp valid_queue?({_name, opts}) do
    is_pos_integer(opts) or Keyword.keyword?(opts)
  end

  # A plugin is a module implementing init/1, optionally paired with options.
  defp valid_plugin?({plugin, opts}) do
    is_atom(plugin) and
      Code.ensure_loaded?(plugin) and
      function_exported?(plugin, :init, 1) and
      Keyword.keyword?(opts)
  end

  defp valid_plugin?(plugin), do: valid_plugin?({plugin, []})

  # Canonicalizes {name, limit} entries into {name, limit: limit} keyword form.
  defp parse_queues(queues) do
    for {name, value} <- queues do
      opts = if is_integer(value), do: [limit: value], else: value

      {name, opts}
    end
  end

  # Manually specified plugins will be overwritten by auto-specified plugins unless we reverse the
  # plugin list. The order doesn't matter as they are supervised one-for-one.
  defp normalize_plugins(plugins) do
    plugins
    |> Enum.reverse()
    |> Enum.uniq_by(fn
      {module, _opts} -> module
      module -> module
    end)
  end
end
|
lib/oban/config.ex
| 0.822082
| 0.432363
|
config.ex
|
starcoder
|
require Utils
defmodule D1 do
  @moduledoc """
  --- Day 1: Report Repair ---
  After saving Christmas five years in a row, you've decided to take a vacation at a nice resort on a tropical island. Surely, Christmas will go on without you.
  The tropical island has its own currency and is entirely cash-only. The gold coins used there have a little picture of a starfish; the locals just call them stars. None of the currency exchanges seem to have heard of them, but somehow, you'll need to find fifty of these coins by the time you arrive so you can pay the deposit on your room.
  To save your vacation, you need to get all fifty stars by December 25th.
  Collect stars by solving puzzles. Two puzzles will be made available on each day in the Advent calendar; the second puzzle is unlocked when you complete the first. Each puzzle grants one star. Good luck!
  Before you leave, the Elves in accounting just need you to fix your expense report (your puzzle input); apparently, something isn't quite adding up.
  Specifically, they need you to find the two entries that sum to 2020 and then multiply those two numbers together.
  Of course, your expense report is much larger. Find the two entries that sum to 2020; what do you get if you multiply them together?
  --- Part Two ---
  The Elves in accounting are thankful for your help; one of them even offers you a starfish coin they had left over from a past vacation. They offer you a second one if you can find three numbers in your expense report that meet the same criteria.
  In your expense report, what is the product of the three entries that sum to 2020?
  """
  @behaviour Day

  # For each candidate in `against`, compute its complement to 2020, then
  # multiply together every entry that matches some complement.
  defp find(entries, against) do
    complements = MapSet.new(against, &(2020 - &1))

    entries
    |> MapSet.intersection(complements)
    |> Enum.reduce(fn value, acc -> value * acc end)
  end

  @impl true
  def solve(input) do
    entries = input |> Utils.to_ints() |> MapSet.new()

    # Pre-compute all distinct pair sums below 2020 so part two reduces to
    # the same complement search as part one.
    pair_sums = for a <- entries, b <- entries, b > a, a + b < 2020, do: a + b

    {find(entries, entries), find(entries, pair_sums)}
  end
end
|
lib/days/01.ex
| 0.709523
| 0.569494
|
01.ex
|
starcoder
|
defmodule State.Metadata do
  @moduledoc """
  Holds metadata for State data.
  Currently, the only metadata being stored is when a service last received
  new data as well as the current feed version.
  """
  use GenServer

  @table_name :state_metadata

  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, nil, name: opts[:name] || __MODULE__)
  end

  @impl GenServer
  def init(_) do
    # Public, concurrent-read/write ETS table shared by all callers. If the
    # table already exists (e.g. this server restarted), reuse it rather than
    # crashing on a duplicate-name error.
    table_opts = [:set, :named_table, :public, read_concurrency: true, write_concurrency: true]
    table_name = @table_name

    ^table_name =
      case :ets.info(@table_name) do
        :undefined -> :ets.new(@table_name, table_opts)
        _ -> table_name
      end

    {:ok, nil}
  end

  @doc false
  def table_name, do: @table_name

  @doc """
  Marks that a state's data was refreshed with new data.
  """
  def state_updated(mod, timestamp) do
    # Truncate to whole seconds so the stored timestamp matches the
    # second-granularity HTTP header rendered below.
    timestamp = %{timestamp | microsecond: {0, 0}}

    inserts = [
      {mod, timestamp},
      {{mod, :header}, rfc1123_format(timestamp)}
    ]

    :ets.insert(table_name(), inserts)
  rescue
    # Best-effort write: metadata updates must never crash the caller.
    _ -> :error
  end

  @doc """
  Sets latest version, start_date and end_date of the Feed.
  """
  def feed_updated({version, start_date, end_date}) do
    :ets.insert(table_name(), {State.Feed, {version, start_date, end_date}})
  rescue
    # Best-effort write: metadata updates must never crash the caller.
    _ -> :error
  end

  @doc """
  Gets the timestamp for when the state's data was last updated.

  Falls back to the current UTC time when no timestamp has been recorded for
  `mod`, so callers always receive a `DateTime`.
  """
  # Spec corrected: the fallback branch returns DateTime.utc_now(), so this
  # function never returns nil.
  @spec last_updated(atom) :: DateTime.t()
  def last_updated(mod) when is_atom(mod) do
    case :ets.lookup(table_name(), mod) do
      [{^mod, timestamp}] -> timestamp
      _ -> DateTime.utc_now()
    end
  end

  @doc """
  Gets the last-modified header value for the given state.
  """
  @spec last_modified_header(atom) :: String.t()
  def last_modified_header(mod) when is_atom(mod) do
    case :ets.lookup(table_name(), {mod, :header}) do
      [{_, header}] -> header
      _ -> rfc1123_format(DateTime.utc_now())
    end
  end

  @doc """
  Gets a tuple of the current feed version, start_date, and end_date.
  """
  @spec feed_metadata() :: {String.t(), Date.t(), Date.t()}
  def feed_metadata do
    case :ets.lookup(table_name(), State.Feed) do
      [{State.Feed, {version, start_date, end_date}}] ->
        {version, start_date, end_date}

      _ ->
        # Cache miss: load from State.Feed and populate the table for the
        # next caller.
        metadata = State.Feed.feed_metadata()
        feed_updated(metadata)
        metadata
    end
  end

  @doc """
  Fetches when each service was last updated.
  """
  def updated_timestamps do
    %{
      alert: last_updated(State.Alert),
      facility: last_updated(State.Facility),
      prediction: last_updated(State.Prediction),
      route: last_updated(State.Route),
      schedule: last_updated(State.Schedule),
      service: last_updated(State.Service),
      shape: last_updated(State.Shape),
      stop: last_updated(State.Stop),
      trip: last_updated(State.Trip),
      vehicle: last_updated(State.Vehicle)
    }
  end

  # Renders an RFC 1123 date for HTTP headers. Timex's {RFC1123z} format ends
  # in a literal "Z"; HTTP requires "GMT", so the 26-byte prefix is kept and
  # the suffix swapped.
  defp rfc1123_format(datetime) do
    {:ok, <<rendered::binary-26, "Z">>} = Timex.format(datetime, "{RFC1123z}")

    rendered <> "GMT"
  end
end
|
apps/state/lib/state/metadata.ex
| 0.843815
| 0.576184
|
metadata.ex
|
starcoder
|
defmodule Type.Function.Var do
  @moduledoc """
  a special container type indicating that the function has a type dependency.
  ### Example:
  The following typespec:
  ```elixir
  @spec identity(x) :: x when x: var
  ```
  generates the following typespec:
  ```elixir
  %Type.Function{
    params: [%Type.Function.Var{name: :x}],
    return: %Type.Function.Var{name: :x}
  }
  ```
  if you further put a restriction on this typespec:
  ```elixir
  @spec identity(x) :: x when x: integer
  ```
  the `Type.Function.Var` will further exhibit the issued constraint:
  ```elixir
  %Type.Function{
    params: [%Type.Function.Var{name: :x, constraint: %Type{name: :integer}}],
    return: %Type.Function.Var{name: :x, constraint: %Type{name: :integer}}
  }
  ```
  """
  import Type, only: :macros

  @enforce_keys [:name]
  # Unconstrained variables default to the any() type.
  defstruct @enforce_keys ++ [constraint: any()]

  @type t :: %__MODULE__{
    name: atom,
    constraint: Type.t
  }

  @spec resolve(Type.t, %{t => Type.t}) :: Type.t
  @doc false
  # Substitutes resolved types for variables, walking composite types
  # recursively. When the type itself is a key of `map`, the substitution is
  # the intersection of the variable's constraint with the mapped type.
  def resolve(type, map) when is_map_key(map, type) do
    Type.intersection(type.constraint, map[type])
  end
  # NOTE(review): Enum.map over the :required/:optional fields produces lists
  # of {k, v} tuples; confirm downstream code accepts lists here rather than
  # maps.
  def resolve(t = %Type.Map{required: rmap, optional: omap}, map) do
    %{t | required: Enum.map(rmap, &resolve(&1, map)),
          optional: Enum.map(omap, &resolve(&1, map))}
  end
  # NOTE(review): `type` is enumerated while `final` is resolved directly —
  # confirm %Type.List{}.type is a collection in this representation.
  def resolve(t = %Type.List{type: type, final: final}, map) do
    %{t | type: Enum.map(type, &resolve(&1, map)), final: resolve(final, map)}
  end
  def resolve(t = %Type.Union{of: types}, map) do
    %{t | of: Enum.map(types, &resolve(&1, map))}
  end
  def resolve(t = %Type.Tuple{elements: elements}, map) do
    %{t | elements: Enum.map(elements, &resolve(&1, map))}
  end
  # Key/value pairs (e.g. map entries) are resolved element-wise.
  def resolve({k, v}, map) do
    {resolve(k, map), resolve(v, map)}
  end
  # Anything else (including unresolved variables absent from `map`) passes
  # through unchanged.
  def resolve(type, _map), do: type
end
defimpl Inspect, for: Type.Function.Var do
  import Type, only: :macros
  import Inspect.Algebra

  # With the :show_constraints custom option set, render "name: constraint".
  # A variable constrained to any() is shown as "name: var".
  def inspect(var, opts = %{custom_options: [show_constraints: true]}) do
    case var.constraint do
      any() -> "#{var.name}: var"
      _ ->
        # Clear the custom option so inspecting the nested constraint does
        # not re-trigger the constrained rendering.
        clean_opts = %{opts | custom_options: []}
        concat("#{var.name}: ", to_doc(var.constraint, clean_opts))
    end
  end
  # Default rendering: just the variable's name.
  def inspect(var, _opts) do
    "#{var.name}"
  end
end
defimpl Type.Properties, for: Type.Function.Var do
  alias Type.Function.Var

  # A variable sorts into the same type group as its constraint.
  def typegroup(%{constraint: constraint}) do
    Type.typegroup(constraint)
  end

  # Two variables compare by constraint first, with the name as a tiebreak.
  def compare(lhs, rhs = %Var{}) do
    case Type.compare(lhs.constraint, rhs.constraint) do
      comp when comp != :eq -> comp
      :eq -> Type.compare(lhs.name, rhs.name)
    end
  end
  # Against a non-variable type, an equal constraint still orders the
  # variable strictly before the other type.
  def compare(%{constraint: constraint}, rhs) do
    case Type.compare(constraint, rhs) do
      :eq -> :lt
      comp -> comp
    end
  end

  import Type, only: :macros
  import Type.Helpers

  # intersection/subtype/usable_as below are macros from Type.Helpers that
  # wrap the given clauses with shared boilerplate.
  intersection do
    def intersection(_, %Var{}) do
      raise "can't intersect two var types"
    end
    def intersection(left = %Var{}, right) do
      # Narrow the variable's constraint by the other type; a none()
      # intersection collapses the whole variable to none().
      case Type.intersection(left.constraint, right) do
        none() -> none()
        type -> %{left | constraint: type}
      end
    end
  end

  subtype do
    def subtype?(left, right = %Var{}) do
      Type.subtype?(left.constraint, right.constraint)
    end
    def subtype?(left = %{}, right) do
      Type.subtype?(left.constraint, right)
    end
  end

  usable_as do
    def usable_as(%Var{}, _right, _meta) do
      # NOTE(review): variables are expected to be resolved before usable_as
      # is consulted — confirm this invariant holds at all call sites.
      raise "unreachable"
    end
  end

  def normalize(type), do: type
end
|
lib/type/function.var.ex
| 0.932323
| 0.940681
|
function.var.ex
|
starcoder
|
defmodule Ueberauth.Strategy.Meli do
  @moduledoc """
  Implements an ÜeberauthMeli strategy for authentication with mercadolibre.com
  When configuring the strategy in the Üeberauth providers, you can specify some defaults.
  * `default_scope` - The scope to request by default from mercadolibre (permissions). Default "read"
  * `oauth2_module` - The OAuth2 module to use. Default Ueberauth.Strategy.Meli.OAuth
  ```elixir
  config :ueberauth, Ueberauth,
    providers: [
      meli: { Ueberauth.Strategy.Meli, [default_scope: "read,write"] }
    ]
  ```
  """
  use Ueberauth.Strategy,
    default_scope: "read",
    oauth2_module: Ueberauth.Strategy.Meli.OAuth

  alias Ueberauth.Auth.Info
  alias Ueberauth.Auth.Credentials
  alias Ueberauth.Auth.Extra
  alias Ueberauth.Strategy.Helpers

  @doc false
  # Phase 1 of the OAuth flow: build the authorize URL and redirect there.
  def handle_request!(conn) do
    scopes = conn.params["scope"] || option(conn, :default_scope)
    opts = [scope: scopes]

    # NOTE(review): state is added manually here AND via
    # Helpers.with_state_param/2 below — confirm the param is not duplicated.
    opts =
      if conn.params["state"], do: Keyword.put(opts, :state, conn.params["state"]), else: opts

    callback_url = callback_url(conn)

    # Trim a trailing "?" from the callback URL. 0..-2 keeps all but the
    # last grapheme; NOTE(review): negative ranges without a step are
    # deprecated in recent Elixir (use 0..-2//1).
    callback_url =
      if String.ends_with?(callback_url, "?"),
        do: String.slice(callback_url, 0..-2),
        else: callback_url

    opts =
      opts
      |> Keyword.put(:redirect_uri, callback_url)
      |> Helpers.with_state_param(conn)

    module = option(conn, :oauth2_module)
    redirect!(conn, apply(module, :authorize_url!, [opts]))
  end

  @doc false
  # Phase 2: exchange the authorization code for a token, then load the user.
  def handle_callback!(%Plug.Conn{params: %{"code" => code}} = conn) do
    module = option(conn, :oauth2_module)
    params = [code: code]
    redirect_uri = get_redirect_uri(conn)

    options = %{
      options: [
        client_options: [redirect_uri: redirect_uri]
      ]
    }

    token = apply(module, :get_token!, [params, options])

    if token.access_token == nil do
      set_errors!(conn, [
        error(token.other_params["error"], token.other_params["error_description"])
      ])
    else
      conn
      |> store_token(token)
      |> fetch_user(token)
    end
  end

  @doc false
  # Callback hit without a "code" param: the provider denied or the request
  # was malformed.
  def handle_callback!(conn) do
    set_errors!(conn, [error("missing_code", "No code received")])
  end

  @doc false
  # Stashes the OAuth token in conn privates for credentials/1 to read.
  defp store_token(conn, token) do
    put_private(conn, :meli_token, token)
  end

  @doc false
  # Clears strategy privates so tokens don't leak past the auth pipeline.
  def handle_cleanup!(conn) do
    conn
    |> put_private(:meli_user, nil)
    |> put_private(:meli_token, nil)
  end

  @doc false
  # Maps the stored OAuth token onto the Ueberauth credentials struct.
  def credentials(conn) do
    token = conn.private.meli_token

    %Credentials{
      token: token.access_token,
      refresh_token: token.refresh_token,
      expires_at: token.expires_at,
      token_type: token.token_type,
      expires: !!token.expires_at
    }
  end

  @doc false
  # Maps the fetched MercadoLibre user payload onto the Ueberauth info struct.
  def info(conn) do
    user = conn.private[:meli_user]
    phone = user["phone"]

    %Info{
      name: "#{user["first_name"]} #{user["last_name"]}",
      first_name: user["first_name"],
      last_name: user["last_name"],
      nickname: user["nickname"],
      email: user["email"],
      location: get_in(user, ["address", "city"]),
      image: get_in(user, ["thumbnail", "picture_url"]),
      phone: "#{phone["areacode"]} #{phone["number"]}",
      urls: %{profile: user["permalink"]}
    }
  end

  @doc false
  def extra(conn), do: %Extra{raw_info: %{user: conn.private[:meli_user]}}

  @doc false
  def uid(conn), do: conn.private[:meli_user]["id"]

  # Skip the user fetch entirely when an earlier step already failed.
  defp fetch_user(%Plug.Conn{assigns: %{ueberauth_failure: _fails}} = conn, _), do: conn

  # Loads the authenticated user's profile from the /users/me endpoint.
  defp fetch_user(conn, token) do
    case Ueberauth.Strategy.Meli.OAuth.get(token, "/users/me", []) do
      {:ok, %OAuth2.Response{status_code: 401, body: _body}} ->
        set_errors!(conn, [error("token", "unauthorized")])

      {:ok, %OAuth2.Response{status_code: status_code, body: user}}
      when status_code in 200..399 ->
        put_private(conn, :meli_user, user)

      {:error, %OAuth2.Error{reason: reason}} ->
        set_errors!(conn, [error("OAuth2", reason)])
    end
  end

  # Per-provider option with fallback to the strategy defaults.
  defp option(conn, key) do
    Keyword.get(options(conn), key, Keyword.get(default_options(), key))
  end

  # The configured :redirect_uri wins; otherwise derive it from the conn.
  defp get_redirect_uri(%Plug.Conn{} = conn) do
    config = Application.get_env(:ueberauth, Ueberauth)
    redirect_uri = Keyword.get(config, :redirect_uri)

    if is_nil(redirect_uri) do
      callback_url(conn)
    else
      redirect_uri
    end
  end
end
|
lib/ueberauth/strategy/meli.ex
| 0.693784
| 0.648647
|
meli.ex
|
starcoder
|
defmodule AstraeaVirgo.Cache.Utils.Hash do
  @moduledoc """
  Interface for Cache Generic Hash
  - User
  - Judgement
  Generate `exist?/1` and `show/1` operation for Cache
  Note: Callback functions can be private
  """

  @doc """
  Cache Show Key
  """
  @callback get_show_key(id :: String.t()) :: String.t()

  @doc """
  Cache Field name of info
  Example:
  ```elixir
  defp get_field_name(), do: ["id", "name", "extensions", "time", "memory"]
  ```
  """
  @callback get_field_name() :: list(String.t())

  @doc """
  Get empty info from Cache
  return list of nil, length is the same as the field name list
  Example:
  ```elixir
  defp get_empty_value(), do: [nil, nil, nil, nil, nil]
  ```
  """
  @callback get_empty_value() :: list(nil)

  @doc """
  Get the specified Info from Database
  """
  @callback get_info_from_db(id :: String.t()) :: {:ok, nil | list()} | {:error, atom() | String.t() | {:database, String.t()}}

  @doc """
  Parse Cache Fields
  """
  @callback parse(data :: list()) :: map()

  # Injects exist?/1 and show/1 into the using module, implemented in terms
  # of the callbacks above. Both first consult Redis (HMGET) and treat an
  # all-nil field list as a cache miss, then fall back to the database.
  defmacro __using__(_opts) do
    quote do
      @spec exist?(id :: String.t()) :: boolean() | {:error, atom() | String.t() | {:cache, String.t()} | {:database, String.t()}}
      @doc """
      Check ID exist
      """
      def exist?(id) do
        empty = get_empty_value()
        # Cache miss (all fields nil) AND database miss -> false; any hit in
        # either store -> true; errors are normalized by ErrorHandler.
        with {:ok, ^empty} <- Redix.command(:redix, ["HMGET", get_show_key(id)] ++ get_field_name()),
             {:ok, nil} <- get_info_from_db(id) do
          false
        else
          {:ok, _result} -> true
          {:error, _reason} = error -> AstraeaVirgo.Cache.Utils.ErrorHandler.parse(error)
        end
      end

      @spec show(id :: String.t()) :: {:ok, nil | map()} | {:error, atom() | String.t() | {:cache, String.t()} | {:database, String.t()}}
      @doc """
      Get Specified Info
      - return `nil` when data is not exists
      - return parsed data when successfully
      """
      def show(id) do
        empty = get_empty_value()
        # Same lookup order as exist?/1, but a hit is passed through parse/1.
        with {:ok, ^empty} <- Redix.command(:redix, ["HMGET", get_show_key(id)] ++ get_field_name()),
             {:ok, nil} <- get_info_from_db(id) do
          {:ok, nil}
        else
          {:ok, result} -> {:ok, parse(result)}
          {:error, _reason} = error -> AstraeaVirgo.Cache.Utils.ErrorHandler.parse(error)
        end
      end
    end
  end
end
|
lib/virgo/cache/utils/hash.ex
| 0.885526
| 0.643077
|
hash.ex
|
starcoder
|
defmodule WatchFaces.Faces do
  @moduledoc """
  The Faces context.
  """
  import Ecto.Query, warn: false

  alias WatchFaces.Repo
  alias WatchFaces.Faces.Face

  @doc """
  Returns the list of faces.
  ## Examples
      iex> list_faces()
      [%Face{}, ...]
  """
  def list_faces do
    Face
    |> Repo.all()
    |> Repo.preload([:keywords])
  end

  @doc """
  Returns all faces whose name contains `query`, compared case-insensitively
  after NFKC normalization.
  """
  def search_face(query) do
    normalized =
      query
      |> String.normalize(:nfkc)
      |> String.downcase()

    for face <- Repo.preload(Repo.all(Face), [:keywords]),
        face.name |> String.downcase() |> String.contains?(normalized) do
      face
    end
  end

  @doc """
  Gets a single face.
  Raises `Ecto.NoResultsError` if the Face does not exist.
  ## Examples
      iex> get_face!(123)
      %Face{}
      iex> get_face!(456)
      ** (Ecto.NoResultsError)
  """
  def get_face!(id) do
    Face
    |> Repo.get!(id)
    |> Repo.preload([:user, :keywords])
  end

  @doc """
  Creates a face.
  ## Examples
      iex> create_face(%{field: value})
      {:ok, %Face{}}
      iex> create_face(%{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def create_face(attrs \\ %{}) do
    attrs
    |> Face.insert_changeset()
    |> Repo.insert()
  end

  @doc """
  Updates a face.
  ## Examples
      iex> update_face(face, %{field: new_value})
      {:ok, %Face{}}
      iex> update_face(face, %{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def update_face(%Face{} = face, attrs) do
    Repo.update(Face.changeset(face, attrs))
  end

  @doc """
  Deletes a face.
  ## Examples
      iex> delete_face(face)
      {:ok, %Face{}}
      iex> delete_face(face)
      {:error, %Ecto.Changeset{}}
  """
  def delete_face(%Face{} = face), do: Repo.delete(face)

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking face changes.
  ## Examples
      iex> change_face(face)
      %Ecto.Changeset{data: %Face{}}
  """
  def change_face(%Face{} = face, attrs \\ %{}), do: Face.changeset(face, attrs)
end
|
lib/watch_faces/faces.ex
| 0.8415
| 0.406803
|
faces.ex
|
starcoder
|
defmodule Annex.Layer do
  @moduledoc """
  The Annex.Layer is the module that defines types, callbacks, and helper for Layers.
  By implementing the Layer behaviour a struct/model can be used along side other
  Layers to compose the layers of a deep neural network.
  """
  alias Annex.{
    Data,
    Layer.Backprop,
    LayerConfig,
    Shape
  }

  @type t() :: struct()

  @callback feedforward(t(), Data.data()) :: {struct(), Data.data()}
  @callback backprop(t(), Data.data(), Backprop.t()) :: {t(), Data.data(), Backprop.t()}
  @callback init_layer(LayerConfig.t(module())) :: t()
  @callback data_type(t()) :: Data.type()
  @callback shapes(t()) :: {Shape.t(), Shape.t()}

  # shapes/1 and data_type/1 are optional; the has_shapes?/has_data_type?
  # helpers below let callers probe for them before dispatching.
  @optional_callbacks [
    shapes: 1,
    data_type: 1
  ]

  # Marks the using module as a layer (via __annex__/1) and pulls in the
  # common aliases/imports every layer implementation needs.
  defmacro __using__(_) do
    quote do
      alias Annex.Layer
      @behaviour Layer
      alias Annex.AnnexError
      alias Annex.LayerConfig
      require Annex.Utils
      import Annex.Utils, only: [validate: 3]
      def __annex__(:is_layer?), do: true
    end
  end

  # The functions below dispatch dynamically on the struct's module, so any
  # struct implementing the behaviour can be driven through this interface.

  @spec feedforward(struct(), any()) :: {struct(), any()}
  def feedforward(%module{} = layer, inputs) do
    module.feedforward(layer, inputs)
  end

  @spec backprop(struct(), any(), Backprop.t()) :: {struct(), any(), Backprop.t()}
  def backprop(%module{} = layer, error, props) do
    module.backprop(layer, error, props)
  end

  @spec init_layer(LayerConfig.t(module())) :: t()
  def init_layer(%LayerConfig{} = cfg), do: LayerConfig.init_layer(cfg)

  @spec has_data_type?(module() | struct()) :: boolean()
  def has_data_type?(%module{}), do: has_data_type?(module)
  def has_data_type?(module) when is_atom(module), do: function_exported?(module, :data_type, 1)

  @spec data_type(atom | struct()) :: Data.type()
  def data_type(%module{} = layer), do: module.data_type(layer)

  @spec shapes(t()) :: {Shape.t(), Shape.t()}
  def shapes(%module{} = layer), do: module.shapes(layer)

  @spec has_shapes?(module() | struct()) :: boolean()
  def has_shapes?(%module{}), do: has_shapes?(module)
  def has_shapes?(module) when is_atom(module), do: function_exported?(module, :shapes, 1)

  @spec is_layer?(any()) :: boolean()
  def is_layer?(%module{}) do
    is_layer?(module)
  end

  def is_layer?(item) do
    is_atom(item) && function_exported?(item, :__annex__, 1) && item.__annex__(:is_layer?)
  end

  # Returns the layer's input shape, or nil when shapes/1 is not implemented
  # (the if without else falls through to nil).
  def input_shape(layer) do
    if has_shapes?(layer) do
      {input_shape, _} = shapes(layer)
      input_shape
    end
  end

  # Returns the layer's output shape, or nil when shapes/1 is not implemented.
  def output_shape(layer) do
    if has_shapes?(layer) do
      {_, output_shape} = shapes(layer)
      output_shape
    end
  end

  # Converts `data` to the layer's preferred data type at the given shape.
  @spec convert(t(), Data.data(), Shape.t()) :: Data.data()
  def convert(layer, data, shape) do
    layer
    |> data_type()
    |> Data.convert(data, shape)
  end

  # Shape expected for data flowing forward into the layer; nil when the
  # layer declares no shapes.
  @spec forward_shape(t()) :: Shape.t() | nil
  def forward_shape(layer) do
    layer
    |> input_shape()
    |> case do
      nil -> nil
      shape -> [Shape.resolve_columns(shape), :any]
    end
  end

  # Shape expected for error data flowing backward into the layer; nil when
  # the layer declares no shapes.
  @spec backward_shape(t()) :: Shape.t() | nil
  def backward_shape(layer) do
    layer
    |> input_shape()
    |> case do
      nil ->
        nil

      shape ->
        [:any, Shape.resolve_rows(shape)]
    end
  end
end
|
lib/annex/layer.ex
| 0.823364
| 0.612686
|
layer.ex
|
starcoder
|
import Kernel, except: [apply: 2]
defmodule Ecto.Query.Builder.Join do
@moduledoc false
alias Ecto.Query.Builder
alias Ecto.Query.{JoinExpr, QueryExpr}
@doc """
Escapes a join expression (not including the `on` expression).
It returns a tuple containing the binds, the on expression (if available)
and the association expression.
## Examples
    iex> escape(quote(do: x in "foo"), [], __ENV__)
    {:x, {"foo", nil}, nil, %{}}
    iex> escape(quote(do: "foo"), [], __ENV__)
    {:_, {"foo", nil}, nil, %{}}
    iex> escape(quote(do: x in Sample), [], __ENV__)
    {:x, {nil, {:__aliases__, [alias: false], [:Sample]}}, nil, %{}}
    iex> escape(quote(do: x in {"foo", Sample}), [], __ENV__)
    {:x, {"foo", {:__aliases__, [alias: false], [:Sample]}}, nil, %{}}
    iex> escape(quote(do: x in {"foo", :sample}), [], __ENV__)
    {:x, {"foo", :sample}, nil, %{}}
    iex> escape(quote(do: c in assoc(p, :comments)), [p: 0], __ENV__)
    {:c, nil, {0, :comments}, %{}}
    iex> escape(quote(do: x in fragment("foo")), [], __ENV__)
    {:x, {:{}, [], [:fragment, [], [raw: "foo"]]}, nil, %{}}
"""
@spec escape(Macro.t, Keyword.t, Macro.Env.t) :: {[atom], Macro.t | nil, Macro.t | nil, %{}}
# `var in source`: escape the right side, then attach the binding variable.
def escape({:in, _, [{var, _, context}, expr]}, vars, env)
    when is_atom(var) and is_atom(context) do
  {_, expr, assoc, params} = escape(expr, vars, env)
  {var, expr, assoc, params}
end
# subquery/1 and subquery/2: defer to Ecto.Query.subquery at runtime.
def escape({:subquery, _, [expr]}, _vars, _env) do
  {:_, quote(do: Ecto.Query.subquery(unquote(expr))), nil, %{}}
end
def escape({:subquery, _, [expr, opts]}, _vars, _env) do
  {:_, quote(do: Ecto.Query.subquery(unquote(expr), unquote(opts))), nil, %{}}
end
# fragment/unsafe_fragment sources: escape through the generic builder,
# collecting any interpolated params.
def escape({:fragment, _, [_ | _]} = expr, vars, env) do
  {expr, {params, :acc}} = Builder.escape(expr, :any, {%{}, :acc}, vars, env)
  {:_, expr, nil, params}
end
def escape({:unsafe_fragment, _, [_ | _]} = expr, vars, env) do
  {expr, {params, :acc}} = Builder.escape(expr, :any, {%{}, :acc}, vars, env)
  {:_, expr, nil, params}
end
# A bare schema alias, e.g. `join: Sample`.
def escape({:__aliases__, _, _} = module, _vars, _env) do
  {:_, {nil, module}, nil, %{}}
end
# A bare table name, e.g. `join: "foo"`.
def escape(string, _vars, _env) when is_binary(string) do
  {:_, {string, nil}, nil, %{}}
end
# {source, schema} tuples, with the schema as an alias or a plain atom.
def escape({string, {:__aliases__, _, _} = module}, _vars, _env) when is_binary(string) do
  {:_, {string, module}, nil, %{}}
end
def escape({string, atom}, _vars, _env) when is_binary(string) and is_atom(atom) do
  {:_, {string, atom}, nil, %{}}
end
# assoc(parent, :field): resolve the parent binding index at compile time.
def escape({:assoc, _, [{var, _, context}, field]}, vars, _env)
    when is_atom(var) and is_atom(context) do
  ensure_field!(field)
  var = Builder.find_var!(var, vars)
  field = Builder.quoted_field!(field)
  {:_, nil, {var, field}, %{}}
end
# Pinned expression: validated at runtime by join!/1.
def escape({:^, _, [expr]}, _vars, _env) do
  {:_, quote(do: Ecto.Query.Builder.Join.join!(unquote(expr))), nil, %{}}
end
# Fallback: try macro expansion once; if the expression doesn't change,
# it is malformed.
def escape(join, vars, env) do
  case Macro.expand(join, env) do
    ^join ->
      Builder.error! "malformed join `#{Macro.to_string(join)}` in query expression"
    join ->
      escape(join, vars, env)
  end
end
@doc """
Called at runtime to check dynamic joins.

Normalizes the interpolated join source into the internal representation:
a table name becomes `{source, nil}`, a schema atom becomes `{nil, schema}`,
a `{source, schema}` pair passes through, and anything else is treated as
a queryable.
"""
def join!(source) when is_binary(source),
  do: {source, nil}

def join!(schema) when is_atom(schema),
  do: {nil, schema}

def join!({source, schema}) when is_binary(source) and is_atom(schema),
  do: {source, schema}

def join!(queryable),
  do: Ecto.Queryable.to_query(queryable)
@doc """
Builds a quoted expression.

The quoted expression should evaluate to a query at runtime.
If possible, it does all calculations at compile time to avoid
runtime work.
"""
@spec build(Macro.t, atom, [Macro.t], Macro.t, Macro.t, Macro.t, Macro.Env.t) ::
      {Macro.t, Keyword.t, non_neg_integer | nil}
def build(query, qual, binding, expr, on, count_bind, env) do
  {query, binding} = Builder.escape_binding(query, binding)
  {join_bind, join_source, join_assoc, join_params} = escape(expr, binding, env)
  join_params = Builder.escape_params(join_params)

  # Qualifier and binding are validated now if they are literals,
  # otherwise validation is deferred to runtime.
  qual = validate_qual(qual)
  validate_bind(join_bind, binding)

  {count_bind, query} =
    if join_bind != :_ and !count_bind do
      # If count_bind is not available,
      # we need to compute the amount of binds at runtime
      query =
        quote do
          query = Ecto.Queryable.to_query(unquote(query))
          join_count = Builder.count_binds(query)
          query
        end
      {quote(do: join_count), query}
    else
      {count_bind, query}
    end

  # Register the join's binding at the next position.
  binding = binding ++ [{join_bind, count_bind}]

  # `next_bind` is a literal integer when computable at compile time,
  # otherwise quoted arithmetic evaluated at runtime.
  next_bind =
    if is_integer(count_bind) do
      count_bind + 1
    else
      quote(do: unquote(count_bind) + 1)
    end

  # `on` defaults to `true` when no ON clause was given.
  query = build_on(on || true, query, binding, count_bind, qual,
                   join_source, join_assoc, join_params, env)
  {query, binding, next_bind}
end
# Dynamic `on` clause (`^expr`): defer the entire join construction to
# `join!/9` at runtime, where the interpolated filter can be inspected.
def build_on({:^, _, [var]}, query, _binding, count_bind,
             join_qual, join_source, join_assoc, join_params, env) do
  quote do
    query = unquote(query)
    Ecto.Query.Builder.Join.join!(query, unquote(var), unquote(count_bind),
      unquote(join_qual), unquote(join_source), unquote(join_assoc),
      unquote(join_params), unquote(env.file), unquote(env.line))
  end
end

# Static `on` clause: escape the filter at compile time and splice a
# quoted %JoinExpr{} into the query via `apply/2`.
def build_on(on, query, binding, count_bind,
             join_qual, join_source, join_assoc, join_params, env) do
  {on_expr, on_params} = Ecto.Query.Builder.Filter.escape(:on, on, count_bind, binding, env)
  on_params = Builder.escape_params(on_params)

  join =
    quote do
      %JoinExpr{qual: unquote(join_qual), source: unquote(join_source),
                assoc: unquote(join_assoc), file: unquote(env.file),
                line: unquote(env.line), params: unquote(join_params),
                on: %QueryExpr{expr: unquote(on_expr), params: unquote(on_params),
                               line: unquote(env.line), file: unquote(env.file)}}
    end

  Builder.apply_query(query, __MODULE__, [join], env)
end
@doc """
Applies the join expression to the query.
"""
def apply(%Ecto.Query{} = query, expr) do
  %{query | joins: query.joins ++ [expr]}
end

def apply(queryable, expr) do
  queryable
  |> Ecto.Queryable.to_query()
  |> apply(expr)
end
@doc """
Called at runtime to build a join.
"""
def join!(query, expr, count_bind, join_qual, join_source, join_assoc, join_params, file, line) do
  # Resolve the dynamic ON filter against the runtime query; the filter
  # carries its own file/line so errors point at the interpolation site.
  {on_expr, on_params, on_file, on_line} =
    Ecto.Query.Builder.Filter.filter!(:on, query, expr, count_bind, file, line)

  join = %JoinExpr{qual: join_qual, source: join_source, assoc: join_assoc,
                   file: file, line: line, params: join_params,
                   on: %QueryExpr{expr: on_expr, params: on_params,
                                  line: on_line, file: on_file}}
  apply(query, join)
end
# Literal qualifiers are checked at compile time; anything else defers
# to `qual!/1` at runtime via a quoted call.
defp validate_qual(qual) when is_atom(qual), do: qual!(qual)

defp validate_qual(qual) do
  quote(do: Ecto.Query.Builder.Join.qual!(unquote(qual)))
end

# The anonymous binding `:_` may be repeated; a named binding may not
# shadow one already defined in the query.
defp validate_bind(:_, _all), do: nil

defp validate_bind(bind, all) do
  if bind in all do
    Builder.error! "variable `#{bind}` is already defined in query"
  end
end
@qualifiers [:inner, :inner_lateral, :left, :left_lateral, :right, :full, :cross]

@doc """
Called at runtime to check dynamic qualifier.
"""
def qual!(qual) when qual in @qualifiers, do: qual

def qual!(other) do
  accepted = Enum.map_join(@qualifiers, ", ", &"`#{inspect &1}`")

  raise ArgumentError,
        "invalid join qualifier `#{inspect other}`, accepted qualifiers are: " <> accepted
end
# Reject a plain variable passed to `assoc/2` — the caller almost
# certainly meant the atom — while letting pins and quoted atoms through.
defp ensure_field!(field) do
  case field do
    {var, _, _} when var != :^ ->
      Builder.error! "you passed the variable `#{var}` to `assoc/2`. Did you mean to pass the atom `:#{var}?`"

    _ ->
      true
  end
end
end
|
lib/ecto/query/builder/join.ex
| 0.827689
| 0.426322
|
join.ex
|
starcoder
|
defmodule UrbitEx.Channel do
  @moduledoc """
  GenServer module to open Eyre channels.

  Defines an UrbitEx.Channel struct to keep track of the channel state.
  Elixir processes can subscribe to channels to consume the events they
  propagate, either raw Eyre events or Eyre events parsed by UrbitEx.Reducer.
  """

  alias UrbitEx.{Session, API}

  @derive Jason.Encoder
  use GenServer

  defstruct name: :main,
            path: "/~/channel/1624121301252-9e6659",
            parent: :pid,
            last_action: 0,
            last_sse: 0,
            last_ack: 0,
            recent_events: [],
            subscriptions: [],
            truncated_event: "",
            consumers: [],
            raw_consumers: [],
            status: :init,
            autoack: false,
            # Placeholder only: defstruct defaults are evaluated once at
            # compile time, so this timestamp is frozen at build time.
            # `init/1` overwrites it with the real startup time.
            last_event: Time.utc_now(),
            keep_state: true

  @doc """
  Fetches Channel state. Takes a pid or atom name of the channel to fetch.
  Returns a Channel struct, to be used in UrbitEx functions involving PUT requests.
  """
  def read(pid \\ :main), do: GenServer.call(pid, :get)

  @doc """
  Subscribes to a channel in order to receive the SSE events (after parsing by the UrbitEx default reducer) it publishes.
  Takes a pid or atom name of the channel to subscribe to, and the pid of the subscriber process.
  """
  def consume_feed(channel_pid, consumer_pid),
    do: GenServer.cast(channel_pid, {:consume, consumer_pid})

  @doc """
  Deletes the subscription to a channel. No events will be received after calling this function.
  Takes a pid or atom name of the channel to unsubscribe from, and the pid of the subscriber process.
  """
  def wean(channel_pid, consumer_pid),
    do: GenServer.cast(channel_pid, {:wean, consumer_pid})

  @doc """
  Subscribes to a raw Eyre SSE pipeline in order to receive the raw SSE events it publishes.
  Takes a pid or atom name of the channel to subscribe to, and the pid of the subscriber process.
  """
  def consume_raw(channel_pid, consumer_pid),
    do: GenServer.cast(channel_pid, {:consume_raw, consumer_pid})

  @doc """
  Deletes the subscription to raw events.
  Takes a pid or atom name of the channel to unsubscribe from, and the pid of the subscriber process.
  """
  def wean_raw(channel_pid, consumer_pid),
    do: GenServer.cast(channel_pid, {:wean_raw, consumer_pid})

  # client

  def start_link(options \\ [], name \\ :main) when is_list(options) do
    GenServer.start_link(__MODULE__, new(options), name: name)
  end

  def start(options \\ [], name \\ :main) when is_list(options) do
    GenServer.start(__MODULE__, new(options), name: name)
  end

  def connect(pid \\ :main), do: GenServer.call(pid, :connect)

  # Builds a fresh Channel struct with a unique Eyre channel path
  # (millisecond timestamp plus 3 random bytes).
  defp new(opts \\ []) do
    path =
      "/~/channel/#{System.os_time(:millisecond)}-#{:crypto.strong_rand_bytes(3) |> Base.encode16(case: :lower)}"

    struct(__MODULE__, Keyword.put(opts, :path, path))
  end

  @spec add_event(atom | pid | {atom, any} | {:via, atom, any}, any) :: :ok
  def add_event(pid \\ :main, event) do
    GenServer.cast(pid, {:save_event, event})
  end

  def save_action(pid \\ :main, id) do
    GenServer.cast(pid, {:save_state, :last_action, id})
  end

  def save_ack(pid \\ :main, id) do
    GenServer.cast(pid, {:save_state, :last_ack, id})
  end

  # todo save the id!!
  def save_subscription(pid \\ :main, subscription) do
    GenServer.cast(pid, {:subscribe, subscription})
  end

  # Schedules a one-shot stream liveness check in 30 seconds.
  def check_stream() do
    Process.send_after(self(), :timer, 30_000)
  end

  # Starts the periodic (1s) loop that acks received SSE ids.
  def check_ack() do
    :timer.send_interval(1_000, :ack)
  end

  ## server

  @impl true
  def init(channel) do
    check_stream()
    check_ack()
    # Reset the watchdog timestamp here: the defstruct default was computed
    # at compile time, so without this reset the first :timer check would
    # see an enormous idle gap and needlessly restart the SSE pipe.
    {:ok, %{channel | last_event: Time.utc_now()}}
  end

  @impl true
  def handle_call(:get, _from, channel), do: {:reply, channel, channel}

  @impl true
  def handle_call(:connect, _from, channel) do
    session = Session.read(channel.parent)

    new_channel =
      with :ok <- API.open_channel(session, channel),
           {:ok, _ok} <- API.start_sse(session, channel) do
        send(channel.parent, {:channel_added, channel.name, self()})
        %{channel | status: :open}
      else
        {:error, _} -> %{channel | status: :error}
      end

    {:reply, :ok, new_channel}
  end

  ## handle SSE

  @impl true
  def handle_info(%HTTPoison.AsyncStatus{}, channel) do
    {:noreply, channel}
  end

  # ignore keep-alive messages, but count them as stream activity
  @impl true
  def handle_info(%{chunk: "\n"}, channel) do
    {:noreply, %{channel | last_event: Time.utc_now()}}
  end

  @impl true
  def handle_info(%{chunk: ":\n"}, channel) do
    {:noreply, %{channel | last_event: Time.utc_now()}}
  end

  @impl true
  def handle_info(%{chunk: ""}, channel) do
    {:noreply, channel}
  end

  @impl true
  def handle_info(%{chunk: data}, channel) do
    parse_stream(data)
    {:noreply, channel}
  end

  @impl true
  def handle_info({:handle_valid_message, id, event}, channel) do
    # NOTE(review): other handlers call `UrbitEx.get(channel.parent)` but
    # this one calls `UrbitEx.get()` — confirm whether the zero-arity form
    # targets the same session.
    if channel.autoack, do: API.ack(UrbitEx.get(), channel, id)
    broadcast(channel.raw_consumers, event)
    if channel.keep_state, do: UrbitEx.Reducer.default_reducer(event)
    {:noreply, %{channel | last_sse: id, last_event: Time.utc_now()}}
  end

  @impl true
  def handle_info({:handle_truncated_message, id, message}, channel) do
    new_channel = %{channel | last_sse: id, truncated_event: message}
    {:noreply, new_channel}
  end

  # A continuation chunk arrived: append it to the buffered fragment and
  # retry JSON decoding; keep buffering until the payload is complete.
  @impl true
  def handle_info({:stack_truncated_message, message}, channel) do
    tm = channel.truncated_event <> message

    case Jason.decode(tm) do
      {:ok, json} ->
        send(self(), {:handle_valid_message, channel.last_sse, json})
        {:noreply, %{channel | truncated_event: ""}}

      {:error, _r} ->
        {:noreply, %{channel | truncated_event: tm}}
    end
  end

  ### private functions used by handle infos

  # Splits an HTTP chunk into complete SSE frames ("id: N\ndata: {...}\n\n"),
  # keeping the delimiters so each frame can be inspected individually.
  defp parse_stream(event) do
    rr = ~r(id:\s\d+\ndata:\s.+?}\n\n)
    messages = String.split(event, rr, include_captures: true, trim: true)

    for msg <- messages do
      check_truncated(msg)
    end
  end

  defp check_truncated(msg) do
    r = ~r(^id:\s\d+\ndata:\s)

    case String.split(msg, r, include_captures: true, trim: true) do
      [event_id, data] ->
        [id] = Regex.run(~r(\d+), event_id)
        handle_seemingly_valid(id, data)

      _ ->
        handle_truncated(msg)
    end
  end

  defp handle_seemingly_valid(id_string, data) do
    id = String.to_integer(id_string)

    case Jason.decode(data) do
      {:ok, json} -> send(self(), {:handle_valid_message, id, json})
      {:error, _r} -> handle_truncated(id, data)
    end
  end

  # it has an id if it's the first piece of a long message
  defp handle_truncated(id, string) do
    send(self(), {:handle_truncated_message, id, string})
  end

  # when it's continuing the former
  defp handle_truncated(string) do
    send(self(), {:stack_truncated_message, string})
  end

  ## handle parsed events

  @impl true
  def handle_info({:save, tuple}, channel) do
    key = GenServer.call(channel.parent, {:save, tuple})
    broadcast(channel.consumers, {:data_set, key})
    {:noreply, channel}
  end

  @impl true
  def handle_info({:update, tuple}, channel) do
    broadcast(channel.consumers, {:data_updated, tuple})
    send(channel.parent, {:update, tuple})
    {:noreply, channel}
  end

  @impl true
  def handle_info({:add, tuple}, channel) do
    broadcast(channel.consumers, {:data_added, tuple})
    # I actually can do calls to this, not sends
    send(channel.parent, {:add, tuple})
    {:noreply, channel}
  end

  @impl true
  def handle_info({:add_or_update, tuple}, channel) do
    broadcast(channel.consumers, {:data_added_or_updated, tuple})
    send(channel.parent, {:add_or_update, tuple})
    {:noreply, channel}
  end

  @impl true
  def handle_info({:remove, tuple}, channel) do
    broadcast(channel.consumers, {:data_removed, tuple})
    send(channel.parent, {:remove, tuple})
    {:noreply, channel}
  end

  @impl true
  def handle_info({:send, event}, channel) do
    broadcast(channel.consumers, event)
    {:noreply, channel}
  end

  # Stream watchdog: restart the SSE pipe if nothing arrived for 30s.
  # NOTE(review): `Time` carries no date, so a channel idle across midnight
  # can compute a negative diff — consider monotonic time instead.
  @impl true
  def handle_info(:timer, channel) do
    baseline = Time.utc_now()
    diff = Time.diff(baseline, channel.last_event)

    if diff > 30 do
      API.restart_sse(UrbitEx.get(channel.parent), channel)
    end

    check_stream()
    {:noreply, channel}
  end

  # Periodic ack of the newest SSE id not yet acknowledged.
  @impl true
  def handle_info(:ack, channel) do
    if channel.last_sse > channel.last_ack do
      API.ack(UrbitEx.get(channel.parent), channel, channel.last_sse)
      {:noreply, %{channel | last_ack: channel.last_sse}}
    else
      {:noreply, channel}
    end
  end

  # Catch-all so unexpected messages don't pile up in the mailbox.
  @impl true
  def handle_info(_message, channel) do
    {:noreply, channel}
  end

  ## handle casts

  @impl true
  def handle_cast({:subscribe, subscription}, channel) do
    channel = %{channel | subscriptions: [subscription | channel.subscriptions]}
    {:noreply, channel}
  end

  @impl true
  def handle_cast({:consume, pid}, channel) do
    IO.inspect(pid, label: :consooming)
    {:noreply, %{channel | consumers: [pid | channel.consumers]}}
  end

  @impl true
  def handle_cast({:wean, pid}, channel) do
    IO.inspect(pid, label: :stopped_consooming)
    {:noreply, %{channel | consumers: List.delete(channel.consumers, pid)}}
  end

  @impl true
  def handle_cast({:consume_raw, pid}, channel) do
    IO.inspect(pid, label: :consooming_raw)
    {:noreply, %{channel | raw_consumers: [pid | channel.raw_consumers]}}
  end

  @impl true
  def handle_cast({:wean_raw, pid}, channel) do
    IO.inspect(pid, label: :stopped_consooming_raw)
    {:noreply, %{channel | raw_consumers: List.delete(channel.raw_consumers, pid)}}
  end

  @impl true
  def handle_cast({:save_state, key, data}, channel) do
    {:noreply, Map.put(channel, key, data)}
  end

  # Fan a message out to every registered consumer pid.
  defp broadcast(consumers, message) do
    consumers |> Enum.each(&send(&1, message))
  end
end
|
lib/types/channel.ex
| 0.639849
| 0.465509
|
channel.ex
|
starcoder
|
defmodule AWS.RedshiftData do
  @moduledoc """
  You can use the Amazon Redshift Data API to run queries on Amazon Redshift
  tables.

  You can run SQL statements, which are committed if the statement succeeds.
  For more information about the Amazon Redshift Data API, see [Using the Amazon Redshift Data
  API](https://docs.aws.amazon.com/redshift/latest/mgmt/data-api.html) in the
  *Amazon Redshift Cluster Management Guide*.
  """

  alias AWS.Client
  alias AWS.Request

  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: nil,
      api_version: "2019-12-20",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "redshift-data",
      global?: false,
      protocol: "json",
      service_id: "Redshift Data",
      signature_version: "v4",
      signing_name: "redshift-data",
      target_prefix: "RedshiftData"
    }
  end

  # Authorization note shared by every action below ("see the module
  # authorization note"):
  #
  # * Secrets Manager — when connecting to a cluster, specify the Amazon
  #   Resource Name (ARN) of the secret, the database name, and the cluster
  #   identifier that matches the cluster in the secret. When connecting to
  #   a serverless endpoint, specify the ARN of the secret and the database
  #   name.
  # * Temporary credentials — when connecting to a cluster, specify the
  #   cluster identifier, the database name, and the database user name;
  #   permission to call the `redshift:GetClusterCredentials` operation is
  #   also required. When connecting to a serverless endpoint, specify the
  #   database name.

  @doc """
  Runs one or more SQL statements, which can be data manipulation language (DML)
  or data definition language (DDL).

  Authorize with Secrets Manager or temporary credentials; see the module
  authorization note for the request parameters each method requires.
  """
  def batch_execute_statement(%Client{} = client, input, options \\ []) do
    post(client, "BatchExecuteStatement", input, options)
  end

  @doc """
  Cancels a running query.

  To be canceled, a query must be running.
  """
  def cancel_statement(%Client{} = client, input, options \\ []) do
    post(client, "CancelStatement", input, options)
  end

  @doc """
  Describes the details about a specific instance when a query was run by the
  Amazon Redshift Data API.

  The information includes when the query started, when it finished, the query
  status, the number of rows returned, and the SQL statement.
  """
  def describe_statement(%Client{} = client, input, options \\ []) do
    post(client, "DescribeStatement", input, options)
  end

  @doc """
  Describes the detailed information about a table from metadata in the cluster.

  The information includes its columns. A token is returned to page through the
  column list. Authorize with Secrets Manager or temporary credentials; see the
  module authorization note for the request parameters each method requires.
  """
  def describe_table(%Client{} = client, input, options \\ []) do
    post(client, "DescribeTable", input, options)
  end

  @doc """
  Runs an SQL statement, which can be data manipulation language (DML) or data
  definition language (DDL).

  This statement must be a single SQL statement. Authorize with Secrets Manager
  or temporary credentials; see the module authorization note for the request
  parameters each method requires.
  """
  def execute_statement(%Client{} = client, input, options \\ []) do
    post(client, "ExecuteStatement", input, options)
  end

  @doc """
  Fetches the temporarily cached result of an SQL statement.

  A token is returned to page through the statement results.
  """
  def get_statement_result(%Client{} = client, input, options \\ []) do
    post(client, "GetStatementResult", input, options)
  end

  @doc """
  List the databases in a cluster.

  A token is returned to page through the database list. Authorize with Secrets
  Manager or temporary credentials; see the module authorization note for the
  request parameters each method requires.
  """
  def list_databases(%Client{} = client, input, options \\ []) do
    post(client, "ListDatabases", input, options)
  end

  @doc """
  Lists the schemas in a database.

  A token is returned to page through the schema list. Authorize with Secrets
  Manager or temporary credentials; see the module authorization note for the
  request parameters each method requires.
  """
  def list_schemas(%Client{} = client, input, options \\ []) do
    post(client, "ListSchemas", input, options)
  end

  @doc """
  List of SQL statements.

  By default, only finished statements are shown. A token is returned to page
  through the statement list.
  """
  def list_statements(%Client{} = client, input, options \\ []) do
    post(client, "ListStatements", input, options)
  end

  @doc """
  List the tables in a database.

  If neither `SchemaPattern` nor `TablePattern` are specified, then all tables
  in the database are returned. A token is returned to page through the table
  list. Authorize with Secrets Manager or temporary credentials; see the module
  authorization note for the request parameters each method requires.
  """
  def list_tables(%Client{} = client, input, options \\ []) do
    post(client, "ListTables", input, options)
  end

  # Every Redshift Data action is a JSON POST against the same endpoint,
  # differing only in the action name.
  defp post(client, action, input, options) do
    Request.request_post(client, metadata(), action, input, options)
  end
end
|
lib/aws/generated/redshift_data.ex
| 0.880296
| 0.600511
|
redshift_data.ex
|
starcoder
|
defmodule Xlsxir do
alias Xlsxir.{XlsxFile}
use Application
def start(_type, _args) do
  # Plain map child specs replace the deprecated Supervisor.Spec.worker/2
  # helper; semantics are unchanged (a permanent :worker child started via
  # Xlsxir.StateManager.start_link/0).
  children = [
    %{id: Xlsxir.StateManager, start: {Xlsxir.StateManager, :start_link, []}}
  ]

  opts = [strategy: :one_for_one, name: __MODULE__]
  Supervisor.start_link(children, opts)
end
@moduledoc """
Extracts and parses data from a `.xlsx` file to an Erlang Term Storage (ETS) process and provides various functions for accessing the data.
"""
@doc """
**Deprecated**

Extracts worksheet data contained in the specified `.xlsx` file to an ETS process. Successful extraction
returns `{:ok, tid}` with the timer argument set to false and returns a tuple of `{:ok, tid, time}` where time is a list containing time elapsed during the extraction process
(i.e. `[hour, minute, second, microsecond]`) when the timer argument is set to true and tid - is the ETS table id

Cells containing formulas in the worksheet are extracted as either a `string`, `integer` or `float` depending on the resulting value of the cell.
Cells containing an ISO 8601 date format are extracted and converted to Erlang `:calendar.date()` format (i.e. `{year, month, day}`).

## Parameters
- `path` - file path of a `.xlsx` file type in `string` format
- `index` - index of worksheet from within the Excel workbook to be parsed (zero-based index)
- `timer` - boolean flag that tracks extraction process time and returns it when set to `true`. Default value is `false`.

## Options
- `:max_rows` - the number of rows to fetch from within the worksheet
- `:extract_to` - Specify how the `.xlsx` content (i.e. sharedStrings.xml,
  style.xml and worksheets xml files) will be extracted before being parsed.
  `:memory` will extract files to memory, and `:file` to files in the file system
- `:extract_base_dir` - when extracting to file, files will be extracted
  in a sub directory in the `:extract_base_dir` directory. Defaults to
  `Application.get_env(:xlsxir, :extract_base_dir)` or "temp"

## Example
Extract first worksheet in an example file named `test.xlsx` located in `./test/test_data`:

    iex> {:ok, tid} = Xlsxir.extract("./test/test_data/test.xlsx", 0)
    iex> Enum.member?(:ets.all, tid)
    true
    iex> Xlsxir.close(tid)
    :ok

    iex> {:ok, tid} = Xlsxir.extract("./test/test_data/test.xlsx", 0, false, [extract_to: :file])
    iex> Enum.member?(:ets.all, tid)
    true
    iex> Xlsxir.close(tid)
    :ok

    iex> {:ok, tid, _timer} = Xlsxir.extract("./test/test_data/test.xlsx", 0, true)
    iex> Enum.member?(:ets.all, tid)
    true
    iex> Xlsxir.close(tid)
    :ok

## Test parallel parsing

    iex> task1 = Task.async(fn -> Xlsxir.extract("./test/test_data/test.xlsx", 0) end)
    iex> task2 = Task.async(fn -> Xlsxir.extract("./test/test_data/test.xlsx", 0) end)
    iex> {:ok, tid1} = Task.await(task1)
    iex> {:ok, tid2} = Task.await(task2)
    iex> Xlsxir.get_list(tid1)
    [["string one", "string two", 10, 20, {2016, 1, 1}]]
    iex> Xlsxir.get_list(tid2)
    [["string one", "string two", 10, 20, {2016, 1, 1}]]
    iex> Xlsxir.close(tid1)
    :ok
    iex> Xlsxir.close(tid2)
    :ok

## Example (errors)

    iex> Xlsxir.extract("./test/test_data/test.invalidfile", 0)
    {:error, "Invalid file type (expected xlsx)."}

    iex> Xlsxir.extract("./test/test_data/test.xlsx", 100)
    {:error, "Invalid worksheet index."}
"""
def extract(path, index, timer \\ false, options \\ []) do
  # NOTE(review): `initialize/2` may return `{:error, msg}` (the error
  # doctests show extract passing that through) — presumably the downstream
  # XlsxFile functions tolerate it; confirm. Also, if `parse_to_ets/3`
  # raises, `clean/1` never runs and files extracted to disk may be left
  # behind — confirm whether XlsxFile guards against that.
  xlsx_file = XlsxFile.initialize(path, options)
  result = XlsxFile.parse_to_ets(xlsx_file, index, timer)
  XlsxFile.clean(xlsx_file)
  result
end
@doc """
Stream worksheet rows contained in the specified `.xlsx` file.

Cells containing formulas in the worksheet are extracted as either a `string`, `integer` or `float` depending on the resulting value of the cell.
Cells containing an ISO 8601 date format are extracted and converted to Erlang `:calendar.date()` format (i.e. `{year, month, day}`).

## Parameters
- `path` - file path of a `.xlsx` file type in `string` format
- `index` - index of worksheet from within the Excel workbook to be parsed (zero-based index)

## Options
- `:extract_to` - Specify how the `.xlsx` content (i.e. sharedStrings.xml,
  style.xml and worksheets xml files) will be extracted before being parsed.
  `:memory` will extract files to memory, and `:file` to files in the file system
- `:extract_base_dir` - when extracting to file, files will be extracted
  in a sub directory in the `:extract_base_dir` directory. Defaults to
  `Application.get_env(:xlsxir, :extract_base_dir)` or "temp"

## Example
Extract first worksheet in an example file named `test.xlsx` located in `./test/test_data`:

    iex> Xlsxir.stream_list("./test/test_data/test.xlsx", 1) |> Enum.take(1)
    [[1, 2]]

    iex> Xlsxir.stream_list("./test/test_data/test.xlsx", 1) |> Enum.take(3)
    [[1, 2], [3, 4]]
"""
def stream_list(path, index, options \\ []) do
  path
  |> stream(index, options)
  |> Stream.map(&row_data_to_list/1)
end

# Lazily streams raw row data; streaming always extracts to disk unless
# the caller overrides `:extract_to`.
defp stream(path, index, options) do
  XlsxFile.stream(path, index, Keyword.merge([extract_to: :file], options))
end

# Fills row gaps via do_get_row/1 and strips the cell references,
# keeping only the values.
defp row_data_to_list(row_data) do
  for [_ref, value] <- do_get_row(row_data), do: value
end
@doc """
Extracts the first n number of rows from the specified worksheet contained in the specified `.xlsx` file to an ETS process.

Successful extraction returns `{:ok, tid}` where tid - is ETS table id.

## Parameters
- `path` - file path of a `.xlsx` file type in `string` format
- `index` - index of worksheet from within the Excel workbook to be parsed (zero-based index)
- `rows` - the number of rows to fetch from within the specified worksheet

## Options
- `:extract_to` - Specify how the `.xlsx` content (i.e. sharedStrings.xml,
  style.xml and worksheets xml files) will be extracted before being parsed.
  `:memory` will extract files to memory, and `:file` to files in the file system
- `:extract_base_dir` - when extracting to file, files will be extracted
  in a sub directory in the `:extract_base_dir` directory. Defaults to
  `Application.get_env(:xlsxir, :extract_base_dir)` or "temp"

## Example
Peek at the first 10 rows of the 9th worksheet in an example file named `test.xlsx` located in `./test/test_data`:

    iex> {:ok, tid} = Xlsxir.peek("./test/test_data/test.xlsx", 8, 10)
    iex> Enum.member?(:ets.all, tid)
    true
    iex> Xlsxir.close(tid)
    :ok
"""
def peek(path, index, rows, options \\ []) do
  # A peek is just an extract capped at `rows`; the explicit cap wins
  # over any :max_rows the caller passed.
  opts = Keyword.merge(options, max_rows: rows)
  extract(path, index, false, opts)
end
@doc """
Extracts worksheet data contained in the specified `.xlsx` file to an ETS process. Successful extraction
returns `{:ok, table_id}` with the timer argument set to false and returns a tuple of `{:ok, table_id, time}` where `time` is a list containing time elapsed during the extraction process
(i.e. `[hour, minute, second, microsecond]`) when the timer argument is set to true. The `table_id` is used to access data for that particular ETS process with the various access functions of the
`Xlsxir` module.

Cells containing formulas in the worksheet are extracted as either a `string`, `integer` or `float` depending on the resulting value of the cell.
Cells containing an ISO 8601 date format are extracted and converted to Erlang `:calendar.date()` format (i.e. `{year, month, day}`).

## Parameters
- `path` - file path of a `.xlsx` file type in `string` format
- `index` - index of worksheet from within the Excel workbook to be parsed (zero-based index)
- `timer` - boolean flag that tracks extraction process time and returns it when set to `true`. Default value is `false`.

## Options
- `:max_rows` - the number of rows to fetch from within the worksheets
- `:extract_to` - Specify how the `.xlsx` content (i.e. sharedStrings.xml,
  style.xml and worksheets xml files) will be extracted before being parsed.
  `:memory` will extract files to memory, and `:file` to files in the file system
- `:extract_base_dir` - when extracting to file, files will be extracted
  in a sub directory in the `:extract_base_dir` directory. Defaults to
  `Application.get_env(:xlsxir, :extract_base_dir)` or "temp"

## Example
Extract first worksheet in an example file named `test.xlsx` located in `./test/test_data`:

    iex> {:ok, tid} = Xlsxir.multi_extract("./test/test_data/test.xlsx", 0)
    iex> Enum.member?(:ets.all, tid)
    true
    iex> Xlsxir.close(tid)
    :ok

## Example
Extract all worksheets in an example file named `test.xlsx` located in `./test/test_data`:

    iex> results = Xlsxir.multi_extract("./test/test_data/test.xlsx")
    iex> alive_ids = Enum.map(results, fn {:ok, tid} -> Enum.member?(:ets.all, tid) end)
    iex> Enum.all?(alive_ids)
    true
    iex> Enum.map(results, fn {:ok, tid} -> Xlsxir.close(tid) end) |> Enum.all?(fn result -> result == :ok end)
    true

## Example
Extract all worksheets in an example file named `test.xlsx` located in `./test/test_data` with timer:

    iex> results = Xlsxir.multi_extract("./test/test_data/test.xlsx", nil, true)
    iex> alive_ids = Enum.map(results, fn {:ok, tid, _timer} -> Enum.member?(:ets.all, tid) end)
    iex> Enum.all?(alive_ids)
    true
    iex> Enum.map(results, fn {:ok, tid, _timer} -> Xlsxir.close(tid) end) |> Enum.all?(fn result -> result == :ok end)
    true

## Example (errors)

    iex> Xlsxir.multi_extract("./test/test_data/test.invalidfile", 0)
    {:error, "Invalid file type (expected xlsx)."}

    iex> Xlsxir.multi_extract("./test/test_data/test.xlsx", 100)
    {:error, "Invalid worksheet index."}
"""
def multi_extract(path, index \\ nil, timer \\ false, _excel \\ nil, options \\ [])

# With no worksheet index: parse every worksheet and return a list of
# per-worksheet results.
def multi_extract(path, nil, timer, _excel, options) do
  case XlsxFile.initialize(path, options) do
    {:error, msg} ->
      {:error, msg}
    xlsx_file ->
      results = XlsxFile.parse_all_to_ets(xlsx_file, timer)
      XlsxFile.clean(xlsx_file)
      results
  end
end

# With an explicit index: behaves exactly like `extract/4`.
def multi_extract(path, index, timer, _excel, options) when is_integer(index) do
  extract(path, index, timer, options)
end
@doc """
Accesses ETS process and returns data formatted as a list of row value lists.

## Parameters
- `tid` - table identifier of ETS process to be accessed

## Example
An example file named `test.xlsx` located in `./test/test_data` containing the following:
- cell 'A1' -> "string one"
- cell 'B1' -> "string two"
- cell 'C1' -> integer of 10
- cell 'D1' -> formula of "4 * 5"
- cell 'E1' -> date of 1/1/2016 or Excel date serial of 42370

    iex> {:ok, tid} = Xlsxir.extract("./test/test_data/test.xlsx", 0)
    iex> Xlsxir.get_list(tid)
    [["string one", "string two", 10, 20, {2016, 1, 1}]]
    iex> Xlsxir.close(tid)
    :ok

    iex> {:ok, tid} = Xlsxir.extract("./test/test_data/test.xlsx", 2)
    iex> Xlsxir.get_list(tid) |> List.first |> Enum.count
    16384

    iex> {:ok, tid} = Xlsxir.multi_extract("./test/test_data/test.xlsx", 0)
    iex> Xlsxir.get_list(tid)
    [["string one", "string two", 10, 20, {2016, 1, 1}]]
    iex> Xlsxir.close(tid)
    :ok
"""
def get_list(tid) do
  # Rows are stored as {row_number, row_data}; sorting the [num, row]
  # match results restores worksheet order.
  tid
  |> :ets.match({:"$1", :"$2"})
  |> Enum.sort()
  |> Enum.map(fn [_row_number, row] ->
    for [_ref, value] <- do_get_row(row), do: value
  end)
end
@doc """
Accesses ETS process and returns data formatted as a map of cell references and values.
## Parameters
- `tid` - table identifier of ETS process to be accessed
## Example
An example file named `test.xlsx` located in `./test/test_data` containing the following:
- cell 'A1' -> "string one"
- cell 'B1' -> "string two"
- cell 'C1' -> integer of 10
- cell 'D1' -> formula of "4 * 5"
- cell 'E1' -> date of 1/1/2016 or Excel date serial of 42370
iex> {:ok, tid} = Xlsxir.extract("./test/test_data/test.xlsx", 0)
iex> Xlsxir.get_map(tid)
%{ "A1" => "string one", "B1" => "string two", "C1" => 10, "D1" => 20, "E1" => {2016,1,1}}
iex> Xlsxir.close(tid)
:ok
iex> {:ok, tid} = Xlsxir.multi_extract("./test/test_data/test.xlsx", 0)
iex> Xlsxir.get_map(tid)
%{ "A1" => "string one", "B1" => "string two", "C1" => 10, "D1" => 20, "E1" => {2016,1,1}}
iex> Xlsxir.close(tid)
:ok
"""
def get_map(tid) do
:ets.match(tid, {:"$1", :"$2"})
|> Enum.reduce(%{}, fn [_num, row], acc ->
row |> do_get_row()
|> Enum.reduce(%{}, fn [ref, val], acc2 -> Map.put(acc2, ref, val) end)
|> Enum.into(acc)
end)
end
@doc """
Accesses ETS process and returns an indexed map which functions like a multi-dimensional array in other languages.
## Parameters
- `tid` - table identifier of ETS process to be accessed
## Example
An example file named `test.xlsx` located in `./test/test_data` containing the following:
- cell 'A1' -> "string one"
- cell 'B1' -> "string two"
- cell 'C1' -> integer of 10
- cell 'D1' -> formula of "4 * 5"
- cell 'E1' -> date of 1/1/2016 or Excel date serial of 42370
iex> {:ok, tid} = Xlsxir.extract("./test/test_data/test.xlsx", 0)
iex> mda = Xlsxir.get_mda(tid)
%{0 => %{0 => "string one", 1 => "string two", 2 => 10, 3 => 20, 4 => {2016,1,1}}}
iex> mda[0][0]
"string one"
iex> mda[0][2]
10
iex> Xlsxir.close(tid)
:ok
iex> {:ok, tid} = Xlsxir.multi_extract("./test/test_data/test.xlsx", 0)
iex> mda = Xlsxir.get_mda(tid)
%{0 => %{0 => "string one", 1 => "string two", 2 => 10, 3 => 20, 4 => {2016,1,1}}}
iex> mda[0][0]
"string one"
iex> mda[0][2]
10
iex> Xlsxir.close(tid)
:ok
"""
def get_mda(tid) do
tid |> :ets.match({:"$1", :"$2"}) |> convert_to_indexed_map(%{})
end
# Recursively turns [[row_number, cells] | rest] into a zero-based
# map-of-maps: %{row_index => %{col_index => value}}.
defp convert_to_indexed_map([], map), do: map

defp convert_to_indexed_map([[row_number, row] | rest], map) do
  indexed_cells =
    row
    |> do_get_row()
    |> Enum.with_index()
    |> Map.new(fn {cell, index} -> {index, Enum.at(cell, 1)} end)

  # ETS rows are 1-based; the returned structure is 0-based.
  convert_to_indexed_map(rest, Map.put(map, row_number - 1, indexed_cells))
end
@doc """
Accesses ETS process and returns value of specified cell.
## Parameters
- `table_id` - table identifier of ETS process to be accessed
- `cell_ref` - Reference name of cell to be returned in `string` format (i.e. `"A1"`)
## Example
An example file named `test.xlsx` located in `./test/test_data` containing the following:
- cell 'A1' -> "string one"
- cell 'B1' -> "string two"
- cell 'C1' -> integer of 10
- cell 'D1' -> formula of "4 * 5"
- cell 'E1' -> date of 1/1/2016 or Excel date serial of 42370
iex> {:ok, tid} = Xlsxir.extract("./test/test_data/test.xlsx", 0)
iex> Xlsxir.get_cell(tid, "A1")
"string one"
iex> Xlsxir.close(tid)
:ok
iex> {:ok, tid} = Xlsxir.multi_extract("./test/test_data/test.xlsx", 0)
iex> Xlsxir.get_cell(tid, "A1")
"string one"
iex> Xlsxir.close(tid)
:ok
"""
def get_cell(table_id, cell_ref), do: do_get_cell(cell_ref, table_id)
defp do_get_cell(cell_ref, table_id) do
[[row_num]] = ~r/\d+/ |> Regex.scan(cell_ref)
row_num = row_num |> String.to_integer
with [[row]] <- :ets.match(table_id, {row_num, :"$1"}),
[^cell_ref, value] <- Enum.find(row, fn [ref, _val] -> ref == cell_ref end) do
value
else
_ -> nil
end
end
@doc """
Accesses ETS process and returns values of specified row in a `list`.
## Parameters
- `tid` - table identifier of ETS process to be accessed
- `row` - Reference name of row to be returned in `integer` format (i.e. `1`)
## Example
An example file named `test.xlsx` located in `./test/test_data` containing the following:
- cell 'A1' -> "string one"
- cell 'B1' -> "string two"
- cell 'C1' -> integer of 10
- cell 'D1' -> formula of "4 * 5"
- cell 'E1' -> date of 1/1/2016 or Excel date serial of 42370
iex> {:ok, tid} = Xlsxir.extract("./test/test_data/test.xlsx", 0)
iex> Xlsxir.get_row(tid, 1)
["string one", "string two", 10, 20, {2016, 1, 1}]
iex> Xlsxir.close(tid)
:ok
iex> {:ok, tid} = Xlsxir.multi_extract("./test/test_data/test.xlsx", 0)
iex> Xlsxir.get_row(tid, 1)
["string one", "string two", 10, 20, {2016, 1, 1}]
iex> Xlsxir.close(tid)
:ok
"""
def get_row(tid, row) do
case :ets.match(tid, {row, :"$1"}) do
[[row]] -> row |> do_get_row() |> Enum.map(fn [_ref, val] -> val end)
[] -> []
end
end
# Expands a sparse row (a list of [ref, value] pairs) into a dense one by
# inserting [ref, nil] placeholders for every column missing between cells,
# starting from column "A". Returns the padded list of [ref, value] pairs.
defp do_get_row(row) do
  row
  |> Enum.reduce({[], nil}, fn [ref, val], {values, previous} ->
    # Row-number portion of the reference, e.g. "7" in "C7".
    line = ~r/\d+$/ |> Regex.run(ref) |> List.first
    empty_cells = cond do
      # First cell of the row is past column A: pad from "A<line>" up to ref.
      is_nil(previous) && String.first(ref) != "A" -> fill_empty_cells("A#{line}", ref, line, [])
      # Gap between this cell and the previous one: pad the skipped columns.
      !is_nil(previous) && !is_next_col(ref, previous) -> fill_empty_cells(next_col(previous), ref, line, [])
      true -> []
    end
    # Track the last seen reference so the next iteration can detect gaps.
    {values ++ empty_cells ++ [[ref, val]], ref}
  end)
  |> elem(0)
end
# Converts a 1-based column index into its Excel letter name, e.g. 1 -> "A",
# 26 -> "Z", 27 -> "AA". Letters are prepended, least significant first.
defp column_from_index(index, column) when index > 0 do
  modulo = rem(index - 1, 26)
  # 65 is ?A; prepend this letter and carry the remaining higher-order value.
  column = [65 + modulo | column]
  column_from_index(div(index - modulo, 26), column)
end
defp column_from_index(_, column), do: to_string(column)

# True when `current` is the cell immediately to the right of `previous`.
defp is_next_col(current, previous) do
  current == next_col(previous)
end

# Reference of the cell one column to the right, e.g. "B7" -> "C7", "Z7" -> "AA7".
defp next_col(ref) do
  [chars, line] = Regex.run(~r/^([A-Z]+)(\d+)/, ref, capture: :all_but_first)
  chars = chars |> String.to_charlist
  # Interpret the letters as a base-26 number with A=1 .. Z=26.
  col_index = Enum.reduce(chars, 0, fn char, acc ->
    acc = acc * 26
    acc + char - 65 + 1
  end)
  "#{column_from_index(col_index + 1, '')}#{line}"
end

# Builds [ref, nil] placeholders for every column from `from` up to (but not
# including) `to` on row `line`, preserving left-to-right order.
defp fill_empty_cells(from, from, _line, cells), do: Enum.reverse(cells)
defp fill_empty_cells(from, to, line, cells) do
  next_ref = next_col(from)
  if next_ref == to do
    fill_empty_cells(to, to, line, [[from, nil] | cells])
  else
    fill_empty_cells(next_ref, to, line, [[from, nil] | cells])
  end
end
@doc """
Accesses `tid` ETS process and returns values of specified column in a `list`.
## Parameters
- `tid` - table identifier of ETS process to be accessed
- `col` - Reference name of column to be returned in `string` format (i.e. `"A"`)
## Example
An example file named `test.xlsx` located in `./test/test_data` containing the following:
- cell 'A1' -> "string one"
- cell 'B1' -> "string two"
- cell 'C1' -> integer of 10
- cell 'D1' -> formula of "4 * 5"
- cell 'E1' -> date of 1/1/2016 or Excel date serial of 42370
iex> {:ok, tid} = Xlsxir.extract("./test/test_data/test.xlsx", 0)
iex> Xlsxir.get_col(tid, "A")
["string one"]
iex> Xlsxir.close(tid)
:ok
iex> {:ok, tid} = Xlsxir.multi_extract("./test/test_data/test.xlsx", 0)
iex> Xlsxir.get_col(tid, "A")
["string one"]
iex> Xlsxir.close(tid)
:ok
"""
def get_col(tid, col), do: do_get_col(col, tid)
defp do_get_col(col, tid) do
tid
|> :ets.match({:"$1", :"$2"})
|> Enum.sort
|> Enum.map(fn [_num, row] ->
row
|> do_get_row()
|> Enum.filter(fn [ref, _val] -> Regex.scan(~r/[A-Z]+/i, ref) == [[col]] end)
|> Enum.map(fn [_ref, val] -> val end)
end)
|> List.flatten
end
@doc """
See `get_multi_info/2` documentation.
"""
def get_info(table_id, num_type \\ :all) do
get_multi_info(table_id, num_type)
end
@doc """
Returns count data based on `num_type` specified:
- `:rows` - Returns number of rows contained in worksheet
- `:cols` - Returns number of columns contained in worksheet
- `:cells` - Returns number of cells contained in worksheet
- `:all` - Returns a keyword list containing all of the above
## Parameters
- `tid` - table identifier of ETS process to be accessed
- `num_type` - type of count data to be returned (see above), defaults to `:all`
"""
def get_multi_info(tid, num_type \\ :all) do
case num_type do
:rows -> row_num(tid)
:cols -> col_num(tid)
:cells -> cell_num(tid)
_ -> [
rows: row_num(tid),
cols: col_num(tid),
cells: cell_num(tid)
]
end
end
# Number of rows: one ETS object per row, so the table size is the row count.
defp row_num(tid), do: :ets.info(tid, :size)

# Number of columns: the widest stored row.
defp col_num(tid) do
  tid
  |> :ets.match({:"$1", :"$2"})
  |> Enum.map(fn [_row_number, row] -> length(row) end)
  |> Enum.max()
end

# Number of cells: total stored cells across all rows.
defp cell_num(tid) do
  tid
  |> :ets.match({:"$1", :"$2"})
  |> Enum.map(fn [_row_number, row] -> length(row) end)
  |> Enum.sum()
end
@doc """
Deletes ETS process `tid` and returns `:ok` if successful.
## Example
Extract first worksheet in an example file named `test.xlsx` located in `./test/test_data`:
iex> {:ok, tid} = Xlsxir.extract("./test/test_data/test.xlsx", 0)
iex> Xlsxir.close(tid)
:ok
iex> {:ok, tid} = Xlsxir.multi_extract("./test/test_data/test.xlsx", 0)
iex> Xlsxir.close(tid)
:ok
"""
def close(tid) do
if Enum.member?(:ets.all, tid) do
if :ets.delete(tid), do: :ok, else: :error
else
:ok
end
end
end
|
lib/xlsxir.ex
| 0.762954
| 0.648953
|
xlsxir.ex
|
starcoder
|
defmodule Pointers.Tables do
  @moduledoc """
  A Global cache of Tables to be queried by their (Pointer) IDs, table
  names or Ecto Schema module names.
  Use of the Table Service requires:
  1. You have run the migrations shipped with this library.
  2. You have started `Pointers.Tables` before querying.
  3. All OTP applications with pointable Ecto Schemata to be added to the schema path.
  4. OTP 21.2 or greater, though we recommend using the most recent release available.
  While this module is a GenServer, it is only responsible for setup
  of the cache and then exits with :ignore having done so. It is not
  recommended to restart the service as this will lead to a stop the
  world garbage collection of all processes and the copying of the
  entire cache to each process that has queried it since its last
  local garbage collection.
  """

  alias Pointers.{NotFound, Table, ULID}
  require Logger
  use GenServer, restart: :transient

  @typedoc """
  A query is either a table's (database) name or (Pointer) ID as a
  binary or the name of its Ecto Schema Module as an atom.
  """
  @type query :: binary | atom

  @spec start_link(ignored :: term) :: GenServer.on_start()
  @doc "Populates the global cache with table data via introspection."
  def start_link(_), do: GenServer.start_link(__MODULE__, [])

  # The full index, as stored under this module's name by init/0.
  def data(), do: :persistent_term.get(__MODULE__)

  @spec table(query :: query) :: {:ok, Table.t} | {:error, NotFound.t}
  @doc "Get a Table identified by name, id or module."
  def table(query) when is_binary(query) or is_atom(query) do
    case Map.get(data(), query) do
      nil -> {:error, NotFound.new(query)}
      other -> {:ok, other}
    end
  end

  @spec table!(query) :: Table.t
  @doc "Look up a Table by name or id, raise NotFound if not found."
  def table!(query), do: Map.get(data(), query) || not_found(query)

  @spec id(query) :: {:ok, integer()} | {:error, NotFound.t()}
  @doc "Look up a table id by id, name or schema."
  def id(query), do: with({:ok, val} <- table(query), do: {:ok, val.id})

  @spec id!(query) :: integer()
  @doc "Look up a table id by id, name or schema, raise NotFound if not found."
  def id!(query) when is_atom(query) or is_binary(query), do: id!(query, data())

  @spec ids!([binary | atom]) :: [binary]
  @doc "Look up many ids at once, raise NotFound if any of them are not found"
  def ids!(ids) do
    data = data()
    Enum.map(ids, &id!(&1, data))
  end

  # called by id!/1, ids!/1
  # Fix: this previously read `Map.get(data, query).id || not_found(query)`,
  # which raised a KeyError (`nil.id`) when the query was missing instead of
  # the intended logged NotFound raised by not_found/1.
  defp id!(query, data) do
    case Map.get(data, query) do
      nil -> not_found(query)
      table -> table.id
    end
  end

  @spec schema(query) :: {:ok, atom} | {:error, NotFound.t}
  @doc "Look up a schema module by id, name or schema"
  def schema(query), do: with({:ok, val} <- table(query), do: {:ok, val.schema})

  @spec schema!(query) :: atom
  @doc "Look up a schema module by id, name or schema, raise NotFound if not found"
  def schema!(query), do: table!(query).schema

  # GenServer callback
  @doc false
  def init(_) do
    # Wrap cache construction in a telemetry span when telemetry is available.
    if Code.ensure_loaded?(:telemetry),
      do: :telemetry.span([:pointers, :tables], %{}, &init/0),
      else: init()

    # The cache lives in :persistent_term, not in this process; exit cleanly.
    :ignore
  end

  # Builds the index and publishes it; returns {result, metadata} for the span.
  defp init() do
    indexed = build_index()
    :persistent_term.put(__MODULE__, indexed)
    {indexed, indexed}
  end

  # Scans all configured applications for pointable schemas and indexes them.
  defp build_index() do
    search_path()
    |> Enum.flat_map(&app_modules/1)
    |> Enum.filter(&pointer_schema?/1)
    |> Enum.reduce(%{}, &index/2)
  end

  defp app_modules(app), do: app_modules(app, Application.spec(app, :modules))
  defp app_modules(_, nil), do: []
  defp app_modules(_, mods), do: mods

  # called by init/1
  defp search_path(), do: [:pointers | Application.fetch_env!(:pointers, :search_path)]

  # called by init/1
  defp pointer_schema?(module) do
    Code.ensure_loaded?(module) and
      function_exported?(module, :__pointers__, 1) and
      function_exported?(module, :__schema__, 1) and
      module.__pointers__(:role) == :pointable
  end

  # called by init/1
  defp index(mod, acc), do: index(mod, acc, mod.__schema__(:primary_key))
  # called by index/2
  defp index(mod, acc, [:id]), do: index(mod, acc, mod.__schema__(:type, :id))
  # called by index/3, the line above
  defp index(mod, acc, ULID), do: index(mod, acc, mod.__pointers__(:table_id), mod.__schema__(:source))
  # doesn't look right, skip it
  defp index(_, acc, _wat), do: acc

  # called by index/3; each table is reachable by id, table name and module.
  defp index(mod, acc, id, table) do
    t = %Table{id: id, schema: mod, table: table}
    log_indexed(t)
    Map.merge(acc, %{id => t, table => t, mod => t})
  end

  defp log_indexed(table) do
    if Code.ensure_loaded?(:telemetry),
      do: :telemetry.execute([:pointers, :tables, :indexed], %{}, %{table: table})
  end

  defp not_found(table) do
    Logger.error("Pointers Table `#{table}` not found")
    raise(NotFound)
  end
end
|
lib/tables.ex
| 0.825027
| 0.479686
|
tables.ex
|
starcoder
|
defmodule ZcashExplorerWeb.BlockView do
  @moduledoc """
  View helpers for block pages: mined-time formatting, transaction counts,
  value totals, and transaction type classification (coinbase, transparent,
  shielded, mixed, shielding, deshielding).
  """
  use ZcashExplorerWeb, :view

  # A block without a timestamp has not been mined yet.
  def mined_time(nil) do
    "Not yet mined"
  end

  # Absolute ISO-8601 time followed by a relative description, e.g. "(2 hours ago)".
  def mined_time(timestamp) do
    abs = timestamp |> Timex.from_unix() |> Timex.format!("{ISOdate} {ISOtime}")
    rel = timestamp |> Timex.from_unix() |> Timex.format!("{relative}", :relative)
    abs <> " " <> "(#{rel})"
  end

  def mined_time_without_rel(timestamp) do
    timestamp |> Timex.from_unix() |> Timex.format!("{ISOdate} {ISOtime}")
  end

  def mined_time_rel(timestamp) do
    timestamp |> Timex.from_unix() |> Timex.format!("{relative}", :relative)
  end

  def transaction_count(txs) do
    txs |> length
  end

  # Total number of transparent inputs across all transactions.
  def vin_count(txs) do
    txs |> Enum.reduce(0, fn x, acc -> length(x.vin) + acc end)
  end

  # Total number of transparent outputs across all transactions.
  def vout_count(txs) do
    txs |> Enum.reduce(0, fn x, acc -> length(x.vout) + acc end)
  end

  # A transaction with no inputs cannot be a coinbase transaction.
  def is_coinbase_tx?(tx) when tx.vin == [] do
    false
  end

  # A coinbase transaction carries a non-nil :coinbase field on its first input.
  def is_coinbase_tx?(tx) when length(tx.vin) > 0 do
    first_tx = tx.vin |> List.first()

    case Map.fetch(first_tx, :coinbase) do
      {:ok, nil} -> false
      {:ok, _value} -> true
      # Fix: Map.fetch/2 returns the bare atom :error (not an {:error, reason}
      # tuple) when the key is absent, so the previous {:error, _reason} clause
      # could never match and a vin entry without :coinbase raised CaseClauseError.
      :error -> false
    end
  end

  # Address credited by the block's coinbase transaction (first output address),
  # or nil when the first transaction is not a coinbase.
  def mined_by(txs) do
    first_trx = txs |> List.first()

    if is_coinbase_tx?(first_trx) do
      first_trx
      |> Map.get(:vout)
      |> List.first()
      |> Map.get(:scriptPubKey)
      |> Map.get(:addresses)
      |> List.first()
    end
  end

  # Total transparent input value of all non-coinbase transactions, rendered as
  # a decimal string. The first transaction (the coinbase) is skipped.
  def input_total(txs) do
    # Renamed the unused `hd` binding to `_coinbase` (it also shadowed Kernel.hd/1).
    [_coinbase | tail] = txs

    tail
    |> Enum.map(fn x -> Map.get(x, :vin) end)
    |> List.flatten()
    |> Enum.reduce(0, fn x, acc -> Map.get(x, :value) + acc end)
    |> Kernel.+(0.0)
    |> :erlang.float_to_binary([:compact, {:decimals, 10}])
  end

  # Total transparent output value across all transactions, as a decimal string.
  def output_total(txs) do
    txs
    |> Enum.map(fn x -> Map.get(x, :vout) end)
    |> List.flatten()
    |> Enum.reduce(0, fn x, acc -> Map.get(x, :value) + acc end)
    |> Kernel.+(0.0)
    |> :erlang.float_to_binary([:compact, {:decimals, 10}])
  end

  # Output total for a decoded transaction struct (atom keys).
  def tx_out_total(%Zcashex.Transaction{} = tx) do
    tx
    |> Map.get(:vout)
    |> List.flatten()
    |> Enum.reduce(0, fn x, acc -> Map.get(x, :value) + acc end)
    |> Kernel.+(0.0)
    |> :erlang.float_to_binary([:compact, {:decimals, 10}])
  end

  # Output total for a raw JSON transaction map (string keys).
  def tx_out_total(tx) when is_map(tx) do
    tx
    |> Map.get("vout")
    |> List.flatten()
    |> Enum.reduce(0, fn x, acc -> Map.get(x, "value") + acc end)
    |> Kernel.+(0.0)
    |> :erlang.float_to_binary([:compact, {:decimals, 10}])
  end

  # detect if a transaction is Public
  # https://z.cash/technology/
  def transparent_in_and_out(tx) do
    length(tx.vin) > 0 and length(tx.vout) > 0
  end

  def contains_sprout(tx) do
    length(tx.vjoinsplit) > 0
  end

  def get_joinsplit_count(tx) do
    length(tx.vjoinsplit)
  end

  # Sapling activity: a non-zero value balance plus shielded spends or outputs.
  def contains_sapling(tx) do
    value_balance = Map.get(tx, :valueBalance) || 0.0
    vshielded_spend = Map.get(tx, :vShieldedSpend) || []
    vshielded_output = Map.get(tx, :vShieldedOutput) || []
    value_balance != 0.0 and (length(vshielded_spend) > 0 || length(vshielded_output) > 0)
  end

  def is_shielded_tx?(tx) do
    !transparent_in_and_out(tx) and (contains_sprout(tx) or contains_sapling(tx))
  end

  def is_transparent_tx?(tx) do
    value_balance = Map.get(tx, :valueBalance) || 0.0
    vshielded_spend = Map.get(tx, :vShieldedSpend) || []
    vshielded_output = Map.get(tx, :vShieldedOutput) || []

    transparent_in_and_out(tx) && length(tx.vjoinsplit) == 0 && value_balance == 0.0 &&
      length(vshielded_spend) == 0 && length(vshielded_output) == 0
  end

  # Transparent activity on at least one side plus some shielded component.
  def is_mixed_tx?(tx) do
    t_in_or_out = length(tx.vin) > 0 or length(tx.vout) > 0
    t_in_or_out and (contains_sprout(tx) || contains_sapling(tx))
  end

  # Transparent inputs moving into the shielded pool.
  def is_shielding(tx) do
    tin_and_zout = length(tx.vin) > 0 and length(tx.vout) == 0
    tin_and_zout and (contains_sprout(tx) || contains_sapling(tx))
  end

  # Shielded funds moving out to transparent outputs.
  def is_deshielding(tx) do
    zin_and_tout = length(tx.vin) == 0 and length(tx.vout) > 0
    zin_and_tout and (contains_sprout(tx) || contains_sapling(tx))
  end

  # Classifies a transaction; checks are ordered from most to least specific.
  def tx_type(tx) do
    cond do
      is_coinbase_tx?(tx) ->
        "coinbase"

      is_mixed_tx?(tx) ->
        cond do
          is_shielding(tx) -> "shielding"
          is_deshielding(tx) -> "deshielding"
          true -> "mixed"
        end

      is_shielded_tx?(tx) ->
        "shielded"

      is_transparent_tx?(tx) ->
        "transparent"

      true ->
        "unknown"
    end
  end
end
|
lib/zcash_explorer_web/views/block_view.ex
| 0.636579
| 0.414277
|
block_view.ex
|
starcoder
|
defmodule Jorb.Job do
  @moduledoc ~S"""
  Modules that `use Jorb.Job` can enqueue, read, and perform jobs.
  These modules must implement the `Jorb.Job` behaviour.
  In addition to the callbacks defined in `Jorb.Job`, these modules also
  export the `enqueue/2`, `work/1`, and `workers/1` functions. See the
  documentation below for `enqueue/3`, `work/2`, and `workers/2`.
  See `Jorb` for more documentation.
  """

  @doc ~S"""
  List of queues to fetch jobs from, given in highest-priority-first order.
  """
  @callback read_queues :: [Jorb.queue()]

  @doc ~S"""
  Queue to write to, for the given payload.
  Implement this or `c:write_queue/0`.
  """
  @callback write_queue(any) :: Jorb.queue()

  @doc ~S"""
  Queue to write to.
  Implement this or `c:write_queue/1`.
  """
  @callback write_queue :: Jorb.queue()

  @doc ~S"""
  Performs the given work. Behind the scenes, the message from which the
  work originated will be deleted from the queue if this function returns
  `:ok`.
  """
  @callback perform(any) :: :ok | :error

  @optional_callbacks write_queue: 0, write_queue: 1

  @doc false
  defmacro __using__(_opts) do
    quote do
      @behaviour Jorb.Job

      # Default implementations: write_queue/1 delegates to write_queue/0,
      # which raises unless the using module overrides one of the pair.
      def write_queue(_payload), do: write_queue()
      def write_queue(), do: raise("either write_queue/1 or write_queue/0 must be defined")
      defoverridable Jorb.Job

      @doc ~S"""
      Queue a job to be performed by this module's `perform/1` function
      later.
      """
      @spec enqueue(any, Keyword.t()) :: :ok | {:error, String.t()}
      def enqueue(payload, opts \\ []), do: Jorb.Job.enqueue(__MODULE__, payload, opts)

      @doc ~S"""
      Attempt to fetch jobs to do, reading from the first item in
      `read_queues/0` that has messages. For each message received,
      `perform/1` is invoked, deleting the message if the return value
      is `:ok`.
      """
      @spec work(Keyword.t()) :: :ok | {:error, String.t()}
      def work(opts \\ []), do: Jorb.Job.work(__MODULE__, opts)

      @doc ~S"""
      Returns a list of child specs for GenServers that read (
      execute `work(opts)` forever) and write (flush batches of outgoing
      messages).
      """
      @spec workers(Keyword.t()) :: [:supervisor.child_spec()]
      def workers(opts \\ []), do: Jorb.Job.workers(__MODULE__, opts)
    end
  end

  @doc ~S"""
  Queue a job to be performed by this module's `perform/1` function
  later.
  Intended for use through modules that `use Jorb.Job`.
  """
  @spec enqueue(atom, any, Keyword.t()) :: :ok | {:error, String.t()}
  def enqueue(module, payload, opts) do
    # The envelope records which module's perform/1 should handle the payload.
    message = %{"target" => module, "body" => payload}
    queue = module.write_queue(payload)
    Jorb.Writer.enqueue(queue, message, opts, module)
  end

  @doc ~S"""
  Returns a list of child specs for GenServers that read (
  execute `work(opts)` forever) and write (flush batches of outgoing
  messages).
  Intended for use through modules that `use Jorb.Job`.
  """
  @spec workers(atom, Keyword.t()) :: [:supervisor.child_spec()]
  def workers(module, opts) do
    reader_count = Jorb.config(:reader_count, opts, module)
    writer_count = Jorb.config(:writer_count, opts, module)
    write_queues = Jorb.config(:write_queues, opts, module) || []

    # One reader child spec per configured reader process.
    readers =
      case reader_count do
        0 ->
          []

        _ ->
          for i <- 1..reader_count do
            %{
              id: {module, Jorb.Reader, i},
              start: {Jorb.Reader, :start_link, [[{:module, module} | opts]]},
              type: :worker,
              restart: :permanent,
              shutdown: 5000
            }
          end
      end

    # One writer child spec per (writer index, queue) pair; each gets its own
    # batch_key so outgoing batches are flushed independently.
    writers =
      case writer_count do
        0 ->
          []

        _ ->
          for i <- 1..writer_count,
              queue <- write_queues do
            batch_key = {queue, module, i}
            opts = [{:batch_key, batch_key}, {:queue, queue}, {:module, module} | opts]

            %{
              id: {module, Jorb.Writer, queue, i},
              start: {Jorb.Writer, :start_link, [opts]},
              type: :worker,
              restart: :permanent,
              shutdown: 5000
            }
          end
      end

    readers ++ writers
  end

  @doc ~S"""
  Attempt to fetch jobs to do, reading from the first item in
  `read_queues/0` that has messages. For each message received,
  `perform/1` is invoked, deleting the message if the return value
  is `:ok`.
  Intended for use through modules that `use Jorb.Job`.
  """
  @spec work(atom, Keyword.t()) :: :ok | {:error, String.t()}
  def work(module, opts) do
    queues = module.read_queues()
    duration = Jorb.config(:read_duration, opts, module)
    batch_size = Jorb.config(:read_batch_size, opts, module)
    read_timeout = Jorb.config(:read_timeout, opts, module)
    perform_timeout = Jorb.config(:perform_timeout, opts, module)

    read_opts = [
      read_duration: duration,
      read_batch_size: batch_size,
      read_timeout: read_timeout
    ]

    case read_from_queues(queues, read_opts, module) do
      {:ok, messages, queue} ->
        # Run perform/1 for each message concurrently; tasks that have not
        # finished within perform_timeout are shut down (their messages stay
        # on the queue and will be redelivered).
        tasks = Enum.map(messages, &performance_task(&1, queue, opts, module))

        Task.yield_many(tasks, perform_timeout)
        |> Enum.each(fn {task, result} ->
          if result == nil, do: Task.shutdown(task)
        end)

        :ok

      :none ->
        :ok

      {:error, e} ->
        {:error, e}
    end
  end

  # @spec performance_task(message, queue, Keyword.t(), atom) :: Task.t()
  defp performance_task(message, queue, opts, module) do
    backend = Jorb.config(:backend, opts, module)
    {:ok, body} = backend.message_body(message)
    payload = body["body"]

    # The target module may arrive serialized as a string; only resolve to an
    # existing atom to avoid creating atoms from external input.
    job_module =
      case body["target"] do
        target when is_binary(target) -> String.to_existing_atom(target)
        target -> target
      end

    Task.async(fn ->
      # Delete the message only on a successful :ok from perform/1.
      case job_module.perform(payload) do
        :ok -> backend.delete_message(queue, message, opts)
        _ -> :oh_well
      end
    end)
  end

  # @spec read_from_queues([Jorb.queue], Keyword.t(), atom) ::
  #         {:ok, [Jorb.message], Jorb.queue} | :none | {:error, String.t()}
  # Tries each queue in priority order, returning the first non-empty batch.
  defp read_from_queues([], _opts, _module), do: :none

  defp read_from_queues([queue | rest], opts, module) do
    backend = Jorb.config(:backend, opts, module)

    case backend.read_messages(queue, opts) do
      {:ok, []} -> read_from_queues(rest, opts, module)
      {:ok, messages} -> {:ok, messages, queue}
      error -> error
    end
  end
end
|
lib/jorb/job.ex
| 0.896027
| 0.429489
|
job.ex
|
starcoder
|
# Generated protobuf schema (see the protoc_gen_elixir_version option below);
# field declarations should be regenerated from the .proto, not edited by hand.
defmodule Benchmarks.Proto2.GoogleMessage2.Group1 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.9.0-dev", syntax: :proto2

  field :field11, 11, required: true, type: :float
  field :field26, 26, optional: true, type: :float
  field :field12, 12, optional: true, type: :string
  field :field13, 13, optional: true, type: :string
  field :field14, 14, repeated: true, type: :string
  field :field15, 15, required: true, type: :uint64
  field :field5, 5, optional: true, type: :int32
  field :field27, 27, optional: true, type: :string
  field :field28, 28, optional: true, type: :int32
  field :field29, 29, optional: true, type: :string
  field :field16, 16, optional: true, type: :string
  field :field22, 22, repeated: true, type: :string
  field :field73, 73, repeated: true, type: :int32
  field :field20, 20, optional: true, type: :int32, default: 0
  field :field24, 24, optional: true, type: :string
  field :field31, 31, optional: true, type: Benchmarks.Proto2.GoogleMessage2GroupedMessage
end
# Generated protobuf schema (see the protoc_gen_elixir_version option below);
# field declarations should be regenerated from the .proto, not edited by hand.
defmodule Benchmarks.Proto2.GoogleMessage2 do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.9.0-dev", syntax: :proto2

  field :field1, 1, optional: true, type: :string
  field :field3, 3, optional: true, type: :int64
  field :field4, 4, optional: true, type: :int64
  field :field30, 30, optional: true, type: :int64
  field :field75, 75, optional: true, type: :bool, default: false
  field :field6, 6, optional: true, type: :string
  field :field2, 2, optional: true, type: :bytes
  field :field21, 21, optional: true, type: :int32, default: 0
  field :field71, 71, optional: true, type: :int32
  field :field25, 25, optional: true, type: :float
  field :field109, 109, optional: true, type: :int32, default: 0
  field :field210, 210, optional: true, type: :int32, default: 0
  field :field211, 211, optional: true, type: :int32, default: 0
  field :field212, 212, optional: true, type: :int32, default: 0
  field :field213, 213, optional: true, type: :int32, default: 0
  field :field216, 216, optional: true, type: :int32, default: 0
  field :field217, 217, optional: true, type: :int32, default: 0
  field :field218, 218, optional: true, type: :int32, default: 0
  field :field220, 220, optional: true, type: :int32, default: 0
  field :field221, 221, optional: true, type: :int32, default: 0
  field :field222, 222, optional: true, type: :float, default: 0.0
  field :field63, 63, optional: true, type: :int32
  field :group1, 10, repeated: true, type: :group
  field :field128, 128, repeated: true, type: :string
  field :field131, 131, optional: true, type: :int64
  field :field127, 127, repeated: true, type: :string
  field :field129, 129, optional: true, type: :int32
  field :field130, 130, repeated: true, type: :int64
  field :field205, 205, optional: true, type: :bool, default: false
  field :field206, 206, optional: true, type: :bool, default: false
end
# Generated protobuf schema (see the protoc_gen_elixir_version option below);
# field declarations should be regenerated from the .proto, not edited by hand.
defmodule Benchmarks.Proto2.GoogleMessage2GroupedMessage do
  @moduledoc false
  use Protobuf, protoc_gen_elixir_version: "0.9.0-dev", syntax: :proto2

  field :field1, 1, optional: true, type: :float
  field :field2, 2, optional: true, type: :float
  field :field3, 3, optional: true, type: :float, default: 0.0
  field :field4, 4, optional: true, type: :bool
  field :field5, 5, optional: true, type: :bool
  field :field6, 6, optional: true, type: :bool, default: true
  field :field7, 7, optional: true, type: :bool, default: false
  field :field8, 8, optional: true, type: :float
  field :field9, 9, optional: true, type: :bool
  field :field10, 10, optional: true, type: :float
  field :field11, 11, optional: true, type: :int64
end
|
bench/lib/datasets/google_message2/benchmark_message2.pb.ex
| 0.609408
| 0.448849
|
benchmark_message2.pb.ex
|
starcoder
|
defmodule ExAws.S3.Crypto do
@moduledoc """
`ExAws.S3.Crypto` provides [client-side encryption](https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingClientSideEncryption.html) support for
[Amazon S3](https://aws.amazon.com/s3/). It allows you to encrypt data before sending it to S3. This particular implementation
currently supports a [AWS KMS-managed customer master key](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#master_keys)
and assumes you have one already generated.
This library makes heavy use of the existing [ex_aws_s3](https://hex.pm/packages/ex_aws_s3) library
and Erlang's [crypto module](http://erlang.org/doc/man/crypto.html). It has confirmed compatability with the [Golang AWS SDK client-encryption
library](https://github.com/aws/aws-sdk-go/tree/master/service/s3) and uses [AES](https://en.wikipedia.org/wiki/Advanced_Encryption_Standard)
[GCM](https://en.wikipedia.org/wiki/Galois/Counter_Mode) with 256-bit keys by default.
## Examples
First, make sure you have the id for your master key (should be of the form of a UUID, like `<KEY>`) and the
bucket you're using already set up. You should be able to make requests using `ExAws` (see
[the ExAws docs](https://hexdocs.pm/ex_aws/ExAws.html#module-aws-key-configuration) for configuration instructions).
To encrypt and upload an object, it's easy as pie.
bucket = "my-awesome-bucket"
key_id = "<KEY>"
contents = "this is some special text that should be secret"
# Encrypt, then upload object
request = ExAws.S3.put_object(bucket, "secret.txt.enc", contents)
{:ok, encrypted_request} = ExAws.S3.Crypto.encrypt(request, key_id)
ExAws.request(encrypted_request)
# Or, use this shorter version of above
ExAws.S3.Crypto.put_encrypted_object(bucket, "secret.txt.enc", contents, key_id)
Decrypting is easy too, and doesn't even require knowing the original key id.
# get encrypted object, then decrypt
{:ok, encrypted} = ExAws.S3.get_object(bucket, "secret.txt.enc") |> ExAws.request
{:ok, decrypted} = ExAws.S3.Crypto.decrypt(encrypted)
IO.puts decrypted.body
# Or, use this shorter version of above
{:ok, decrypted} = ExAws.S3.Crypto.get_encrypted_object(bucket, "secret.txt.enc")
IO.puts decrypted.body
"""
alias ExAws.S3.Crypto.{AESGCMCipher, KMSWrapper}
import ExAws.S3.Utils, only: [put_object_headers: 1]
@doc """
Encrypt and then create an object within a bucket. This merely wraps creating a `ExAws.Operation.S3` request, calling `encrypt/3`, and uploading to S3
via a call to `ExAws.request/1`.
For example:
bucket = "my-awesome-bucket"
key_id = "<KEY>"
contents = "this is some special text that should be secret"
ExAws.S3.Crypto.put_encrypted_object(bucket, "secret.txt.enc", contents, key_id)
"""
@spec put_encrypted_object(
bucket :: binary,
object :: binary,
body :: binary,
key_id :: binary,
opts :: ExAws.S3.put_object_opts()
) :: ExAws.Request.response_t()
def put_encrypted_object(bucket, object, body, key_id, opts \\ []) do
bucket
|> ExAws.S3.put_object(object, body, opts)
|> encrypt(key_id, opts)
|> case do
{:ok, request} ->
ExAws.request(request)
err ->
err
end
end
@doc """
Get an object from a bucket and then decrypt the body. This merely wraps sending a `ExAws.S3.get_object/3` request and then calling `decrypt/1` with
the results.
For example:
{:ok, decrypted} = ExAws.S3.Crypto.get_encrypted_object("my-awesome-bucket", "secret.txt.enc")
IO.puts decrypted.body
"""
@spec get_encrypted_object(
bucket :: binary,
object :: binary,
opts :: ExAws.S3.get_object_opts()
) :: ExAws.Request.response_t()
def get_encrypted_object(bucket, object, opts \\ []) do
bucket
|> ExAws.S3.get_object(object, opts)
|> ExAws.request()
|> case do
{:ok, response} ->
decrypt(response)
err ->
err
end
end
@type supported_cipher :: :aes_gcm
@type encrypt_opts :: [{:cipher, supported_cipher}]

@doc """
Modify a `ExAws.Operation.S3` put operation by encrypting the body with a key generated
from KMS using the given master key_id.
For example:

    bucket = "my-awesome-bucket"
    key_id = "<KEY>"
    contents = "this is some special text that should be secret"

    # Encrypt, then upload object
    request = ExAws.S3.put_object(bucket, "secret.txt.enc", contents)
    {:ok, encrypted_request} = ExAws.S3.Crypto.encrypt(request, key_id)
    ExAws.request(encrypted_request)
"""
@spec encrypt(operation :: ExAws.Operation.S3.t(), key_id :: binary, opts :: encrypt_opts) ::
        ExAws.Operation.S3.t()
def encrypt(%ExAws.Operation.S3{http_method: :put} = operation, key_id, opts \\ []) do
  cipher = Keyword.get(opts, :cipher, :aes_gcm)

  # A failed key generation falls through the `with` and is returned as-is.
  with {:ok, {encrypted_keyblob, key}} <- KMSWrapper.generate_data_key(key_id) do
    update_request(operation, encrypted_keyblob, key, key_id, cipher)
  end
end
@doc """
Take the result of a `ExAws.S3.get_object/3` and replace the body with the decrypted value.
For example:
bucket = "my-awesome-bucket"
key_id = "<KEY>"
# get encrypted object, then decrypt
{:ok, encrypted} = ExAws.S3.get_object(bucket, "secret.txt.enc") |> ExAws.request
{:ok, decrypted} = ExAws.S3.Crypto.decrypt(encrypted)
IO.puts decrypted.body
"""
@spec decrypt(response :: ExAws.Request.response_t()) :: ExAws.Request.response_t()
def decrypt(%{body: body, headers: headers} = response) do
case decrypt_body(body, Map.new(headers)) do
{:ok, decrypted} ->
{:ok, %{response | body: decrypted}}
err ->
err
end
end
# Decrypts an object body using the AWS S3 client-side-encryption metadata
# headers (v2 key envelope, KMS-wrapped key, AES-GCM content cipher).
defp decrypt_body(
       body,
       %{
         "x-amz-meta-x-amz-cek-alg" => "AES/GCM/NoPadding",
         "x-amz-meta-x-amz-iv" => encoded_iv,
         "x-amz-meta-x-amz-key-v2" => encrypted_keyblob,
         "x-amz-meta-x-amz-matdesc" => matdesc
       } = headers
     ) do
  # The material description is a JSON-encoded KMS encryption context; it must
  # match the context used when the data key was generated, or decryption of
  # the key blob fails. A non-matching step's error tuple is returned as-is
  # (`with` needs no pass-through `else`).
  with {:ok, context} <- Jason.decode(matdesc),
       {:ok, key} <- KMSWrapper.decrypt_key(encrypted_keyblob, context),
       {:ok, decrypted} <- AESGCMCipher.decrypt(key, body, :base64.decode(encoded_iv)),
       {:ok} <- validate_length(decrypted, headers) do
    {:ok, decrypted}
  end
end

# Fixed typo in the error message ("necssary" -> "necessary").
defp decrypt_body(_, _), do: {:error, "Object missing client-side encryption metadata necessary"}
# Checks the decrypted payload against the original plaintext length recorded
# in the object metadata (when present). Returns {:ok} on success to match
# the pattern expected by decrypt_body/2.
defp validate_length(decrypted, %{"x-amz-meta-x-amz-unencrypted-content-length" => length}) do
  expected = String.to_integer(length)
  # Compute the byte size once and reuse it (the original recomputed it).
  bytes = byte_size(decrypted)

  cond do
    bytes == expected ->
      {:ok}

    String.length(decrypted) == expected ->
      # due to a bug in the way size was previously calculated (using String.length) don't
      # error if the String length of the decrypted result matches the expected value
      {:ok}

    true ->
      {:error, "Decrypted body size (#{bytes}) is not size expected in headers (#{expected})"}
  end
end

# No recorded plaintext length in the metadata -> nothing to validate.
defp validate_length(_decrypted, _headers), do: {:ok}
# Encrypts the operation body with AES-GCM and records the key envelope in the
# x-amz-* metadata headers so the object can later be decrypted client-side.
defp update_request(
       %ExAws.Operation.S3{headers: headers, body: contents} = operation,
       encrypted_keyblob,
       key,
       key_id,
       :aes_gcm
     )
     when is_binary(contents) do
  {:ok, {ciphertext, iv}} = AESGCMCipher.encrypt(key, contents)

  # these are based on the values in the reference implementation here:
  # https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/s3/package-summary.html
  metadata = [
    {"x-amz-key-v2", encrypted_keyblob},
    {"x-amz-iv", :base64.encode(iv)},
    {"x-amz-unencrypted-content-length", byte_size(contents)},
    {"x-amz-cek-alg", "AES/GCM/NoPadding"},
    {"x-amz-wrap-alg", "kms"},
    {"x-amz-matdesc", Jason.encode!(%{kms_cmk_id: key_id})},
    {"x-amz-tag-len", "128"}
  ]

  updated_headers =
    headers
    |> Map.merge(put_object_headers(meta: metadata))
    |> Map.put("content-type", "binary/octet-stream")

  {:ok, %ExAws.Operation.S3{operation | headers: updated_headers, body: ciphertext}}
end

# Anything other than :aes_gcm (or a non-binary body) is rejected.
defp update_request(_request, _encrypted_keyblob, _key, _key_id, cipher),
  do: {:error, "Cipher #{cipher} is not supported"}
end
|
lib/ex_aws_s3_crypto.ex
| 0.863147
| 0.54256
|
ex_aws_s3_crypto.ex
|
starcoder
|
defmodule Roadtrip.Garage.Vin do
  @moduledoc """
  Provides utilities for working with (North American) VIN sequences.
  """

  @doc "The fixed length of a VIN: 17 characters."
  def length(), do: 17

  @doc "Per-position weight factors used by the North American checksum."
  def na_weights(), do: [8, 7, 6, 5, 4, 3, 2, 10, 0, 9, 8, 7, 6, 5, 4, 3, 2]

  @doc """
  Provides a value that can be used in `<input pattern=… />` HTML tags.
  """
  @spec html_input_pattern() :: String.t()
  def html_input_pattern(), do: "[0-9A-HJ-NPR-Za-hj-npr-z]{#{length()}}"

  @doc """
  Computes the North-American checksum for a VIN.
  This uses the standard North American checksum algorithm, which is explained
  in detail on [Wikipedia][0].
  ## Examples
  ```elixir
  iex> Roadtrip.Garage.Vin.na_checksum("4S4BSANC6K3352864")
  {:ok, "4S4BSANC6K3352864"}
  iex> Roadtrip.Garage.Vin.na_checksum("4S4BSANC6K335286") # digit deleted
  {:error, "VINs must be 17 ASCII alphanumeric digits (excepting I, O, and Q)"}
  iex> Roadtrip.Garage.Vin.na_checksum("4S4BSANC7K3352864") # checksum changed
  {:warn, "VIN checksum does not match"}
  iex> Roadtrip.Garage.Vin.na_checksum("abcdefghijklmnopq")
  {:error, "Letter `i` is outside the valid VIN character set"}
  ```
  [0]: https://en.wikipedia.org/wiki/Vehicle_identification_number#Check-digit_calculation
  """
  @spec na_checksum(String.t()) :: {:ok | :warn | :error, String.t()}
  def na_checksum(vin) when is_binary(vin) do
    cond do
      vin |> String.length() != length() ->
        {:error, "VINs must be #{length()} ASCII alphanumeric digits (excepting I, O, and Q)"}

      # check_digit/1 always produces an upper-case digit ("X" for 10), but the
      # rest of this module accepts lower-case VINs, so compare the stored check
      # digit (position 9, index 8) case-insensitively. Previously a lower-case
      # "x" check digit produced a false :warn.
      vin |> weigh() |> Integer.mod(11) |> check_digit() !=
          vin |> String.at(8) |> String.upcase() ->
        {:warn, "VIN checksum does not match"}

      true ->
        {:ok, vin}
    end
  rescue
    # transliterate/1 raises ArgumentError for characters outside the VIN set;
    # surface that as an :error tuple instead of crashing.
    err in ArgumentError -> {:error, err.message}
  end

  @doc """
  Modifies a VIN so that it passes the North American checksum computation.
  Raises `ArgumentError` when the input is not structurally a valid VIN.
  """
  @spec write_na_checksum(String.t()) :: String.t()
  def write_na_checksum(vin) when is_binary(vin) do
    case vin |> na_checksum() do
      {:ok, vin} ->
        vin

      # Match on the tag only: the original matched the exact warning string,
      # which would break this clause if the message wording ever changed.
      {:warn, _message} ->
        # Position 9 (index 8) carries weight 0, so the existing (wrong) check
        # digit does not affect the recomputed weight.
        digit = vin |> weigh() |> Integer.mod(11) |> check_digit()
        front = vin |> String.slice(0..7)
        back = vin |> String.slice(9..16)
        front <> digit <> back

      {:error, msg} ->
        raise ArgumentError, msg
    end
  end

  @doc """
  Computes the total “weight” for a VIN sequence. The algorithm maps each digit
  in the VIN down to a number in 1-9, multiplies it by a weighting factor
  corresponding to its position in the VIN, then sums them all together.
  The total weight is reduced by modulus-11 and translated back into a VIN digit
  for checksumming.
  """
  @spec weigh(String.t()) :: integer()
  def weigh(vin) when is_binary(vin), do: weigh(vin, na_weights())

  @spec weigh(String.t(), [integer()]) :: integer()
  def weigh(vin_fragment, weights_fragment)
      when is_binary(vin_fragment) and is_list(weights_fragment) do
    case {vin_fragment |> String.next_codepoint(), weights_fragment} do
      # Either the VIN or the weight list ran out: nothing more to add.
      {nil, _} ->
        0

      {_, []} ->
        0

      {{letter, vin_rest}, [weight | weights_rest]} ->
        transliterate(letter) * weight + weigh(vin_rest, weights_rest)
    end
  end

  @doc """
  Transliterates VIN digits (ASCII alphanumeric except I/O/Q) into their
  corresponding numeric value. Raises `ArgumentError` for any other character.
  """
  @spec transliterate(binary) :: 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9
  def transliterate("0"), do: 0

  def transliterate(letter) when is_binary(letter) do
    cond do
      "1AaJj" |> String.contains?(letter) -> 1
      "2BbKkSs" |> String.contains?(letter) -> 2
      "3CcLlTt" |> String.contains?(letter) -> 3
      "4DdMmUu" |> String.contains?(letter) -> 4
      "5EeNnVv" |> String.contains?(letter) -> 5
      "6FfWw" |> String.contains?(letter) -> 6
      "7GgPpXx" |> String.contains?(letter) -> 7
      "8HhYy" |> String.contains?(letter) -> 8
      "9RrZz" |> String.contains?(letter) -> 9
      true -> raise ArgumentError, "Letter `#{letter}` is outside the valid VIN character set"
    end
  end

  # Maps a modulus-11 weight back to a check-digit character (10 -> "X").
  @spec check_digit(integer()) :: String.t()
  defp check_digit(10), do: "X"
  defp check_digit(n) when n >= 0 and n < 10, do: to_string(n)
end
|
apps/roadtrip/lib/roadtrip/garage/vin.ex
| 0.926028
| 0.792544
|
vin.ex
|
starcoder
|
defmodule Ecto.Adapters.Postgres do
  @moduledoc """
  Adapter module for PostgreSQL.
  It uses `postgrex` for communicating to the database
  and a connection pool, such as `poolboy`.
  ## Features
  * Full query support (including joins, preloads and associations)
  * Support for transactions
  * Support for data migrations
  * Support for ecto.create and ecto.drop operations
  * Support for transactional tests via `Ecto.Adapters.SQL`
  ## Options
  Postgres options split in different categories described
  below. All options should be given via the repository
  configuration.
  ### Compile time options
  Those options should be set in the config file and require
  recompilation in order to make an effect.
  * `:adapter` - The adapter name, in this case, `Ecto.Adapters.Postgres`
  * `:name`- The name of the Repo supervisor process
  * `:pool` - The connection pool module, defaults to `Ecto.Pools.Poolboy`
  * `:pool_timeout` - The default timeout to use on pool calls, defaults to `5000`
  * `:timeout` - The default timeout to use on queries, defaults to `15000`
  * `:log_level` - The level to use when logging queries (default: `:debug`)
  ### Connection options
  * `:hostname` - Server hostname
  * `:port` - Server port (default: 5432)
  * `:username` - Username
  * `:password` - <PASSWORD>
  * `:parameters` - Keyword list of connection parameters
  * `:ssl` - Set to true if ssl should be used (default: false)
  * `:ssl_opts` - A list of ssl options, see Erlang's `ssl` docs
  * `:connect_timeout` - The timeout for establishing new connections (default: 5000)
  * `:extensions` - Specify extensions to the postgres adapter
  * `:after_connect` - A `{mod, fun, args}` to be invoked after a connection is established
  ### Storage options
  * `:encoding` - the database encoding (default: "UTF8")
  * `:template` - the template to create the database from
  * `:lc_collate` - the collation order
  * `:lc_ctype` - the character classification
  """

  # Inherit all behaviour from Ecto.Adapters.SQL
  use Ecto.Adapters.SQL, :postgrex

  # And provide a custom storage implementation
  @behaviour Ecto.Adapter.Storage

  ## Storage API

  @doc false
  def storage_up(opts) do
    database = Keyword.fetch!(opts, :database)
    encoding = Keyword.get(opts, :encoding, "UTF8")

    # BUG FIX: the original appended to `extra` inside bare `if` blocks
    # (`if t = ... do extra = extra <> ... end`). Rebindings inside `if` do not
    # leak out of the block in Elixir, so the :template, :lc_collate and
    # :lc_ctype options were silently ignored. Rebind at the outer scope.
    extra =
      if template = Keyword.get(opts, :template) do
        " TEMPLATE=#{template}"
      else
        ""
      end

    extra =
      if lc_collate = Keyword.get(opts, :lc_collate) do
        extra <> " LC_COLLATE='#{lc_collate}'"
      else
        extra
      end

    extra =
      if lc_ctype = Keyword.get(opts, :lc_ctype) do
        extra <> " LC_CTYPE='#{lc_ctype}'"
      else
        extra
      end

    command = "CREATE DATABASE \"#{database}\" " <>
              "ENCODING '#{encoding}'" <> extra

    case run_query(opts, command) do
      :ok ->
        :ok

      # Database already exists: report that distinctly rather than as an error.
      {:error, %{postgres: %{code: :duplicate_database}}} ->
        :already_up

      {:error, error} ->
        {:error, Exception.message(error)}
    end
  end

  @doc false
  def storage_down(opts) do
    database = Keyword.fetch!(opts, :database)
    command = "DROP DATABASE \"#{database}\""

    case run_query(opts, command) do
      :ok ->
        :ok

      # Database does not exist: report that distinctly rather than as an error.
      {:error, %{postgres: %{code: :invalid_catalog_name}}} ->
        :already_down

      {:error, error} ->
        {:error, Exception.message(error)}
    end
  end

  # Runs `sql` against the server's maintenance database ("template1") over a
  # single-use, unpooled connection, so storage_up/storage_down work even
  # before (or after) the target database exists.
  defp run_query(opts, sql) do
    opts =
      opts
      |> Keyword.delete(:name)
      |> Keyword.put(:database, "template1")
      |> Keyword.put(:pool, DBConnection.Connection)
      |> Keyword.put(:backoff_type, :stop)

    {:ok, pid} = Task.Supervisor.start_link

    # Run the query in a supervised task so a crashing connection surfaces as
    # an {:exit, reason} result instead of bringing down the caller.
    task = Task.Supervisor.async_nolink(pid, fn ->
      {:ok, conn} = Postgrex.start_link(opts)
      value = Ecto.Adapters.Postgres.Connection.query(conn, sql, [], opts)
      GenServer.stop(conn)
      value
    end)

    timeout = Keyword.get(opts, :timeout, 15_000)

    case Task.yield(task, timeout) || Task.shutdown(task) do
      {:ok, {:ok, _}} ->
        :ok
      {:ok, {:error, error}} ->
        {:error, error}
      {:exit, {%{__struct__: struct} = error, _}}
      when struct in [Postgrex.Error, DBConnection.Error] ->
        {:error, error}
      {:exit, reason} ->
        {:error, RuntimeError.exception(Exception.format_exit(reason))}
      nil ->
        {:error, RuntimeError.exception("command timed out")}
    end
  end

  @doc false
  def supports_ddl_transaction? do
    true
  end
end
|
lib/ecto/adapters/postgres.ex
| 0.783036
| 0.538073
|
postgres.ex
|
starcoder
|
# Register the project's custom Postgrex type module: the default Ecto/Postgres
# extensions plus PostGIS geometry support, with Jason as the JSON library.
Postgrex.Types.define(Plenario.PostgresTypes,
  [Geo.PostGIS.Extension] ++ Ecto.Adapters.Postgres.extensions(),
  json: Jason)

# Derive rendering/encoding for each Geo geometry struct:
# - Phoenix.HTML.Safe renders the geometry as WKT text,
# - Poison/Jason emit a GeoJSON-style Feature map.
for type <- [Geo.Point, Geo.Polygon, Geo.LineString, Geo.MultiPoint, Geo.MultiPolygon, Geo.MultiLineString] do
  defimpl Phoenix.HTML.Safe, for: type, do: def to_iodata(geom), do: Geo.WKT.encode(geom)
  defimpl Poison.Encoder, for: type, do: def encode(geom, _), do: %{type: "Feature", geometry: Geo.JSON.encode(geom)} |> Poison.encode!()
  defimpl Jason.Encoder, for: type, do: def encode(geom, _), do: %{type: "Feature", geometry: Geo.JSON.encode(geom)} |> Jason.encode!()
end
defmodule Plenario.TsRange do
  @moduledoc """
  A timestamp range (PostgreSQL `tsrange`): a lower and an upper
  `NaiveDateTime` bound, each of which may be inclusive or exclusive.
  Implements the `Ecto.Type` behaviour and converts to/from `Postgrex.Range`.
  """

  alias Plenario.TsRange
  alias Postgrex.Range

  @typedoc """
  A timestamp range with per-bound inclusivity flags.
  """
  @type t :: %__MODULE__{
    lower: NaiveDateTime.t(),
    upper: NaiveDateTime.t(),
    lower_inclusive: boolean,
    upper_inclusive: boolean
  }

  defstruct lower: nil,
    upper: nil,
    lower_inclusive: true,
    upper_inclusive: true

  @doc """
  Builds a `TsRange` from two bounds. Each bound may be a `NaiveDateTime` or
  an Erlang-style datetime tuple (normalized via `from_erl/1`); both bounds
  default to inclusive.
  """
  @spec new(NaiveDateTime.t(), NaiveDateTime.t(), boolean, boolean) :: TsRange.t()
  def new(lower, upper, lower_inclusive \\ true, upper_inclusive \\ true) do
    %TsRange{
      lower: from_erl(lower),
      upper: from_erl(upper),
      lower_inclusive: lower_inclusive,
      upper_inclusive: upper_inclusive
    }
  end

  @doc """
  Converts a `Postgrex.Range` into a `TsRange`.
  """
  @spec from_postgrex(Range.t()) :: TsRange.t()
  def from_postgrex(range) do
    # from_erl/1 is applied both here and again inside new/1; it is a no-op
    # for values that are already NaiveDateTime, so the double pass is harmless.
    new(
      from_erl(range.lower),
      from_erl(range.upper),
      range.lower_inclusive,
      range.upper_inclusive
    )
  end

  @doc """
  Converts a `TsRange` into a `Postgrex.Range` for the database driver.
  """
  @spec to_postgrex(TsRange.t()) :: Range.t()
  def to_postgrex(range) do
    %Range{
      lower: range.lower,
      upper: range.upper,
      lower_inclusive: range.lower_inclusive,
      upper_inclusive: range.upper_inclusive
    }
  end

  # Normalizes a bound: NaiveDateTime passes through unchanged; Erlang datetime
  # tuples (with or without a microsecond component) are converted; anything
  # else — including invalid dates — becomes nil.
  defp from_erl(%NaiveDateTime{} = ndt), do: ndt

  defp from_erl({{y, m, d}, {h, i, s}}) do
    case NaiveDateTime.new(y, m, d, h, i, s, {0, 0}) do
      {:ok, n} -> n
      _ -> nil
    end
  end

  defp from_erl({{y, m, d}, {h, i, s, u}}) do
    case NaiveDateTime.new(y, m, d, h, i, s, {u, 0}) do
      {:ok, n} -> n
      _ -> nil
    end
  end

  defp from_erl(_), do: nil

  # Renders the range in mathematical interval notation, e.g. "[a, b)";
  # a nil bound renders as the empty string.
  defimpl String.Chars, for: Plenario.TsRange do
    @spec to_string(TsRange.t()) :: String.t()
    def to_string(r) do
      lb =
        case r.lower_inclusive do
          true -> "["
          false -> "("
        end
      ub =
        case r.upper_inclusive do
          true -> "]"
          false -> ")"
        end
      lo =
        case r.lower do
          nil -> ""
          _ -> "#{r.lower}"
        end
      hi =
        case r.upper do
          nil -> ""
          _ -> "#{r.upper}"
        end
      "#{lb}#{lo}, #{hi}#{ub}"
    end
  end

  # JSON encoding: bounds are serialized as ISO-8601-like strings (no zone).
  defimpl Poison.Encoder, for: Plenario.TsRange do
    def encode(range, opts) do
      Poison.Encoder.Map.encode(
        %{
          lower_inclusive: range.lower_inclusive,
          upper_inclusive: range.upper_inclusive,
          lower: Timex.format!(range.lower, "%Y-%m-%dT%H:%M:%S", :strftime),
          upper: Timex.format!(range.upper, "%Y-%m-%dT%H:%M:%S", :strftime)
        },
        opts
      )
    end
  end

  # Inverse of the encoder above: parses the string bounds back into datetimes.
  defimpl Poison.Decoder, for: Plenario.TsRange do
    def decode(tasks, _opts) do
      Map.update!(tasks, :lower, &Timex.parse!(&1, "%Y-%m-%dT%H:%M:%S", :strftime))
      |> Map.update!(:upper, &Timex.parse!(&1, "%Y-%m-%dT%H:%M:%S", :strftime))
    end
  end

  # HTML rendering delegates to the String.Chars implementation above.
  defimpl Phoenix.HTML.Safe, for: Plenario.TsRange, do: def to_iodata(r), do: "#{r}"

  # Jason encoding delegates to the Poison encoder for a single source of truth.
  defimpl Jason.Encoder, for: Plenario.TsRange do
    def encode(range, _), do: Poison.encode!(range)
  end

  ## Ecto.Type callbacks: stored as a Postgrex.Range, loaded as a TsRange.

  @behaviour Ecto.Type

  @doc false
  def type, do: :tsrange

  @doc false
  def cast(nil), do: {:ok, nil}
  def cast(%Range{} = r), do: {:ok, r}
  def cast(%TsRange{} = r), do: {:ok, to_postgrex(r)}
  def cast(_), do: :error

  @doc false
  def load(nil), do: {:ok, nil}
  def load(%Range{} = r), do: {:ok, from_postgrex(r)}
  def load(%TsRange{} = r), do: {:ok, r}
  def load(_), do: :error

  @doc false
  def dump(nil), do: {:ok, nil}
  def dump(%Range{} = r), do: {:ok, r}
  def dump(%TsRange{} = r), do: {:ok, to_postgrex(r)}
  def dump(_), do: :error
end
defmodule Plenario.Extensions.TsRange do
  @moduledoc false

  # NOTE(review): `use Bitwise` is deprecated in recent Elixir versions in
  # favour of `import Bitwise`; left as-is to match the project's Elixir
  # version — confirm before changing.
  use Bitwise, only_operators: true
  import Postgrex.BinaryUtils, warn: false

  @behaviour Postgrex.SuperExtension

  # Flag bits of PostgreSQL's binary range wire format (one flag byte precedes
  # the encoded bounds).
  @range_empty 0x01
  @range_lb_inc 0x02
  @range_ub_inc 0x04
  @range_lb_inf 0x08
  @range_ub_inf 0x10

  def init(_), do: nil

  # This extension handles only the built-in `tsrange` type.
  def matching(_), do: [type: "tsrange"]

  def format(_), do: :super_binary

  def oids(%Postgrex.TypeInfo{base_type: oid}, _), do: [oid]

  # Quoted encoder clauses injected into the generated type module: encode both
  # bounds with the sub-type's encoder, then delegate to encode/4 below for the
  # flag byte and framing.
  def encode(_) do
    quote location: :keep do
      %Plenario.TsRange{lower: lower, upper: upper} = range, [oid], [type] ->
        # encode_value/2 defined by TypeModule
        lower = encode_value(lower, type)
        upper = encode_value(upper, type)
        unquote(__MODULE__).encode(range, oid, lower, upper)
      other, _, _ ->
        raise ArgumentError, Postgrex.Utils.encode_msg(other, Postgrex.Range)
    end
  end

  # Quoted decoder clauses: split the flag byte from the payload, decode zero,
  # one, or two bounds with the sub-type's decoder, then delegate to decode/4.
  # decode_list/2 yields elements in reverse order, hence the [upper, lower]
  # pattern being flipped back to [lower, upper].
  def decode(_) do
    quote location: :keep do
      <<len::int32, binary::binary-size(len)>>, [oid], [type] ->
        <<flags, data::binary>> = binary
        # decode_list/2 and @null defined by TypeModule
        case decode_list(data, type) do
          [upper, lower] ->
            unquote(__MODULE__).decode(flags, oid, [lower, upper], @null)
          empty_or_one ->
            unquote(__MODULE__).decode(flags, oid, empty_or_one, @null)
        end
    end
  end

  # helpers

  # Builds the wire payload for a range: a 4-byte length, a flag byte, then the
  # encoded bounds. A bound encoded as <<-1::int32>> is the NULL sentinel; it
  # contributes an "infinite bound" flag instead of payload bytes.
  def encode(
        %Plenario.TsRange{lower_inclusive: lower_inc, upper_inclusive: upper_inc},
        _oid,
        lower,
        upper
      ) do
    flags = 0

    {flags, bin} =
      if lower == <<-1::int32>> do
        {flags ||| @range_lb_inf, ""}
      else
        {flags, lower}
      end

    {flags, bin} =
      if upper == <<-1::int32>> do
        {flags ||| @range_ub_inf, bin}
      else
        {flags, [bin | upper]}
      end

    flags =
      if lower_inc do
        flags ||| @range_lb_inc
      else
        flags
      end

    flags =
      if upper_inc do
        flags ||| @range_ub_inc
      else
        flags
      end

    # The +1 accounts for the flag byte that precedes the bound data.
    [<<IO.iodata_length(bin) + 1::int32>>, flags | bin]
  end

  # An empty range carries no bound data and decodes to a TsRange with both
  # bounds set to the driver's null value.
  def decode(flags, _oid, [], null) when (flags &&& @range_empty) != 0 do
    %Plenario.TsRange{lower: null, upper: null}
  end

  def decode(flags, _oid, elems, null) do
    # Infinite bounds carry no data on the wire, so only consume a decoded
    # element when the corresponding "infinite" flag is clear.
    {lower, elems} =
      if (flags &&& @range_lb_inf) != 0 do
        {null, elems}
      else
        [lower | rest] = elems
        {lower, rest}
      end

    # Matching {upper, []} asserts every decoded element has been consumed.
    {upper, []} =
      if (flags &&& @range_ub_inf) != 0 do
        {null, elems}
      else
        [upper | rest] = elems
        {upper, rest}
      end

    lower_inclusive = (flags &&& @range_lb_inc) != 0
    upper_inclusive = (flags &&& @range_ub_inc) != 0

    %Plenario.TsRange{
      lower: lower,
      upper: upper,
      lower_inclusive: lower_inclusive,
      upper_inclusive: upper_inclusive
    }
  end
end
|
lib/plenario/postgres_extensions.ex
| 0.696887
| 0.517693
|
postgres_extensions.ex
|
starcoder
|
defmodule AWS.IoT do
@moduledoc """
AWS IoT
AWS IoT provides secure, bi-directional communication between
Internet-connected devices (such as sensors, actuators, embedded devices,
or smart appliances) and the AWS cloud. You can discover your custom
IoT-Data endpoint to communicate with, configure rules for data processing
and integration with other services, organize resources associated with
each device (Registry), configure logging, and create and manage policies
and credentials to authenticate devices.
For more information about how AWS IoT works, see the [Developer
Guide](https://docs.aws.amazon.com/iot/latest/developerguide/aws-iot-how-it-works.html).
For information about how to use the credentials provider for AWS IoT, see
[Authorizing Direct Calls to AWS
Services](https://docs.aws.amazon.com/iot/latest/developerguide/authorizing-direct-aws.html).
"""
@doc """
Accepts a pending certificate transfer. The default state of the
certificate is INACTIVE.
To check for pending certificate transfers, call `ListCertificates` to
enumerate your certificates.
"""
def accept_certificate_transfer(client, certificate_id, input, options \\ []) do
url = "/accept-certificate-transfer/#{URI.encode(certificate_id)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
Adds a thing to a billing group.
"""
def add_thing_to_billing_group(client, input, options \\ []) do
url = "/billing-groups/addThingToBillingGroup"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Adds a thing to a thing group.
"""
def add_thing_to_thing_group(client, input, options \\ []) do
url = "/thing-groups/addThingToThingGroup"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Associates a group with a continuous job. The following criteria must be
met:
<ul> <li> The job must have been created with the `targetSelection` field
set to "CONTINUOUS".
</li> <li> The job status must currently be "IN_PROGRESS".
</li> <li> The total number of targets associated with a job must not
exceed 100.
</li> </ul>
"""
def associate_targets_with_job(client, job_id, input, options \\ []) do
url = "/jobs/#{URI.encode(job_id)}/targets"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Attaches a policy to the specified target.
"""
def attach_policy(client, policy_name, input, options \\ []) do
url = "/target-policies/#{URI.encode(policy_name)}"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Attaches the specified policy to the specified principal (certificate or
other credential).
**Note:** This API is deprecated. Please use `AttachPolicy` instead.
"""
def attach_principal_policy(client, policy_name, input, options \\ []) do
url = "/principal-policies/#{URI.encode(policy_name)}"
headers = []
if Dict.has_key?(input, "principal") do
headers = [{"x-amzn-iot-principal", input["principal"]}|headers]
input = Dict.delete(input, "principal")
end
request(client, :put, url, headers, input, options, nil)
end
@doc """
Associates a Device Defender security profile with a thing group or with
this account. Each thing group or account can have up to five security
profiles associated with it.
"""
def attach_security_profile(client, security_profile_name, input, options \\ []) do
url = "/security-profiles/#{URI.encode(security_profile_name)}/targets"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Attaches the specified principal to the specified thing. A principal can be
X.509 certificates, IAM users, groups, and roles, Amazon Cognito identities
or federated identities.
"""
def attach_thing_principal(client, thing_name, input, options \\ []) do
url = "/things/#{URI.encode(thing_name)}/principals"
headers = []
if Dict.has_key?(input, "principal") do
headers = [{"x-amzn-principal", input["principal"]}|headers]
input = Dict.delete(input, "principal")
end
request(client, :put, url, headers, input, options, nil)
end
@doc """
Cancels an audit that is in progress. The audit can be either scheduled or
on-demand. If the audit is not in progress, an "InvalidRequestException"
occurs.
"""
def cancel_audit_task(client, task_id, input, options \\ []) do
url = "/audit/tasks/#{URI.encode(task_id)}/cancel"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Cancels a pending transfer for the specified certificate.
**Note** Only the transfer source account can use this operation to cancel
a transfer. (Transfer destinations can use `RejectCertificateTransfer`
instead.) After transfer, AWS IoT returns the certificate to the source
account in the INACTIVE state. After the destination account has accepted
the transfer, the transfer cannot be cancelled.
After a certificate transfer is cancelled, the status of the certificate
changes from PENDING_TRANSFER to INACTIVE.
"""
def cancel_certificate_transfer(client, certificate_id, input, options \\ []) do
url = "/cancel-certificate-transfer/#{URI.encode(certificate_id)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
Cancels a job.
"""
def cancel_job(client, job_id, input, options \\ []) do
url = "/jobs/#{URI.encode(job_id)}/cancel"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Cancels the execution of a job for a given thing.
"""
def cancel_job_execution(client, job_id, thing_name, input, options \\ []) do
url = "/things/#{URI.encode(thing_name)}/jobs/#{URI.encode(job_id)}/cancel"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Clears the default authorizer.
"""
def clear_default_authorizer(client, input, options \\ []) do
url = "/default-authorizer"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Creates an authorizer.
"""
def create_authorizer(client, authorizer_name, input, options \\ []) do
url = "/authorizer/#{URI.encode(authorizer_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a billing group.
"""
def create_billing_group(client, billing_group_name, input, options \\ []) do
url = "/billing-groups/#{URI.encode(billing_group_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates an X.509 certificate using the specified certificate signing
request.
**Note:** The CSR must include a public key that is either an RSA key with
a length of at least 2048 bits or an ECC key from NIST P-256 or NIST P-384
curves.
**Note:** Reusing the same certificate signing request (CSR) results in a
distinct certificate.
You can create multiple certificates in a batch by creating a directory,
copying multiple .csr files into that directory, and then specifying that
directory on the command line. The following commands show how to create a
batch of certificates given a batch of CSRs.
Assuming a set of CSRs are located inside of the directory
my-csr-directory:
On Linux and OS X, the command is:
$ ls my-csr-directory/ | xargs -I {} aws iot create-certificate-from-csr
--certificate-signing-request file://my-csr-directory/{}
This command lists all of the CSRs in my-csr-directory and pipes each CSR
file name to the aws iot create-certificate-from-csr AWS CLI command to
create a certificate for the corresponding CSR.
The aws iot create-certificate-from-csr part of the command can also be run
in parallel to speed up the certificate creation process:
$ ls my-csr-directory/ | xargs -P 10 -I {} aws iot
create-certificate-from-csr --certificate-signing-request
file://my-csr-directory/{}
On Windows PowerShell, the command to create certificates for all CSRs in
my-csr-directory is:
> ls -Name my-csr-directory | %{aws iot create-certificate-from-csr
--certificate-signing-request file://my-csr-directory/$_}
On a Windows command prompt, the command to create certificates for all
CSRs in my-csr-directory is:
> forfiles /p my-csr-directory /c "cmd /c aws iot
create-certificate-from-csr --certificate-signing-request file://@path"
"""
def create_certificate_from_csr(client, input, options \\ []) do
url = "/certificates"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a dynamic thing group.
"""
def create_dynamic_thing_group(client, thing_group_name, input, options \\ []) do
url = "/dynamic-thing-groups/#{URI.encode(thing_group_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a job.
"""
def create_job(client, job_id, input, options \\ []) do
url = "/jobs/#{URI.encode(job_id)}"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Creates a 2048-bit RSA key pair and issues an X.509 certificate using the
issued public key.
**Note** This is the only time AWS IoT issues the private key for this
certificate, so it is important to keep it in a secure location.
"""
def create_keys_and_certificate(client, input, options \\ []) do
url = "/keys-and-certificate"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates an AWS IoT OTAUpdate on a target group of things or groups.
"""
def create_o_t_a_update(client, ota_update_id, input, options \\ []) do
url = "/otaUpdates/#{URI.encode(ota_update_id)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates an AWS IoT policy.
The created policy is the default version for the policy. This operation
creates a policy version with a version identifier of **1** and sets **1**
as the policy's default version.
"""
def create_policy(client, policy_name, input, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a new version of the specified AWS IoT policy. To update a policy,
create a new policy version. A managed policy can have up to five versions.
If the policy has five versions, you must use `DeletePolicyVersion` to
delete an existing version before you create a new one.
Optionally, you can set the new version as the policy's default version.
The default version is the operative version (that is, the version that is
in effect for the certificates to which the policy is attached).
"""
def create_policy_version(client, policy_name, input, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}/version"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a role alias.
"""
def create_role_alias(client, role_alias, input, options \\ []) do
url = "/role-aliases/#{URI.encode(role_alias)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a scheduled audit that is run at a specified time interval.
"""
def create_scheduled_audit(client, scheduled_audit_name, input, options \\ []) do
url = "/audit/scheduledaudits/#{URI.encode(scheduled_audit_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a Device Defender security profile.
"""
def create_security_profile(client, security_profile_name, input, options \\ []) do
url = "/security-profiles/#{URI.encode(security_profile_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a stream for delivering one or more large files in chunks over
MQTT. A stream transports data bytes in chunks or blocks packaged as MQTT
messages from a source like S3. You can have one or more files associated
with a stream. The total size of a file associated with the stream cannot
exceed more than 2 MB. The stream will be created with version 0. If a
stream is created with the same streamID as a stream that existed and was
deleted within last 90 days, we will resurrect that old stream by
incrementing the version by 1.
"""
def create_stream(client, stream_id, input, options \\ []) do
url = "/streams/#{URI.encode(stream_id)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a thing record in the registry. If this call is made multiple times
using the same thing name and configuration, the call will succeed. If this
call is made with the same thing name but different configuration a
`ResourceAlreadyExistsException` is thrown.
<note> This is a control plane operation. See
[Authorization](https://docs.aws.amazon.com/iot/latest/developerguide/authorization.html)
for information about authorizing control plane actions.
</note>
"""
def create_thing(client, thing_name, input, options \\ []) do
url = "/things/#{URI.encode(thing_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Create a thing group.
<note> This is a control plane operation. See
[Authorization](https://docs.aws.amazon.com/iot/latest/developerguide/authorization.html)
for information about authorizing control plane actions.
</note>
"""
def create_thing_group(client, thing_group_name, input, options \\ []) do
url = "/thing-groups/#{URI.encode(thing_group_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a new thing type.
"""
def create_thing_type(client, thing_type_name, input, options \\ []) do
url = "/thing-types/#{URI.encode(thing_type_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a rule. Creating rules is an administrator-level action. Any user
who has permission to create rules will be able to access data processed by
the rule.
"""
def create_topic_rule(client, rule_name, input, options \\ []) do
url = "/rules/#{URI.encode(rule_name)}"
headers = []
if Dict.has_key?(input, "tags") do
headers = [{"x-amz-tagging", input["tags"]}|headers]
input = Dict.delete(input, "tags")
end
request(client, :post, url, headers, input, options, nil)
end
@doc """
Restores the default settings for Device Defender audits for this account.
Any configuration data you entered is deleted and all audit checks are
reset to disabled.
"""
def delete_account_audit_configuration(client, input, options \\ []) do
url = "/audit/configuration"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes an authorizer.
"""
def delete_authorizer(client, authorizer_name, input, options \\ []) do
url = "/authorizer/#{URI.encode(authorizer_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the billing group.
"""
def delete_billing_group(client, billing_group_name, input, options \\ []) do
url = "/billing-groups/#{URI.encode(billing_group_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes a registered CA certificate.
"""
def delete_c_a_certificate(client, certificate_id, input, options \\ []) do
url = "/cacertificate/#{URI.encode(certificate_id)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the specified certificate.
A certificate cannot be deleted if it has a policy attached to it or if its
status is set to ACTIVE. To delete a certificate, first use the
`DetachPrincipalPolicy` API to detach all policies. Next, use the
`UpdateCertificate` API to set the certificate to the INACTIVE status.
"""
def delete_certificate(client, certificate_id, input, options \\ []) do
url = "/certificates/#{URI.encode(certificate_id)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes a dynamic thing group.
"""
def delete_dynamic_thing_group(client, thing_group_name, input, options \\ []) do
url = "/dynamic-thing-groups/#{URI.encode(thing_group_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes a job and its related job executions.
Deleting a job may take time, depending on the number of job executions
created for the job and various other factors. While the job is being
deleted, the status of the job will be shown as "DELETION_IN_PROGRESS".
Attempting to delete or cancel a job whose status is already
"DELETION_IN_PROGRESS" will result in an error.
Only 10 jobs may have status "DELETION_IN_PROGRESS" at the same time, or a
LimitExceededException will occur.
"""
def delete_job(client, job_id, input, options \\ []) do
url = "/jobs/#{URI.encode(job_id)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes a job execution.
"""
def delete_job_execution(client, execution_number, job_id, thing_name, input, options \\ []) do
url = "/things/#{URI.encode(thing_name)}/jobs/#{URI.encode(job_id)}/executionNumber/#{URI.encode(execution_number)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Delete an OTA update.
"""
def delete_o_t_a_update(client, ota_update_id, input, options \\ []) do
url = "/otaUpdates/#{URI.encode(ota_update_id)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the specified policy.
A policy cannot be deleted if it has non-default versions or it is attached
to any certificate.
To delete a policy, use the DeletePolicyVersion API to delete all
non-default versions of the policy; use the DetachPrincipalPolicy API to
detach the policy from any certificate; and then use the DeletePolicy API
to delete the policy.
When a policy is deleted using DeletePolicy, its default version is deleted
with it.
"""
def delete_policy(client, policy_name, input, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the specified version of the specified policy. You cannot delete
the default version of a policy using this API. To delete the default
version of a policy, use `DeletePolicy`. To find out which version of a
policy is marked as the default version, use ListPolicyVersions.
"""
def delete_policy_version(client, policy_name, policy_version_id, input, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}/version/#{URI.encode(policy_version_id)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes a CA certificate registration code.
"""
def delete_registration_code(client, input, options \\ []) do
url = "/registrationcode"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes a role alias
"""
def delete_role_alias(client, role_alias, input, options \\ []) do
url = "/role-aliases/#{URI.encode(role_alias)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes a scheduled audit.
"""
def delete_scheduled_audit(client, scheduled_audit_name, input, options \\ []) do
url = "/audit/scheduledaudits/#{URI.encode(scheduled_audit_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes a Device Defender security profile.
"""
def delete_security_profile(client, security_profile_name, input, options \\ []) do
url = "/security-profiles/#{URI.encode(security_profile_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes a stream.
"""
def delete_stream(client, stream_id, input, options \\ []) do
url = "/streams/#{URI.encode(stream_id)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the specified thing. Returns successfully with no error if the
deletion is successful or you specify a thing that doesn't exist.
"""
def delete_thing(client, thing_name, input, options \\ []) do
url = "/things/#{URI.encode(thing_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes a thing group.
"""
def delete_thing_group(client, thing_group_name, input, options \\ []) do
url = "/thing-groups/#{URI.encode(thing_group_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the specified thing type. You cannot delete a thing type if it has
things associated with it. To delete a thing type, first mark it as
deprecated by calling `DeprecateThingType`, then remove any associated
things by calling `UpdateThing` to change the thing type on any associated
thing, and finally use `DeleteThingType` to delete the thing type.
"""
def delete_thing_type(client, thing_type_name, input, options \\ []) do
url = "/thing-types/#{URI.encode(thing_type_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the rule.
"""
def delete_topic_rule(client, rule_name, input, options \\ []) do
url = "/rules/#{URI.encode(rule_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes a logging level.
"""
def delete_v2_logging_level(client, input, options \\ []) do
url = "/v2LoggingLevel"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deprecates a thing type. You can not associate new things with deprecated
thing type.
"""
def deprecate_thing_type(client, thing_type_name, input, options \\ []) do
url = "/thing-types/#{URI.encode(thing_type_name)}/deprecate"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Gets information about the Device Defender audit settings for this account.
Settings include how audit notifications are sent and which audit checks
are enabled or disabled.
"""
def describe_account_audit_configuration(client, options \\ []) do
url = "/audit/configuration"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about a Device Defender audit.
"""
def describe_audit_task(client, task_id, options \\ []) do
url = "/audit/tasks/#{URI.encode(task_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describes an authorizer.
"""
def describe_authorizer(client, authorizer_name, options \\ []) do
url = "/authorizer/#{URI.encode(authorizer_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Returns information about a billing group.
"""
def describe_billing_group(client, billing_group_name, options \\ []) do
url = "/billing-groups/#{URI.encode(billing_group_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describes a registered CA certificate.
"""
def describe_c_a_certificate(client, certificate_id, options \\ []) do
url = "/cacertificate/#{URI.encode(certificate_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about the specified certificate.
"""
def describe_certificate(client, certificate_id, options \\ []) do
url = "/certificates/#{URI.encode(certificate_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describes the default authorizer.
"""
def describe_default_authorizer(client, options \\ []) do
url = "/default-authorizer"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Returns a unique endpoint specific to the AWS account making the call.
"""
def describe_endpoint(client, options \\ []) do
url = "/endpoint"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describes event configurations.
"""
def describe_event_configurations(client, options \\ []) do
url = "/event-configurations"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describes a search index.
"""
def describe_index(client, index_name, options \\ []) do
url = "/indices/#{URI.encode(index_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describes a job.
"""
def describe_job(client, job_id, options \\ []) do
url = "/jobs/#{URI.encode(job_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describes a job execution.
"""
def describe_job_execution(client, job_id, thing_name, options \\ []) do
url = "/things/#{URI.encode(thing_name)}/jobs/#{URI.encode(job_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describes a role alias.
"""
def describe_role_alias(client, role_alias, options \\ []) do
url = "/role-aliases/#{URI.encode(role_alias)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about a scheduled audit.
"""
def describe_scheduled_audit(client, scheduled_audit_name, options \\ []) do
url = "/audit/scheduledaudits/#{URI.encode(scheduled_audit_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about a Device Defender security profile.
"""
def describe_security_profile(client, security_profile_name, options \\ []) do
url = "/security-profiles/#{URI.encode(security_profile_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about a stream.
"""
def describe_stream(client, stream_id, options \\ []) do
url = "/streams/#{URI.encode(stream_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about the specified thing.
"""
def describe_thing(client, thing_name, options \\ []) do
url = "/things/#{URI.encode(thing_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describe a thing group.
"""
def describe_thing_group(client, thing_group_name, options \\ []) do
url = "/thing-groups/#{URI.encode(thing_group_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Describes a bulk thing provisioning task.
"""
def describe_thing_registration_task(client, task_id, options \\ []) do
url = "/thing-registration-tasks/#{URI.encode(task_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about the specified thing type.
"""
def describe_thing_type(client, thing_type_name, options \\ []) do
url = "/thing-types/#{URI.encode(thing_type_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Detaches a policy from the specified target.
"""
def detach_policy(client, policy_name, input, options \\ []) do
url = "/target-policies/#{URI.encode(policy_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Removes the specified policy from the specified certificate.
**Note:** This API is deprecated. Please use `DetachPolicy` instead.
"""
def detach_principal_policy(client, policy_name, input, options \\ []) do
url = "/principal-policies/#{URI.encode(policy_name)}"
headers = []
if Dict.has_key?(input, "principal") do
headers = [{"x-amzn-iot-principal", input["principal"]}|headers]
input = Dict.delete(input, "principal")
end
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Disassociates a Device Defender security profile from a thing group or from
this account.
"""
def detach_security_profile(client, security_profile_name, input, options \\ []) do
url = "/security-profiles/#{URI.encode(security_profile_name)}/targets"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Detaches the specified principal from the specified thing. A principal can
be X.509 certificates, IAM users, groups, and roles, Amazon Cognito
identities or federated identities.
<note> This call is asynchronous. It might take several seconds for the
detachment to propagate.
</note>
"""
def detach_thing_principal(client, thing_name, input, options \\ []) do
url = "/things/#{URI.encode(thing_name)}/principals"
headers = []
if Dict.has_key?(input, "principal") do
headers = [{"x-amzn-principal", input["principal"]}|headers]
input = Dict.delete(input, "principal")
end
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Disables the rule.
"""
def disable_topic_rule(client, rule_name, input, options \\ []) do
url = "/rules/#{URI.encode(rule_name)}/disable"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Enables the rule.
"""
def enable_topic_rule(client, rule_name, input, options \\ []) do
url = "/rules/#{URI.encode(rule_name)}/enable"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Gets a list of the policies that have an effect on the authorization
behavior of the specified device when it connects to the AWS IoT device
gateway.
"""
def get_effective_policies(client, input, options \\ []) do
url = "/effective-policies"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Gets the search configuration.
"""
def get_indexing_configuration(client, options \\ []) do
url = "/indexing/config"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets a job document.
"""
def get_job_document(client, job_id, options \\ []) do
url = "/jobs/#{URI.encode(job_id)}/job-document"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets the logging options.
NOTE: use of this command is not recommended. Use `GetV2LoggingOptions`
instead.
"""
def get_logging_options(client, options \\ []) do
url = "/loggingOptions"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets an OTA update.
"""
def get_o_t_a_update(client, ota_update_id, options \\ []) do
url = "/otaUpdates/#{URI.encode(ota_update_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about the specified policy with the policy document of the
default version.
"""
def get_policy(client, policy_name, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about the specified policy version.
"""
def get_policy_version(client, policy_name, policy_version_id, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}/version/#{URI.encode(policy_version_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets a registration code used to register a CA certificate with AWS IoT.
"""
def get_registration_code(client, options \\ []) do
url = "/registrationcode"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets statistics about things that match the specified query.
"""
def get_statistics(client, input, options \\ []) do
url = "/indices/statistics"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Gets information about the rule.
"""
def get_topic_rule(client, rule_name, options \\ []) do
url = "/rules/#{URI.encode(rule_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets the fine grained logging options.
"""
def get_v2_logging_options(client, options \\ []) do
url = "/v2LoggingOptions"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the active violations for a given Device Defender security profile.
"""
def list_active_violations(client, options \\ []) do
url = "/active-violations"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the policies attached to the specified thing group.
"""
def list_attached_policies(client, target, input, options \\ []) do
url = "/attached-policies/#{URI.encode(target)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Lists the findings (results) of a Device Defender audit or of the audits
performed during a specified time period. (Findings are retained for 180
days.)
"""
def list_audit_findings(client, input, options \\ []) do
url = "/audit/findings"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Lists the Device Defender audits that have been performed during a given
time period.
"""
def list_audit_tasks(client, options \\ []) do
url = "/audit/tasks"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the authorizers registered in your account.
"""
def list_authorizers(client, options \\ []) do
url = "/authorizers"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the billing groups you have created.
"""
def list_billing_groups(client, options \\ []) do
url = "/billing-groups"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the CA certificates registered for your AWS account.
The results are paginated with a default page size of 25. You can use the
returned marker to retrieve additional results.
"""
def list_c_a_certificates(client, options \\ []) do
url = "/cacertificates"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the certificates registered in your AWS account.
The results are paginated with a default page size of 25. You can use the
returned marker to retrieve additional results.
"""
def list_certificates(client, options \\ []) do
url = "/certificates"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
List the device certificates signed by the specified CA certificate.
"""
def list_certificates_by_c_a(client, ca_certificate_id, options \\ []) do
url = "/certificates-by-ca/#{URI.encode(ca_certificate_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the search indices.
"""
def list_indices(client, options \\ []) do
url = "/indices"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the job executions for a job.
"""
def list_job_executions_for_job(client, job_id, options \\ []) do
url = "/jobs/#{URI.encode(job_id)}/things"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the job executions for the specified thing.
"""
def list_job_executions_for_thing(client, thing_name, options \\ []) do
url = "/things/#{URI.encode(thing_name)}/jobs"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists jobs.
"""
def list_jobs(client, options \\ []) do
url = "/jobs"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists OTA updates.
"""
def list_o_t_a_updates(client, options \\ []) do
url = "/otaUpdates"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists certificates that are being transferred but not yet accepted.
"""
def list_outgoing_certificates(client, options \\ []) do
url = "/certificates-out-going"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists your policies.
"""
def list_policies(client, options \\ []) do
url = "/policies"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the principals associated with the specified policy.
**Note:** This API is deprecated. Please use `ListTargetsForPolicy`
instead.
"""
def list_policy_principals(client, policy_name \\ nil, options \\ []) do
url = "/policy-principals"
headers = []
if !is_nil(policy_name) do
headers = [{"x-amzn-iot-policy", policy_name}|headers]
end
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the versions of the specified policy and identifies the default
version.
"""
def list_policy_versions(client, policy_name, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}/version"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the policies attached to the specified principal. If you use an
Cognito identity, the ID must be in [AmazonCognito Identity
format](https://docs.aws.amazon.com/cognitoidentity/latest/APIReference/API_GetCredentialsForIdentity.html#API_GetCredentialsForIdentity_RequestSyntax).
**Note:** This API is deprecated. Please use `ListAttachedPolicies`
instead.
"""
def list_principal_policies(client, principal \\ nil, options \\ []) do
url = "/principal-policies"
headers = []
if !is_nil(principal) do
headers = [{"x-amzn-iot-principal", principal}|headers]
end
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the things associated with the specified principal. A principal can
be X.509 certificates, IAM users, groups, and roles, Amazon Cognito
identities or federated identities.
"""
def list_principal_things(client, principal \\ nil, options \\ []) do
url = "/principals/things"
headers = []
if !is_nil(principal) do
headers = [{"x-amzn-principal", principal}|headers]
end
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the role aliases registered in your account.
"""
def list_role_aliases(client, options \\ []) do
url = "/role-aliases"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists all of your scheduled audits.
"""
def list_scheduled_audits(client, options \\ []) do
url = "/audit/scheduledaudits"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the Device Defender security profiles you have created. You can use
filters to list only those security profiles associated with a thing group
or only those associated with your account.
"""
def list_security_profiles(client, options \\ []) do
url = "/security-profiles"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the Device Defender security profiles attached to a target (thing
group).
"""
def list_security_profiles_for_target(client, options \\ []) do
url = "/security-profiles-for-target"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists all of the streams in your AWS account.
"""
def list_streams(client, options \\ []) do
url = "/streams"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the tags (metadata) you have assigned to the resource.
"""
def list_tags_for_resource(client, options \\ []) do
url = "/tags"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
List targets for the specified policy.
"""
def list_targets_for_policy(client, policy_name, input, options \\ []) do
url = "/policy-targets/#{URI.encode(policy_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Lists the targets (thing groups) associated with a given Device Defender
security profile.
"""
def list_targets_for_security_profile(client, security_profile_name, options \\ []) do
url = "/security-profiles/#{URI.encode(security_profile_name)}/targets"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
List the thing groups in your account.
"""
def list_thing_groups(client, options \\ []) do
url = "/thing-groups"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
List the thing groups to which the specified thing belongs.
"""
def list_thing_groups_for_thing(client, thing_name, options \\ []) do
url = "/things/#{URI.encode(thing_name)}/thing-groups"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the principals associated with the specified thing. A principal can
be X.509 certificates, IAM users, groups, and roles, Amazon Cognito
identities or federated identities.
"""
def list_thing_principals(client, thing_name, options \\ []) do
url = "/things/#{URI.encode(thing_name)}/principals"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Information about the thing registration tasks.
"""
def list_thing_registration_task_reports(client, task_id, options \\ []) do
url = "/thing-registration-tasks/#{URI.encode(task_id)}/reports"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
List bulk thing provisioning tasks.
"""
def list_thing_registration_tasks(client, options \\ []) do
url = "/thing-registration-tasks"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the existing thing types.
"""
def list_thing_types(client, options \\ []) do
url = "/thing-types"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists your things. Use the **attributeName** and **attributeValue**
parameters to filter your things. For example, calling `ListThings` with
attributeName=Color and attributeValue=Red retrieves all things in the
registry that contain an attribute **Color** with the value **Red**.
"""
def list_things(client, options \\ []) do
url = "/things"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the things you have added to the given billing group.
"""
def list_things_in_billing_group(client, billing_group_name, options \\ []) do
url = "/billing-groups/#{URI.encode(billing_group_name)}/things"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the things in the specified group.
"""
def list_things_in_thing_group(client, thing_group_name, options \\ []) do
url = "/thing-groups/#{URI.encode(thing_group_name)}/things"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the rules for the specific topic.
"""
def list_topic_rules(client, options \\ []) do
url = "/rules"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists logging levels.
"""
def list_v2_logging_levels(client, options \\ []) do
url = "/v2LoggingLevel"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the Device Defender security profile violations discovered during the
given time period. You can use filters to limit the results to those alerts
issued for a particular security profile, behavior or thing (device).
"""
def list_violation_events(client, options \\ []) do
url = "/violation-events"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Registers a CA certificate with AWS IoT. This CA certificate can then be
used to sign device certificates, which can be then registered with AWS
IoT. You can register up to 10 CA certificates per AWS account that have
the same subject field. This enables you to have up to 10 certificate
authorities sign your device certificates. If you have more than one CA
certificate registered, make sure you pass the CA certificate when you
register your device certificates with the RegisterCertificate API.
"""
def register_c_a_certificate(client, input, options \\ []) do
url = "/cacertificate"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Registers a device certificate with AWS IoT. If you have more than one CA
certificate that has the same subject field, you must specify the CA
certificate that was used to sign the device certificate being registered.
"""
def register_certificate(client, input, options \\ []) do
url = "/certificate/register"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Provisions a thing.
"""
def register_thing(client, input, options \\ []) do
url = "/things"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Rejects a pending certificate transfer. After AWS IoT rejects a certificate
transfer, the certificate status changes from **PENDING_TRANSFER** to
**INACTIVE**.
To check for pending certificate transfers, call `ListCertificates` to
enumerate your certificates.
This operation can only be called by the transfer destination. After it is
called, the certificate will be returned to the source's account in the
INACTIVE state.
"""
def reject_certificate_transfer(client, certificate_id, input, options \\ []) do
url = "/reject-certificate-transfer/#{URI.encode(certificate_id)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
Removes the given thing from the billing group.
"""
def remove_thing_from_billing_group(client, input, options \\ []) do
url = "/billing-groups/removeThingFromBillingGroup"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Remove the specified thing from the specified group.
"""
def remove_thing_from_thing_group(client, input, options \\ []) do
url = "/thing-groups/removeThingFromThingGroup"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Replaces the rule. You must specify all parameters for the new rule.
Creating rules is an administrator-level action. Any user who has
permission to create rules will be able to access data processed by the
rule.
"""
def replace_topic_rule(client, rule_name, input, options \\ []) do
url = "/rules/#{URI.encode(rule_name)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
The query search index.
"""
def search_index(client, input, options \\ []) do
url = "/indices/search"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Sets the default authorizer. This will be used if a websocket connection is
made without specifying an authorizer.
"""
def set_default_authorizer(client, input, options \\ []) do
url = "/default-authorizer"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Sets the specified version of the specified policy as the policy's default
(operative) version. This action affects all certificates to which the
policy is attached. To list the principals the policy is attached to, use
the ListPrincipalPolicy API.
"""
def set_default_policy_version(client, policy_name, policy_version_id, input, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}/version/#{URI.encode(policy_version_id)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
Sets the logging options.
NOTE: use of this command is not recommended. Use `SetV2LoggingOptions`
instead.
"""
def set_logging_options(client, input, options \\ []) do
url = "/loggingOptions"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Sets the logging level.
"""
def set_v2_logging_level(client, input, options \\ []) do
url = "/v2LoggingLevel"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Sets the logging options for the V2 logging service.
"""
def set_v2_logging_options(client, input, options \\ []) do
url = "/v2LoggingOptions"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Starts an on-demand Device Defender audit.
"""
def start_on_demand_audit_task(client, input, options \\ []) do
url = "/audit/tasks"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a bulk thing provisioning task.
"""
def start_thing_registration_task(client, input, options \\ []) do
url = "/thing-registration-tasks"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Cancels a bulk thing provisioning task.
"""
def stop_thing_registration_task(client, task_id, input, options \\ []) do
url = "/thing-registration-tasks/#{URI.encode(task_id)}/cancel"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Adds to or modifies the tags of the given resource. Tags are metadata which
can be used to manage a resource.
"""
def tag_resource(client, input, options \\ []) do
  request(client, :post, "/tags", [], input, options, nil)
end

@doc """
Tests if a specified principal is authorized to perform an AWS IoT action
on a specified resource. Use this to test and debug the authorization
behavior of devices that connect to the AWS IoT device gateway.
"""
def test_authorization(client, input, options \\ []) do
  request(client, :post, "/test-authorization", [], input, options, nil)
end

@doc """
Tests a custom authorization behavior by invoking a specified custom
authorizer. Use this to test and debug the custom authorization behavior of
devices that connect to the AWS IoT device gateway.
"""
def test_invoke_authorizer(client, authorizer_name, input, options \\ []) do
  request(client, :post, "/authorizer/#{URI.encode(authorizer_name)}/test", [], input, options, nil)
end

@doc """
Transfers the specified certificate to the specified AWS account.
You can cancel the transfer until it is acknowledged by the recipient.
No notification is sent to the transfer destination's account. It is up to
the caller to notify the transfer target.
The certificate being transferred must not be in the ACTIVE state. You can
use the UpdateCertificate API to deactivate it.
The certificate must not have any policies attached to it. You can use the
DetachPrincipalPolicy API to detach them.
"""
def transfer_certificate(client, certificate_id, input, options \\ []) do
  request(client, :patch, "/transfer-certificate/#{URI.encode(certificate_id)}", [], input, options, nil)
end

@doc """
Removes the given tags (metadata) from the resource.
"""
def untag_resource(client, input, options \\ []) do
  request(client, :post, "/untag", [], input, options, nil)
end
@doc """
Configures or reconfigures the Device Defender audit settings for this
account. Settings include how audit notifications are sent and which audit
checks are enabled or disabled.
"""
def update_account_audit_configuration(client, input, options \\ []) do
  request(client, :patch, "/audit/configuration", [], input, options, nil)
end

@doc """
Updates an authorizer.
"""
def update_authorizer(client, authorizer_name, input, options \\ []) do
  request(client, :put, "/authorizer/#{URI.encode(authorizer_name)}", [], input, options, nil)
end

@doc """
Updates information about the billing group.
"""
def update_billing_group(client, billing_group_name, input, options \\ []) do
  request(client, :patch, "/billing-groups/#{URI.encode(billing_group_name)}", [], input, options, nil)
end

@doc """
Updates a registered CA certificate.
"""
def update_c_a_certificate(client, certificate_id, input, options \\ []) do
  request(client, :put, "/cacertificate/#{URI.encode(certificate_id)}", [], input, options, nil)
end

@doc """
Updates the status of the specified certificate. This operation is
idempotent.
Moving a certificate from the ACTIVE state (including REVOKED) will not
disconnect currently connected devices, but these devices will be unable to
reconnect.
The ACTIVE state is required to authenticate devices connecting to AWS IoT
using a certificate.
"""
def update_certificate(client, certificate_id, input, options \\ []) do
  request(client, :put, "/certificates/#{URI.encode(certificate_id)}", [], input, options, nil)
end

@doc """
Updates a dynamic thing group.
"""
def update_dynamic_thing_group(client, thing_group_name, input, options \\ []) do
  request(client, :patch, "/dynamic-thing-groups/#{URI.encode(thing_group_name)}", [], input, options, nil)
end

@doc """
Updates the event configurations.
"""
def update_event_configurations(client, input, options \\ []) do
  request(client, :patch, "/event-configurations", [], input, options, nil)
end

@doc """
Updates the search configuration.
"""
def update_indexing_configuration(client, input, options \\ []) do
  request(client, :post, "/indexing/config", [], input, options, nil)
end
@doc """
Updates supported fields of the specified job.
"""
def update_job(client, job_id, input, options \\ []) do
  request(client, :patch, "/jobs/#{URI.encode(job_id)}", [], input, options, nil)
end

@doc """
Updates a role alias.
"""
def update_role_alias(client, role_alias, input, options \\ []) do
  request(client, :put, "/role-aliases/#{URI.encode(role_alias)}", [], input, options, nil)
end

@doc """
Updates a scheduled audit, including what checks are performed and how
often the audit takes place.
"""
def update_scheduled_audit(client, scheduled_audit_name, input, options \\ []) do
  request(client, :patch, "/audit/scheduledaudits/#{URI.encode(scheduled_audit_name)}", [], input, options, nil)
end

@doc """
Updates a Device Defender security profile.
"""
def update_security_profile(client, security_profile_name, input, options \\ []) do
  request(client, :patch, "/security-profiles/#{URI.encode(security_profile_name)}", [], input, options, nil)
end

@doc """
Updates an existing stream. The stream version will be incremented by one.
"""
def update_stream(client, stream_id, input, options \\ []) do
  request(client, :put, "/streams/#{URI.encode(stream_id)}", [], input, options, nil)
end

@doc """
Updates the data for a thing.
"""
def update_thing(client, thing_name, input, options \\ []) do
  request(client, :patch, "/things/#{URI.encode(thing_name)}", [], input, options, nil)
end

@doc """
Update a thing group.
"""
def update_thing_group(client, thing_group_name, input, options \\ []) do
  request(client, :patch, "/thing-groups/#{URI.encode(thing_group_name)}", [], input, options, nil)
end

@doc """
Updates the groups to which the thing belongs.
"""
def update_thing_groups_for_thing(client, input, options \\ []) do
  request(client, :put, "/thing-groups/updateThingGroupsForThing", [], input, options, nil)
end

@doc """
Validates a Device Defender security profile behaviors specification.
"""
def validate_security_profile_behaviors(client, input, options \\ []) do
  request(client, :post, "/security-profile-behaviors/validate", [], input, options, nil)
end
# Builds, signs (SigV4) and performs one HTTP request against the IoT API.
defp request(client, method, url, headers, input, options, success_status_code) do
  # All IoT calls are signed against the "execute-api" service name.
  client = %{client | service: "execute-api"}
  host = get_host("iot", client)
  full_url = get_url(host, url, client)
  base_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"} | headers]
  payload = encode_payload(input)
  signed_headers = AWS.Request.sign_v4(client, method, full_url, base_headers, payload)
  perform_request(method, full_url, payload, signed_headers, options, success_status_code)
end
# Performs the HTTP request and decodes the JSON response.
#
# With `success_status_code` = nil, any of 200/202/204 counts as success;
# otherwise only the exact given status does.
#
# Fix: 202 and 204 responses routinely carry an empty body, and
# `Poison.Parser.parse!("")` raises — empty bodies are now passed through
# instead of being parsed (the original only special-cased 200).
defp perform_request(method, url, payload, headers, options, nil) do
  case HTTPoison.request(method, url, payload, headers, options) do
    {:ok, response = %HTTPoison.Response{status_code: 200, body: ""}} ->
      # Preserved quirk: 200-with-empty-body historically returns a 2-tuple.
      {:ok, response}
    {:ok, response = %HTTPoison.Response{status_code: status, body: ""}} when status in [202, 204] ->
      {:ok, nil, response}
    {:ok, response = %HTTPoison.Response{status_code: status, body: body}} when status in [200, 202, 204] ->
      {:ok, Poison.Parser.parse!(body), response}
    {:ok, _response = %HTTPoison.Response{body: body}} ->
      # Non-success: surface the service-provided error message.
      reason = Poison.Parser.parse!(body)["message"]
      {:error, reason}
    {:error, %HTTPoison.Error{reason: reason}} ->
      {:error, %HTTPoison.Error{reason: reason}}
  end
end

defp perform_request(method, url, payload, headers, options, success_status_code) do
  case HTTPoison.request(method, url, payload, headers, options) do
    {:ok, response = %HTTPoison.Response{status_code: ^success_status_code, body: ""}} ->
      {:ok, nil, response}
    {:ok, response = %HTTPoison.Response{status_code: ^success_status_code, body: body}} ->
      {:ok, Poison.Parser.parse!(body), response}
    {:ok, _response = %HTTPoison.Response{body: body}} ->
      reason = Poison.Parser.parse!(body)["message"]
      {:error, reason}
    {:error, %HTTPoison.Error{reason: reason}} ->
      {:error, %HTTPoison.Error{reason: reason}}
  end
end
# Builds the service hostname; the "local" region targets a local stack.
defp get_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

defp get_host(endpoint_prefix, client) do
  "#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
# Joins scheme, host, port and path; a trailing "/" is always appended.
defp get_url(host, url, %{proto: proto, port: port}) do
  "#{proto}://#{host}:#{port}#{url}/"
end
# nil input (calls without a request body) encodes to an empty payload.
defp encode_payload(nil), do: ""
defp encode_payload(input), do: Poison.Encoder.encode(input, [])
end
|
lib/aws/iot.ex
| 0.782663
| 0.455986
|
iot.ex
|
starcoder
|
defmodule Money do
  @moduledoc """
  Represents a Money type, inspired by [Martin Fowler's Money Pattern](https://martinfowler.com/eaaCatalog/money.html)
  """
  # `amount` holds the value in the currency's minor unit (e.g. cents, see
  # do_new!/2); `currency` is the currency code as an atom.
  defstruct [:amount, :currency]
  # Accepts optionally signed numbers with "," or "." as decimal separator,
  # e.g. "12", "0,1", "-3.50".
  @number ~r/^[\+\-]?\d*\,?\d*\.?\d+(?:[\+\-]?\d+)?$/
  @doc """
  Creates a new `Money` type with amount and currency: Default currency is `:BRL`
  ## Examples:
  ```
  iex> Money.new(5)
  %Money{amount: 500, currency: :BRL}
  iex> Money.new(5, :USD)
  %Money{amount: 500, currency: :USD}
  iex> Money.new(5.78, :USD)
  %Money{amount: 578, currency: :USD}
  iex> Money.new(5, "USD")
  %Money{amount: 500, currency: :USD}
  iex> Money.new(5, "usd")
  %Money{amount: 500, currency: :USD}
  iex> Money.new(5, "new_currency")
  {:error, "Currency NEW_CURRENCY not found"}
  ```
  """
  # Non-raising variant: converts the exception raised by do_new!/2 into
  # an {:error, message} tuple.
  def new(amount, currency_code \\ :BRL) when is_integer(amount) or is_float(amount) do
    do_new!(amount, currency_code)
  rescue
    e -> {:error, e.message}
  end

  @doc """
  Creates a new `Money` type with amount and currency: Default currency is `:BRL`
  ## Examples:
  ```
  iex> Money.new!(5)
  %Money{amount: 500, currency: :BRL}
  iex> Money.new!(5, :USD)
  %Money{amount: 500, currency: :USD}
  iex> Money.new!(5.78, :USD)
  %Money{amount: 578, currency: :USD}
  iex> Money.new!(5, "new_currency")
  ** (ArgumentError) Currency NEW_CURRENCY not found
  ```
  """
  def new!(amount, currency_code \\ :BRL) when is_integer(amount) or is_float(amount) do
    do_new!(amount, currency_code)
  end

  defp do_new!(amount, currency_code) do
    currency = Currency.find!(currency_code)
    # Factor used for pre-operation conversion and for displaying the money:
    # it scales the major-unit amount into the stored minor-unit integer.
    factor = Currency.get_factor(currency)
    %Money{amount: round(amount * factor), currency: Currency.to_atom(currency)}
  end
@doc """
Adds Monies (`Money`) with same currency
## Examples:
```
iex> Money.add(Money.new(10), Money.new(1))
%Money{amount: 1100, currency: :BRL}
iex> Money.add(Money.new(5, :USD), Money.new(10, :usd))
%Money{amount: 1500, currency: :USD}
```
"""
def add(%Money{currency: currency} = a, %Money{currency: currency} = b) do
%Money{amount: a.amount + b.amount, currency: currency}
end
@doc """
Adds amount to `Money`
## Examples:
```
iex> Money.add(Money.new(10), 1.50)
%Money{amount: 1150, currency: :BRL}
iex> Money.add(Money.new(5, :USD), 10)
%Money{amount: 1500, currency: :USD}
```
"""
def add(%Money{currency: currency} = a, b) when is_integer(b) or is_float(b) do
add(a, Money.new(b, currency))
end
@doc """
Raises different currencies exception
## Examples:
```
iex> Money.add(Money.new(10), Money.new(10, :USD))
** (ArgumentError) Monies with different currencies. Got BRL and USD
```
"""
def add(a, b) do
raise_different_currencies(a, b)
end
  @doc """
  Divides `Money` from a given list of ratios
  ## Examples:
  ```
  iex> Money.divide(Money.new(7), [1, 9])
  [%Money{amount: 70, currency: :BRL}, %Money{amount: 630, currency: :BRL}]
  iex> Money.divide(Money.new(0.15), [3, 7])
  [%Money{amount: 5, currency: :BRL}, %Money{amount: 10, currency: :BRL}]
  iex> Money.divide(Money.new(0.10), [4, 6])
  [%Money{amount: 5, currency: :BRL}, %Money{amount: 6, currency: :BRL}]
  iex> Money.divide(Money.new(0.10), [4, "6"])
  ** (ArgumentError) Value "6" must be integer
  ```
  """
  def divide(%Money{currency: currency} = m, ratios) when is_list(ratios) do
    raise_if_not_valid_ratios(ratios)
    # Integer division truncates each share; the lost remainder is handed to
    # do_alocate/3, which redistributes it one minor unit at a time.
    divisions = calculate_values_by_ratio(ratios, m.amount)
    rem = m.amount - sum_values(divisions)
    do_alocate(divisions, rem, currency)
  end

  @doc """
  Divides `Money` from a given a denominator
  ## Examples:
  ```
  iex> Money.divide(Money.new(10), 2)
  [%Money{amount: 500, currency: :BRL}, %Money{amount: 500, currency: :BRL}]
  iex> Money.divide(Money.new(9), 3)
  [%Money{amount: 300, currency: :BRL}, %Money{amount: 300, currency: :BRL}, %Money{amount: 300, currency: :BRL}]
  iex> Money.divide(Money.new(9, :USD), 1)
  [%Money{amount: 900, currency: :USD}]
  iex> Money.divide(Money.new(5), "2")
  ** (ArgumentError) Value "2" must be integer
  ```
  """
  def divide(%Money{currency: currency} = m, denominator) do
    raise_if_not_integer(denominator)
    raise_if_not_greater_than_zero(denominator)
    # Equal split; the remainder (always < denominator) is spread over the
    # leading shares by do_alocate/4.
    div = div(m.amount, denominator)
    rem = rem(m.amount, denominator)
    do_alocate(div, rem, currency, denominator)
  end
defp calculate_values_by_ratio(ratios, amount) do
total_ratio = sum_values(ratios)
Enum.map(ratios, fn ratio -> div(amount * ratio, total_ratio) end)
end
defp sum_values(values) do
values |> Enum.reduce(fn value, acc -> value + acc end)
end
  # Turns the list of truncated shares into Money structs, handing one extra
  # minor unit to each leading share while `rem` (the undistributed
  # remainder) lasts, so the pieces always add up to the original amount.
  defp do_alocate([head | tail], rem, currency) do
    amount =
      if rem > 0 do
        head + 1
      else
        head
      end
    money = %Money{amount: amount, currency: currency}
    remainder =
      if rem > 0 do
        rem - 1
      else
        rem
      end
    if tail != [] do
      [money | do_alocate(tail, remainder, currency)]
    else
      [money]
    end
  end

  # Builds `times` equal shares of `value`, topping up the first `rem`
  # shares with one extra minor unit each (same total-preserving idea as
  # the list variant above).
  defp do_alocate(value, rem, currency, times) do
    cond do
      rem > 0 and times > 0 ->
        [
          %Money{amount: value + 1, currency: currency}
          | do_alocate(value, rem - 1, currency, times - 1)
        ]
      rem <= 0 and times > 0 ->
        [%Money{amount: value, currency: currency} | do_alocate(value, rem, currency, times - 1)]
      true ->
        []
    end
  end
  @doc """
  Multiplies `Money` by amount
  ## Examples:
  ```
  iex> Money.multiply(Money.new(15, :USD), 2)
  %Money{amount: 3000, currency: :USD}
  iex> Money.multiply(Money.new(750, :JPY), 3.5)
  %Money{amount: 2625, currency: :JPY}
  iex> Money.multiply(Money.new(750), "3.5")
  ** (ArgumentError) Value "3.5" must be integer or float
  ```
  """
  def multiply(%Money{currency: currency} = a, b) do
    raise_if_not_integer_or_float(b)
    # Convert back to major units first so rounding follows the currency
    # exponent inside do_new!/2.
    float_amount = float_value(a)
    do_new!(float_amount * b, currency)
  end

  @doc """
  Multiplies `Money` by amount and create a new one with a given `Currency`
  ## Examples:
  ```
  iex> Money.multiply(Money.new(15), 2, :USD)
  %Money{amount: 3000, currency: :USD}
  iex> Money.multiply(Money.new(750, :USD), 3.5, :JPY)
  %Money{amount: 2625, currency: :JPY}
  ```
  """
  def multiply(%Money{amount: _a, currency: _c} = a, b, currency) do
    raise_if_not_integer_or_float(b)
    float_amount = float_value(a)
    do_new!(float_amount * b, currency)
  end
  # Converts the stored minor-unit integer back to a float in major units,
  # rounded to the currency's exponent (number of decimal places).
  defp float_value(%Money{currency: currency} = m) do
    currency_v = Currency.find!(currency)
    factor = Currency.get_factor(currency_v)
    Float.round(m.amount / factor, currency_v.exponent)
  end

  @doc """
  Parse an amount to `Money`
  ## Examples:
  ```
  iex> Money.parse("12")
  %Money{amount: 1200, currency: :BRL}
  iex> Money.parse("0,1")
  %Money{amount: 10, currency: :BRL}
  iex> Money.parse("12aa", :USD)
  {:error, "Cannot parse value \\"12aa\\""}
  ```
  """
  # Non-raising wrapper around parse!/2.
  def parse(amount, currency \\ :BRL) when is_binary(amount) do
    parse!(amount, currency)
  rescue
    e -> {:error, e.message}
  end

  @doc """
  Parse an amount value to `Money`. Raises an error if the value is not a number
  ## Examples:
  ```
  iex> Money.parse!("12")
  %Money{amount: 1200, currency: :BRL}
  iex> Money.parse!("0.1", :USD)
  %Money{amount: 10, currency: :USD}
  iex> Money.parse!("0,1")
  %Money{amount: 10, currency: :BRL}
  iex> Money.parse!("bad", :USD)
  ** (ArgumentError) Cannot parse value "bad"
  ```
  """
  def parse!(amount, currency \\ :BRL) when is_binary(amount) do
    raise_if_not_number(amount)
    # Integer.parse/1 is only used to detect whether a fractional part
    # follows the leading digits.
    # NOTE(review): inputs like ",5" pass the @number regex but make this
    # match fail with a MatchError — confirm whether they should be accepted.
    {_int, floating} = Integer.parse(amount)
    if floating !== "" do
      # Normalize the comma decimal separator before float parsing.
      {value, _} = String.replace(amount, ",", ".") |> Float.parse()
      do_new!(value, currency)
    else
      {value, _} = Integer.parse(amount)
      do_new!(value, currency)
    end
  end
@doc """
Converts `Money` to formated string with properly symbol and number of decimal cases
## Examples:
```
iex> Money.to_string(Money.new(4))
"R$ 4.00"
iex> Money.to_string(Money.new(25, :USD))
"$ 25.00"
"""
def to_string(%Money{currency: currency_code} = m) do
currency = Currency.find!(currency_code)
factor = Currency.get_factor(currency)
formated_value =
(m.amount / factor)
|> :erlang.float_to_binary(decimals: currency.exponent)
"#{currency.symbol} #{formated_value}"
end
defp raise_different_currencies(a, b) do
raise ArgumentError,
message: "Monies with different currencies. Got #{a.currency} and #{b.currency}"
end
defp raise_if_not_valid_ratios(ratios) do
Enum.each(ratios, fn ratio ->
raise_if_not_integer(ratio)
raise_if_not_greater_than_zero(ratio)
end)
end
defp raise_if_not_integer(value) do
if !is_integer(value) do
raise ArgumentError,
message: "Value \"#{value}\" must be integer"
end
end
defp raise_if_not_integer_or_float(value) do
if !is_integer(value) and !is_float(value) do
raise ArgumentError,
message: "Value \"#{value}\" must be integer or float"
end
end
defp raise_if_not_greater_than_zero(value) do
if value <= 0 do
raise ArgumentError,
message: "Value \"#{value}\" must be greater than zero"
end
end
defp raise_if_not_number(number) do
if !String.match?(number, @number) do
raise ArgumentError,
message: "Cannot parse value \"#{number}\""
end
end
end
|
lib/money/money.ex
| 0.934208
| 0.825765
|
money.ex
|
starcoder
|
defprotocol Realm.Semigroupoid do
  @moduledoc """
  A semigroupoid describes some way of composing morphisms between some
  collection of objects.
  ## Type Class
  An instance of `Realm.Semigroupoid` must define `Realm.Semigroupoid.compose/2`.
  Semigroupoid [compose/2]
  """

  @doc """
  Take two morphisms and return their composition "the math way".
  That is, `(b -> c) -> (a -> b) -> (a -> c)`.
  ## Examples
      iex> times_ten_plus_one = compose(fn x -> x + 1 end, fn y -> y * 10 end)
      ...> times_ten_plus_one.(5)
      51
  """
  @spec compose(t(), t()) :: t()
  def compose(left, right)

  @doc """
  Express how to apply arguments to the _very end_ of a semigroupoid,
  or "run the morphism". This should not be used to inject values part way
  through a composition chain.
  It is provided here to remain idiomatic with Elixir, and to make
  prop testing _possible_.
  ## Examples
      iex> Realm.Semigroupoid.apply(&inspect/1, [42])
      "42"
  """
  @spec apply(t(), [any()]) :: t() | any()
  def apply(morphism, arguments)
end
defmodule Realm.Semigroupoid.Algebra do
  alias Realm.Semigroupoid

  @doc """
  Pipe some data through a morphism.
  Similar to `apply/2`, but with a single argument, not needing to wrap
  the argument in a list.
  ## Examples
      iex> import Realm.Semigroupoid.Algebra
      ...> pipe(42, &(&1 + 1))
      43
  """
  @spec pipe(any(), Semigroupoid.t()) :: any()
  def pipe(data, semigroupoid), do: Semigroupoid.apply(semigroupoid, [data])

  @doc """
  `compose/2`, but with the arguments flipped (same direction as `|>`).
  ## Examples
      iex> import Realm.Semigroupoid.Algebra
      ...> times_ten_plus_one = compose(fn y -> y * 10 end, fn x -> x + 1 end)
      ...> times_ten_plus_one.(5)
      51
  """
  @spec compose(Semigroupoid.t(), Semigroupoid.t()) :: Semigroupoid.t()
  def compose(left, right), do: Semigroupoid.compose(right, left)
end
# Plain functions: application delegates to Kernel.apply/2, composition to Quark.
defimpl Realm.Semigroupoid, for: Function do
  def apply(fun, args), do: Kernel.apply(fun, args)
  def compose(left, right), do: Quark.compose(left, right)
end
|
lib/realm/semigroupoid.ex
| 0.826116
| 0.566678
|
semigroupoid.ex
|
starcoder
|
defmodule Sdr do
  @moduledoc """
  SDR is an Elixir library for Sparse Distributed Representations
  """

  # n! computed tail-recursively with an accumulator.
  defp factorial(n), do: factorial(n, 1)

  defp factorial(0, product), do: product
  defp factorial(n, product) when n > 0, do: factorial(n - 1, n * product)
@doc """
Capacity of a SDR.
## Examples
```elixir
iex(1)> Sdr.capacity(2048, 6)
101733385755251712
```
"""
def capacity(n, w) do
factorial(n) |> div(factorial(w) * factorial(n-w))
end
@doc """
Sparsity of a SDR.
## Examples
```elixir
iex(1)> Sdr.sparsity(2048, 6)
0.0029296875
```
"""
def sparsity(n, w) do
w / n
end
@doc """
Overlap of two SDRs.
## Examples
```elixir
iex(1)> Sdr.overlap(MapSet.new([1, 2]), MapSet.new([2, 3]))
#MapSet<[2]>
```
"""
def overlap(m1, m2) do
MapSet.intersection(m1, m2)
end
@doc """
Overlap of two random similar SDRs given their number of bits n and the number of on bits w.
"""
def overlapr(n, w) do
MapSet.intersection(MapSet.new(1..w, fn _x -> :crypto.rand_uniform(0,n) end), MapSet.new(1..w, fn _x -> :crypto.rand_uniform(0,n) end))
end
@doc """
Union of two SDRs.
## Examples
```elixir
iex(1)> Sdr.union(MapSet.new([1, 2]), MapSet.new([2, 3]))
#MapSet<[1, 2, 3]>
```
Use MapSet.size/1 to get the length of the MapSet. Use MapSet.to_list/1 to convert it to a list.
"""
def union(m1, m2) do
MapSet.union(m1, m2)
end
@doc """
Union of two random similar SDRs given their number of bits n and the number of on bits w.
"""
def unionr(n, w) do
MapSet.union(MapSet.new(1..w, fn _x -> :crypto.rand_uniform(0,n) end), MapSet.new(1..w, fn _x -> :crypto.rand_uniform(0,n) end))
end
@doc """
Simple encoder.
## Examples
```elixir
iex(1)> Sdr.simple(0, 0, 100, 100, 21, 72)
#MapSet<[72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92]>
```
```elixir
iex(1)> Sdr.simple(0, 0, 100, 100, 21, 73)
#MapSet<[73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93]>
```
"""
def simple(start, min, max, buckets, w, v) do
g = w - 1 + start
i = trunc(:math.floor(buckets * (v-min)) / max - min)
MapSet.new(start..g, fn x -> i + x end)
end
@doc """
Infinite encoder.
## Examples
```elixir
iex(1)> Sdr.infinite(21, 72)
#MapSet<[72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92]>
```
```elixir
iex(1)> Sdr.infinite(21, 773)
#MapSet<[773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 790, 791, 792, 793]>
```
"""
def infinite(w, v) do
g = w - 1
MapSet.new(0..g, fn x -> v + x end)
end
@doc """
Hash encoder.
## Examples
```elixir
iex(1)> Sdr.hash(3, 732)
#MapSet<["6C29793A140A811D0C45CE03C1C93A28", "BA3866600C3540F67C1E9575E213BE0A", "E995F98D56967D946471AF29D7BF99F1"]>
```
"""
def hash(w, v) do
g = w - 1
MapSet.new(0..g, fn x -> :crypto.hash(:md5, Integer.to_string(v + x)) |> Base.encode16 end)
end
@doc """
Log encoder.
## Examples
```elixir
iex(1)> Sdr.log(0, 21, 1)
#MapSet<[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]>
```
```elixir
iex(1)> Sdr.log(0, 21, 10)
#MapSet<[2302, 2303, 2304, 2305, 2306, 2307, 2308, 2309, 2310, 2311, 2312, 2313, 2314, 2315, 2316, 2317, 2318, 2319, 2320, 2321, 2322]>
```
```elixir
iex(1)> Sdr.log(0, 21, 100)
#MapSet<[4605, 4606, 4607, 4608, 4609, 4610, 4611, 4612, 4613, 4614, 4615, 4616, 4617, 4618, 4619, 4620, 4621, 4622, 4623, 4624, 4625]>
```
"""
def log(start, w, v) do
g = w - 1 + start
MapSet.new(start..g, fn x -> trunc(:math.log(v) * 1000) + x end)
end
@doc """
Delta encoder.
## Examples
```elixir
iex(1)> Sdr.delta(0, 10, 75, 82)
#MapSet<[7, 8, 9, 10, 11, 12, 13, 14, 15, 16]>
```
```elixir
iex(1)> Sdr.delta(0, 10, 78, 82)
#MapSet<[4, 5, 6, 7, 8, 9, 10, 11, 12, 13]>
```
```elixir
iex(1)> Sdr.delta(0, 10, 78, 72)
#MapSet<[-6, -5, -4, -3, -2, -1, 0, 1, 2, 3]>
```
"""
def delta(start, w, prev, curr) do
g = w - 1 + start
d = curr - prev
MapSet.new(start..g, fn x -> d + x end)
end
@doc """
Cyclic encoder.
## Examples
```elixir
iex(1)> Sdr.cyclic(0, 0, 10, 20, 8, 2)
#MapSet<[4, 5, 6, 7, 8, 9, 10, 11]>
```
```elixir
iex(1)> Sdr.cyclic(0, 0, 10, 20, 8, 5)
#MapSet<[10, 11, 12, 13, 14, 15, 16, 17]>
```
```elixir
iex(1)> Sdr.cyclic(0, 0, 10, 20, 8, 15)
#MapSet<[10, 11, 12, 13, 14, 15, 16, 17]>
```
"""
def cyclic(start, min, max, buckets, w, input) do
g = w - 1 + start
v = rem(input, max)
n = buckets + start
i = trunc(:math.floor(buckets * (v-min)) / max - min)
MapSet.new(start..g, fn x -> cycle(i, x, n) end)
end
defp cycle(i, x, n) do
if (i + x) > n do
i + x - n;
else
i + x;
end
end
@doc """
Multi encoder.
## Examples
```elixir
iex(1)> Sdr.multi([[0,0,100,1000,21,72],[1022,0,10,200,4,5]])
#MapSet<[720, 721, 722, 723, 724, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 1122, 1123, 1124, 1125]>
```
```elixir
iex(1)> Sdr.multi([[0,0,100,1000,21,73],[1022,0,10,200,4,5.1]])
#MapSet<[730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 1123, 1124, 1125, 1126]>
```
"""
def multi(inputs) do
Enum.map(inputs, fn x -> combine(x) end)
|> List.flatten()
|> MapSet.new()
end
defp combine(input) do
acc = Enum.at(input, 0)
min = Enum.at(input, 1)
max = Enum.at(input, 2)
buckets = Enum.at(input, 3)
w = Enum.at(input, 4)
v = Enum.at(input, 5)
g = w - 1 + acc
i = trunc(:math.floor(buckets * (v-min)) / max - min)
Enum.map(acc..g, fn x -> i + x end)
end
@doc """
Merge encoder.
## Examples
```elixir
iex(1)> Sdr.simple(0, 0, 100, 100, 21, 72) |> Sdr.merge(Sdr.cyclic(122, 0, 24, 24, 8, 4))
#MapSet<[72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 126, 127, 128, 129, 130, 131, 132, 133]>
```
"""
def merge(encoded1, encoded2) do
MapSet.union(encoded1, encoded2)
end
end
|
lib/sdr.ex
| 0.799168
| 0.893216
|
sdr.ex
|
starcoder
|
defmodule Adventofcode.Day07HandyHaversacks do
use Adventofcode
alias __MODULE__.{Graph, Parser, Part1, Part2}
def part_1(input) do
input
|> Parser.parse()
|> Graph.new()
|> Part1.solve()
|> Enum.count()
end
def part_2(input) do
input
|> Parser.parse()
|> Graph.new()
|> Part2.solve()
end
  defmodule Graph do
    # Wraps an Erlang :digraph; `bags` keeps the parsed rules for filtering.
    defstruct [:g, :bags]

    # Builds a directed graph with one vertex per bag colour and an edge
    # bag -> contained-bag labelled with the contained quantity.
    def new(bags) do
      graph = %__MODULE__{g: :digraph.new(), bags: bags}
      Enum.each(bags, &add_bag(graph, &1))
      graph
    end

    defp add_bag(%Graph{} = graph, [bag | rules]) do
      :digraph.add_vertex(graph.g, bag, 1)
      Enum.each(rules, fn [quantity, bag2] ->
        :digraph.add_vertex(graph.g, bag2)
        # The edge id {bag, bag2} keeps one edge per bag pair.
        :digraph.add_edge(graph.g, {bag, bag2}, bag, bag2, quantity)
      end)
    end
  end
  defmodule Part1 do
    @top "shiny gold"

    # A bag can hold @top iff a digraph path exists from it to @top;
    # :digraph.get_path/3 returns false when no path exists.
    def solve(%Graph{} = graph) do
      graph.bags
      |> Enum.filter(fn [bag | _] -> bag != @top end)
      |> Enum.map(fn [bag | _] -> :digraph.get_path(graph.g, bag, @top) end)
      |> Enum.filter(&(&1 != false))
    end
  end
  defmodule Part2 do
    @top "shiny gold"

    # Total bags inside a shiny gold bag: expand every containment path out
    # of @top, multiply quantities down each branch, then sum everything.
    def solve(%Graph{} = graph) do
      graph
      |> paths({@top, 1})
      |> Enum.map(&quantities/1)
      |> Enum.map(&sum/1)
      |> :lists.flatten()
      |> Enum.sum()
    end

    # Recursively expands the containment tree below `vertex` as nested
    # lists of {bag, quantity} tuples.
    defp paths(%Graph{} = graph, {vertex, _quantity}) do
      :digraph.edges(graph.g, vertex)
      # edges/2 returns in- and out-edges; keep only edges whose id
      # {from, to} starts at `vertex` (i.e. outgoing ones).
      |> Enum.filter(&(elem(&1, 0) == vertex))
      |> Enum.map(&:digraph.edge(graph.g, &1))
      |> Enum.map(fn {{_, _}, _, vertex, quantity} -> {vertex, quantity} end)
      |> Enum.map(&[&1 | paths(graph, &1)])
    end

    # Strips the tree down to nested quantity lists.
    def quantities([{_vertex, quantity} | vertices]) do
      [quantity | vertices |> Enum.map(&quantities/1)]
    end

    # Multiplies child quantities by their parent quantity down the tree.
    def sum([quantity | quantities]) do
      [quantity | quantities |> Enum.map(fn [q | rest] -> sum([q * quantity | rest]) end)]
    end
  end
  defmodule Parser do
    def parse(input) do
      input
      |> String.trim()
      |> String.split("\n")
      |> Enum.map(&parse_line/1)
    end

    # "light red bags contain 1 bright white bag, 2 muted yellow bags."
    # -> ["light red", [1, "bright white"], [2, "muted yellow"]]
    defp parse_line(line) do
      ~r/^(\w+ \w+) bags contain (.+)$/
      |> Regex.run(line)
      |> Enum.drop(1)
      |> do_parse_line
    end

    defp do_parse_line([bag, contents]) do
      [bag | parse_bag_contents(contents)]
    end

    # "no other bags." yields [] because the scan finds no "<n> <two words>".
    defp parse_bag_contents(contents) do
      ~r/(\d+) (\w+ \w+)/
      |> Regex.scan(contents)
      |> Enum.map(fn [_, quantity, bag] -> [String.to_integer(quantity), bag] end)
    end
  end
end
|
lib/day_07_handy_haversacks.ex
| 0.716715
| 0.460895
|
day_07_handy_haversacks.ex
|
starcoder
|
defmodule XtbClient.Messages.TradeTransaction do
defmodule Command do
alias XtbClient.Messages.{Operation, TradeType}
@moduledoc """
Info about command to trade the transaction.
## Parameters
- `cmd` operation code, see `XtbClient.Messages.Operation`,
- `customComment` the value the customer may provide in order to retrieve it later,
- `expiration` pending order expiration time,
- `offset` trailing offset,
- `order` `0` or position number for closing/modifications,
- `price` trade price,
- `sl` stop loss,
- `tp` take profit,
- `symbol` trade symbol,
- `type` trade transaction type, see `XtbClient.Messages.TradeType`,
- `volume` trade volume.
"""
@type t :: %__MODULE__{
cmd: integer(),
customComment: binary(),
expiration: integer(),
offset: integer(),
order: integer(),
price: float(),
sl: float(),
tp: float(),
symbol: binary(),
type: integer(),
volume: float()
}
@derive Jason.Encoder
defstruct cmd: nil,
customComment: "",
expiration: 0,
offset: 0,
order: 0,
price: 0.0,
sl: 0.0,
tp: 0.0,
symbol: "",
type: nil,
volume: 0.0
    # Builds a Command by dispatching every map entry to the setter of the
    # same name, e.g. %{price: 1.2} calls price(acc, 1.2).
    # NOTE(review): assumes each key in `params` is an atom matching one of
    # the public setters below; unknown keys raise UndefinedFunctionError.
    def new(%{} = params) do
      params
      |> Enum.reduce(%__MODULE__{}, fn {key, value}, acc ->
        apply(__MODULE__, key, [acc, value])
      end)
    end
def operation(%__MODULE__{} = params, operation) when is_atom(operation) do
%{params | cmd: Operation.format(operation)}
end
def custom_comment(%__MODULE__{} = params, comment) when is_binary(comment) do
%{params | customComment: comment}
end
def expiration(%__MODULE__{} = params, %DateTime{} = expiration) do
%{params | expiration: DateTime.to_unix(expiration, :millisecond)}
end
def offset(%__MODULE__{} = params, offset) when is_integer(offset) do
%{params | offset: offset}
end
def order(%__MODULE__{} = params, order) when is_integer(order) and order > 0 do
%{params | order: order}
end
def price(%__MODULE__{} = params, price) when is_number(price) do
%{params | price: price}
end
def stop_loss(%__MODULE__{} = params, sl) when is_number(sl) do
%{params | sl: sl}
end
def take_profit(%__MODULE__{} = params, tp) when is_number(tp) do
%{params | tp: tp}
end
def symbol(%__MODULE__{} = params, symbol) when is_binary(symbol) do
%{params | symbol: symbol}
end
def type(%__MODULE__{} = params, type) when is_atom(type) do
%{params | type: TradeType.format(type)}
end
def volume(%__MODULE__{} = params, volume) when is_number(volume) do
%{params | volume: volume}
end
end
@moduledoc """
Info about realized trade transaction.
## Parameters
- `order` holds info about order number, needed later for verification about order status.
## Handled Api methods
- `tradeTransaction`
"""
@type t :: %__MODULE__{
order: integer()
}
@enforce_keys [:order]
defstruct order: 0
def new(%{"order" => order}) when is_integer(order) do
%__MODULE__{
order: order
}
end
def match(method, data) when method in ["tradeTransaction"] do
{:ok, __MODULE__.new(data)}
end
def match(_method, _data) do
{:no_match}
end
end
|
lib/xtb_client/messages/trade_transaction.ex
| 0.879361
| 0.477371
|
trade_transaction.ex
|
starcoder
|
defmodule FalconPlusApi.Api.Host do
alias Maxwell.Conn
alias FalconPlusApi.{Util, Sig, Api}
@doc """
* [Session](#/authentication) Required
### Request
```
{
"ids": [1,2,3,4],
"maintain_begin": 1497951907,
"maintain_end": 1497951907
}
```
or
```
{
"hosts": ["host.a","host.b"],
"maintain_begin": 1497951907,
"maintain_end": 1497951907
}
```
### Response
```Status: 200```
```{ "message": "Through: hosts, Affect row: 2" }```
"""
def maintain(sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/host/maintain>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.post
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
* ex. /api/v1/host/1647/hostgroup
* grp_name: hostgroup name
### Response
```Status: 200```
```[
{
"id": 78,
"grp_name": "tplB",
"create_user": "userA"
},
{
"id": 145,
"grp_name": "Owl_Default_Group",
"create_user": "userA"
}
]```
"""
def related_hostgroup(host_id, sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/host/#{host_id}/hostgroup>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.get
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
* ex. /api/v1/host/1647/template
* tpl_name: template name
### Response
```Status: 200```
```[
{
"id": 125,
"tpl_name": "tplA",
"parent_id": 0,
"action_id": 99,
"create_user": "root"
},
{
"id": 142,
"tpl_name": "tplB",
"parent_id": 0,
"action_id": 111,
"create_user": "root"
},
{
"id": 180,
"tpl_name": "tplC",
"parent_id": 0,
"action_id": 142,
"create_user": "root"
}
]```
"""
def related_template(host_id, sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/host/#{host_id}/template>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.get
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
### Request
```{"ids": [1,2,3,4]}```
or
```{"hosts": ["host.a","host.b"]}```
### Response
```Status: 200```
```{ "message": "Through: hosts, Affect row: 2" }```
"""
def reset(sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/host/maintain>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.delete
|> Api.get_result
end
end
|
lib/falcon_plus_api/api/host.ex
| 0.533397
| 0.738763
|
host.ex
|
starcoder
|
defmodule SupabaseSurface.Plugs.Session do
  @moduledoc """
  A plug to handle access and refresh tokens.

  In case `access_token` and `refresh_token` are not available
  in the session, it redirects to a `login_endpoint`.

  If the session already contains those tokens, it checks the
  expiration time of the access token. In case the token is already
  expired or expires in less than the (optionally provided)
  `expiry_tolerance`, it tries to refresh it.

  ## Options

    * `:login_endpoint` - a endpoint to redirect to in case of invalid tokens (default: "/login")
    * `:expiry_tolerance` - time in seconds the `access_token` still has to be valid,
      otherwise it should be refreshed (default: 60)

  ## Examples

      plug SupabaseSurface.Plugs.Session, login_endpoint: "/auth", expiry_tolerance: 120
  """
  import Plug.Conn
  import Phoenix.Controller

  # Normalise plug options into a map with defaults applied.
  def init(options \\ []) do
    %{
      login_endpoint: Keyword.get(options, :login_endpoint, "/login"),
      expiry_tolerance: Keyword.get(options, :expiry_tolerance, 60)
    }
  end

  # Never guard the login endpoint itself, otherwise we would redirect in a loop.
  def call(%Plug.Conn{request_path: ep} = conn, %{login_endpoint: ep}), do: conn

  def call(%Plug.Conn{} = conn, %{login_endpoint: ep, expiry_tolerance: exp_tolerance}) do
    with %{"access_token" => _at, "refresh_token" => _rt} = tokens <- get_session(conn),
         {:ok, %{"access_token" => access_token, "refresh_token" => refresh_token}} <-
           check_token_expiration(tokens, exp_tolerance),
         {:ok, user_id} <- fetch_user(access_token) do
      conn
      |> put_session(:access_token, access_token)
      |> put_session(:refresh_token, refresh_token)
      |> put_session(:user_id, user_id)
    else
      # Any failure (missing tokens, invalid token, failed refresh/lookup)
      # wipes the session and sends the user to the login endpoint.
      _error ->
        conn
        |> clear_session()
        |> redirect(to: ep)
        |> halt()
    end
  end

  # Refreshes the token pair when the access token expires within
  # `expiry_tolerance` seconds. FIX: a malformed token (or claims without
  # "exp") previously raised a MatchError and crashed the plug; it now
  # returns {:error, _} so `call/2` falls through to the login redirect.
  defp check_token_expiration(%{"access_token" => access_token} = tokens, expiry_tolerance) do
    case Joken.peek_claims(access_token) do
      {:ok, %{"exp" => exp}} ->
        refresh_access_token(exp - System.system_time(:second), expiry_tolerance, tokens)

      _invalid ->
        {:error, :invalid_access_token}
    end
  end

  defp refresh_access_token(time_remaining, expiry_tolerance, tokens)
       when time_remaining < expiry_tolerance do
    Supabase.auth() |> GoTrue.refresh_access_token(tokens["refresh_token"])
  end

  defp refresh_access_token(_, _, tokens), do: {:ok, tokens}

  # Resolves the user id behind the access token. FIX: a failed lookup no
  # longer crashes the plug but triggers the login redirect in `call/2`.
  defp fetch_user(access_token) do
    case Supabase.auth() |> GoTrue.get_user(access_token) do
      {:ok, user} -> {:ok, user["id"]}
      _error -> {:error, :invalid_user}
    end
  end
end
|
lib/supabase_surface/plugs/session.ex
| 0.736495
| 0.400515
|
session.ex
|
starcoder
|
defmodule FusionDsl.Kernel do
@moduledoc """
Kernel module of FusionDSL
"""
use FusionDsl.Impl
alias FusionDsl.Runtime.Executor
@r_json_vars ~r/\$([A-Za-z]{1}[A-Za-z0-9.\_]*)/
@functions [
:last_index_of,
:regex_replace,
:create_array,
:json_decode,
:json_encode,
:regex_match,
:starts_with,
:regex_scan,
:ends_with,
:regex_run,
:to_number,
:to_string,
:contains,
:index_of,
:jump_not,
:dispose,
:jump_to,
:replace,
:reverse,
:insert,
:length,
:remove,
:error,
:regex,
:round,
:slice,
:elem,
:json,
:jump,
:mult,
:noop,
:rand,
:wait,
:add,
:and,
:div,
:gte,
:int,
:lte,
:mod,
:neq,
:not,
:set,
:sub,
:var,
:eq,
:gt,
:lt,
:or
]
# Exposes the closed set of DSL function names implemented by this module
# to the FusionDsl runtime (behaviour callback from `use FusionDsl.Impl`).
@impl true
def __list_fusion_functions__, do: @functions
# Logical AND over two evaluated arguments. Only booleans are accepted;
# anything else yields a descriptive {:error, _} tuple.
def fn_and({:and, _ctx, args}, env) do
  {:ok, [left, right], env} = prep_arg(env, args)

  cond do
    is_boolean(left) and is_boolean(right) ->
      {:ok, left and right, env}

    true ->
      {:error,
       "And(&&) is not supported for #{inspect(left)} and #{inspect(right)}"}
  end
end

# Logical OR over two evaluated arguments; booleans only.
def fn_or({:or, _ctx, args}, env) do
  {:ok, [left, right], env} = prep_arg(env, args)

  cond do
    is_boolean(left) and is_boolean(right) ->
      {:ok, left or right, env}

    true ->
      {:error,
       "Or(||) is not supported for #{inspect(left)} and #{inspect(right)}"}
  end
end

# Logical NOT; booleans only. NOTE(review): unlike fn_and/fn_or this raises
# through error/3 (with line info from ctx) instead of returning {:error, _}.
def fn_not({:not, ctx, args}, env) do
  {:ok, [value], env} = prep_arg(env, args)

  cond do
    is_boolean(value) ->
      {:ok, not value, env}

    true ->
      error(env.prog, ctx, "not is not supported for #{inspect(value)}")
  end
end
# Overloaded "+": numeric addition, string concatenation (when either side is
# a binary), list ++ list, append (list on the left) or prepend (list on the
# right).
def add({:add, _ctx, args}, env) do
  {:ok, [left, right], env} = prep_arg(env, args)

  cond do
    is_number(left) and is_number(right) ->
      {:ok, left + right, env}

    is_binary(left) or is_binary(right) ->
      {:ok, to_string(left) <> to_string(right), env}

    is_list(left) and is_list(right) ->
      {:ok, left ++ right, env}

    is_list(left) ->
      {:ok, left ++ [right], env}

    is_list(right) ->
      {:ok, [left | right], env}

    true ->
      {:error,
       "Add(+) is not supported for #{inspect(left)} and #{inspect(right)}"}
  end
end

# Numeric subtraction.
def sub({:sub, _ctx, args}, env) do
  {:ok, [left, right], env} = prep_arg(env, args)

  cond do
    is_number(left) and is_number(right) ->
      {:ok, left - right, env}

    true ->
      {:error,
       "Sub(-) is not supported for #{inspect(left)} and #{inspect(right)}"}
  end
end

# Numeric multiplication.
def mult({:mult, _ctx, args}, env) do
  {:ok, [left, right], env} = prep_arg(env, args)

  cond do
    is_number(left) and is_number(right) ->
      {:ok, left * right, env}

    true ->
      {:error,
       "Mult(*) is not supported for #{inspect(left)} and #{inspect(right)}"}
  end
end

# Numeric division. Uses `/`, so the result is always a float.
def div({:div, _ctx, args}, env) do
  {:ok, [left, right], env} = prep_arg(env, args)

  cond do
    is_number(left) and is_number(right) ->
      {:ok, left / right, env}

    true ->
      {:error,
       "Div(/) is not supported for #{inspect(left)} and #{inspect(right)}"}
  end
end

# Modulo. NOTE(review): rem/2 accepts integers only, but the guard admits any
# number — a float operand will raise ArithmeticError. TODO confirm intended.
def mod({:mod, _ctx, args}, env) do
  {:ok, [left, right], env} = prep_arg(env, args)

  cond do
    is_number(left) and is_number(right) ->
      {:ok, rem(left, right), env}

    true ->
      {:error,
       "Mod(%) is not supported for #{inspect(left)} and #{inspect(right)}"}
  end
end
# Equality. Tuples are rejected; nil compares equal only to nil; everything
# else uses structural ==.
def eq({:eq, _ctx, args}, env) do
  {:ok, [left, right], env} = prep_arg(env, args)

  cond do
    is_tuple(left) or is_tuple(right) ->
      {:error,
       "Equals(==) is not supported for #{inspect(left)} and #{inspect(right)}"}

    is_nil(left) ->
      {:ok, is_nil(right), env}

    is_nil(right) ->
      {:ok, is_nil(left), env}

    true ->
      {:ok, left == right, env}
  end
end

# Inequality; mirror image of eq/2 including the nil special-casing.
def neq({:neq, _ctx, args}, env) do
  {:ok, [left, right], env} = prep_arg(env, args)

  cond do
    is_tuple(left) or is_tuple(right) ->
      {:error,
       "Not-Equals(!=) is not supported for #{inspect(left)} and #{inspect(right)}"}

    is_nil(left) ->
      {:ok, not is_nil(right), env}

    is_nil(right) ->
      {:ok, not is_nil(left), env}

    true ->
      {:ok, left != right, env}
  end
end

# Numeric <=.
def lte({:lte, _ctx, args}, env) do
  {:ok, [left, right], env} = prep_arg(env, args)

  cond do
    is_number(left) and is_number(right) ->
      {:ok, left <= right, env}

    true ->
      {:error,
       "Lower-Than-Equal(<=) is not supported for #{inspect(left)} and #{inspect(right)}"}
  end
end

# Numeric >=.
def gte({:gte, _ctx, args}, env) do
  {:ok, [left, right], env} = prep_arg(env, args)

  cond do
    is_number(left) and is_number(right) ->
      {:ok, left >= right, env}

    true ->
      {:error,
       "Greater-Than-Equal(>=) is not supported for #{inspect(left)} and #{inspect(right)}"}
  end
end

# Numeric <.
def lt({:lt, _ctx, args}, env) do
  {:ok, [left, right], env} = prep_arg(env, args)

  cond do
    is_number(left) and is_number(right) ->
      {:ok, left < right, env}

    true ->
      {:error,
       "Lower-Than(<) is not supported for #{inspect(left)} and #{inspect(right)}"}
  end
end

# Numeric >.
def gt({:gt, _ctx, args}, env) do
  {:ok, [left, right], env} = prep_arg(env, args)

  cond do
    is_number(left) and is_number(right) ->
      {:ok, left > right, env}

    true ->
      {:error,
       "Greater-Than(>) is not supported for #{inspect(left)} and #{inspect(right)}"}
  end
end
# Assigns the evaluated right-hand side to a variable. Dotted names
# ("a.b.c") write into nested maps via insert_map_var/3.
#
# Variables prefixed with "_" are immutable: once initialized they may not be
# reset. FIX: the original computed this check but discarded its result (the
# `if` expression's value was never returned), so immutable variables could
# silently be overwritten; the error is now actually returned.
def set({:set, _ctx, [{:var, _, [var]}, right]}, env) do
  immutable_reset? =
    String.starts_with?(var, "_") and match?({:ok, _, _}, get_var(env, var))

  if immutable_reset? do
    {:error, "Cannot reset an immutable object #{inspect(var)}"}
  else
    {:ok, val, env} = Executor.execute_ast(right, env)

    env =
      case String.split(var, ".") do
        # Plain variable: direct write into the vars map.
        [_] ->
          %{env | vars: Map.put(env.vars, var, val)}

        # Dotted path: merge into the existing map value, or start from %{}.
        [var | map_list] ->
          case get_var(env, var) do
            {:ok, var_val, env} ->
              %{env | vars: Map.put(env.vars, var, insert_map_var(map_list, val, var_val))}

            _ ->
              %{env | vars: Map.put(env.vars, var, insert_map_var(map_list, val, %{}))}
          end
      end

    {:ok, val, env}
  end
end
# Random integer drawn uniformly from lower..upper (inclusive).
# NOTE(review): Enum.random/1 over a range needs integer bounds; floats pass
# the is_number guard but will raise — TODO confirm intended.
def rand({:rand, _ctx, args}, env) do
  {:ok, [lower, upper], env} = prep_arg(env, args)

  cond do
    is_number(lower) and is_number(upper) ->
      {:ok, Enum.random(lower..upper), env}

    true ->
      {:error,
       "rand is not supported for #{inspect(lower)} and #{inspect(upper)}"}
  end
end
# Parses a string into a number: Float when it contains a ".", Integer
# otherwise. Unparseable input yields nil (not an error tuple).
# FIX: the non-binary error message previously said "to_string" — a
# copy-paste slip from the to_string/2 clause.
def to_number({:to_number, _ctx, args}, env) do
  {:ok, [binary], env} = prep_arg(env, args)

  cond do
    is_binary(binary) ->
      parser = if String.contains?(binary, "."), do: Float, else: Integer

      num =
        case parser.parse(binary) do
          {n, _} -> n
          :error -> nil
        end

      {:ok, num, env}

    true ->
      {:error, "to_number is not supported for #{inspect(binary)}"}
  end
end
# Converts a string or number to an integer (truncating floats).
# NOTE(review): Integer.parse/1 returning :error on a non-numeric string
# raises MatchError here instead of the error/3 path — TODO confirm.
def int({:int, ctx, [_] = args}, env) do
  {:ok, [num], env} = prep_arg(env, args)

  cond do
    is_binary(num) ->
      {val, _} = Integer.parse(num)
      {:ok, val, env}

    is_number(num) ->
      {:ok, trunc(num), env}

    true ->
      error(env.prog, ctx, "Cannot convert #{num} to int")
  end
end

# Rounds a string or number to the nearest integer.
def round({:round, ctx, [_] = args}, env) do
  {:ok, [num], env} = prep_arg(env, args)

  cond do
    is_binary(num) ->
      {val, _} = Float.parse(num)
      {:ok, round(val), env}

    is_number(num) ->
      {:ok, round(num), env}

    true ->
      error(env.prog, ctx, "Cannot convert #{num} to int")
  end
end

# Builds an array (Elixir list) from the already-evaluated arguments.
def create_array({:create_array, _ctx, args}, env) do
  {:ok, arr_elems, env} = prep_arg(env, args)
  {:ok, arr_elems, env}
end

# No-op instruction; evaluates to nil.
def noop({:noop, _ctx, _}, env) do
  {:ok, nil, env}
end

# Conditional jump: when the (boolean) condition is false, signals the
# executor to skip `jump_amount` instructions via a {:jump, _, _} tuple.
def jump_not({:jump_not, _ctx, args}, env) do
  {:ok, [condition, jump_amount], env} = prep_arg(env, args)

  cond do
    is_boolean(condition) ->
      if not condition do
        {:jump, jump_amount, env}
      else
        {:ok, nil, env}
      end

    true ->
      {:error,
       "Only boolean (true|false) is accepted in condition. not #{inspect(condition)}"}
  end
end
# Element access: array[index] (nil when out of range, per Enum.at/2).
def elem({:elem, _ctx, [_, _] = args}, env) do
  {:ok, [array, index], env} = prep_arg(env, args)

  cond do
    is_list(array) and is_integer(index) ->
      {:ok, Enum.at(array, index), env}

    true ->
      {:error,
       "elem is not supported for #{inspect(array)} at #{inspect(index)}"}
  end
end

# Inserts `value` into `array` at `index` (List.insert_at/3 semantics,
# negative indices count from the end).
def insert({:insert, _ctx, [_, _, _] = args}, env) do
  {:ok, [array, index, value], env} = prep_arg(env, args)

  cond do
    is_list(array) and is_integer(index) ->
      {:ok, List.insert_at(array, index, value), env}

    true ->
      {:error,
       "insert is not supported for #{inspect(array)} and #{inspect(value)} at #{inspect(index)}"}
  end
end

# Sleeps for `amount` seconds (fractions allowed; converted to ms).
def wait({:wait, _ctx, [_] = args}, env) do
  {:ok, [amount], env} = prep_arg(env, args)

  cond do
    is_number(amount) ->
      :timer.sleep(trunc(amount * 1000))
      {:ok, nil, env}

    true ->
      {:error,
       "wait should be called with a valid number. not #{inspect(amount)}"}
  end
end

# Removal: list element by index, string code point by position, or map key.
def remove({:remove, _ctx, [_, _] = args}, env) do
  {:ok, [value, index], env} = prep_arg(env, args)

  cond do
    is_list(value) and is_integer(index) ->
      {:ok, List.delete_at(value, index), env}

    is_binary(value) and is_integer(index) ->
      # Drop the single UTF-8 code point at `index`.
      {lead, <<_::utf8, tail::binary>>} = String.split_at(value, index)
      {:ok, lead <> tail, env}

    is_map(value) and is_binary(index) ->
      {:ok, Map.delete(value, index), env}

    true ->
      {:error,
       "remove is not supported with args: #{inspect(value)} and #{inspect(index)}"}
  end
end

# Deletes a variable binding, returning its (evaluated) last value.
# Dotted names (map elements) cannot be disposed.
def dispose({:dispose, _ctx, [{:var, _, [name]}] = args}, env) do
  {:ok, [value], env} = prep_arg(env, args)

  cond do
    String.contains?(name, ".") ->
      {:error,
       "Dispose only works on variables (not map elements) $#{inspect(name)}"}

    true ->
      {:ok, value, Map.put(env, :vars, Map.delete(env.vars, name))}
  end
end
# TODO: Add an option to cancel variable injection
# Decodes a JSON string, first substituting `$var` references (matched by
# @r_json_vars) with current variable values via replace_json_vars/3.
# FIX: the is_binary check now happens BEFORE Regex.scan/2 — previously a
# non-binary argument crashed in Regex.scan with FunctionClauseError instead
# of reaching the intended error branch.
def json_decode({:json_decode, _ctx, [_] = args}, env) do
  {:ok, [json], env} = prep_arg(env, args)

  cond do
    is_binary(json) ->
      variables = Regex.scan(@r_json_vars, json)
      {json, env} = replace_json_vars(variables, json, env)

      case Poison.decode(json) do
        {:ok, data} ->
          {:ok, data, env}

        _ ->
          {:error, "Invalid json binary for json_decode: #{inspect(json)}"}
      end

    true ->
      {:error,
       "Only binary(Strings) are accepted in json_decode, not #{inspect(json)}"}
  end
end
# Encodes any Poison-encodable value to a JSON string; raises through
# error/3 (with line info) when encoding fails.
def json_encode({:json_encode, ctx, [_] = args}, env) do
  {:ok, [obj], env} = prep_arg(env, args)

  case Poison.encode(obj) do
    {:ok, string} ->
      {:ok, string, env}

    _ ->
      error(env.prog, ctx, "Invalid object to json_encode #{inspect(obj)}")
  end
end
# Membership test: substring for strings, element membership for lists.
def contains({:contains, _ctx, [_, _] = args}, env) do
  {:ok, [source, element], env} = prep_arg(env, args)

  cond do
    is_binary(source) and is_binary(element) ->
      {:ok, String.contains?(source, element), env}

    is_list(source) ->
      {:ok, Enum.member?(source, element), env}

    true ->
      {:error,
       "contains works on arrays and strings only. called with: #{inspect(source)} and #{inspect(element)}"}
  end
end

# Index of the first occurrence of `element`; nil when not found.
# String index is measured in graphemes (length of the prefix before the
# first split match).
def index_of({:index_of, _ctx, [_, _] = args}, env) do
  {:ok, [source, element], env} = prep_arg(env, args)

  cond do
    is_binary(source) and is_binary(element) ->
      len =
        case String.split(source, element) do
          [h, _ | _] ->
            String.length(h)

          _ ->
            nil
        end

      {:ok, len, env}

    is_list(source) ->
      {:ok, Enum.find_index(source, &(&1 == element)), env}

    true ->
      {:error,
       "index_of works on arrays and strings only. called with: #{inspect(source)} and #{inspect(element)}"}
  end
end
# Index of the LAST occurrence of `element` in a string or array; nil when
# not found.
#
# FIX: the string branch previously reversed only the source, so any
# multi-character needle could never match ("bc" in "abcd" searched "dcba"
# for "bc"), and the offset arithmetic assumed a 1-character needle. Both
# source and needle are now reversed and the needle length subtracted.
def last_index_of({:last_index_of, _ctx, [_, _] = args}, env) do
  {:ok, [source, element], env} = prep_arg(env, args)

  cond do
    is_binary(source) and is_binary(element) ->
      reversed_source = String.reverse(source)
      reversed_element = String.reverse(element)

      len =
        case String.split(reversed_source, reversed_element) do
          [h, _ | _] ->
            # Graphemes after the last match = length(h); subtract the
            # needle itself to land on its first character.
            String.length(source) - String.length(h) - String.length(element)

          _ ->
            nil
        end

      {:ok, len, env}

    is_list(source) ->
      reversed = Enum.reverse(source)

      case Enum.find_index(reversed, &(&1 == element)) do
        nil ->
          {:ok, nil, env}

        count when is_integer(count) ->
          {:ok, Enum.count(reversed) - count - 1, env}
      end

    true ->
      {:error,
       "last_index_of works on arrays and strings only. called with: #{inspect(source)} and #{inspect(element)}"}
  end
end
# True when the string starts with `element` / the list's head equals it.
def starts_with({:starts_with, _ctx, [_, _] = args}, env) do
  {:ok, [source, element], env} = prep_arg(env, args)

  cond do
    is_binary(source) and is_binary(element) ->
      {:ok, String.starts_with?(source, element), env}

    is_list(source) ->
      case source do
        [^element | _] ->
          {:ok, true, env}

        _ ->
          {:ok, false, env}
      end

    true ->
      {:error,
       "starts_with works on arrays and strings only. called with: #{inspect(source)} and #{inspect(element)}"}
  end
end

# True when the string ends with `element` / the list's last item equals it
# (an empty list never matches — List.last/1 returns nil).
def ends_with({:ends_with, _ctx, [_, _] = args}, env) do
  {:ok, [source, element], env} = prep_arg(env, args)

  cond do
    is_binary(source) and is_binary(element) ->
      {:ok, String.ends_with?(source, element), env}

    is_list(source) ->
      last = List.last(source)

      cond do
        is_nil(last) ->
          {:ok, false, env}

        last == element ->
          {:ok, true, env}

        true ->
          {:ok, false, env}
      end

    true ->
      {:error,
       "ends_with works on arrays and strings only. called with: #{inspect(source)} and #{inspect(element)}"}
  end
end

# Replaces every occurrence of `element`: substring replacement for strings,
# element-wise replacement for lists (via replace_in_list/3).
def replace({:replace, _ctx, [_, _, _] = args}, env) do
  {:ok, [source, element, replacement], env} = prep_arg(env, args)

  cond do
    is_binary(source) and is_binary(element) ->
      {:ok, String.replace(source, element, replacement), env}

    is_list(source) ->
      result = replace_in_list(source, element, replacement)
      {:ok, result, env}

    true ->
      {:error,
       "replace works on arrays and strings only. called with: #{inspect(source)} and #{inspect(element)} and #{inspect(replacement)}"}
  end
end

# Reverses a string (grapheme-wise) or a list.
def reverse({:reverse, _ctx, [_] = args}, env) do
  {:ok, [source], env} = prep_arg(env, args)

  cond do
    is_binary(source) ->
      {:ok, String.reverse(source), env}

    is_list(source) ->
      {:ok, Enum.reverse(source), env}

    true ->
      {:error,
       "reverse works on arrays and strings only. called with: #{inspect(source)}"}
  end
end

# Length of a string (in graphemes) or a list.
def length({:length, _ctx, [_] = args}, env) do
  {:ok, [source], env} = prep_arg(env, args)

  cond do
    is_binary(source) ->
      {:ok, String.length(source), env}

    is_list(source) ->
      {:ok, Enum.count(source), env}

    true ->
      {:error,
       "length works on arrays and strings only. called with: #{inspect(source)}"}
  end
end
# Slice of a string or list from `start`, optionally limited to `count`
# items; without a count the slice runs to the end (start..-1).
# NOTE(review): `start..-1` without an explicit step is deprecated on recent
# Elixir versions — confirm against the project's Elixir requirement.
def slice({:slice, ctx, [_, _ | _] = args}, env) do
  {:ok, [source, start | count] = f_args, env} = prep_arg(env, args)

  # Optional third argument; -1 is the internal "to the end" sentinel.
  count =
    case count do
      [] ->
        -1

      [num | _] when is_integer(num) ->
        num
    end

  cond do
    is_binary(source) and count == -1 ->
      {:ok, String.slice(source, start..-1), env}

    is_binary(source) ->
      {:ok, String.slice(source, start, count), env}

    is_list(source) and count == -1 ->
      {:ok, Enum.slice(source, start..-1), env}

    is_list(source) ->
      {:ok, Enum.slice(source, start, count), env}

    true ->
      error(env.prog, ctx, "Bad arguments for slice #{inspect(f_args)}")
  end
end
# Compiles a regex from a pattern string plus optional modifier string
# (e.g. "i"). Raises via Regex.compile!/2 on an invalid pattern.
def regex({:regex, _ctx, args}, env) do
  {:ok, [regex_str | opts] = f_args, env} = prep_arg(env, args)

  opt =
    case opts do
      [] -> ""
      [str] -> str
    end

  cond do
    is_binary(regex_str) and is_binary(opt) ->
      {:ok, Regex.compile!(regex_str, opt), env}

    true ->
      {:error, "Invalid arguments for regex compile: #{inspect(f_args)}"}
    end
end

# Runs a pre-compiled regex once against `string`, returning match positions
# (index/length pairs) normalised into plain lists by norm_regex/2.
def regex_run({:regex_run, _ctx, args}, env) do
  {:ok, [%Regex{} = regex, string] = f_args, env} = prep_arg(env, args)

  cond do
    is_binary(string) ->
      {:ok, norm_regex(Regex.run(regex, string, return: :index)), env}

    true ->
      {:error, "Invalid arguments for regex run: #{inspect(f_args)}"}
  end
end

# Boolean match test for a pre-compiled regex.
def regex_match({:regex_match, _ctx, args}, env) do
  {:ok, [%Regex{} = regex, string] = f_args, env} = prep_arg(env, args)

  cond do
    is_binary(string) ->
      {:ok, Regex.match?(regex, string), env}

    true ->
      {:error, "Invalid arguments for regex match: #{inspect(f_args)}"}
  end
end

# Replaces every regex match in `string` with `replacement`.
def regex_replace({:regex_replace, _ctx, args}, env) do
  {:ok, [%Regex{} = regex, string, replacement] = f_args, env} =
    prep_arg(env, args)

  cond do
    is_binary(string) ->
      {:ok, Regex.replace(regex, string, replacement), env}

    true ->
      {:error, "Invalid arguments for regex replace: #{inspect(f_args)}"}
  end
end
# Scans `string` for ALL matches of a pre-compiled regex, returning match
# positions (index/length pairs) normalised to plain lists via norm_regex/2.
def regex_scan({:regex_scan, _ctx, args}, env) do
  {:ok, [%Regex{} = regex, string] = f_args, env} = prep_arg(env, args)

  cond do
    is_binary(string) ->
      {:ok, norm_regex(Regex.scan(regex, string, return: :index)), env}

    true ->
      # FIX: message previously said "regex replace" (copy-paste slip from
      # the regex_replace clause above).
      {:error, "Invalid arguments for regex scan: #{inspect(f_args)}"}
  end
end
# Converts any value to its string form. The inner call resolves to
# Kernel.to_string/1 (different arity from this 2-arity clause).
def to_string({:to_string, _ctx, args}, env) do
  {:ok, [val], env} = prep_arg(env, args)
  {:ok, to_string(val), env}
end
# Recursively converts the {index, length} tuples produced by Regex
# `return: :index` results into 2-element lists, preserving nesting —
# presumably so results are representable in the DSL (which rejects tuples).
defp norm_regex(list, acc \\ [])

defp norm_regex([a | t], acc) when is_tuple(a) do
  norm_regex(t, [Tuple.to_list(a) | acc])
end

defp norm_regex([a | t], acc) when is_list(a) do
  a = norm_regex(a, [])
  norm_regex(t, [a | acc])
end

defp norm_regex([], acc) do
  Enum.reverse(acc)
end

# Writes `val` into a nested map along the key path, creating intermediate
# maps as needed (non-map intermediates are silently replaced by %{}).
defp insert_map_var([final_var], val, var_val) do
  Map.put(var_val, final_var, val)
end

defp insert_map_var([h | t], val, var_val) do
  acc =
    case Map.fetch(var_val, h) do
      {:ok, acc} when is_map(acc) ->
        acc

      _ ->
        %{}
    end

  final = insert_map_var(t, val, acc)
  Map.put(var_val, h, final)
end

# Raises a runtime error annotated with the DSL source line from ctx.
defp error(_prog, ctx, msg) do
  raise("Kernel error\n Line: #{ctx[:ln]}: #{msg}")
end

# Replaces every element equal to `pattern` with `replacement`,
# preserving list order (prepend + final reverse).
defp replace_in_list(source, pattern, replacement),
  do:
    source
    |> Enum.reduce([], fn x, acc ->
      if x == pattern do
        [replacement | acc]
      else
        [x | acc]
      end
    end)
    |> Enum.reverse()
# Substitutes each `$var` occurrence in the JSON string with the variable's
# current value; unknown variables are left in place.
# FIX: the error clause previously matched `{:error}` (0-element payload),
# which never matches get_var's `{:error, reason}` shape (e.g.
# {:error, :not_initialized}) and caused a CaseClauseError for any missing
# variable; a catch-all now skips unresolvable references.
defp replace_json_vars([], json, env), do: {json, env}

defp replace_json_vars(variables, json, env) do
  Enum.reduce(variables, {json, env}, fn [name, var], {json, env} ->
    case get_var(env, var) do
      {:ok, v, env} ->
        {String.replace(json, name, to_string(v)), env}

      _not_found ->
        {json, env}
    end
  end)
end
end
|
lib/fusion_dsl/kernel.ex
| 0.830732
| 0.61927
|
kernel.ex
|
starcoder
|
defmodule Hawk.Server do
  @moduledoc """
  This module provides functions to create response headers and authenticate request.
  """
  alias Hawk.{Crypto, Header, Now}

  # Supported MAC algorithms, resolved once at compile time; used in guards.
  @algorithms Crypto.algorithms()

  @doc """
  Authenticate a hawk request
  ## Options
    * `:timestamp_skew_sec` Number of seconds of permitted clock skew for incoming timestamps. Defaults to 60 seconds.
      Provides a +/- skew which means actual allowed window is double the number of seconds.
    * `:localtime_offset_msec` Local clock time offset express in a number of milliseconds (positive or negative). Defaults to 0.
    * `:payload` Payload for validation. The client calculates the hash value and includes it via the `hash`
      header attribute. The server always ensures the value provided has been included in the request
      MAC. When this option is provided, it validates the hash value itself. Validation is done by calculating
      a hash value over the entire payload (assuming it has already be normalized to the same format and
      encoding used by the client to calculate the hash on request). If the payload is not available at the time
      of authentication, the `Hawk.Server.authenticate_payload/4` method can be used by passing it the credentials and
      `artifacts` returned from `Hawk.Server.authenticate/3`.
    * `:host_header_name` Used to override the default `host` header when used
      behind a cache of a proxy. Apache2 changes the value of the 'Host' header while preserving
      the original (which is what the module must verify) in the 'x-forwarded-host' header field.
  """
  @spec authenticate(Hawk.Request.t(), module(), Hawk.opts()) :: {:ok, %{artifacts: map(), credentials: map()}} | {:error, {integer, binary()}} | {:error, {integer, binary(), {binary(), binary()}}}
  def authenticate(request, config, options \\ %{})
  # Normalise keyword-list options into a map.
  def authenticate(request, config, options) when is_list(options), do: authenticate(request, config, Map.new(options))
  def authenticate(%{method: method, host: host, port: port, url: url} = req, config, options) do
    options = Map.merge(%{timestamp_skew_sec: 60}, options)
    now = Now.msec(options)
    case Header.parse(req[:authorization]) do
      {:ok, %{id: id, ts: _, nonce: _, mac: mac} = attributes} ->
        case id |> config.get_credentials(options) |> validate_credentials() do
          {:error, reason} -> {:error, reason}
          {:ok, credentials} ->
            # Each check passes the {:ok, result} through untouched or
            # replaces it with {:error, _}; later checks short-circuit.
            {:ok, %{artifacts: Map.merge(attributes, %{method: method, host: host, port: port, resource: url}), credentials: credentials}}
            |> validate_mac(mac, "header")
            |> check_payload(options)
            |> check_nonce(config)
            |> check_timestamp_staleness(now, options, fn -> Crypto.timestamp_message(credentials, options) end)
        end
      # Header parsed but lacks one of the mandatory id/ts/nonce/mac attributes.
      {:ok, _attributes} -> {:error, {400, "Missing attributes"}}
      {:error, reason} -> {:error, reason}
    end
  end
  @doc """
  Authenticate a raw request payload hash - used when payload cannot be provided during `Hawk.Server.authenticate/3`
  the `credentials` and `artifacts` are received from `Hawk.Server.authenticate/3` the `content-type` is from the request
  """
  @spec authenticate_payload(iodata(), %{artifacts: map(), credentials: map()}, iodata()) :: {:ok, %{artifacts: map(), credentials: map()}} | {:error, {401, binary(), {binary(), binary()}}}
  def authenticate_payload(payload, %{artifacts: %{hash: hash}, credentials: %{algorithm: algorithm}} = result, content_type) do
    # Constant-time comparison guards against timing attacks on the hash.
    algorithm
    |> Crypto.calculate_payload_hash(payload, content_type)
    |> Kryptiles.fixed_time_comparison(hash)
    |> case do
         false -> {:error, {401, "Bad payload hash", Header.error("Bad payload hash")}}
         true -> {:ok, result}
       end
  end

  @doc """
  Authenticate payload hash - used when payload cannot be provided during `Hawk.Server.authenticate/3`
  takes the payload hash calculated using Hawk.Crypto.calculate_payload_hash/3 and `artifacts` received from `Hawk.Server.authenticate/3`
  """
  @spec authenticate_payload_hash(binary(), map()) :: {:ok, %{artifacts: map()}} | {:error, {401, binary(), {binary(), binary()}}}
  def authenticate_payload_hash(calculate_hash, %{hash: hash} = artifacts) do
    case Kryptiles.fixed_time_comparison(calculate_hash, hash) do
      false -> {:error, {401, "Bad payload hash", Header.error("Bad payload hash")}}
      true -> {:ok, %{artifacts: artifacts}}
    end
  end

  @doc """
  Generate a Server-Authorization header for a given response
  takes `result` received from `Hawk.Server.authenticate/3`
  ## Options
    * `:ext` Application specific data sent via the ext attribute
    * `:payload` UTF-8 encoded string for body hash generation (ignored if hash provided)
    * `:content_type` Payload content-type (ignored if hash provided)
    * `:hash` Pre-calculated payload hash
  """
  @spec header(%{artifacts: map(), credentials: map()}, Hawk.opts()) :: binary()
  def header(result, options \\ %{})
  def header(result, options) when is_list(options), do: header(result, Map.new(options))
  # Clause 1: a pre-calculated :hash option takes precedence over :payload.
  def header(%{artifacts: %{method: _, host: _, port: _, resource: _, ts: _, nonce: _, id: _} = artifacts, credentials: %{key: _key, algorithm: algorithm} = credentials}, %{hash: _} = options) when algorithm in @algorithms do
    options = Map.take(options, [:ext, :hash])
    artifacts = artifacts |> Map.drop([:ext, :hash, :mac]) |> Map.merge(options)
    maybe_add(artifacts, "Hawk mac=\"#{Crypto.calculate_mac("response", credentials, artifacts)}\"")
  end
  # Clause 2: derive the hash from the :payload option.
  def header(%{artifacts: %{method: _, host: _, port: _, resource: _, ts: _, nonce: _, id: _} = artifacts, credentials: %{key: _key, algorithm: algorithm} = credentials}, %{payload: payload} = options) when algorithm in @algorithms do
    options = options |> Map.take([:ext]) |> Map.put(:hash, Crypto.calculate_payload_hash(algorithm, payload, options[:content_type]))
    artifacts = artifacts |> Map.drop([:ext, :hash, :mac]) |> Map.merge(options)
    maybe_add(artifacts, "Hawk mac=\"#{Crypto.calculate_mac("response", credentials, artifacts)}\"")
  end
  # Clause 3: neither :hash nor :payload given — header without payload hash.
  def header(%{artifacts: %{method: _, host: _, port: _, resource: _, ts: _, nonce: _, id: _} = artifacts, credentials: %{key: _key, algorithm: algorithm} = credentials}, options) when algorithm in @algorithms do
    options = Map.take(options, [:ext, :hash])
    artifacts = artifacts |> Map.drop([:ext, :hash, :mac]) |> Map.merge(options)
    maybe_add(artifacts, "Hawk mac=\"#{Crypto.calculate_mac("response", credentials, artifacts)}\"")
  end

  # Appends optional hash/ext attributes to the header string when present
  # in the artifacts (ext values are escaped).
  defp maybe_add(%{hash: hash, ext: ext}, string), do: <<string::binary(), ", hash=", ?", hash::binary(), ?", ", ext=", ?", Header.escape_attribute(ext)::binary(), ?">>
  defp maybe_add(%{hash: hash}, string), do: <<string::binary(), ", hash=", ?", hash::binary(), ?">>
  defp maybe_add(%{ext: ext}, string), do: <<string::binary(), ", ext=", ?", Header.escape_attribute(ext)::binary(), ?">>
  defp maybe_add(_, string), do: string
  @doc """
  Authenticate a Hawk bewit request
  ## Options
    * `:localtime_offset_msec` Local clock time offset express in a number of milliseconds (positive or negative). Defaults to 0.
  """
  @spec authenticate_bewit(Hawk.Request.t(), module(), Hawk.opts()) :: {:ok, %{attributes: map(), credentials: map()}} | {:error, {integer, binary()}} | {:error, {integer, binary(), {binary(), binary()}}}
  def authenticate_bewit(request, config, options \\ %{})
  def authenticate_bewit(request, config, options) when is_list(options), do: authenticate_bewit(request, config, Map.new(options))
  def authenticate_bewit(%{url: url}, _config, _options) when byte_size(url) > 4096, do: {:error, {400, "Resource path exceeds max length"}}
  # Bewits are only valid on safe methods.
  def authenticate_bewit(%{method: method}, _config, _options) when method not in ["GET", "HEAD"], do: {:error, {401, "Invalid method", Header.error("Invalid method")}}
  # A request may not carry both a bewit and an Authorization header
  # (NOTE(review): assumes :authorization defaults to [] — confirm upstream).
  def authenticate_bewit(%{authorization: authorization}, _config, _options) when authorization !== [], do: {:error, {400, "Multiple authentications"}}
  def authenticate_bewit(req, config, options) do
    options = Map.merge(%{timestamp_skew_sec: 60}, options)
    now = Now.msec(options)
    case parse(req[:url], now) do
      {:error, reason} -> {:error, reason}
      {:ok, %{id: id, exp: exp, ext: ext, mac: mac} = bewit, url} ->
        case id |> config.get_credentials(options) |> validate_credentials() do
          {:error, reason} -> {:error, reason}
          {:ok, credentials} ->
            # The MAC is verified against the URL with the bewit removed.
            {:ok, %{artifacts: %{ts: exp, nonce: "", method: "GET", resource: url, host: req[:host], port: req[:port], ext: ext}, credentials: credentials}}
            |> validate_mac(mac, "bewit")
            |> case do
                 {:ok, %{credentials: credentials}} -> {:ok, %{attributes: bewit, credentials: credentials}}
                 {:error, reason} -> {:error, reason}
               end
        end
    end
  end

  # Walks the URL byte-by-byte accumulating everything before "bewit=" into
  # `resource`, then hands the rest to parse_bewit/3.
  defp parse(binary, now, resource \\ <<>>)
  defp parse(<<>>, _now, _resource), do: {:error, {400, "Invalid bewit encoding"}}
  defp parse([], _now, _resource), do: {:error, {400, "Invalid bewit encoding"}}
  defp parse(<<_::binary-size(1), "bewit=">>, _now, _resource), do: {:error, {401, "Empty bewit", Header.error("Empty bewit")}}
  defp parse([_, ?b, ?e, ?w, ?i, ?t, ?=], _now, _resource), do: {:error, {401, "Empty bewit", Header.error("Empty bewit")}}
  defp parse(<<b::binary-size(1), "bewit=", bewit::binary()>>, now, resource) do
    # Keep a leading "?" so the reconstructed resource stays a valid query URL.
    resource = if b == "?", do: <<resource::binary(), b::binary()>>, else: resource
    bewit
    |> parse_bewit(resource)
    |> validate_bewit(now)
  end
  defp parse(<<b::binary-size(1), rest::binary()>>, now, resource) do
    parse(rest, now, <<resource::binary(), b::binary()>>)
  end
  defp parse(_binary, _now, _resource), do: {:error, {401, "Unauthorized", Header.error()}}

  # Collects the bewit value up to the next query delimiter and reassembles
  # the remaining query string onto the resource.
  defp parse_bewit(binary, resource, bewit \\ <<>>)
  defp parse_bewit(<<>>, resource, bewit), do: [bewit, String.trim(resource, "?")]
  defp parse_bewit(<<??, _::binary()>> = query, resource, bewit) when bewit !== <<>>, do: [bewit, resource <> query]
  defp parse_bewit(<<?&, query::binary()>>, resource, bewit) when bewit !== <<>>, do: [bewit, resource <> query]
  defp parse_bewit(<<b::binary-size(1), rest::binary()>>, resource, bewit) do
    parse_bewit(rest, resource, <<bewit::binary(), b::binary-size(1)>>)
  end

  # Base64url-decodes the bewit, splits it into id\exp\mac\ext and rejects
  # malformed or expired bewits.
  defp validate_bewit([bewit, url], now) do
    bewit
    |> Base.url_decode64(padding: false)
    |> validate_bewit(now, url)
  end
  defp validate_bewit(:error, _now, _url), do: {:error, {400, "Invalid bewit encoding"}}
  defp validate_bewit({:ok, bewit}, now, url) do
    case :string.split(bewit, "\\", :all) do
      values when length(values) != 4 -> {:error, {400, "Invalid bewit structure"}}
      [id, exp, mac | _] when id == "" or exp == "" or mac == "" -> {:error, {400, "Missing bewit attributes"}}
      [_id, exp | _] = values ->
        bewit = [:id, :exp, :mac, :ext] |> Enum.zip(values) |> Enum.into(%{})
        # exp is in seconds; `now` is in milliseconds.
        case :erlang.binary_to_integer(exp, 10) * 1000 <= now do
          true -> {:error, {401, "Access expired", Header.error("Access expired")}}
          false -> {:ok, bewit, url}
        end
    end
  end
  @doc """
  Authenticate a message
  ## Options
    * `:localtime_offset_msec` Local clock time offset express in a number of milliseconds (positive or negative). Defaults to 0.
    * `:timestamp_skew_sec`. Defaults to 60.
  """
  @spec authenticate_message(binary(), 0..65535, binary(), map(), module(), Hawk.opts()) :: {:ok, %{credentials: map()}} | {:error, {integer, binary()}} | {:error, {integer, binary(), {binary(), binary()}}}
  def authenticate_message(host, port, message, authorization, config, options \\ %{})
  def authenticate_message(host, port, message, authorization, config, options) when is_list(options) do
    authenticate_message(host, port, message, authorization, config, Map.new(options))
  end
  def authenticate_message(host, port, message, %{id: id, ts: ts, nonce: nonce, hash: hash, mac: mac}, config, options) do
    options = Map.merge(%{timestamp_skew_sec: 60}, options)
    now = Now.msec(options)
    case id |> config.get_credentials(options) |> validate_credentials() do
      {:error, reason} -> {:error, reason}
      {:ok, credentials} ->
        # Same check pipeline as authenticate/3, but the payload is the
        # message itself and only the credentials are returned.
        {:ok, %{artifacts: %{port: port, host: host, ts: ts, nonce: nonce, hash: hash}, credentials: credentials}}
        |> validate_mac(mac, "message")
        |> check_payload(%{payload: message}, "Bad message hash")
        |> check_nonce(config)
        |> check_timestamp_staleness(now, options)
        |> case do
             {:error, reason} -> {:error, reason}
             {:ok, %{credentials: credentials}} -> {:ok, %{credentials: credentials}}
           end
    end
  end
  # Authorization map missing one of the required id/ts/nonce/hash/mac keys.
  def authenticate_message(_host, _port, _message, _authorization, _config, _options), do: {:error, {400, "Invalid authorization"}}
defp validate_credentials({:error, {status, msg, header}}), do: {:error, {status, msg, header}}
defp validate_credentials(%{algorithm: algorithm, key: _key} = result) when algorithm in @algorithms, do: {:ok, result}
defp validate_credentials(%{algorithm: _,}), do: {:error, {500, "Unknown algorithm"}}
defp validate_credentials(credentials) when is_map(credentials), do: {:error, {500, "Invalid credentials"}}
defp validate_credentials(_credentials), do: {:error, {401, "Unknown credentials", Header.error("Unknown credentials")}}
# Recomputes the MAC over the artifacts and compares it to the client-supplied
# MAC in constant time. Error tuples pass through untouched.
# NOTE(review): this is `def` (public) while the sibling checks are `defp` —
# presumably intentional for reuse elsewhere; confirm before changing visibility.
def validate_mac({:error, reason}, _mac, _type), do: {:error, reason}
def validate_mac({:ok, %{artifacts: artifacts, credentials: credentials}} = ok, mac, type) do
  type
  |> Crypto.calculate_mac(credentials, artifacts)
  |> Kryptiles.fixed_time_comparison(mac)
  |> case do
    false -> {:error, {401, "Bad mac", Header.error("Bad mac")}}
    true -> ok
  end
end
# Verifies the payload hash when the artifacts carry one.
# Clause order is load-bearing:
#   1. error passthrough
#   2. hash + algorithm available and a payload given -> constant-time comparison
#   3. payload given but no usable hash/algorithm in the artifacts -> 401
#   4. hash present but nothing to verify against -> pass through
#   5. neither hash nor payload -> pass through
defp check_payload(result, options, msg \\ "Bad payload hash")
defp check_payload({:error, reason}, _options, _msg), do: {:error, reason}
defp check_payload({:ok, %{artifacts: %{hash: hash}, credentials: %{algorithm: algorithm}}} = ok, %{payload: payload}, msg) do
  algorithm
  |> Crypto.calculate_payload_hash(payload, "")
  |> Kryptiles.fixed_time_comparison(hash)
  |> case do
    false -> {:error, {401, msg, Header.error(msg)}}
    true -> ok
  end
end
defp check_payload({:ok, _}, %{payload: _}, _attributes), do: {:error, {401, "Missing required payload hash", Header.error("Missing required payload hash")}}
defp check_payload({:ok, %{artifacts: %{hash: _hash}}} = ok, _options, _attributes), do: ok
defp check_payload({:ok, _} = ok, _options, _attributes), do: ok
# Delegates nonce replay checking to the config callback module; anything
# other than `:ok` from the callback is treated as an invalid/replayed nonce.
defp check_nonce({:error, reason}, _options), do: {:error, reason}
defp check_nonce({:ok, %{artifacts: %{nonce: nonce, ts: ts}, credentials: %{key: key}}} = ok, config) do
  case config.nonce(key, nonce, ts) do
    :ok -> ok
    _ -> {:error, {401, "Invalid nonce", Header.error("Invalid nonce")}}
  end
end
# Rejects results whose timestamp differs from `now` (milliseconds) by more
# than the configured skew. `ts` arrives in seconds and may still be a binary.
# `attributes` is a lazy 0-arity fun supplying extra header attributes for the
# "Stale timestamp" error response.
defp check_timestamp_staleness(result, now, options, attributes \\ fn -> [] end)
defp check_timestamp_staleness({:error, reason}, _now, _options, _attributes), do: {:error, reason}
defp check_timestamp_staleness({:ok, %{artifacts: %{ts: ts}}} = ok, now, %{timestamp_skew_sec: timestamp_skew_sec}, attributes) do
  ts = if is_binary(ts), do: :erlang.binary_to_integer(ts), else: ts
  # Compare in milliseconds on both sides.
  case Kernel.abs((ts * 1000) - now) > (timestamp_skew_sec * 1000) do
    true -> {:error, {401, "Stale timestamp", Header.error("Stale timestamp", attributes.())}}
    false -> ok
  end
end
end
|
lib/hawk/server.ex
| 0.919971
| 0.515437
|
server.ex
|
starcoder
|
defmodule Unicode.Utils do
@moduledoc false
@doc """
Returns a map of the Unicode codepoints with the `script` name
as the key and a list of codepoint ranges as the values.
"""
@scripts_path Path.join(Unicode.data_dir(), "scripts.txt")
@external_resource @scripts_path
def scripts do
parse_file(@scripts_path)
|> downcase_keys()
|> atomize_keys()
end
@doc """
Returns a map of the Unicode codepoints with the `block` name
as the key and a list of codepoint ranges as the values.
"""
@blocks_path Path.join(Unicode.data_dir(), "blocks.txt")
@external_resource @blocks_path
def blocks do
parse_file(@blocks_path)
|> downcase_keys
|> atomize_keys()
end
@doc """
Returns a map of the Unicode codepoints with the `combining_class` number
as the key and a list of codepoint ranges as the values.
"""
@combining_class_path Path.join(Unicode.data_dir(), "combining_class.txt")
@external_resource @combining_class_path
def combining_classes do
parse_file(@combining_class_path)
|> Enum.map(fn {k, v} -> {String.to_integer(k), v} end)
|> Map.new()
end
@doc """
Returns a map of the Unicode codepoints with the `category` name
as the key and a list of codepoint ranges as the values.
"""
@categories_path Path.join(Unicode.data_dir(), "categories.txt")
@external_resource @categories_path
def categories do
parse_file(@categories_path)
|> atomize_keys()
end
@doc """
Returns a map of the Unicode codepoints with the `derived property` name
as the key and a list of codepoint ranges as the values.
"""
@derived_properties_path Path.join(Unicode.data_dir(), "derived_properties.txt")
@external_resource @derived_properties_path
def derived_properties do
parse_file(@derived_properties_path)
|> downcase_keys
|> atomize_keys()
end
@doc """
Returns a map of the Unicode codepoints with the `property` name
as the key and a list of codepoint ranges as the values.
"""
@properties_path Path.join(Unicode.data_dir(), "properties.txt")
@external_resource @properties_path
def properties do
parse_file(@properties_path)
|> downcase_keys()
|> atomize_keys()
end
@doc """
Returns a map of the Unicode codepoints with the emoji type name
as the key and a list of codepoint ranges as the values.
"""
@emoji_path Path.join(Unicode.data_dir(), "emoji.txt")
@external_resource @emoji_path
def emoji do
parse_file(@emoji_path)
|> downcase_keys
|> atomize_keys()
end
@doc """
Returns a map of the Unicode codepoints with the `grapheme_break` name
as the key and a list of codepoint ranges as the values.
"""
@grapheme_breaks_path Path.join(Unicode.data_dir(), "grapheme_break.txt")
@external_resource @grapheme_breaks_path
def grapheme_breaks do
parse_file(@grapheme_breaks_path)
|> downcase_keys()
|> atomize_keys()
end
@doc """
Returns a map of the Unicode codepoints with the `line_break` name
as the key and a list of codepoint ranges as the values.
"""
@line_breaks_path Path.join(Unicode.data_dir(), "line_break.txt")
@external_resource @line_breaks_path
def line_breaks do
parse_file(@line_breaks_path)
|> downcase_keys()
|> atomize_keys()
end
@doc """
Returns a map of the Unicode codepoints with the `word_break` name
as the key and a list of codepoint ranges as the values.
"""
@word_breaks_path Path.join(Unicode.data_dir(), "word_break.txt")
@external_resource @word_breaks_path
def word_breaks do
parse_file(@word_breaks_path)
|> downcase_keys()
|> atomize_keys()
end
@doc """
Returns a map of the Unicode codepoints from SpecialCasing.txt
as the key and a list of codepoint ranges as the values.
"""
@case_folding_path Path.join(Unicode.data_dir(), "case_folding.txt")
@external_resource @case_folding_path
def case_folding do
parse_alias_file(@case_folding_path)
|> Enum.map(fn
[from, status, to, _] -> [encode(status), extract(from), extract(to)]
end)
|> Enum.sort_by(&hd/1)
|> Enum.reverse
end
defp encode("c"), do: :common
defp encode("t"), do: :turkic
defp encode("f"), do: :full
defp encode("s"), do: :simple
@doc """
Returns a map of the Unicode codepoints from SpecialCasing.txt
as the key and a list of codepoint ranges as the values.
"""
@special_casing_path Path.join(Unicode.data_dir(), "special_casing.txt")
@external_resource @special_casing_path
def special_casing do
parse_alias_file(@special_casing_path)
|> Enum.map(fn row ->
Enum.map(row, &extract/1)
|> Enum.reverse
|> tl
|> Enum.reverse
end)
|> Enum.group_by(&hd/1)
end
defp extract(string) do
string
|> String.split(" ")
|> Enum.map(&to_integer/1)
|> return_list_or_integer
rescue ArgumentError ->
string
end
def return_list_or_integer([integer]), do: integer
def return_list_or_integer(list), do: list
def to_integer(""), do: nil
def to_integer(string), do: String.to_integer(string, 16)
@doc """
Returns a map of the Unicode codepoints with the `sentence_break` name
as the key and a list of codepoint ranges as the values.
"""
@sentence_breaks_path Path.join(Unicode.data_dir(), "sentence_break.txt")
@external_resource @sentence_breaks_path
def sentence_breaks do
parse_file(@sentence_breaks_path)
|> downcase_keys()
|> atomize_keys()
end
@doc """
Returns a map of the Unicode codepoints with the `east_asian_width` name
as the key and a list of codepoint ranges as the values.
"""
@east_asian_width_path Path.join(Unicode.data_dir(), "east_asian_width.txt")
@external_resource @east_asian_width_path
def east_asian_width do
parse_file(@east_asian_width_path)
|> downcase_keys()
|> atomize_keys()
end
@doc """
Returns a map of the Unicode codepoints with the `sentence_break` name
as the key and a list of codepoint ranges as the values.
"""
@indic_syllabic_category_path Path.join(Unicode.data_dir(), "indic_syllabic_category.txt")
@external_resource @indic_syllabic_category_path
def indic_syllabic_categories do
parse_file(@indic_syllabic_category_path)
|> downcase_keys()
|> atomize_keys()
end
@doc """
Returns a map of the property value aliases.
"""
@property_alias_path Path.join(Unicode.data_dir(), "property_alias.txt")
@external_resource @property_alias_path
def property_alias do
parse_alias_file(@property_alias_path)
|> Enum.flat_map(fn
[alias1, code] ->
[{alias1, code}]
[alias1, code, alias2] ->
[{alias1, code}, {alias2, code}]
[alias1, code, alias2, alias3] ->
[{alias1, code}, {alias2, code}, {alias3, code}]
[_alias1, _code, _alias2, _alias3, _alias4] ->
[]
end)
|> Map.new()
end
@doc """
Returns a mapping of property names and
aliases to the module that serves that
property
"""
def property_servers do
property_alias()
|> atomize_values
|> add_canonical_alias()
|> Enum.map(fn {k, v} ->
{k, Module.concat(Unicode, Macro.camelize(Atom.to_string(v)))}
end)
|> Enum.filter(fn {_k, v} -> ensure_compiled?(v) end)
|> Map.new()
end
@doc """
Returns a map of the property value aliases.
"""
@property_value_alias_path Path.join(Unicode.data_dir(), "property_value_alias.txt")
@external_resource @property_value_alias_path
def property_value_alias do
parse_alias_file(@property_value_alias_path)
|> Enum.group_by(&hd/1, &tl/1)
|> Enum.map(fn {category, aliases} -> {category, map_from_aliases(aliases)} end)
|> Map.new()
end
defp map_from_aliases(aliases) do
Enum.flat_map(aliases, fn
[code, alias1] ->
[{alias1, code}]
[code, alias1, alias2] ->
[{alias1, code}, {alias2, code}]
[code, alias1, alias2, alias3] ->
[{alias1, code}, {alias2, code}, {alias3, code}]
end)
|> Map.new()
end
@doc false
# Parses a Unicode data file of `range ; value # comment` lines into a map of
# `value => [{start, finish, comment_fields}]`, merging a new range into the
# most recent one for the same key when they are numerically adjacent.
def parse_file(path) do
  Enum.reduce(File.stream!(path), %{}, fn line, map ->
    case line do
      # Skip comment and blank lines.
      <<"#", _rest::bitstring>> ->
        map
      <<"\n", _rest::bitstring>> ->
        map
      data ->
        [range, script | tail] =
          data
          |> String.split(~r/[;#]/)
          |> Enum.map(&String.trim/1)
        [start, finish] =
          range
          |> String.split("..")
          |> extract_codepoint_range
        range =
          case Map.get(map, script) do
            nil ->
              [{start, finish, tail}]
            # Single accumulated range: extend it in place when the new range
            # is immediately adjacent, accumulating the annotation fields.
            [{first, last, text}] when is_integer(first) and is_integer(last) ->
              if start == last + 1 do
                [{first, finish, tail ++ text}]
              else
                [{start, finish, tail}, {first, last, text}]
              end
            # Same merge rule against the head of a longer accumulator.
            [{first, last, text} | rest] when is_integer(first) and is_integer(last) ->
              if start == last + 1 do
                [{first, finish, tail ++ text} | rest]
              else
                [{start, finish, tail}, {first, last, text} | rest]
              end
            # Multi-codepoint (list) entries are never merged.
            [{first, last, text} | rest] when is_list(first) and is_list(last) ->
              [{start, finish, tail}, {first, last, text} | rest]
          end
        Map.put(map, script, range)
    end
  end)
  |> Enum.map(fn {key, ranges} ->
    # Ranges were accumulated head-first, so restore file order.
    {key, Enum.reverse(ranges)}
  end)
  |> Map.new
end

# Range
# A two-element split is a real range; a single codepoint becomes a
# degenerate range with identical endpoints.
defp extract_codepoint_range([first, last]) do
  [codepoint_from(first), codepoint_from(last)]
end
defp extract_codepoint_range([codepoint]) do
  cp = codepoint_from(codepoint)
  [cp, cp]
end

# A field may hold one hex codepoint or a space-separated sequence of them.
defp codepoint_from(codepoint) do
  case String.split(codepoint, " ") do
    [codepoint] ->
      String.to_integer(codepoint, 16)
    codepoints ->
      Enum.map(codepoints, &String.to_integer(&1, 16))
  end
end

@doc false
# Parses a `;`-separated alias file into a list of downcased, trimmed field
# lists, dropping comments and blank lines. Rows come out in reverse file
# order (head-first accumulation, no final reverse).
def parse_alias_file(path) do
  Enum.reduce(File.stream!(path), [], fn line, acc ->
    case line do
      <<"#", _rest::bitstring>> ->
        acc
      <<"\n", _rest::bitstring>> ->
        acc
      data ->
        [
          data
          |> String.replace(~r/ *#.*/, "")
          |> String.split(";")
          |> Enum.map(fn n -> String.trim(n) |> String.downcase() end)
          | acc
        ]
    end
  end)
end
# Derives a string alias for every atom value in `map` and merges the aliases
# back in; existing keys win on collision because `Map.merge/2` favors the
# second argument.
def add_canonical_alias(map) do
  aliases = Map.new(map, fn {_key, value} -> {downcase_and_remove_whitespace(value), value} end)
  Map.merge(aliases, map)
end

# Normalizes every key of `map` via `downcase_and_remove_whitespace/1`.
def downcase_keys_and_remove_whitespace(map) when is_map(map) do
  Map.new(map, fn {key, value} -> {downcase_and_remove_whitespace(key), value} end)
end

@match [" ", "-", "_"]
# Lowercases a binary and strips spaces, hyphens and underscores; atoms and
# integers are first converted to their string representation.
def downcase_and_remove_whitespace(string) when is_binary(string) do
  string
  |> String.trim()
  |> String.downcase()
  |> String.replace(@match, "")
end

def downcase_and_remove_whitespace(atom) when is_atom(atom) do
  atom
  |> Atom.to_string()
  |> downcase_and_remove_whitespace()
end

def downcase_and_remove_whitespace(integer) when is_integer(integer) do
  Integer.to_string(integer)
end

# Replaces spaces and hyphens with underscores so a key is safe to atomize.
def conform_key(string) do
  String.replace(string, [" ", "-"], "_")
end
@doc false
# Builds a guard-clause AST from a list of {first, last} codepoint ranges:
# each tuple becomes an `==` (degenerate range) or `in` check against
# `var!(codepoint)`, OR-ed together recursively. Intended for use inside
# macros that inject `codepoint` into the caller's scope.
def ranges_to_guard_clause([{first, first}]) do
  quote do
    var!(codepoint) == unquote(first)
  end
end
def ranges_to_guard_clause([{first, last}]) do
  quote do
    var!(codepoint) in unquote(first)..unquote(last)
  end
end
def ranges_to_guard_clause([{first, first} | rest]) do
  quote do
    var!(codepoint) == unquote(first) or unquote(ranges_to_guard_clause(rest))
  end
end
def ranges_to_guard_clause([{first, last} | rest]) do
  quote do
    var!(codepoint) in unquote(first)..unquote(last) or unquote(ranges_to_guard_clause(rest))
  end
end
@doc """
Takes a list of codepoints and collapses them into
a list of tuple ranges
"""
def list_to_ranges(list) do
list
|> Enum.sort
|> Enum.reduce([], fn
codepoint, [] ->
[{codepoint, codepoint}]
codepoint, [{start, finish} | rest] when codepoint == finish + 1 ->
[{start, finish + 1} | rest]
codepoint, acc ->
[{codepoint, codepoint} | acc]
end)
|> Enum.reverse
end
@doc """
Takes a list of tuple ranges and compacts
adjacent ranges
"""
def compact_ranges([]) do
[]
end
def compact_ranges([{first, last}, {next, final} | rest]) when next >= first and final <= last do
compact_ranges([{first, last} | rest])
end
def compact_ranges([{first, last}, {first, last} | rest]) do
compact_ranges([{first, last} | rest])
end
def compact_ranges([{first, last}, {next, final} | rest])
when next >= first and next <= last and final >= last do
compact_ranges([{first, final} | rest])
end
def compact_ranges([{first, last}, {next, final} | rest]) when next == last + 1 do
compact_ranges([{first, final} | rest])
end
def compact_ranges([entry | rest]) do
[entry | compact_ranges(rest)]
end
@doc false
# Capitalizes every (binary) key of the map.
def capitalize_keys(map) do
  Map.new(map, fn {key, val} -> {String.capitalize(key), val} end)
end

@doc false
# Downcases every (binary) key of the map.
def downcase_keys(map) do
  Map.new(map, fn {key, val} -> {String.downcase(key), val} end)
end

@doc false
# Converts every key to an underscore-normalized atom.
def atomize_keys(map) do
  Map.new(map, fn {key, val} -> {key |> conform_key() |> String.to_atom(), val} end)
end

@doc false
# Capitalizes every (binary) value of the map.
def capitalize_values(map) do
  Map.new(map, fn {key, val} -> {key, String.capitalize(val)} end)
end

# Converts every value to an underscore-normalized atom.
def atomize_values(map) do
  Map.new(map, fn {key, val} -> {key, val |> conform_key() |> String.to_atom()} end)
end
@doc false
# Strips the trailing annotation element from every `{start, finish, notes}`
# tuple, leaving plain `{start, finish}` ranges.
def remove_annotations(data) do
  data
  |> Enum.map(fn {k, v} ->
    {k, Enum.map(v, fn {s, f, _} -> {s, f} end)}
  end)
  |> Map.new()
end

@doc false
@reserved "<reserved"
# Drops every range whose annotation notes mention a reserved codepoint.
def remove_reserved_codepoints(data) do
  data
  |> Enum.map(fn {k, v} ->
    filtered_list =
      Enum.reject(v, fn {_, _, notes} ->
        Enum.any?(notes, fn note ->
          String.contains?(note, @reserved)
        end)
      end)
    {k, filtered_list}
  end)
  |> Map.new()
end

@doc false
# Expands `{first, last}` range tuples into an explicit codepoint list.
# The output order (descending within a range, later ranges first) is
# preserved from the original implementation.
def ranges_to_codepoints(ranges) when is_list(ranges) do
  Enum.reduce(ranges, [], fn
    {first, first}, acc ->
      [first | acc]
    {first, last}, acc ->
      # `Enum.to_list/1` replaces the identity `Enum.map(last..first, & &1)`.
      Enum.to_list(last..first) ++ acc
  end)
end

@doc false
# Swaps keys and values; duplicate values collapse to the last key seen.
def invert_map(map) do
  Enum.map(map, fn {k, v} -> {v, k} end)
  |> Map.new()
end

# True when `module` exists and can be compiled/loaded.
defp ensure_compiled?(module) do
  case Code.ensure_compiled(module) do
    {:module, _} -> true
    {:error, _} -> false
  end
end
end
|
lib/utils.ex
| 0.887339
| 0.517144
|
utils.ex
|
starcoder
|
defmodule GatherSubmissions.DOMjudge do
  @moduledoc """
  This module defines a function `gather_submissions` that uses the
  DOMjudge API to obtain the submissions of a given problem.
  """
  alias GatherSubmissions.DOMjudge.API
  alias GatherSubmissions.DOMjudge.Connection
  alias GatherSubmissions.Submission
  alias GatherSubmissions.Submission.File, as: SubFile

  defmodule ContestNotFoundError do
    defexception [:contest]

    @impl true
    def message(exc), do: "Contest not found: #{exc.contest}"
  end

  defmodule ProblemNotFoundError do
    defexception [:problem]

    @impl true
    def message(exc), do: "Problem not found: #{exc.problem}"
  end

  @doc """
  It gathers all the submissions of a given problem in a given contest.
  The `problem_name` and `contest_name` parameters contain the name of the problem to
  be retrieved and the contest to which it belongs.
  Retrieval progress is reported by calling the `logger` function with an informative
  message.
  **IMPORTANT:** The source code is not downloaded by this function. Instead, the
  `Submission.t()` returns a closure which, when executed, performs the retrieval of
  the source code.
  """
  @spec gather_submissions(Connection.t(), String.t(), String.t(), (String.t() -> any())) :: [
          Submission.t()
        ]
  def gather_submissions(conn, contest_name, problem_name, logger \\ fn _ -> :ok end) do
    # Retrieve contest and problem IDs, failing fast with descriptive errors.
    contest_id = API.get_contest_id_by_name(conn, contest_name)
    if contest_id == nil, do: raise(ContestNotFoundError, contest: contest_name)
    logger.("Contest ID: #{contest_id}")
    problem_id = API.get_problem_id_by_name(conn, contest_id, problem_name)
    if problem_id == nil, do: raise(ProblemNotFoundError, problem: problem_name)
    logger.("Problem ID: #{problem_id}")
    logger.("Fetching users...")
    teams = API.get_teams(conn, contest_id)
    logger.("Fetching judgements...")
    judgements = API.get_judgements(conn, contest_id)
    logger.("Fetching list of submissions...")
    subs = API.get_submissions(conn, contest_id, problem_id)
    user_table = build_user_table(teams)
    # judgements_table maps each submission ID to its judgement ID
    judgements_table = build_judgement_table(judgements)
    subs
    |> Enum.map(&into_domjudge_submission(&1, conn, contest_id, user_table, judgements_table))
  end

  # Builds a team-id => team-name lookup table.
  defp build_user_table(users) do
    users
    |> Enum.map(fn %{"id" => id, "name" => name} -> {id, name} end)
    |> Enum.into(%{})
  end

  # Builds a submission-id => judgement-type lookup table.
  defp build_judgement_table(judgements) do
    judgements
    |> Enum.map(fn %{"submission_id" => sub_id, "judgement_type_id" => judgement} ->
      {sub_id, judgement}
    end)
    |> Enum.into(%{})
  end

  # Converts a raw DOMjudge submission map into a `Submission` struct.
  # `files` is a lazy closure so source code is only fetched on demand.
  # (An unused leftover `require Logger` was removed from this function.)
  defp into_domjudge_submission(submission, conn, contest_id, user_table, judgements_table) do
    %{"id" => id, "team_id" => team_id, "time" => time} = submission
    %Submission{
      id: id,
      user: user_table[team_id],
      time: NaiveDateTime.from_iso8601!(time),
      verdict: judgements_table[id],
      files: fn ->
        get_source_code(conn, contest_id, id)
      end
    }
  end

  # Downloads and base64-decodes every file of a submission.
  defp get_source_code(conn, contest_id, submission_id) do
    API.get_source_code(conn, contest_id, submission_id)
    |> Enum.map(fn %{"filename" => name, "source" => source_base_64} ->
      %SubFile{name: name, content: source_base_64 |> Base.decode64!()}
    end)
  end
end
|
lib/domjudge/domjudge.ex
| 0.846451
| 0.469155
|
domjudge.ex
|
starcoder
|
defmodule ExMpesa.C2B do
  @moduledoc """
  C2B M-Pesa API enables Paybill and Buy Goods merchants to integrate to M-Pesa and receive real time payments notifications.
  """
  import ExMpesa.MpesaBase

  @doc """
  There are two URLs required for RegisterURL API: Validation URL and Confirmation URL.
  For the two URLs, below are some pointers. This will also apply to the Callback URLs used on other APIs:
  - Use publicly available (Internet-accessible) IP addresses or domain names.
  - Do not use the words MPesa, M-Pesa, Safaricom or any of their variants in either upper or lower cases in your URLs, the system filters these URLs out and blocks them. Of course any Localhost URL will be refused.
  - Do not use public URL testers e.g. mockbin or requestbin especially on production, they are also blocked by the API.
  ## Parameters
  attrs: - a map containing:
  - `ShortCode` - This is your paybill number/till number, which you expect to receive payments notifications about.
  - `ResponseType` - [Cancelled/Completed] This is the default action value that determines what MPesa will do in the scenario that
  your endpoint is unreachable or is unable to respond on time. Only two values are allowed: Completed or Cancelled.
  Completed means MPesa will automatically complete your transaction, whereas Cancelled means
  MPesa will automatically cancel the transaction, in the event MPesa is unable to reach your Validation URL.
  - `ConfirmationURL` - [confirmation URL].
  - `ValidationURL` - [validation URL].
  ## Example
  iex> ExMpesa.C2B.registerurl(%{ConfirmationURL: "https://58cb49b30213.ngrok.io/confirmation", ValidationURL: "https://58cb49b30213.ngrok.io/validation", ResponseType: "Completed"})
  {:ok,
  %{
  "ConversationID" => "",
  "OriginatorCoversationID" => "",
  "ResponseDescription" => "success"
  }
  }
  """
  def registerurl(%{
        ConfirmationURL: confirmation_url,
        ValidationURL: validation_url,
        ResponseType: response_type
      }) do
    # The short code is read from application config rather than the attrs map.
    paybill = Application.get_env(:ex_mpesa, :c2b_short_code)
    payload = %{
      "ShortCode" => paybill,
      "ResponseType" => response_type,
      "ConfirmationURL" => confirmation_url,
      "ValidationURL" => validation_url
    }
    make_request("/mpesa/c2b/v1/registerurl", payload)
  end

  # Fallback: `%{}` matches any map, so this catches maps missing a required key.
  def registerurl(%{}) do
    {:error, "Required Parameter missing, 'ConfirmationURL', 'ValidationURL','ResponseType'"}
  end

  @doc """
  This API is used to make payment requests from Client to Business (C2B).
  ## Parameters
  attrs: - a map containing:
  - `CommandID` - This is a unique identifier of the transaction type: There are two types of these Identifiers:
  CustomerPayBillOnline: This is used for Pay Bills shortcodes.
  CustomerBuyGoodsOnline: This is used for Buy Goods shortcodes.
  - `Amount` - This is the amount being transacted. The parameter expected is a numeric value.
  - `Msisdn` - This is the phone number initiating the C2B transaction.
  - `BillRefNumber` - This is used on CustomerPayBillOnline option only.
  This is where a customer is expected to enter a unique bill identifier, e.g an Account Number.
  - `ShortCode` - This is the Short Code receiving the amount being transacted.
  You can use the sandbox provided test credentials down below to simulates a payment made from the client phone's STK/SIM Toolkit menu, and enables you to receive the payment requests in real time.
  ## Example
  iex> ExMpesa.C2B.simulate(%{command_id: "CustomerPayBillOnline", phone_number: "254728833100", amount: 10, bill_reference: "Some Reference" })
  {:ok,
  %{
  "ConversationID" => "AG_20200921_00006e93a78f009f7025",
  "OriginatorCoversationID" => "9769-145819182-2",
  "ResponseDescription" => "Accept the service request successfully."
  }
  }
  """
  def simulate(%{
        command_id: command_id,
        phone_number: phone_number,
        amount: amount,
        bill_reference: bill_reference
      }) do
    paybill = Application.get_env(:ex_mpesa, :c2b_short_code)
    payload = %{
      "ShortCode" => paybill,
      "CommandID" => command_id,
      "Amount" => amount,
      "Msisdn" => phone_number,
      "BillRefNumber" => bill_reference
    }
    make_request("/mpesa/c2b/v1/simulate", payload)
  end

  # Fallback for maps missing one or more required simulate parameters.
  def simulate(%{}) do
    {:error, "Required Parameter missing, 'CommandID','Amount','Msisdn', 'BillRefNumber'"}
  end
end
|
lib/ex_mpesa/c2b.ex
| 0.825625
| 0.535949
|
c2b.ex
|
starcoder
|
defmodule Kelvin.InOrderSubscription do
  @moduledoc """
  A subscription producer which processes events in order as they appear
  in the EventStoreDB
  ## Options
  * `:name` - (optional) the GenServer name for this producer
  * `:stream_name` - (required) the stream name to which to subscribe
  * `:connection` - (required) the Extreme client module to use as a
  connection to the EventStoreDB. This may either be the name of the
  Extreme client module or its pid.
  * `:restore_stream_position!` - (required) a function which determines
  the stream position from which this listener should begin after initializing
  or restarting. Values may be either an MFA tuple or a 0-arity anonymous
  function.
  * `:subscribe_on_init?` - (required) a function which determines whether
  the producer should subscribe immediately after starting up. Values may
  be either an MFA tuple or a 0-arity anonymous function. The function
  should return either `true` to subscribe immediately on initialization or
  `false` if the author intends on manually subscribing the producer. This
  producer can be manually subscribed by `send/2`ing a message of
  `:subscribe` to the process.
  * `:subscribe_after` - (default: `Enum.random(3_000..5_000)`) the amount of
  time to wait after initializing to query the `:subscribe_on_init?` option.
  This can be useful to prevent all producers from trying to subscribe at
  the same time and to await an active connection to the EventStoreDB.
  * `:catch_up_chunk_size` - (default: `256`) the number of events to query
  for each read chunk while catching up. This option presents a trade-off
  between network queries and query duration over the network.
  """
  use GenStage
  require Logger

  defstruct [
    :config,
    :subscription,
    :self,
    :max_buffer_size,
    demand: 0,
    buffer: :queue.new(),
    buffer_size: 0
  ]

  def start_link(opts) do
    GenStage.start_link(__MODULE__, opts, Keyword.take(opts, [:name]))
  end

  @impl GenStage
  def init(opts) do
    # Buffer capacity mirrors the catch-up chunk size (opt > app env > 256).
    max_buffer_size =
      Keyword.get(
        opts,
        :catch_up_chunk_size,
        Application.get_env(:kelvin, :catch_up_chunk_size, 256)
      )
    state = %__MODULE__{
      config: Map.new(opts),
      self: Keyword.get(opts, :name, self()),
      max_buffer_size: max_buffer_size
    }
    # Jittered delay so multiple producers do not all subscribe at once.
    Process.send_after(
      self(),
      :check_auto_subscribe,
      opts[:subscribe_after] || Enum.random(3_000..5_000)
    )
    {:producer, state}
  end

  @impl GenStage
  def handle_info(:check_auto_subscribe, state) do
    identifier = "#{inspect(__MODULE__)} (#{inspect(state.self)})"
    if do_function(state.config.subscribe_on_init?) do
      Logger.info("#{identifier} subscribing to '#{state.config.stream_name}'")
      GenStage.async_info(self(), :subscribe)
    else
      # coveralls-ignore-start
      Logger.info(
        "#{identifier} did not subscribe to '#{state.config.stream_name}'"
      )
      # coveralls-ignore-stop
    end
    {:noreply, [], state}
  end

  def handle_info(:subscribe, state) do
    case subscribe(state) do
      {:ok, sub} ->
        # Link so this producer crashes (and restarts) with the subscription.
        Process.link(sub)
        {:noreply, [], put_in(state.subscription, sub)}
      # coveralls-ignore-start
      {:error, reason} ->
        {:stop, reason, state}
      # coveralls-ignore-stop
    end
  end

  def handle_info(_info, state), do: {:noreply, [], state}

  @impl GenStage
  def handle_call({:on_event, event}, from, state) do
    # When current demand is 0, buffer the event. If the buffer would hit
    # capacity, withhold the reply ({:noreply, ...}) so the subscription
    # blocks until demand frees space; the reply is then sent from
    # dequeue_events/3 once the buffered event is emitted.
    case state do
      %{demand: 0, buffer_size: size, max_buffer_size: max}
      when size + 1 == max ->
        {:noreply, [], enqueue(state, {event, from})}
      %{demand: 0} ->
        {:reply, :ok, [], enqueue(state, event)}
      %{demand: demand} ->
        {:reply, :ok, [{state.self, event}], put_in(state.demand, demand - 1)}
    end
  end

  @impl GenStage
  def handle_demand(demand, state) do
    dequeue_events(state, demand, [])
  end

  # Drain buffered events until either the buffer or the demand is exhausted;
  # leftover demand is stored for future events.
  defp dequeue_events(%{buffer_size: size} = state, demand, events)
       when size == 0 or demand == 0 do
    {:noreply, :lists.reverse(events), put_in(state.demand, demand)}
  end

  defp dequeue_events(state, demand, events) do
    case dequeue(state) do
      # A `{event, from}` pair means the subscription is awaiting a reply
      # (backpressure); release it now that the event is being emitted.
      {{:value, {event, from}}, state} ->
        GenStage.reply(from, :ok)
        dequeue_events(state, demand - 1, [{state.self, event} | events])
      {{:value, event}, state} ->
        dequeue_events(state, demand - 1, [{state.self, event} | events])
    end
  end

  defp dequeue(state) do
    case :queue.out(state.buffer) do
      {:empty, buffer} ->
        {:empty, %{state | buffer: buffer, buffer_size: 0}}
      {value, buffer} ->
        {value, %{state | buffer: buffer, buffer_size: state.buffer_size - 1}}
    end
  end

  # Issues a read-and-stay-subscribed request to the Extreme connection,
  # starting one past the restored stream position.
  defp subscribe(state) do
    state.config.connection
    |> Extreme.RequestManager._name()
    |> GenServer.call(
      {:read_and_stay_subscribed, self(),
       {state.config.stream_name,
        do_function(state.config.restore_stream_position!) + 1,
        state.max_buffer_size, true, false, :infinity}},
      :infinity
    )
  end

  # Invokes a 0-arity fun or an MFA tuple option value.
  defp do_function(func) when is_function(func, 0), do: func.()
  defp do_function({m, f, a}) when is_atom(m) and is_atom(f) and is_list(a) do
    apply(m, f, a)
  end

  defp enqueue(state, element) do
    %{
      state
      | buffer: :queue.in(element, state.buffer),
        buffer_size: state.buffer_size + 1
    }
  end
end
|
lib/kelvin/in_order_subscription.ex
| 0.800887
| 0.59193
|
in_order_subscription.ex
|
starcoder
|
defmodule JsonSchema.Resolver do
  @moduledoc """
  Module containing functions for resolving types. Main function being
  the `resolve_type` function.
  """
  alias JsonSchema.{Parser, Types}
  alias Parser.{ErrorUtil, ParserError}
  alias Types.{PrimitiveType, SchemaDefinition, TypeReference}

  @doc """
  Resolves a type given its identifier, parent identifier, its enclosing `SchemaDefinition`
  and the schema dictionary of the whole set of parsed JSON schema files.
  """
  @spec resolve_type(
          Types.typeIdentifier(),
          Types.typeIdentifier(),
          SchemaDefinition.t(),
          Types.schemaDictionary()
        ) ::
          {:ok, {Types.typeDefinition(), SchemaDefinition.t()}}
          | {:error, ParserError.t()}
  def resolve_type(identifier, parent, schema_def, schema_dict) do
    resolved_result =
      cond do
        # Primitive names resolve directly without URI handling.
        identifier in ["string", "number", "integer", "boolean"] ->
          resolve_primitive_identifier(identifier, schema_def)

        true ->
          # Parse once instead of three times as in the original. The
          # scheme is either nil (fragment/relative reference within the
          # current schema) or set (fully qualified reference into the
          # schema dictionary), so this branch pair is exhaustive and the
          # original's unreachable fallback clause has been folded away.
          uri = URI.parse(identifier)

          if uri.scheme == nil do
            resolve_uri_fragment_identifier(uri, parent, schema_def)
          else
            resolve_fully_qualified_uri_identifier(uri, parent, schema_dict)
          end
      end

    # Follow chains of type references until a concrete type is found.
    case resolved_result do
      {:ok, {%TypeReference{} = resolved_type, resolved_schema_def}} ->
        resolve_type(resolved_type.path, parent, resolved_schema_def, schema_dict)

      {:ok, {resolved_type, resolved_schema_def}} ->
        {:ok, {resolved_type, resolved_schema_def}}

      {:error, error} ->
        {:error, error}
    end
  end

  # Wraps a primitive type name ("string", "number", ...) in a PrimitiveType.
  @spec resolve_primitive_identifier(String.t(), SchemaDefinition.t()) ::
          {:ok, {Types.typeDefinition(), SchemaDefinition.t()}}
  defp resolve_primitive_identifier(identifier, schema_def) do
    primitive_type = %PrimitiveType{
      name: identifier,
      path: identifier,
      type: identifier
    }
    {:ok, {primitive_type, schema_def}}
  end

  # Looks up a fragment/relative reference in the current schema's type dict.
  # Spec fix: `parent` is a type identifier (as in the fully-qualified
  # resolver below), not a URI.t as the original spec claimed.
  @spec resolve_uri_fragment_identifier(
          URI.t(),
          Types.typeIdentifier(),
          SchemaDefinition.t()
        ) ::
          {:ok, {Types.typeDefinition(), SchemaDefinition.t()}}
          | {:error, ParserError.t()}
  defp resolve_uri_fragment_identifier(identifier, parent, schema_def) do
    type_dict = schema_def.types
    resolved_type = type_dict[to_string(identifier)]
    if resolved_type != nil do
      {:ok, {resolved_type, schema_def}}
    else
      {:error, ErrorUtil.unresolved_reference(identifier, parent)}
    end
  end

  # Looks up a fully qualified URI in the schema dictionary, trying the
  # schema root ("#"), the full URI, and finally the bare fragment.
  @spec resolve_fully_qualified_uri_identifier(
          URI.t(),
          Types.typeIdentifier(),
          Types.schemaDictionary()
        ) ::
          {:ok, {Types.typeDefinition(), SchemaDefinition.t()}}
          | {:error, ParserError.t()}
  defp resolve_fully_qualified_uri_identifier(identifier, parent, schema_dict) do
    schema_id = determine_schema_id(identifier)
    schema_def = schema_dict[schema_id]
    if schema_def != nil do
      type_dict = schema_def.types
      resolved_type =
        cond do
          to_string(identifier) == schema_id ->
            type_dict["#"]
          type_dict[to_string(identifier)] != nil ->
            type_dict[to_string(identifier)]
          true ->
            type_dict["##{identifier.fragment}"]
        end
      if resolved_type != nil do
        {:ok, {resolved_type, schema_def}}
      else
        {:error, ErrorUtil.unresolved_reference(identifier, parent)}
      end
    else
      {:error, ErrorUtil.unresolved_reference(identifier, parent)}
    end
  end

  # The schema id is the reference URI with its fragment stripped.
  @spec determine_schema_id(URI.t()) :: String.t()
  defp determine_schema_id(identifier) do
    identifier
    |> Map.put(:fragment, nil)
    |> to_string
  end
end
|
lib/resolver.ex
| 0.819171
| 0.435421
|
resolver.ex
|
starcoder
|
defmodule Combine.Parsers.Binary do
  @moduledoc """
  This module defines common raw binary parsers, i.e. bits, bytes, uint, etc.
  To use them, just add `import Combine.Parsers.Binary` to your module, or
  reference them directly.
  All of these parsers operate on, and return bitstrings as their results.
  """
  alias Combine.ParserState
  use Combine.Helpers

  @doc """
  This parser parses N bits from the input.
  # Example
  iex> import #{__MODULE__}
  ...> Combine.parse("Hi", bits(8))
  ["H"]
  ...> Combine.parse("Hi", bits(8) |> bits(8))
  ["H", "i"]
  """
  @spec bits(previous_parser, pos_integer) :: parser
  defparser bits(%ParserState{status: :ok, column: col, input: input, results: results} = state, n) when is_integer(n) do
    case input do
      # Consume exactly `n` bits; the remainder stays in `rest` for later parsers.
      <<bits::bitstring-size(n), rest::bitstring>> ->
        %{state | :column => col + n, :input => rest, :results => [bits|results]}
      _ ->
        %{state | :status => :error, :error => "Expected #{n} bits starting at position #{col + 1}, but encountered end of input."}
    end
  end

  @doc """
  This parser parses N bytes from the input.
  # Example
  iex> import #{__MODULE__}
  ...> Combine.parse("Hi", bytes(1))
  ["H"]
  ...> Combine.parse("Hi", bytes(1) |> bytes(1))
  ["H", "i"]
  """
  @spec bytes(previous_parser, pos_integer) :: parser
  defparser bytes(%ParserState{status: :ok, column: col, input: input, results: results} = state, n) when is_integer(n) do
    # Byte count expressed in bits so a single bitstring match can be used.
    bits_size = n * 8
    case input do
      <<bits::bitstring-size(bits_size), rest::bitstring>> ->
        # Column tracking is in bits throughout this module.
        %{state | :column => col + bits_size, :input => rest, :results => [bits|results]}
      _ ->
        %{state | :status => :error, :error => "Expected #{n} bytes starting at position #{col + 1}, but encountered end of input."}
    end
  end

  @doc """
  This parser parses an unsigned, n-bit integer from the input with the given
  endianness.
  # Example
  iex> import #{__MODULE__}
  ...> Combine.parse(<<85::big-unsigned-size(16), "-90"::binary>>, uint(16, :be))
  [85]
  """
  @spec uint(previous_parser, pos_integer, :be | :le) :: parser
  # Guard added for parity with int/3 below: invalid arguments now fail at the
  # function head instead of raising a CaseClauseError deep in the body.
  defparser uint(%ParserState{status: :ok, column: col, input: input, results: results} = state, size, endianness)
    when is_integer(size) and endianness in [:be, :le] do
    case endianness do
      :be ->
        case input do
          <<int::big-unsigned-size(size), rest::bitstring>> ->
            %{state | :column => col + size, :input => rest, :results => [int|results]}
          _ ->
            %{state | :status => :error, :error => "Expected #{size}-bit, unsigned, big-endian integer starting at position #{col + 1}."}
        end
      :le ->
        case input do
          <<int::little-unsigned-size(size), rest::bitstring>> ->
            %{state | :column => col + size, :input => rest, :results => [int|results]}
          _ ->
            %{state | :status => :error, :error => "Expected #{size}-bit, unsigned, little-endian integer starting at position #{col + 1}."}
        end
    end
  end

  @doc """
  This parser parses a signed, n-bit integer from the input with the given
  endianness.
  # Example
  iex> import #{__MODULE__}
  ...> Combine.parse(<<-85::big-signed-size(16),"-90"::binary>>, int(16, :be))
  [-85]
  """
  @spec int(previous_parser, pos_integer, :be | :le) :: parser
  defparser int(%ParserState{status: :ok, column: col, input: input, results: results} = state, size, endianness)
    when is_integer(size) and endianness in [:be, :le] do
    case endianness do
      :be ->
        case input do
          <<int::big-signed-size(size), rest::bitstring>> ->
            %{state | :column => col + size, :input => rest, :results => [int|results]}
          _ ->
            %{state | :status => :error, :error => "Expected #{size}-bit, signed, big-endian integer starting at position #{col + 1}."}
        end
      :le ->
        case input do
          <<int::little-signed-size(size), rest::bitstring>> ->
            %{state | :column => col + size, :input => rest, :results => [int|results]}
          _ ->
            %{state | :status => :error, :error => "Expected #{size}-bit, signed, little-endian integer starting at position #{col + 1}."}
        end
    end
  end

  @doc """
  This parser parses a n-bit floating point number from the input.
  # Example
  iex> import #{__MODULE__}
  ...> Combine.parse(<<2.50::float-size(32)>>, float(32))
  [2.5]
  """
  @spec float(previous_parser, 32 | 64) :: parser
  defparser float(%ParserState{status: :ok, column: col, input: input, results: results} = state, size) when is_integer(size) do
    case input do
      # Erlang float segments only support sizes 16/32/64; spec documents 32 | 64.
      <<num::float-size(size), rest::bitstring>> ->
        %{state | :column => col + size, :input => rest, :results => [num|results]}
      _ ->
        %{state | :status => :error, :error => "Expected #{size}-bit, floating point number starting at position #{col + 1}."}
    end
  end
end
|
deps/combine/lib/combine/parsers/binary.ex
| 0.783326
| 0.519948
|
binary.ex
|
starcoder
|
defmodule Omise.Customer do
  @moduledoc ~S"""
  Provides Customer API interfaces.
  <https://www.omise.co/customers-api>
  """
  use Omise.HTTPClient, endpoint: "customers"

  defstruct object: "customer",
            id: nil,
            livemode: nil,
            location: nil,
            default_card: nil,
            email: nil,
            description: nil,
            metadata: %{},
            created: nil,
            cards: %Omise.List{data: [%Omise.Card{}]},
            deleted: false

  @type t :: %__MODULE__{
          object: String.t(),
          id: String.t(),
          livemode: boolean,
          location: String.t(),
          default_card: String.t(),
          email: String.t(),
          description: String.t(),
          metadata: map,
          created: String.t(),
          cards: Omise.List.t(),
          deleted: boolean
        }

  @doc ~S"""
  List all customers.
  Returns `{:ok, customers}` if the request is successful, `{:error, error}` otherwise.
  ## Query Parameters:
  * `offset` - (optional, default: 0) The offset of the first record returned.
  * `limit` - (optional, default: 20, maximum: 100) The maximum amount of records returned.
  * `from` - (optional, default: 1970-01-01T00:00:00Z, format: ISO 8601) The UTC date and time limiting the beginning of returned records.
  * `to` - (optional, default: current UTC Datetime, format: ISO 8601) The UTC date and time limiting the end of returned records.
  ## Examples
  Omise.Customer.list
  Omise.Customer.list(limit: 5)
  """
  @spec list(Keyword.t(), Keyword.t()) :: {:ok, Omise.List.t()} | {:error, Omise.Error.t()}
  def list(params \\ [], opts \\ []) do
    # Decode the response as a list of customers.
    get(@endpoint, params, Keyword.merge(opts, as: %Omise.List{data: [%__MODULE__{}]}))
  end

  @doc ~S"""
  Retrieve a customer.
  Returns `{:ok, customer}` if the request is successful, `{:error, error}` otherwise.
  ## Examples
  Omise.Customer.retrieve("cust_test_4xtrb759599jsxlhkrb")
  """
  @spec retrieve(String.t(), Keyword.t()) :: {:ok, t} | {:error, Omise.Error.t()}
  def retrieve(id, opts \\ []) do
    get("#{@endpoint}/#{id}", [], Keyword.merge(opts, as: %__MODULE__{}))
  end

  @doc ~S"""
  Create a customer.
  Returns `{:ok, customer}` if the request is successful, `{:error, error}` otherwise.
  ## Request Parameters:
  * `email` - (optional) Customer's email.
  * `description` - (optional) A custom description for the customer.
  * `card` - (optional) A card token in case you want to add a card to the customer.
  ## Examples
  # Create a customer without attaching a card
  Omise.Customer.create(
  email: "<EMAIL>",
  description: "You know, it almost makes me wish for rain"
  )
  # Create a customer and attach a card
  Omise.Customer.create(
  email: "<EMAIL>",
  description: "Don't take me tongue tied",
  card: "tokn_test_51yer81s9aqqyktdoeh"
  )
  """
  @spec create(Keyword.t(), Keyword.t()) :: {:ok, t} | {:error, Omise.Error.t()}
  def create(params, opts \\ []) do
    post(@endpoint, params, Keyword.merge(opts, as: %__MODULE__{}))
  end

  @doc ~S"""
  Update a customer.
  Returns `{:ok, customer}` if the request is successful, `{:error, error}` otherwise.
  ## Request Parameters:
  * `email` - (optional) Customer's email.
  * `description` - (optional) A custom description for the customer.
  * `card` - (optional) A card token in case you want to add a card to the customer.
  ## Examples
  # Update email and description.
  Omise.Customer.update("cust_test_5<PASSWORD>",
  email: "<EMAIL>",
  description: "Home is when I’m alone with you"
  )
  # Attach a card to a customer.
  Omise.Customer.update("cust_test_4xtrb759599jsxlhkrb",
  card: "tokn_test_4xs9408a642a1htto8z"
  )
  """
  @spec update(String.t(), Keyword.t(), Keyword.t()) :: {:ok, t} | {:error, Omise.Error.t()}
  def update(id, params, opts \\ []) do
    put("#{@endpoint}/#{id}", params, Keyword.merge(opts, as: %__MODULE__{}))
  end

  @doc ~S"""
  Destroy a customer.
  Returns `{:ok, customer}` if the request is successful, `{:error, error}` otherwise.
  ## Examples
  Omise.Customer.destroy("cust_test_4xtrb759599jsxlhkrb")
  """
  @spec destroy(String.t(), Keyword.t()) :: {:ok, t} | {:error, Omise.Error.t()}
  def destroy(id, opts \\ []) do
    delete("#{@endpoint}/#{id}", Keyword.merge(opts, as: %__MODULE__{}))
  end

  @doc ~S"""
  Search all the customers.
  Returns `{:ok, customers}` if the request is successful, `{:error, error}` otherwise.
  ## Query Parameters:
  <https://www.omise.co/search-query-and-filters>
  ## Examples
  Omise.Customer.search(filters: [created: "2016-09-09"])
  Omise.Customer.search(query: "<EMAIL>")
  """
  @spec search(Keyword.t(), Keyword.t()) :: {:ok, Omise.Search.t()} | {:error, Omise.Error.t()}
  def search(params \\ [], opts \\ []) do
    # Delegates to the generic search endpoint scoped to customers.
    Omise.Search.execute("customer", params, opts)
  end

  @doc ~S"""
  List all cards that belongs to the customer.
  Returns `{:ok, cards}` if the request is successful, `{:error, error}` otherwise.
  ## Query Parameters:
  * `offset` - (optional, default: 0) The offset of the first record returned.
  * `limit` - (optional, default: 20, maximum: 100) The maximum amount of records returned.
  * `from` - (optional, default: 1970-01-01T00:00:00Z, format: ISO 8601) The UTC date and time limiting the beginning of returned records.
  * `to` - (optional, default: current UTC Datetime, format: ISO 8601) The UTC date and time limiting the end of returned records.
  * `order` - (optional, default: chronological) The order of the list returned.
  ## Examples
  Omise.Customer.list_cards("cust_test_520j6g67py52xa7qbu2")
  """
  @spec list_cards(String.t(), Keyword.t(), Keyword.t()) :: {:ok, Omise.List.t()} | {:error, Omise.Error.t()}
  def list_cards(id, params \\ [], opts \\ []) do
    get("#{@endpoint}/#{id}/cards", params, Keyword.merge(opts, as: %Omise.List{data: [%Omise.Card{}]}))
  end

  @doc ~S"""
  Retrieve a card.
  Returns `{:ok, card}` if the request is successful, `{:error, error}` otherwise.
  ## Examples
  Omise.Customer.retrieve_card("cust_test_520j6g67py52xa7qbu2", "card_test_520j6g4rxrmurw16b2d")
  """
  @spec retrieve_card(String.t(), String.t(), Keyword.t()) :: {:ok, t} | {:error, Omise.Error.t()}
  def retrieve_card(id, card_id, opts \\ []) do
    get("#{@endpoint}/#{id}/cards/#{card_id}", [], Keyword.merge(opts, as: %Omise.Card{}))
  end

  @doc ~S"""
  Update a card.
  Returns `{:ok, card}` if the request is successful, `{:error, error}` otherwise.
  ## Request Parameters:
  * `name` - (optional) The cardholder name as printed on the card.
  * `expiration_month` - (optional) The expiration month printed on the card.
  * `expiration_year` - (optional) The expiration year printed on the card in the format YYYY.
  * `postal_code` - (optional) The postal code from the city where the card was issued.
  * `city` - (optional) The city where the card was issued.
  ## Examples
  params = [expiration_month: 2018, city: "Bangkok"]
  Omise.Customer.update_card("cust_test_520j6g67py52xa7qbu2", "card_test_520j6g4rxrmurw16b2d", params)
  """
  @spec update_card(String.t(), String.t(), Keyword.t(), Keyword.t()) :: {:ok, t} | {:error, Omise.Error.t()}
  def update_card(id, card_id, params, opts \\ []) do
    put("#{@endpoint}/#{id}/cards/#{card_id}", params, Keyword.merge(opts, as: %Omise.Card{}))
  end

  @doc ~S"""
  Destroy a card.
  Returns `{:ok, card}` if the request is successful, `{:error, error}` otherwise.
  ## Examples
  Omise.Customer.destroy_card("cust_test_520j6g67py52xa7qbu2", "card_test_520j6g4rxrmurw16b2d")
  """
  @spec destroy_card(String.t(), String.t(), Keyword.t()) :: {:ok, t} | {:error, Omise.Error.t()}
  def destroy_card(id, card_id, opts \\ []) do
    delete("#{@endpoint}/#{id}/cards/#{card_id}", Keyword.merge(opts, as: %Omise.Card{}))
  end

  @doc ~S"""
  List all charge schedules for a given customer.
  Returns `{:ok, schedules}` if the request is successful, `{:error, error}` otherwise.
  ## Query Parameters:
  * `offset` - (optional, default: 0) The offset of the first record returned.
  * `limit` - (optional, default: 20, maximum: 100) The maximum amount of records returned.
  * `from` - (optional, default: 1970-01-01T00:00:00Z, format: ISO 8601) The UTC date and time limiting the beginning of returned records.
  * `to` - (optional, default: current UTC Datetime, format: ISO 8601) The UTC date and time limiting the end of returned records.
  ## Examples
  Omise.Customer.list_schedules("cust_test_520j6g67py52xa7qbu2")
  """
  @spec list_schedules(String.t(), Keyword.t(), Keyword.t()) :: {:ok, Omise.List.t()} | {:error, Omise.Error.t()}
  def list_schedules(id, params \\ [], opts \\ []) do
    get("#{@endpoint}/#{id}/schedules", params, Keyword.merge(opts, as: %Omise.List{data: [%Omise.Schedule{}]}))
  end
end
|
lib/omise/customer.ex
| 0.899152
| 0.420183
|
customer.ex
|
starcoder
|
defprotocol Timex.Convertable do
  @moduledoc """
  This protocol is used to convert between various common datetime formats.
  """

  @doc """
  Converts a date/time representation to an Erlang datetime tuple + timezone tuple
  ## Examples:
  iex> use Timex
  ...> datetime = Timex.datetime({{2015, 3, 5}, {12, 0, 0}}, "America/Chicago")
  ...> Timex.to_gregorian(datetime)
  {{2015, 3, 5}, {12, 0, 0}, {6, "CST"}}
  """
  def to_gregorian(date)

  @doc """
  Converts a date/time representation to a Julian date number
  ## Examples:
  iex> use Timex
  ...> Timex.to_julian({{2016,3,9}, {11,0,0}})
  2457457.4
  """
  def to_julian(date)

  @doc """
  Converts a date/time representation to the number of seconds since the start of
  year zero of the Gregorian calendar.
  ## Examples:
  iex> use Timex
  ...> Timex.to_gregorian_seconds({{2015, 3, 5}, {12, 0, 0}})
  63592776000
  """
  def to_gregorian_seconds(date)

  @doc """
  Converts a date/time representation to an Erlang datetime tuple
  ## Examples:
  iex> use Timex
  ...> datetime = Timex.datetime({{2015, 3, 5}, {12, 0, 0}}, "America/Chicago")
  ...> Timex.to_erlang_datetime(datetime)
  {{2015, 3, 5}, {12, 0, 0}}
  """
  def to_erlang_datetime(date)

  @doc """
  Converts a date/time representation to a Date struct
  ## Examples:
  iex> use Timex
  ...> Timex.to_date({{2015, 3, 5}, {12, 0, 0}})
  %Timex.Date{:year => 2015, :month => 3, :day => 5}
  """
  def to_date(date)

  @doc """
  Converts a date/time representation to a DateTime struct
  ## Examples:
  iex> use Timex
  ...> datetime = Timex.to_datetime({{2015, 3, 5}, {12, 0, 0}})
  ...> {datetime.year, datetime.month, datetime.day}
  {2015, 3, 5}
  """
  def to_datetime(date)

  @doc """
  Converts a date/time representation to a UNIX timestamp (i.e. seconds since UNIX epoch)
  Returns {:error, :not_representable} if the date/time occurs before the UNIX epoch
  ## Examples:
  iex> use Timex
  ...> Timex.to_unix({{2015, 3, 5}, {12, 0, 0}})
  1425556800
  """
  def to_unix(date)

  @doc """
  Converts a date/time representation to an Erlang timestamp tuple, relative to the UNIX epoch
  ## Examples:
  iex> use Timex
  ...> Timex.to_timestamp({{2015, 3, 5}, {12, 0, 0}})
  {1425, 556800, 0}
  """
  def to_timestamp(date)
end
|
lib/convert/convertable.ex
| 0.911362
| 0.670425
|
convertable.ex
|
starcoder
|
defmodule Irateburgers.Aggregate do
  @moduledoc """
  Defines common helpers for working with Aggregates.
  Aggregates are represented as `Agent` processes holding some state.
  The state must at least have `id` `:binary_id` and `version` `:integer` keys.
  """
  alias Irateburgers.{Command, CommandProtocol, Event, Repo}
  require Ecto.Query, as: Query

  @type aggregate_id :: binary
  @type aggregate :: %{
    :id => aggregate_id,
    :version => integer,
    atom => term
  }
  @type event :: %{
    :version => integer,
    atom => term
  }

  @doc """
  Finds Aggregate process by id,
  or starts one using the given initial state and module.
  """
  @spec find_or_start(aggregate_id, aggregate) :: pid
  def find_or_start(id, initial = %{id: id, version: 0}) do
    case Registry.lookup(Irateburgers.AggregateRegistry, id) do
      [{pid, _}] -> pid
      [] ->
        # Race window: another caller may register the same id between the
        # lookup above and the start attempt, so :already_started is success.
        case start_agent(id, initial) do
          {:ok, pid} -> pid
          {:error, {:already_started, pid}} -> pid
        end
    end
  end

  # Start an aggregate agent, registering it in AggregateRegistry under key: id
  @spec start_agent(aggregate_id, aggregate) :: {:ok, pid} | {:error, {:already_started, pid}}
  defp start_agent(id, initial_state) do
    Agent.start(fn ->
      # Store &ensure_event_applied/2 as the registry value for this key —
      # presumably invoked by an event publisher to push new events into this
      # process. NOTE(review): confirm against the event-dispatch code.
      {_new, _old} = Registry.update_value(
        Irateburgers.AggregateRegistry,
        id,
        fn _ -> &ensure_event_applied/2 end)
      # Replay persisted events before the agent serves any requests.
      init(initial_state)
    end,
    name: {:via, Registry, {Irateburgers.AggregateRegistry, id}})
  end

  # Initialize an aggregate from events in the Repo: loads every event newer
  # than the aggregate's current version, in sequence order, and applies them.
  @spec init(aggregate) :: aggregate
  defp init(aggregate = %{id: id, version: version}) do
    db_events = Repo.all(
      Query.from e in Event,
      where: e.aggregate == ^id,
      where: e.sequence > ^version,
      order_by: {:asc, e.sequence})
    events = Enum.map(db_events, &Event.to_struct/1)
    apply_events(aggregate, events)
  end

  @doc """
  Given an aggregate and an event, ensures that the event is applied to the aggregate by one of:
  - Applying the event to the aggregate, if the event version is 1 greater than the aggregate version
  - Loading all new events for the aggregate, if the event version is more than 1 greater than the aggregate version
  - Otherwise return the aggregate as-is if the event version is not greater than the aggregate version
  """
  @spec ensure_event_applied(aggregate, event) :: aggregate
  def ensure_event_applied(
    aggregate = %{version: version},
    event = %{version: event_version})
  do
    cond do
      # Next event in sequence: apply it directly.
      event_version == version + 1 -> Event.apply(event, aggregate)
      # Gap detected: reload the missing events from the Repo.
      event_version > version + 1 -> init(aggregate)
      # Stale or duplicate event: ignore.
      event_version <= version -> aggregate
    end
  end

  # Left-fold a list of events over the aggregate state, oldest first.
  @spec apply_events(aggregate, list(event)) :: aggregate
  defp apply_events(aggregate, events) when is_list(events) do
    Enum.reduce(events, aggregate, &Event.apply/2)
  end

  @doc """
  Dispatch a command to an Agent PID.
  If the command is successful, updates the agent state and returns {:ok, state}
  If the command fails, returns {:error, reason} leaving the agent state unchanged
  """
  @spec dispatch_command(pid, CommandProtocol.t) :: {:ok, aggregate} | {:error, term}
  def dispatch_command(pid, command = %{}) when is_pid(pid) do
    # get_and_update serializes command execution through the agent, so a
    # command sees a consistent state and its events are applied atomically.
    Agent.get_and_update pid, fn state ->
      with {:ok, events} <- Command.execute(command, state) do
        new_state = apply_events(state, events)
        {{:ok, new_state}, new_state}
      else
        {:error, reason} ->
          {{:error, reason}, state}
      end
    end
  end
end
|
lib/aggregate.ex
| 0.835584
| 0.431704
|
aggregate.ex
|
starcoder
|
defmodule Granulix.Util do
  @moduledoc """
  Utility helpers for Granulix frame binaries: panning a mono signal into two
  channels and mixing several frame binaries into one.
  """
  alias Granulix.Math

  @doc """
  Make two channels multiplied with pos and 1.0 - pos respectively.
  pos shall be between 0.0 and 1.0.

  NOTE(review): the mapping `posn = 0.5 * pos + 0.5` only spans the full
  0.0..1.0 weight range when `pos` is in -1.0..1.0 — confirm the intended
  input range.
  """
  @spec pan(x :: Granulix.frames(), pos :: float()) :: list(Granulix.frames)
  def pan(x, pos) when is_binary(x) do
    posn = 0.5 * pos + 0.5
    [Math.mul(x, posn), Math.mul(x, 1.0 - posn)]
  end

  @doc "Sum a list of frames into one"
  @spec mix(l :: list(Granulix.frames())) :: Granulix.frames()
  def mix(l) when is_list(l) do
    # Math.add/2 with an empty accumulator — presumably treats <<>> as zero;
    # confirm in Granulix.Math.
    Enum.reduce(l, <<>>, fn x, acc -> Math.add(x, acc) end)
  end

  defmodule Stream do
    @type fs() :: Granulix.Stream.frames_stream()
    @type list_of_frames_stream() :: Enumerable.list(Granulix.frames())
    @type lfs() :: fs() | list_of_frames_stream()

    @doc "Sum a stream of list of frames into one"
    @spec mix(enum :: list_of_frames_stream()) :: fs()
    def mix(enum) do
      Elixir.Stream.map(enum, &Granulix.Util.mix/1)
    end

    @doc """
    Make two channels multiplied with pos and 1.0 - pos respectively.
    pos shall be between 0.0 and 1.0. The returned stream holds a list
    of two frame arrays.
    """
    @spec pan(enum :: fs(), pos :: float() | Enumerable.t) :: list_of_frames_stream()
    def pan(enum, panning) when is_number(panning) do
      Elixir.Stream.map(enum, fn frames -> Granulix.Util.pan(frames, panning) end)
    end
    # Non-number panning is treated as a stream of pan positions, zipped
    # chunk-by-chunk with the input frames.
    def pan(enum, panning) do
      Elixir.Stream.zip(enum, panning)
      |> Elixir.Stream.map(fn {frames, panf} -> Granulix.Util.pan(frames, panf) end)
    end

    @doc """
    This function is the one that will halt the stream. It shall be included
    in your pipeline unless you have some other means of stopping it.
    The time argument is in seconds.
    """
    @spec dur(enum :: lfs(),
      time :: float()) :: lfs()
    def dur(enum, time) do
      ctx = Granulix.Ctx.get()
      period_size = ctx.period_size
      # Total number of frames to emit before halting.
      no_of_frames = round(time * ctx.rate)
      Elixir.Stream.transform(
        enum,
        no_of_frames,
        fn frames, acc ->
          if acc > 0 do
            cond do
              # List = multi-channel chunk: count frames of the first channel.
              # byte_size/4 assumes 4-byte (32-bit) samples — TODO confirm.
              is_list(frames) ->
                {[frames], acc - byte_size(hd(frames)) / 4}
              is_binary(frames) ->
                {[frames], acc - byte_size(frames) / 4}
              # Scalar elements count as one period each.
              is_float(frames) ->
                {[frames], acc - period_size}
              # Integers are coerced to floats on the way through.
              is_integer(frames) ->
                {[frames * 1.0], acc - period_size}
            end
          else
            {:halt, acc}
          end
        end)
    end

    @doc "Infinite stream repeating `value` (coerced to float)."
    def value(value) when is_number(value) do
      Elixir.Stream.unfold(
        value * 1.0,
        fn x -> {x,x} end
      )
    end

    @doc """
    Infinite stream that emits the last set value, starting at `start_value`.
    The consuming process can be updated via `set/3`: a `{key, value}` message
    changes the emitted value; `{key, nil}` ends the stream.
    """
    def setter(key, start_value) when is_number(start_value) do
      Elixir.Stream.unfold(
        start_value * 1.0,
        fn x ->
          receive do
            {^key,nil} -> nil
            {^key,y} -> {y,y}
          after
            # Non-blocking: keep emitting the current value when no message.
            0 -> {x,x}
          end
        end
      )
    end

    # Send a new value (or :halt to stop) to a process consuming a setter/2 stream.
    def set(pid, key, :halt), do: send(pid, {key, nil})
    def set(pid, key, value), do: send(pid, {key, 1.0 * value})
  end
end
|
lib/granulix/util.ex
| 0.778649
| 0.469277
|
util.ex
|
starcoder
|
defmodule Flex.Decoder do
  @moduledoc """
  Decoder decodes the JSON-Cadence Data Interchange Format into a simple JSON format.
  """

  # Cadence integer types: values are transported as decimal strings.
  @integer_types ~w(Int UInt Int8 UInt8 Int16 UInt16 Int32 UInt32
                    Int64 UInt64 Int128 UInt128 Int256 UInt256
                    Word8 Word16 Word32 Word64)

  # Cadence fixed-point types: decimal strings with a fraction part
  # (String.to_float/1 requires the "." to be present).
  @fixed_point_types ~w(Fix64 UFix64)

  # Composite kinds and the atom used to tag their decoded tuple.
  @composite_types %{
    "Resource" => :resource,
    "Struct" => :struct,
    "Event" => :event,
    "Contract" => :contract,
    "Enum" => :enum
  }

  @doc """
  Decodes one JSON-Cadence value (already parsed into a map) into a plain
  Elixir term: scalars become numbers/booleans/strings, arrays become lists,
  dictionaries become maps, and composites become `{kind, id, fields}` tuples.
  Raises `FunctionClauseError` for unknown type tags.
  """
  def decode(%{"type" => "Void"}), do: nil

  # Optional: nil stays nil, otherwise decode the wrapped value.
  def decode(%{"type" => "Optional", "value" => value}) do
    case value do
      nil -> nil
      value -> decode(value)
    end
  end

  # Booleans, strings and addresses pass through untouched.
  def decode(%{"type" => type, "value" => value}) when type in ["Bool", "String", "Address"] do
    value
  end

  def decode(%{"type" => type, "value" => val}) when type in @integer_types do
    String.to_integer(val)
  end

  def decode(%{"type" => type, "value" => val}) when type in @fixed_point_types do
    String.to_float(val)
  end

  def decode(%{"type" => "Array", "value" => val}) do
    Enum.map(val, &decode/1)
  end

  # BUGFIX: the previous clause only matched a single-entry list
  # (`"value" => [val]`), so empty and multi-entry dictionaries raised
  # FunctionClauseError. Decode every entry and collect them into one map.
  def decode(%{"type" => "Dictionary", "value" => entries}) when is_list(entries) do
    Map.new(entries, fn %{"key" => key, "value" => value} ->
      {decode(key), decode(value)}
    end)
  end

  # All composite kinds share the same shape; tag the result by kind.
  def decode(%{"type" => type, "value" => val})
      when type in ["Resource", "Struct", "Event", "Contract", "Enum"] do
    {id, fields} = extract_composite(val)
    {Map.fetch!(@composite_types, type), id, fields}
  end

  def decode(%{
        "type" => "Path",
        "value" => %{
          "domain" => domain,
          "identifier" => identifier
        }
      }) do
    {:path, domain, identifier}
  end

  def decode(%{
        "type" => "Type",
        "value" => %{
          "staticType" => static_type
        }
      }) do
    {:type, static_type}
  end

  def decode(%{
        "type" => "Capability",
        "value" => %{
          "path" => path,
          "address" => addr,
          "borrowType" => borrow_type
        }
      }) do
    {:capability,
     %{
       path: path,
       address: addr,
       borrow_type: borrow_type
     }}
  end

  # Shared helper for composites: returns the composite id and a map of
  # field name => decoded field value.
  defp extract_composite(%{"id" => id, "fields" => fields}) do
    {id,
     Map.new(fields, fn %{"name" => name, "value" => value} ->
       {name, decode(value)}
     end)}
  end
end
|
lib/flex/decoder.ex
| 0.61231
| 0.571677
|
decoder.ex
|
starcoder
|
defmodule Salvadanaio.Import.Fineco do
  @moduledoc """
  This module creates movements for a Fineco account, reading them from the XLSX file exported
  from the Fineco movements web page.
  The original exported file is a XLS file, which must be converted to XSLX before running this
  script.
  The module looks for the Account or creates it if not found.
  """
  alias Salvadanaio.Account
  alias Salvadanaio.Category
  alias Salvadanaio.Repo

  # After this line in the XLSX file, the movements begin
  @id_line ["Data Operazione", "Data Valuta", "Entrate", "Uscite", "Descrizione", "Descrizione Completa"]

  # Entry point: extracts sheet 0 of the given XLSX file and imports every
  # movement row found after the header line (@id_line).
  def run(filepath) do
    case Xlsxir.multi_extract(filepath, 0) do
      {:ok, xls} -> parse_rows(Xlsxir.get_list(xls))
      {:error, reason} ->
        IO.puts(reason)
        # NOTE(review): exit(1) terminates the current process with reason 1;
        # it is not a shell exit status. Consider System.halt(1) for CLI use.
        exit(1)
    end
  end

  def help() do
    IO.puts("Usage: mix run priv/import/fineco.exs <XLSX file>")
  end

  # Walks rows until the header line is found. A row matching
  # "Saldo EUR al DD/MM/YYYY: N.NN" finds/creates the account with that
  # initial balance; its return value is intentionally discarded — the later
  # get_account_id() call locates the same account by name.
  defp parse_rows([head | tail]) do
    [first_element | _] = head
    if first_element != nil do
      case Regex.named_captures(~r/Saldo EUR al (?<date>\d+\/\d+\/\d+): (?<balance>\d+\.\d+)/iu, first_element) do
        %{"balance" => balance, "date" => date} ->
          {balance, ""} = Float.parse(balance)
          date = parse_date(date)
          get_account_id(balance, date)
        _ -> nil
      end
    end
    case head do
      @id_line -> print_values(tail, get_account_id())
      _ -> parse_rows(tail)
    end
  end
  defp parse_rows([]) do
  end

  # Inserts each remaining row as a movement for the given account.
  defp print_values([head | tail], account_id) do
    insert_movement(head, account_id)
    print_values(tail, account_id)
  end
  defp print_values([], _) do
  end

  # A row is either an income (outcome column empty) or an outcome (income
  # column empty). Amounts are stored as integer cents; outcomes are negated.
  defp insert_movement(row, account_id) do
    case row do
      [operation_date, value_date, income, "", short_description, description] ->
        movement_attrs = %Salvadanaio.Movement{
          account_id: account_id,
          operation_date: parse_date(operation_date),
          value_date: parse_date(value_date),
          amount: Money.new(Kernel.trunc(Kernel.round(income*100)), :EUR),
          short_description: short_description,
          description: description
        }
        add_movement(movement_attrs)
      [operation_date, value_date, "", outcome, short_description, description] ->
        movement_attrs = %Salvadanaio.Movement{
          account_id: account_id,
          operation_date: parse_date(operation_date),
          value_date: parse_date(value_date),
          amount: Money.new(Kernel.trunc(Kernel.round(-outcome*100)), :EUR),
          short_description: short_description,
          description: description
        }
        add_movement(movement_attrs)
    end
  end

  # Categorizes the movement (by description text), then persists it.
  defp add_movement(movement_attrs) do
    category_id = Category.get_category(movement_attrs.description, String.trim(movement_attrs.short_description))
    movement_attrs = Map.put(movement_attrs, :category_id, category_id)
    Salvadanaio.Services.Movements.insert_movement(movement_attrs)
  end

  defp get_account_id(initial_balance \\ 0, balance_date \\ Date.utc_today()) do
    # find account or create it
    case Repo.get_by(Account, name: "Fineco") do
      nil -> create_account(initial_balance, balance_date)
      account -> account.id
    end
  end

  # Creates the "Fineco" account and returns its id. Balance is stored as
  # integer cents.
  defp create_account(initial_balance, balance_date) do
    Salvadanaio.Repo.insert!(%Salvadanaio.Account{
      name: "Fineco",
      balance: Money.new(Kernel.trunc(initial_balance*100), :EUR),
      balance_update_date: balance_date
    }).id
  end

  # Accepts a date as string in the format DD/MM/YYYY and returns a Date.
  defp parse_date(datestr) do
    String.split(datestr, "/")
    |> Enum.map(&Integer.parse/1)
    |> Enum.map(fn {int, ""} -> int end)
    |> Enum.reverse
    |> List.to_tuple
    |> Date.from_erl!
  end
end
|
backend/lib/salvadanaio/import/fineco.ex
| 0.53048
| 0.403214
|
fineco.ex
|
starcoder
|
defmodule Durango.Dsl.Function do
  @moduledoc """
  Listed at https://docs.arangodb.com/3.3/AQL/Functions/
  This module parses and renders functions.
  """

  # Clause order matters: the first clause accepts any argument count inside
  # the allowed range; the later range/exact clauses only ever run for
  # invalid counts and raise CompileError.
  def validate!({_name, low..high}, args) when length(args) >= low and length(args) <= high do
    nil
  end
  # Normalize an exact-arity spec (integer) into a degenerate range.
  def validate!({name, count}, args) when is_integer(count) do
    validate!({name, count..count}, args)
  end
  # Exact-arity mismatch: report the single required count.
  def validate!({name, count..count}, args) do
    msg = "Durango.Function error - function #{inspect name} requires #{count} arguments. Got #{length(args)} arguments."
    raise CompileError, description: msg
  end
  # Range mismatch: report the allowed range.
  def validate!({name, low..high}, args) do
    msg = "Durango.Function error - function #{inspect name} requires between #{low} and #{high} arguments. Got #{length(args)} arguments."
    raise CompileError, description: msg
  end

  # Separator after the argument at `index`: empty once `limit` is reached,
  # otherwise a comma.
  def suffix(index, limit) when index >= limit do
    ""
  end
  def suffix(_, _) do
    ","
  end

  # AQL function names render upper-cased, e.g. :length -> "LENGTH".
  def render_func_name(func_name) when is_atom(func_name) do
    func_name
    |> to_string
    |> String.upcase
  end

  # Injects a `parse_expr/2` clause into the caller that recognizes known AQL
  # function calls, validates their arity, parses each argument (threading
  # bound variables through), and appends "NAME(arg, ...)" to the query tokens.
  defmacro inject_parser() do
    quote do
      alias Durango.Query
      alias Durango.Dsl.Function
      alias Durango.Dsl
      @function_names Function.Names.names_list()
      @functions Function.Names.functions()
      def parse_expr(%Query{} = q, {func_name, _, args}) when func_name in @function_names and is_list(args) do
        arity = Keyword.fetch!(@functions, func_name)
        Function.validate!({func_name, arity}, args)
        # args_query =
        #   %Query{bound_variables: q.bound_variables}
        #   |> Dsl.parse_expr(args)
        # Each argument is parsed with a fresh token list but an accumulated
        # set of bound variables, so bindings introduced by one argument are
        # visible to the next.
        args_query =
          Enum.reduce(args, %Query{bound_variables: q.bound_variables}, fn arg, q_acc ->
            new_q =
              %Query{bound_variables: q_acc.bound_variables}
              |> Dsl.parse_expr(arg)
            %{ q_acc |
              bound_variables: new_q.bound_variables,
            }
            |> Query.append_tokens(Enum.join(new_q.tokens, " "))
          end)
        func_token =
          [
            Function.render_func_name(func_name),
            "(",
            Enum.join(args_query.tokens, ", "),
            ")",
          ]
          |> Enum.join("")
        q
        |> Map.put(:bound_variables, args_query.bound_variables)
        |> Query.append_tokens(func_token)
      end
    end
  end
end
|
lib/dsl/function.ex
| 0.556159
| 0.461927
|
function.ex
|
starcoder
|
defmodule Wordza.GameBoard do
@moduledoc """
This is our Wordza GameBoard
The configuration of all board positions.
The configuration of all current played tiles on the board.
NOTE I have chosen to represent the board as a map vs. 2 dim list
because elixir...
http://blog.danielberkompas.com/2016/04/23/multidimensional-arrays-in-elixir.html
"""
require Logger
alias Wordza.GameTile
@doc """
Build a new board for a type of game.
It sets up a grid of the correct size and adds all the bonuses.
## Examples
iex> Wordza.GameBoard.create(:mock)
%{
0 => %{
0 => %{bonus: :tw, letter: nil},
1 => %{bonus: nil, letter: nil},
2 => %{bonus: :tl, letter: nil},
3 => %{bonus: nil, letter: nil},
4 => %{bonus: :tw, letter: nil}
},
1 => %{
0 => %{bonus: nil, letter: nil},
1 => %{bonus: :dw, letter: nil},
2 => %{bonus: nil, letter: nil},
3 => %{bonus: :dw, letter: nil},
4 => %{bonus: nil, letter: nil}
},
2 => %{
0 => %{bonus: :dl, letter: nil},
1 => %{bonus: nil, letter: nil},
2 => %{bonus: :st, letter: nil},
3 => %{bonus: nil, letter: nil},
4 => %{bonus: :dl, letter: nil}
},
3 => %{
0 => %{bonus: nil, letter: nil},
1 => %{bonus: :dw, letter: nil},
2 => %{bonus: nil, letter: nil},
3 => %{bonus: :dw, letter: nil},
4 => %{bonus: nil, letter: nil}
},
4 => %{
0 => %{bonus: :tw, letter: nil},
1 => %{bonus: nil, letter: nil},
2 => %{bonus: :tl, letter: nil},
3 => %{bonus: nil, letter: nil},
4 => %{bonus: :tw, letter: nil}
}
}
"""
def create(:scrabble) do
board = create_board(15, 15)
board
|> add_bonus_bulk([
[7, 7],
], :st)
|> add_bonus_bulk([
[0, 0],
[0, 7],
[0, 14],
[7, 0],
], :tw)
|> add_bonus_bulk([
[1, 1],
[2, 2],
[3, 3],
[4, 4],
], :dw)
|> add_bonus_bulk([
[1, 5],
[5, 1],
[5, 5],
], :tl)
|> add_bonus_bulk([
[0, 3],
[3, 0],
[2, 6],
[6, 2],
[6, 6],
[3, 0],
[3, 7],
[7, 3],
], :dl)
|> add_bonus_mirror()
end
def create(:wordfeud) do
board = create_board(15, 15)
board
|> add_bonus_bulk([
[7, 7],
], :st)
|> add_bonus_bulk([
[0, 4],
[4, 0],
], :tw)
|> add_bonus_bulk([
[2, 2],
[4, 4],
[3, 7],
[7, 3],
], :dw)
|> add_bonus_bulk([
[0, 0],
[1, 5],
[3, 3],
[5, 1],
[5, 5],
], :tl)
|> add_bonus_bulk([
[0, 7],
[1, 1],
[2, 6],
[4, 6],
[6, 2],
[6, 4],
[7, 0],
], :dl)
|> add_bonus_mirror()
end
def create(:mock) do
board = create_board(5, 5)
board
|> add_bonus_bulk([[2, 2]], :st)
|> add_bonus_bulk([[0, 0]], :tw)
|> add_bonus_bulk([[1, 1]], :dw)
|> add_bonus_bulk([[0, 2]], :tl)
|> add_bonus_bulk([[2, 0]], :dl)
|> add_bonus_mirror()
end
@doc """
given an X & Y count, build out a matrix of nils
## Examples
iex> Wordza.GameBoard.create_board(3, 3)
%{
0 => %{
0 => %{bonus: nil, letter: nil},
1 => %{bonus: nil, letter: nil},
2 => %{bonus: nil, letter: nil},
},
1 => %{
0 => %{bonus: nil, letter: nil},
1 => %{bonus: nil, letter: nil},
2 => %{bonus: nil, letter: nil},
},
2 => %{
0 => %{bonus: nil, letter: nil},
1 => %{bonus: nil, letter: nil},
2 => %{bonus: nil, letter: nil},
},
}
"""
def create_board(y_count, x_count) do
r = Range.new(0, x_count - 1)
r |> Enum.reduce(%{}, fn(i, board) -> board |> Map.put(i, create_board_row(y_count)) end)
end
def create_board_row(y_count) do
r = Range.new(0, y_count - 1)
r |> Enum.reduce(%{}, fn(i, row) -> row |> Map.put(i, create_board_cell()) end)
end
defp create_board_cell(), do: %{bonus: nil, letter: nil}
@doc """
Update a single cell with a single bonus
## Examples
iex> board = Wordza.GameBoard.create(:mock)
iex> Wordza.GameBoard.add_bonus(board, 0, 1, :x) |> get_in([0, 1, :bonus])
:x
"""
def add_bonus(board, y, x, bonus) do
put_in(board, [y, x, :bonus], bonus)
end
@doc """
Update a set of cells, with a bonus (bulk add)
## Examples
iex> board = Wordza.GameBoard.create(:mock)
iex> Wordza.GameBoard.add_bonus_bulk(board, [[0, 0], [0, 1]], :x) |> get_in([0, 1, :bonus])
:x
"""
def add_bonus_bulk(board, [] = _coords, _bonus), do: board
def add_bonus_bulk(board, coords, bonus) do
{[y, x], coords} = List.pop_at(coords, 0)
board |> put_in([y, x, :bonus], bonus) |> add_bonus_bulk(coords, bonus)
end
@doc """
Update all bonus cells, make the board a 4 quadrant mirror-copy of the top-left quad
(this is kinda silly, but fun)
## Examples
iex> board = Wordza.GameBoard.create(:mock) |> put_in([0, 0, :bonus], :x)
iex> Wordza.GameBoard.add_bonus_mirror(board) |> get_in([4, 4, :bonus])
:x
"""
def add_bonus_mirror(board) do
{total_y, total_x, center_y, center_x} = measure(board)
board |> add_bonus_mirror(total_y, center_y, total_x, center_x)
end
# The /5 clauses walk the top-left quadrant from the center back to (0, 0):
# x counts down within a row; when x underflows to -1 the row is done, so y
# is decremented and x resets to the center column. y reaching -1 ends the
# recursion and returns the finished board.
def add_bonus_mirror(board, _total_y, -1 = _y, _total_x, _x), do: board
def add_bonus_mirror(board, total_y, y, total_x, -1 = _x) do
x = Integer.floor_div(total_x, 2)
board |> add_bonus_mirror(total_y, y - 1, total_x, x)
end
def add_bonus_mirror(board, total_y, y, total_x, x) do
# Copy the bonus at (y, x) to its horizontal, vertical and diagonal mirror
# positions, then continue one column to the left in the same row.
mirror_at_x = total_x - x - 1
mirror_at_y = total_y - y - 1
board
|> add_bonus(y, mirror_at_x, board[y][x][:bonus])
|> add_bonus(mirror_at_y, x, board[y][x][:bonus])
|> add_bonus(mirror_at_y, mirror_at_x, board[y][x][:bonus])
|> add_bonus_mirror(total_y, y, total_x, x - 1)
end
@doc """
Get the basic measurements for a board
## Examples
iex> board = Wordza.GameBoard.create(:mock)
iex> Wordza.GameBoard.measure(board)
{5, 5, 2, 2}
"""
def measure(board) do
  # map_size/1 is O(1); the previous Map.keys |> Enum.count walked every key.
  total_y = map_size(board)
  total_x = map_size(board[0])
  center_y = Integer.floor_div(total_y, 2)
  center_x = Integer.floor_div(total_x, 2)
  {total_y, total_x, center_y, center_x}
end
@doc """
Print a board, nicely
## Examples
#iex> board = Wordza.GameBoard.create(:mock) |> Wordza.GameBoard.add_letters([["A", 1, 1], ["B", 3, 3]])
#iex> Wordza.GameBoard.to_string(board)
#"---------\n| ..... |\n| .A... |\n| ..... |\n| ...B. |\n| ..... |\n---------"
"""
def to_string(board) do
  # Frame each row of rendered letters with "| ... |" and sandwich the rows
  # between two dashed borders sized to the board's width.
  width = Enum.count(board[0])
  border = String.duplicate("-", width + 4)

  rows =
    board
    |> to_list()
    |> Enum.map(fn row -> "| #{Enum.map_join(row, "", &to_string_letter/1)} |" end)

  Enum.join([border] ++ rows ++ [border], "\n")
end

# Empty squares render as a dot; played squares render their letter.
defp to_string_letter(nil), do: "."
defp to_string_letter(letter), do: letter
@doc """
Convert a board to a 2-dim list matrix of letters
## Examples
iex> board = Wordza.GameBoard.create(:mock)
iex> Wordza.GameBoard.to_list(board)
[
[nil, nil, nil, nil, nil],
[nil, nil, nil, nil, nil],
[nil, nil, nil, nil, nil],
[nil, nil, nil, nil, nil],
[nil, nil, nil, nil, nil]
]
iex> board = Wordza.GameBoard.create(:mock)
iex> Wordza.GameBoard.to_list(board, :bonus)
[
[:tw, nil, :tl, nil, :tw],
[nil, :dw, nil, :dw, nil],
[:dl, nil, :st, nil, :dl],
[nil, :dw, nil, :dw, nil],
[:tw, nil, :tl, nil, :tw]
]
"""
def to_list(board, key \\ :letter) do
  # Nested comprehensions: one inner list per row, extracting `key` from
  # each cell (nil when the cell lacks the key).
  for {_y, row} <- board do
    for {_x, cell} <- row, do: Map.get(cell, key, nil)
  end
end
@doc """
Convert a board to a flat list of y+x pairs
## Examples
iex> board = Wordza.GameBoard.create(:mock)
iex> Wordza.GameBoard.to_yx_list(board)
[
[0, 0], [0, 1], [0, 2], [0, 3], [0, 4],
[1, 0], [1, 1], [1, 2], [1, 3], [1, 4],
[2, 0], [2, 1], [2, 2], [2, 3], [2, 4],
[3, 0], [3, 1], [3, 2], [3, 3], [3, 4],
[4, 0], [4, 1], [4, 2], [4, 3], [4, 4]
]
"""
def to_yx_list(board) do
  # flat_map replaces the old map + reduce-with-++ pattern, which rebuilt
  # the accumulator on every row (accidentally quadratic).
  Enum.flat_map(board, fn {y, row} ->
    Enum.map(row, fn {x, _cell} -> [y, x] end)
  end)
end
@doc """
Convert a board to a flat list of letters_yx sets
## Examples
iex> board = Wordza.GameBoard.create(:mock) |> Wordza.GameBoard.add_letters([["A", 1, 1], ["B", 3, 3]])
iex> Wordza.GameBoard.to_letter_yx_list(board)
[["A", 1, 1], ["B", 3, 3]]
"""
def to_letter_yx_list(board) do
  # flat_map avoids the old reduce-with-++ accumulation (quadratic), and the
  # comprehension filter drops un-played cells in the same pass instead of a
  # separate Enum.filter traversal.
  Enum.flat_map(board, fn {y, row} ->
    for {x, %{letter: letter}} <- row, not is_nil(letter), do: [letter, y, x]
  end)
end
@doc """
Is a board empty?
## Examples
iex> board = Wordza.GameBoard.create(:mock)
iex> Wordza.GameBoard.empty?(board)
true
iex> board = Wordza.GameBoard.create(:mock) |> put_in([3, 3, :letter], "a")
iex> Wordza.GameBoard.empty?(board)
false
"""
def empty?(board) do
  # Empty means no cell anywhere holds a letter.
  board
  |> to_list(:letter)
  |> Enum.all?(fn row -> Enum.all?(row, &is_nil/1) end)
end
@doc """
Add a letters_yx format set of letters to a board
(this is usually for building out the next version of the board, if a play commits)
## Examples
iex> board = %{0 => %{0 => %{letter: nil}, 1 => %{letter: nil}}}
iex> letters_yx = [["A", 0, 0], ["B", 0, 1]]
iex> Wordza.GameBoard.add_letters(board, letters_yx)
%{0 => %{0 => %{letter: "A"}, 1 => %{letter: "B"}}}
"""
def add_letters(board, []), do: board
# [letter, y, x] list form: simply write the letter into the target square.
def add_letters(board, [[letter, y, x] | letters_yx]) do
  board |> put_in([y, x, :letter], letter) |> add_letters(letters_yx)
end
# Tile/map form: merge the tile's remaining fields into the target square.
def add_letters(board, [%{x: x, y: y, letter: _l} = tile | letters_yx]) do
  square = board |> get_in([y, x]) |> Map.merge(simplify_letter(tile))
  board
  |> put_in([y, x], square)
  |> add_letters(letters_yx)
end

# Strip positional fields from a tile so only its cell data gets merged.
# The :x/:y/:letter keys are matched with underscored names purely to assert
# their presence; the previous heads bound x/y without using them, which
# produced compiler warnings.
def simplify_letter(%GameTile{x: _x, y: _y, letter: _l} = tile) do
  tile |> Map.from_struct() |> Map.delete(:y) |> Map.delete(:x)
end
def simplify_letter(%{x: _x, y: _y, letter: _l} = tile) do
  tile |> Map.delete(:y) |> Map.delete(:x)
end
@doc """
Given a board and a y + x, is there a letter played there?
## Examples
iex> board = %{0 => %{0 => %{letter: "A"}, 1 => %{letter: nil}}}
iex> Wordza.GameBoard.played?(board, 0, 0)
true
iex> board = %{0 => %{0 => %{letter: "A"}, 1 => %{letter: nil}}}
iex> Wordza.GameBoard.played?(board, 0, 1)
false
"""
def played?(board, y, x) do
  # A square counts as played whenever its :letter is anything but nil.
  board
  |> get_in([y, x, :letter])
  |> is_nil()
  |> Kernel.not()
end
@doc """
Given a board and a y + x, is the y + x on the board?
## Examples
iex> board = %{0 => %{0 => %{letter: "A"}, 1 => %{letter: nil}}}
iex> Wordza.GameBoard.exists?(board, 0, 0)
true
iex> board = %{0 => %{0 => %{letter: "A"}, 1 => %{letter: nil}}}
iex> Wordza.GameBoard.exists?(board, 0, 1)
true
iex> board = %{0 => %{0 => %{letter: "A"}, 1 => %{letter: nil}}}
iex> Wordza.GameBoard.exists?(board, 0, 2)
false
"""
def exists?(board, y, x) do
  # The coordinate is on the board exactly when a cell map is found there.
  board
  |> get_in([y, x])
  |> is_nil()
  |> Kernel.not()
end
end
|
lib/game/game_board.ex
| 0.76934
| 0.540318
|
game_board.ex
|
starcoder
|
defmodule BlueHeron.HCI.Command.ControllerAndBaseband.Reset do
# The `use` injects @ogf/@ocf/@opcode plus the Command behaviour scaffolding.
use BlueHeron.HCI.Command.ControllerAndBaseband, ocf: 0x0003
@moduledoc """
Reset the baseband
* OGF: `#{inspect(@ogf, base: :hex)}`
* OCF: `#{inspect(@ocf, base: :hex)}`
* Opcode: `#{inspect(@opcode)}`
Bluetooth Spec v5.2, Vol 4, Part E, section 7.3.2
The `HCI_Reset` command will reset the Controller and the Link Manager on the BR/EDR Controller, the PAL on an AMP Controller, or the Link Layer on an LE Controller. If the Controller supports both BR/EDR and LE then the HCI_Reset command shall reset the Link Manager, Baseband and Link Layer. The HCI_Reset command shall not affect the used HCI transport layer since the HCI transport layers may have reset mechanisms of their own. After the reset is completed, the current operational state will be lost, the Controller will enter standby mode and the Controller will automatically revert to the default values for the parameters for which default values are defined in the specification.
Note: The HCI_Reset command will not necessarily perform a hardware reset. This is implementation defined.
On an AMP Controller, the HCI_Reset command shall reset the service provided at the logical HCI to its initial state, but beyond this the exact effect on the Controller device is implementation defined and should not interrupt the service provided to other protocol stacks.
The Host shall not send additional HCI commands before the HCI_Command_Complete event related to the HCI_Reset command has been received.
## Command Parameters
> None
## Return Parameters
* `:status` - see `BlueHeron.ErrorCode`
"""
# HCI_Reset carries no command parameters.
defparameters []
defimpl BlueHeron.HCI.Serializable do
# Wire format: opcode binary followed by a single 0 byte — presumably the
# parameter-total-length field (no parameters); mirrors deserialize/1 below.
def serialize(%{opcode: opcode}) do
<<opcode::binary, 0>>
end
end
@impl BlueHeron.HCI.Command
def deserialize(<<@opcode::binary, 0>>) do
# This is a pretty useless function because there aren't
# any parameters to actually parse out of this, but we
# can at least assert its correct with matching
%__MODULE__{}
end
@impl BlueHeron.HCI.Command
# The only return parameter is the one-byte status code, passed through raw.
def deserialize_return_parameters(<<status>>) do
%{status: status}
end
@impl true
# Inverse of the above: accepts an atom/name and re-encodes it as its byte.
def serialize_return_parameters(%{status: status}) do
<<BlueHeron.ErrorCode.to_code!(status)>>
end
end
|
lib/blue_heron/hci/commands/controller_and_baseband/reset.ex
| 0.803135
| 0.530784
|
reset.ex
|
starcoder
|
defmodule Vivid.Polygon do
alias Vivid.{Polygon, Point, Line}
defstruct vertices: [], fill: false
require Integer
@moduledoc ~S"""
Describes a Polygon as a series of vertices.
Polygon implements both the `Enumerable` and `Collectable` protocols.
## Example
iex> use Vivid
...> 0..3
...> |> Stream.map(fn
...> i when rem(i, 2) == 0 -> Point.init(i * 3, i * 4)
...> i -> Point.init(i * 3, i * 2)
...> end)
...> |> Enum.into(Polygon.init())
...> |> to_string()
"@@@@@@@@@@@@\n" <>
"@@@@@@@ @@@@\n" <>
"@@@@@@@ @@\n" <>
"@@@@@@ @@@ @\n" <>
"@@@@@@ @ @@\n" <>
"@@@@@ @ @@@@\n" <>
"@@@@@ @@@@@\n" <>
"@@@@ @@@@@@@\n" <>
"@@ @@@@@@@@\n" <>
"@ @@@@@@@@@@\n" <>
"@@@@@@@@@@@@\n"
"""
@opaque t :: %Polygon{vertices: [Point.t()], fill: boolean}
@doc """
Initialize an empty Polygon.
## Example
iex> Vivid.Polygon.init
%Vivid.Polygon{vertices: []}
"""
@spec init() :: Polygon.t()
def init, do: %Polygon{vertices: [], fill: false}
@doc """
Initialize a Polygon from a list of points.
## Example
iex> Vivid.Polygon.init([Vivid.Point.init(1,1), Vivid.Point.init(1,2), Vivid.Point.init(2,2), Vivid.Point.init(2,1)])
%Vivid.Polygon{vertices: [
%Vivid.Point{x: 1, y: 1},
%Vivid.Point{x: 1, y: 2},
%Vivid.Point{x: 2, y: 2},
%Vivid.Point{x: 2, y: 1}
]}
"""
@spec init([Point.t()]) :: Polygon.t()
def init(points) when is_list(points), do: %Polygon{vertices: points, fill: false}
@doc false
@spec init([Point.t()], boolean) :: Polygon.t()
def init(points, fill) when is_list(points) and is_boolean(fill),
do: %Polygon{vertices: points, fill: fill}
@doc """
Convert a Polygon into a list of lines joined by the vertices.
## Examples
iex> Vivid.Polygon.init([Vivid.Point.init(1,1), Vivid.Point.init(1,2), Vivid.Point.init(2,2), Vivid.Point.init(2,1)]) |> Vivid.Polygon.to_lines
[%Vivid.Line{origin: %Vivid.Point{x: 1, y: 1},
termination: %Vivid.Point{x: 1, y: 2}},
%Vivid.Line{origin: %Vivid.Point{x: 1, y: 2},
termination: %Vivid.Point{x: 2, y: 2}},
%Vivid.Line{origin: %Vivid.Point{x: 2, y: 2},
termination: %Vivid.Point{x: 2, y: 1}},
%Vivid.Line{origin: %Vivid.Point{x: 2, y: 1},
termination: %Vivid.Point{x: 1, y: 1}}]
"""
@spec to_lines(Polygon.t()) :: [Line.t()]
def to_lines(%Polygon{vertices: points}) do
points_to_lines([], points)
end
@doc """
Remove a vertex from a Polygon.
## Example
iex> Vivid.Polygon.init([Vivid.Point.init(1,1), Vivid.Point.init(2,2)]) |> Vivid.Polygon.delete(Vivid.Point.init(2,2))
%Vivid.Polygon{vertices: [%Vivid.Point{x: 1, y: 1}]}
"""
@spec delete(Polygon.t(), Point.t()) :: Polygon.t()
def delete(%Polygon{vertices: points}, %Point{} = point) do
points
|> List.delete(point)
|> init
end
@doc """
Remove a vertex at a specific index in the Polygon.
## Example
iex> Vivid.Polygon.init([Vivid.Point.init(1,1), Vivid.Point.init(2,2)]) |> Vivid.Polygon.delete_at(1)
%Vivid.Polygon{vertices: [%Vivid.Point{x: 1, y: 1}]}
"""
@spec delete_at(Polygon.t(), integer) :: Polygon.t()
def delete_at(%Polygon{vertices: points}, index) do
points
|> List.delete_at(index)
|> init
end
@doc """
Return the first vertex in the Polygon.
## Example
iex> Vivid.Polygon.init([Vivid.Point.init(1,1), Vivid.Point.init(2,2)]) |> Vivid.Polygon.first
%Vivid.Point{x: 1, y: 1}
"""
@spec first(Polygon.t()) :: Point.t()
def first(%Polygon{vertices: points}) do
points
|> List.first()
end
@doc """
Insert a vertex at a specific index in the Polygon.
## Example
iex> Vivid.Polygon.init([Vivid.Point.init(1,1), Vivid.Point.init(2,2)]) |> Vivid.Polygon.insert_at(1, Vivid.Point.init(3,3))
%Vivid.Polygon{vertices: [
%Vivid.Point{x: 1, y: 1},
%Vivid.Point{x: 3, y: 3},
%Vivid.Point{x: 2, y: 2}
]}
"""
@spec insert_at(Polygon.t(), integer, Point.t()) :: Polygon.t()
def insert_at(%Polygon{vertices: points}, index, %Point{} = point) do
points
|> List.insert_at(index, point)
|> init
end
@doc """
Return the last vertex in the Polygon.
## Example
iex> Vivid.Polygon.init([Vivid.Point.init(1,1), Vivid.Point.init(2,2)]) |> Vivid.Polygon.last
%Vivid.Point{x: 2, y: 2}
"""
@spec last(Polygon.t()) :: Point.t()
def last(%Polygon{vertices: points}) do
points
|> List.last()
end
@doc """
Replace a vertex at a specific index in the Polygon.
## Example
iex> Vivid.Polygon.init([Vivid.Point.init(1,1), Vivid.Point.init(2,2), Vivid.Point.init(3,3)]) |> Vivid.Polygon.replace_at(1, Vivid.Point.init(4,4))
%Vivid.Polygon{vertices: [
%Vivid.Point{x: 1, y: 1},
%Vivid.Point{x: 4, y: 4},
%Vivid.Point{x: 3, y: 3}
]}
"""
@spec replace_at(Polygon.t(), integer, Point.t()) :: Polygon.t()
def replace_at(%Polygon{vertices: points}, index, %Point{} = point) do
points
|> List.replace_at(index, point)
|> init
end
@doc """
Is the polygon filled or not?
## Example
iex> use Vivid
...> Polygon.init([Point.init(1,1), Point.init(2,2), Point.init(1,2)])
...> |> Polygon.filled?
false
iex> use Vivid
...> Polygon.init([Point.init(1,1), Point.init(2,2), Point.init(1,2)], true)
...> |> Polygon.filled?
true
iex> use Vivid
...> Polygon.init([Point.init(1,1), Point.init(2,2), Point.init(1,2)], false)
...> |> Polygon.filled?
false
"""
@spec filled?(Polygon.t()) :: boolean
def filled?(%Polygon{fill: fill}), do: fill
@doc """
Turn on or off filling for this polygon.
## Example
iex> use Vivid
...> Polygon.init([Point.init(1,1), Point.init(2,2), Point.init(1,2)])
...> |> Polygon.fill(true)
...> |> Polygon.filled?
true
"""
@spec fill(Polygon.t(), boolean) :: Polygon.t()
def fill(%Polygon{} = polygon, fill) when is_boolean(fill), do: %{polygon | fill: fill}
# points_to_lines/2 folds the vertex list into a line per consecutive pair.
# NOTE(review): with fewer than 2 vertices the clauses below raise
# (List.last([]) -> nil fed to Line.termination/1) — confirm callers
# guarantee polygons have at least 2 vertices.
#
# All vertices consumed: close the polygon by joining the last line's end
# back to the first line's origin.
defp points_to_lines(lines, []) do
origin = lines |> List.last() |> Line.termination()
term = lines |> List.first() |> Line.origin()
lines ++ [Line.init(origin, term)]
end
# First step: the first two vertices seed the initial line.
defp points_to_lines([], [origin | [term | points]]) do
line = Line.init(origin, term)
points_to_lines([line], points)
end
# Each further vertex extends from the end of the most recent line.
defp points_to_lines(lines, [point | rest]) do
origin = lines |> List.last() |> Line.termination()
term = point
lines = lines ++ [Line.init(origin, term)]
points_to_lines(lines, rest)
end
end
|
lib/vivid/polygon.ex
| 0.926935
| 0.61477
|
polygon.ex
|
starcoder
|
defmodule Alerts.Alert do
@moduledoc "Module for representation of an alert, including information such as description, severity or additional URL to learn more"
alias Alerts.Priority
alias Alerts.InformedEntitySet, as: IESet
# NOTE(review): `Timex.now()` as a defstruct default is evaluated once at
# compile time, so every %Alert{} built without :updated_at shares the
# module's compile timestamp — confirm this is intended; otherwise set it
# at runtime in new/1.
defstruct id: "",
header: "",
informed_entity: %IESet{},
active_period: [],
effect: :unknown,
severity: 5,
lifecycle: :unknown,
updated_at: Timex.now(),
description: "",
priority: :low,
url: ""
@type period_pair :: {DateTime.t() | nil, DateTime.t() | nil}
@type effect ::
:access_issue
| :amber_alert
| :bike_issue
| :cancellation
| :delay
| :detour
| :dock_issue
| :dock_closure
| :elevator_closure
| :escalator_closure
| :extra_service
| :facility_issue
| :no_service
| :parking_closure
| :parking_issue
| :policy_change
| :service_change
| :shuttle
| :suspension
| :station_closure
| :stop_closure
| :stop_moved
| :schedule_change
| :snow_route
| :station_issue
| :stop_shoveling
| :summary
| :track_change
| :unknown
@type severity :: 0..10
@type lifecycle :: :ongoing | :upcoming | :ongoing_upcoming | :new | :unknown
@type t :: %Alerts.Alert{
id: String.t(),
header: String.t(),
informed_entity: IESet.t(),
active_period: [period_pair],
effect: effect,
severity: severity,
lifecycle: lifecycle,
updated_at: DateTime.t(),
description: String.t() | nil,
priority: Priority.priority_level(),
url: String.t() | nil
}
@type icon_type :: :alert | :cancel | :none | :shuttle | :snow
use Timex
# Effects that can describe a long-running (ongoing) disruption.
@ongoing_effects [
:cancellation,
:detour,
:no_service,
:service_change,
:snow_route,
:shuttle,
:stop_closure,
:stop_shoveling
]
@all_types [
:access_issue,
:amber_alert,
:delay,
:dock_closure,
:dock_issue,
:extra_service,
:elevator_closure,
:escalator_closure,
:policy_change,
:schedule_change,
:station_closure,
:station_issue,
:stop_moved,
:summary,
:suspension,
:track_change,
:unknown | @ongoing_effects
]
# Effects that reroute riders rather than merely delaying them.
@diversion_effects [
:shuttle,
:stop_closure,
:station_closure,
:detour
]
@spec new(Keyword.t()) :: t()
def new(keywords \\ [])
def new([]) do
%__MODULE__{}
end
# Build from keywords, then derive :priority and normalize :informed_entity.
def new(keywords) do
keywords
|> build_struct()
|> set_priority()
|> ensure_entity_set()
end
@spec update(t(), Keyword.t()) :: t()
def update(%__MODULE__{} = alert, keywords) do
alert
|> struct!(keywords)
|> set_priority()
|> ensure_entity_set()
end
# NOTE(review): spec says map -> map, but the clause only accepts %__MODULE__{}.
@spec set_priority(map) :: map
defp set_priority(%__MODULE__{} = alert) do
%__MODULE__{alert | priority: Priority.priority(alert)}
end
@spec build_struct(Keyword.t()) :: t()
defp build_struct(keywords), do: struct!(__MODULE__, keywords)
# Re-wrap :informed_entity so callers may pass plain data; IESet.new/1
# normalizes it into an InformedEntitySet.
@spec ensure_entity_set(map) :: t()
defp ensure_entity_set(alert) do
%__MODULE__{alert | informed_entity: IESet.new(alert.informed_entity)}
end
@spec all_types :: [effect]
def all_types, do: @all_types
@spec ongoing_effects :: [effect]
def ongoing_effects, do: @ongoing_effects
@spec get_entity(t, :route | :stop | :route_type | :trip | :direction_id) :: Enumerable.t()
@doc "Helper function for retrieving InformedEntity values for an alert"
def get_entity(%__MODULE__{informed_entity: %IESet{route: set}}, :route), do: set
def get_entity(%__MODULE__{informed_entity: %IESet{stop: set}}, :stop), do: set
def get_entity(%__MODULE__{informed_entity: %IESet{route_type: set}}, :route_type), do: set
def get_entity(%__MODULE__{informed_entity: %IESet{trip: set}}, :trip), do: set
def get_entity(%__MODULE__{informed_entity: %IESet{direction_id: set}}, :direction_id), do: set
def access_alert_types do
[elevator_closure: "Elevator", escalator_closure: "Escalator", access_issue: "Other"]
end
@doc "Returns a friendly name for the alert's effect"
@spec human_effect(t) :: String.t()
def human_effect(%__MODULE__{effect: effect}) do
do_human_effect(effect)
end
@spec do_human_effect(effect) :: String.t()
defp do_human_effect(:amber_alert), do: "Amber Alert"
defp do_human_effect(:cancellation), do: "Cancellation"
defp do_human_effect(:delay), do: "Delay"
defp do_human_effect(:suspension), do: "Suspension"
defp do_human_effect(:track_change), do: "Track Change"
defp do_human_effect(:detour), do: "Detour"
defp do_human_effect(:shuttle), do: "Shuttle"
defp do_human_effect(:stop_closure), do: "Stop Closure"
defp do_human_effect(:dock_closure), do: "Dock Closure"
defp do_human_effect(:station_closure), do: "Station Closure"
defp do_human_effect(:stop_moved), do: "Stop Move"
defp do_human_effect(:extra_service), do: "Extra Service"
defp do_human_effect(:schedule_change), do: "Schedule Change"
defp do_human_effect(:service_change), do: "Service Change"
defp do_human_effect(:snow_route), do: "Snow Route"
defp do_human_effect(:stop_shoveling), do: "Snow Shoveling"
defp do_human_effect(:station_issue), do: "Station Issue"
defp do_human_effect(:dock_issue), do: "Dock Issue"
defp do_human_effect(:access_issue), do: "Access Issue"
defp do_human_effect(:facility_issue), do: "Facility Issue"
defp do_human_effect(:bike_issue), do: "Bike Issue"
defp do_human_effect(:parking_issue), do: "Parking Issue"
defp do_human_effect(:parking_closure), do: "Parking Closure"
defp do_human_effect(:elevator_closure), do: "Elevator Closure"
defp do_human_effect(:escalator_closure), do: "Escalator Closure"
defp do_human_effect(:policy_change), do: "Policy Change"
defp do_human_effect(:summary), do: "Summary"
defp do_human_effect(_), do: "Unknown"
@doc "Returns a friendly name for the alert's lifecycle"
@spec human_lifecycle(t) :: String.t()
def human_lifecycle(%__MODULE__{lifecycle: lifecycle}) do
do_human_lifecycle(lifecycle)
end
@spec do_human_lifecycle(lifecycle) :: String.t()
defp do_human_lifecycle(:new), do: "New"
defp do_human_lifecycle(:upcoming), do: "Upcoming"
defp do_human_lifecycle(:ongoing_upcoming), do: "Upcoming"
defp do_human_lifecycle(:ongoing), do: "Ongoing"
defp do_human_lifecycle(_), do: "Unknown"
@doc """
Show a label according to the following mutually exclusive rules:
* if it is a delay, show a time estimatation
* if now is withing the active period, show "today"
* otherwise, show the lifecycle (unless is new or unknown)
"""
@spec human_label(t) :: String.t()
def human_label(%__MODULE__{effect: :delay, severity: 0}), do: ""
def human_label(%__MODULE__{effect: :delay, severity: 1}), do: ""
def human_label(%__MODULE__{effect: :delay, severity: 2}), do: ""
def human_label(%__MODULE__{effect: :delay, severity: 3}), do: "up to 10 minutes"
def human_label(%__MODULE__{effect: :delay, severity: 4}), do: "up to 15 minutes"
def human_label(%__MODULE__{effect: :delay, severity: 5}), do: "up to 20 minutes"
def human_label(%__MODULE__{effect: :delay, severity: 6}), do: "up to 25 minutes"
def human_label(%__MODULE__{effect: :delay, severity: 7}), do: "up to 30 minutes"
def human_label(%__MODULE__{effect: :delay, severity: 8}), do: "30+ minutes"
def human_label(%__MODULE__{effect: :delay, severity: 9}), do: "more than an hour"
# NOTE(review): a :delay with severity 10 has no clause above and falls
# through to the lifecycle label — confirm that is intended.
def human_label(alert), do: do_ongoing_upcoming(alert)
@spec do_ongoing_upcoming(t) :: String.t()
defp do_ongoing_upcoming(%{lifecycle: lifecycle})
when lifecycle not in [:new, :unknown] do
do_human_lifecycle(lifecycle)
end
defp do_ongoing_upcoming(_), do: ""
@spec icon(t) :: icon_type
def icon(%{priority: :low}), do: :none
def icon(%{priority: :high, effect: :suspension}), do: :cancel
def icon(%{priority: :high, effect: :cancellation}), do: :cancel
def icon(%{priority: :high, effect: :snow_route}), do: :snow
def icon(%{priority: :high, effect: :shuttle}), do: :shuttle
def icon(_), do: :alert
@spec is_high_severity_or_high_priority(t) :: boolean()
def is_high_severity_or_high_priority(%{priority: :high}), do: true
def is_high_severity_or_high_priority(%{severity: severity}) when severity >= 7,
do: true
def is_high_severity_or_high_priority(_), do: false
@spec is_diversion(t) :: boolean()
def is_diversion(%{effect: effect}),
do: effect in @diversion_effects
end
|
apps/alerts/lib/alert.ex
| 0.812049
| 0.406096
|
alert.ex
|
starcoder
|
defmodule Univrse do
@moduledoc """


Univrse is a universal schema for serializing data objects, secured with
signatures and encryption.
* **Serialising data** - Simple, binary-friendly data exchange using the Concise Binary Object Representation (CBOR) data format.
* **Authenticating data** - Protect integrity of data with digital signatures or message authentication code (MAC) algorithms.
* **Securing data** - Ensure confidentiality and integrity of data for one or multiple recipients, using standardised authenticated encryption algorithms.
## Installation
The package can be installed by adding `univrse` to your list of dependencies
in `mix.exs`.
```elixir
def deps do
[
{:univrse, "~> #{ Mix.Project.config[:version] }"}
]
end
```
## Usage
For full documentation, please refer to:
* [univrse.network docs](https://univrse.network/docs)
* [univrse API docs](https://hexdocs.pm/univrse)
### 1. Serialising data
Any arbitrary payload can be wrapped in a `t:Univrse.Envelope.t/0` structure,
and then encoded in one of three serialisation formats, using
`Univrse.Envelope.encode/2` and `Univrse.Envelope.to_script/2`
* `:cbor` - Concise CBOR-encoded binary value
* `:base64` - Compact Base64-url encoded string value
* `:script` - Encoded in a Bitcoin `OP_RETURN` script
```elixir
# Wrap any arbitrary data payload in an Envelope structure
iex> payload = "Hello world!"
iex> env = Univrse.wrap(payload, %{proto: "univrse.demo"})
# Encode the data in one of three serialisation formats
iex> env_cbor = Univrse.encode(env, :cbor)
iex> env_base64 = Univrse.encode(env, :base64)
iex> env_script = Univrse.Envelope.to_script(env)
# Decode the serialised data back into an Envelope structure
iex> {:ok, env2} = Univrse.decode(env_cbor)
iex> {:ok, env3} = Univrse.decode(env_base64)
iex> {:ok, env4} = Univrse.Envelope.parse_script(env_script)
# Compare payload
iex> env2.payload == payload and env3.payload == payload and env4.payload == payload
true
```
### 2. Using signatures
Digital signatures or message authentication code (MAC) algorithms can be used
to protect the integrity of an Envelope's data payload.
```elixir
# Generate keys
iex> alice_key = Univrse.Key.generate_key({:ec, :secp256k1})
iex> alice_pubkey = Univrse.Key.to_public(alice_key)
iex> app_secret = Univrse.Key.generate_key({:oct, 256})
# Sign and verify using a single key
iex> {:ok, env1} = "Hello world!"
...> |> Univrse.wrap(%{proto: "univrse.demo"})
...> |> Univrse.sign(alice_key, %{"alg" => "ES256K", "kid" => "alice"})
iex> Univrse.verify(env1, alice_pubkey)
true
# Sign and verify using multiple keys and algorithms
iex> {:ok, env2} = "Hello world!"
...> |> Univrse.wrap(%{proto: "univrse.demo"})
...> |> Univrse.sign([
...> {alice_key, %{"alg" => "ES256K", "kid" => "alice"}},
...> {app_secret, %{"alg" => "HS256", "kid" => "app"}}
...> ])
iex> Univrse.verify(env2, [alice_pubkey, app_secret])
true
```
### 3. Using encryption
Authenticated encryption algorithms may be used to ensure the confidentiality
of an Envelope's data payload for one or multiple recipients.
```elixir
# Generate keys
iex> bob_key = Univrse.Key.generate_key({:ec, :secp256k1})
iex> bob_pubkey = Univrse.Key.to_public(bob_key)
iex> charlie_key = Univrse.Key.generate_key({:ec, :secp256k1})
iex> charlie_pubkey = Univrse.Key.to_public(charlie_key)
iex> app_secret = Univrse.Key.generate_key({:oct, 256})
# Encrypt and decrypt data for a single recipient
iex> {:ok, env1} = "Hello world!"
...> |> Univrse.wrap(%{proto: "univrse.demo"})
...> |> Univrse.encrypt(bob_pubkey, %{"alg" => "ECDH-ES+A128GCM", "kid" => "bob"})
iex> {:ok, env1} = Univrse.decrypt(env1, bob_key)
iex> env1.payload
"Hello world!"
# Encrypt and decrypt data for multiple recipients using multiple algorithms
iex> {:ok, env2} = "Hello world!"
...> |> Univrse.wrap(%{proto: "univrse.demo"})
...> |> Univrse.encrypt([
...> {app_secret, %{"alg" => "A256GCM"}},
...> {bob_pubkey, %{"alg" => "ECDH-ES+A128GCM", "kid" => "bob"}},
...> {charlie_pubkey, %{"alg" => "ECDH-ES+A128GCM", "kid" => "charlie"}}
...> ])
iex> {:ok, bob_env} = Univrse.Envelope.decrypt_at(env2, 1, bob_key)
iex> bob_env.payload
"Hello world!"
iex> {:ok, charlie_env} = Univrse.Envelope.decrypt_at(env2, 2, charlie_key)
iex> charlie_env.payload
"Hello world!"
```
"""
alias Univrse.{Envelope, Recipient, Signature}
# This module is a thin facade: every public function simply delegates to
# the Envelope (wrap/encode/decode), Recipient (encrypt/decrypt) or
# Signature (sign/verify) modules.
defdelegate decode(env), to: Envelope
defdelegate decode(env, encoding), to: Envelope
defdelegate encode(env, encoding \\ :cbor), to: Envelope
defdelegate wrap(payload, headers \\ %{}), to: Envelope
defdelegate decrypt(env, key, opts \\ []), to: Recipient
defdelegate encrypt(env, key, headers, opts \\ []), to: Recipient
defdelegate sign(env, key, headers \\ %{}), to: Signature
defdelegate verify(env, key), to: Signature
end
|
lib/univrse.ex
| 0.876456
| 0.915545
|
univrse.ex
|
starcoder
|
defprotocol RDF.Term do
@moduledoc """
Shared behaviour for all RDF terms.
A `RDF.Term` is anything which can be an element of RDF statements of a RDF graph:
- `RDF.IRI`s
- `RDF.BlankNode`s
- `RDF.Literal`s
see <https://www.w3.org/TR/sparql11-query/#defn_RDFTerm>
"""
@type t :: RDF.IRI.t() | RDF.BlankNode.t() | RDF.Literal.t()
@doc """
Checks if the given value is a RDF term.
Note: As opposed to `RDF.term?` this function returns false on atoms and does
not try resolves those to IRIs.
## Examples
iex> RDF.Term.term?(RDF.iri("http://example.com/resource"))
true
iex> RDF.Term.term?(EX.Resource)
false
iex> RDF.Term.term?(RDF.bnode)
true
iex> RDF.Term.term?(RDF.XSD.integer(42))
true
iex> RDF.Term.term?(42)
false
"""
def term?(value)
@doc """
Tests for term equality.
see <http://www.w3.org/TR/rdf-sparql-query/#func-sameTerm>
"""
# @fallback_to_any: values without a dedicated impl dispatch to the Any impl.
@fallback_to_any true
def equal?(term1, term2)
@doc """
Tests for equality of values.
Non-RDF terms are tried to be coerced via `RDF.Term.coerce/1` before comparison.
Returns `nil` if the given terms are not comparable.
see <http://www.w3.org/TR/rdf-sparql-query/#func-RDFterm-equal>
and the value equality semantics of the different literal datatypes here:
<https://www.w3.org/TR/sparql11-query/#OperatorMapping>
"""
@fallback_to_any true
def equal_value?(term1, term2)
@doc """
Converts a given value into a RDF term.
Returns `nil` if the given value is not convertible into any valid RDF.Term.
## Examples
iex> RDF.Term.coerce("foo")
~L"foo"
iex> RDF.Term.coerce(42)
RDF.XSD.integer(42)
"""
def coerce(value)
@doc """
Returns the native Elixir value of a RDF term.
Returns `nil` if the given value is not a a valid RDF term or a value convertible to a RDF term.
## Examples
iex> RDF.Term.value(~I<http://example.com/>)
"http://example.com/"
iex> RDF.Term.value(~L"foo")
"foo"
iex> RDF.XSD.integer(42) |> RDF.Term.value()
42
"""
def value(term)
end
defimpl RDF.Term, for: RDF.IRI do
def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.IRI.equal_value?(term1, term2)
def coerce(term), do: term
def value(term), do: term.value
def term?(_), do: true
end
defimpl RDF.Term, for: RDF.BlankNode do
def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.BlankNode.equal_value?(term1, term2)
def coerce(term), do: term
def value(term), do: to_string(term)
def term?(_), do: true
end
defimpl RDF.Term, for: Reference do
@dialyzer {:nowarn_function, equal_value?: 2}
@dialyzer {:nowarn_function, coerce: 1}
def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2)
def coerce(term), do: RDF.BlankNode.new(term)
def value(term), do: term
def term?(_), do: false
end
defimpl RDF.Term, for: RDF.Literal do
def equal?(term1, term2), do: RDF.Literal.equal?(term1, term2)
def equal_value?(term1, term2), do: RDF.Literal.equal_value?(term1, term2)
def coerce(term), do: term
def value(term), do: RDF.Literal.value(term) || RDF.Literal.lexical(term)
def term?(_), do: true
end
defimpl RDF.Term, for: Atom do
def equal?(term1, term2), do: term1 == term2
def equal_value?(nil, _), do: nil
def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2)
def coerce(true), do: RDF.XSD.true()
def coerce(false), do: RDF.XSD.false()
def coerce(nil), do: nil
def coerce(term) do
case RDF.Namespace.resolve_term(term) do
{:ok, iri} -> iri
_ -> nil
end
end
def value(true), do: true
def value(false), do: false
def value(nil), do: nil
def value(term), do: RDF.Term.value(coerce(term))
def term?(_), do: false
end
defimpl RDF.Term, for: BitString do
def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2)
def coerce(term), do: RDF.XSD.String.new(term)
def value(term), do: term
def term?(_), do: false
end
defimpl RDF.Term, for: Integer do
def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2)
def coerce(term), do: RDF.XSD.Integer.new(term)
def value(term), do: term
def term?(_), do: false
end
defimpl RDF.Term, for: Float do
def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2)
def coerce(term), do: RDF.XSD.Double.new(term)
def value(term), do: term
def term?(_), do: false
end
defimpl RDF.Term, for: Decimal do
def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2)
def coerce(term), do: RDF.XSD.Decimal.new(term)
def value(term), do: term
def term?(_), do: false
end
defimpl RDF.Term, for: DateTime do
def equal?(term1, term2), do: term1 == term2
def equal_value?(term1, term2), do: RDF.Term.equal_value?(coerce(term1), term2)
def coerce(term), do: RDF.XSD.DateTime.new(term)
def value(term), do: term
def term?(_), do: false
end
defimpl RDF.Term, for: NaiveDateTime do
  # NaiveDateTime structs also map to xsd:dateTime (no timezone component).
  def equal?(left, right), do: left == right

  def equal_value?(left, right) do
    left |> coerce() |> RDF.Term.equal_value?(right)
  end

  def coerce(datetime), do: RDF.XSD.DateTime.new(datetime)

  def value(datetime), do: datetime

  def term?(_), do: false
end
defimpl RDF.Term, for: Date do
  # Date structs coerce to xsd:date literals for value comparison.
  def equal?(left, right), do: left == right

  def equal_value?(left, right) do
    left |> coerce() |> RDF.Term.equal_value?(right)
  end

  def coerce(date), do: RDF.XSD.Date.new(date)

  def value(date), do: date

  def term?(_), do: false
end
defimpl RDF.Term, for: Time do
  # Time structs coerce to xsd:time literals for value comparison.
  def equal?(left, right), do: left == right

  def equal_value?(left, right) do
    left |> coerce() |> RDF.Term.equal_value?(right)
  end

  def coerce(time), do: RDF.XSD.Time.new(time)

  def value(time), do: time

  def term?(_), do: false
end
defimpl RDF.Term, for: URI do
  # URI structs coerce to xsd:anyURI literals for value comparison.
  def equal?(left, right), do: left == right

  def equal_value?(left, right) do
    left |> coerce() |> RDF.Term.equal_value?(right)
  end

  def coerce(uri), do: RDF.XSD.AnyURI.new(uri)

  def value(uri), do: uri

  def term?(_), do: false
end
defimpl RDF.Term, for: Any do
  # Fallback for types with no RDF mapping: nothing coerces, nothing has a
  # value, and value-equality is indeterminate (nil).
  def equal?(left, right), do: left == right

  def equal_value?(_, _), do: nil

  def coerce(_), do: nil

  def value(_), do: nil

  def term?(_), do: false
end
|
lib/rdf/term.ex
| 0.930844
| 0.741393
|
term.ex
|
starcoder
|
defmodule K8s.Client.Runner.Base do
  @moduledoc """
  Base HTTP processor for `K8s.Client`
  """

  @type result_t ::
          {:ok, map() | reference()}
          | {:error, K8s.Middleware.Error.t()}
          | {:error, :connection_not_registered}
          | {:error, :missing_required_param}
          | {:error, binary()}

  @typedoc "Acceptable HTTP body types"
  @type body_t :: list(map()) | map() | binary() | nil

  alias K8s.Conn
  alias K8s.Operation
  alias K8s.Middleware.Request

  @doc """
  Runs a `K8s.Operation`.

  ## Examples

  *Note:* Examples assume a `K8s.Conn` was configured named `:test`. See `K8s.Conn.Config`.

  Running a list pods operation:

  ```elixir
  conn = K8s.Conn.lookup(:test)
  operation = K8s.Client.list("v1", "Pod", namespace: :all)
  {:ok, %{"items" => pods}} = K8s.Client.run(operation, conn)
  ```

  Running a dry-run of a create deployment operation:

  ```elixir
  deployment = %{
    "apiVersion" => "apps/v1",
    "kind" => "Deployment",
    "metadata" => %{
      "labels" => %{
        "app" => "nginx"
      },
      "name" => "nginx",
      "namespace" => "test"
    },
    "spec" => %{
      "replicas" => 2,
      "selector" => %{
        "matchLabels" => %{
          "app" => "nginx"
        }
      },
      "template" => %{
        "metadata" => %{
          "labels" => %{
            "app" => "nginx"
          }
        },
        "spec" => %{
          "containers" => %{
            "image" => "nginx",
            "name" => "nginx"
          }
        }
      }
    }
  }

  operation = K8s.Client.create(deployment)
  conn = K8s.Conn.lookup(:test)

  # opts is passed to HTTPoison as opts.
  opts = [params: %{"dryRun" => "all"}]
  {:ok, _result} = K8s.Client.Runner.Base.run(operation, conn, opts)
  ```
  """
  # NOTE(review): the previous spec accepted `Conn.t() | nil`, but the only
  # clause pattern-matches `%Conn{}`, so `nil` could never succeed — the spec
  # is tightened to match the implementation.
  @spec run(Operation.t(), Conn.t()) :: result_t
  def run(%Operation{} = operation, %Conn{} = conn),
    do: run(operation, conn, [])

  @doc """
  Run an operation and pass `opts` to HTTPoison.

  Destructures `Operation` data and passes as the HTTP body.

  See `run/2`
  """
  @spec run(Operation.t(), Conn.t(), keyword()) :: result_t
  def run(%Operation{} = operation, %Conn{} = conn, opts) when is_list(opts) do
    run(operation, conn, operation.data, opts)
  end

  @doc """
  Run an operation with an HTTP Body (map) and pass `opts` to HTTPoison.

  See `run/2`
  """
  @spec run(Operation.t(), Conn.t(), map(), keyword()) :: result_t
  def run(%Operation{} = operation, %Conn{} = conn, body, opts \\ []) do
    # Discover the API server URL for the operation, build the middleware
    # request, run the middleware stack, then dispatch via the configured
    # HTTP provider. Any {:error, _} step short-circuits and is returned.
    with {:ok, url} <- K8s.Discovery.url_for(conn, operation),
         req <- new_request(conn, url, operation, body, opts),
         {:ok, req} <- K8s.Middleware.run(req) do
      K8s.http_provider().request(req.method, req.url, req.body, req.headers, req.opts)
    end
  end

  # Builds the middleware request, merging label-selector params derived from
  # the operation into any HTTP params supplied by the caller.
  @spec new_request(Conn.t(), String.t(), K8s.Operation.t(), body_t, Keyword.t()) ::
          Request.t()
  defp new_request(%Conn{} = conn, url, %Operation{} = operation, body, opts) do
    req = %Request{conn: conn, method: operation.method, body: body}
    http_opts_params = build_http_params(opts[:params], operation.label_selector)
    opts_with_selector_params = Keyword.put(opts, :params, http_opts_params)
    http_opts = Keyword.merge(req.opts, opts_with_selector_params)
    %Request{req | opts: http_opts, url: url}
  end

  # Combines caller-supplied HTTP params with the operation's label selector.
  @spec build_http_params(nil | keyword | map, nil | K8s.Selector.t()) :: map()
  defp build_http_params(nil, nil), do: %{}
  defp build_http_params(nil, %K8s.Selector{} = s), do: %{labelSelector: K8s.Selector.to_s(s)}
  defp build_http_params(params, nil), do: params

  defp build_http_params(params, %K8s.Selector{} = s) when is_list(params),
    do: params |> Enum.into(%{}) |> build_http_params(s)

  # Supplying a `labelSelector` to `run/4` should take precedence over the
  # operation's own selector: `Map.merge/2` lets the caller's params win.
  defp build_http_params(params, %K8s.Selector{} = s) when is_map(params) do
    from_operation = %{labelSelector: K8s.Selector.to_s(s)}
    Map.merge(from_operation, params)
  end
end
|
lib/k8s/client/runner/base.ex
| 0.901902
| 0.456894
|
base.ex
|
starcoder
|
defmodule Translecto.Schema.Translatable do
  import Ecto.Schema

  @moduledoc """
  Reference a translatable field in the schema.
  This module coincides with the migration function `Translecto.Migration.translate/2`.
  To correctly use this module a schema should call `use Translecto.Schema.Translatable`.
  Model's with translatable fields can be introspected by using the
  `get_translation/1` and `translations/0` functions added to the model. See
  your model's documentation for additional information.
  """

  # Imports the `translatable/3` macro plus changeset helpers into the using
  # schema module, and registers the __before_compile__ hook that later
  # generates the introspection functions from the accumulated attribute.
  defmacro __using__(_options) do
    quote do
      import Translecto.Schema.Translatable
      import Translecto.Changeset
      @before_compile unquote(__MODULE__)
    end
  end

  # Runs after the schema module body is compiled: reads the
  # @translecto_translate attribute populated by `translatable/3` and emits
  # one `get_translation/1` clause per translatable field, plus
  # `translations/0` returning every {field, translation_module} pair.
  defmacro __before_compile__(env) do
    quote do
      @doc """
      Get the translation model for the given field for this model.
      """
      @spec get_translation(atom) :: module
      unquote(Enum.map(Module.get_attribute(env.module, :translecto_translate), fn { name, queryable } ->
        quote do
          def get_translation(unquote(name)) do
            unquote(queryable)
          end
        end
      end))

      @doc """
      Get all translation fields for this model.
      """
      @spec translations() :: [{ atom, module }]
      def translations, do: unquote(Module.get_attribute(env.module, :translecto_translate))
    end
  end

  @doc """
  Expose a field as being translatable to the schema.
  The name of the field specified should coincide with a migration table field that was made
  using `Translecto.Migration.translate/2`.
  The queryable should be the translation module (schema) that represents the translation table.
      defmodule Ingredient do
          use Translecto.Schema.Translatable
          schema "ingredients" do
              translatable :name, Ingredient.Translation
          end
          def changeset(struct, params \\\\ %{}) do
              struct
              |> translatable_changeset(params, [:name])
              |> validate_required([:name])
          end
      end
  """
  @spec translatable(atom, module(), keyword()) :: Macro.t
  defmacro translatable(name, queryable, _opts \\ []) do
    # Prepend this field onto the module attribute that __before_compile__/1
    # reads back; the attribute may not exist yet, hence the `|| []` default.
    Module.put_attribute(__CALLER__.module, :translecto_translate, [{ name, queryable }|(Module.get_attribute(__CALLER__.module, :translecto_translate) || [])])
    # The translatable field is stored as a plain id column in the schema.
    quote do
      field unquote(name), :id
    end
  end
end
|
lib/translecto/schema/translatable.ex
| 0.860384
| 0.453685
|
translatable.ex
|
starcoder
|
defmodule Gyx.Core.Env do
  @moduledoc """
  This behaviour is intended to be followed for any `Environment` implementation
  The most critical function to be exposed is `step/1` , which serves as a direct bridge
  between the environment and any agent.
  Here, an important design question to address is the fundamental difference between
  the environment state (its internal representation) and an _observation_ of such state.
  In principle, the environment returns an observation as part of step/1 response.
  Should it be a way to obtain an evironment state abstraction as suposed to be shown
  to an agent? i.e. an indirect observation.
  """
  alias Gyx.Core.Exp

  @type initial_state :: Exp.t()
  @type observation :: any()
  @type action :: any()
  # Environments are addressed opaquely (the default implementations below
  # treat them as GenServer references).
  @type environment :: any()

  @doc "Sets the state of the environment to its default"
  @callback reset(environment) :: initial_state()

  @doc "Gets an environment representation usable by the agent"
  @callback observe(environment) :: observation()

  @doc """
  Receives an agent's `action` and responds to it,
  informing the agent back with a reward, a modified environment
  and a termination signal
  """
  @callback step(environment, action()) :: Exp.t() | {:error, reason :: String.t()}

  # Injects default `observe/1` and `step/2` implementations that delegate to
  # the environment process via GenServer calls, and registers the
  # __before_compile__ hook that adds the action-checking handle_call clause.
  defmacro __using__(_params) do
    quote do
      @before_compile Gyx.Core.Env
      @behaviour Gyx.Core.Env
      @enforce_keys [:action_space, :observation_space]

      @impl true
      def observe(environment), do: GenServer.call(environment, :observe)

      @impl true
      def step(environment, action) do
        # Validate the action against the environment's action space first;
        # a failed check ({:error, _}) is returned to the agent unchanged,
        # otherwise the (possibly normalized) action is acted upon.
        case action_checked = GenServer.call(environment, {:check, action}) do
          {:error, _} -> action_checked
          {:ok, action} -> GenServer.call(environment, {:act, action})
        end
      end
    end
  end

  # Injected into the using module at the end of compilation: a GenServer
  # handle_call clause that checks action membership in the environment's
  # action_space via Gyx.Core.Spaces.contains?/2.
  defmacro __before_compile__(_) do
    quote do
      def handle_call({:check, action}, _from, state = %__MODULE__{action_space: action_space}) do
        case Gyx.Core.Spaces.contains?(action_space, action) do
          false -> {:reply, {:error, "invalid_action"}, state}
          _ -> {:reply, {:ok, action}, state}
        end
      end
    end
  end
end
|
lib/core/env.ex
| 0.895657
| 0.696926
|
env.ex
|
starcoder
|
defmodule Gorpo do
  @moduledoc """
  An OTP application that announce services on consul. After a
  successful start `Gorpo.Announce` process will be
  running. Unconfigured, it assumes consul is running on localhost:8500
  requiring no ACL and no services are announced.
  Optionally you may provide services that gets announce when this
  application starts. For instance:
      iex> # you may need to restart the application after this
      iex> Application.put_env(:gorpo, :announce, [services: [[id: "foo", name: "bar", check: [ttl: "1s"]]]])
      :ok
  This will announce a service `"bar"` with a health check with a
  `TTL` of `1s`. You may pass additional information [like `tags`] as
  long as they exist in the `Gorpo.Service` [or `Gorpo.Check`]
  struct. Refer to those modules for more information.
  You continue being able to register/unregister a service
  dynamically. Notice that services configured in the application are
  nothing special: you may unregister them like other services you may
  have registered afterwards.
  """
  use Application
  require Logger

  @spec start(any, any) :: {:ok, pid} | {:error, term}
  @doc """
  Starts the `Gorpo` application. `Application.put_env(:gorpo, ...)`
  may be used to configure where to find the consul agent and services
  that get announce right from the start. The following keys are
  available:
  * consul: `[endpoint: URL, token: STRING]`
  * announce: `[services: [SERVICE_SPEC]]`;
  `SPEC_SPEC` is a keyword list of keys found in `Gorpo.Service`. A
  valid example:
      [id: "foo",
       name: "bar",
       tags: ["foo", "bar"],
       port: 9000,
       check: CHECK_SPEC,
       address: "127.0.0.1"]
  `CHECK_SPEC` is a keyword list of keys found in `Gorpo.Check`.
      [check: [ttl: "1s"]]
  """
  def start(_type, _args) do
    # :inets must be running before the HTTPC driver can issue requests.
    :ok = inets_start()
    consul = new_consul()
    services = read_services(announce_cfg())
    # Single permanent worker announcing the configured services on consul.
    announce = Supervisor.Spec.worker(Gorpo.Announce, [consul, services], restart: :permanent)
    Supervisor.start_link([announce], strategy: :one_for_one)
  end

  # Starts :inets, tolerating the case where it is already running.
  defp inets_start do
    case :inets.start(:permanent) do
      :ok -> :ok
      {:error, {:already_started, :inets}} -> :ok
    end
  end

  @spec new_consul() :: Gorpo.Consul.t
  @doc """
  Uses the `Application.get_env(:gorpo, :consul)` to configure and return
  `Gorpo.Consul` module.
  """
  def new_consul do
    read_consul(consul_cfg())
  end

  # Builds the Gorpo.Consul struct, defaulting the HTTP driver to HTTPC when
  # none is configured; a configured driver value is passed to HTTPC.new/1.
  defp read_consul(config) do
    driver = Gorpo.Drivers.HTTPC.new
    config = Keyword.update(config, :driver, driver, &Gorpo.Drivers.HTTPC.new/1)
    struct(Gorpo.Consul, config)
  end

  # Maps the configured service keyword lists to Gorpo.Service structs,
  # skipping any entry with `enabled: false`.
  defp read_services(config) do
    config
    |> Keyword.fetch!(:services)
    |> Enum.filter(& Keyword.get(&1, :enabled, true))
    |> Enum.map(& read_service/1)
  end

  # Converts one service keyword list into a Gorpo.Service struct, turning a
  # configured :check keyword list into a Gorpo.Check struct.
  defp read_service(service) do
    service = Keyword.update(service, :check, nil, & struct(Gorpo.Check, &1))
    struct(Gorpo.Service, service)
  end

  # Announce configuration merged over defaults (no services announced).
  defp announce_cfg do
    default = [services: []]
    app_cfg = Application.get_env(:gorpo, :announce, [])
    Keyword.merge(default, app_cfg)
  end

  # Consul configuration merged over defaults (local agent on port 8500).
  defp consul_cfg do
    default = [endpoint: "http://localhost:8500"]
    app_cfg = Application.get_env(:gorpo, :consul, [])
    Keyword.merge(default, app_cfg)
  end
end
|
lib/gorpo.ex
| 0.69946
| 0.534127
|
gorpo.ex
|
starcoder
|
defmodule Manic.Multi do
  @moduledoc """
  Module for encapsulating multiple miner Merchant API clients.
  """
  alias Manic.Miner

  defstruct miners: [],
            operation: nil,
            yield: :any,
            timeout: 5_000

  @typedoc "Bitcoin multi miner API client"
  @type t :: %__MODULE__{
          miners: list,
          operation: {atom, atom, list} | function,
          yield: :any | :all
        }

  @typedoc "Multi miner API response"
  # `{:error, binary}` added: yield/1 (:any mode) returns {:error, "Timeout"}
  # when no miner responds within the timeout, which the old type omitted.
  @type result :: {Manic.miner, {:ok, any}} |
                  [{Manic.miner, {:ok | :error, any}}, ...] |
                  {:error, binary}

  @doc """
  Returns a [`multi miner`](`t:t/0`) client for the given list of
  Merchant API endpoints.
  """
  @spec new(list, keyword) :: __MODULE__.t
  def new(miners, options \\ []) when is_list(miners) do
    yield = Keyword.get(options, :yield, :any)
    struct(__MODULE__, [
      miners: Enum.map(miners, &Miner.new/1),
      yield: yield
    ])
  end

  @doc """
  Sets the asynchronous operation on the given [`multi miner`](`t:t/0`)
  client.
  The operation is an inline function which receives the [`miner`](`t:Manic.miner/0`)
  client.
  ## Example
      iex> Manic.Multi.async(multi, fn miner ->
      ...>   MyModule.some_function(miner)
      ...> end)
  Or, the same more succinctly:
      iex> Manic.Multi.async(multi, &MyModule.some_function/1)
  """
  @spec async(__MODULE__.t, function) :: __MODULE__.t
  def async(%__MODULE__{} = multi, operation)
    when is_function(operation, 1),
    do: Map.put(multi, :operation, operation)

  @doc """
  Sets the asynchronous operation on the given [`multi miner`](`t:t/0`)
  client.
  The operation is passed as a tuple containing the module, function name and
  list or arguments. In this case, the [`miner`](`t:Manic.miner/0`) client will
  automatically be prepended to the list of arguments.
  ## Example
      iex> Manic.Multi.async(multi, MyModule, :some_function, args)
  """
  @spec async(__MODULE__.t, atom, atom, list) :: __MODULE__.t
  def async(%__MODULE__{} = multi, module, function_name, args)
    when is_atom(module) and is_atom(function_name) and is_list(args),
    do: Map.put(multi, :operation, {module, function_name, args})

  @doc """
  Concurrently runs the asynchronous operation on the given [`multi miner`](`t:t/0`)
  client, yielding the response from any or all of the miners.
  By default, multi miner operations will yield until **any** of the miners
  respond. Alternatively, a multi client can be initialized with the option
  `yield: :all` which awaits for **all** miner clients to respond.
  """
  @spec yield(__MODULE__.t) :: result
  def yield(%__MODULE__{yield: :any, timeout: timeout} = multi) do
    parent = self()

    # A linked helper process races the tasks and reports back: either the
    # first successful {miner, result}, or the full error list if all fail.
    spawn_link(fn ->
      multi.miners
      |> Enum.map(& init_task(&1, multi.operation))
      |> yield_any(parent)
    end)

    receive do
      {miner, result} ->
        {miner, {:ok, result}}

      errors when is_list(errors) ->
        Enum.map(errors, fn {miner, reason} -> {miner, {:error, reason}} end)
    after
      timeout ->
        {:error, "Timeout"}
    end
  end

  def yield(%__MODULE__{yield: :all, timeout: timeout} = multi) do
    keyed_tasks = multi.miners
    |> Enum.map(& init_task(&1, multi.operation))

    # Await every task, then map each completed task back to its miner,
    # preserving the original order and dropping tasks that did not finish.
    keyed_tasks
    |> Enum.map(& elem(&1, 1))
    |> Task.yield_many(timeout)
    |> Enum.reduce([], fn {task, res}, results ->
      miner = keyed_tasks
      |> Enum.find(fn {_miner, t} -> task == t end)
      |> elem(0)

      case res do
        {:ok, res} -> [{miner, res} | results]
        _ -> results
      end
    end)
    |> Enum.reverse
  end

  # Yields until any miner client responds successfully, collecting per-miner
  # failures; when every task has failed, sends the error list to the parent.
  defp yield_any(tasks, parent, errors \\ [])

  defp yield_any([_ | _] = tasks, parent, errors) when is_pid(parent) do
    receive do
      {ref, {:ok, reply}} ->
        send(parent, {find_miner(tasks, ref), reply})

      {ref, {:error, reason}} ->
        miner = find_miner(tasks, ref)

        tasks
        |> Enum.reject(fn {m, _task} -> m == miner end)
        |> yield_any(parent, [{miner, reason} | errors])

      {:DOWN, _ref, _, _pid, :normal} ->
        yield_any(tasks, parent, errors)

      {:DOWN, ref, _, _pid, reason} ->
        miner = find_miner(tasks, ref)

        tasks
        |> Enum.reject(fn {m, _task} -> m == miner end)
        |> yield_any(parent, [{miner, reason} | errors])

      _msg ->
        # BUGFIX: the previous catch-all clause printed the stray message and
        # then fell out of the receive loop, so the parent never got a reply
        # and always hit its timeout. Ignore unknown messages and keep waiting.
        yield_any(tasks, parent, errors)
    end
  end

  defp yield_any([], parent, errors),
    do: send(parent, Enum.reverse(errors))

  # Looks up the miner whose task monitor reference matches `ref`.
  defp find_miner(tasks, ref) do
    {miner, _task} = Enum.find(tasks, fn {_miner, task} -> task.ref == ref end)
    miner
  end

  # Inits the asynchronous operation task. Exceptions raised by the operation
  # are converted to {:error, exception} so a crash counts as a failed miner.
  defp init_task(%Miner{} = miner, operation) do
    task = Task.async(fn ->
      try do
        case operation do
          operation when is_function(operation, 1) ->
            apply(operation, [miner])

          {module, function_name, args} ->
            apply(module, function_name, [miner | args])
        end
      rescue
        error -> {:error, error}
      end
    end)

    {miner, task}
  end
end
|
lib/manic/multi.ex
| 0.803251
| 0.50415
|
multi.ex
|
starcoder
|
defmodule Talib.CMA do
  @moduledoc ~S"""
  Defines a Cumulative Moving Average.
  ## History
  Version: 1.0
  Source: https://qkdb.wordpress.com/tag/cumulative-moving-average/
  Audited by:
  | Name         | Title             |
  | :----------- | :---------------- |
  |              |                   |
  """

  @typedoc """
  Defines a Cumulative Moving Average.
  * :values - List of values resulting from the calculation
  * :weight - The current weight of the CMA
  """
  @type t :: %Talib.CMA{values: [number], weight: integer}
  defstruct [
    values: [],
    weight: 0
  ]

  @doc """
  Gets the CMA of a list.
  Returns `{:ok, cma}`, otherwise `{:error, reason}`.
  ## Examples
      iex> Talib.CMA.from_list([17, 23, 44])
      {:ok, %Talib.CMA{
        values: [17.0, 20.0, 28.0],
        weight: 3
      }}
      iex> Talib.CMA.from_list([])
      {:error, :no_data}
  """
  @spec from_list([number]) :: {:ok, Talib.CMA.t} | {:error, atom}
  def from_list(data), do: calculate(data)

  @doc """
  Gets the CMA of a list with pre-existing average and weight.
  Returns `{:ok, cma}`, otherwise `{:error, reason}`.
  ## Examples
      iex> Talib.CMA.from_list([17, 23, 44], 1, 3)
      {:ok, %Talib.CMA{
        values: [1.0, 5.0, 8.6, 14.5],
        weight: 6
      }}
      iex> Talib.CMA.from_list([], 1, 3)
      {:ok, %Talib.CMA{
        values: [1.0],
        weight: 3
      }}
      iex> Talib.CMA.from_list([], 0, 0)
      {:error, :no_data}
  """
  @spec from_list([number], number, integer) ::
          {:ok, Talib.CMA.t}
          | {:error, atom}
  # BUGFIX(naming): the parameters were previously declared as
  # `(average, weight, data)` although callers — per the examples and the
  # @spec — pass `(data, average, weight)`. Positionally the call happened to
  # line up with calculate/3, so behavior was right, but the names were
  # inverted and misleading. They now match the actual argument order.
  def from_list(data, average, weight),
    do: calculate(data, average, weight)

  @doc """
  Gets the CMA of a list.
  Raises `NoDataError` if the given list is an empty list.
  ## Examples
      iex> Talib.CMA.from_list!([17, 23, 44])
      %Talib.CMA{
        values: [17.0, 20.0, 28.0],
        weight: 3
      }
      iex> Talib.CMA.from_list!([])
      ** (NoDataError) no data error
  """
  @spec from_list!([number]) :: Talib.CMA.t | no_return
  def from_list!(data) do
    case calculate(data) do
      {:ok, result} -> result
      {:error, :no_data} -> raise NoDataError
    end
  end

  @doc """
  Gets the cumulative moving average of a list with pre-existing average and
  weight.
  Raises `NoDataError` if the given list is an empty list and no pre-existing
  average and weight are given.
  ## Examples
      iex> Talib.CMA.from_list!([17, 23, 44], 1, 3)
      %Talib.CMA{
        values: [1.0, 5.0, 8.6, 14.5],
        weight: 6
      }
      iex> Talib.CMA.from_list!([], 1, 3)
      %Talib.CMA{
        values: [1.0],
        weight: 3
      }
      iex> Talib.CMA.from_list!([], 0, 0)
      ** (NoDataError) no data error
  """
  @spec from_list!([number], number, integer) :: Talib.CMA.t | no_return
  def from_list!(data, average, weight) do
    case calculate(data, average, weight) do
      {:ok, result} -> result
      {:error, :no_data} -> raise NoDataError
    end
  end

  # Entry point without a pre-existing average: start from 0 with weight 0.
  # (`@doc false` was removed from these defp clauses — the attribute is
  # discarded on private functions and triggers a compiler warning.)
  @spec calculate([number]) :: {:ok, Talib.CMA.t} | {:error, atom}
  defp calculate(data), do: calculate(data, 0, 0)

  # No data and no pre-existing weight: nothing to average.
  @spec calculate([number], number, integer) ::
          {:ok, Talib.CMA.t}
          | {:error, atom}
  defp calculate([], _average, 0),
    do: {:error, :no_data}

  # No new data but a pre-existing average: the CMA is just that average
  # (divided by 1 to normalize it to a float).
  defp calculate([], average, weight),
    do: {:ok, %Talib.CMA{values: [average / 1], weight: weight}}

  defp calculate(data, average, weight) do
    # For each prefix of `data`, fold the pre-existing weighted average into
    # the prefix sum and divide by the combined weight.
    result = for {_number, index} <- Enum.with_index(data, 1) do
      Enum.take(data, index)
      |> Enum.sum
      |> Kernel.+(average * weight)
      |> Kernel./(weight + index)
    end

    case weight do
      0 ->
        {:ok, %Talib.CMA{
          values: result,
          weight: weight + length(data)
        }}

      _ ->
        # With a pre-existing average, it is prepended as the first value.
        {:ok, %Talib.CMA{
          values: [average / 1 | result],
          weight: weight + length(data)
        }}
    end
  end
end
|
lib/talib/cma.ex
| 0.910759
| 0.649766
|
cma.ex
|
starcoder
|
defmodule AWS.LexRuntime do
  @moduledoc """
  Amazon Lex provides both build and runtime endpoints. Each endpoint
  provides a set of operations (API). Your application uses the runtime API
  to understand user utterances (user input text or voice). For example,
  suppose user says "I want pizza", your application sends this input to
  Amazon Lex using the runtime API. Amazon Lex recognizes that the user
  request is for the OrderPizza intent (one of the intents defined in the
  application). Then Amazon Lex engages in user conversation on behalf of the
  application to elicit required information (slot values, such as pizza size
  and crust type), and then performs fulfillment activity (that you
  configured when you created the application). You use the build-time API to
  create and manage your Amazon Lex applications. For a list of build-time
  operations, see the build-time API. .
  """

  @doc """
  Sends user input text to Amazon Lex at runtime. Amazon Lex uses the machine
  learning model that the service built for the application to interpret user
  input.
  In response, Amazon Lex returns the next message to convey to the user
  (based on the context of the user interaction) and whether to expect a user
  response to the message (`dialogState`). For example, consider the
  following response messages:
  <ul> <li> "What pizza toppings would you like?" – In this case, the
  `dialogState` would be `ElicitSlot` (that is, a user response is expected).
  </li> <li> "Your order has been placed." – In this case, Amazon Lex returns
  one of the following `dialogState` values depending on how the intent
  fulfillment is configured (see `fulfillmentActivity` in `CreateIntent`):
  <ul> <li> `FulFilled` – The intent fulfillment is configured through a
  Lambda function.
  </li> <li> `ReadyForFulfilment` – The intent's `fulfillmentActivity` is to
  simply return the intent data back to the client application.
  </li> </ul> </li> </ul>
  """
  def post_text(client, bot_alias, bot_name, user_id, input, options \\ []) do
    url = "/bot/#{URI.encode(bot_name)}/alias/#{URI.encode(bot_alias)}/user/#{URI.encode(user_id)}/text"
    headers = []
    # nil success_status_code selects the variant that accepts 200/202/204.
    request(client, :post, url, headers, input, options, nil)
  end

  # Builds the signed AWS request (SigV4) and dispatches it via HTTPoison.
  defp request(client, method, url, headers, input, options, success_status_code) do
    client = %{client | service: "lex"}
    host = get_host("runtime.lex", client)
    url = get_url(host, url, client)
    headers = Enum.concat([{"Host", host},
                           {"Content-Type", "application/x-amz-json-1.1"}],
                          headers)
    payload = encode_payload(input)
    headers = AWS.Request.sign_v4(client, method, url, headers, payload)
    perform_request(method, url, payload, headers, options, success_status_code)
  end

  # Variant for operations with no single expected status: 200 with an empty
  # body yields {:ok, response}; 200/202/204 with a body yield the decoded
  # JSON; any other 2xx/4xx/5xx body is decoded for its "message" field.
  defp perform_request(method, url, payload, headers, options, nil) do
    case HTTPoison.request(method, url, payload, headers, options) do
      {:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
        {:ok, response}
      {:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
        {:ok, Poison.Parser.parse!(body), response}
      {:ok, response=%HTTPoison.Response{status_code: 202, body: body}} ->
        {:ok, Poison.Parser.parse!(body), response}
      {:ok, response=%HTTPoison.Response{status_code: 204, body: body}} ->
        {:ok, Poison.Parser.parse!(body), response}
      {:ok, _response=%HTTPoison.Response{body: body}} ->
        reason = Poison.Parser.parse!(body)["message"]
        {:error, reason}
      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # Variant pinning one expected status code (^success_status_code); any
  # other status is treated as an error whose reason is the body's "message".
  defp perform_request(method, url, payload, headers, options, success_status_code) do
    case HTTPoison.request(method, url, payload, headers, options) do
      {:ok, response=%HTTPoison.Response{status_code: ^success_status_code, body: ""}} ->
        {:ok, nil, response}
      {:ok, response=%HTTPoison.Response{status_code: ^success_status_code, body: body}} ->
        {:ok, Poison.Parser.parse!(body), response}
      {:ok, _response=%HTTPoison.Response{body: body}} ->
        reason = Poison.Parser.parse!(body)["message"]
        {:error, reason}
      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # "local" region targets a localhost endpoint (useful for testing);
  # otherwise builds the standard <prefix>.<region>.<endpoint> host.
  defp get_host(endpoint_prefix, client) do
    if client.region == "local" do
      "localhost"
    else
      "#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
    end
  end

  defp get_url(host, url, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}#{url}/"
  end

  # nil input sends an empty body; anything else is JSON-encoded.
  defp encode_payload(input) do
    if input != nil do
      Poison.Encoder.encode(input, [])
    else
      ""
    end
  end
end
|
lib/aws/lex_runtime.ex
| 0.870687
| 0.536738
|
lex_runtime.ex
|
starcoder
|
defmodule Retro.Phoenix.HTML.SVG do
  @moduledoc """
  View helpers for rendering inline SVG.
  > The core code is borrowed from [nikkomiu/phoenix_inline_svg](https://github.com/nikkomiu/phoenix_inline_svg)
  > whose author has been gone for a long time.
  ## Import Helpers
  Add the following to the quoted `view` in your `my_app_web.ex` file.
      def view do
        quote do
          use Retro.Phoenix.HTML.SVG
        end
      end
  This will generate functions for each SVG file, effectively caching them at compile time.
  ## Usage
  ### render SVG from default collection
  ```eex
  <%= svg("home") %>
  ```
  It will load the SVG file from `assets/static/svg/generic/home.svg`, and inject
  the content of SVG file to HTML:
  ```html
  <svg>...</svg>
  ```
  ### render SVG from other collections
  You can break up SVG files into collections, and use the second argument of
  `svg/2` to specify the name of collection:
  ```eex
  <%= svg_image("user", "fontawesome") %>
  ```
  It will load the SVG file from `assets/static/svg/fontawesome/user.svg`, and
  inject the content of SVG file to HTML:
  ```html
  <svg>...</svg>
  ```
  ### render SVG with custom HTML attributes
  You can also pass optional HTML attributes into the function to set those
  attributes on the SVG:
  ```eex
  <%= svg("home", class: "logo", id: "bounce-animation") %>
  <%= svg("home", "fontawesome", class: "logo", id: "bounce-animation") %>
  ```
  It will output:
  ```html
  <svg class="logo" id="bounce-animation">...</svg>
  <svg class="logo" id="bounce-animation">...</svg>
  ```
  ## Configuration Options
  There are several configuration options for meeting your needs.
  ### `:dir`
  Specify the directory from which to load SVG files.
  The default value for standard way is `assets/static/svg/`.
  ```elixir
  config :retro, Retro.Phoenix.HTML.SVG,
    dir: "relative/path/to/the/root/of/project"
  ```
  ### `:default_collection`
  Specify the default collection to use.
  The default value is `generic`.
  ```elixir
  config :retro, Retro.Phoenix.HTML.SVG,
    default_collection: "fontawesome"
  ```
  ### `:not_found`
  Specify content to displayed in the `<i>` element when there is no SVG file found.
  The default value is:
  ```
  <svg viewbox='0 0 60 60'>
    <text x='0' y='40' font-size='30' font-weight='bold'
      font-family='monospace'>Error</text>
  </svg>
  ```
  ```elixir
  config :retro, Retro.Phoenix.HTML.SVG,
    not_found: "<p>Oh No!</p>"
  ```
  """
  alias Retro.Phoenix.HTML.SVG.Util

  @doc """
  The macro precompiles the SVG images into functions.
  """
  defmacro __using__(_) do
    # Walk the configured SVG directory at compile time and emit one set of
    # svg/1..3 function clauses per {collection, file} pair found.
    get_config(:dir, "assets/static/svg/")
    |> find_collection_sets
    |> Enum.uniq()
    |> Enum.map(&create_cached_svg(&1))
  end

  # Lists {collection, file_path} pairs for every subdirectory (collection)
  # of the SVG root; returns [] when the root is missing or unreadable.
  defp find_collection_sets(svgs_path) do
    if File.dir?(svgs_path) do
      case File.ls(svgs_path) do
        {:ok, listed_files} ->
          listed_files
          |> Stream.filter(fn e -> File.dir?(Path.join(svgs_path, e)) end)
          |> Enum.flat_map(&map_collection(&1, svgs_path))
        _ ->
          []
      end
    else
      []
    end
  end

  # Expands one collection directory into {collection, file_path} pairs,
  # recursing into nested directories via to_file_path/1.
  defp map_collection(collection, svgs_path) do
    collection_path = Path.join(svgs_path, collection)
    collection_path
    |> File.ls!()
    |> Stream.map(&Path.join(collection_path, &1))
    |> Stream.flat_map(&to_file_path/1)
    |> Enum.map(&{collection, &1})
  end

  # Recursively flattens a path into the list of files beneath it.
  defp to_file_path(path) do
    if File.dir?(path) do
      path
      |> File.ls!()
      |> Stream.map(&Path.join(path, &1))
      |> Enum.flat_map(&to_file_path/1)
    else
      [path]
    end
  end

  # Emits quoted svg/1..3 clauses for one file. Files from the default
  # collection additionally get svg/1 and svg/2 (opts) shorthands. Paths not
  # matching *.svg make Regex.run return nil, so hd/1 raises ArgumentError
  # and the file is skipped via the rescue.
  defp create_cached_svg({collection, name}) do
    try do
      filename = hd(Regex.run(~r|.*/#{collection}/(.*)\.svg$|, name, capture: :all_but_first))
      content = read_svg_from_path(name)

      generic_functions =
        if get_config(:default_collection, "generic") == collection do
          quote do
            def svg(unquote(filename)) do
              svg(unquote(filename), unquote(collection), [])
            end
            def svg(unquote(filename), opts) when is_list(opts) do
              svg(unquote(filename), unquote(collection), opts)
            end
          end
        end

      explicit_functions =
        quote do
          def svg(unquote(filename), unquote(collection)) do
            svg(unquote(filename), unquote(collection), [])
          end
          def svg(unquote(filename), unquote(collection), opts) do
            unquote(content)
            |> Util.insert_attrs(opts)
            |> Util.safety_string()
          end
        end

      [generic_functions, explicit_functions]
    rescue
      ArgumentError -> nil
    end
  end

  # Reads the SVG file contents, falling back to the configured :not_found
  # markup when the file cannot be read.
  defp read_svg_from_path(path) do
    case File.read(path) do
      {:ok, result} ->
        String.trim(result)
      {:error, _} ->
        get_config(
          :not_found,
          "<svg viewbox='0 0 60 60'>" <>
            "<text x='0' y='40' font-size='30' font-weight='bold'" <>
            "font-family='monospace'>Error</text></svg>"
        )
    end
  end

  # Fetches one key from this module's application config with a default.
  defp get_config(key, default) do
    config = Application.get_env(:retro, __MODULE__, [])
    Keyword.get(config, key, default)
  end
end
|
lib/retro/phoenix_html/svg.ex
| 0.750004
| 0.915318
|
svg.ex
|
starcoder
|
defmodule Day14 do
  @moduledoc """
  Advent of Code 2019
  Day 14: Space Stoichiometry
  """
  alias Day14.{Part1, Part2}

  # Reads and parses the puzzle input shipped alongside this source file.
  def get_reactions() do
    __DIR__
    |> Path.join("inputs/day14.txt")
    |> File.open!()
    |> IO.stream(:line)
    |> parse_reactions()
  end

  # Turns raw reaction lines ("A, B => C") into [[inputs], output] pairs.
  def parse_reactions(lines) do
    Enum.map(lines, fn line ->
      [lhs, rhs] =
        line
        |> String.trim()
        |> String.split(" => ")

      [String.split(lhs, ", "), rhs]
    end)
  end

  # Solves and prints both parts of the puzzle.
  def execute() do
    reactions = get_reactions()
    IO.puts("Part 1: #{Part1.run(reactions)}")
    IO.puts("Part 2: #{Part2.run(reactions)}")
  end
end
defmodule Day14.Part1 do
  # Computes the total ORE required to produce `quantity` units of FUEL.
  def run(reactions, quantity \\ 1) do
    reactions
    |> transform_reactions_to_map()
    |> calculate_ore_cost("FUEL", quantity)
    |> (&elem(&1, 0)).()
  end

  @doc """
  Indexes parsed reactions by output element:
  %{output => {quantity, [{input_quantity, input_elements}, ...]}, ...}
  """
  def transform_reactions_to_map(reactions, map \\ %{})
  def transform_reactions_to_map([], map), do: map

  def transform_reactions_to_map([[inputs, output] | reactions], map) do
    {element, quantity} = parse_element_quantity(output)
    inputs = for inp <- inputs, into: %{}, do: parse_element_quantity(inp)
    map = Map.put(map, element, {quantity, inputs})
    transform_reactions_to_map(reactions, map)
  end

  # "7 A" -> {"A", 7}
  defp parse_element_quantity(string) do
    string
    |> String.split(" ")
    |> (fn [qty, element] -> {element, String.to_integer(qty)} end).()
  end

  # Recursively expands `element` down to ORE, threading a `leftovers` map of
  # surplus intermediates so over-produced chemicals are reused instead of
  # re-synthesized. Returns {ore_cost, leftovers}.
  def calculate_ore_cost(reactions, element, qty_needed, leftovers \\ %{})
  # Base case: ORE is the raw resource — its "cost" is the quantity itself.
  def calculate_ore_cost(_, "ORE", qty_needed, leftovers), do: {qty_needed, leftovers}

  def calculate_ore_cost(reactions, element, qty_needed, leftovers) do
    {qty, inputs} = reactions[element]
    {inputs, leftovers} = multiply_qty(inputs, qty, qty_needed, element, leftovers)

    # Sum the ore cost of each scaled input, threading leftovers through.
    Enum.reduce(inputs, {0, leftovers}, fn {ele, qty}, {ore_cost, leftovers} ->
      {inp_oc, leftovers} = calculate_ore_cost(reactions, ele, qty, leftovers)
      {inp_oc + ore_cost, leftovers}
    end)
  end

  # Scales the reaction inputs to cover `qty_needed` units of `element`:
  # consumes any leftover stock of `element` first, runs the reaction
  # ceil(remaining / qty) times, records the surplus back into `leftovers`,
  # and finally nets each scaled input against leftover stock of that input.
  def multiply_qty(inputs, qty, qty_needed, element, leftovers) do
    leftover_element = Map.get(leftovers, element, 0)

    # Apply existing stock: reduce the need and the stock simultaneously.
    {qty_needed, leftover_element} = {
      max(qty_needed - leftover_element, 0),
      max(leftover_element - qty_needed, 0)
    }

    # Number of reaction runs needed; any overshoot becomes new leftover.
    multiple = ceil(qty_needed / qty)
    leftover_element = leftover_element + (qty * multiple - qty_needed)
    leftovers = Map.put(leftovers, element, leftover_element)

    inputs =
      for inp <- inputs,
          into: %{},
          do: inp |> (fn {ele, qty} -> {ele, qty * multiple} end).()

    subtract_leftovers_from_inputs(inputs, leftovers)
  end

  # Nets each required input quantity against leftover stock of that input,
  # updating both maps (never going below zero on either side).
  defp subtract_leftovers_from_inputs(inputs, leftovers) do
    Enum.reduce(inputs, {inputs, leftovers}, fn {ele, qty}, {inputs, leftovers} ->
      leftover_amt = Map.get(leftovers, ele, 0)

      {
        Map.put(inputs, ele, max(qty - leftover_amt, 0)),
        Map.put(leftovers, ele, max(leftover_amt - qty, 0))
      }
    end)
  end
end
defmodule Day14.Part2 do
  alias Day14.Part1

  # How much FUEL can one trillion ORE produce? Binary-search the largest
  # fuel quantity whose ore cost does not exceed the budget.
  def run(reactions) do
    bsearch_ore_cost(reactions, 1_000_000_000_000)
  end

  def bsearch_ore_cost(reactions, target, low \\ 0, high \\ 1_000_000_000) do
    midpoint = trunc(low + (high - low) / 2)
    cost = Part1.run(reactions, midpoint)

    cond do
      # Exact hit, or the window has closed on the best under-budget value.
      cost == target or (cost < target and midpoint + 1 == high) ->
        midpoint

      # Still under budget: search the upper half.
      cost < target ->
        bsearch_ore_cost(reactions, target, midpoint, high)

      # Over budget: search the lower half.
      true ->
        bsearch_ore_cost(reactions, target, low, midpoint)
    end
  end
end
|
lib/day14.ex
| 0.786623
| 0.61819
|
day14.ex
|
starcoder
|
defmodule Gim.Rdf do
  @moduledoc """
  Terse RDF Triple Language (Turtle) import and export for Gim.
  """

  # Import Terse RDF Triple Language (Turtle)
  # "1million.rdf.gz" |> File.stream!([:compressed]) |> Gim.Rdf.read_rdf()

  @doc """
  Read Terse RDF Triple Language (Turtle) format from a stream.

  `pfun` is an optional predicate mapping function.

  Returns a map of `subject => [{predicate, object}]`, with the predicate
  tuples in reverse input order. Blank lines and comment lines are skipped.

  Note: the default `pfun` (`String.to_atom/1`) creates atoms from input
  data; atoms are never garbage collected, so pass a safe mapping function
  when reading untrusted input.
  """
  def read_rdf(stream, pfun \\ &String.to_atom/1) do
    stream
    |> Stream.map(&tokenize(pfun, &1))
    |> Enum.reduce(%{}, &rdf_to_map/2)
  end

  @doc """
  Stream raw Terse RDF Triple Language (Turtle) format from a stream.
  `pfun` is an optional predicate mapping function.
  """
  def stream_rdf(stream, pfun \\ &String.to_atom/1) do
    stream
    |> Stream.map(&tokenize(pfun, &1))
  end

  # Fold a raw RDF token into the subject => [{predicate, object}] accumulator.
  # Blank lines and comments contribute nothing.
  defp rdf_to_map(:blank, acc), do: acc
  defp rdf_to_map({:comment, _}, acc), do: acc

  defp rdf_to_map({subject, predicate, object}, acc) do
    Map.update(acc, subject, [{predicate, object}], fn x ->
      [{predicate, object} | x]
    end)
  end

  # Tokenize one line of RDF data by dispatching on the leading byte.
  # NOTE(review): lines that are neither blank, a comment, nor start (after
  # whitespace) with a "<"-delimited subject raise FunctionClauseError —
  # malformed input is not tolerated.
  defp tokenize(_pfun, <<>>), do: :blank
  defp tokenize(_pfun, <<"\n">>), do: :blank
  defp tokenize(_pfun, <<"\r\n">>), do: :blank
  defp tokenize(pfun, <<" ", rest::binary>>), do: tokenize(pfun, rest)
  defp tokenize(pfun, <<"\t", rest::binary>>), do: tokenize(pfun, rest)
  defp tokenize(_pfun, <<"#", rest::binary>>), do: tokenize_comment(rest)
  defp tokenize(pfun, <<"<", rest::binary>>), do: tokenize_tag1(pfun, rest)

  # Comments: strip leading whitespace and dashes, then extract the text.
  defp tokenize_comment(<<>>), do: {:comment, nil}
  defp tokenize_comment(<<"\n">>), do: {:comment, nil}
  defp tokenize_comment(<<"\r\n">>), do: {:comment, nil}
  defp tokenize_comment(<<" ", rest::binary>>), do: tokenize_comment(rest)
  defp tokenize_comment(<<"\t", rest::binary>>), do: tokenize_comment(rest)
  defp tokenize_comment(<<"-", rest::binary>>), do: tokenize_comment(rest)

  defp tokenize_comment(rest) do
    # Comments emitted by write_rdf/1 look like "# -- text --". Previously a
    # plain comment without a trailing " --" crashed with a MatchError on the
    # single-element split result; accept both shapes.
    case :binary.split(rest, " --") do
      [comment, _rest] -> {:comment, comment}
      [comment] -> {:comment, comment}
    end
  end

  # subject, predicate, object

  # Extract the "<subject>" tag, then continue with the predicate.
  defp tokenize_tag1(pfun, rest) do
    [subject, rest] = :binary.split(rest, ">")
    tokenize_predicate(pfun, subject, rest)
  end

  #defp tokenize_predicate(pfun, _subject, <<>>), do: :error
  #defp tokenize_predicate(pfun, _subject, <<"\n">>), do: :error
  #defp tokenize_predicate(pfun, _subject, <<"\r\n">>), do: :error
  defp tokenize_predicate(pfun, subject, <<" ", rest::binary>>), do: tokenize_predicate(pfun, subject, rest)
  defp tokenize_predicate(pfun, subject, <<"\t", rest::binary>>), do: tokenize_predicate(pfun, subject, rest)
  defp tokenize_predicate(pfun, subject, <<"<", rest::binary>>), do: tokenize_tag2(pfun, subject, rest)

  # Extract the "<predicate>" tag, map it through pfun, continue with object.
  defp tokenize_tag2(pfun, subject, rest) do
    [predicate, rest] = :binary.split(rest, ">")
    tokenize_object(subject, pfun.(predicate), rest)
  end

  #defp tokenize_object(_subject, _predicate, <<>>), do: :error
  #defp tokenize_object(_subject, _predicate, <<"\n">>), do: :error
  #defp tokenize_object(_subject, _predicate, <<"\r\n">>), do: :error
  defp tokenize_object(subject, predicate, <<" ", rest::binary>>), do: tokenize_object(subject, predicate, rest)
  defp tokenize_object(subject, predicate, <<"\t", rest::binary>>), do: tokenize_object(subject, predicate, rest)
  defp tokenize_object(subject, predicate, <<"<", rest::binary>>), do: tokenize_tag3(subject, predicate, rest)
  defp tokenize_object(subject, predicate, <<"\"", rest::binary>>), do: tokenize_text3(subject, predicate, rest)

  # Object is a "<tag>" reference.
  defp tokenize_tag3(subject, predicate, rest) do
    [object, _rest] = :binary.split(rest, ">")
    # just discard what's left of the line (the end dot)
    {subject, predicate, object}
  end

  # Object is a quoted literal, possibly followed by a language tag.
  defp tokenize_text3(subject, predicate, rest) do
    [object, rest] = :binary.split(rest, "\"")
    tokenize_text3(subject, predicate, object, rest)
  end

  # just discard what's left of the line (end dot) and assume @en if missing
  defp tokenize_text3(subject, predicate, object, <<"@en", _::binary>>),
    do: {subject, predicate, {:en, object}}

  defp tokenize_text3(subject, predicate, object, <<"@de", _::binary>>),
    do: {subject, predicate, {:de, object}}

  defp tokenize_text3(subject, predicate, object, <<"@it", _::binary>>),
    do: {subject, predicate, {:it, object}}

  defp tokenize_text3(subject, predicate, object, _),
    do: {subject, predicate, {:en, object}}

  # Export Terse RDF Triple Language (Turtle)
  # repo |> Gim.Rdf.write_rdf() |> Enum.into(File.stream!("dump.rdf.gz", [:compressed]))

  @doc """
  Export all nodes of `repo` as iodata in the same Turtle dialect that
  `read_rdf/2` consumes.
  """
  def write_rdf(repo) do
    aliases = repo.type_aliases()
    types = repo.types()

    [
      "# -- Gim RDF export of #{repo} --\n",
      Enum.map(types, &rdf_nodes(repo, &1, aliases)),
    ]
  end

  # One commented section per node type, followed by its nodes.
  defp rdf_nodes(repo, type, aliases) do
    nodes = repo.all!(type)

    [
      "\n# -- #{aliases[type]} --\n\n",
      Enum.map(nodes, &rdf_node(&1, aliases)),
    ]
  end

  # A node becomes a gim.type triple plus one triple per property and edge.
  defp rdf_node(%{__struct__: struct, __id__: id} = node, aliases) do
    props = struct.__schema__(:properties)
    assocs = struct.__schema__(:associations)
    subject = "<#{aliases[struct]}.#{id}>"

    [
      "#{subject} <gim.type> \"#{aliases[struct]}\" .\n",
      Enum.map(props, &rdf_property(subject, node, &1)),
      Enum.map(assocs, &rdf_edges(subject, node, &1, aliases)),
    ]
  end

  defp rdf_property(subject, node, property_name) do
    %{^property_name => value} = node
    "#{subject} <#{property_name}> #{inspect value} .\n"
  end

  defp rdf_edges(subject, %{__struct__: struct} = node, assoc, aliases) do
    type = struct.__schema__(:type, assoc)
    type_alias = aliases[type]
    edges = Map.fetch!(node, assoc)
    predicate = "<#{assoc}>"
    rdf_edge(subject, predicate, type_alias, edges)
  end

  # An association may hold a list of edges or a single edge.
  defp rdf_edge(subject, predicate, type_alias, edges) when is_list(edges) do
    Enum.map(edges, &rdf_edge(subject, predicate, type_alias, &1))
  end

  defp rdf_edge(subject, predicate, type_alias, edge) do
    "#{subject} #{predicate} <#{type_alias}.#{edge}> .\n"
  end
end
|
lib/gim/rdf.ex
| 0.688992
| 0.515925
|
rdf.ex
|
starcoder
|
defmodule Caravan do
  @moduledoc """
  Tools for running Distributed Elixir/Erlang with Nomad and Consul

  # Caravan

  The built-in Erlang distribution mechanisms are predicated on using the Erlang
  Port Mapper Daemon (EPMD), a process that is started with the VM and communicates with
  remote EPMD instances to determine what ports to send data on.

  While this method can work in some cloud environments, container scheduling technologies make it
  difficult to pick a single port to use globally or to run multiple processes in a container.
  Also, the built-in method for forming a cluster is to use a plaintext .hosts file with
  resolvable node names, which is very difficult to make work in a dynamic environment where
  nodes can leave a cluster frequently.

  There are several libraries and strategies for using the Kubernetes API to
  build a distributed cluster, but Consul provides us with a clean DNS api to
  retrieve information, while Nomad handles monitoring and scheduling services.

  Caravan is split into two parts: The first is a set of modules that remove the
  need for `epmd` by determining node ports by the node name. The idea and much of
  the code is from the excellent article [Erlang (and Elixir) distribution
  without
  epmd](https://www.erlang-solutions.com/blog/erlang-and-elixir-distribution-without-epmd.html).
  It's worth the read, and should explain what we're trying to accomplish with
  the `Caravan.Epmd` module.

  The second part utilizes [libcluster](https://github.com/bitwalker/libcluster)
  to help with forming clusters automatically based on DNS SRV queries to
  Consul.

  ## Getting started with custom Erlang distribution

  Erlang has some command line options to overwrite the default distribution
  mechanism. To use Caravan's implementations, you would do something similar to
  this

  ```
  iex --erl "-proto_dist Elixir.Caravan.Epmd.Dist -start_epmd false -epmd_module Elixir.Caravan.Epmd.Client" --sname "node3434" -S mix
  ```

  For testing locally, you'll either have to run `elixirc` on the above
  modules to create the required `.beam` files, or you can pass an additional
  flag to `--erl`:

  ```
  -pa _build/dev/lib/caravan/ebin
  ```

  Note: building a release with Distillery will not require the `-pa` flag.
  """
end
|
lib/caravan.ex
| 0.846292
| 0.876264
|
caravan.ex
|
starcoder
|
defmodule EctoLtree.Functions do
  @moduledoc """
  This module exposes the `ltree` functions.

  For more information see the [PostgreSQL documentation](https://www.postgresql.org/docs/current/static/ltree.html#LTREE-FUNC-TABLE).
  """

  @doc """
  Subpath of `ltree` from position start to position end-1 (counting from 0).
  """
  defmacro subltree(ltree, start, finish) do
    quote do
      fragment("SUBLTREE(?, ?, ?)", unquote(ltree), unquote(start), unquote(finish))
    end
  end

  @doc """
  Subpath of `ltree` starting at position offset, extending to end of path.
  If offset is negative, subpath starts that far from the end of the path.
  """
  defmacro subpath(ltree, offset) do
    quote do
      fragment("SUBPATH(?, ?)", unquote(ltree), unquote(offset))
    end
  end

  @doc """
  Subpath of `ltree` starting at position offset, length len.
  If offset is negative, subpath starts that far from the end of the path.
  If len is negative, leaves that many labels off the end of the path.
  """
  defmacro subpath(ltree, offset, len) do
    quote do
      fragment("SUBPATH(?, ?, ?)", unquote(ltree), unquote(offset), unquote(len))
    end
  end

  @doc """
  Number of labels in path.
  """
  defmacro nlevel(ltree) do
    quote do
      fragment("NLEVEL(?)", unquote(ltree))
    end
  end

  @doc """
  Position of first occurrence of b in a; -1 if not found.
  """
  defmacro index(a, b) do
    quote do
      fragment("INDEX(?, ?)", unquote(a), unquote(b))
    end
  end

  @doc """
  Position of first occurrence of b in a, searching starting at offset; negative offset means start -offset labels from the end of the path.
  """
  defmacro index(a, b, offset) do
    quote do
      fragment("INDEX(?, ?, ?)", unquote(a), unquote(b), unquote(offset))
    end
  end

  @doc """
  Cast `text` to `ltree`.
  """
  defmacro text2ltree(text) do
    quote do
      fragment("TEXT2LTREE(?)", unquote(text))
    end
  end

  @doc """
  Cast `ltree` to `text`.
  """
  defmacro ltree2text(ltree) do
    quote do
      fragment("LTREE2TEXT(?)", unquote(ltree))
    end
  end

  @doc """
  Lowest common ancestor.
  """
  defmacro lca(a, b) do
    quote do
      fragment("LCA(?, ?)", unquote(a), unquote(b))
    end
  end
end
|
lib/ecto_ltree/functions.ex
| 0.814274
| 0.747524
|
functions.ex
|
starcoder
|
defmodule Day4 do
  # Solves both parts for the parsed passport list and renders the summary.
  def run(lines) do
    passports = parse_input(lines)
    part1 = Enum.count(passports, &part1_valid/1)
    part2 = Enum.count(passports, &part2_valid/1)
    "part1: #{part1} part2: #{part2}"
  end

  # Part 1: a passport is valid when all seven required fields are present
  # (cid is optional).
  def part1_valid(%{byr: _, iyr: _, eyr: _, hgt: _, hcl: _, ecl: _, pid: _}), do: true
  def part1_valid(_), do: false

  # Part 2: all required fields present AND each field's value passes its
  # specific validation rule.
  def part2_valid(%{byr: byr, iyr: iyr, eyr: eyr, hgt: hgt, hcl: hcl, ecl: ecl, pid: pid}) do
    number_in_range(byr, ~r/^([0-9]{4})$/, 1920, 2002) and
      number_in_range(iyr, ~r/^([0-9]{4})$/, 2010, 2020) and
      number_in_range(eyr, ~r/^([0-9]{4})$/, 2020, 2030) and
      height_valid(hgt) and
      String.match?(hcl, ~r/^#[0-9a-f]{6}$/) and
      ecl in ~w{amb blu brn gry grn hzl oth} and
      String.match?(pid, ~r/^[0-9]{9}$/)
  end

  def part2_valid(_), do: false

  # Height is valid either in centimeters (150-193) or inches (59-76).
  def height_valid(hgt) do
    number_in_range(hgt, ~r/^([0-9]+)cm$/, 150, 193) or
      number_in_range(hgt, ~r/^([0-9]+)in$/, 59, 76)
  end

  @doc """
  Checks that the string matches the pattern, and the first capture
  group in a number in the given range (inclusive).

  ## Examples:

      iex> Day4.number_in_range("a47b", ~r/^a([47]+)b$/, 0, 50)
      true
      iex> Day4.number_in_range("a77b", ~r/^a([47]+)b$/, 0, 50)
      false
      iex> Day4.number_in_range("49", ~r/([0-9]+)/, 50, 50)
      false
      iex> Day4.number_in_range("50", ~r/([0-9]+)/, 50, 50)
      true
      iex> Day4.number_in_range("51", ~r/([0-9]+)/, 50, 50)
      false
  """
  def number_in_range(text, pattern, low, high) do
    case Regex.run(pattern, text) do
      nil ->
        false

      [_, num_str] ->
        num = String.to_integer(num_str)
        low <= num and num <= high
    end
  end

  @doc """
  Takes the textual input an converts it into a list
  of maps. Each map represents one passport, mapping
  field name to value.
  """
  def parse_input(lines) do
    lines
    |> AdventUtil.split_at_blank_lines()
    |> Enum.map(&parse_one_passport/1)
  end

  # Splits every "field:value" token across the passport's lines into a map.
  # NOTE(review): String.to_atom/1 on input data creates atoms; acceptable
  # for puzzle-sized input, unsafe for unbounded input.
  def parse_one_passport(lines) do
    lines
    |> Enum.flat_map(&String.split/1)
    |> Map.new(fn item ->
      [field, value] = String.split(item, ":")
      {String.to_atom(field), value}
    end)
  end
end
|
elixir_advent/lib/day4.ex
| 0.617282
| 0.40589
|
day4.ex
|
starcoder
|
defmodule EarmarkParser.Block do
  @moduledoc false

  # Each struct below represents one kind of parsed Markdown block. They all
  # carry `lnb` (the source line number the block starts at) and `attrs`
  # (attached block attributes, or nil).

  defmodule Heading do
    @moduledoc false
    defstruct lnb: 0, attrs: nil, content: nil, level: nil
  end

  defmodule Ruler do
    @moduledoc false
    defstruct lnb: 0, attrs: nil, type: nil
  end

  defmodule BlockQuote do
    @moduledoc false
    defstruct lnb: 0, attrs: nil, blocks: []
  end

  defmodule Para do
    @moduledoc false
    defstruct lnb: 0, attrs: nil, lines: []
  end

  # Code block with an optional language annotation.
  defmodule Code do
    @moduledoc false
    defstruct lnb: 0, attrs: nil, lines: [], language: nil
  end

  # Multi-line HTML block, keyed by its opening tag.
  defmodule Html do
    @moduledoc false
    defstruct lnb: 0, attrs: nil, html: [], tag: nil
  end

  defmodule HtmlOneline do
    @moduledoc false
    defstruct lnb: 0, attrs: nil, html: ""
  end

  defmodule HtmlComment do
    @moduledoc false
    defstruct lnb: 0, attrs: nil, lines: []
  end

  # Link reference definition: id, URL, and optional title.
  defmodule IdDef do
    @moduledoc false
    defstruct lnb: 0, attrs: nil, id: nil, url: nil, title: nil
  end

  # Footnote definition; `number` is assigned when footnotes are collected.
  defmodule FnDef do
    @moduledoc false
    defstruct lnb: 0, attrs: nil, id: nil, number: nil, blocks: []
  end

  # Container for the rendered list of footnotes.
  defmodule FnList do
    @moduledoc false
    defstruct lnb: 0, attrs: ".footnotes", blocks: []
  end

  defmodule Ial do
    @moduledoc false
    defstruct lnb: 0, attrs: nil, content: nil, verbatim: ""
  end

  # An ordered (:ol) or unordered (:ul) list and its items.
  defmodule List do
    @moduledoc false
    defstruct attrs: nil,
              blocks: [],
              bullet: "-",
              lnb: 0,
              loose?: false,
              start: "",
              type: :ul
  end

  defmodule ListItem do
    @moduledoc false
    defstruct attrs: nil,
              blocks: [],
              bullet: "",
              lnb: 0,
              loose?: false,
              spaced: true,
              type: :ul
  end

  defmodule Table do
    @moduledoc false
    defstruct lnb: 0, attrs: nil, rows: [], header: nil, alignments: []

    # Convenience constructor: a table of `n` columns, all left-aligned.
    def new_for_columns(n) do
      %__MODULE__{alignments: Elixir.List.duplicate(:left, n)}
    end
  end

  defmodule Text do
    @moduledoc false
    defstruct attrs: nil, lnb: 0, line: ""
  end

  # Union type of every block struct defined above.
  @type t :: %Heading{} |
    %Ruler{} |
    %BlockQuote{} |
    %List{} |
    %ListItem{} |
    %Para{} |
    %Code{} |
    %Html{} |
    %HtmlOneline{} |
    %HtmlComment{} |
    %IdDef{} |
    %FnDef{} |
    %FnList{} |
    %Ial{} |
    %Table{} |
    %Text{}

  @type ts :: list(t)
end
# SPDX-License-Identifier: Apache-2.0
|
lib/earmark_parser/block.ex
| 0.591369
| 0.427217
|
block.ex
|
starcoder
|
defmodule Ash.EmbeddableType do
@moduledoc false
# Constraint schema shared by every {:array, EmbeddedResource} type; the
# per-key docs below are rendered by Ash's option documentation tooling.
@embedded_resource_array_constraints [
  sort: [
    type: :any,
    doc: """
    A sort to be applied when casting the data.
    Only relevant for a type of {:array, `EmbeddedResource}`
    The sort is not applied when reading the data, so if the sort changes you will
    need to fix it in your database or wait for the data to be written again, at which
    point it will be sorted when casting.
    """
  ],
  load: [
    type: {:list, :atom},
    doc: """
    A list of calculations to load on the resource.
    Only relevant for a type of {:array, `EmbeddedResource}`
    Aggregates are not supported on embedded resources.
    """
  ],
  create_action: [
    type: :atom,
    doc:
      "The action to use on the resource when creating an embed. The primary is used by default."
  ],
  update_action: [
    type: :atom,
    doc:
      "The action to use on the resource when updating an embed. The primary is used by default."
  ],
  destroy_action: [
    type: :atom,
    doc:
      "The action to use on the resource when destroying an embed. The primary is used by default."
  ]
]

# Accessor so generated embedded types can reference the schema above.
@doc false
def embedded_resource_array_constraints, do: @embedded_resource_array_constraints
@doc false
# Normalizes any error shape (exceptions, keyword lists, maps, nested error
# structs, bare strings) into a flat list of keyword lists carrying
# :message/:field/:vars keys, suitable for attaching to a changeset.
def handle_errors(errors) do
  errors
  |> do_handle_errors()
  |> List.wrap()
  |> Ash.Error.flatten_preserving_keywords()
end

# A keyword list is treated as a single error; any other list is a list of
# errors, each normalized recursively.
defp do_handle_errors(errors) when is_list(errors) do
  if Keyword.keyword?(errors) do
    main_fields = Keyword.take(errors, [:message, :field, :fields])
    vars = Keyword.merge(main_fields, Keyword.get(errors, :vars, []))

    main_fields
    |> Keyword.put(:vars, vars)
    |> Enum.into(%{})
    |> do_handle_errors()
  else
    Enum.map(errors, &do_handle_errors/1)
  end
end

# Containers with a nested :errors field are unwrapped and recursed.
defp do_handle_errors(%{errors: errors}) do
  errors
  |> List.wrap()
  |> do_handle_errors()
end

defp do_handle_errors(%Ash.Error.Changes.InvalidAttribute{
       message: message,
       field: field,
       vars: vars
     }) do
  vars
  |> Keyword.put(:field, field)
  |> Keyword.put(:message, message)
  |> add_index()
end

defp do_handle_errors(%{message: message, vars: vars, field: field}) do
  vars
  |> Keyword.put(:message, message)
  |> Keyword.put(:field, field)
  |> add_index()
end

defp do_handle_errors(%{message: message, vars: vars}) do
  vars
  |> Keyword.put(:message, message)
  |> add_index()
end

defp do_handle_errors(%{field: field} = exception) do
  [field: field, message: Exception.message(exception)]
end

defp do_handle_errors(error) when is_binary(error) do
  [message: error]
end

defp do_handle_errors(error) when is_exception(error) do
  [message: Exception.message(error)]
end

# Last resort: an unrecognized error shape gets a generic message.
defp do_handle_errors(_error) do
  [message: "Something went wrong"]
end

# Currently a no-op: the index-qualified field naming below is intentionally
# disabled but kept for reference.
defp add_index(opts) do
  opts

  # cond do
  #   opts[:index] && opts[:field] ->
  #     Keyword.put(opts, :field, "#{opts[:field]}[#{opts[:index]}]")

  #   opts[:index] ->
  #     Keyword.put(opts, :field, "[#{opts[:index]}]")

  #   true ->
  #     opts
  # end
end
# Injects the Ash.Type callback implementations for a *single* embedded
# resource (storage type :map). Casting/updating/destroying an embed is
# delegated to actions on the resource itself via its private ShadowApi.
defmacro single_embed_implementation do
  # credo:disable-for-next-line Credo.Check.Refactor.LongQuoteBlocks
  quote location: :keep do
    alias __MODULE__.ShadowApi

    def storage_type, do: :map

    # Already-cast struct values pass through unchanged.
    def cast_input(%{__struct__: __MODULE__} = input, _constraints), do: {:ok, input}

    # A plain map is cast by running the resource's create action.
    def cast_input(value, constraints) when is_map(value) do
      action =
        constraints[:create_action] ||
          Ash.Resource.Info.primary_action!(__MODULE__, :create).name

      __MODULE__
      |> Ash.Changeset.for_create(action, value)
      |> ShadowApi.create()
      |> case do
        {:ok, result} ->
          {:ok, result}

        {:error, error} ->
          {:error, Ash.EmbeddableType.handle_errors(error)}
      end
    end

    def cast_input(nil, _), do: {:ok, nil}
    def cast_input(_, _), do: :error

    # Rebuilds the struct from stored data, casting each attribute and
    # skipping keys that are absent from the stored map.
    def cast_stored(value, constraints) when is_map(value) do
      __MODULE__
      |> Ash.Resource.Info.attributes()
      |> Enum.reduce_while({:ok, struct(__MODULE__)}, fn attr, {:ok, struct} ->
        with {:fetch, {:ok, value}} <- {:fetch, fetch_key(value, attr.name)},
             {:ok, casted} <-
               Ash.Type.cast_stored(attr.type, value, constraints) do
          {:cont, {:ok, Map.put(struct, attr.name, casted)}}
        else
          {:fetch, :error} ->
            {:cont, {:ok, struct}}

          other ->
            {:halt, other}
        end
      end)
    end

    def cast_stored(nil, _), do: {:ok, nil}

    def cast_stored(_other, _) do
      :error
    end

    # Fetches a key from a map whether it is stored as an atom or a string.
    def fetch_key(map, atom) do
      case Map.fetch(map, atom) do
        {:ok, value} ->
          {:ok, value}

        :error ->
          Map.fetch(map, to_string(atom))
      end
    end

    # Dumps attributes and calculations to an embeddable (plain-map) form.
    def dump_to_native(value, _) when is_map(value) do
      attributes = Ash.Resource.Info.attributes(__MODULE__)
      calculations = Ash.Resource.Info.calculations(__MODULE__)

      Enum.reduce_while(attributes ++ calculations, {:ok, %{}}, fn attribute, {:ok, acc} ->
        case Map.fetch(value, attribute.name) do
          :error ->
            {:cont, {:ok, acc}}

          {:ok, value} ->
            case Ash.Type.dump_to_embedded(
                   attribute.type,
                   value,
                   Map.get(attribute, :constraints) || []
                 ) do
              :error ->
                {:halt, :error}

              {:ok, dumped} ->
                {:cont, {:ok, Map.put(acc, attribute.name, dumped)}}
            end
        end
      end)
    end

    def dump_to_native(nil, _), do: {:ok, nil}
    def dump_to_native(_, _), do: :error

    # Single embeds reuse the array constraint schema minus :sort.
    def constraints,
      do:
        Keyword.take(array_constraints(), [
          :load,
          :create_action,
          :destroy_action,
          :update_action
        ])

    def apply_constraints(nil, _), do: {:ok, nil}

    # Applies the :load constraint by reading the single embed back through
    # the shadow api.
    def apply_constraints(term, constraints) do
      __MODULE__
      |> Ash.Query.put_context(:data, [term])
      |> Ash.Query.load(constraints[:load] || [])
      |> ShadowApi.read()
      |> case do
        {:ok, [result]} ->
          {:ok, result}

        {:error, errors} ->
          {:error, Ash.EmbeddableType.handle_errors(errors)}
      end
    end

    def handle_change(nil, new_value, _constraints) do
      {:ok, new_value}
    end

    # Replacing an embed with nil destroys the old value.
    def handle_change(old_value, nil, constraints) do
      action =
        constraints[:destroy_action] ||
          Ash.Resource.Info.primary_action!(__MODULE__, :destroy).name

      case ShadowApi.destroy(old_value, action: action) do
        :ok -> {:ok, nil}
        {:error, error} -> {:error, Ash.EmbeddableType.handle_errors(error)}
      end
    end

    # When the primary key changes, the old embed is destroyed; when the pkey
    # is fully private there is no identity to compare, so just accept.
    def handle_change(old_value, new_value, constraints) do
      pkey_fields = Ash.Resource.Info.primary_key(__MODULE__)

      if Enum.all?(pkey_fields, fn pkey_field ->
           Ash.Resource.Info.attribute(__MODULE__, pkey_field).private?
         end) do
        {:ok, new_value}
      else
        pkey = Map.take(old_value, pkey_fields)

        if Map.take(new_value, pkey_fields) == pkey do
          {:ok, new_value}
        else
          action =
            constraints[:destroy_action] ||
              Ash.Resource.Info.primary_action!(__MODULE__, :destroy).name

          case ShadowApi.destroy(old_value, action: action) do
            :ok -> {:ok, new_value}
            {:error, error} -> {:error, Ash.EmbeddableType.handle_errors(error)}
          end
        end
      end
    end

    # Empty string is treated the same as nil.
    def prepare_change(old_value, "", constraints) do
      prepare_change(old_value, nil, constraints)
    end

    def prepare_change(_old_value, nil, _constraints) do
      {:ok, nil}
    end

    def prepare_change(_old_value, %{__struct__: __MODULE__} = new_value, _constraints) do
      {:ok, new_value}
    end

    def prepare_change(nil, new_value, _constraints) do
      {:ok, new_value}
    end

    # If the incoming map has the same (castable) primary key as the old
    # value, this is an update of the existing embed; otherwise the raw map is
    # passed along for cast_input/2 to create.
    def prepare_change(old_value, new_uncasted_value, constraints) do
      pkey_fields = Ash.Resource.Info.primary_key(__MODULE__)

      if Enum.all?(pkey_fields, fn pkey_field ->
           Ash.Resource.Info.attribute(__MODULE__, pkey_field).private?
         end) do
        action =
          constraints[:update_action] ||
            Ash.Resource.Info.primary_action!(__MODULE__, :update).name

        old_value
        |> Ash.Changeset.for_update(action, new_uncasted_value)
        |> ShadowApi.update()
        |> case do
          {:ok, value} -> {:ok, value}
          {:error, error} -> {:error, Ash.EmbeddableType.handle_errors(error)}
        end
      else
        pkey =
          Enum.into(pkey_fields, %{}, fn pkey_field ->
            case fetch_key(new_uncasted_value, pkey_field) do
              :error ->
                {pkey_field, :error}

              {:ok, value} ->
                attribute = Ash.Resource.Info.attribute(__MODULE__, pkey_field)

                case Ash.Type.cast_input(attribute.type, value, attribute.constraints) do
                  {:ok, casted} ->
                    {pkey_field, casted}

                  _ ->
                    {pkey_field, :error}
                end
            end
          end)

        if Enum.any?(Map.values(pkey), &(&1 == :error)) do
          {:ok, new_uncasted_value}
        else
          old_pkey = Map.take(old_value, pkey_fields)

          if old_pkey == pkey do
            action =
              constraints[:update_action] ||
                Ash.Resource.Info.primary_action!(__MODULE__, :update).name

            old_value
            |> Ash.Changeset.for_update(action, new_uncasted_value)
            |> ShadowApi.update()
            |> case do
              {:ok, value} -> {:ok, value}
              {:error, error} -> {:error, Ash.EmbeddableType.handle_errors(error)}
            end
          else
            {:ok, new_uncasted_value}
          end
        end
      end
    end
  end
end
# Injects the Ash.Type callback implementations for {:array, EmbeddedResource}
# values: uniqueness checking, sorting/loading on cast, and diff-based
# update/destroy of array members keyed by primary key.
defmacro array_embed_implementation do
  # credo:disable-for-next-line Credo.Check.Refactor.LongQuoteBlocks
  quote location: :keep do
    alias __MODULE__.ShadowApi
    def array_constraints, do: Ash.EmbeddableType.embedded_resource_array_constraints()

    def apply_constraints_array([], _constraints), do: {:ok, []}

    # Rejects arrays that duplicate any identity (or the primary key), then
    # re-reads the array through the shadow api to apply :load and :sort.
    def apply_constraints_array(term, constraints) do
      pkey = Ash.Resource.Info.primary_key(__MODULE__)
      unique_keys = Enum.map(Ash.Resource.Info.identities(__MODULE__), & &1.keys) ++ [pkey]

      case Enum.find(unique_keys, fn unique_key ->
             has_duplicates?(term, &Map.take(&1, unique_key))
           end) do
        nil ->
          query =
            __MODULE__
            |> Ash.Query.put_context(:data, term)
            |> Ash.Query.load(constraints[:load] || [])

          query =
            if constraints[:sort] do
              Ash.Query.sort(query, constraints[:sort])
            else
              query
            end

          ShadowApi.read(query)

        keys ->
          {:error, message: "items must be unique on keys %{keys}", keys: Enum.join(keys, ",")}
      end
    end

    # True when func maps two elements of list to the same value. Uses a
    # reduce_while that halts with an integer sentinel on the first duplicate.
    defp has_duplicates?(list, func) do
      list
      |> Enum.reduce_while(MapSet.new(), fn x, acc ->
        x = func.(x)

        if MapSet.member?(acc, x) do
          {:halt, 0}
        else
          {:cont, MapSet.put(acc, x)}
        end
      end)
      |> is_integer()
    end

    def handle_change_array(nil, new_values, constraints) do
      handle_change_array([], new_values, constraints)
    end

    def handle_change_array(old_values, nil, constraints) do
      handle_change_array(old_values, [], constraints)
    end

    # Destroys every old member whose primary key no longer appears in the
    # new array; errors are tagged with the member's index.
    def handle_change_array(old_values, new_values, constraints) do
      pkey_fields = Ash.Resource.Info.primary_key(__MODULE__)

      destroy_action =
        constraints[:destroy_action] ||
          Ash.Resource.Info.primary_action!(__MODULE__, :destroy).name

      old_values
      |> Enum.with_index()
      |> Enum.reject(fn {old_value, _} ->
        pkey = Map.take(old_value, pkey_fields)

        Enum.any?(new_values, fn new_value ->
          Map.take(new_value, pkey_fields) == pkey
        end)
      end)
      |> Enum.reduce_while(:ok, fn {record, index}, :ok ->
        case ShadowApi.destroy(record, action: destroy_action) do
          :ok ->
            {:cont, :ok}

          {:error, error} ->
            errors =
              error
              |> Ash.EmbeddableType.handle_errors()
              |> Enum.map(fn keyword ->
                Keyword.put(keyword, :index, index)
              end)

            {:halt, {:error, errors}}
        end
      end)
      |> case do
        :ok ->
          {:ok, new_values}

        {:error, error} ->
          {:error, error}
      end
    end

    # For each incoming map whose (castable) primary key matches an existing
    # member, runs the update action against that member; everything else is
    # passed through for creation. Order of the input array is preserved.
    def prepare_change_array(old_values, new_uncasted_values, constraints) do
      pkey_fields = Ash.Resource.Info.primary_key(__MODULE__)

      if Enum.all?(pkey_fields, fn pkey_field ->
           Ash.Resource.Info.attribute(__MODULE__, pkey_field).private?
         end) do
        {:ok, new_uncasted_values}
      else
        pkey_attributes =
          Enum.into(pkey_fields, %{}, fn field ->
            {field, Ash.Resource.Info.attribute(__MODULE__, field)}
          end)

        action =
          constraints[:update_action] ||
            Ash.Resource.Info.primary_action!(__MODULE__, :update).name

        new_uncasted_values
        |> Enum.with_index()
        |> Enum.reduce_while({:ok, []}, fn {new, index}, {:ok, new_uncasted_values} ->
          pkey =
            Enum.into(pkey_fields, %{}, fn pkey_field ->
              case fetch_key(new, pkey_field) do
                :error ->
                  {pkey_field, :error}

                {:ok, value} ->
                  attr = Map.get(pkey_attributes, pkey_field)

                  case Ash.Type.cast_input(attr.type, value, attr.constraints) do
                    {:ok, casted} ->
                      {pkey_field, casted}

                    _ ->
                      {pkey_field, :error}
                  end
              end
            end)

          if Enum.any?(Map.values(pkey), &(&1 == :error)) do
            {:cont, {:ok, [new | new_uncasted_values]}}
          else
            value_updating_from =
              Enum.find(old_values, fn old_value ->
                Map.take(old_value, pkey_fields) == pkey
              end)

            if value_updating_from do
              value_updating_from
              |> Ash.Changeset.for_update(action, new)
              |> ShadowApi.update()
              |> case do
                {:ok, value} ->
                  {:cont, {:ok, [value | new_uncasted_values]}}

                {:error, error} ->
                  errors =
                    error
                    |> Ash.EmbeddableType.handle_errors()
                    |> Enum.map(fn keyword ->
                      Keyword.put(keyword, :index, index)
                    end)

                  {:halt, {:error, errors}}
              end
            else
              {:cont, {:ok, [new | new_uncasted_values]}}
            end
          end
        end)
        |> case do
          {:ok, values} -> {:ok, Enum.reverse(values)}
          {:error, error} -> {:error, error}
        end
      end
    end
  end
end
# Entry point used by embedded resources: makes the resource an Ash.Type,
# defines a private ShadowApi so the generated callbacks can run actions
# against the resource itself, and injects both callback sets above.
defmacro define_embeddable_type do
  quote location: :keep do
    use Ash.Type

    parent = __MODULE__

    defmodule ShadowApi do
      @moduledoc false
      use Ash.Api

      @parent parent

      resources do
        resource @parent, warn_on_compile_failure?: false
      end
    end

    Ash.EmbeddableType.single_embed_implementation()
    Ash.EmbeddableType.array_embed_implementation()
  end
end
end
|
lib/ash/embeddable_type.ex
| 0.811265
| 0.404919
|
embeddable_type.ex
|
starcoder
|
defmodule Jeff.Command do
  @moduledoc """
  Commands are sent from an ACU to a PD

  | Code | Name         | Description                           | Data Type               |
  |------|--------------|---------------------------------------|-------------------------|
  | 0x60 | POLL         | Poll                                  | -                       |
  | 0x61 | ID           | ID Report Request                     | -                       |
  | 0x62 | CAP          | PD Capabilities Request               | [Capability]            |
  | 0x64 | LSTAT        | Local Status Report Request           | -                       |
  | 0x65 | ISTAT        | Input Status Report Request           | -                       |
  | 0x66 | OSTAT        | Output Status Report Request          | -                       |
  | 0x67 | RSTAT        | Reader Status Report Request          | -                       |
  | 0x68 | OUT          | Output Control Command                | OutputSettings          |
  | 0x69 | LED          | Reader Led Control Command            | LedSettings             |
  | 0x6A | BUZ          | Reader Buzzer Control Command         | BuzzerSettings          |
  | 0x6B | TEXT         | Text Output Command                   | TextSettings            |
  | 0x6E | COMSET       | PD Communication Config Command       | ComSettings             |
  | 0x73 | BIOREAD      | Scan and Send Biometric Data          | Requested Return Format |
  | 0x74 | BIOMATCH     | Scan and Match Biometric Template     | Biometric Template      |
  | 0x75 | KEYSET       | Encryption Key Set Command            | EncryptionKey           |
  | 0x76 | CHLNG        | Challenge/Secure Session Init Request | ChallengeData           |
  | 0x77 | SCRYPT       | Server Cryptogram                     | EncryptionData          |
  | 0x7B | ACURXSIZE    | Max ACU receive size                  | Buffer size             |
  | 0x7C | FILETRANSFER | Send data file to PD                  | File contents           |
  | 0x80 | MFG          | Manufacturer Specific Command         | Any                     |
  | 0xA1 | XWR          | Extended write data                   | APDU and details        |
  | 0xA2 | ABORT        | Abort PD operation                    | -                       |
  | 0xA3 | PIVDATA      | Get PIV Data                          | Object details          |
  | 0xA4 | GENAUTH      | Request Authenticate                  | Request details         |
  | 0xA5 | CRAUTH       | Request Crypto Response               | Challenge details       |
  | 0xA7 | KEEPACTIVE   | PD read activation                    | Time duration           |
  """

  @type t() :: %__MODULE__{
          address: byte(),
          code: byte(),
          data: binary(),
          name: atom(),
          caller: reference()
        }

  defstruct [:address, :code, :data, :name, :caller]

  alias Jeff.Command.{
    BuzzerSettings,
    ChallengeData,
    EncryptionServer,
    ComSettings,
    EncryptionKey,
    LedSettings,
    OutputSettings,
    TextSettings
  }

  # Command code => command name. The bare uppercase names are Elixir aliases,
  # i.e. the atoms :"Elixir.POLL", :"Elixir.ID", etc.
  @names %{
    0x60 => POLL,
    0x61 => ID,
    0x62 => CAP,
    0x64 => LSTAT,
    0x65 => ISTAT,
    0x66 => OSTAT,
    0x67 => RSTAT,
    0x68 => OUT,
    0x69 => LED,
    0x6A => BUZ,
    0x6B => TEXT,
    0x6E => COMSET,
    0x73 => BIOREAD,
    0x74 => BIOMATCH,
    0x75 => KEYSET,
    0x76 => CHLNG,
    0x77 => SCRYPT,
    0x7B => ACURXSIZE,
    0x7C => FILETRANSFER,
    0x80 => MFG,
    0xA1 => XWR,
    0xA2 => ABORT,
    0xA3 => PIVDATA,
    0xA4 => GENAUTH,
    0xA5 => CRAUTH,
    0xA7 => KEEPACTIVE
  }

  # Inverted lookup: command name => command code.
  @codes Map.new(@names, fn {code, name} -> {name, code} end)

  @doc """
  Builds a command struct for `name`, addressed to the PD at `address`.

  The `:caller` key is popped from `params` and stored on the struct;
  all remaining params are encoded into the command's data payload.
  """
  def new(address, name, params \\ []) do
    {caller, params} = Keyword.pop(params, :caller)
    code = code(name)
    data = encode(name, params)

    %__MODULE__{
      address: address,
      code: code,
      data: data,
      name: name,
      caller: caller
    }
  end

  # Per-command payload encoding. NOTE(review): commands listed in @names but
  # without an encode/2 clause (e.g. BIOREAD, MFG, XWR) raise
  # FunctionClauseError in new/3 — presumably not yet implemented; confirm
  # before relying on them.
  defp encode(POLL, _params), do: nil
  defp encode(ID, _params), do: <<0x00>>
  defp encode(CAP, _params), do: <<0x00>>
  defp encode(LSTAT, _params), do: nil
  defp encode(ISTAT, _params), do: nil
  defp encode(OSTAT, _params), do: nil
  defp encode(RSTAT, _params), do: nil
  defp encode(OUT, params), do: OutputSettings.encode(params)
  defp encode(LED, params), do: LedSettings.encode(params)
  defp encode(BUZ, params), do: BuzzerSettings.encode(params)
  defp encode(TEXT, params), do: TextSettings.encode(params)
  defp encode(COMSET, params), do: ComSettings.encode(params)
  defp encode(KEYSET, params), do: EncryptionKey.encode(params)
  defp encode(CHLNG, params), do: ChallengeData.encode(params)
  defp encode(SCRYPT, params), do: EncryptionServer.encode(params)
  defp encode(ACURXSIZE, size: size), do: <<size::size(16)-little>>
  defp encode(ABORT, _params), do: nil

  # Name <-> code lookups; return nil for unknown values.
  def code(name), do: @codes[name]
  def name(code), do: @names[code]
end
|
lib/jeff/command.ex
| 0.749821
| 0.625266
|
command.ex
|
starcoder
|
defmodule FFmpex do
@moduledoc """
Create and execute ffmpeg CLI commands.
The API is a builder, building up the list of options
per-file, per-stream(-per-file), and globally.
Note that adding options is backwards from using
the ffmpeg CLI; when using ffmpeg CLI, you specify the options
before each file.
But with FFmpex (this library), you add the file/stream first, then
add the relevant options afterward.
Example usage:
import FFmpex
use FFmpex.Options
command =
FFmpex.new_command
|> add_global_option(option_y())
|> add_input_file("/path/to/input.avi")
|> add_output_file("/path/to/output.avi")
|> add_stream_specifier(stream_type: :video)
|> add_stream_option(option_b("64k"))
|> add_file_option(option_maxrate("128k"))
|> add_file_option(option_bufsize("64k"))
:ok = execute(command)
"""
alias FFmpex.Command
alias FFmpex.File
alias FFmpex.Option
alias FFmpex.StreamSpecifier
@doc """
Begin a new blank (no options) ffmpeg command.
"""
def new_command do
  %Command{}
end
@doc """
Add an input file to the command.

Accepts either a prepared `FFmpex.File` struct or a plain path string.
"""
def add_input_file(%Command{files: files} = command, %File{} = file) do
  input = %File{file | type: :input}
  %Command{command | files: [input | files]}
end

def add_input_file(%Command{} = command, file_path) when is_binary(file_path) do
  add_input_file(command, %File{path: file_path})
end
@doc """
Add an output file to the command.

Accepts either a prepared `FFmpex.File` struct or a plain path string.
"""
def add_output_file(%Command{files: files} = command, %File{} = file) do
  output = %File{file | type: :output}
  %Command{command | files: [output | files]}
end

def add_output_file(%Command{} = command, file_path) when is_binary(file_path) do
  add_output_file(command, %File{path: file_path})
end
@doc """
Add a stream specifier to the most recent file.
The stream specifier is used as a target for per-stream options.

Example:

```
add_stream_specifier(command, stream_type: :video)
```

Options:

* `:stream_index` - 0-based integer index for the stream
* `:stream_type` - One of `:video`, `:video_without_pics`, `:audio`, `:subtitle`, `:data`, `:attachments`
* `:program_id` - ID for the program
* `:stream_id` - Stream id (e.g. PID in MPEG-TS container)
* `:metadata_key` - Match streams with the given metadata tag
* `:metadata_value` - Match streams with the given metadata value. Must also specify `:metadata_key`.
* `:usable` - Matches streams with usable configuration, the codec must be defined and the essential information such as video dimension or audio sample rate must be present.
"""
@spec add_stream_specifier(command :: Command.t, opts :: Keyword.t) :: Command.t
def add_stream_specifier(%Command{files: [current | rest]} = command, opts) do
  spec = struct(StreamSpecifier, opts)
  current = %File{current | stream_specifiers: [spec | current.stream_specifiers]}
  %Command{command | files: [current | rest]}
end
@doc """
Add a global option that applies to the entire command.

Raises `ArgumentError` if the option is not valid in the global context.
"""
def add_global_option(%Command{} = command, %Option{} = option) do
  validate_contexts!(option.contexts, :global)
  %Command{command | global_options: [option | command.global_options]}
end
@doc """
Add a per-file option to the command.
Applies to the most recently added file.

Raises `ArgumentError` if the option is not valid for the file's I/O type.
"""
def add_file_option(%Command{} = command, %Option{} = option) do
  [current | rest] = command.files
  validate_contexts!(option.contexts, current.type)
  updated = %File{current | options: [option | current.options]}
  %Command{command | files: [updated | rest]}
end
@doc """
Add a per-stream option to the command.
Applies to the most recently added stream specifier, of the most recently
added file.

Raises `ArgumentError` if the option is not valid for the file's I/O type.
"""
def add_stream_option(%Command{} = command, %Option{} = option) do
  [current_file | other_files] = command.files
  validate_contexts!(option.contexts, current_file.type)
  [current_spec | other_specs] = current_file.stream_specifiers
  updated_spec = %StreamSpecifier{current_spec | options: [option | current_spec.options]}
  updated_file = %File{current_file | stream_specifiers: [updated_spec | other_specs]}
  %Command{command | files: [updated_file | other_files]}
end
@doc """
Execute the command using ffmpeg CLI.
Returns `:ok` on success, or `{:error, {cmd_output, exit_status}}` on error.
"""
@spec execute(command :: Command.t) :: :ok | {:error, {Collectable.t, exit_status :: non_neg_integer}}
def execute(%Command{} = command) do
  {executable, args} = prepare(command)

  # stderr is merged into stdout so failures carry ffmpeg's diagnostics.
  case System.cmd(executable, args, stderr_to_stdout: true) do
    {_output, 0} -> :ok
    failure -> {:error, failure}
  end
end
@doc """
Prepares the command to be executed, by converting the `%Command{}` into
proper parameters to be feeded to `System.cmd/3` or `Port.open/2`.
Under normal circumstances `FFmpex.execute/1` should be used, use `prepare`
only when converted args are needed to be feeded in a custom execution method.
Returns `{ffmpeg_executable_path, list_of_args}`.
"""
@spec prepare(command :: Command.t) :: {binary(), list(binary)}
def prepare(%Command{files: files, global_options: global_options}) do
  # Global options come first, followed by the per-file argument list.
  global_args = Enum.flat_map(global_options, &arg_for_option/1)
  file_args = List.flatten(options_list(files))
  {ffmpeg_path(), global_args ++ file_args}
end
# Partitions the files by I/O type and delegates to the accumulator-based
# options_list/3 to build the final argument list.
defp options_list(files) do
  {input_files, rest} = Enum.split_with(files, &match?(%File{type: :input}, &1))
  output_files = Enum.filter(rest, &match?(%File{type: :output}, &1))
  options_list(input_files, output_files)
end
# Builds the flat ffmpeg argument list from input and output files.
# Output files are consumed first, each prepended to `acc`, then input files
# (each prefixed with "-i"). Because every step *prepends*, the inputs end up
# before the outputs in the final list, matching ffmpeg's
# `[per-file opts] -i input ... [per-file opts] output` ordering.
# NOTE(review): files were prepended when added, so this also restores the
# order in which they were added to the command — confirm against callers.
defp options_list(input_files, output_files, acc \\ [])
defp options_list([], [], acc), do: List.flatten(acc)
defp options_list(input_files, [output_file | output_files], acc) do
acc = [File.command_arguments(output_file), output_file.path | acc]
options_list(input_files, output_files, acc)
end
defp options_list([input_file | input_files], [], acc) do
acc = [File.command_arguments(input_file), "-i", input_file.path | acc]
options_list(input_files, [], acc)
end
# Converts an %Option{} into its command-line token(s).
# A flag option (no argument required, none given) yields just the name;
# an option carrying an argument yields [name, argument].
# NOTE(review): an option with `require_arg: true` and `argument: nil`
# matches neither clause and raises FunctionClauseError — presumably that
# combination is rejected upstream; confirm.
defp arg_for_option(%Option{name: name, require_arg: false, argument: nil}) do
~w(#{name})
end
defp arg_for_option(%Option{name: name, argument: arg}) when not is_nil(arg) do
~w(#{name} #{arg})
end
# Ensures an option is applicable in the given context (:global, :input or
# :output). Returns :ok, or raises ArgumentError with a descriptive message.
# Fixes: the second clause previously returned `nil` on success (inconsistent
# with the first clause's :ok) and raised a bare ArgumentError with no message.
defp validate_contexts!(:unspecified, _), do: :ok
defp validate_contexts!(contexts, required) when is_list(contexts) do
  if required in contexts do
    :ok
  else
    raise ArgumentError, "option not valid in #{inspect(required)} context"
  end
end
# Read ffmpeg path from config. If unspecified, assume `ffmpeg` is in env $PATH.
defp ffmpeg_path do
  configured = Application.get_env(:ffmpex, :ffmpeg_path)

  case configured do
    nil -> System.find_executable("ffmpeg")
    path -> path
  end
end
end
|
lib/ffmpex.ex
| 0.823719
| 0.479686
|
ffmpex.ex
|
starcoder
|
defmodule BSV.BlockHeader do
  @moduledoc """
  A block header is an 80 byte packet of information providing a summary of the
  `t:BSV.Block.t/0`.

  Contained within the block header is a Merkle root - the result of hashing all
  of the transactions contained in the block into a tree-like structure known as
  a Merkle tree. Given a transaction and `t:BSV.MerkleProof.t/0`, we can verify
  the transaction is contained in a block without downloading the entire block.
  """
  alias BSV.{Block, Serializable}
  import BSV.Util, only: [decode: 2, encode: 2]

  defstruct [:version, :prev_hash, :merkle_root, :time, :bits, :nonce]

  @typedoc "Block header struct"
  @type t() :: %__MODULE__{
    version: non_neg_integer(),
    prev_hash: <<_::256>>,
    merkle_root: Block.merkle_root(),
    time: non_neg_integer(),
    bits: non_neg_integer(),
    nonce: non_neg_integer()
  }

  @doc """
  Parses the given binary into a `t:BSV.BlockHeader.t/0`.

  Returns the result in an `:ok` / `:error` tuple pair.

  ## Options

  The accepted options are:

  * `:encoding` - Optionally decode the binary with either the `:base64` or `:hex` encoding scheme.
  """
  @spec from_binary(binary(), keyword()) :: {:ok, t()} | {:error, term()}
  def from_binary(data, opts \\ []) when is_binary(data) do
    encoding = Keyword.get(opts, :encoding)

    with {:ok, data} <- decode(data, encoding),
         {:ok, header, _rest} <- Serializable.parse(%__MODULE__{}, data)
    do
      {:ok, header}
    end
  end

  @doc """
  Parses the given binary into a `t:BSV.BlockHeader.t/0`.

  As `from_binary/2` but returns the result or raises an exception.
  """
  @spec from_binary!(binary(), keyword()) :: t()
  def from_binary!(data, opts \\ []) when is_binary(data) do
    case from_binary(data, opts) do
      {:ok, header} ->
        header

      {:error, error} ->
        raise BSV.DecodeError, error
    end
  end

  @doc """
  Serialises the given `t:BSV.BlockHeader.t/0` into a binary.

  ## Options

  The accepted options are:

  * `:encoding` - Optionally encode the binary with either the `:base64` or `:hex` encoding scheme.
  """
  # Fixed: the spec previously declared only to_binary/1, omitting the
  # options argument introduced by the `\\ []` default.
  @spec to_binary(t(), keyword()) :: binary()
  def to_binary(%__MODULE__{} = header, opts \\ []) do
    encoding = Keyword.get(opts, :encoding)

    header
    |> Serializable.serialize()
    |> encode(encoding)
  end

  defimpl Serializable do
    @impl true
    def parse(header, data) do
      # Bitcoin block header layout: 4 + 32 + 32 + 4 + 4 + 4 = 80 bytes,
      # all integers little-endian. Anything shorter fails the match.
      with <<
        version::little-32,
        prev_hash::binary-size(32),
        merkle_root::binary-size(32),
        time::little-32,
        bits::little-32,
        nonce::little-32,
        rest::binary
      >> <- data
      do
        {:ok, struct(header, [
          version: version,
          prev_hash: prev_hash,
          merkle_root: merkle_root,
          time: time,
          bits: bits,
          nonce: nonce
        ]), rest}
      else
        _data ->
          {:error, :invalid_header}
      end
    end

    @impl true
    def serialize(%{
      version: version,
      prev_hash: prev_hash,
      merkle_root: merkle_root,
      time: time,
      bits: bits,
      nonce: nonce
    }) do
      <<
        version::little-32,
        prev_hash::binary,
        merkle_root::binary,
        time::little-32,
        bits::little-32,
        nonce::little-32
      >>
    end
  end
end
|
lib/bsv/block_header.ex
| 0.897022
| 0.783575
|
block_header.ex
|
starcoder
|
defmodule RemoteIp.Headers do
  @moduledoc """
  Entry point for parsing any type of forwarding header.
  """

  require Logger

  @doc """
  Selects the appropriate headers and parses IPs out of them.

  * `headers` - The entire list of the `Plug.Conn` `req_headers`
  * `allowed` - The list of headers `RemoteIp` is configured to look for,
    converted to a `MapSet` for efficiency

  The actual parsing is delegated to the `RemoteIp.Headers.*` submodules:

  * `"forwarded"` is parsed by `RemoteIp.Headers.Forwarded`
  * everything else is parsed by `RemoteIp.Headers.Generic`
  """

  @type key :: String.t
  @type value :: String.t
  @type header :: {key, value}
  # Fixed: was `%MapSet{}` — `MapSet.t()` is the proper type for specs.
  @type allowed :: MapSet.t()
  @type ip :: :inet.ip_address

  @spec parse([header], allowed) :: [ip]
  def parse(headers, %MapSet{} = allowed) when is_list(headers) do
    maybe_log_debug(fn -> parsing(headers) end)
    ips = headers |> allow(allowed) |> parse_each
    maybe_log_debug(fn -> parsed(ips) end)
    ips
  end

  # Keeps only the headers named in `allowed`.
  defp allow(headers, allowed) do
    filtered = Enum.filter(headers, &allow?(&1, allowed))
    maybe_log_debug(fn -> considering(filtered) end)
    filtered
  end

  defp allow?({header, _}, allowed) do
    MapSet.member?(allowed, header)
  end

  defp parse_each(headers) do
    Enum.flat_map(headers, &parse_ips/1)
  end

  # RFC 7239 "Forwarded" gets its dedicated parser; everything else is
  # treated as a generic comma-separated list of IPs.
  defp parse_ips({"forwarded", value}) when is_binary(value) do
    RemoteIp.Headers.Forwarded.parse(value)
  end

  defp parse_ips({header, value}) when is_binary(header) and is_binary(value) do
    RemoteIp.Headers.Generic.parse(value)
  end

  defp parsing(req_headers) do
    [
      inspect(__MODULE__),
      " is parsing IPs from the request headers ",
      inspect(req_headers, pretty: true)
    ]
  end

  # NOTE(review): this is `def` while its siblings parsing/1 and parsed/1 are
  # `defp` — almost certainly an accidental omission of the `p`. Kept public
  # for backward compatibility but hidden from docs; consider making it
  # private in the next major version.
  @doc false
  def considering(req_headers) do
    [
      inspect(__MODULE__),
      " is only considering the request headers ",
      inspect(req_headers, pretty: true)
    ]
  end

  defp parsed(ips) do
    [
      inspect(__MODULE__),
      " parsed the request headers into the IPs ",
      inspect(ips, pretty: true)
    ]
  end

  # Lazily logs (the zero-arity fun defers building the iodata) only when
  # the :debug flag is set in the :remote_ip application env.
  defp maybe_log_debug(any) do
    if Application.get_env(:remote_ip, :debug), do: Logger.debug(any)
  end
end
|
lib/remote_ip/headers.ex
| 0.767864
| 0.445107
|
headers.ex
|
starcoder
|
if Code.ensure_loaded?(Plug) do
  defmodule Guardian.Plug.Pipeline do
    @moduledoc """
    Helps to build plug pipelines for use with Guardian and associated plugs.
    All Guardian provided plugs have a number of features.
    1. They take a `:key` option to know where to store information in the session and connection
    2. They require a reference to the implementation (the module that `use Guardian`)
    3. They require a reference to an error handling module
    These references are passed through the connection so they must be put in place
    before the Guardian Plugs. By using a pipeline this is taken care of for you.
    The easiest way to use `Guardian.Plug.Pipeline` is to create a module that defines your pipeline.
    ```elixir
    defmodule MyApp.AuthPipeline do
      use Guardian.Plug.Pipeline, otp_app: :my_app,
                                  module: MyApp.Tokens,
                                  error_handler: MyApp.AuthErrorHandler
      plug Guardian.Plug.VerifySession, claims: @claims
      plug Guardian.Plug.VerifyHeader, claims: @claims, realm: "Bearer"
      plug Guardian.Plug.EnsureAuthenticated
      plug Guardian.Plug.LoadResource, ensure: true
    end
    ```
    When you want to use the pipeline you just use it like a normal plug.
    ```elixir
    plug MyApp.AuthPipeline
    ```
    This pipeline will look for tokens in either the session (it's ok if it's not loaded)
    followed by the header if one wasn't found in the session.
    We then ensure that we found a token and fail if not.
    Given that we found a token, we then attempt to load the resource the token
    refers to, failing if one is not found.
    ### Customizing your pipeline
    Once you've created a pipeline, you can customize it when you call it with options.
    ```elixir
    plug MyApp.AuthPipeline, module: MyApp.ADifferentGuardianModule
    # OR
    plug MyApp.AuthPipeline, key: :impersonate
    ```
    ### Options
    You can provide options to the pipeline when you `use Guardian.Plug.Pipeline`
    or you can provide them when you call the plug.
    Additionally, for every option other than `:otp_app` you can use elixir
    configuration, the `use` options, or inline options.
    * `:otp_app` - The otp app where the pipeline modules can be found
    * `:module` - The `Guardian` implementation module
    * `:error_handler` - An error handling module. See `Guardian.Plug.Errors`
    * `:key` - The key to use
    ### Keys
    Using keys allows you to specifiy locations in the session/connection where
    the tokens and resources will be placed. This allows multiple authenticated
    tokens to be in play for a single request. This is useful for impersonation or
    higher security areas where you can have a specific set of privileges and
    still be logged in.
    ### Error handler
    When using plugs, you'll need to specify an error handler
    The error_handler requires an `auth_error` function that receives the conn
    and the error reason
    ### Inline pipelines
    If you want to define your pipeline inline, you can do so by using
    `Guardian.Plug.Pipeline` as a plug itself.
    You _must_ supply the module and error handler inline if you do this.
    ```elixir
    plug Guardian.Plug.Pipeline, module: MyApp.Tokens,
                                 error_handler: MyApp.AuthErrorHandler
    plug Guardian.VerifyHeader, realm: "Bearer"
    ```
    Inline pipelines are also good to change the error handler that you want to use.
    Note that you must set the pipeline before using other guardian plugs.
    ```elixir
    # Use the MyApp.AuthErrorHandler for downstream Guardian plugs
    plug Guardian.Plug.Pipeline, module: MyApp.Tokens,
                                 error_handler: MyApp.AuthErrorHandler
    plug Guardian.VerifyHeader, realm: "Bearer"
    # Now change out the error handler for plugs downstream of this one.
    plug Guardian.Plug.Pipeline, error_handler: MyApp.SpecialAuthErrorHandler
    ```
    """
    import Plug.Conn

    alias Guardian.Plug, as: GPlug

    @doc """
    Create your very own `Guardian.Plug.Pipeline`
    Using this macro will make your module into a plug builder.
    It will provide your pipeline with the Guardian implementation module and error
    handler so that it can be used within your pipeline and downstream.
    """
    defmacro __using__(opts \\ []) do
      alias Guardian.Plug.Pipeline

      quote do
        use Plug.Builder

        alias Guardian.Config, as: GConfig
        alias Guardian.Plug, as: GPlug

        import Pipeline

        plug(:put_modules)

        def init(options) do
          new_opts =
            options
            |> Keyword.merge(unquote(opts))
            |> config()

          unless Keyword.get(new_opts, :otp_app), do: raise_error(:otp_app)
          new_opts
        end

        # Resolves options against the otp_app's application environment,
        # with explicitly-passed options taking precedence.
        defp config(opts) do
          case Keyword.get(opts, :otp_app) do
            nil ->
              opts

            otp_app ->
              otp_app
              |> Application.get_env(__MODULE__, [])
              |> Keyword.merge(opts)
          end
        end

        # Looks up a single option, falling back to `default` when absent.
        defp config(opts, key, default \\ nil) do
          unquote(opts)
          |> Keyword.merge(opts)
          |> config()
          # Fixed: `default` was previously ignored (Keyword.get/2), so e.g.
          # config(opts, :key, GPlug.default_key()) silently yielded nil.
          |> Keyword.get(key, default)
          |> GConfig.resolve_value()
        end

        defp put_modules(conn, opts) do
          pipeline_opts = [
            module: config(opts, :module),
            error_handler: config(opts, :error_handler),
            key: config(opts, :key, GPlug.default_key())
          ]

          Pipeline.call(conn, pipeline_opts)
        end

        @spec raise_error(atom()) :: no_return
        defp raise_error(key), do: raise("Config `#{key}` is missing for #{__MODULE__}")
      end
    end

    def init(opts), do: opts

    # Stashes the module/error handler/key in conn.private so downstream
    # Guardian plugs can find them; nil values leave existing ones untouched.
    def call(conn, opts) do
      conn
      |> maybe_put_key(:guardian_module, Keyword.get(opts, :module))
      |> maybe_put_key(:guardian_error_handler, Keyword.get(opts, :error_handler))
      |> maybe_put_key(:guardian_key, Keyword.get(opts, :key))
    end

    def put_key(conn, key), do: put_private(conn, :guardian_key, key)
    def put_module(conn, module), do: put_private(conn, :guardian_module, module)
    def put_error_handler(conn, module), do: put_private(conn, :guardian_error_handler, module)

    def current_key(conn), do: conn.private[:guardian_key]
    def current_module(conn), do: conn.private[:guardian_module]
    def current_error_handler(conn), do: conn.private[:guardian_error_handler]

    # Option value wins over the connection's stored value; the key
    # additionally falls back to Guardian's default key.
    def fetch_key(conn, opts),
      do: Keyword.get(opts, :key, current_key(conn)) || GPlug.default_key()

    def fetch_module(conn, opts), do: Keyword.get(opts, :module, current_module(conn))

    def fetch_module!(conn, opts) do
      module = fetch_module(conn, opts)

      if module do
        module
      else
        raise_error(:module)
      end
    end

    def fetch_error_handler(conn, opts),
      do: Keyword.get(opts, :error_handler, current_error_handler(conn))

    def fetch_error_handler!(conn, opts) do
      module = fetch_error_handler(conn, opts)

      if module do
        module
      else
        raise_error(:error_handler)
      end
    end

    defp maybe_put_key(conn, _, nil), do: conn
    defp maybe_put_key(conn, key, v), do: put_private(conn, key, v)

    defp raise_error(key), do: raise("`#{key}` not set in Guardian pipeline")
  end
end
|
lib/guardian/plug/pipeline.ex
| 0.77081
| 0.818556
|
pipeline.ex
|
starcoder
|
defmodule Numy.Float do
  @moduledoc """
  Floating point number utilities.
  """

  @doc "Converts any number to a float."
  @spec from_number(number) :: float
  def from_number(n) do
    n / 1 # idiomatic way to convert a number to float
  end

  @doc "Returns -1, 0 or 1 according to the sign of the number."
  @spec sign(number) :: -1 | 0 | 1
  # Guard-based zero test covers both integer 0 and float 0.0/-0.0 without
  # relying on float-literal patterns (which warn on modern Elixir).
  def sign(x) when x == 0, do: 0
  def sign(x) when x < 0, do: -1
  def sign(_), do: 1

  @doc """
  Return `true` if sign bit is 1 in the binary representation of the number.
  IEEE Long Real 64-bit binary format:
  - 1 bit for the sign,
  - 11 bits for the exponent,
  - and 52 bits for the mantissa
  """
  @spec signbit(float) :: boolean
  def signbit(x) when is_float(x) do
    case <<x :: float>> do
      <<1 :: 1, _ :: bitstring>> -> true
      _ -> false
    end
  end

  @doc """
  Convert bit-by-bit 64-bit float to 64-bit integer.
  """
  @spec as_uint64(float) :: non_neg_integer
  def as_uint64(x) when is_float(x) do
    <<uint64 :: 64>> = <<x :: float>>
    uint64
  end

  @doc "Returns `dst` carrying the sign bit of `src`."
  @spec copysignbit(float, float) :: float
  def copysignbit(src, dst) when is_float(src) and is_float(dst) do
    <<_ :: 1, dst_rest :: bitstring>> = <<dst :: float>>
    <<src_sign :: 1, _ :: bitstring>> = <<src :: float>>
    <<ret :: float>> = <<src_sign :: 1, dst_rest :: bitstring>>
    ret
  end

  @doc """
  [ULP](https://en.wikipedia.org/wiki/Unit_in_the_last_place) difference.
  """
  @spec ulp_diff(float, float) :: integer
  def ulp_diff(a, b), do: abs(as_uint64(a) - as_uint64(b))

  @doc """
  Equality comparison for floating point numbers.
  Based on [this blog post](
  https://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition/)
  by <NAME>.
  """
  @spec equal?(number, number, float, pos_integer) :: boolean
  def equal?(a, b, epsilon \\ 1.0e-9, max_ulps \\ 4)

  def equal?(a, b, epsilon, max_ulps)
      when is_number(a) and is_number(b) and is_integer(max_ulps) do
    a = :erlang.float(a)
    b = :erlang.float(b)

    cond do
      # Absolute-epsilon check MUST come first: per the referenced algorithm
      # it handles values near zero, where ULP comparison (and the sign-bit
      # shortcut) break down. Previously the sign-bit test ran first, so e.g.
      # 1.0e-12 and -1.0e-12 compared unequal despite being within epsilon.
      abs(a - b) <= epsilon -> true
      # Differing signs (and not near zero) can never be ULP-close.
      signbit(a) != signbit(b) -> false
      ulp_diff(a, b) <= max_ulps -> true
      true -> false
    end
  end

  def equal?(al, bl, epsilon, max_ulps)
      when is_list(al) and is_list(bl) and is_integer(max_ulps) do
    Enum.zip(List.flatten(al), List.flatten(bl))
    |> Enum.all?(fn {a, b} -> equal?(a, b, epsilon, max_ulps) end)
  end

  @doc "Builds a list of `size` uniform random floats in [0, 1)."
  def make_list_randoms(size, list \\ [])
  def make_list_randoms(0, list), do: list

  def make_list_randoms(size, list) when is_integer(size) do
    make_list_randoms(size - 1, [:rand.uniform() | list])
  end
end
|
lib/float.ex
| 0.860633
| 0.648939
|
float.ex
|
starcoder
|
defmodule Statisaur.Combinatorics do
  @moduledoc """
  Contains functions for analyzing finite descrete structures.
  """

  @doc ~S"""
  Finds the factorial of a given non-negative integer.
  The factorial of a non-negative integer n, is the product of all positive integers less than or equal to n.
  The value of factorial(0) is 1, according to the convention for an empty product.
  ### Example
  The factorial of 5 is (5 * 4 * 3 * 2 * 1), or 120.
      iex(1)> Statisaur.Combinatorics.factorial(5)
      120
  The factorial of 0 is 1, according to the convention for an empty product.
      iex(2)> Statisaur.Combinatorics.factorial(0)
      1
  Statisaur will raise an error in the case of negative integers.
      iex(3)> Statisaur.Combinatorics.factorial(-5)
      ** (ArgumentError) argument must be positive integer
  """
  @spec factorial(non_neg_integer) :: non_neg_integer
  def factorial(0) do
    1
  end

  def factorial(n) when is_integer(n) and n > 0 do
    Enum.reduce(1..n, 1, fn x, acc -> x * acc end)
  end

  def factorial(_term) do
    raise ArgumentError, "argument must be positive integer"
  end

  @doc ~S"""
  Finds the binomial coefficient of two integers.
  The binomial coefficient (n; k) is the number of ways of picking k unordered outcomes from n \
  possibilities, also known as a combination or combinatorial number.
  ### Examples
  The number of outcomes of two from five possibilities is ten.
      iex(1)> Statisaur.Combinatorics.n_choose_k(5, 2)
      10
  The number of outcomes of eight from twenty possibilities is 125970.
      iex(2)> Statisaur.Combinatorics.n_choose_k(20, 8)
      125970
  """
  @spec n_choose_k(non_neg_integer, non_neg_integer) :: non_neg_integer
  def n_choose_k(n, k) when n >= k and k >= 0 and is_integer(n) and is_integer(k) do
    # n!/(k!(n-k)!) computed as a falling factorial to avoid the huge
    # intermediate value of n!.
    div(falling_factorial(n, k), factorial(k))
  end

  def n_choose_k(_n, _k) do
    raise ArgumentError, "arguments must be positive integers"
  end

  @doc ~S"""
  The falling factorial, or falling sequential product, is defined as:
  `x * (x - 1) * (x - 2) * ... (x - n - 1)`
  Compare also to `rising_factorial/2`.
  ### Examples
  The falling factorial of (5, 4) would be `5 * 4 * 3 * 2`
      iex(1)> Statisaur.Combinatorics.falling_factorial(5, 4)
      120
  The falling factorial of (5, 3) would be `5 * 4 * 3`
      iex(2)> Statisaur.Combinatorics.falling_factorial(5, 3)
      60
  The falling factorial of (5, 2) would be `5 * 4`
      iex(3)> Statisaur.Combinatorics.falling_factorial(5, 2)
      20
  With a second argument of `1`, the return value is simply the first argument
      iex(4)> Statisaur.Combinatorics.falling_factorial(5, 1)
      5
  Listing the second input as `0` returns 1
      iex(5)> Statisaur.Combinatorics.falling_factorial(5, 0)
      1
  The return value is `0` when the second argument is larger than the first, and
  both arguments are positive
      iex(6)> Statisaur.Combinatorics.falling_factorial(5, 7)
      0
  When the second argument is negative, the function returns positive values between 1 and 0
      iex(7)> Statisaur.Combinatorics.falling_factorial(1, -2)
      0.16666666666666666
  When the both arguments are negative, the function raises an ArithmeticError.
  """
  @spec falling_factorial(integer, integer) :: integer | float
  def falling_factorial(n, 0) when is_integer(n) do
    1
  end

  def falling_factorial(n, k) when is_integer(n) and is_integer(k) and k < 0 do
    m = abs(k)
    1 / falling_factorial(n + m, m)
  end

  def falling_factorial(n, k) when is_integer(n) and is_integer(k) do
    # Product n * (n-1) * ... * (n-k+1), iterated over an ascending offset
    # range. Fixes: the previous `n..(n - (k - 1))` built a decreasing range
    # with an implicit negative step, which is deprecated in modern Elixir.
    Enum.reduce(0..(k - 1), 1, fn i, acc -> acc * (n - i) end)
  end

  def falling_factorial(_n, _k) do
    raise ArgumentError, "arguments must be integers"
  end

  @doc ~S"""
  The rising factorial, also known as the 'rising sequential product' or 'Pochhammer polynomial',
  is defined as: `x * (x + 1) * (x + 2) ... (x + n - 1)`
  Compare also to `falling_factorial/2`.
  ### Examples
  `rising_factorial(5, 4)` is equivalent to `5 * (5 + 1) * (5 + 2) * (5 + 3)`
      iex(1)> Statisaur.Combinatorics.rising_factorial(5, 4)
      1680
  The return value is always `1` when the second argument is `0`
      iex(2)> Statisaur.Combinatorics.rising_factorial(5, 0)
      1
  The function raises an `ArithmeticError` when the second argument is negative
  """
  @spec rising_factorial(integer, integer) :: integer
  def rising_factorial(n, 0) when is_integer(n) do
    1
  end

  def rising_factorial(n, k) when is_integer(n) and is_integer(k) and k < 0 do
    raise ArithmeticError
  end

  def rising_factorial(n, k) when is_integer(n) and is_integer(k) do
    # Product n * (n+1) * ... * (n+k-1); range is always ascending here.
    Enum.reduce(n..(n + (k - 1)), 1, fn x, acc -> x * acc end)
  end

  def rising_factorial(_n, _k) do
    raise ArgumentError, "arguments must be integers"
  end
end
|
lib/statisaur/combinatorics.ex
| 0.930592
| 0.849097
|
combinatorics.ex
|
starcoder
|
defmodule Akin.Helpers.InitialsComparison do
@moduledoc """
Function specific to the comparison and matching of names. Returns matching names and metrics.
"""
import Akin.Util, only: [ngram_tokenize: 2]
alias Akin.Corpus
# Compares two tokenized names via their initials. The initial comparison may
# be rescued by two fallbacks: cartesian-product initial matching and name
# permutation matching (each only runs when the previous stage returned false).
def similarity(%Corpus{} = left, %Corpus{} = right) do
left_initials = initials(left) |> Enum.sort()
right_initials = initials(right) |> Enum.sort()
left_i_count = Enum.count(left_initials)
right_i_count = Enum.count(right_initials)
left_c_intials =
cartesian_initials(left_initials, left.list)
|> List.flatten()
|> Enum.uniq()
right_c_intials =
cartesian_initials(right_initials, right.list)
|> List.flatten()
|> Enum.uniq()
# Hyphenated/apostrophed names get special handling: a single unmatched
# initial is forgiven when the extra name part is a permutation-join of the
# other side's parts (e.g. "di costanzo" vs "dicostanzo").
if String.contains?(left.original, ["-", "'"]) or String.contains?(right.original, ["-", "'"]) do
case {left_i_count, right_i_count} do
{li, ri} when li == ri -> left_initials == right_initials
{li, ri} when li > ri ->
case left_initials -- right_initials do
[] -> true
[_i] ->
combined_hyphenation = right.list -- left.list
full_permutations = get_permuations(left.list)
combined_hyphenation -- full_permutations == []
_ -> false
end
{li, ri} when li < ri ->
case right_initials -- left_initials do
[] -> true
[_i] ->
combined_hyphenation = left.list -- right.list
full_permutations = get_permuations(right.list)
combined_hyphenation -- full_permutations == []
_ -> false
end
end
else
# Without hyphenation: equal counts require identical sorted initials;
# otherwise the smaller set must be a subset of the larger.
case {left_i_count, right_i_count} do
{li, ri} when li == ri -> left_initials == right_initials
{li, ri} when li > ri -> left_initials -- right_initials == []
{li, ri} when li < ri -> right_initials -- left_initials == []
end
end
|> cartesian_match(left_c_intials, right_c_intials)
|> permutation_match(left.list, right.list)
end
# NOTE(review): arity-3 clause — similarity/2 above can never dispatch here;
# looks like a leftover from an older signature. Confirm no external callers
# before removing.
def similarity(_, _, false), do: false
# First letter of every token in the corpus.
defp initials(%Corpus{list: lists}) do
Enum.map(lists, fn list -> String.at(list, 0) end)
end
defp initials(list) when is_list(list) do
Enum.map(list, fn l -> String.at(l, 0) end)
end
defp initials(_), do: []
# Tokens that *are* initials, i.e. single-character name parts.
defp actual_initials(list) do
Enum.filter(list, fn l -> String.length(l) == 1 end)
end
# Builds every n-gram (n = 1..count) of the joined initials, merges in the
# original tokens, and removes the bare initials themselves.
def cartesian_initials(initials, list) do
cartesian =
for c <- 1..Enum.count(initials) do
ngram_tokenize(Enum.join(initials, ""), c)
end
|> List.flatten()
c = [cartesian | list] |> List.flatten() |> Enum.uniq()
c -- initials
end
defp cartesian_match(true, _, _), do: true
# do any of the cartesian products of the inital letters match?
# (requires at least two shared elements to count as a match)
defp cartesian_match(false, left, right) do
Enum.filter(left, fn l -> l in right end)
|> Enum.count()
|> Kernel.>(1)
end
defp permutation_match(true, _, _), do: true
# do any of the permutations of the names match? (i.e. alfonso di costanzo & a dicostanzo)
# Single-character tokens are stripped before permuting; again at least two
# shared permutations are required.
defp permutation_match(false, left, right) do
left_ai = actual_initials(left)
right_ai = actual_initials(right)
left_permuations = get_permuations(left -- left_ai)
right_permuations = get_permuations(right -- right_ai)
Enum.filter(left_permuations, fn lp -> lp in right_permuations end)
|> Enum.count()
|> Kernel.>(1)
end
# All ordered two-token concatenations of the list's elements.
# NOTE(review): name carries a typo ("permuations"); private, so renaming is
# safe if desired.
defp get_permuations(list) do
Enum.reduce(list, [], fn l, acc ->
ps = list -- [l]
[Enum.map(ps, fn p -> l <> p end) | acc]
end)
|> List.flatten()
end
end
|
lib/akin/algorithms/helpers/initials_comparison.ex
| 0.773644
| 0.696629
|
initials_comparison.ex
|
starcoder
|
defmodule CTE.Adapter.Memory do
@moduledoc """
Basic implementation of the CTE, using the memory for persisting the models. Adapter provided as a convenient way of using CTE in tests or during the development
"""
use CTE.Adapter
# Public API: thin GenServer.call wrappers; all work happens in handle_call/3.
@doc false
def descendants(pid, ancestor, opts) do
GenServer.call(pid, {:descendants, ancestor, opts})
end
@doc false
def ancestors(pid, descendant, opts) do
GenServer.call(pid, {:ancestors, descendant, opts})
end
@doc false
def insert(pid, leaf, ancestor, opts) do
GenServer.call(pid, {:insert, leaf, ancestor, opts})
end
@doc false
def delete(pid, leaf, opts) do
# limit: 1 means "delete just this leaf"; anything else deletes the subtree.
leaf? = Keyword.get(opts, :limit, 0) == 1
GenServer.call(pid, {:delete, leaf, leaf?, opts})
end
@doc false
def move(pid, leaf, ancestor, opts) do
GenServer.call(pid, {:move, leaf, ancestor, opts})
end
@doc false
def tree(pid, leaf, opts) do
GenServer.call(pid, {:tree, leaf, opts})
end
# Returns the sub-tree rooted at `leaf`: the closure-table paths restricted
# to leaf's descendants, plus the node map for those ids.
@doc false
def handle_call({:tree, leaf, opts}, _from, config) do
%CTE{paths: paths, nodes: nodes} = config
descendants_opts = [itself: true] ++ Keyword.take(opts, [:depth])
descendants = _descendants(leaf, descendants_opts, config)
subtree =
paths
|> Enum.filter(fn [ancestor, descendant, _] ->
ancestor in descendants && descendant in descendants
end)
|> Enum.map(&ignore_depth/1)
nodes =
subtree
|> Enum.reduce(%{}, fn [ancestor, descendant], acc ->
Map.merge(acc, %{
ancestor => Map.get(nodes, ancestor),
descendant => Map.get(nodes, descendant)
})
end)
{:reply, {:ok, %{paths: subtree, nodes: nodes}}, config}
end
# Leaf-only delete: drop every path ending at `leaf`.
@doc false
def handle_call({:delete, leaf, true, _opts}, _from, config) do
%CTE{paths: paths} = config
paths = Enum.filter(paths, fn [_ancestor, descendant, _] -> descendant != leaf end)
{:reply, :ok, %{config | paths: paths}}
end
# Subtree delete.
# NOTE(review): this replaces `config.paths` (a list of [ancestor,
# descendant, depth] triples) with a filtered list of descendant *ids* —
# the shapes don't match the leaf-delete clause above. Looks like a bug;
# confirm intended behavior against the other CTE adapters before relying
# on subtree deletion here.
@doc false
def handle_call({:delete, leaf, _subtree, opts}, _from, config) do
opts = Keyword.put(opts, :itself, true)
descendants = _descendants(leaf, opts, config)
paths = Enum.filter(descendants, &(&1 != leaf))
{:reply, :ok, %{config | paths: paths}}
end
# Re-parents `leaf` (and its subtree) under `ancestor`: removes the paths
# linking leaf's subtree to its former ancestors, then cross-joins the new
# ancestor chain with the subtree, recomputing depths.
@doc false
def handle_call({:move, leaf, ancestor, _opts}, _from, config) do
%CTE{paths: paths} = config
ex_ancestors = _ancestors(leaf, [itself: true], config)
{descendants_paths, _} = descendants_collector(leaf, [itself: true], config)
descendants = Enum.map(descendants_paths, fn [_, descendant, _] -> descendant end)
paths_with_leaf =
paths
|> Enum.filter(fn [ancestor, descendant, _] ->
ancestor in ex_ancestors and descendant in descendants and ancestor != descendant
end)
paths_without_leaf = Enum.filter(paths, &(&1 not in paths_with_leaf))
{new_ancestors_paths, _} =
ancestors_collector(ancestor, [itself: true], %{config | paths: paths_without_leaf})
new_paths =
for [ancestor, _, super_tree_depth] <- [[leaf, leaf, -1] | new_ancestors_paths],
[_, descendant, subtree_depth] <- descendants_paths,
into: [] do
[ancestor, descendant, super_tree_depth + subtree_depth + 1]
end
|> Enum.reverse()
{:reply, :ok, %{config | paths: paths_without_leaf ++ new_paths}}
end
@doc false
def handle_call({:insert, leaf, ancestor, opts}, _from, config) do
with {:ok, new_paths, config} <- _insert(leaf, ancestor, opts, config) do
{:reply, {:ok, new_paths}, config}
else
err -> {:reply, {:error, err}, config}
end
end
@doc false
def handle_call({:ancestors, descendant, opts}, _from, config) do
result =
_ancestors(descendant, opts, config)
|> Enum.reverse()
{:reply, {:ok, result}, config}
end
@doc false
def handle_call({:descendants, ancestor, opts}, _from, config) do
result =
_descendants(ancestor, opts, config)
|> Enum.reverse()
{:reply, {:ok, result}, config}
end
# Collect -> depth-filter -> map to ids (or nodes) pipeline.
@doc false
defp _descendants(ancestor, opts, config) do
descendants_collector(ancestor, opts, config)
|> depth(opts, config)
|> selected(opts, config)
end
# Builds a collector closure matching paths that start at `ancestor`; the
# `itself?` flag controls whether the self-path [a, a, _] is included.
@doc false
defp descendants_collector(ancestor, opts, config) do
mapper = fn paths -> Enum.map(paths, fn [_, descendant, _] -> descendant end) end
fn path, {acc_paths, _mapper, size} = acc, itself? ->
case path do
[^ancestor, ^ancestor, _] when not itself? ->
acc
[^ancestor, _descendant, _depth] = descendants ->
{[descendants | acc_paths], mapper, size + 1}
_ ->
acc
end
end
|> _find_leaves(opts, config)
end
@doc false
defp _ancestors(descendant, opts, config) do
ancestors_collector(descendant, opts, config)
|> depth(opts, config)
|> selected(opts, config)
end
# Mirror of descendants_collector/3 for paths ending at `descendant`.
@doc false
defp ancestors_collector(descendant, opts, config) do
mapper = fn paths -> Enum.map(paths, fn [ancestor, _, _] -> ancestor end) end
fn path, {acc_paths, _mapper, size} = acc, itself? ->
case path do
[^descendant, ^descendant, _] when not itself? ->
acc
[_ancestor, ^descendant, _depth] = ancestors ->
{[ancestors | acc_paths], mapper, size + 1}
_ ->
acc
end
end
|> _find_leaves(opts, config)
end
# Inserts `leaf` under `ancestor`: the self-path [leaf, leaf, 0] plus one
# path from each of ancestor's ancestors (depth bumped by one).
# Fails with :no_ancestor if `ancestor` is unknown.
@doc false
defp _insert(leaf, ancestor, _opts, config) do
%CTE{nodes: nodes, paths: paths} = config
with true <- Map.has_key?(nodes, ancestor) do
{leaf_new_ancestors, _} = ancestors_collector(ancestor, [itself: true], config)
new_paths =
leaf_new_ancestors
|> Enum.reduce([[leaf, leaf, 0]], fn [ancestor, _, depth], acc ->
[[ancestor, leaf, depth + 1] | acc]
end)
acc_paths = paths ++ new_paths
config = %{config | paths: acc_paths}
{:ok, Enum.map(new_paths, &ignore_depth/1), config}
else
_ -> {:error, :no_ancestor, config}
end
end
# Runs the collector over all paths; reduce_while honors the :limit option
# (0 = unlimited). Returns the matched paths plus the id-extraction mapper.
@doc false
defp _find_leaves(fun, opts, %CTE{paths: paths}) do
itself? = Keyword.get(opts, :itself, false)
limit = Keyword.get(opts, :limit, 0)
{leaves_paths, mapper, _size} =
paths
|> Enum.reduce_while({[], & &1, 0}, fn path, acc ->
{_, _, sz} = dsz = fun.(path, acc, itself?)
if limit == 0 or sz < limit, do: {:cont, dsz}, else: {:halt, dsz}
end)
{leaves_paths, mapper}
end
# Optional :depth filter; negative depths are clamped to 0.
@doc false
defp depth({leaves_paths, mapper}, opts, _config) do
leaves_paths =
if depth = Keyword.get(opts, :depth) do
leaves_paths
|> Enum.filter(fn [_, _, depth_] -> depth_ <= max(depth, 0) end)
else
leaves_paths
end
{leaves_paths, mapper}
end
# Maps matched paths to ids, or to full nodes when `nodes: true` is given.
@doc false
defp selected({leaves_paths, mapper}, opts, %CTE{nodes: nodes}) do
leaves = mapper.(leaves_paths)
if Keyword.get(opts, :nodes, false) do
Enum.map(leaves, &Map.get(nodes, &1))
else
leaves
end
end
@doc false
defp ignore_depth([ancestor, descendant, _]), do: [ancestor, descendant]
end
|
lib/cte/adapter/memory.ex
| 0.754373
| 0.448064
|
memory.ex
|
starcoder
|
defmodule Crawlie do
@moduledoc """
The simple Elixir web crawler.
"""
require Logger
alias Crawlie.Options
alias Crawlie.Page
alias Crawlie.Response
alias Crawlie.Utils
alias Crawlie.Stage.UrlManager
alias Crawlie.Stats.Server, as: StatsServer
@spec crawl(Stream.t, module, Keyword.t) :: Flow.t
@doc """
Crawls the urls provided in `source`, using the `Crawlie.ParserLogic` provided
in `parser_logic`.
The `options` are used to tweak the crawler's behaviour. You can use most of
the options for [HttPoison](https://hexdocs.pm/httpoison/HTTPoison.html#request/5),
as well as Crawlie specific options.
It is perfectly ok to run multiple crawling sessions at the same time, they're independent.
## arguments
- `source` - a `Stream` or an `Enum` containing the urls to crawl
- `parser_logic`- a `Crawlie.ParserLogic` behaviour implementation
- `options` - a Keyword List of options
## Crawlie specific options
- `:http_client` - module implementing the `Crawlie.HttpClient` behaviour to be
used to make the requests. If not provided, will default to `Crawlie.HttpClient.HTTPoisonClient`.
- `:mock_client_fun` - If you're using the `Crawlie.HttpClient.MockClient`, this
would be the `url :: String.t -> {:ok, body :: String.t} | {:error, term}` function simulating
making the requests.
See `Crawlie.HttpClient.MockClient` for details.
- `:max_depth` - maximum crawling "depth". `0` by default.
- `:max_retries` - maximum amount of tries Crawlie should try to fetch any individual
page before giving up. By default `3`.
- `:fetch_phase` - `Flow` partition configuration for the fetching phase of the crawling `Flow`.
It should be a Keyword List containing any subset of `:min_demand`, `:max_demand` and `:stages`
properties. For the meaning of these options see [Flow documentation](https://hexdocs.pm/gen_stage/Flow.html)
- `:process_phase` - same as `:fetch_phase`, but for the processing (page parsing, data and link
extraction) part of the process
- `:pqueue_module` - One of [pqueue](https://github.com/okeuday/pqueue) implementations:
`:pqueue`, `:pqueue2`, `:pqueue3`, `:pqueue4`. Different implementation have different
performance characteristics and allow for different `:max_depth` values. Consult
[docs](https://github.com/okeuday/pqueue) for details. By default using `:pqueue3` -
good performance and allowing arbitrary `:max_depth` values.
"""
def crawl(source, parser_logic, options \\ []) do
  # Normalize the caller's options before handing off to the shared pipeline.
  prepared =
    options
    |> Options.strip_reserved()
    |> Options.with_defaults()

  _crawl(source, parser_logic, prepared)
end
@spec crawl_and_track_stats(Stream.t, module, Keyword.t) :: {StatsServer.ref, Flow.t}
@doc """
Crawls the urls provided in `source`, using the `Crawlie.ParserLogic` provided and collects the crawling statistics.
The statistics are accumulated independently, per `Crawlie.crawl_and_track_stats/3` call.
See `Crawlie.crawl/3` for details.
## Additional options
(apart from the ones from `Crawlie.crawl/3`, which all apply as well)
- `:max_fetch_failed_uris_tracked` - `100` by default. The maximum quantity of uris that will be kept in the `Crawlie.Stats.Server`, for which the fetch operation was failed.
- `:max_parse_failed_uris_tracked` - `100` by default. The maximum quantity of uris that will be kept in the `Crawlie.Stats.Server`, for which the parse operation was failed.
"""
def crawl_and_track_stats(source, parser_logic, options \\ []) do
  # Each call gets its own stats accumulator; the ref is threaded through the
  # options so the flow operations can report into it.
  ref = StatsServer.start_new()

  prepared =
    options
    |> Options.strip_reserved()
    |> Keyword.put(:stats_ref, ref)
    |> Options.with_defaults()

  {ref, _crawl(source, parser_logic, prepared)}
end
# Builds and returns the crawling Flow: UrlManager (a GenStage producer of
# pages) -> fetch -> parse -> uri extraction (side effect) -> data extraction.
defp _crawl(source, parser_logic, options) do
  {:ok, url_stage} = UrlManager.start_link(source, options)
  url_stage
  |> Flow.from_stage(options)
  # Separate partitions let fetching and processing scale independently.
  |> Flow.partition(Keyword.get(options, :fetch_phase))
  |> Flow.flat_map(&fetch_operation(&1, options, url_stage))
  |> Flow.partition(Keyword.get(options, :process_phase))
  |> Flow.flat_map(&parse_operation(&1, options, parser_logic, url_stage))
  # each/2: uri extraction only feeds new pages back to the UrlManager.
  |> Flow.each(&extract_uris_operation(&1, options, parser_logic, url_stage))
  |> Flow.flat_map(&extract_data_operation(&1, options, parser_logic))
end
@spec fetch_operation(Page.t, Keyword.t, GenStage.stage) :: [{Page.t, String.t}]
@doc false
# Fetches one page with the configured `:http_client`. Returns [{page, response}]
# on success (so it composes with Flow.flat_map/2) or [] on failure.
# NOTE(review): the @spec says the second tuple element is a String.t, but the
# value is whatever `client.get/2` yields and parse_operation/4 matches it as
# %Response{} - confirm against the HttpClient behaviour.
def fetch_operation(%Page{uri: uri} = page, options, url_stage) do
  client = Keyword.get(options, :http_client)
  start_time = Utils.utimestamp()
  case client.get(uri, options) do
    {:ok, response} ->
      duration_usec = Utils.utimestamp() - start_time
      # stats_op/2 presumably only invokes the callback when stats tracking is
      # enabled (a :stats_ref is present) - confirm in Options.
      Options.stats_op(options, &StatsServer.fetch_succeeded(&1, page, response, duration_usec))
      [{page, response}]
    {:error, _reason} ->
      # Tell the UrlManager the fetch failed (presumably so it can retry up to
      # :max_retries - confirm in UrlManager) and record it in the stats.
      UrlManager.page_failed(url_stage, page)
      max_failed_uris_to_track = Keyword.fetch!(options, :max_fetch_failed_uris_tracked)
      Options.stats_op(options, &StatsServer.fetch_failed(&1, page, max_failed_uris_to_track))
      []
  end
end
@spec parse_operation({Page.t, Response.t}, Keyword.t, module, GenStage.stage) ::
        [{Page.t, Response.t, term}]
@doc false
# Runs the parser logic over a fetched response. Returns [{page, response, parsed}]
# on success and [] when the page is skipped or parsing fails, so it composes
# with Flow.flat_map/2. (Spec return fixed: success yields 3-tuples, not pairs.)
def parse_operation({%Page{} = page, %Response{} = response}, options, parser_logic, url_stage) do
  case parser_logic.parse(response, options) do
    {:ok, parsed} ->
      [{page, response, parsed}]

    :skip ->
      skip_page(page, options, url_stage)

    {:skip, _reason} ->
      skip_page(page, options, url_stage)

    {:error, reason} ->
      UrlManager.page_failed(url_stage, page)
      max_failed_uris_to_track = Keyword.fetch!(options, :max_parse_failed_uris_tracked)
      Options.stats_op(options, &StatsServer.parse_failed(&1, page, max_failed_uris_to_track))
      Logger.warn "could not parse \"#{Page.url(page)}\", parsing failed with error #{inspect reason}"
      []
  end
end

# Shared handler for the :skip / {:skip, reason} outcomes: marks the page as
# skipped in the url manager and the stats, producing no flow output.
defp skip_page(page, options, url_stage) do
  UrlManager.page_skipped(url_stage, page)
  Options.stats_op(options, &StatsServer.page_skipped(&1, page))
  []
end
@spec extract_uris_operation({Page.t, Response.t, term}, Keyword.t, module, GenStage.stage) :: any
@doc false
# Extracts child uris from a parsed page and queues them with the UrlManager,
# unless the page already sits at :max_depth (default 0, i.e. never follow
# children). Always marks the page as succeeded afterwards. Used with
# Flow.each/2, so the return value is ignored.
def extract_uris_operation({%Page{depth: depth} = page, response, parsed}, options, module, url_stage) do
  max_depth = Keyword.get(options, :max_depth, 0)
  if depth < max_depth do
    pages = module.extract_uris(response, parsed, options)
    |> Enum.map(&Page.child(page, &1))
    UrlManager.add_children_pages(url_stage, pages)
    Options.stats_op(options, &StatsServer.uris_extracted(&1, Enum.count(pages)))
  end
  UrlManager.page_succeeded(url_stage, page)
  :ok
end
@spec extract_data_operation({Page.t, Response.t, term}, Keyword.t, module) :: [term]
@doc false
# Delegates to the parser logic's extract_data/3, producing the final results
# emitted by the crawling Flow. (Spec fixed: this stage receives the 3-tuple
# produced by parse_operation/4, not a 2-tuple.)
def extract_data_operation({_page, response, parsed}, options, module) do
  module.extract_data(response, parsed, options)
end
end
|
lib/crawlie.ex
| 0.845145
| 0.584064
|
crawlie.ex
|
starcoder
|
defprotocol Timex.Protocol do
@moduledoc """
This protocol defines the API for functions which take a `Date`,
`NaiveDateTime`, or `DateTime` as input.
"""
@fallback_to_any true
alias Timex.Types
@doc """
Convert a date/time value to a Julian calendar date number
"""
@spec to_julian(Types.valid_datetime()) :: float | {:error, term}
def to_julian(datetime)
@doc """
Convert a date/time value to gregorian seconds (seconds since start of year zero)
"""
@spec to_gregorian_seconds(Types.valid_datetime()) :: non_neg_integer | {:error, term}
def to_gregorian_seconds(datetime)
@doc """
Convert a date/time value to gregorian microseconds (microseconds since the start of year zero)
"""
@spec to_gregorian_microseconds(Types.valid_datetime()) :: non_neg_integer | {:error, term}
def to_gregorian_microseconds(datetime)
@doc """
Convert a date/time value to seconds since the UNIX Epoch
"""
@spec to_unix(Types.valid_datetime()) :: non_neg_integer | {:error, term}
def to_unix(datetime)
@doc """
Convert a date/time value to a Date
"""
@spec to_date(Types.valid_datetime()) :: Date.t() | {:error, term}
def to_date(datetime)
@doc """
Convert a date/time value to a DateTime.
An optional timezone can be provided, UTC will be assumed if one is not provided.
"""
@spec to_datetime(Types.valid_datetime()) :: DateTime.t() | {:error, term}
@spec to_datetime(Types.valid_datetime(), Types.valid_timezone()) ::
DateTime.t() | Timex.AmbiguousDateTime.t() | {:error, term}
def to_datetime(datetime, timezone \\ :utc)
@doc """
Convert a date/time value to a NaiveDateTime
"""
@spec to_naive_datetime(Types.valid_datetime()) :: NaiveDateTime.t() | {:error, term}
def to_naive_datetime(datetime)
@doc """
Convert a date/time value to its Erlang tuple variant
i.e. Date becomes `{y,m,d}`, DateTime/NaiveDateTime become
`{{y,m,d},{h,mm,s}}`
"""
@spec to_erl(Types.valid_datetime()) :: Types.date() | Types.datetime() | {:error, term}
def to_erl(datetime)
@doc """
Get the century a date/time value is in
"""
@spec century(Types.year() | Types.valid_datetime()) :: non_neg_integer | {:error, term}
def century(datetime)
@doc """
Return a boolean indicating whether the date/time value is in a leap year
"""
@spec is_leap?(Types.valid_datetime() | Types.year()) :: boolean | {:error, term}
def is_leap?(datetime)
@doc """
Shift a date/time value using a list of shift unit/value pairs
"""
@spec shift(Types.valid_datetime(), Timex.shift_options()) ::
Types.valid_datetime() | Timex.AmbiguousDateTime.t() | {:error, term}
def shift(datetime, options)
@doc """
Set fields on a date/time value using a list of unit/value pairs
"""
@spec set(Types.valid_datetime(), Timex.set_options()) ::
Types.valid_datetime() | {:error, term}
def set(datetime, options)
@doc """
Get a new version of the date/time value representing the beginning of the day
"""
@spec beginning_of_day(Types.valid_datetime()) :: Types.valid_datetime() | {:error, term}
def beginning_of_day(datetime)
@doc """
Get a new version of the date/time value representing the end of the day
"""
@spec end_of_day(Types.valid_datetime()) :: Types.valid_datetime() | {:error, term}
def end_of_day(datetime)
@doc """
Get a new version of the date/time value representing the beginning of its week,
providing a weekday name (as an atom) for the day which starts the week, i.e. `:mon`.
"""
@spec beginning_of_week(Types.valid_datetime(), Types.weekstart()) ::
        Types.valid_datetime() | {:error, term}
def beginning_of_week(datetime, weekstart)

@doc """
Get a new version of the date/time value representing the ending of its week,
providing a weekday name (as an atom) for the day which starts the week, i.e. `:mon`.
"""
@spec end_of_week(Types.valid_datetime(), Types.weekstart()) ::
        Types.valid_datetime() | {:error, term}
def end_of_week(datetime, weekstart)

@doc """
Get a new version of the date/time value representing the beginning of its year
"""
@spec beginning_of_year(Types.year() | Types.valid_datetime()) ::
        Types.valid_datetime() | {:error, term}
def beginning_of_year(datetime)

@doc """
Get a new version of the date/time value representing the ending of its year
"""
@spec end_of_year(Types.year() | Types.valid_datetime()) ::
        Types.valid_datetime() | {:error, term}
def end_of_year(datetime)

@doc """
Get a new version of the date/time value representing the beginning of its quarter
"""
@spec beginning_of_quarter(Types.valid_datetime()) :: Types.valid_datetime() | {:error, term}
def beginning_of_quarter(datetime)

@doc """
Get a new version of the date/time value representing the ending of its quarter
"""
@spec end_of_quarter(Types.valid_datetime()) :: Types.valid_datetime() | {:error, term}
def end_of_quarter(datetime)

@doc """
Get a new version of the date/time value representing the beginning of its month
"""
@spec beginning_of_month(Types.valid_datetime()) :: Types.valid_datetime() | {:error, term}
def beginning_of_month(datetime)

@doc """
Get a new version of the date/time value representing the ending of its month
"""
@spec end_of_month(Types.valid_datetime()) :: Types.valid_datetime() | {:error, term}
def end_of_month(datetime)
@doc """
Get the quarter for the given date/time value
"""
@spec quarter(Types.month() | Types.valid_datetime()) :: 1..4 | {:error, term}
def quarter(datetime)
@doc """
Get the number of days in the month for the given date/time value
"""
@spec days_in_month(Types.valid_datetime()) :: Types.num_of_days() | {:error, term}
def days_in_month(datetime)
@doc """
Get the week number of the given date/time value, starting at 1
"""
@spec week_of_month(Types.valid_datetime()) :: Types.week_of_month() | {:error, term}
def week_of_month(datetime)
@doc """
Get the ordinal weekday number of the given date/time value
"""
@spec weekday(Types.valid_datetime()) :: Types.weekday() | {:error, term}
def weekday(datetime)
@doc """
Get the ordinal day number of the given date/time value
"""
@spec day(Types.valid_datetime()) :: Types.daynum() | {:error, term}
def day(datetime)
@doc """
Determine if the provided date/time value is valid.
"""
@spec is_valid?(Types.valid_datetime()) :: boolean | {:error, term}
def is_valid?(datetime)
@doc """
Return a pair {year, week number} (as defined by ISO 8601) that the given date/time value falls on.
"""
@spec iso_week(Types.valid_datetime()) :: {Types.year(), Types.weeknum()} | {:error, term}
def iso_week(datetime)
@doc """
Shifts the given date/time value to the ISO day given
"""
@spec from_iso_day(Types.valid_datetime(), non_neg_integer) ::
Types.valid_datetime() | {:error, term}
def from_iso_day(datetime, day)
end
defimpl Timex.Protocol, for: Any do
def to_julian(%{__struct__: _} = d), do: Timex.to_julian(Map.from_struct(d))
def to_julian(_datetime), do: {:error, :invalid_date}
def to_gregorian_seconds(%{__struct__: _} = d),
do: Timex.to_gregorian_seconds(Map.from_struct(d))
def to_gregorian_seconds(_datetime), do: {:error, :invalid_date}
def to_gregorian_microseconds(%{__struct__: _} = d),
do: Timex.to_gregorian_microseconds(Map.from_struct(d))
def to_gregorian_microseconds(_datetime), do: {:error, :invalid_date}
def to_unix(%{__struct__: _} = d), do: Timex.to_unix(Map.from_struct(d))
def to_unix(_datetime), do: {:error, :invalid_date}
def to_date(%{__struct__: _} = d), do: Timex.to_date(Map.from_struct(d))
def to_date(_datetime), do: {:error, :invalid_date}
def to_datetime(%{__struct__: _} = d, timezone),
do: Timex.to_datetime(Map.from_struct(d), timezone)
def to_datetime(_datetime, _timezone), do: {:error, :invalid_date}
def to_naive_datetime(%{__struct__: _} = d), do: Timex.to_naive_datetime(Map.from_struct(d))
def to_naive_datetime(_datetime), do: {:error, :invalid_date}
def to_erl(%{__struct__: _} = d), do: Timex.to_erl(Map.from_struct(d))
def to_erl(_datetime), do: {:error, :invalid_date}
def century(%{__struct__: _} = d), do: Timex.century(Map.from_struct(d))
def century(_datetime), do: {:error, :invalid_date}
def is_leap?(%{__struct__: _} = d), do: Timex.is_leap?(Map.from_struct(d))
def is_leap?(_datetime), do: {:error, :invalid_date}
def shift(%{__struct__: _} = d, options), do: Timex.shift(Map.from_struct(d), options)
def shift(_datetime, _options), do: {:error, :invalid_date}
def set(%{__struct__: _} = d, options), do: Timex.set(Map.from_struct(d), options)
def set(_datetime, _options), do: {:error, :invalid_date}
def beginning_of_day(%{__struct__: _} = d), do: Timex.beginning_of_day(Map.from_struct(d))
def beginning_of_day(_datetime), do: {:error, :invalid_date}
def end_of_day(%{__struct__: _} = d), do: Timex.end_of_day(Map.from_struct(d))
def end_of_day(_datetime), do: {:error, :invalid_date}
def beginning_of_week(%{__struct__: _} = d, weekstart),
do: Timex.beginning_of_week(Map.from_struct(d), weekstart)
def beginning_of_week(_datetime, _weekstart), do: {:error, :invalid_date}
def end_of_week(%{__struct__: _} = d, weekstart),
do: Timex.end_of_week(Map.from_struct(d), weekstart)
def end_of_week(_datetime, _weekstart), do: {:error, :invalid_date}
def beginning_of_year(%{__struct__: _} = d), do: Timex.beginning_of_year(Map.from_struct(d))
def beginning_of_year(_datetime), do: {:error, :invalid_date}
def end_of_year(%{__struct__: _} = d), do: Timex.end_of_year(Map.from_struct(d))
def end_of_year(_datetime), do: {:error, :invalid_date}
def beginning_of_quarter(%{__struct__: _} = d),
do: Timex.beginning_of_quarter(Map.from_struct(d))
def beginning_of_quarter(_datetime), do: {:error, :invalid_date}
def end_of_quarter(%{__struct__: _} = d), do: Timex.end_of_quarter(Map.from_struct(d))
def end_of_quarter(_datetime), do: {:error, :invalid_date}
def beginning_of_month(%{__struct__: _} = d), do: Timex.beginning_of_month(Map.from_struct(d))
def beginning_of_month(_datetime), do: {:error, :invalid_date}
def end_of_month(%{__struct__: _} = d), do: Timex.end_of_month(Map.from_struct(d))
def end_of_month(_datetime), do: {:error, :invalid_date}
def quarter(%{__struct__: _} = d), do: Timex.quarter(Map.from_struct(d))
def quarter(_datetime), do: {:error, :invalid_date}
def days_in_month(%{__struct__: _} = d), do: Timex.days_in_month(Map.from_struct(d))
def days_in_month(_datetime), do: {:error, :invalid_date}
def week_of_month(%{__struct__: _} = d), do: Timex.week_of_month(Map.from_struct(d))
def week_of_month(_datetime), do: {:error, :invalid_date}
def weekday(%{__struct__: _} = d), do: Timex.weekday(Map.from_struct(d))
def weekday(_datetime), do: {:error, :invalid_date}
def day(%{__struct__: _} = d), do: Timex.day(Map.from_struct(d))
def day(_datetime), do: {:error, :invalid_date}
def is_valid?(%{__struct__: _} = d), do: Timex.is_valid?(Map.from_struct(d))
def is_valid?(_datetime), do: false
def iso_week(%{__struct__: _} = d), do: Timex.iso_week(Map.from_struct(d))
def iso_week(_datetime), do: {:error, :invalid_date}
# Fix: the original dropped the `day` argument and called Timex.from_iso_day/1
# with the datetime map in the position where a day number belongs.
# Timex.from_iso_day/2 takes the ISO day first and the date second.
def from_iso_day(%{__struct__: _} = d, day), do: Timex.from_iso_day(day, Map.from_struct(d))
def from_iso_day(_datetime, _day), do: {:error, :invalid_date}
end
|
lib/protocol.ex
| 0.940898
| 0.707089
|
protocol.ex
|
starcoder
|
defmodule Contex.LinePlot do
@moduledoc """
A simple line plot, drawing lines through y values plotted against x values.
It is possible to specify multiple y columns with the same x column. It is not
yet possible to specify multiple independent series.
Data are sorted by the x-value prior to plotting.
The x column can either be numeric or date time data. If numeric, a
`Contex.ContinuousLinearScale` is used to scale the values to the plot,
and if date time, a `Contex.TimeScale` is used.
Fill colours for each y column can be specified with `colours/2`.
A column in the dataset can optionally be used to control the colours. See
`colours/2` and `set_colour_col_name/2`
"""
import Contex.SVG
alias __MODULE__
alias Contex.{Scale, ContinuousLinearScale, TimeScale}
alias Contex.CategoryColourScale
alias Contex.{Dataset, Mapping}
alias Contex.Axis
alias Contex.Utils
defstruct [
:dataset,
:mapping,
:options,
:x_scale,
:y_scale,
:legend_scale,
transforms: %{},
colour_palette: :default
]
@required_mappings [
x_col: :exactly_one,
y_cols: :one_or_more,
fill_col: :zero_or_one
]
@default_options [
axis_label_rotation: :auto,
custom_x_scale: nil,
custom_y_scale: nil,
custom_x_formatter: nil,
custom_y_formatter: nil,
width: 100,
height: 100,
smoothed: true,
stroke_width: "2",
colour_palette: :default
]
@default_plot_options %{
show_x_axis: true,
show_y_axis: true,
legend_setting: :legend_none
}
@type t() :: %__MODULE__{}
@doc ~S"""
Create a new point plot definition and apply defaults.
Options may be passed to control the settings for the barchart. Options available are:
- `:axis_label_rotation` : `:auto` (default), 45 or 90
Specifies the label rotation value that will be applied to the bottom axis. Accepts integer
values for degrees of rotation or `:auto`. Note that manually set rotation values other than
45 or 90 will be treated as zero. The default value is `:auto`, which sets the rotation to
zero degrees if the number of items on the axis is greater than eight, 45 degrees otherwise.
- `:custom_x_scale` : `nil` (default) or an instance of a suitable `Contex.Scale`.
The scale must be suitable for the data type and would typically be either `Contex.ContinuousLinearScale`
or `Contex.TimeScale`. It is not necessary to set the range for the scale as the range is set
as part of the chart layout process.
- `:custom_y_scale` : `nil` (default) or an instance of a suitable `Contex.Scale`.
- `:custom_x_formatter` : `nil` (default) or a function with arity 1
Allows the axis tick labels to be overridden. For example, if you have a numeric representation of money and you want to
have the x axis show it as millions of dollars you might do something like:
# Turns 1_234_567.67 into $1.23M
defp money_formatter_millions(value) when is_number(value) do
"$#{:erlang.float_to_binary(value/1_000_000.0, [decimals: 2])}M"
end
defp show_chart(data) do
LinePlot.new(
dataset,
mapping: %{x_col: :column_a, y_cols: [:column_b, column_c]},
custom_x_formatter: &money_formatter_millions/1
)
end
- `:custom_y_formatter` : `nil` (default) or a function with arity 1.
- `:stroke_width` : 2 (default) - stroke width of the line
- `:smoothed` : true (default) or false - draw the lines smoothed
Note that the smoothing algorithm is a cardinal spline with tension = 0.3.
You may get strange effects (e.g. loops / backtracks) in certain circumstances, e.g.
if the x-value spacing is very uneven. This algorithm forces the smoothed line
through the points.
- `:colour_palette` : `:default` (default) or colour palette - see `colours/2`
Overrides the default colours.
Where multiple y columns are defined for the plot, a different colour will be used for
each column.
If a single y column is defined and a `:fill_col`column is mapped,
a different colour will be used for each unique value in the colour column.
If a single y column is defined and no `:fill_col`column is mapped, the first colour
in the supplied colour palette will be used to plot the points.
Colours can either be a named palette defined in `Contex.CategoryColourScale` or a list of strings representing hex code
of the colour as per CSS colour hex codes, but without the #. For example:
```
chart = LinePlot.new(
dataset,
mapping: %{x_col: :column_a, y_cols: [:column_b, column_c]},
colour_palette: ["fbb4ae", "b3cde3", "ccebc5"]
)
```
The colours will be applied to the data series in the same order as the columns are specified in `set_val_col_names/2`
- `:mapping` : Maps attributes required to generate the barchart to columns in the dataset.
If the data in the dataset is stored as a map, the `:mapping` option is required. If the dataset
is not stored as a map, `:mapping` may be left out, in which case the first column will be used
for the x and the second column used as the y.
This value must be a map of the plot's `:x_col` and `:y_cols` to keys in the map,
such as `%{x_col: :column_a, y_cols: [:column_b, column_c]}`.
The value for the `:y_cols` key must be a list.
If a single y column is specified an optional `:fill_col` mapping can be provided
to control the point colour. _This is ignored if there are multiple y columns_.
"""
@spec new(Contex.Dataset.t(), keyword()) :: Contex.LinePlot.t()
def new(%Dataset{} = dataset, options \\ []) do
  # Caller options win over @default_options; the mapping is validated against
  # the dataset before the struct is built.
  merged = Keyword.merge(@default_options, options)
  mapping = Mapping.new(@required_mappings, Keyword.get(merged, :mapping), dataset)
  %LinePlot{dataset: dataset, mapping: mapping, options: merged}
end
@doc false
def set_size(%LinePlot{} = plot, width, height) do
  # Store both dimensions via the shared option setter, width first.
  Enum.reduce([width: width, height: height], plot, fn {key, value}, acc ->
    set_option(acc, key, value)
  end)
end
# Stores a single chart option on the plot struct.
defp set_option(%LinePlot{options: options} = plot, key, value) do
  options = Keyword.put(options, key, value)
  %{plot | options: options}
end

# Reads a chart option (previously merged from @default_options and the caller).
defp get_option(%LinePlot{options: options}, key) do
  Keyword.get(options, key)
end
@doc false
# Renders the plot's legend; scales are prepared first so the legend colour
# scale exists. Any non-LinePlot input yields an empty string.
def get_svg_legend(%LinePlot{} = plot) do
  plot = prepare_scales(plot)
  Contex.Legend.to_svg(plot.legend_scale)
end
def get_svg_legend(_), do: ""
@doc false
# Renders the full chart (optional axes plus the line series) as an iodata
# list of SVG fragments. Axis visibility comes from @default_plot_options
# overridden by the caller's plot_options map.
def to_svg(%LinePlot{} = plot, plot_options) do
  plot = prepare_scales(plot)
  x_scale = plot.x_scale
  y_scale = plot.y_scale
  plot_options = Map.merge(@default_plot_options, plot_options)
  x_axis_svg =
    if plot_options.show_x_axis,
      do:
        get_x_axis(x_scale, plot)
        |> Axis.to_svg(),
      else: ""
  y_axis_svg =
    if plot_options.show_y_axis,
      do:
        Axis.new_left_axis(y_scale)
        |> Axis.set_offset(get_option(plot, :width))
        |> Axis.to_svg(),
      else: ""
  [
    x_axis_svg,
    y_axis_svg,
    "<g>",
    get_svg_lines(plot),
    "</g>"
  ]
end
# Builds the bottom axis for the x scale, applying the :axis_label_rotation
# option (:auto rotates 45 degrees once there are more than 8 ticks, else 0).
defp get_x_axis(x_scale, plot) do
  rotation =
    case get_option(plot, :axis_label_rotation) do
      :auto ->
        if length(Scale.ticks_range(x_scale)) > 8, do: 45, else: 0
      degrees ->
        degrees
    end
  x_scale
  |> Axis.new_bottom_axis()
  |> Axis.set_offset(get_option(plot, :height))
  |> Kernel.struct(rotation: rotation)
end
# Emits one SVG line (series) per mapped y column, coloured by series index.
defp get_svg_lines(
       %LinePlot{dataset: dataset, mapping: %{accessors: accessors}, transforms: transforms} =
         plot
     ) do
  x_accessor = accessors.x_col
  # Pre-sort by x-value else we get squiggly lines
  # NOTE(review): this comparator sorts descending, while get_svg_line/4
  # re-sorts each series ascending before drawing - confirm whether the
  # direction here is intentional or a leftover.
  data = Enum.sort(dataset.data, fn a, b -> x_accessor.(a) > x_accessor.(b) end)
  Enum.with_index(accessors.y_cols)
  |> Enum.map(fn {y_accessor, index} ->
    # Colour by series index; the fill value is ignored for multi-series plots.
    colour = transforms.colour.(index, nil)
    get_svg_line(plot, data, y_accessor, colour)
  end)
end
# Renders one data series as one or more SVG path segments. Rows whose x maps
# to nil are dropped; runs of nil y values split the series into separate
# segments so gaps in the data are not bridged by a line.
defp get_svg_line(
       %LinePlot{mapping: %{accessors: accessors}, transforms: transforms} = plot,
       data,
       y_accessor,
       colour
     ) do
  smooth = get_option(plot, :smoothed)
  stroke_width = get_option(plot, :stroke_width)
  options = [
    transparent: true,
    stroke: colour,
    stroke_width: stroke_width,
    stroke_linejoin: "round"
  ]
  points_list =
    data
    |> Stream.map(fn row ->
      # Map raw column values through the scale transforms into plot coordinates.
      x =
        accessors.x_col.(row)
        |> transforms.x.()
      y =
        y_accessor.(row)
        |> transforms.y.()
      {x, y}
    end)
    |> Enum.filter(fn {x, _y} -> not is_nil(x) end)
    |> Enum.sort(fn {x1, _y1}, {x2, _y2} -> x1 < x2 end)
    # Group consecutive points by y-nil-ness, then keep only the non-nil runs:
    # each surviving chunk becomes its own line segment.
    |> Enum.chunk_by(fn {_x, y} -> is_nil(y) end)
    |> Enum.filter(fn [{_x, y} | _] -> not is_nil(y) end)
  Enum.map(points_list, fn points -> line(points, smooth, options) end)
end
@doc false
# Prepares the x, y and colour scales (in that order) and their transforms.
def prepare_scales(%LinePlot{} = plot) do
  prepare_colour_scale(prepare_y_scale(prepare_x_scale(plot)))
end
# Builds the x scale - the caller's :custom_x_scale if given, otherwise one
# inferred from the column's type and extents - applies the custom tick
# formatter, and stores the domain->range transform on the plot.
defp prepare_x_scale(%LinePlot{dataset: dataset, mapping: mapping} = plot) do
  x_col_name = mapping.column_map[:x_col]
  width = get_option(plot, :width)
  custom_x_scale = get_option(plot, :custom_x_scale)
  x_scale =
    case custom_x_scale do
      nil -> create_scale_for_column(dataset, x_col_name, {0, width})
      _ -> custom_x_scale |> Scale.set_range(0, width)
    end
  x_scale = %{x_scale | custom_tick_formatter: get_option(plot, :custom_x_formatter)}
  x_transform = Scale.domain_to_range_fn(x_scale)
  transforms = Map.merge(plot.transforms, %{x: x_transform})
  %{plot | x_scale: x_scale, transforms: transforms}
end
# Builds the y scale across the combined extents of every mapped y column
# (unless a :custom_y_scale is supplied), applies the custom tick formatter,
# and stores the domain->range transform on the plot. The range runs
# height..0 because SVG y coordinates grow downward.
defp prepare_y_scale(%LinePlot{dataset: dataset, mapping: mapping} = plot) do
  y_col_names = mapping.column_map[:y_cols]
  height = get_option(plot, :height)
  custom_y_scale = get_option(plot, :custom_y_scale)
  y_scale =
    case custom_y_scale do
      nil ->
        {min, max} =
          get_overall_domain(dataset, y_col_names)
          |> Utils.fixup_value_range()
        ContinuousLinearScale.new()
        |> ContinuousLinearScale.domain(min, max)
        |> Scale.set_range(height, 0)
      _ ->
        custom_y_scale |> Scale.set_range(height, 0)
    end
  y_scale = %{y_scale | custom_tick_formatter: get_option(plot, :custom_y_formatter)}
  y_transform = Scale.domain_to_range_fn(y_scale)
  transforms = Map.merge(plot.transforms, %{y: y_transform})
  %{plot | y_scale: y_scale, transforms: transforms}
end
# Prepares the legend colour scale and the colour-lookup transform.
defp prepare_colour_scale(%LinePlot{dataset: dataset, mapping: mapping} = plot) do
  y_col_names = mapping.column_map[:y_cols]
  fill_col_name = mapping.column_map[:fill_col]
  palette = get_option(plot, :colour_palette)
  # It's a little tricky. We look up colours by index when colouring by series
  # but need the legend by column name, so where we are colouring by series
  # we will create a transform function with one instance of a colour scale
  # and the legend from another
  legend_scale = create_legend_colour_scale(y_col_names, fill_col_name, dataset, palette)
  transform = create_colour_transform(y_col_names, fill_col_name, dataset, palette)
  transforms = Map.merge(plot.transforms, %{colour: transform})
  %{plot | legend_scale: legend_scale, transforms: transforms}
end
# With a single y column and a fill column mapped, the legend lists the fill
# column's unique values; otherwise it lists the y column names.
defp create_legend_colour_scale(y_col_names, fill_col_name, dataset, palette)
     when length(y_col_names) == 1 and not is_nil(fill_col_name) do
  vals = Dataset.unique_values(dataset, fill_col_name)
  CategoryColourScale.new(vals) |> CategoryColourScale.set_palette(palette)
end

defp create_legend_colour_scale(y_col_names, _fill_col_name, _dataset, palette) do
  CategoryColourScale.new(y_col_names) |> CategoryColourScale.set_palette(palette)
end
# Builds a 2-arity colour lookup function. With a single y column plus a fill
# column, colour is chosen by the row's fill value; otherwise by series index.
defp create_colour_transform(y_col_names, fill_col_name, dataset, palette)
     when length(y_col_names) == 1 and not is_nil(fill_col_name) do
  vals = Dataset.unique_values(dataset, fill_col_name)
  scale = CategoryColourScale.new(vals) |> CategoryColourScale.set_palette(palette)
  fn _col_index, fill_val -> CategoryColourScale.colour_for_value(scale, fill_val) end
end

defp create_colour_transform(y_col_names, _fill_col_name, _dataset, palette) do
  # The scale is keyed by the series indices 0..n-1 (with_index + map extracts
  # just the index of each y column).
  fill_indices =
    Enum.with_index(y_col_names)
    |> Enum.map(fn {_, index} -> index end)
  scale = CategoryColourScale.new(fill_indices) |> CategoryColourScale.set_palette(palette)
  fn col_index, _fill_val -> CategoryColourScale.colour_for_value(scale, col_index) end
end
# Computes the combined {min, max} across all the given columns; nil handling
# is delegated to Utils.safe_min/2 and Utils.safe_max/2.
defp get_overall_domain(dataset, col_names) do
  Enum.reduce(col_names, {nil, nil}, fn col, {acc_min, acc_max} ->
    {col_min, col_max} = Dataset.column_extents(dataset, col)
    {Utils.safe_min(acc_min, col_min), Utils.safe_max(acc_max, col_max)}
  end)
end
# Infers a scale from the column's guessed type: TimeScale for datetimes,
# ContinuousLinearScale for numbers. Other column types raise a CaseClauseError.
defp create_scale_for_column(dataset, column, {r_min, r_max}) do
  {min, max} = Dataset.column_extents(dataset, column)
  case Dataset.guess_column_type(dataset, column) do
    :datetime ->
      TimeScale.new()
      |> TimeScale.domain(min, max)
      |> Scale.set_range(r_min, r_max)
    :number ->
      ContinuousLinearScale.new()
      |> ContinuousLinearScale.domain(min, max)
      |> Scale.set_range(r_min, r_max)
  end
end
end
|
lib/chart/lineplot.ex
| 0.950652
| 0.987911
|
lineplot.ex
|
starcoder
|
defmodule FarmbotFirmware.GCODE.Decoder do
@moduledoc false
alias FarmbotFirmware.{GCODE, Param}
@doc false
# Decodes a raw GCODE op-code plus its argument tokens into a tagged tuple.
# "R.." codes are reports coming from the firmware; "G../F../E" codes are
# commands (seen when echoed back). Unknown codes fall through to {:unknown, _}.
@spec do_decode(binary(), [binary()]) :: {GCODE.kind(), GCODE.args()}
def do_decode("R00", []), do: {:report_idle, []}
def do_decode("R01", []), do: {:report_begin, []}
def do_decode("R02", []), do: {:report_success, []}
def do_decode("R03", []), do: {:report_error, [:no_error]}
def do_decode("R03", v), do: {:report_error, decode_v(v)}
def do_decode("R04", []), do: {:report_busy, []}
def do_decode("R05", xyz), do: {:report_axis_state, decode_axis_state(xyz)}
def do_decode("R06", xyz),
  do: {:report_calibration_state, decode_calibration_state(xyz)}
def do_decode("R07", []), do: {:report_retry, []}
def do_decode("R08", args),
  do: {:report_echo, decode_echo(Enum.join(args, " "))}
def do_decode("R09", []), do: {:report_invalid, []}
def do_decode("R11", []), do: {:report_home_complete, [:x]}
def do_decode("R12", []), do: {:report_home_complete, [:y]}
def do_decode("R13", []), do: {:report_home_complete, [:z]}
def do_decode("R15", x), do: {:report_position_change, decode_floats(x)}
def do_decode("R16", y), do: {:report_position_change, decode_floats(y)}
def do_decode("R17", z), do: {:report_position_change, decode_floats(z)}
def do_decode("R20", []), do: {:report_parameters_complete, []}
def do_decode("R21", pv), do: {:report_parameter_value, decode_pv(pv)}
def do_decode("R23", pv),
  do: {:report_calibration_parameter_value, decode_pv(pv)}
def do_decode("R41", pv), do: {:report_pin_value, decode_ints(pv)}
def do_decode("R71", []), do: {:report_axis_timeout, [:x]}
def do_decode("R72", []), do: {:report_axis_timeout, [:y]}
def do_decode("R73", []), do: {:report_axis_timeout, [:z]}
def do_decode("R81", xxyyzz),
  do: {:report_end_stops, decode_end_stops(xxyyzz)}
def do_decode("R82", xyzs), do: {:report_position, decode_floats(xyzs)}
def do_decode("R83", [version]), do: {:report_software_version, [version]}
def do_decode("R84", xyz), do: {:report_encoders_scaled, decode_floats(xyz)}
def do_decode("R85", xyz), do: {:report_encoders_raw, decode_floats(xyz)}
def do_decode("R87", []), do: {:report_emergency_lock, []}
def do_decode("R88", []), do: {:report_no_config, []}
def do_decode("R89", uxvywz), do: {:report_load, decode_uxvywz(uxvywz)}
def do_decode("R99", debug),
  do: {:report_debug_message, [Enum.join(debug, " ")]}
def do_decode("G00", xyzs), do: {:command_movement, decode_floats(xyzs)}
# NOTE(review): :comand_movement_home is misspelled ("comand"); kept as-is
# because consumers may pattern-match on this exact atom - confirm before fixing.
def do_decode("G28", []), do: {:comand_movement_home, [:x, :y, :z]}
def do_decode("F11", []), do: {:command_movement_find_home, [:x]}
def do_decode("F12", []), do: {:command_movement_find_home, [:y]}
def do_decode("F13", []), do: {:command_movement_find_home, [:z]}
def do_decode("F14", []), do: {:command_movement_calibrate, [:x]}
def do_decode("F15", []), do: {:command_movement_calibrate, [:y]}
def do_decode("F16", []), do: {:command_movement_calibrate, [:z]}
def do_decode("F20", []), do: {:parameter_read_all, []}
def do_decode("F21", [param_id]),
  do: {:parameter_read, [Param.decode(param_id)]}
def do_decode("F22", pv), do: {:parameter_write, decode_pv(pv)}
def do_decode("F23", pv), do: {:calibration_parameter_write, decode_pv(pv)}
def do_decode("F41", pvm), do: {:pin_write, decode_ints(pvm)}
def do_decode("F42", pv), do: {:pin_read, decode_ints(pv)}
def do_decode("F43", pm), do: {:pin_mode_write, decode_ints(pm)}
def do_decode("F61", pv), do: {:servo_write, decode_ints(pv)}
def do_decode("F81", []), do: {:end_stops_read, []}
def do_decode("F82", []), do: {:position_read, []}
def do_decode("F83", []), do: {:software_version_read, []}
def do_decode("F84", xyzs), do: {:position_write_zero, decode_ints(xyzs)}
def do_decode("F09", _), do: {:command_emergency_unlock, []}
def do_decode("E", _), do: {:command_emergency_lock, []}
# Catch-all: unrecognised codes are surfaced rather than dropped.
def do_decode(kind, args) do
  {:unknown, [kind | args]}
end
# Decodes a single "V<number>"-style error-code token into a one-element
# list naming the error. Unknown codes are kept as `[unknown_error: code]`.
# Raises MatchError when the token is not a parseable float, as before.
def decode_v(["V" <> raw]) do
  {code, ""} = Float.parse(raw)

  known_errors = %{
    0.0 => :no_error,
    1.0 => :emergency_lock,
    2.0 => :timeout,
    3.0 => :stall_detected,
    4.0 => :calibration_error,
    14.0 => :invalid_command,
    15.0 => :no_config,
    31.0 => :stall_detected_x,
    32.0 => :stall_detected_y,
    33.0 => :stall_detected_z
  }

  case Map.fetch(known_errors, code) do
    {:ok, name} -> [name]
    :error -> [unknown_error: code]
  end
end
# Parses tokens like "X1.5" / "Y20" into a keyword list keyed by the
# downcased axis letter (e.g. [x: 1.5, y: 20.0]). Integer values are
# coerced to floats via `num / 1`; unparseable tokens are silently dropped.
# Bodiless head declaring the accumulator default.
defp decode_floats(list, acc \\ [])

defp decode_floats([<<arg::binary-1, val::binary>> | rest], acc) do
  # String.to_existing_atom avoids minting atoms from wire input; the axis
  # atoms (:x, :y, :z, ...) must already exist in the VM.
  arg =
    arg
    |> String.downcase()
    |> String.to_existing_atom()

  case Float.parse(val) do
    {num, ""} ->
      # Keyword.put prepends and removes earlier duplicates of the key,
      # so a repeated axis token keeps only its last value.
      decode_floats(rest, Keyword.put(acc, arg, num))

    _ ->
      case Integer.parse(val) do
        # `num / 1` converts the integer to a float so callers always
        # see float values.
        {num, ""} -> decode_floats(rest, Keyword.put(acc, arg, num / 1))
        # Unparseable value: skip the token entirely.
        _ -> decode_floats(rest, acc)
      end
  end
end

# This is sort of order dependent and not exactly correct.
# It should ensure the order is [x: _, y: _, z: _]
defp decode_floats([], acc), do: Enum.reverse(acc)
# Decodes axis tokens into movement-state atoms, e.g. [x: :cruise].
# An unrecognized state code raises CaseClauseError, as before.
defp decode_axis_state(list) do
  for {axis, code} <- decode_floats(list) do
    state =
      case code do
        0.0 -> :idle
        1.0 -> :begin
        2.0 -> :accelerate
        3.0 -> :cruise
        4.0 -> :decelerate
        5.0 -> :stop
        6.0 -> :crawl
      end

    {axis, state}
  end
end
# Decodes axis tokens into calibration-state atoms, e.g. [x: :home].
# An unrecognized state code raises CaseClauseError, as before.
defp decode_calibration_state(list) do
  for {axis, code} <- decode_floats(list) do
    state =
      case code do
        0.0 -> :idle
        1.0 -> :home
        2.0 -> :end
      end

    {axis, state}
  end
end
# Decodes end-stop tokens that arrive as "A"/"B" pairs per axis
# (e.g. ["XA1", "XB0", ...]) into [xa: 1, xb: 0, ...]. Any input that does
# not pair up is returned as [parse_error: remaining_tokens].
@spec decode_end_stops([binary()], Keyword.t()) :: Keyword.t()
def decode_end_stops(list, acc \\ [])

def decode_end_stops([], acc), do: acc

def decode_end_stops(
      [
        <<axis::binary-1, "A", a_raw::binary>>,
        <<axis::binary-1, "B", b_raw::binary>> | tail
      ],
      acc
    ) do
  # Reusing `axis` in both patterns forces the pair to share one axis letter.
  prefix = String.downcase(axis)

  pair = [
    {:"#{prefix}a", String.to_integer(a_raw)},
    {:"#{prefix}b", String.to_integer(b_raw)}
  ]

  # Appending keeps report order (xa, xb, ya, yb, ...); lists here are tiny.
  decode_end_stops(tail, acc ++ pair)
end

def decode_end_stops(error, _acc), do: [parse_error: error]
# Decodes a parameter/value token pair ("P<id>", "V<float>") into a
# one-pair keyword list keyed by the decoded parameter name.
# Raises MatchError when the value is not a clean float, as before.
def decode_pv(["P" <> raw_id, "V" <> raw_value]) do
  {parsed_value, ""} = Float.parse(raw_value)
  key = raw_id |> String.to_integer() |> Param.decode()
  [{key, parsed_value}]
end
# Decodes the six load-report tokens, which must arrive in exactly
# U, X, V, Y, W, Z order, into a list of six integers (same order).
# Raises MatchError on any token that is not a clean integer, as before.
def decode_uxvywz([
      "U" <> u_raw,
      "X" <> x_raw,
      "V" <> v_raw,
      "Y" <> y_raw,
      "W" <> w_raw,
      "Z" <> z_raw
    ]) do
  Enum.map([u_raw, x_raw, v_raw, y_raw, w_raw, z_raw], fn raw ->
    {parsed, ""} = Integer.parse(raw)
    parsed
  end)
end
# Parses tokens like "P13" / "V1" into a keyword list keyed by the downcased
# leading letter (e.g. [p: 13, v: 1]). Tokens whose value is not a clean
# integer are silently dropped. The key atoms must already exist in the VM
# (String.to_existing_atom avoids minting atoms from wire input).
def decode_ints(pvm, acc \\ [])

def decode_ints([], acc), do: Enum.reverse(acc)

def decode_ints([<<letter::binary-size(1), digits::binary>> | tail], acc) do
  key =
    letter
    |> String.downcase()
    |> String.to_existing_atom()

  case Integer.parse(digits) do
    # Keyword.put prepends and drops earlier duplicates; the final reverse
    # restores first-seen order.
    {n, ""} -> decode_ints(tail, Keyword.put(acc, key, n))
    _ -> decode_ints(tail, acc)
  end
end
# Extracts the text between the first two "*" markers of an echo line
# (e.g. "echo: *R09 Q0*") and returns it trimmed, wrapped in a list.
# Raises MatchError when the line has no "*"-delimited segment, as before.
@spec decode_echo(binary()) :: [binary()]
defp decode_echo(str) when is_binary(str) do
  [_before, echoed | _after] = String.split(str, "*", parts: 3)
  [String.trim(echoed)]
end
end
|
farmbot_firmware/lib/farmbot_firmware/gcode/decoder.ex
| 0.731155
| 0.491151
|
decoder.ex
|
starcoder
|
defmodule AshAdmin.Resource do
  # DSL entity for per-attribute form customization: `field :name, ...`.
  @field %Ash.Dsl.Entity{
    describe: "Declare non-default behavior for a specific attribute",
    name: :field,
    schema: AshAdmin.Resource.Field.schema(),
    target: AshAdmin.Resource.Field,
    args: [:name]
  }

  @form %Ash.Dsl.Section{
    describe: "Configure the appearance of fields in admin forms.",
    name: :form,
    entities: [
      @field
    ]
  }

  # Top-level `admin do ... end` section added to resources using this
  # extension. Each schema key below is read back via `Ash.Dsl.Extension.get_opt/5`
  # in the accessor functions further down.
  @admin %Ash.Dsl.Section{
    describe: "Configure the admin dashboard for a given resource",
    name: :admin,
    sections: [
      @form
    ],
    schema: [
      name: [
        type: :string,
        doc: "The proper name to use when this resource appears in the admin interface"
      ],
      actor?: [
        type: :boolean,
        doc: "Whether or not this resource can be used as the actor for requests"
      ],
      show_action: [
        type: :atom,
        doc:
          "The action to use when linking to the resource/viewing a single record. Defaults to the primary read action."
      ],
      read_actions: [
        type: {:list, :atom},
        doc:
          "A list of read actions that can be used to show resource details. By default, all actions are included"
      ],
      create_actions: [
        type: {:list, :atom},
        doc:
          "A list of create actions that can be create records. By default, all actions are included"
      ],
      update_actions: [
        type: {:list, :atom},
        doc:
          "A list of update actions that can be update records. By default, all actions are included"
      ],
      destroy_actions: [
        type: {:list, :atom},
        doc:
          "A list of destroy actions that can be destroy records. By default, all actions are included"
      ],
      polymorphic_tables: [
        type: {:list, :string},
        doc: """
        For resources that use ash_postgres's polymorphism capabilities, you can provide a list of tables that should be available to
        select. These will be added to the list of derivable tables based on scanning all Apis + resources provided to ash_admin.
        """
      ],
      table_columns: [
        type: {:list, :atom},
        doc: "The list of attributes to render on the table view."
      ],
      relationship_display_fields: [
        type: {:list, :atom},
        doc: "The list of attributes to render when it's shown as a relationship on a datatable"
      ]
    ]
  }

  use Ash.Dsl.Extension,
    sections: [@admin],
    transformers: [AshAdmin.Resource.Transformers.ValidateTableColumns]

  @moduledoc """
  An Api extension to alter the behavior of a resource in the admin ui.
  Table of Contents:
  #{Ash.Dsl.Extension.doc_index([@admin])}
  DSL Docs:
  #{Ash.Dsl.Extension.doc([@admin])}
  """

  # BUGFIX: `Code.ensure_compiled/1` returns `{:module, mod}` or
  # `{:error, reason}` — both truthy — so using it directly as an `if`
  # condition always compiled the AshPostgres-calling clause, which then
  # crashed at runtime whenever AshPostgres was not available. Match on the
  # success tuple so the `false` fallback is compiled in that case.
  if match?({:module, _}, Code.ensure_compiled(AshPostgres)) do
    @doc "Returns true if the resource uses ash_postgres polymorphism."
    def polymorphic?(resource) do
      AshPostgres.polymorphic?(resource)
    end
  else
    def polymorphic?(_), do: false
  end

  @doc """
  All polymorphic tables for the resource: those configured in the `admin`
  section plus any derived from relationships across the given apis.
  """
  def polymorphic_tables(resource, apis) do
    resource
    |> Ash.Dsl.Extension.get_opt([:admin], :polymorphic_tables, [], true)
    |> Enum.concat(find_polymorphic_tables(resource, apis))
    |> Enum.uniq()
  end

  @doc "Attributes to render when the resource appears as a relationship on a datatable."
  def relationship_display_fields(resource) do
    Ash.Dsl.Extension.get_opt(resource, [:admin], :relationship_display_fields, nil, true)
  end

  @doc "Attributes to render on the table view, or nil for the default."
  def table_columns(resource) do
    Ash.Dsl.Extension.get_opt(resource, [:admin], :table_columns, nil, true)
  end

  @doc "Display name for the resource; falls back to the last segment of the module name."
  def name(resource) do
    Ash.Dsl.Extension.get_opt(resource, [:admin], :name, nil, true) ||
      resource
      |> Module.split()
      |> List.last()
  end

  @doc "Whether this resource can be used as the actor for requests."
  def actor?(resource) do
    Ash.Dsl.Extension.get_opt(resource, [:admin], :actor?, false, true)
  end

  @doc "Configured read actions, or nil meaning all."
  def read_actions(resource) do
    Ash.Dsl.Extension.get_opt(resource, [:admin], :read_actions, nil, true)
  end

  @doc "Configured create actions, or nil meaning all."
  def create_actions(resource) do
    Ash.Dsl.Extension.get_opt(resource, [:admin], :create_actions, nil, true)
  end

  @doc "Configured update actions, or nil meaning all."
  def update_actions(resource) do
    Ash.Dsl.Extension.get_opt(resource, [:admin], :update_actions, nil, true)
  end

  @doc "Configured destroy actions, or nil meaning all."
  def destroy_actions(resource) do
    Ash.Dsl.Extension.get_opt(resource, [:admin], :destroy_actions, nil, true)
  end

  @doc """
  The action used when showing a single record. Falls back to the primary
  read action's name (or nil if the resource has none).
  """
  def show_action(resource) do
    # NOTE(review): the other accessors pass `true` as the final
    # `configurable?` argument; this one passes `[]`. Both are truthy, so
    # behavior is preserved here — confirm intent against Ash.Dsl.Extension docs.
    action = Ash.Dsl.Extension.get_opt(resource, [:admin], :show_action, false, [])

    if action do
      action
    else
      action = Ash.Resource.Info.primary_action(resource, :read)
      action && action.name
    end
  end

  @doc "All `field` entities declared in the resource's `admin.form` section."
  def fields(resource) do
    Ash.Dsl.Extension.get_entities(resource, [:admin, :form])
  end

  @doc "The `field` entity with the given name, or nil."
  def field(resource, name) do
    resource
    |> fields()
    |> Enum.find(fn field ->
      field.name == name
    end)
  end

  # Derives polymorphic table names by scanning every relationship (across
  # all apis) that targets this resource and collecting the table each
  # relationship stores in its data_layer context.
  defp find_polymorphic_tables(resource, apis) do
    apis
    |> Enum.flat_map(&Ash.Api.resources/1)
    |> Enum.flat_map(&Ash.Resource.Info.relationships/1)
    |> Enum.filter(&(&1.destination == resource))
    |> Enum.map(& &1.context[:data_layer][:table])
    |> Enum.reject(&is_nil/1)
    |> Enum.uniq()
  end
end
|
lib/ash_admin/resource/resource.ex
| 0.701917
| 0.400134
|
resource.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.