code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule Imagineer.Image.PNG.Helpers do
  @moduledoc """
  Helper functions for interpreting PNG header fields: mapping color types to
  color formats, computing scanline and pixel byte sizes, and counting
  channels per pixel.
  """

  @doc """
  Given a PNG's color type, returns its color format
  """
  def color_format(0), do: :grayscale
  def color_format(2), do: :rgb
  def color_format(3), do: :palette
  def color_format(4), do: :grayscale_alpha
  def color_format(6), do: :rgb_alpha

  @doc """
  Given a color format, bit depth, and the width of an image, tells us how many
  bytes are present per scanline (a row of pixels).
  """
  # Sub-byte depths (1/2/4 bits per sample) pack several pixels into one byte,
  # so the width is rounded up to the next whole byte via `div(width + k, k+1)`.
  def bytes_per_row(:grayscale, 1, width), do: div(width + 7, 8)
  def bytes_per_row(:grayscale, 2, width), do: div(width + 3, 4)
  def bytes_per_row(:grayscale, 4, width), do: div(width + 1, 2)
  def bytes_per_row(:grayscale, 8, width), do: width
  def bytes_per_row(:grayscale, 16, width), do: width * 2
  def bytes_per_row(:rgb, 8, width), do: width * 3
  def bytes_per_row(:rgb, 16, width), do: width * 6
  def bytes_per_row(:palette, 1, width), do: div(width + 7, 8)
  def bytes_per_row(:palette, 2, width), do: div(width + 3, 4)
  def bytes_per_row(:palette, 4, width), do: div(width + 1, 2)
  def bytes_per_row(:palette, 8, width), do: width
  def bytes_per_row(:grayscale_alpha, 8, width), do: width * 2
  def bytes_per_row(:grayscale_alpha, 16, width), do: width * 4
  def bytes_per_row(:rgb_alpha, 8, width), do: width * 4
  def bytes_per_row(:rgb_alpha, 16, width), do: width * 8

  @doc """
  Given a color format and bit depth, tells us how many bytes are needed to
  store a pixel
  """
  # Sub-byte depths still report 1 because a pixel never spans a byte boundary.
  def bytes_per_pixel(:grayscale, 1), do: 1
  def bytes_per_pixel(:grayscale, 2), do: 1
  def bytes_per_pixel(:grayscale, 4), do: 1
  def bytes_per_pixel(:grayscale, 8), do: 1
  def bytes_per_pixel(:grayscale, 16), do: 2
  def bytes_per_pixel(:rgb, 8), do: 3
  def bytes_per_pixel(:rgb, 16), do: 6
  def bytes_per_pixel(:palette, 1), do: 1
  def bytes_per_pixel(:palette, 2), do: 1
  def bytes_per_pixel(:palette, 4), do: 1
  def bytes_per_pixel(:palette, 8), do: 1
  def bytes_per_pixel(:grayscale_alpha, 8), do: 2
  def bytes_per_pixel(:grayscale_alpha, 16), do: 4
  def bytes_per_pixel(:rgb_alpha, 8), do: 4
  def bytes_per_pixel(:rgb_alpha, 16), do: 8

  @doc """
  Returns the number of channels for a given `color_format`. For example,
  `:rgb` and `:rbg16` have 3 channels: one for Red, Green, and Blue.
  `:rgb_alpha` and `:rgb_alpha` each have 4 channels: one for Red, Green,
  Blue, and the alpha (transparency) channel.
  """
  def channels_per_pixel(:palette), do: 1
  def channels_per_pixel(:grayscale), do: 1
  def channels_per_pixel(:grayscale_alpha), do: 2
  def channels_per_pixel(:rgb), do: 3
  def channels_per_pixel(:rgb_alpha), do: 4

  @doc """
  Returns a binary consisting of `length` null (`<<0>>`) bytes
  """
  def null_binary(length) when length >= 0 do
    # :binary.copy/2 allocates the zero-filled binary in one step instead of
    # building it byte-by-byte via recursive concatenation.
    :binary.copy(<<0>>, length)
  end
end
|
lib/imagineer/image/png/helpers.ex
| 0.889475
| 0.719063
|
helpers.ex
|
starcoder
|
defmodule GameOfThree.Domain.Game do
  @moduledoc """
  Core domain logic for a Game of Three match: evaluates a player's
  movements and either announces the winner or hands the turn to the
  opponent.
  """

  defstruct game_id: nil,
            game_name: nil,
            player_a: nil,
            player_b: nil,
            move: nil,
            next_to_play: nil

  @doc "Builds a fresh game with a generated id and name and no players seated."
  def create_game do
    %__MODULE__{
      game_id: generate_id(),
      game_name: generate_name()
    }
  end

  @doc """
  Seats `player_name` at the table: the first call fills `:player_a`,
  any later call fills `:player_b`.
  """
  def add_player(game_state, player_name) do
    {:ok, seat_a} = Map.fetch(game_state, :player_a)

    case seat_a do
      nil -> Map.put(game_state, :player_a, player_name)
      _occupied -> Map.put(game_state, :player_b, player_name)
    end
  end

  @doc "Records the opening move and gives player B the next turn."
  def start_game(game_state, initial_move) do
    {:ok, opponent} = Map.fetch(game_state, :player_b)

    game_state
    |> Map.put(:move, initial_move)
    |> Map.put(:next_to_play, opponent)
  end

  @doc "Records `move` and flips `:next_to_play` to the other player."
  def perform_turn(game_state, move) do
    {:ok, current} = Map.fetch(game_state, :next_to_play)
    {:ok, seat_a} = Map.fetch(game_state, :player_a)
    {:ok, seat_b} = Map.fetch(game_state, :player_b)

    opponent = if current == seat_a, do: seat_b, else: seat_a

    game_state
    |> Map.put(:move, move)
    |> Map.put(:next_to_play, opponent)
  end

  @doc """
  Evaluates a movement value. Rejects empty or non-numeric input and values
  outside 1..25_000; a quotient of 1 wins, below 1 ties, otherwise play
  continues with `{:ok, quotient}`.
  """
  def evaluate_move, do: {:error, "An empty movement is not allowed"}
  def evaluate_move(nil), do: {:error, "An empty movement is not allowed"}

  def evaluate_move(value) when is_bitstring(value),
    do: {:error, "A numeric value is expected here"}

  def evaluate_move(value) when value < 1 or value > 25_000,
    do: {:error, "The movement is out of range"}

  def evaluate_move(value) do
    case div(value, 3) do
      1 -> {:winner, "The player xxx has won!"}
      quotient when quotient < 1 -> {:tie, "There is no winners in this game!"}
      quotient -> {:ok, quotient}
    end
  end

  # Timestamp-derived opaque identifier for a game.
  defp generate_id do
    timestamp = NaiveDateTime.utc_now() |> NaiveDateTime.to_iso8601()
    Base.encode64(timestamp <> "_game_of_three")
  end

  # Random, URL-safe 17-character game name.
  defp generate_name do
    17
    |> :crypto.strong_rand_bytes()
    |> Base.url_encode64()
    |> binary_part(0, 17)
  end
end
|
lib/game_of_three/domain/game.ex
| 0.746324
| 0.549157
|
game.ex
|
starcoder
|
defmodule PhoenixStarter.Uploads do
  @moduledoc """
  Logic for creating direct uploads to AWS S3.
  """

  # 25 MiB default cap, used in the policy's content-length-range condition.
  @default_upload_limit 25 * 1024 * 1024

  @doc """
  Returns a presigned URL to the upload on S3.
  It is assumed that the given param is a list of maps with string keys `variation`
  and `key`. The returned URL is to the map with `variation == "source"`.
  Returns `nil` if no match is found.
  """
  @spec upload_url(list(map())) :: String.t() | nil
  def upload_url(upload) do
    upload
    |> Enum.find(fn u ->
      variation = Map.get(u, "variation")
      variation == "source" && Map.has_key?(u, "key")
    end)
    |> case do
      nil ->
        nil

      source ->
        presigned_url(source["key"])
    end
  end

  # Builds a presigned GET URL for `key` against the configured bucket.
  defp presigned_url(key) do
    {:ok, url} =
      ExAws.S3.presigned_url(config(), :get, config(:bucket_name), key, virtual_host: true)

    url
  end

  @doc """
  Creates a presigned direct upload.
  Creates the fields necessary for a presigned and authenticated upload request to AWS S3.
  First argument is a `Phoenix.LiveView.UploadEntry` from a LiveView upload form.
  Second argument is options. Currently `opts` accepts:
  - `:acl` - AWS canned ACL for the upload (default: "private")
  - `:prefix` - path prefix for file location in S3 (default: "cache/")
  - `:upload_limit` - limit in bytes for the upload (default: 25mb)
  Returns a map with the fields necessary for successfully completing an upload request.
  ## Examples
      iex> create_upload(%Phoenix.LiveView.UploadEntry{})
      %{method: "post", ...}
  """
  @spec create_upload(Phoenix.LiveView.UploadEntry.t(), keyword(), DateTime.t()) :: {:ok, map()}
  def create_upload(entry, opts \\ [], now \\ Timex.now()) do
    upload_acl = Keyword.get(opts, :acl, "private")
    upload_limit = Keyword.get(opts, :upload_limit, @default_upload_limit)
    upload_prefix = Keyword.get(opts, :prefix, "cache/")

    upload = %{method: "post", url: bucket_url()}
    policy = generate_policy(entry.client_type, upload_acl, upload_limit, upload_prefix, now)

    fields = %{
      :acl => upload_acl,
      "Content-Type" => entry.client_type,
      :key => generate_key(entry, upload_prefix),
      :policy => policy,
      "x-amz-algorithm" => generate_amz_algorithm(),
      "x-amz-credential" => generate_amz_credential(now),
      "x-amz-date" => generate_amz_date(now),
      "x-amz-signature" => generate_signature(policy, now)
    }

    {:ok, Map.put(upload, :fields, fields)}
  end

  # Builds the base64-encoded POST policy document, valid for 5 minutes.
  defp generate_policy(content_type, acl, upload_limit, prefix, now) do
    expires_at = now |> Timex.shift(minutes: 5) |> Timex.format!("{ISO:Extended:Z}")

    %{
      expiration: expires_at,
      conditions: [
        %{acl: acl},
        %{bucket: config(:bucket_name)},
        ["content-length-range", 0, upload_limit],
        %{"Content-Type" => content_type},
        ["starts-with", "$key", prefix],
        %{"x-amz-algorithm" => generate_amz_algorithm()},
        %{"x-amz-credential" => generate_amz_credential(now)},
        %{"x-amz-date" => generate_amz_date(now)}
      ]
    }
    |> Jason.encode!()
    |> Base.encode64()
  end

  # Derives the SigV4 signing key (date -> region -> service -> request) and
  # signs the encoded policy with it.
  defp generate_signature(policy, now) do
    date = now |> Timex.to_date() |> Timex.format!("{YYYY}{0M}{0D}")

    "AWS4#{config(:secret_access_key)}"
    |> hmac_digest(date)
    |> hmac_digest(config(:region))
    |> hmac_digest("s3")
    |> hmac_digest("aws4_request")
    |> hmac_digest(policy)
    |> hmac_hexdigest()
  end

  # Merges this module's app config with ExAws's S3 config (credentials, region).
  defp config() do
    :phoenix_starter
    |> Application.get_env(PhoenixStarter.Uploads)
    |> Enum.into(%{})
    |> Map.merge(ExAws.Config.new(:s3))
  end

  defp config(key), do: Map.get(config(), key)

  defp bucket_url do
    "https://#{config(:bucket_name)}.s3.amazonaws.com"
  end

  defp generate_amz_algorithm, do: "AWS4-HMAC-SHA256"

  defp generate_amz_credential(now) do
    date = now |> Timex.to_date() |> Timex.format!("{YYYY}{0M}{0D}")
    "#{config(:access_key_id)}/#{date}/#{config(:region)}/s3/aws4_request"
  end

  defp generate_amz_date(now) do
    # NOTE(review): `now` is truncated to a Date before formatting, so the
    # time-of-day is dropped. AWS SigV4 normally expects a full
    # "YYYYMMDDTHHMMSSZ" timestamp for x-amz-date — confirm against the S3
    # POST policy documentation.
    now |> Timex.to_date() |> Timex.format!("{ISO:Basic:Z}")
  end

  # Random, URL-safe object key under `prefix`, preserving the original
  # file extension.
  defp generate_key(entry, prefix) do
    key = 12 |> :crypto.strong_rand_bytes() |> Base.url_encode64(padding: false)
    ext = Path.extname(entry.client_name)
    Path.join([prefix, "#{key}#{ext}"])
  end

  # HMAC-SHA256 of `string` under `key`.
  # Fix: :crypto.hmac/3 was removed in OTP 24; :crypto.mac/4 (available since
  # OTP 22) is the supported equivalent.
  defp hmac_digest(key, string) do
    :crypto.mac(:hmac, :sha256, key, string)
  end

  defp hmac_hexdigest(digest) do
    Base.encode16(digest, case: :lower)
  end
end
|
lib/phoenix_starter/uploads.ex
| 0.843525
| 0.407216
|
uploads.ex
|
starcoder
|
defmodule Harald.HCI.Transport do
  @moduledoc """
  A server to manage lower level transports and parse bluetooth events.
  """
  use GenServer

  @type adapter() :: module()
  @type adapter_opts() :: keyword(any())
  @type id() :: atom()

  @typedoc """
  ## Options
  `:adapter_opts` - `adapter_opts()`. `[]`. The options provided to the adapter on start.
  `:adapter` - `adapter()`. Required. The transport implementation module.
  `:id` - `atom()`. Required. Uniquely identifies this transport instance to Harald.
  `:subscriber_pids` - `MapSet.t()`. `#MapSet<[]>`. The pids that received data and events will be
  sent to.
  """
  @type start_link_opts() :: [
          {:adapter, adapter()},
          {:adapter_opts, adapter_opts()},
          {:id, id()},
          {:subscriber_pids, MapSet.t()}
        ]

  @impl GenServer
  def handle_call({:write, bin}, _from, state) do
    # Delegate the raw write to the adapter and keep its updated state.
    {:ok, new_adapter_state} = state.adapter.write(bin, state.adapter_state)
    {:reply, :ok, %{state | adapter_state: new_adapter_state}}
  end

  def handle_call({:subscribe, pid}, _from, state) do
    subscribers = MapSet.put(state.subscriber_pids, pid)
    {:reply, :ok, %{state | subscriber_pids: subscribers}}
  end

  def handle_call({:publish, data_or_event}, _from, state) do
    # Fan the payload out to every subscriber, tagged with the Harald namespace.
    Enum.each(state.subscriber_pids, fn pid -> send(pid, {Harald, data_or_event}) end)
    {:reply, :ok, state}
  end

  @impl GenServer
  def handle_continue(:setup, state) do
    # Adapter setup is deferred out of init/1 so start_link/1 returns quickly.
    setup_opts = Keyword.put(state.adapter_opts, :transport_pid, self())
    {:ok, adapter_state} = state.adapter.setup(setup_opts)
    {:noreply, %{state | adapter_state: adapter_state}}
  end

  @impl GenServer
  def init(args) do
    initial_state = %{
      adapter: args.adapter,
      adapter_opts: args.adapter_opts,
      adapter_state: %{},
      id: args.id,
      name: args.name,
      subscriber_pids: args.subscriber_pids
    }

    {:ok, initial_state, {:continue, :setup}}
  end

  @doc "Returns the registered name derived from `id`."
  def name(id), do: :"Harald.Transport.Name.#{id}"

  def publish(transport_pid, data_or_event) do
    GenServer.call(transport_pid, {:publish, data_or_event})
  end

  @doc """
  Start the transport.
  """
  @spec start_link(start_link_opts()) :: GenServer.on_start()
  def start_link(opts) do
    opts_map = Map.new(opts)

    cond do
      not Map.has_key?(opts_map, :adapter) ->
        {:error, {:args, %{adapter: ["required"]}}}

      not Map.has_key?(opts_map, :id) ->
        {:error, {:args, %{id: ["required"]}}}

      true ->
        registered_name = name(opts_map.id)

        args =
          opts_map
          |> Map.put(:name, registered_name)
          |> Map.put_new(:adapter_opts, [])
          |> Map.update(:subscriber_pids, MapSet.new(), &Enum.into(&1, MapSet.new()))

        GenServer.start_link(__MODULE__, args, name: registered_name)
    end
  end

  def subscribe(id, pid \\ self()) do
    GenServer.call(name(id), {:subscribe, pid})
  end

  def write(id, bin) when is_binary(bin) do
    GenServer.call(name(id), {:write, bin})
  end
end
|
lib/harald/hci/transport.ex
| 0.825414
| 0.507873
|
transport.ex
|
starcoder
|
defmodule Bible.Versions.ESV do
  @moduledoc """
  Contains the meta data for the English Standard Version (ESV) bible.
  The format of a version file is as follows:
  Line 1 : 'T tt bb ccc name'
  Line 2 : vvv vvv vvv ...
  Where: 'T' is either "O" or "N" for old or new testament book.
  tt is the order of the book in its testament.
  bb is the order of the book in the Bible.
  ccc is the number of chapters in the book.
  name is the name of the book.
  vvv is the number of verses in the chapter.
  Leading and trailing spaces are ignored as well as blank lines.
  """

  @doc """
  Returns the atom identifying this Bible version (`:esv`).
  """
  def get_version do
    :esv
  end

  @doc """
  Returns the human-readable name of this Bible version.
  """
  def get_version_name do
    "English Standard Version"
  end

  @doc """
  Returns the data for the ESV version of the Bible.
  """
  # The heredoc below is raw version data in the format described in the
  # @moduledoc: one book-header line ("T tt bb ccc name") followed by lines of
  # three-digit per-chapter verse counts. It is parsed elsewhere; do not edit
  # the numbers without verifying against the ESV canon.
  def get_version_data do
    """
    O 01 01 050 Genesis
    031 025 024 026 032 022 024 022 029 032 032 020 018 024 021
    016 027 033 038 018 034 024 020 067 034 035 046 022 035 043
    055 032 020 031 029 043 036 030 023 023 057 038 034 034 028
    034 031 022 033 026
    O 02 02 040 Exodus
    022 025 022 031 023 030 029 028 035 029 010 051 022 031 027
    036 016 027 025 026 037 030 033 018 040 037 021 043 046 038
    018 035 023 035 035 038 029 031 043 038
    O 03 03 027 Leviticus
    017 016 017 035 026 023 038 036 024 020 047 008 059 057 033
    034 016 030 037 027 024 033 044 023 055 046 034
    O 04 04 036 Numbers
    054 034 051 049 031 027 089 026 023 036 035 016 033 045 041
    035 028 032 022 029 035 041 030 025 019 065 023 031 039 017
    054 042 056 029 034 013
    O 05 05 034 Deuteronomy
    046 037 029 049 033 025 026 020 029 022 032 031 019 029 023
    022 020 022 021 020 023 029 026 022 019 019 026 069 028 020
    030 052 029 012
    O 06 06 024 Joshua
    018 024 017 024 015 027 026 035 027 043 023 024 033 015 063
    010 018 028 051 009 045 034 016 033
    O 07 07 021 Judges
    036 023 031 024 031 040 025 035 057 018 040 015 025 020 020
    031 013 031 030 048 025
    O 08 08 004 Ruth
    022 023 018 022
    O 09 09 031 1 Samuel
    028 036 021 022 012 021 017 022 027 027 015 025 023 052 035
    023 058 030 024 042 016 023 028 023 043 025 012 025 011 031
    013
    O 10 10 024 2 Samuel
    027 032 039 012 025 023 029 018 013 019 027 031 039 033 037
    023 029 032 044 026 022 051 039 025
    O 11 11 022 1 Kings
    053 046 028 020 032 038 051 066 028 029 043 033 034 031 034
    034 024 046 021 043 029 054
    O 12 12 025 2 Kings
    018 025 027 044 027 033 020 029 037 036 020 022 025 029 038
    020 041 037 037 021 026 020 037 020 030
    O 13 13 029 1 Chronicles
    054 055 024 043 041 066 040 040 044 014 047 041 014 017 029
    043 027 017 019 008 030 019 032 031 031 032 034 021 030
    O 14 14 036 2 Chronicles
    018 017 017 022 014 042 022 018 031 019 023 016 023 014 019
    014 019 034 011 037 020 012 021 027 028 023 009 027 036 027
    021 033 025 033 026 023
    O 15 15 010 Ezra
    011 070 013 024 017 022 028 036 015 044
    O 16 16 013 Nehemiah
    011 020 038 017 019 019 072 018 037 040 036 047 031
    O 17 17 010 Esther
    022 023 015 017 014 014 010 017 032 003
    O 18 18 042 Job
    022 013 026 021 027 030 021 022 035 022 020 025 028 022 035
    022 016 021 029 029 034 030 017 025 006 014 021 028 025 031
    040 022 033 037 016 033 024 041 030 032 026 017
    O 19 19 150 Psalms
    006 011 009 009 013 011 018 010 021 018 007 009 006 007 005
    011 015 051 015 010 014 032 006 010 022 011 014 009 011 013
    025 011 022 023 028 013 040 023 014 018 014 012 005 027 018
    012 010 015 021 023 021 011 007 009 024 014 012 012 018 014
    009 013 012 011 014 020 008 036 037 006 024 020 028 023 011
    013 021 072 013 020 017 008 019 013 014 017 007 019 053 017
    016 016 005 023 011 013 012 009 009 005 008 029 022 035 045
    048 043 014 031 007 010 010 009 008 018 019 002 029 176 007
    008 009 004 008 005 006 005 006 008 008 003 018 003 003 021
    026 009 008 024 014 010 008 012 015 021 010 020 014 009 006
    O 20 20 031 Proverbs
    033 022 035 027 023 035 027 036 018 032 031 028 025 035 033
    033 028 024 029 030 031 029 035 034 028 028 027 028 027 033
    031
    O 21 21 012 Ecclesiastes
    018 026 022 017 019 012 029 017 018 020 010 014
    O 22 22 008 Song of Solomon
    017 017 011 016 016 012 014 014
    O 23 23 066 Isaiah
    031 022 026 006 030 013 025 023 020 034 016 006 022 032 009
    014 014 007 025 006 017 025 018 023 012 021 013 029 024 033
    009 020 024 017 010 022 038 022 008 031 029 025 028 028 025
    013 015 022 026 011 023 015 012 017 013 012 021 014 021 022
    011 012 019 011 025 024
    O 24 24 052 Jeremiah
    019 037 025 031 031 030 034 023 025 025 023 017 027 022 021
    021 027 023 015 018 014 030 040 010 038 024 022 017 032 024
    040 044 026 022 019 032 021 028 018 016 018 022 013 030 005
    028 007 047 039 046 064 034
    O 25 25 005 Lamentations
    022 022 066 022 022
    O 26 26 048 Ezekiel
    028 010 027 017 017 014 027 018 011 022 025 028 023 023 008
    063 024 032 014 044 037 031 049 027 017 021 036 026 021 026
    018 032 033 031 015 038 028 023 029 049 026 020 027 031 025
    024 023 035
    O 27 27 012 Daniel
    021 049 030 037 031 028 028 027 027 021 045 013
    O 28 28 014 Hosea
    009 025 005 019 015 011 016 014 017 015 011 015 015 010
    O 29 29 004 Joel
    020 027 005 021
    O 30 30 009 Amos
    015 016 015 013 027 014 017 014 015
    O 31 31 001 Obadiah
    021
    O 32 32 004 Jonah
    016 011 010 011
    O 33 33 007 Micah
    016 013 012 014 014 016 020
    O 34 34 003 Nahum
    014 014 019
    O 35 35 003 Habakkuk
    017 020 019
    O 36 36 003 Zephaniah
    018 015 020
    O 37 37 002 Haggai
    015 023
    O 38 38 014 Zechariah
    017 017 010 014 011 015 014 023 017 012 017 014 009 021
    O 39 39 003 Malachi
    014 017 024
    N 01 40 028 Matthew
    025 023 017 025 048 034 029 034 038 042 030 050 058 036 039
    028 027 035 030 034 046 046 039 051 046 075 066 020
    N 02 41 016 Mark
    045 028 035 041 043 056 037 038 050 052 033 044 037 072 047
    020
    N 03 42 024 Luke
    080 052 038 044 039 049 050 056 062 042 054 059 035 035 032
    031 037 043 048 047 038 071 056 053
    N 04 43 021 John
    051 025 036 054 047 071 053 059 041 042 057 050 038 031 027
    033 026 040 042 031 025
    N 05 44 028 Acts
    026 047 026 037 042 015 060 040 043 048 030 025 052 028 041
    040 034 028 040 038 040 030 035 027 027 032 044 031
    N 06 45 016 Romans
    032 029 031 025 021 023 025 039 033 021 036 021 014 023 033
    027
    N 07 46 016 1 Corinthians
    031 016 023 021 013 020 040 013 027 033 034 031 013 040 058
    024
    N 08 47 013 2 Corinthians
    024 017 018 018 021 018 016 024 015 018 033 021 013
    N 09 48 006 Galatians
    024 021 029 031 026 018
    N 10 49 006 Ephesians
    023 022 021 032 033 024
    N 11 50 004 Philippians
    030 030 021 023
    N 12 51 004 Colossians
    029 023 025 018
    N 13 52 005 1 Thessalonians
    010 020 013 018 028
    N 14 53 003 2 Thessalonians
    012 017 018
    N 15 54 006 1 Timothy
    020 015 016 016 025 021
    N 16 55 004 2 Timothy
    018 026 017 022
    N 17 56 003 Titus
    016 015 015
    N 18 57 001 Philemon
    025
    N 19 58 013 Hebrews
    014 018 019 016 014 020 028 013 028 039 040 029 025
    N 20 59 005 James
    027 026 018 017 020
    N 21 60 005 1 Peter
    025 025 022 019 014
    N 22 61 003 2 Peter
    021 022 018
    N 23 62 005 1 John
    010 029 024 021 021
    N 24 63 001 2 John
    013
    N 25 64 001 3 John
    015
    N 26 65 001 Jude
    025
    N 27 66 022 Revelation
    020 029 022 011 014 017 017 013 021 011 019 017 018 020 008
    021 018 024 021 015 027 021
    """
  end
end
|
lib/bible/versions/esv.ex
| 0.644449
| 0.709311
|
esv.ex
|
starcoder
|
defmodule Sonda.Sink.Memory do
  @moduledoc """
  An in-memory `Sonda.Sink` that accumulates recorded messages, optionally
  filtered to an allow-list of signals, for later inspection.
  """

  defmodule Defaults do
    @moduledoc false
    # By default every signal is accepted.
    def signals, do: :any
  end

  defstruct signals: Defaults.signals(), records: []

  @type message :: {Sonda.Sink.signal(), Sonda.Sink.timestamp(), any()}
  @type accepted_signals :: :any | [Sonda.Sink.signal()]
  @type config_opts :: [
          {:signals, accepted_signals()}
        ]
  @type matcher :: (message() -> boolean())
  @type t :: %__MODULE__{
          signals: accepted_signals(),
          records: [message()]
        }

  defimpl Sonda.Sink do
    # Dispatch back to the struct's own module so the protocol call reaches
    # Sonda.Sink.Memory.record/4 below.
    def record(%module{} = sink, signal, timestamp, data) do
      module.record(sink, signal, timestamp, data)
    end
  end

  @doc "Builds a sink from the given options (currently only `:signals`)."
  @spec configure() :: t()
  def configure()

  @spec configure(opts :: config_opts()) :: t()
  def configure(opts \\ []) do
    struct(__MODULE__, opts)
  end

  @doc """
  Records `{signal, timestamp, data}` if `signal` is accepted by the sink;
  otherwise returns the sink unchanged. Records are prepended (newest first)
  and reversed by the `records` readers.
  """
  @spec record(
          sink :: t(),
          signal :: Sonda.Sink.signal(),
          timestamp :: Sonda.Sink.timestamp(),
          data :: any()
        ) :: t()
  def record(sink, signal, timestamp, data) do
    if record_signal?(sink, signal) do
      # Prepend for O(1) insertion; readers reverse once when listing.
      %{sink | records: [{signal, timestamp, data} | sink.records]}
    else
      sink
    end
  end

  @doc "Returns `true` if any recorded message satisfies `match`."
  @spec recorded?(sink :: t(), match :: matcher()) :: boolean()
  def recorded?(sink, match) do
    # Enum.any?/2 states the intent directly and always yields a strict
    # boolean, unlike the previous Enum.find_value/3 formulation which
    # returned whatever (truthy) value the matcher produced.
    Enum.any?(sink.records, match)
  end

  @doc "Returns all recorded messages in chronological order."
  @spec records(sink :: t()) :: [message()]
  def records(sink) do
    Enum.reverse(sink.records)
  end

  @doc "Returns the recorded messages satisfying `match`, in chronological order."
  @spec records(sink :: t(), match :: matcher()) :: [message()]
  def records(sink, match) do
    sink.records
    |> Enum.filter(match)
    |> Enum.reverse()
  end

  @doc "Returns `true` if the sink accepts `signal`."
  @spec record_signal?(sink :: t(), signal :: Sonda.Sink.signal()) :: boolean()
  def record_signal?(sink, signal)
  def record_signal?(%{signals: :any}, _signal), do: true
  def record_signal?(%{signals: signals}, signal), do: signal in signals

  @doc """
  Returns `{:ok, message}` when exactly one recorded message satisfies
  `match`, otherwise a tagged error for none or multiple matches.
  """
  @spec one_record(sink :: t(), match :: matcher()) ::
          {:ok, message()} | {:error, :none} | {:error, :multiple}
  def one_record(sink, match) do
    case records(sink, match) do
      [] -> {:error, :none}
      [record] -> {:ok, record}
      _ -> {:error, :multiple}
    end
  end

  @doc "Returns `true` if exactly one recorded message satisfies `match`."
  @spec recorded_once?(sink :: t(), match :: matcher()) :: boolean()
  def recorded_once?(sink, match) do
    case one_record(sink, match) do
      {:ok, _} -> true
      _ -> false
    end
  end
end
|
lib/sonda/sink/memory.ex
| 0.813794
| 0.474875
|
memory.ex
|
starcoder
|
defmodule Vivaldi.Peer.Coordinate do
  @moduledoc """
  Contains the core logic, where the coordinate of a peer is updated
  when it communicates with another peer
  Ported from [hashicorp/serf](https://github.com/hashicorp/serf/tree/master/coordinate)
  """
  use GenServer
  require Logger

  alias Vivaldi.Peer.{CoordinateStash, CoordinateLogger}
  alias Vivaldi.Simulation.Vector

  # Boots the server seeded with the node's last known coordinate from
  # CoordinateStash, registered under the name returned by get_name/1.
  def start_link(config) do
    node_id = config[:node_id]
    coordinate = CoordinateStash.get_coordinate(node_id)
    GenServer.start_link(__MODULE__, {config, coordinate}, name: get_name(node_id))
  end

  # Synchronously folds an observed round-trip time to `other_node_id` into
  # this node's coordinate estimate.
  def update_coordinate(my_node_id, other_node_id, other_coordinate, rtt) do
    GenServer.call(get_name(my_node_id),
                   {:update_coordinate, my_node_id, other_node_id,
                    other_coordinate, rtt})
  end

  def handle_call(:get_coordinate, _from, {config, coordinate}) do
    {:reply, coordinate, {config, coordinate}}
  end

  def handle_call({:update_coordinate, my_node_id, other_node_id, other_coordinate, rtt},
                  _from, {config, my_coordinate}) do
    my_new_coordinate = vivaldi(config, my_coordinate, other_coordinate, rtt)
    # Update CoordinateStash immediately so that PingServer can use it for its next response.
    CoordinateStash.set_coordinate(my_node_id, my_new_coordinate)

    unless config[:local_mode?] do
      # Send coordinate-update event
      event = %{
        i: my_node_id,
        j: other_node_id,
        x_i: my_coordinate,
        x_j: other_coordinate,
        x_i_next: my_new_coordinate,
        rtt: rtt
      }
      CoordinateLogger.log(my_node_id, event)
    end

    {:reply, :ok, {config, my_new_coordinate}}
  end

  # Persist the latest coordinate on shutdown so a restart resumes from it
  # (start_link/1 reads it back out of CoordinateStash).
  def terminate(_reason, {config, coordinate}) do
    node_id = config[:node_id]
    CoordinateStash.set_coordinate(node_id, coordinate)
  end

  # Registered name for the coordinate server of `node_id`.
  def get_name(node_id) do
    :"#{node_id}-coordinate"
  end

  # Fresh coordinate at the origin of a `dimension`-dimensional space with the
  # given initial height and error estimate.
  def new(dimension, height, error) do
    %{vector: Vector.zero(dimension), height: height, error: error}
  end

  # One Vivaldi update step: compare the predicted distance between x_i and
  # x_j with the measured rtt, adjust the local error estimate, and move x_i
  # along the force direction proportionally to the prediction error.
  def vivaldi(config, x_i, x_j, rtt) do
    dist = distance(x_i, x_j)
    rtt = max(rtt, config[:zero_threshold])
    # NOTE(review): `dist` is not clamped like `rtt` is, so if the two
    # coordinates (and heights) coincide this divides by zero; serf's
    # reference implementation divides by the clamped rtt instead — confirm.
    wrongness = abs(dist - rtt) / dist
    total_error = x_i[:error] + x_j[:error]
    total_error = max(total_error, config[:zero_threshold])
    # Weight our own adjustment by how confident we are relative to the peer.
    weight = x_i[:error] / total_error
    error_next = config[:vivaldi_ce] * weight * wrongness + x_i[:error] * (1.0 - config[:vivaldi_ce] * weight)
    error_next = min(error_next, config[:vivaldi_error_max])
    delta = config[:vivaldi_cc] * weight
    force = delta * (rtt - dist)
    {vec_next, height_next} = apply_force(config, x_i, x_j, force)
    %{vector: vec_next, height: height_next, error: error_next}
  end

  # Moves x_i's vector along the unit direction away from/toward x_j by
  # `force`, and adjusts its height when the vector separation is non-zero.
  def apply_force(config, x_i, x_j, force) do
    unit = Vector.unit_vector_at(x_i[:vector], x_j[:vector])
    mag = Vector.diff(x_i[:vector], x_j[:vector]) |> Vector.magnitude()
    force_vec = Vector.scale(unit, force)
    vec_next = Vector.add(x_i[:vector], force_vec)
    height_next = case mag > config[:zero_threshold] do
      true ->
        h = (x_i[:height] + x_j[:height]) * force / mag + x_i[:height]
        # Heights never go below the configured minimum.
        max(h, config[:height_min])
      false ->
        x_i[:height]
    end
    {vec_next, height_next}
  end

  # Predicted network distance: Euclidean distance of the vectors plus both
  # nodes' heights (heights model the access-link latency).
  def distance(%{vector: a_vec, height: a_height, error: _}, %{vector: b_vec, height: b_height, error: _}) do
    vector_dist = Vector.distance(a_vec, b_vec)
    height_dist = a_height + b_height
    vector_dist + height_dist
  end
end
|
vivaldi/lib/peer/coordinate.ex
| 0.766862
| 0.455865
|
coordinate.ex
|
starcoder
|
defmodule NewRelic.Tracer do
  @moduledoc """
  Function Tracing
  To enable function tracing in a particular module, `use NewRelic.Tracer`,
  and annotate the functions you want to `@trace`.
  Traced functions will report as:
  - Segments in Transaction Traces
  - Span Events in Distributed Traces
  - Special custom attributes on Transaction Events
  #### Notes:
  * Traced functions will *not* be tail-call-recursive. **Don't use this for recursive functions**.
  #### Example
  ```elixir
  defmodule MyModule do
    use NewRelic.Tracer
    @trace :func
    def func do
      # Will report as `MyModule.func/0`
    end
  end
  ```
  #### Categories
  To categorize External Service calls you must give the trace annotation a category.
  You may also call `NewRelic.set_span` to provide better naming for metrics & spans, and additionally annotate the outgoing HTTP headers with the Distributed Tracing context to track calls across services.
  ```elixir
  defmodule MyExternalService do
    use NewRelic.Tracer
    @trace {:request, category: :external}
    def request(method, url, headers) do
      NewRelic.set_span(:http, url: url, method: method, component: "HttpClient")
      headers ++ NewRelic.create_distributed_trace_payload(:http)
      HttpClient.request(method, url, headers)
    end
  end
  ```
  This will:
  * Post `External` metrics to APM
  * Add custom attributes to Transaction events:
    - `external_call_count`
    - `external_duration_ms`
    - `external.MyExternalService.request.call_count`
    - `external.MyExternalService.request.duration_ms`
  Transactions that call the traced `ExternalService` functions will contain `external_call_count` attribute
  ```elixir
  get "/endpoint" do
    ExternalService.request(:get, url, headers)
    send_resp(conn, 200, "ok")
  end
  ```
  """

  # Injected into any module that calls `use NewRelic.Tracer`.
  defmacro __using__(_args) do
    quote do
      require NewRelic.Tracer.Macro
      require NewRelic.Tracer.Report
      # :nr_tracers accumulates every @trace annotation seen while the using
      # module compiles; :nr_last_tracer only ever holds the most recent one.
      Module.register_attribute(__MODULE__, :nr_tracers, accumulate: true)
      Module.register_attribute(__MODULE__, :nr_last_tracer, accumulate: false)
      # The Macro module rewrites the annotated functions at compile time via
      # these two compilation hooks.
      @before_compile NewRelic.Tracer.Macro
      @on_definition NewRelic.Tracer.Macro
    end
  end
end
|
lib/new_relic/tracer.ex
| 0.889418
| 0.784154
|
tracer.ex
|
starcoder
|
defmodule IEx do
@moduledoc ~S"""
Elixir's interactive shell.
Some of the functionalities described here will not be available
depending on your terminal. In particular, if you get a message
saying that the smart terminal could not be run, some of the
features described here won't work.
## Helpers
IEx provides a bunch of helpers. They can be accessed by typing
`h()` into the shell or as a documentation for the `IEx.Helpers` module.
## Autocomplete
To discover all available functions for a module, type the module name
followed by a dot, then press tab to trigger autocomplete. For example:
Enum.
Such function may not be available on some Windows shells. You may need
to pass the `--werl` flag when starting IEx, as in `iex --werl` for it
to work. `--werl` may be permanently enabled by setting the `IEX_WITH_WERL`
environment variable.
## Shell history
From Erlang/OTP 20, it is possible to get shell history by passing some
flags that enable it in the VM. This can be done on a per-need basis
when starting IEx:
iex --erl "-kernel shell_history enabled"
If you would rather enable it on your system as a whole, you can use
the `ERL_AFLAGS` environment variable and make sure that it is set
accordingly on your terminal/shell configuration.
On Linux:
export ERL_AFLAGS="-kernel shell_history enabled"
On Windows:
set ERL_AFLAGS "-kernel shell_history enabled"
## Expressions in IEx
As an interactive shell, IEx evaluates expressions. This has some
interesting consequences that are worth discussing.
The first one is that the code is truly evaluated and not compiled.
This means that any benchmarking done in the shell is going to have
skewed results. So never run any profiling nor benchmarks in the shell.
Second, IEx allows you to break an expression into many lines,
since this is common in Elixir. For example:
iex(1)> "ab
...(1)> c"
"ab\nc"
In the example above, the shell will be expecting more input until it
finds the closing quote. Sometimes it is not obvious which character
the shell is expecting, and the user may find themselves trapped in
the state of incomplete expression with no ability to terminate it other
than by exiting the shell.
For such cases, there is a special break-trigger (`#iex:break`) that when
encountered on a line by itself will force the shell to break out of any
pending expression and return to its normal state:
iex(1)> ["ab
...(1)> c"
...(1)> "
...(1)> ]
...(1)> #iex:break
** (TokenMissingError) iex:1: incomplete expression
## The Break command
Inside IEx, hitting `Ctrl+C` will open up the `BREAK` menu. In this
menu you can quit the shell, see process and ets tables information
and much more.
## Exiting the shell
There are a few ways to quit the IEx shell:
* via the `BREAK` menu (available via `Ctrl+C`) by typing `q`, pressing enter
* by hitting `Ctrl+C`, `Ctrl+C`
* by hitting `Ctrl+\ `
If you are connected to remote shell, it remains alive after disconnection.
## Prying and breakpoints
IEx also has the ability to set breakpoints on Elixir code and
"pry" into running processes. This allows the developer to have
an IEx session run inside a given function.
`IEx.pry/0` can be used when you are able to modify the source
code directly and recompile it:
def my_fun(arg1, arg2) do
require IEx; IEx.pry
... implementation ...
end
When the code is executed, it will ask you for permission to be
introspected.
Alternatively, you can use `IEx.break!/4` to setup a breakpoint
on a given module, function and arity you have no control of.
While `IEx.break!/4` is more flexible, it requires OTP 20+ and
it does not contain information about imports and aliases from
the source code.
## The User Switch command
Besides the break command, one can type `Ctrl+G` to get to the
user switch command menu. When reached, you can type `h` to
get more information.
In this menu, developers are able to start new shells and
alternate between them. Let's give it a try:
User switch command
--> s 'Elixir.IEx'
--> c
The command above will start a new shell and connect to it.
Create a new variable called `hello` and assign some value to it:
hello = :world
Now, let's roll back to the first shell:
User switch command
--> c 1
Now, try to access the `hello` variable again:
hello
** (UndefinedFunctionError) undefined function hello/0
The command above fails because we have switched shells.
Since shells are isolated from each other, you can't access the
variables defined in one shell from the other one.
The User Switch command can also be used to terminate an existing
session, for example when the evaluator gets stuck in an infinite
loop or when you are stuck typing an expression:
User switch command
--> i
--> c
The user switch command menu also allows developers to connect to
remote shells using the `r` command. A topic which we will discuss next.
## Remote shells
IEx allows you to connect to another node in two fashions.
First of all, we can only connect to a shell if we give names
both to the current shell and the shell we want to connect to.
Let's give it a try. First start a new shell:
$ iex --sname foo
iex(foo@HOST)1>
The string between the parentheses in the prompt is the name
of your node. We can retrieve it by calling the `node/0`
function:
iex(foo@HOST)1> node()
:"foo@HOST"
iex(foo@HOST)2> Node.alive?()
true
For fun, let's define a simple module in this shell too:
iex(foo@HOST)3> defmodule Hello do
...(foo@HOST)3> def world, do: "it works!"
...(foo@HOST)3> end
Now, let's start another shell, giving it a name as well:
$ iex --sname bar
iex(bar@HOST)1>
If we try to dispatch to `Hello.world`, it won't be available
as it was defined only in the other shell:
iex(bar@HOST)1> Hello.world
** (UndefinedFunctionError) undefined function Hello.world/0
However, we can connect to the other shell remotely. Open up
the User Switch prompt (Ctrl+G) and type:
User switch command
--> r 'foo@HOST' 'Elixir.IEx'
--> c
Now we are connected into the remote node, as the prompt shows us,
and we can access the information and modules defined over there:
rem(foo@macbook)1> Hello.world
"it works"
In fact, connecting to remote shells is so common that we provide
a shortcut via the command line as well:
$ iex --sname baz --remsh foo@HOST
Where "remsh" means "remote shell". In general, Elixir supports:
* remsh from an Elixir node to an Elixir node
* remsh from a plain Erlang node to an Elixir node (through the ^G menu)
* remsh from an Elixir node to a plain Erlang node (and get an `erl` shell there)
Connecting an Elixir shell to a remote node without Elixir is
**not** supported.
## The .iex.exs file
When starting, IEx looks for a local `.iex.exs` file (located in the current
working directory), then a global one (located at `~/.iex.exs`) and loads the
first one it finds (if any). The code in the loaded `.iex.exs` file is
evaluated in the shell's context. So, for instance, any modules that are
loaded or variables that are bound in the `.iex.exs` file will be available in the
shell after it has booted.
For example, take the following `.iex.exs` file:
# Load another ".iex.exs" file
import_file "~/.iex.exs"
# Import some module from lib that may not yet have been defined
import_if_available MyApp.Mod
# Print something before the shell starts
IO.puts "hello world"
# Bind a variable that'll be accessible in the shell
value = 13
Running IEx in the directory where the above `.iex.exs` file is located
results in:
$ iex
Erlang 19 [...]
hello world
Interactive Elixir - press Ctrl+C to exit (type h() ENTER for help)
iex(1)> value
13
It is possible to load another file by supplying the `--dot-iex`
option to IEx. See `iex --help`.
## Configuring the shell
There are a number of customization options provided by IEx. Take a look
at the docs for the `IEx.configure/1` function by typing `h IEx.configure/1`.
Those options can be configured in your project configuration file or globally
by calling `IEx.configure/1` from your `~/.iex.exs` file. For example:
# .iex.exs
IEx.configure(inspect: [limit: 3])
Now run the shell:
$ iex
Erlang 19 [...]
Interactive Elixir - press Ctrl+C to exit (type h() ENTER for help)
iex(1)> [1, 2, 3, 4, 5]
[1, 2, 3, ...]
"""
@doc """
Configures IEx.
The supported options are:
* `:colors`
* `:inspect`
* `:width`
* `:history_size`
* `:default_prompt`
* `:alive_prompt`
They are discussed individually in the sections below.
## Colors
A keyword list that encapsulates all color settings used by the
shell. See documentation for the `IO.ANSI` module for the list of
supported colors and attributes.
List of supported keys in the keyword list:
* `:enabled` - boolean value that allows for switching the coloring on and off
* `:eval_result` - color for an expression's resulting value
* `:eval_info` - ... various informational messages
* `:eval_error` - ... error messages
* `:eval_interrupt` - ... interrupt messages
* `:stack_info` - ... the stacktrace color
* `:blame_diff` - ... when blaming source with no match
* `:ls_directory` - ... for directory entries (ls helper)
* `:ls_device` - ... device entries (ls helper)
When printing documentation, IEx will convert the Markdown
documentation to ANSI as well. Colors for this can be configured
via:
* `:doc_code` - the attributes for code blocks (cyan, bright)
* `:doc_inline_code` - inline code (cyan)
* `:doc_headings` - h1 and h2 (yellow, bright)
* `:doc_title` - the overall heading for the output (reverse, yellow, bright)
* `:doc_bold` - (bright)
* `:doc_underline` - (underline)
IEx will also color inspected expressions using the `:syntax_colors`
option. Such can be disabled with:
IEx.configure [colors: [syntax_colors: false]]
You can also configure the syntax colors, however, as desired:
IEx.configure [colors: [syntax_colors: [atom: :red]]]
Configuration for most built-in data types is supported: `:atom`,
`:string`, `:binary`, `:list`, `:number`, `:boolean`, `:nil`, etc.
The default is:
[number: :magenta, atom: :cyan, string: :green,
boolean: :magenta, nil: :magenta]
## Inspect
A keyword list containing inspect options used by the shell
when printing results of expression evaluation. Default to
pretty formatting with a limit of 50 entries.
To show all entries, configure the limit to `:infinity`:
IEx.configure [inspect: [limit: :infinity]]
See `Inspect.Opts` for the full list of options.
## Width
An integer indicating the maximum number of columns to use in output.
The default value is 80 columns. The actual output width is the minimum
of this number and result of `:io.columns`. This way you can configure IEx
to be your largest screen size and it should always take up the full width
of your current terminal screen.
## History size
Number of expressions and their results to keep in the history.
The value is an integer. When it is negative, the history is unlimited.
## Prompt
This is an option determining the prompt displayed to the user
when awaiting input.
The value is a keyword list with two possible keys representing prompt types:
* `:default_prompt` - used when `Node.alive?/0` returns `false`
* `:alive_prompt` - used when `Node.alive?/0` returns `true`
The following values in the prompt string will be replaced appropriately:
* `%counter` - the index of the history
* `%prefix` - a prefix given by `IEx.Server`
* `%node` - the name of the local node
"""
# Thin wrapper: option validation and storage live in IEx.Config.
def configure(options), do: IEx.Config.configure(options)
@doc """
Returns IEx configuration.
"""
def configuration do
IEx.Config.configuration()
end
@doc """
Registers a function to be invoked after the IEx process is spawned.
"""
def after_spawn(fun) when is_function(fun) do
IEx.Config.after_spawn(fun)
end
@doc """
Returns registered `after_spawn` callbacks.
"""
def after_spawn do
IEx.Config.after_spawn()
end
@doc """
Returns `true` if IEx was started.
"""
def started? do
IEx.Config.started?()
end
@doc """
Returns `string` escaped using the specified `color`.
ANSI escapes in `string` are not processed in any way.
"""
def color(color, string) do
case IEx.Config.color(color) do
nil ->
string
ansi ->
[ansi | string] |> IO.ANSI.format(true) |> IO.iodata_to_binary()
end
end
@doc """
Gets the IEx width for printing.
Used by helpers and it has a default maximum cap of 80 chars.
"""
def width do
IEx.Config.width()
end
@doc """
Gets the options used for inspecting.
"""
def inspect_opts do
IEx.Config.inspect_opts()
end
@doc """
Pries into the process environment.
This is useful for debugging a particular chunk of code
when executed by a particular process. The process becomes
the evaluator of IEx commands and is temporarily changed to
have a custom group leader. Those values are reverted by
calling `IEx.Helpers.respawn/0`, which starts a new IEx shell,
freeing up the pried one.
When a process is pried, all code runs inside IEx and has
access to all imports and aliases from the original code.
However, the code is evaluated and therefore cannot access
private functions of the module being pried. Module functions
still need to be accessed via `Mod.fun(args)`.
Alternatively, you can use `IEx.break!/4` to set up a breakpoint
on a given module, function and arity you have no control of.
While `IEx.break!/4` is more flexible, it requires OTP 20+ and
it does not contain information about imports and aliases from
the source code.
## Examples
Let's suppose you want to investigate what is happening
with some particular function. By invoking `IEx.pry/1` from
the function, IEx will allow you to access its binding
(variables), verify its lexical information and access
the process information. Let's see an example:
import Enum, only: [map: 2]
defmodule Adder do
def add(a, b) do
c = a + b
require IEx; IEx.pry
end
end
When invoking `Adder.add(1, 2)`, you will receive a message in
your shell to pry the given environment. By allowing it,
the shell will be reset and you gain access to all variables
and the lexical scope from above:
pry(1)> map([a, b, c], &IO.inspect(&1))
1
2
3
Keep in mind that `IEx.pry/1` runs in the caller process,
blocking the caller during the evaluation cycle. The caller
process can be freed by calling `respawn/0`, which starts a
new IEx evaluation cycle, letting this one go:
pry(2)> respawn()
true
Interactive Elixir - press Ctrl+C to exit (type h() ENTER for help)
Setting variables or importing modules in IEx does not
affect the caller's environment. However, sending and
receiving messages will change the process state.
## Pry and macros
When setting up Pry inside a code defined by macros, such as:
defmacro __using__(_) do
quote do
def add(a, b) do
c = a + b
require IEx; IEx.pry
end
end
end
The variables defined inside `quote` won't be available during
prying due to the hygiene mechanism in quoted expressions. The
hygiene mechanism changes the variable names in quoted expressions
so they don't collide with variables defined by the users of the
macros. Therefore the original names are not available.
## Pry and mix test
To use `IEx.pry/0` during tests, you need to run Mix inside
`iex` and pass the `--trace` to `mix test` to avoid running
into timeouts:
iex -S mix test --trace
iex -S mix test path/to/file:line --trace
"""
defmacro pry() do
  # Expands in the caller, so binding/0 and __ENV__ capture the *caller's*
  # variables and lexical scope — not IEx's.
  quote do: IEx.Pry.pry(binding(), __ENV__)
end
@doc """
Macro-based shortcut for `IEx.break!/4`.
"""
defmacro break!(ast, stops \\ 1) do
quote do
IEx.__break__!(unquote(Macro.escape(ast)), unquote(Macro.escape(stops)), __ENV__)
end
end
# Resolves the AST handed to `IEx.break!/2` into an `IEx.Pry.break!/5` call.
# Accepted shapes: `Mod.fun/arity`, `Mod.fun(args)` and
# `Mod.fun(args) when guards`; anything else raises ArgumentError.
@doc false
def __break__!({:/, _, [call, arity]} = ast, stops, env) when is_integer(arity) do
  with {module, fun, []} <- Macro.decompose_call(call),
       module when is_atom(module) <- Macro.expand(module, env) do
    # `quote(do: _)` is the match-all argument pattern.
    IEx.Pry.break!(module, fun, arity, quote(do: _), stops)
  else
    _ ->
      raise_unknown_break_ast!(ast)
  end
end

def __break__!({{:., _, [module, fun]}, _, args} = ast, stops, env) do
  __break__!(ast, module, fun, args, true, stops, env)
end

def __break__!({:when, _, [{{:., _, [module, fun]}, _, args}, guards]} = ast, stops, env) do
  __break__!(ast, module, fun, args, guards, stops, env)
end

# BUG FIX: this fallback clause previously took only two arguments while
# every caller (the `break!/2` macro) invokes `__break__!/3`, so an
# unrecognized AST crashed with FunctionClauseError instead of raising the
# intended, descriptive ArgumentError below.
def __break__!(ast, _stops, _env) do
  raise_unknown_break_ast!(ast)
end
# Shared resolver for the `Mod.fun(args)` and `Mod.fun(args) when guards`
# shapes: expands the argument patterns and guards in the caller's
# environment and installs the breakpoint with the expanded pattern.
defp __break__!(ast, module, fun, args, guards, stops, env) do
  module = Macro.expand(module, env)

  unless is_atom(module) do
    raise_unknown_break_ast!(ast)
  end

  # Wrap args/guards into a single `case` clause head so the compiler can
  # expand the pattern (resolving aliases, pins, etc.) without evaluating it.
  pattern = {:when, [], [{:{}, [], args}, guards]}

  to_expand =
    quote do
      # `Unknown.module()` is only a dummy case subject — the clause
      # pattern is the part we care about after expansion.
      case Unknown.module() do
        unquote(pattern) -> :ok
      end
    end

  # Destructure the expanded `case` AST to pull the expanded clause head
  # back out; relies on :elixir_expand's internal AST shape.
  {{:case, _, [_, [do: [{:->, [], [[expanded], _]}]]]}, _} =
    :elixir_expand.expand(to_expand, env)

  IEx.Pry.break!(module, fun, length(args), expanded, stops)
end
# Raises a uniform, user-facing error for any AST shape `break!/2` cannot
# interpret. The heredoc text is runtime output — do not reword it.
defp raise_unknown_break_ast!(ast) do
  raise ArgumentError, """
  unknown expression to break on, expected one of:
  * Mod.fun/arity, such as: URI.parse/1
  * Mod.fun(arg1, arg2, ...), such as: URI.parse(_)
  * Mod.fun(arg1, arg2, ...) when guard, such as: URI.parse(var) when is_binary(var)
  Got #{Macro.to_string(ast)}
  """
end
@doc """
Sets up a breakpoint in `module`, `function` and `arity` with
the given number of `stops`.
This function will instrument the given module and load a new
version in memory with breakpoints at the given function and
arity. If the module is recompiled, all breakpoints are lost.
When a breakpoint is reached, IEx will ask if you want to `pry`
the given function and arity. In other words, this works similar
to `IEx.pry/0` as the running process becomes the evaluator of
IEx commands and is temporarily changed to have a custom group
leader. However, differently from `IEx.pry/0`, aliases and imports
from the source code won't be available in the shell.
IEx helpers includes many conveniences related to breakpoints.
Below they are listed with the full module, such as `IEx.Helpers.breaks/0`,
but remember it can be called directly as `breaks()` inside IEx.
They are:
* `IEx.Helpers.break!/2` - sets up a breakpoint for a given `Mod.fun/arity`
* `IEx.Helpers.break!/4` - sets up a breakpoint for the given module, function, arity
* `IEx.Helpers.breaks/0` - prints all breakpoints and their ids
* `IEx.Helpers.continue/0` - continues until the next breakpoint in the same shell
* `IEx.Helpers.open/0` - opens editor on the current breakpoint
* `IEx.Helpers.remove_breaks/0` - removes all breakpoints in all modules
* `IEx.Helpers.remove_breaks/1` - removes all breakpoints in a given module
* `IEx.Helpers.reset_break/1` - sets the number of stops on the given id to zero
* `IEx.Helpers.reset_break/3` - sets the number of stops on the given module, function, arity to zero
* `IEx.Helpers.respawn/0` - starts a new shell (breakpoints will ask for permission once more)
* `IEx.Helpers.whereami/1` - shows the current location
By default, the number of stops in a breakpoint is 1. Any follow-up
call won't stop the code execution unless another breakpoint is set.
Alternatively, the number of stops can be increased by passing the `stops`
argument. `IEx.Helpers.reset_break/1` and `IEx.Helpers.reset_break/3`
can be used to reset the number back to zero. Note the module remains
"instrumented" even after all stops on all breakpoints are consumed.
You can remove the instrumentation in a given module by calling
`IEx.Helpers.remove_breaks/1` and on all modules by calling
`IEx.Helpers.remove_breaks/0`.
To exit a breakpoint, the developer can either invoke `continue()`,
which will block the shell until the next breakpoint is found or
the process terminates, or invoke `respawn()`, which starts a new IEx
shell, freeing up the pried one.
This functionality only works on Elixir code and requires OTP 20+.
## Examples
The following sets up a breakpoint on `URI.decode_query/2`:
IEx.break!(URI, :decode_query, 2)
This call will setup a breakpoint that stops once.
To set a breakpoint that will stop 10 times:
IEx.break!(URI, :decode_query, 2, 10)
`IEx.break!/2` is a convenience macro that allows breakpoints
to be given in the `Mod.fun/arity` format:
require IEx
IEx.break!(URI.decode_query/2)
Or to set a breakpoint that will stop 10 times:
IEx.break!(URI.decode_query/2, 10)
`IEx.break!/2` allows patterns to be given, triggering the
breakpoint only in some occasions. For example, to trigger
the breakpoint only when the first argument is the "foo=bar"
string:
IEx.break!(URI.decode_query("foo=bar", _))
Or to trigger it whenever the second argument is a map with
at least one element:
IEx.break!(URI.decode_query(_, map) when map_size(map) > 0)
Only a single break point can be set per function. So if you call
`IEx.break!` multiple times with different patterns, only the last
pattern is kept.
This function returns the breakpoint ID and will raise if there
is an error setting up the breakpoint.
## Breaks and mix test
To use `IEx.break!/4` during tests, you need to run Mix inside
`iex` and pass the `--trace` to `mix test` to avoid running
into timeouts:
iex -S mix test --trace
iex -S mix test path/to/file:line --trace
"""
def break!(module, function, arity, stops \\ 1) when is_integer(arity) do
  # `_` pattern: stop on every invocation, regardless of arguments.
  match_any = quote(do: _)
  IEx.Pry.break!(module, function, arity, match_any, stops)
end
## Callbacks

# This is a callback invoked by Erlang shell utilities
# when someone presses Ctrl+G and adds 's Elixir.IEx'.
@doc false
def start(opts \\ [], mfa \\ {IEx, :dont_display_result, []}) do
  spawn(fn ->
    # Block until :init reports the system has booted, so the :iex
    # application and the IO system are available before the shell starts.
    case :init.notify_when_started(self()) do
      :started -> :ok
      _ -> :init.wait_until_started()
    end

    :ok = start_iex()
    :ok = set_expand_fun()
    :ok = run_after_spawn()

    IEx.Server.start(opts, mfa)
  end)
end
@doc false
def dont_display_result do
  # Sentinel atom; callers return it to indicate the result should not
  # be printed by the shell.
  :"do not show this result in output"
end
## Helpers

# Ensures the :iex application (and its dependencies) is running;
# the match asserts success.
defp start_iex do
  {:ok, _started} = Application.ensure_all_started(:iex)
  :ok
end
# Installs tab-completion on the current group leader. When the group
# leader lives on a remote node (remsh), the expander must run there,
# so IEx.Remsh is shipped over first.
defp set_expand_fun do
  gl = Process.group_leader()
  glnode = node(gl)

  expand_fun =
    if glnode != node() do
      _ = ensure_module_exists(glnode, IEx.Remsh)
      IEx.Remsh.expand(node())
    else
      &IEx.Autocomplete.expand(&1)
    end

  # expand_fun is not supported by a shell variant
  # on Windows, so we do two IO calls, not caring
  # about the result of the expand_fun one.
  _ = :io.setopts(gl, expand_fun: expand_fun)
  :io.setopts(gl, binary: true, encoding: :unicode)
end
# Loads `mod`'s object code on the remote `node` if it is not loaded yet.
# NOTE(review): an `{:badrpc, _}` result from the is_loaded call is truthy
# and would silently skip the load — presumably acceptable for remsh
# setup, but worth confirming.
defp ensure_module_exists(node, mod) do
  unless :rpc.call(node, :code, :is_loaded, [mod]) do
    {m, b, f} = :code.get_object_code(mod)
    {:module, _} = :rpc.call(node, :code, :load_binary, [m, f, b])
  end
end
# Runs the registered after_spawn callbacks oldest-first (they are stored
# most-recent-first), purely for their side effects.
defp run_after_spawn do
  after_spawn()
  |> Enum.reverse()
  |> Enum.each(fn callback -> callback.() end)

  :ok
end
end
|
lib/iex/lib/iex.ex
| 0.779783
| 0.517693
|
iex.ex
|
starcoder
|
defmodule Mockery do
  @moduledoc """
  Core functionality
  """

  alias Mockery.Utils

  # `use Mockery` pulls in the mocking, assertion and history helpers.
  defmacro __using__(_opts) do
    quote do
      import Mockery
      import Mockery.Assertions
      import Mockery.History, only: [enable_history: 0, enable_history: 1, disable_history: 0]
    end
  end

  # Optional module used as the fallback implementation while mocking.
  @typep global_mock :: module | nil

  @typedoc """
  Mockery uses tuple calls to send additional data to internal proxy module
  """
  @opaque proxy_tuple :: {Mockery.Proxy, module, global_mock}

  @typedoc """
  Used to avoid unnecessary compile-time dependencies between modules

  ## Examples

      defmodule Foo do
        # this creates compile-time dependency between Foo and Bar
        @bar1 Mockery.of(Bar)

        # same result but without compile-time dependency
        @bar2 Mockery.of("Bar")
      end

  `mix xref graph` can be used to check difference between module and string versions
  """
  @type elixir_module_as_string :: String.t()

  @doc """
  Function used to prepare module for mocking.

  For Mix.env other than :test it returns module given in the first argument.
  If Mix.env equals :test it creates a proxy to the original module.
  When Mix is missing it assumes that env is :prod.

  ## Examples

  #### Prepare for mocking (elixir module)

      defmodule Foo do
        @bar Mockery.of("Bar")

        def foo do
          @bar.bar()
        end
      end

  It is also possible to pass the module in elixir format

      @bar Mockery.of(Bar)

  but it is not recommended as it creates an unnecessary compile-time dependency
  (see `mix xref graph` output for both versions).

  #### Prepare for mocking (erlang module)

      defmodule Foo do
        @crypto Mockery.of(:crypto)

        def foo do
          @crypto.rand_seed()
        end
      end

  #### Prepare for mocking with global mock

      # test/support/global_mocks/bar.ex
      defmodule BarGlobalMock do
        def bar, do: :mocked
      end

      # lib/foo.ex
      defmodule Foo do
        @bar Mockery.of(Bar, by: BarGlobalMock)

        def foo do
          @bar.bar()
        end
      end

  ## OTP21+

  Internally mockery is using tuple calls to pass additional data to its proxy module
  when mock is called. Tuple calls are disabled by default in OTP21+ and require an additional
  compile flag to be reenabled.

      defmodule Foo do
        @compile :tuple_calls
        @bar Mockery.of("Bar")

        # ...
      end

  If you don't want to reenable tuple calls, there's also a new macro-based alternative
  (for more information see `Mockery.Macro` module).
  """
  @spec of(
          mod :: module | elixir_module_as_string,
          opts :: [by: module | elixir_module_as_string]
        ) :: module | proxy_tuple
  # Two separate `when` clauses act as an OR: `mod` may be atom or binary.
  def of(mod, opts \\ [])
      when is_atom(mod)
      when is_binary(mod) do
    # `opts[:env]` allows overriding the detected Mix env (used in tests).
    case opts[:env] || mix_env() do
      :test ->
        do_proxy_tuple(mod, opts)

      _ ->
        to_mod(mod)
    end
  end

  @doc """
  DEPRECATED

  Mockery was not designed as a solution for other libraries.
  It was a bad decision to try to workaround this.
  This approach was also extremely ugly and lacking all the advantages of Mockery.
  """
  def new(mod, opts \\ [])
      when is_atom(mod)
      when is_binary(mod) do
    do_proxy_tuple(mod, opts)
  end

  # Builds the tuple-call proxy: {Mockery.Proxy, original_module, global_mock}.
  defp do_proxy_tuple(mod, opts) do
    {Mockery.Proxy, to_mod(mod), to_mod(opts[:by])}
  end

  # Normalizes a module given as atom or string ("Bar" -> Elixir.Bar).
  defp to_mod(nil), do: nil
  defp to_mod(mod) when is_atom(mod), do: mod
  defp to_mod(mod) when is_binary(mod), do: Module.concat([mod])

  @doc """
  Function used to create mock in context of single test.

  Mock created in one test won't leak to another.
  It can be used safely in asynchronous tests.

  Mocks can be created with static value:

      mock Mod, [fun: 2], "mocked value"

  or function:

      mock Mod, [fun: 2], fn(_, arg2) -> arg2 end

  Keep in mind that function inside mock must have same arity as
  original one.

  This:

      mock Mod, [fun: 2], &to_string/1

  will raise an error.

  It is also possible to mock function with given name and any arity

      mock Mod, :fun, "mocked value"

  but this version doesn't support function as value.

  Also, multiple mocks for same module are chainable

      Mod
      |> mock(:fun1, "value")
      |> mock([fun2: 1], &string/1)
  """
  def mock(mod, fun, value \\ :mocked)

  # Clause order matters: a function value paired with a bare atom name is
  # rejected first, because arity information is required for dynamic mocks.
  def mock(mod, fun, value) when is_atom(fun) and is_function(value) do
    {:arity, arity} = :erlang.fun_info(value, :arity)

    raise Mockery.Error, """
    Dynamic mock requires [function: arity] syntax.
    Please use:
    mock(#{Utils.print_mod(mod)}, [#{fun}: #{arity}], fn(...) -> ... end)
    """
  end

  # nil/false are stored as marker modules — presumably so a mock whose value
  # is nil/false is distinguishable from "no mock stored"; see Mockery.Proxy.
  def mock(mod, fun, nil), do: do_mock(mod, fun, Mockery.Nil)
  def mock(mod, fun, false), do: do_mock(mod, fun, Mockery.False)
  def mock(mod, fun, value), do: do_mock(mod, fun, value)

  # Stores the mock and returns the module so calls can be chained.
  defp do_mock(mod, fun, value) do
    Utils.put_mock(mod, fun, value)
    mod
  end

  @compile {:inline, mix_env: 0}
  # Mix is absent in releases; assume :prod then.
  defp mix_env do
    if function_exported?(Mix, :env, 0), do: Mix.env(), else: :prod
  end
end
|
lib/mockery.ex
| 0.845401
| 0.464598
|
mockery.ex
|
starcoder
|
defmodule TrademarkFreeStrategicLandWarfare.Players.Johnny5 do
  @moduledoc """
  A greedy random player: prefers winning moves, then forward movement,
  then sideways movement, and retreats only when nothing else is possible,
  choosing uniformly at random within the best available tier.
  """

  alias TrademarkFreeStrategicLandWarfare.{Board, Player, Piece}

  @behaviour Player

  @type direction() :: :north | :west | :east | :south
  # FIX: `Integer.t()` is not a valid type (Integer defines no t/0);
  # a move count is always at least 1, hence pos_integer().
  @type count() :: pos_integer()
  @type state() :: any()

  @spec name() :: binary()
  def name() do
    "Johnny5"
  end

  # should return a list with 4 lists of 10 piece-name atoms (:miner, :colonel, etc) per list
  # FIX: `Atom.t()` is not a valid type; use the built-in atom().
  @spec initial_pieces_placement() :: nonempty_list([atom(), ...])
  def initial_pieces_placement() do
    [
      [:sergeant, :sergeant, :scout, :scout, :scout, :scout, :scout, :scout, :marshall, :general],
      [:bomb, :bomb, :major, :major, :major, :scout, :sergeant, :scout, :sergeant, :lieutenant],
      [:bomb, :bomb, :spy, :lieutenant, :lieutenant, :lieutenant, :captain, :captain, :captain,
       :captain],
      [:flag, :bomb, :bomb, :miner, :miner, :miner, :colonel, :colonel, :miner, :miner]
    ]
  end

  @spec turn(
          %TrademarkFreeStrategicLandWarfare.Board{},
          %TrademarkFreeStrategicLandWarfare.Player{},
          state()
        ) :: {binary(), direction(), count(), state()}
  def turn(%Board{rows: rows} = board, %Player{number: number}, state) do
    # find all eligible pieces: ours, excluding immovable bombs and the flag
    move_partitioned_pieces =
      rows
      |> List.flatten()
      |> Enum.flat_map(fn
        %Piece{player: ^number, name: name} = piece when name not in [:bomb, :flag] -> [piece]
        _ -> []
      end)
      |> partition_by_move(board)

    # select from them, biasing towards pieces that can win, then those that can advance,
    # then west/east, then move backward
    eligible_moves =
      Enum.find(
        [
          Map.get(move_partitioned_pieces, :win, []),
          Map.get(
            move_partitioned_pieces,
            Board.maybe_invert_player_direction(:north, number),
            []
          ),
          Map.get(move_partitioned_pieces, :west, []) ++
            Map.get(move_partitioned_pieces, :east, []),
          Map.get(
            move_partitioned_pieces,
            Board.maybe_invert_player_direction(:south, number),
            []
          )
        ],
        # FIX: `length(list) > 0` walks the whole list; `!= []` is O(1).
        fn list -> list != [] end
      )

    # randomly select one move from the best non-empty tier
    case eligible_moves do
      nil ->
        raise "no move possible"

      moves ->
        moves
        |> Enum.random()
        |> Tuple.append(state)
    end
  end

  # Buckets each piece's single-square moves by outcome/direction:
  # %{win: [...], north: [...], west: [...], east: [...], south: [...]}
  # with entries of the form {uuid, direction, 1}.
  defp partition_by_move(pieces, board) do
    # TODO: reduce_while and halt when preferred one found (win, progressing forward)
    Enum.reduce(pieces, %{}, fn piece, acc ->
      Enum.reduce([:north, :west, :east, :south], acc, fn direction, dir_acc ->
        case Board.move(board, piece.player, piece.uuid, direction, 1) do
          {:ok, :win, _} ->
            # this shouldn't ever get hit, because we'll never know as a player
            # where the opponent's flag is without trying to capture it. putting
            # this here for that note, and just in case.
            Map.update(
              dir_acc,
              :win,
              [{piece.uuid, direction, 1}],
              &[{piece.uuid, direction, 1} | &1]
            )

          {:error, :unknown_result} ->
            # allowed move, but masked piece. include in the possibles.
            Map.update(
              dir_acc,
              direction,
              [{piece.uuid, direction, 1}],
              &[{piece.uuid, direction, 1} | &1]
            )

          {:ok, %Board{}} ->
            # allowed move -- no differentiation on whether attack happened
            Map.update(
              dir_acc,
              direction,
              [{piece.uuid, direction, 1}],
              &[{piece.uuid, direction, 1} | &1]
            )

          _ ->
            # illegal move (off-board, blocked, lake, etc.): drop it
            dir_acc
        end
      end)
    end)
  end
end
|
lib/trademark_free_strategic_land_warfare/players/johnny_5.ex
| 0.528047
| 0.501404
|
johnny_5.ex
|
starcoder
|
defmodule Geo.PostGIS do
@moduledoc """
PostGIS functions that can be used in Ecto queries.
[PostGIS Function Documentation](http://postgis.net/docs/manual-1.3/ch06.html).
Currently only the OpenGIS functions are implemented.
## Examples
defmodule Example do
import Ecto.Query
import Geo.PostGIS
def example_query(geom) do
from location in Location, limit: 5, select: st_distance(location.geom, ^geom)
end
end
"""
# -- Coordinate transforms and distance predicates --------------------------

defmacro st_transform(wkt, srid) do
  quote do: fragment("ST_Transform(?, ?)", unquote(wkt), unquote(srid))
end

defmacro st_distance(geometryA, geometryB) do
  quote do: fragment("ST_Distance(?,?)", unquote(geometryA), unquote(geometryB))
end

@doc """
Casts the 2 geometries given to geographies in order to return distance in meters.
"""
defmacro st_distance_in_meters(geometryA, geometryB) do
  quote do:
          fragment(
            "ST_Distance(?::geography, ?::geography)",
            unquote(geometryA),
            unquote(geometryB)
          )
end

defmacro st_distancesphere(geometryA, geometryB) do
  quote do: fragment("ST_DistanceSphere(?,?)", unquote(geometryA), unquote(geometryB))
end

@doc """
Please note that ST_Distance_Sphere has been deprecated as of Postgis 2.2.
Postgis 2.1 is no longer supported on PostgreSQL >= 9.5.
This macro is still in place to support users of PostgreSQL <= 9.4.x.
"""
defmacro st_distance_sphere(geometryA, geometryB) do
  quote do: fragment("ST_Distance_Sphere(?,?)", unquote(geometryA), unquote(geometryB))
end

defmacro st_dwithin(geometryA, geometryB, float) do
  quote do:
          fragment("ST_DWithin(?,?,?)", unquote(geometryA), unquote(geometryB), unquote(float))
end

@doc """
Casts the 2 geometries given to geographies in order to check for distance in meters.
"""
defmacro st_dwithin_in_meters(geometryA, geometryB, float) do
  quote do:
          fragment(
            "ST_DWithin(?::geography, ?::geography, ?)",
            unquote(geometryA),
            unquote(geometryB),
            unquote(float)
          )
end

# -- Spatial relationship predicates ----------------------------------------

defmacro st_equals(geometryA, geometryB) do
  quote do: fragment("ST_Equals(?,?)", unquote(geometryA), unquote(geometryB))
end

defmacro st_disjoint(geometryA, geometryB) do
  quote do: fragment("ST_Disjoint(?,?)", unquote(geometryA), unquote(geometryB))
end

defmacro st_intersects(geometryA, geometryB) do
  quote do: fragment("ST_Intersects(?,?)", unquote(geometryA), unquote(geometryB))
end

defmacro st_touches(geometryA, geometryB) do
  quote do: fragment("ST_Touches(?,?)", unquote(geometryA), unquote(geometryB))
end
defmacro st_crosses(geometryA, geometryB) do
  # BUG FIX: the SQL function name was misspelled "ST_Crosess", which does
  # not exist in PostGIS, so every query using this macro failed at the
  # database with an "unknown function" error.
  quote do: fragment("ST_Crosses(?,?)", unquote(geometryA), unquote(geometryB))
end
defmacro st_within(geometryA, geometryB) do
  quote do: fragment("ST_Within(?,?)", unquote(geometryA), unquote(geometryB))
end

defmacro st_overlaps(geometryA, geometryB) do
  quote do: fragment("ST_Overlaps(?,?)", unquote(geometryA), unquote(geometryB))
end

defmacro st_contains(geometryA, geometryB) do
  quote do: fragment("ST_Contains(?,?)", unquote(geometryA), unquote(geometryB))
end

defmacro st_covers(geometryA, geometryB) do
  quote do: fragment("ST_Covers(?,?)", unquote(geometryA), unquote(geometryB))
end

defmacro st_covered_by(geometryA, geometryB) do
  quote do: fragment("ST_CoveredBy(?,?)", unquote(geometryA), unquote(geometryB))
end

# Three-argument form takes an explicit intersection pattern matrix string.
defmacro st_relate(geometryA, geometryB, intersectionPatternMatrix) do
  quote do:
          fragment(
            "ST_Relate(?,?,?)",
            unquote(geometryA),
            unquote(geometryB),
            unquote(intersectionPatternMatrix)
          )
end

defmacro st_relate(geometryA, geometryB) do
  quote do: fragment("ST_Relate(?,?)", unquote(geometryA), unquote(geometryB))
end

# -- Geometry processing -----------------------------------------------------

defmacro st_centroid(geometry) do
  quote do: fragment("ST_Centroid(?)", unquote(geometry))
end

defmacro st_area(geometry) do
  quote do: fragment("ST_Area(?)", unquote(geometry))
end

defmacro st_length(geometry) do
  quote do: fragment("ST_Length(?)", unquote(geometry))
end

defmacro st_point_on_surface(geometry) do
  quote do: fragment("ST_PointOnSurface(?)", unquote(geometry))
end

defmacro st_boundary(geometry) do
  quote do: fragment("ST_Boundary(?)", unquote(geometry))
end

defmacro st_buffer(geometry, double) do
  quote do: fragment("ST_Buffer(?, ?)", unquote(geometry), unquote(double))
end

defmacro st_buffer(geometry, double, integer) do
  quote do: fragment("ST_Buffer(?, ?, ?)", unquote(geometry), unquote(double), unquote(integer))
end

defmacro st_convex_hull(geometry) do
  quote do: fragment("ST_ConvexHull(?)", unquote(geometry))
end

defmacro st_intersection(geometryA, geometryB) do
  quote do: fragment("ST_Intersection(?, ?)", unquote(geometryA), unquote(geometryB))
end

defmacro st_shift_longitude(geometry) do
  quote do: fragment("ST_Shift_Longitude(?)", unquote(geometry))
end

defmacro st_sym_difference(geometryA, geometryB) do
  quote do: fragment("ST_SymDifference(?,?)", unquote(geometryA), unquote(geometryB))
end

defmacro st_difference(geometryA, geometryB) do
  quote do: fragment("ST_Difference(?,?)", unquote(geometryA), unquote(geometryB))
end

# Single-argument forms are aggregates over a column; two-argument forms
# combine exactly two geometries.
defmacro st_collect(geometryList) do
  quote do: fragment("ST_Collect(?)", unquote(geometryList))
end

defmacro st_collect(geometryA, geometryB) do
  quote do: fragment("ST_Collect(?,?)", unquote(geometryA), unquote(geometryB))
end

defmacro st_union(geometryList) do
  quote do: fragment("ST_Union(?)", unquote(geometryList))
end

defmacro st_union(geometryA, geometryB) do
  quote do: fragment("ST_Union(?,?)", unquote(geometryA), unquote(geometryB))
end

defmacro st_mem_union(geometryList) do
  quote do: fragment("ST_MemUnion(?)", unquote(geometryList))
end

# -- Output / SRID helpers ---------------------------------------------------

defmacro st_as_text(geometry) do
  quote do: fragment("ST_AsText(?)", unquote(geometry))
end

defmacro st_as_binary(geometry) do
  quote do: fragment("ST_AsBinary(?)", unquote(geometry))
end

defmacro st_srid(geometry) do
  quote do: fragment("ST_SRID(?)", unquote(geometry))
end

defmacro st_set_srid(geometry, srid) do
  quote do: fragment("ST_SetSRID(?, ?)", unquote(geometry), unquote(srid))
end

defmacro st_make_box_2d(geometryA, geometryB) do
  quote do: fragment("ST_MakeBox2D(?, ?)", unquote(geometryA), unquote(geometryB))
end
# -- Geometry accessors ------------------------------------------------------

defmacro st_dimension(geometry) do
  quote do: fragment("ST_Dimension(?)", unquote(geometry))
end

defmacro st_envelope(geometry) do
  quote do: fragment("ST_Envelope(?)", unquote(geometry))
end

defmacro st_is_simple(geometry) do
  quote do: fragment("ST_IsSimple(?)", unquote(geometry))
end

defmacro st_is_closed(geometry) do
  quote do: fragment("ST_IsClosed(?)", unquote(geometry))
end

defmacro st_is_ring(geometry) do
  quote do: fragment("ST_IsRing(?)", unquote(geometry))
end

defmacro st_num_geometries(geometry) do
  quote do: fragment("ST_NumGeometries(?)", unquote(geometry))
end

defmacro st_geometry_n(geometry, int) do
  quote do: fragment("ST_GeometryN(?, ?)", unquote(geometry), unquote(int))
end

defmacro st_num_points(geometry) do
  quote do: fragment("ST_NumPoints(?)", unquote(geometry))
end

defmacro st_point_n(geometry, int) do
  quote do: fragment("ST_PointN(?, ?)", unquote(geometry), unquote(int))
end

defmacro st_point(x, y) do
  quote do: fragment("ST_Point(?, ?)", unquote(x), unquote(y))
end

defmacro st_exterior_ring(geometry) do
  quote do: fragment("ST_ExteriorRing(?)", unquote(geometry))
end

defmacro st_num_interior_rings(geometry) do
  quote do: fragment("ST_NumInteriorRings(?)", unquote(geometry))
end

# Singular variant kept alongside the plural form above, mirroring PostGIS.
defmacro st_num_interior_ring(geometry) do
  quote do: fragment("ST_NumInteriorRing(?)", unquote(geometry))
end

defmacro st_interior_ring_n(geometry, int) do
  quote do: fragment("ST_InteriorRingN(?, ?)", unquote(geometry), unquote(int))
end

defmacro st_end_point(geometry) do
  quote do: fragment("ST_EndPoint(?)", unquote(geometry))
end

defmacro st_start_point(geometry) do
  quote do: fragment("ST_StartPoint(?)", unquote(geometry))
end

defmacro st_geometry_type(geometry) do
  quote do: fragment("ST_GeometryType(?)", unquote(geometry))
end

# Coordinate accessors: X/Y, plus Z and M for higher-dimensional points.
defmacro st_x(geometry) do
  quote do: fragment("ST_X(?)", unquote(geometry))
end

defmacro st_y(geometry) do
  quote do: fragment("ST_Y(?)", unquote(geometry))
end

defmacro st_z(geometry) do
  quote do: fragment("ST_Z(?)", unquote(geometry))
end

defmacro st_m(geometry) do
  quote do: fragment("ST_M(?)", unquote(geometry))
end
# -- Constructors from WKT / WKB ---------------------------------------------
# Most accept an optional SRID, defaulting to -1.

defmacro st_geom_from_text(text, srid \\ -1) do
  quote do: fragment("ST_GeomFromText(?, ?)", unquote(text), unquote(srid))
end

defmacro st_point_from_text(text, srid \\ -1) do
  quote do: fragment("ST_PointFromText(?, ?)", unquote(text), unquote(srid))
end

defmacro st_line_from_text(text, srid \\ -1) do
  quote do: fragment("ST_LineFromText(?, ?)", unquote(text), unquote(srid))
end

defmacro st_linestring_from_text(text, srid \\ -1) do
  quote do: fragment("ST_LinestringFromText(?, ?)", unquote(text), unquote(srid))
end

defmacro st_polygon_from_text(text, srid \\ -1) do
  quote do: fragment("ST_PolygonFromText(?, ?)", unquote(text), unquote(srid))
end

defmacro st_m_point_from_text(text, srid \\ -1) do
  quote do: fragment("ST_MPointFromText(?, ?)", unquote(text), unquote(srid))
end

defmacro st_m_line_from_text(text, srid \\ -1) do
  quote do: fragment("ST_MLineFromText(?, ?)", unquote(text), unquote(srid))
end

defmacro st_m_poly_from_text(text, srid \\ -1) do
  quote do: fragment("ST_MPolyFromText(?, ?)", unquote(text), unquote(srid))
end

defmacro st_m_geom_coll_from_text(text, srid \\ -1) do
  quote do: fragment("ST_GeomCollFromText(?, ?)", unquote(text), unquote(srid))
end

defmacro st_m_geom_from_wkb(bytea, srid \\ -1) do
  quote do: fragment("ST_GeomFromWKB(?, ?)", unquote(bytea), unquote(srid))
end

defmacro st_m_geometry_from_wkb(bytea, srid \\ -1) do
  quote do: fragment("ST_GeometryFromWKB(?, ?)", unquote(bytea), unquote(srid))
end

defmacro st_point_from_wkb(bytea, srid \\ -1) do
  quote do: fragment("ST_PointFromWKB(?, ?)", unquote(bytea), unquote(srid))
end

defmacro st_line_from_wkb(bytea, srid \\ -1) do
  quote do: fragment("ST_LineFromWKB(?, ?)", unquote(bytea), unquote(srid))
end

defmacro st_linestring_from_wkb(bytea, srid \\ -1) do
  quote do: fragment("ST_LinestringFromWKB(?, ?)", unquote(bytea), unquote(srid))
end

defmacro st_poly_from_wkb(bytea, srid \\ -1) do
  quote do: fragment("ST_PolyFromWKB(?, ?)", unquote(bytea), unquote(srid))
end

defmacro st_polygon_from_wkb(bytea, srid \\ -1) do
  quote do: fragment("ST_PolygonFromWKB(?, ?)", unquote(bytea), unquote(srid))
end

defmacro st_m_point_from_wkb(bytea, srid \\ -1) do
  quote do: fragment("ST_MPointFromWKB(?, ?)", unquote(bytea), unquote(srid))
end

defmacro st_m_line_from_wkb(bytea, srid \\ -1) do
  quote do: fragment("ST_MLineFromWKB(?, ?)", unquote(bytea), unquote(srid))
end

defmacro st_m_poly_from_wkb(bytea, srid \\ -1) do
  quote do: fragment("ST_MPolyFromWKB(?, ?)", unquote(bytea), unquote(srid))
end

defmacro st_geom_coll_from_wkb(bytea, srid \\ -1) do
  quote do: fragment("ST_GeomCollFromWKB(?, ?)", unquote(bytea), unquote(srid))
end

defmacro st_bd_poly_from_text(wkt, srid) do
  quote do: fragment("ST_BdPolyFromText(?, ?)", unquote(wkt), unquote(srid))
end

defmacro st_bd_m_poly_from_text(wkt, srid) do
  quote do: fragment("ST_BdMPolyFromText(?, ?)", unquote(wkt), unquote(srid))
end
defmacro st_flip_coordinates(geometryA) do
  quote do: fragment("ST_FlipCoordinates(?)", unquote(geometryA))
end

defmacro st_generate_points(geometryA, npoints) do
  quote do: fragment("ST_GeneratePoints(?,?)", unquote(geometryA), unquote(npoints))
end

# Three-argument form adds a seed for reproducible point generation.
defmacro st_generate_points(geometryA, npoints, seed) do
  quote do:
          fragment(
            "ST_GeneratePoints(?,?,?)",
            unquote(geometryA),
            unquote(npoints),
            unquote(seed)
          )
end
# BUG FIX: st_extent was missing its closing `end`, which nested
# st_build_area inside it and left the module unterminated (the file did
# not compile).
defmacro st_extent(geometry) do
  # ST_EXTENT returns box2d; the cast yields a geometry value.
  quote do: fragment("ST_EXTENT(?)::geometry", unquote(geometry))
end

defmacro st_build_area(geometryA) do
  quote do: fragment("ST_BuildArea(?)", unquote(geometryA))
end
end
|
lib/geo_postgis.ex
| 0.788868
| 0.895294
|
geo_postgis.ex
|
starcoder
|
defmodule Conform.Schema.Transform do
  @moduledoc """
  This module defines the behaviour for custom transformations.
  Transformations can be defined inline, in which case this behaviour need not be used,
  but if you want to define reusable transforms which you can reference in your
  schema, you should implement this behaviour, and then import the application in
  your schema so that they are made available by the module name.

  ## Example

      [ mappings: [...],
        transforms: [
          "lager.handlers.lager_file_backend": MyApp.Transforms.ToLagerFileBackend,
          "lager.handlers.lager_console_backend": MyApp.Transforms.ToLagerConsoleBackend,
          "lager.handlers": fn conf ->
            file_handlers = Conform.Conf.get(conf, "lager.handlers.lager_file_backend.$level")
              |> Enum.map(fn {[_, _, backend, level], path} -> {backend, [level: level, path: path]} end)
            console_handlers = Conform.Conf.get(conf, "lager.handlers.lager_console_backend")
              |> Enum.map(fn {[_, _, backend], conf} -> {backend, conf} end)
            console_handlers ++ file_handlers
          end
      ]]

  In the case of the two transforms which reference a transform module, the `transform/1` function on each will
  be called with the current configuration state, which is actually a PID of an ETS table. Use the `Conform.Conf` module to query
  values from the configuration as shown in the example above.
  """
  alias Conform.Schema.Transform

  # Injects the behaviour declaration into modules that `use` this one.
  defmacro __using__(_) do
    quote do
      @behaviour Conform.Schema.Transform
    end
  end

  defstruct path: "", # The path of the setting in sys.config where the transformed value will be placed
            transform: nil, # The transformation function
            definition: "", # The quoted function definition
            persist: true # NOTE(review): presumably whether the transformed value is kept in the output -- confirm

  @callback transform([{term, term}]) :: [{term, term}]

  @doc """
  Builds a `%Transform{}` from a `{key, transform}` pair. A module name
  (atom) is stored as-is with no quoted definition; any other quoted
  expression is evaluated and must yield a function of arity 1, otherwise
  a `Conform.Schema.SchemaError` is raised.
  """
  def from_quoted({key, transform}) when is_atom(transform) do
    %Transform{path: Atom.to_string(key), definition: nil, transform: transform}
  end

  def from_quoted({key, transform}) do
    definition = transform
    # NOTE: evaluates quoted code taken from the schema file -- schema
    # input is assumed to be trusted here.
    {transform, _} = Code.eval_quoted(transform)
    case is_function(transform, 1) do
      true ->
        %Transform{path: Atom.to_string(key), definition: definition, transform: transform}
      false ->
        raise Conform.Schema.SchemaError, message: "Invalid transform for #{key}, it must be a function of arity 1."
    end
  end
end
|
lib/conform/schema/transform.ex
| 0.741206
| 0.511778
|
transform.ex
|
starcoder
|
defrecord Mix.Dep, [ scm: nil, app: nil, requirement: nil, status: nil, opts: nil,
                     deps: [], source: nil, manager: nil ] do
  @moduledoc """
  This is a record that keeps information about your project
  dependencies. It keeps:
  * scm - a module representing the source code management tool (SCM) operations;
  * app - the app name as an atom;
  * requirement - a binary or regexp with the deps requirement;
  * status - the current status of dependency, check `Mix.Deps.format_status/1` for more info;
  * opts - the options given by the developer
  * deps - the child dependencies of this dependency, as `Mix.Dep` records;
  * source - any possible configuration associated with the manager field,
    rebar.config for rebar or the Mix.Project for Mix
  * manager - the project management, possible values: :rebar | :mix | :make | nil
  """
end
defmodule Mix.Deps do
  @moduledoc """
  A module with common functions to work with dependencies.
  """

  @doc """
  Returns all dependencies recursively as `Mix.Dep` record.

  ## Exceptions

  This function raises an exception in case the developer
  provides a dependency in the wrong format.
  """
  def all do
    { deps, _ } = Mix.Deps.Converger.all(nil, fn(dep, acc) -> { dep, acc } end)
    deps
  end

  @doc """
  Returns all dependencies but with a custom callback and
  accumulator.
  """
  def all(acc, callback) do
    { _deps, acc } = Mix.Deps.Converger.all(acc, callback)
    acc
  end

  @doc """
  Returns all direct child dependencies.
  """
  defdelegate children(), to: Mix.Deps.Retriever

  @doc """
  Returns all dependencies depending on given dependencies.
  """
  def depending(deps, all_deps // all)

  def depending([], _all_deps) do
    []
  end

  def depending(deps, all_deps) do
    dep_names = Enum.map(deps, fn dep -> dep.app end)
    # A parent is any dependency that lists one of the given apps as a child.
    parents = Enum.filter all_deps, fn dep ->
      Enum.any?(dep.deps, fn child_dep -> child_dep.app in dep_names end)
    end
    # Recurse so transitive dependents are included as well.
    parents ++ depending(parents, all_deps)
  end

  @doc """
  Receives a list of deps names and returns deps records.
  Logs a message if the dependency could not be found.
  """
  def by_name(given, all_deps // all) do
    # Ensure all apps are atoms
    apps = Enum.map given, fn(app) ->
      if is_binary(app), do: binary_to_atom(app), else: app
    end
    # We need to keep the order of all, which properly orders deps
    deps = Enum.filter all_deps, fn(dep) -> dep.app in apps end
    # Now we validate the given atoms
    index = Mix.Dep.__index__(:app)
    Enum.each apps, fn(app) ->
      unless List.keyfind(deps, app, index) do
        Mix.shell.info "unknown dependency #{app} for env #{Mix.env}"
      end
    end
    deps
  end

  @doc """
  Runs the given `fun` inside the given dependency project by
  changing the current working directory and loading the given
  project into the project stack.
  """
  def in_dependency(dep, post_config // [], fun)

  def in_dependency(Mix.Dep[manager: :rebar, opts: opts], post_config, fun) do
    # Use post_config for rebar deps
    Mix.Project.post_config(post_config)
    Mix.Project.push(Mix.Rebar)
    try do
      File.cd!(opts[:dest], fn -> fun.(nil) end)
    after
      Mix.Project.pop
    end
  end

  def in_dependency(Mix.Dep[app: app, opts: opts], post_config, fun) do
    # Mix deps run under their configured environment (default :prod),
    # restored in the after clause even if `fun` raises.
    env = opts[:env] || :prod
    old_env = Mix.env
    try do
      Mix.env(env)
      Mix.Project.in_project(app, opts[:dest], post_config, fun)
    after
      Mix.env(old_env)
    end
  end

  @doc """
  Formats the status of a dependency.
  """
  def format_status(Mix.Dep[status: { :ok, _vsn }]),
    do: "ok"

  def format_status(Mix.Dep[status: { :noappfile, path }]),
    do: "could not find app file at #{Mix.Utils.relative_to_cwd(path)}"

  def format_status(Mix.Dep[status: { :invalidapp, path }]),
    do: "the app file at #{Mix.Utils.relative_to_cwd(path)} is invalid"

  def format_status(Mix.Dep[status: { :invalidvsn, vsn }]),
    do: "the dependency does not match the specified version, got #{vsn}"

  def format_status(Mix.Dep[status: { :lockmismatch, _ }]),
    do: "lock mismatch: the dependency is out of date"

  def format_status(Mix.Dep[status: :nolock]),
    do: "the dependency is not locked"

  def format_status(Mix.Dep[status: { :diverged, other }, opts: opts]),
    do: "different specs were given for this dependency, choose one in your deps:\n" <>
        "$ #{inspect_kw opts}\n$ #{inspect_kw other.opts}\n"

  def format_status(Mix.Dep[status: { :unavailable, _ }]),
    do: "the dependency is not available, run `mix deps.get`"

  # Pretty-prints a keyword list of dep options, one entry per line.
  # NOTE: uses the legacy Elixir 0.x `lc ... inlist` list comprehension.
  defp inspect_kw(list) do
    middle = lc { key, value } inlist Enum.sort(list), do: "#{key}: #{inspect value, raw: true}"
    "[ " <> Enum.join(middle, ",\n  ") <> " ]"
  end

  @doc """
  Checks the lock for the given dependency and update its status accordingly.
  """
  def check_lock(Mix.Dep[scm: scm, app: app, opts: opts] = dep, lock) do
    if available?(dep) do
      rev = lock[app]
      opts = Keyword.put(opts, :lock, rev)
      if scm.matches_lock?(opts) do
        dep
      else
        # No locked revision means :nolock; a stale one means :lockmismatch.
        status = if rev, do: { :lockmismatch, rev }, else: :nolock
        dep.status(status)
      end
    else
      dep
    end
  end

  @doc """
  Updates the dependency inside the given project.
  """
  defdelegate update(dep), to: Mix.Deps.Retriever

  @doc """
  Check if a dependency is ok.
  """
  def ok?(Mix.Dep[status: { :ok, _ }]), do: true
  def ok?(_), do: false

  @doc """
  Check if a dependency is available.
  """
  def available?(Mix.Dep[status: { :diverged, _ }]), do: false
  def available?(Mix.Dep[status: { :unavailable, _ }]), do: false
  def available?(_), do: true

  @doc """
  Check if a dependency is part of an umbrella project as a top level project.
  """
  def in_umbrella?(Mix.Dep[opts: opts], apps_path) do
    apps_path == Path.expand(Path.join(opts[:dest], ".."))
  end

  @doc """
  Check if a dependency is out of date or not, considering its
  lock status. Therefore, be sure to call `check_lock` before
  invoking this function.
  """
  def out_of_date?(Mix.Dep[status: { :lockmismatch, _ }]), do: true
  def out_of_date?(Mix.Dep[status: :nolock]), do: true
  def out_of_date?(dep), do: not available?(dep)

  @doc """
  Format the dependency for printing.
  """
  def format_dep(Mix.Dep[scm: scm, app: app, status: status, opts: opts]) do
    version =
      case status do
        { :ok, vsn } when vsn != nil -> "(#{vsn}) "
        _ -> ""
      end
    "#{app} #{version}#{inspect scm.format(opts)}"
  end

  @doc """
  Returns all compile paths for the dependency.
  """
  def compile_paths(Mix.Dep[app: app, opts: opts, manager: manager]) do
    if manager == :mix do
      Mix.Project.in_project app, opts[:dest], fn _ ->
        Mix.Project.compile_paths
      end
    else
      [ Path.join(opts[:dest], "ebin") ]
    end
  end

  @doc """
  Returns all load paths for the dependency.
  """
  def load_paths(Mix.Dep[manager: :mix, app: app, opts: opts]) do
    paths = Mix.Project.in_project app, opts[:dest], fn _ ->
      Mix.Project.load_paths
    end
    Enum.uniq paths
  end

  def load_paths(Mix.Dep[manager: :rebar, opts: opts, source: source]) do
    # Add root dir and all sub dirs with ebin/ directory
    sub_dirs = Enum.map(source[:sub_dirs] || [], fn path ->
      Path.join(opts[:dest], path)
    end)
    # NOTE: the pipeline uses the legacy 0.x partial-application syntax
    # (`&1` directly in call position) and the old `List.concat` API.
    [ opts[:dest] | sub_dirs ]
    |> Enum.map(Path.wildcard(&1))
    |> List.concat
    |> Enum.map(Path.join(&1, "ebin"))
    |> Enum.filter(File.dir?(&1))
  end

  def load_paths(Mix.Dep[manager: manager, opts: opts]) when manager in [:make, nil] do
    [ Path.join(opts[:dest], "ebin") ]
  end

  @doc """
  Returns true if dependency is a mix project.
  """
  def mix?(Mix.Dep[manager: manager]) do
    manager == :mix
  end

  @doc """
  Returns true if dependency is a rebar project.
  """
  def rebar?(Mix.Dep[manager: manager]) do
    manager == :rebar
  end

  @doc """
  Returns true if dependency is a make project.
  """
  def make?(Mix.Dep[manager: manager]) do
    manager == :make
  end
end
|
lib/mix/lib/mix/deps.ex
| 0.859177
| 0.434041
|
deps.ex
|
starcoder
|
defmodule Day5 do
  @moduledoc false

  # Advent of Code 2019, Day 5: an Intcode interpreter supporting
  # parameter modes (0 = position, 1 = immediate), opcodes 1-8 and the
  # halt instruction (99). Memory is modelled as a plain list.

  @input "priv/inputs/day5.txt"

  defp get_input(), do: File.read!(@input)

  # Runs the puzzle input (opcode 3 prompts on stdin, opcode 4 prints).
  def part1() do
    get_input()
    |> get_program()
    |> compute()
  end

  # Convenience entry point for running an arbitrary program string.
  def test(input) do
    input
    |> get_program()
    |> compute()
  end

  # Parses a comma-separated string of integers into a program list.
  defp get_program(input) do
    for token <- String.split(input, ",", trim: true), do: String.to_integer(token)
  end

  # Executes instructions until the machine halts, returning the final
  # memory image.
  def compute(program, ip \\ 0) do
    case execute(program, ip) do
      {:ok, next_program, next_ip} -> compute(next_program, next_ip)
      {:stop, final_program, _ip} -> final_program
    end
  end

  # Decodes the instruction at `ip` and dispatches it.
  def execute(program, ip) do
    [raw_opcode | params] = Enum.drop(program, ip)
    {m3, m2, m1, op} = parse_modes(raw_opcode)
    execute_instruction(op, params, {m1, m2, m3}, program, ip)
  end

  # Opcode 1: add the first two parameters, store at the third.
  def execute_instruction(1, [a, b, dest | _], {m1, m2, _}, program, ip) do
    total = get_param(program, a, m1) + get_param(program, b, m2)
    {:ok, replace(program, dest, total), ip + 4}
  end

  # Opcode 2: multiply the first two parameters, store at the third.
  def execute_instruction(2, [a, b, dest | _], {m1, m2, _}, program, ip) do
    product = get_param(program, a, m1) * get_param(program, b, m2)
    {:ok, replace(program, dest, product), ip + 4}
  end

  # Opcode 3: read an integer from stdin and store it at `dest`.
  def execute_instruction(3, [dest | _], _modes, program, ip) do
    value =
      IO.gets("Input: ")
      |> String.replace("\r", "")
      |> String.replace("\n", "")
      |> String.to_integer()

    {:ok, replace(program, dest, value), ip + 2}
  end

  # Opcode 4: print the value of the single parameter.
  def execute_instruction(4, [src | _], {m1, _, _}, program, ip) do
    IO.puts("Value: #{get_param(program, src, m1)}")
    {:ok, program, ip + 2}
  end

  # Opcode 5: jump-if-true -- jump when the first parameter is non-zero.
  def execute_instruction(5, [test_p, target | _], {m1, m2, _}, program, ip) do
    if get_param(program, test_p, m1) != 0 do
      {:ok, program, get_param(program, target, m2)}
    else
      {:ok, program, ip + 3}
    end
  end

  # Opcode 6: jump-if-false -- jump when the first parameter is zero.
  def execute_instruction(6, [test_p, target | _], {m1, m2, _}, program, ip) do
    if get_param(program, test_p, m1) == 0 do
      {:ok, program, get_param(program, target, m2)}
    else
      {:ok, program, ip + 3}
    end
  end

  # Opcode 7: store 1 at `dest` if the first param is less than the
  # second, otherwise 0.
  def execute_instruction(7, [a, b, dest | _], {m1, m2, _}, program, ip) do
    flag = if get_param(program, a, m1) < get_param(program, b, m2), do: 1, else: 0
    {:ok, replace(program, dest, flag), ip + 4}
  end

  # Opcode 8: store 1 at `dest` if the first two params are equal,
  # otherwise 0.
  def execute_instruction(8, [a, b, dest | _], {m1, m2, _}, program, ip) do
    flag = if get_param(program, a, m1) == get_param(program, b, m2), do: 1, else: 0
    {:ok, replace(program, dest, flag), ip + 4}
  end

  # Opcode 99: halt.
  def execute_instruction(99, _params, _modes, program, ip) do
    {:stop, program, ip}
  end

  # Returns a copy of `lst` with the element at `idx` swapped for `val`.
  # Crashes (MatchError) when `idx` is out of range, like the original.
  defp replace(lst, idx, val) do
    {prefix, [_old | suffix]} = Enum.split(lst, idx)
    prefix ++ [val | suffix]
  end

  # Mode 0 (position) dereferences memory; mode 1 (immediate) is literal.
  def get_param(program, idx, 0), do: Enum.at(program, idx)
  def get_param(_program, val, 1), do: val

  # Splits a raw opcode into {mode3, mode2, mode1, two-digit opcode}.
  def parse_modes(opcode) do
    m3 = div(opcode, 10_000)
    rest = rem(opcode, 10_000)
    m2 = div(rest, 1_000)
    rest = rem(rest, 1_000)
    {m3, m2, div(rest, 100), rem(rest, 100)}
  end
end
|
lib/Day5.ex
| 0.524151
| 0.41739
|
Day5.ex
|
starcoder
|
defmodule Relay.Marathon.Store do
  @moduledoc """
  A store for Marathon apps and tasks. Keeps a mapping of apps to their tasks
  and triggers updates when a new version of an app or task is stored.
  """

  alias Relay.Marathon.{Adapter, App, Task}
  alias Relay.Resources

  use LogWrapper, as: Log
  use GenServer

  defmodule State do
    @moduledoc false

    defstruct [:version, apps: %{}, app_tasks: %{}]

    @type t :: %__MODULE__{
            version: String.t(),
            apps: %{optional(String.t()) => App.t()},
            app_tasks: %{optional(String.t()) => %{optional(String.t()) => Task.t()}}
          }

    def new, do: %State{version: new_version()}

    # Replaces the whole app/task mapping in one step (used by sync).
    @spec set_apps_and_tasks(t, [{App.t(), [Task.t()]}]) :: t
    def set_apps_and_tasks(state, apps_and_tasks) do
      apps =
        apps_and_tasks
        |> Enum.into(%{}, fn {%App{id: app_id} = app, _} -> {app_id, app} end)

      app_tasks =
        apps_and_tasks
        |> Enum.into(%{}, fn {%App{id: app_id}, tasks_list} ->
          tasks =
            tasks_list
            |> Enum.into(%{}, fn %Task{id: task_id} = task -> {task_id, task} end)

          {app_id, tasks}
        end)

      new_state(state, apps: apps, app_tasks: app_tasks)
    end

    @spec get_apps(t) :: [App.t()]
    def get_apps(%__MODULE__{apps: apps}), do: values_sorted_by_key(apps)

    @spec get_apps_and_tasks(t) :: [{App.t(), [Task.t()]}]
    def get_apps_and_tasks(%__MODULE__{} = state) do
      state
      |> get_apps()
      |> Enum.map(fn app -> {app, get_tasks(state, app)} end)
    end

    @spec get_tasks(t, App.t()) :: [Task.t()]
    defp get_tasks(%__MODULE__{app_tasks: app_tasks}, %App{id: app_id}),
      do: values_sorted_by_key(app_tasks[app_id])

    # Deterministic ordering: sort entries by key, then keep the values.
    @spec values_sorted_by_key(%{optional(Map.key()) => Map.value()}) :: [Map.value()]
    defp values_sorted_by_key(map),
      do: map |> Map.to_list() |> Enum.sort() |> Enum.map(&elem(&1, 1))

    @spec get_app(t, String.t()) :: App.t() | nil
    def get_app(%__MODULE__{apps: apps}, app_id), do: Map.get(apps, app_id)

    # NOTE(review): versions are compared lexically with `>`; this assumes
    # Marathon version strings sort chronologically -- confirm.
    @spec get_and_update_app(t, App.t()) :: {App.t() | nil, t}
    def get_and_update_app(state, %App{id: id, version: version} = app) do
      case State.get_app(state, id) do
        # App is newer than existing app, update the app
        %App{version: existing_version} = existing_app when version > existing_version ->
          {existing_app, replace_app!(state, app)}

        # App is the same or older than existing app, do nothing
        %App{} ->
          {app, state}

        # No existing app with this ID, add this one
        nil ->
          {nil, put_app(state, app)}
      end
    end

    @spec put_app(t, App.t()) :: t
    defp put_app(%__MODULE__{apps: apps, app_tasks: app_tasks} = state, %App{id: id} = app),
      do: new_state(state, apps: Map.put(apps, id, app), app_tasks: Map.put(app_tasks, id, %{}))

    @spec replace_app!(t, App.t()) :: t
    defp replace_app!(%__MODULE__{apps: apps} = state, %App{id: id} = app),
      do: new_state(state, apps: Map.replace!(apps, id, app))

    # Removes an app and all of its tasks; returns {app | nil, new state}.
    @spec pop_app(t, String.t()) :: {App.t() | nil, t}
    def pop_app(%__MODULE__{apps: apps, app_tasks: app_tasks} = state, id) do
      case Map.pop(apps, id) do
        {%App{} = app, new_apps} ->
          {app, new_state(state, apps: new_apps, app_tasks: Map.delete(app_tasks, id))}

        {nil, _} ->
          {nil, state}
      end
    end

    # Raises (KeyError) if the task's app is not in the store.
    @spec get_and_update_task!(t, Task.t()) :: {Task.t() | nil, t}
    def get_and_update_task!(
          %__MODULE__{app_tasks: app_tasks} = state,
          %Task{id: id, app_id: app_id, version: version} = task
        ) do
      tasks = Map.fetch!(app_tasks, app_id)

      case Map.get(tasks, id) do
        # Task is newer than existing task, update the task
        %Task{version: existing_version} = existing_task when version > existing_version ->
          {existing_task, put_task!(state, tasks, task)}

        # Task is the same or older than existing task, do nothing
        %Task{} ->
          {task, state}

        # No existing task with this ID, add this one
        nil ->
          {nil, put_task!(state, tasks, task)}
      end
    end

    # FIX(review): the second argument is the per-app tasks map (task_id =>
    # Task) fetched from `app_tasks` -- the original spec declared the full
    # `app_tasks` two-level map shape here, which did not match the code.
    @spec put_task!(t, %{optional(String.t()) => Task.t()}, Task.t()) :: t
    defp put_task!(
           %__MODULE__{app_tasks: app_tasks} = state,
           tasks,
           %Task{id: id, app_id: app_id} = task
         ) do
      new_tasks = Map.put(tasks, id, task)
      new_state(state, app_tasks: Map.replace!(app_tasks, app_id, new_tasks))
    end

    # Raises (KeyError) if `app_id` is not in the store.
    @spec pop_task!(t, String.t(), String.t()) :: {Task.t() | nil, t}
    def pop_task!(%__MODULE__{app_tasks: app_tasks} = state, id, app_id) do
      tasks = Map.fetch!(app_tasks, app_id)

      case Map.pop(tasks, id) do
        {%Task{} = task, new_tasks} ->
          {task, new_state(state, app_tasks: Map.replace!(app_tasks, app_id, new_tasks))}

        {nil, _} ->
          {nil, state}
      end
    end

    # Every mutation bumps the store version alongside the given updates.
    @spec new_state(t, keyword) :: t
    defp new_state(state, updates), do: struct(state, [version: new_version()] ++ updates)

    # Generates a lexically-sortable version string: ISO8601 timestamp
    # plus a zero-padded monotonic counter to break ties within a second.
    @spec new_version() :: String.t()
    defp new_version do
      time = DateTime.utc_now() |> DateTime.truncate(:second) |> DateTime.to_iso8601()
      # Take last 6 digits of unique_integer and pad with leading 0's.
      number =
        System.unique_integer([:monotonic, :positive])
        |> Integer.mod(1_000_000)
        |> Integer.to_string()
        |> String.pad_leading(6, "0")

      "#{time}-#{number}"
    end
  end

  @spec start_link(keyword) :: GenServer.on_start()
  def start_link(opts \\ []) do
    {resources, opts} = Keyword.pop(opts, :resources, Resources)
    GenServer.start_link(__MODULE__, resources, opts)
  end

  @doc """
  Sync all apps and tasks. Simply replaces the existing apps and tasks in the
  store with those provided.
  """
  @spec sync(GenServer.server(), [{App.t(), [Task.t()]}]) :: :ok
  def sync(store, apps_and_tasks), do: GenServer.call(store, {:sync, apps_and_tasks})

  @doc """
  Get an app from the store using its ID.
  We need to check for the presence of an App in the Store to determine whether
  a Task is relevant to us. Also, Task structs require the corresponding App
  struct in order to be created.
  """
  @spec get_app(GenServer.server(), String.t()) :: {:ok, App.t() | nil}
  def get_app(store, app_id), do: GenServer.call(store, {:get_app, app_id})

  @doc """
  Update an app in the Store. The app is only added if its version is newer than
  any existing app.
  """
  @spec update_app(GenServer.server(), App.t()) :: :ok
  def update_app(store, %App{} = app), do: GenServer.call(store, {:update_app, app})

  @doc """
  Delete an app from the Store. All tasks for the app will also be removed.
  """
  @spec delete_app(GenServer.server(), String.t()) :: :ok
  def delete_app(store, app_id), do: GenServer.call(store, {:delete_app, app_id})

  @doc """
  Update a task in the Store. The task is only added if its version is newer
  than any existing task.
  An error will be raised if the task's app is not already stored.
  """
  @spec update_task(GenServer.server(), Task.t()) :: :ok
  def update_task(store, %Task{} = task), do: GenServer.call(store, {:update_task, task})

  @doc """
  Delete a task from the Store.
  If the app ID is not in the store, this task deletion will be ignored.
  """
  @spec delete_task(GenServer.server(), String.t(), String.t()) :: :ok
  def delete_task(store, task_id, app_id),
    do: GenServer.call(store, {:delete_task, task_id, app_id})

  @spec init(GenServer.server()) :: {:ok, {GenServer.server(), State.t()}}
  def init(resources) do
    {:ok, {resources, State.new()}}
  end

  def handle_call({:sync, apps_and_tasks}, _from, {resources, state}) do
    Log.info("Syncing all apps and tasks...")
    new_state = State.set_apps_and_tasks(state, apps_and_tasks)
    notify_sync(resources, new_state)
    {:reply, :ok, {resources, new_state}}
  end

  def handle_call({:get_app, app_id}, _from, {resources, state}),
    do: {:reply, {:ok, State.get_app(state, app_id)}, {resources, state}}

  def handle_call({:update_app, %App{id: id, version: version} = app}, _from, {resources, state}) do
    {old_app, new_state} = State.get_and_update_app(state, app)

    case old_app do
      %App{version: existing_version} when version > existing_version ->
        Log.debug("App '#{id}' updated: #{existing_version} -> #{version}")
        notify_updated_app(resources, new_state)

      %App{version: existing_version} ->
        Log.debug("App '#{id}' unchanged: #{version} <= #{existing_version}")

      nil ->
        Log.info("App '#{id}' with version #{version} added")
        notify_updated_app(resources, new_state)
    end

    {:reply, :ok, {resources, new_state}}
  end

  def handle_call({:delete_app, id}, _from, {resources, state}) do
    {app, new_state} = State.pop_app(state, id)

    case app do
      %App{version: version} ->
        Log.info("App '#{id}' with version #{version} deleted")
        notify_updated_app(resources, new_state)

      nil ->
        Log.debug("App '#{id}' not present/already deleted")
    end

    {:reply, :ok, {resources, new_state}}
  end

  def handle_call(
        {:update_task, %Task{id: id, version: version} = task},
        _from,
        {resources, state}
      ) do
    # If the app for the task doesn't exist it's an error
    {old_task, new_state} = State.get_and_update_task!(state, task)

    case old_task do
      %Task{version: existing_version} when version > existing_version ->
        Log.debug("Task '#{id}' updated: #{existing_version} -> #{version}")
        notify_updated_task(resources, new_state)

      %Task{version: existing_version} ->
        Log.debug("Task '#{id}' unchanged: #{version} <= #{existing_version}")

      nil ->
        Log.info("Task '#{id}' with version #{version} added")
        notify_updated_task(resources, new_state)
    end

    {:reply, :ok, {resources, new_state}}
  end

  def handle_call({:delete_task, id, app_id}, _from, {resources, state}) do
    new_state =
      try do
        {task, new_state} = State.pop_task!(state, id, app_id)

        case task do
          %Task{version: version} ->
            Log.info("Task '#{id}' with version #{version} deleted")
            notify_updated_task(resources, new_state)

          nil ->
            Log.debug("Task '#{id}' not present/already deleted")
        end

        new_state
      rescue
        # pop_task! raises KeyError when the app is unknown -- deliberately
        # treated as a no-op (see delete_task/3 docs).
        KeyError ->
          Log.debug("Unable to find app '#{app_id}' for task '#{id}'. Task delete ignored.")
          state
      end

    {:reply, :ok, {resources, new_state}}
  end

  # For testing only
  def handle_call(:_get_state, _from, {resources, state}),
    do: {:reply, {:ok, state}, {resources, state}}

  @spec notify_sync(GenServer.server(), State.t()) :: :ok
  defp notify_sync(resources, state) do
    Log.debug("Apps and tasks were synced, updating app endpoints")
    # TODO: Split CDS/RDS from EDS updates in Resources so that this does
    # what both notify_updated_task/1 notify_updated_app/1 would do
    update_app_endpoints(resources, state)
  end

  @spec notify_updated_app(GenServer.server(), State.t()) :: :ok
  defp notify_updated_app(resources, state) do
    Log.debug("An app was updated, updating app endpoints")
    # TODO: Split CDS/RDS from EDS updates in Resources so that this does
    # something different from notify_updated_task/1
    update_app_endpoints(resources, state)
  end

  @spec notify_updated_task(GenServer.server(), State.t()) :: :ok
  defp notify_updated_task(resources, state) do
    Log.debug("A task was updated, updating app endpoints...")
    # TODO: Split CDS/RDS from EDS updates in Resources so that this does
    # something different from notify_updated_app/1
    update_app_endpoints(resources, state)
  end

  # Empty store short-circuits to an empty endpoint list.
  @spec update_app_endpoints(GenServer.server(), State.t()) :: :ok
  defp update_app_endpoints(resources, %State{version: version, apps: apps}) when apps == %{},
    do: Resources.update_app_endpoints(resources, version, [])

  defp update_app_endpoints(resources, state) do
    app_endpoints =
      state
      |> State.get_apps_and_tasks()
      |> Enum.flat_map(fn {app, tasks} -> Adapter.app_endpoints_for_app(app, tasks) end)

    Resources.update_app_endpoints(resources, state.version, app_endpoints)
  end
end
|
lib/relay/marathon/store.ex
| 0.795181
| 0.402069
|
store.ex
|
starcoder
|
defmodule QRCodeEx.PNG do
  @moduledoc """
  Render the QR Code matrix in PNG format
  ```elixir
  qr_code_content
  |> QRCodeEx.encode()
  |> QRCodeEx.png()
  ```
  You can specify the following attributes of the QR code:
  * `color`: In binary format. The default is `<<0, 0, 0>>`
  * `background_color`: In binary format or `:transparent`. The default is `<<255, 255, 255>>`
  * `width`: The width of the QR code in pixel. (the actual size may vary, due to the number of modules in the code)
  By default, QR code size will be dynamically generated based on the input string.
  """

  alias QRCodeEx.Matrix

  @defaults %{
    background_color: <<255, 255, 255>>,
    color: <<0, 0, 0>>,
    module_size: 11
  }
  @transparent_alpha <<0>>
  @opaque_alpha <<255>>
  # Fixed 8-byte signature that starts every PNG file.
  @png_signature <<137, 80, 78, 71, 13, 10, 26, 10>>

  @doc """
  Return the PNG binary representation of the QR Code
  """
  @spec png(Matrix.t(), map() | Keyword.t()) :: String.t()
  def png(%Matrix{matrix: matrix} = m, options \\ []) do
    matrix_size = Matrix.size(m)
    options = normalize_options(options, matrix_size)
    pixel_size = matrix_size * options[:module_size]

    # IHDR payload: width, height, bit depth 8, color type 6 (truecolor
    # with alpha -- each pixel emitted below is 4 bytes, RGB + alpha),
    # then compression, filter and interlace methods all zero.
    # FIX(review): the original line contained scrambled bytes in place
    # of the `8::8, 6::8` fields, producing an invalid PNG header.
    ihdr = png_chunk("IHDR", <<pixel_size::32, pixel_size::32, 8::8, 6::8, 0::24>>)
    idat = png_chunk("IDAT", pixels(matrix, options))
    iend = png_chunk("IEND", "")

    [@png_signature, ihdr, idat, iend]
    |> List.flatten()
    |> Enum.join()
  end

  # Merges user options over the defaults and derives module_size from
  # an explicit :width when one is given.
  defp normalize_options(options, matrix_size) do
    options
    |> Enum.into(@defaults)
    |> calc_module_size(matrix_size)
  end

  defp calc_module_size(%{width: width} = options, matrix_size) when is_integer(width) do
    size = (width / matrix_size) |> Float.round() |> trunc()
    Map.put(options, :module_size, size)
  end

  defp calc_module_size(options, _matrix_size), do: options

  # Builds one PNG chunk: 4-byte length, type, payload, CRC32 of
  # type + payload.
  defp png_chunk(type, binary) do
    length = byte_size(binary)
    crc = :erlang.crc32(type <> binary)
    [<<length::32>>, type, binary, <<crc::32>>]
  end

  # Renders the whole matrix as zlib-compressed scanline data.
  defp pixels(matrix, options) do
    matrix
    |> Tuple.to_list()
    |> Stream.map(&row_pixels(&1, options))
    |> Enum.join()
    |> :zlib.compress()
  end

  # Each matrix row becomes `module_size` identical scanlines, every
  # scanline prefixed with filter byte 0 (no filter).
  defp row_pixels(row, %{module_size: module_size} = options) do
    pixels =
      row
      |> Tuple.to_list()
      |> Enum.map(&module_pixels(&1, options))
      |> Enum.join()

    :binary.copy(<<0>> <> pixels, module_size)
  end

  # A dark module (1) uses the foreground color; nil/0 uses the
  # background. Both expand to `module_size` RGBA pixels.
  defp module_pixels(value, %{background_color: background_color, module_size: module_size})
       when is_nil(value) or value == 0 do
    background_color
    |> apply_alpha_channel()
    |> :binary.copy(module_size)
  end

  defp module_pixels(1, %{color: color, module_size: module_size}) do
    color
    |> apply_alpha_channel()
    |> :binary.copy(module_size)
  end

  defp apply_alpha_channel(:transparent), do: <<0, 0, 0>> <> @transparent_alpha
  defp apply_alpha_channel(color), do: color <> @opaque_alpha
end
|
lib/eqrcode/png.ex
| 0.87266
| 0.894329
|
png.ex
|
starcoder
|
defmodule PasswordValidator.Validators.LengthValidator do
  @moduledoc """
  Validates a password by checking the length of the password.
  """

  @behaviour PasswordValidator.Validator

  @doc """
  Validate the password by checking the length
  Example config (min 5 characters, max 9 characters):
  ```
  [
    length: [
      min: 5,
      max: 9,
    ]
  ]
  ```

  ## Examples

      iex> LengthValidator.validate("simple2", [length: [min: 3]])
      :ok

      iex> LengthValidator.validate("too_short", [length: [min: 10]])
      {:error, [{"String is too short. Only 9 characters instead of 10",
      validator: PasswordValidator.Validators.LengthValidator, error_type: :too_short}]}

      iex> LengthValidator.validate("too_long", [length: [min: 3, max: 6]])
      {:error, [{"String is too long. 8 but maximum is 6",
      validator: PasswordValidator.Validators.LengthValidator, error_type: :too_long}]}
  """
  def validate(string, opts) do
    config = Keyword.get(opts, :length, [])
    min_length = Keyword.get(config, :min, :infinity)
    max_length = Keyword.get(config, :max, :infinity)
    custom_messages = Keyword.get(config, :messages, [])
    validate_password(string, min_length, max_length, custom_messages)
  end

  # FIX(review): spec corrected -- the password may be `nil` (handled by its
  # own clause), both bounds default to `:infinity`, and `custom_messages`
  # is a keyword list (it is read with `Keyword.get/3`), not a map.
  @spec validate_password(
          String.t() | nil,
          integer() | :infinity,
          integer() | :infinity,
          keyword()
        ) :: :ok | {:error, nonempty_list()}
  defp validate_password(_, min_length, max_length, _)
       when is_integer(min_length) and is_integer(max_length) and min_length > max_length,
       do: raise("Min length cannot be greater than the max")

  # A nil password is validated as the empty string.
  defp validate_password(nil, min_length, max_length, custom_messages) do
    validate_password("", min_length, max_length, custom_messages)
  end

  defp validate_password(string, min_length, max_length, custom_messages) do
    length = String.length(string)

    [
      valid_min_length?(length, min_length, custom_messages),
      valid_max_length?(length, max_length, custom_messages)
    ]
    |> PasswordValidator.Validator.return_errors_or_ok()
  end

  defp valid_min_length?(_, :infinity, _custom_messages),
    do: :ok

  defp valid_min_length?(_, min, _custom_messages) when not is_integer(min),
    do: raise("min must be an integer")

  defp valid_min_length?(length, min, custom_messages) when length < min,
    do: error("String is too short. Only #{length} characters instead of #{min}", :too_short, custom_messages)

  defp valid_min_length?(_, _, _),
    do: :ok

  defp valid_max_length?(_, :infinity, _custom_messages),
    do: :ok

  defp valid_max_length?(_, max, _custom_messages) when not is_integer(max),
    do: raise("max must be an integer")

  defp valid_max_length?(length, max, custom_messages) when length > max,
    do: error("String is too long. #{length} but maximum is #{max}", :too_long, custom_messages)

  defp valid_max_length?(_, _, _),
    do: :ok

  # Builds the error tuple, preferring a configured custom message for
  # `error_type` over the default one.
  defp error(message, error_type, custom_messages) do
    message = Keyword.get(custom_messages, error_type, message)
    additional_info = [validator: __MODULE__, error_type: error_type]
    {:error, {message, additional_info}}
  end
end
|
lib/password_validator/validators/length_validator.ex
| 0.926728
| 0.790247
|
length_validator.ex
|
starcoder
|
defmodule Callisto.Query do
alias __MODULE__
alias Callisto.{Cypher, Vertex}

# One field per Cypher clause; `piped_queries` accumulates earlier query
# structs when clauses are chained through the pipe-based builder API.
defstruct create: nil,
          match: nil,
          merge: nil,
          where: nil,
          set: nil,
          delete: nil,
          order: nil,
          limit: nil,
          return: nil,
          piped_queries: []
# Returns an empty query struct to start a builder pipeline from.
def new, do: %Query{}
# Accept the match criteria directly. Only strings supported.
@doc ~S"""
Sets the MATCH pattern into the query. May take a string or a
hash.

    iex> %Query{} |> Query.match("(x:Disease)") |> to_string
    "MATCH (x:Disease)"

    iex> %Query{} |> Query.match(x: Vertex.cast("Medicine", %{dose: 42})) |> to_string
    "MATCH (x:Medicine {dose: 42})"

    iex> %Query{} |> Query.match(x: %{id: 42}) |> to_string
    "MATCH (x {id: 42})"

    iex> %Query{} |> Query.match(x: %{id: 42}, y: %{id: 69}) |> to_string
    "MATCH (x {id: 42}), (y {id: 69})"
"""
def match(pattern), do: match(%Query{}, pattern)

def match(query = %Query{}, pattern) when is_binary(pattern),
  do: %Query{match: pattern, piped_queries: append_query(query)}

def match(query = %Query{}, hash) when is_map(hash) or is_list(hash) do
  pattern = Enum.map_join(hash, ", ", fn {entity, props} -> Cypher.to_cypher(props, entity) end)
  match(query, pattern)
end
@doc ~S"""
Works as match/2, but will mix with MERGE keyword. Note that Cypher
permits MATCH and MERGE to be used together in some constructs.
# iex> %Query{} |> Query.merge(x: Vertex.cast(Medicine, %{name: "foo"})) |> to_string
"MERGE (x:Disease { name: 'foo' })"
"""
def merge(pattern), do: merge(%Query{}, pattern)

def merge(query = %Query{}, pattern) when is_binary(pattern),
  do: %Query{merge: pattern, piped_queries: append_query(query)}

def merge(query = %Query{}, hash) when is_map(hash) or is_list(hash) do
  pattern = Enum.map_join(hash, ", ", fn {entity, props} -> Cypher.to_cypher(props, entity) end)
  merge(query, pattern)
end
@doc ~S"""
Sets the CREATE clause in the query. Only supports strings for now.

    iex> %Query{} |> Query.create("(x:Disease { id: 42 })") |> to_string
    "CREATE (x:Disease { id: 42 })"
"""
def create(pattern), do: create(%Query{}, pattern)

def create(query = %Query{}, pattern) when is_binary(pattern),
  do: %Query{create: pattern, piped_queries: append_query(query)}

def create(query = %Query{}, vert = %Vertex{}),
  do: create(query, Cypher.to_cypher(vert))
# If you set the query to a string or nil, just accept it directly
@doc ~S"""
Sets the WHERE clause on the query.
Can accept a string, nil (to clear any previous clause), or a Map or
Keyword list of key/values that are ANDed together. ONLY supports
equality checks for the moment.

    iex> %Query{} |> Query.where("x = y") |> to_string
    "WHERE x = y"

    iex> %Query{} |> Query.where(x: "y", foo: "bar") |> to_string
    "WHERE (x = 'y') AND (foo = 'bar')"

    iex> %Query{} |> Query.where(%{x: "y", foo: "bar"}) |> to_string
    "WHERE (foo = 'bar') AND (x = 'y')"

Note the order is different between a Keyword list and a Map.
"""
def where(clause), do: where(%Query{}, clause)

def where(query = %Query{}, clause) when is_binary(clause) or is_nil(clause),
  do: %Query{where: clause, piped_queries: append_query(query)}

def where(query = %Query{}, hash) when is_map(hash) or is_list(hash) do
  conditions =
    Enum.map_join(hash, ") AND (", fn {key, value} ->
      "#{key} = #{Cypher.escaped_quote(value)}"
    end)

  where(query, "(" <> conditions <> ")")
end
@doc ~S"""
Assigns the SET clause in the Cypher query.
## Examples
Example of manually setting the clause directly...
iex> %Query{} |> Query.set("x.name = '<NAME>'") |> to_string
"SET x.name = '<NAME>'"
Example of passing a Map of Maps (top-level keys are the matched
entities, the map value is the key/value maps to update)
iex> %Query{} |> Query.set(x: %{name: "<NAME>"}) |> to_string
"SET x += {name: '<NAME>'}"
If you want to unset a property, send nil as the value
iex> %Query{} |> Query.set(x: %{name: nil}) |> to_string
"SET x += {name: NULL}"
"""
def set(clause), do: set(%Query{}, clause)
# A raw string (or nil, to clear the clause) is stored directly.
def set(query=%Query{}, clause) when is_binary(clause) or is_nil(clause) do
  %Query{set: clause, piped_queries: append_query(query)}
end
# Maps are normalized to keyword lists and re-dispatched.
def set(query=%Query{}, hash) when is_map(hash) do
  set(query, Map.to_list(hash))
end
# Keyword lists: if :on_create/:on_match keys are present, store the
# conditional-SET structure (a keyword list rendered later by the
# String.Chars implementation as "ON CREATE SET ..."/"ON MATCH SET ...");
# otherwise render each {entity, props} pair and join them into one string.
def set(query=%Query{}, kwlist) when is_list(kwlist) do
  clauses = Keyword.take(kwlist, [:on_create, :on_match])
  case clauses do
    [] -> set(query, Enum.map(kwlist, &set_one/1) |> Enum.join(", "))
    clauses -> %Query{set: set_on_multiple(clauses), piped_queries: append_query(query)}
  end
end
# Builds [{:on_create | :on_match, rendered_clause_string}] pairs.
defp set_on_multiple(clauses) do
  Enum.map(clauses, fn({x,y}) ->
    {x, Enum.map(y, &set_one/1) |> Enum.join(", ")}
  end)
end
# Renders one {entity, properties} pair as "entity += {props}".
defp set_one({x, y}) do
  "#{to_string x} += #{Cypher.set_values(y)}"
end
@doc ~S"""
Assigns the DELETE clause in the Cypher query. Pass an array of elements
to DELETE -- if you use the keyword detach: to identify the list, the
elements will be detached instead.
## Examples
Example of manually setting the clause directly...
iex> %Query{} |> Query.delete("x") |> to_string
"DELETE x"
Example of passing a Map of Maps (top-level keys are the matched
entities, the map value is the key/value maps to update)
iex> %Query{} |> Query.delete(["x", "y"])|> to_string
"DELETE x,y"
Examples of detaching
iex> %Query{} |> Query.delete(detach: "x")|> to_string
"DETACH DELETE x"
iex> %Query{} |> Query.delete(detach: ["x", "y"])|> to_string
"DETACH DELETE x,y"
"""
def delete(v), do: delete(%Query{}, v)
# DETACH DELETE with a list of identifiers; stored as %{detach: list} so
# the renderer can distinguish it from a plain DELETE.
def delete(query=%Query{}, detach: v) when is_list(v) do
  do_delete(query, %{detach: v})
end
# DETACH DELETE with a single identifier; normalize to a list.
def delete(query=%Query{}, detach: v), do: delete(query, detach: [v])
# Single bare identifier; normalize to a list. (`not is_list(v)` replaces
# the unidiomatic `is_list(v) != true`.)
def delete(query=%Query{}, v) when not is_list(v), do: delete(query, [v])
def delete(query=%Query{}, v), do: do_delete(query, v)
# Stores the delete target and records this query in the pipe chain.
defp do_delete(query=%Query{}, v) do
  %Query{delete: v, piped_queries: append_query(query)}
end
# Set the return hash -- should be variable name => type (or nil)
# This can be used later to convert the resulting rows to objects of the
# appropriate type.
@doc ~S"""
Sets the values to be returned as keys mapped to types; set to true
(boolean) to convert to %Vertex{} structs (note that the labels will
not be filled in, they would have to be requested separately), or
set to nil if no conversion should be done at all.
Currently the values are not yet used (but can be used to cast the
result to the appropriate struct).
If passed a list, will use each element as a return key, but no type
conversion will be done on the result.
If passed a string, splits on commas and uses each part as a return
key, again with no type conversion.
iex> %Query{} |> Query.returning("x,y") |> to_string
"RETURN x, y"
iex> %Query{} |> Query.returning(["x", "y"]) |> to_string
"RETURN x, y"
iex> %Query{} |> Query.returning(%{x: Medicine, y: Treatment}) |> to_string
"RETURN x, y"
"""
def returning(clause), do: returning(%Query{}, clause)
# Keyword lists are stored as-is; plain lists get nil types attached.
def returning(query=%Query{}, kwlist) when is_list(kwlist) do
  clause = case Keyword.keyword?(kwlist) do
    true -> kwlist
    _ -> Enum.map(kwlist, fn x -> {x, nil} end)
  end
  %Query{return: clause, piped_queries: append_query(query)}
end
def returning(query=%Query{}, hash) when is_map(hash) do
  returning(query, Map.to_list(hash))
end
def returning(query=%Query{}, string) when is_binary(string) do
  # Split string on commas, then call again as a list...
  # (String.strip/1 is deprecated; String.trim/1 is the direct replacement.)
  parts = String.split(string, ",") |> Enum.map(&String.trim/1)
  returning(query, parts)
end
def returning(query=%Query{}, atom) when is_atom(atom) do
  returning(query, to_string(atom))
end
@doc "Sets the ORDER BY clause; accepts only a string or nil (to clear it)."
def order(clause), do: order(%Query{}, clause)
def order(query = %Query{}, clause) when is_nil(clause) or is_binary(clause) do
  %Query{order: clause, piped_queries: append_query(query)}
end
@doc "Sets the LIMIT clause; accepts only an integer or nil (to clear it)."
def limit(lim), do: limit(%Query{}, lim)
def limit(query = %Query{}, lim) when is_nil(lim) or is_integer(lim) do
  %Query{limit: lim, piped_queries: append_query(query)}
end
# Records the current query snapshot at the end of the piped-query trail,
# so chained clause calls can later be rendered in order.
defp append_query(query) do
  query.piped_queries ++ [query]
end
end
defimpl String.Chars, for: Callisto.Query do
  # Renders a %Callisto.Query{} into its Cypher string. Any chained (piped)
  # queries are rendered first, one per line, followed by this query.
  def to_string(q) do
    parse_chained_queries(q) <> do_to_string(q)
  end

  # Renders each clause of a single query, drops the unset (nil) ones, and
  # joins the rest with single spaces.
  defp do_to_string(q) do
    [
      match(q.match),
      merge(q.merge),
      create(q.create),
      where(q.where),
      set(q.set),
      delete(q.delete),
      return(q.return),
      order(q.order),
      limit(q.limit)
    ]
    |> Enum.reject(&is_nil/1)
    |> Enum.join(" ")
  end

  # Renders previously-piped queries (each clause-setting call snapshots the
  # prior query into piped_queries). The first entry is skipped --
  # NOTE(review): presumably it is the initial empty query; confirm against
  # Query.append_query/1.
  defp parse_chained_queries(q) do
    cond do
      Enum.count(q.piped_queries) > 1 ->
        clause = q.piped_queries
        |> List.delete_at(0)
        |> Enum.map(&do_to_string/1)
        |> Enum.join("\n")
        clause <> "\n"
      true ->
        ""
    end
  end

  defp match(nil), do: nil
  defp match(clause), do: "MATCH #{clause}"
  defp merge(nil), do: nil
  defp merge(clause), do: "MERGE #{clause}"
  defp create(nil), do: nil
  defp create(clause), do: "CREATE #{clause}"
  defp where(nil), do: nil
  defp where(clause), do: "WHERE #{clause}"
  defp set(nil), do: nil
  # Conditional SETs arrive as [{:on_create | :on_match, clause_string}];
  # render each as e.g. "ON CREATE SET <clause>".
  defp set(clause) when is_list(clause) do
    Enum.map(clause, fn({x,y}) ->
      {(Atom.to_string(x) |> String.upcase |> String.replace("_", " ")), y}
    end)
    |> Enum.map(fn({x,y}) ->
      Enum.join([x,y], " SET ")
    end)
    |> Enum.join(" ")
  end
  defp set(clause), do: "SET #{clause}"
  defp delete(nil), do: nil
  defp delete(array) when is_list(array), do: "DELETE #{Enum.join(array, ",")}"
  defp delete(%{detach: array}), do: "DETACH #{delete(array)}"
  defp order(nil), do: nil
  defp order(clause), do: "ORDER BY #{clause}"
  defp limit(nil), do: nil
  defp limit(num), do: "LIMIT #{num}"
  defp return(nil), do: nil
  defp return(string) when is_binary(string) do
    "RETURN #{string}"
  end
  # Non-string returns are keyword lists of {key, type}; only the keys are
  # rendered, joined with commas.
  defp return(hash) do
    "RETURN #{Keyword.keys(hash) |> Enum.join(", ")}"
  end
end
|
lib/callisto/query.ex
| 0.712832
| 0.452536
|
query.ex
|
starcoder
|
defmodule BMP3XX do
  @moduledoc """
  Read pressure and temperature from a Bosch BMP388 or BMP390 sensor
  """
  use GenServer
  require Logger

  @type sensor_mod :: BMP3XX.BMP388 | BMP3XX.BMP390
  @type bus_address :: 0x76 | 0x77

  @typedoc """
  BMP3XX GenServer start_link options
  * `:name` - a name for the GenServer
  * `:bus_name` - which I2C bus to use (e.g., `"i2c-1"`)
  * `:bus_address` - the address of the BMP3XX (defaults to 0x77)
  * `:sea_level_pa` - a starting estimate for the sea level pressure in Pascals
  """
  @type options() :: [
          name: GenServer.name(),
          bus_name: binary,
          bus_address: bus_address,
          sea_level_pa: number
        ]

  # Standard-atmosphere default used for altitude estimates (Pascals).
  @sea_level_pa 100_000
  @default_bus_address 0x77
  # The sensor is re-sampled once per second (see handle_info/2).
  @polling_interval_ms 1000

  @doc """
  Start a new GenServer for interacting with a BMP3XX
  Normally, you'll want to pass the `:bus_name` option to specify the I2C
  bus going to the BMP3XX.
  """
  @spec start_link(options()) :: GenServer.on_start()
  def start_link(init_arg) do
    GenServer.start_link(__MODULE__, init_arg, name: init_arg[:name])
  end

  @doc """
  Return the type of sensor
  This function returns the cached result of reading the ID register
  if the part is recognized. If not, it returns the integer read.
  """
  @spec sensor_mod(GenServer.server()) :: sensor_mod()
  def sensor_mod(server \\ __MODULE__) do
    GenServer.call(server, :sensor_mod)
  end

  @doc """
  Measure the current temperature, pressure, altitude
  This returns the most recently polled measurement (the sensor is sampled
  about once per second), or `{:error, :no_measurement}` if no measurement
  has been taken yet.
  """
  @spec measure(GenServer.server()) :: {:ok, struct} | {:error, any()}
  def measure(server \\ __MODULE__) do
    GenServer.call(server, :measure)
  end

  @doc """
  Update the sea level pressure estimate
  The sea level pressure should be specified in Pascals. The estimate
  is used for altitude calculations.
  """
  @spec update_sea_level_pressure(GenServer.server(), number) :: :ok
  def update_sea_level_pressure(server \\ __MODULE__, new_estimate) do
    GenServer.call(server, {:update_sea_level, new_estimate})
  end

  @doc """
  Force the altitude to a known value
  Altitude calculations depend on the accuracy of the sea level pressure estimate. Since
  the sea level pressure changes based on the weather, it needs to be kept up to date
  or altitude measurements can be pretty far off. Another way to set the sea level pressure
  is to report a known altitude. Call this function with the current altitude in meters.
  This function returns an error if no barometric pressure sample has been
  taken yet.
  """
  @spec force_altitude(GenServer.server(), number) :: :ok | {:error, any()}
  def force_altitude(server \\ __MODULE__, altitude_m) do
    GenServer.call(server, {:force_altitude, altitude_m})
  end

  @doc """
  Detect the type of sensor that is located at the I2C address
  If the sensor is a known BMP3XX sensor, the response will either contain
  `:bmp388` or `:bmp390`. If the sensor does not report back that it is one of
  those two types of sensors the return value will contain the id value that
  was reported back from the sensor.
  The bus address is likely going to be 0x77 (the default) or 0x76.
  """
  @spec detect(binary, bus_address) :: {:ok, sensor_mod()} | {:error, any()}
  def detect(bus_name, bus_address \\ @default_bus_address) do
    case transport_mod().open(bus_name: bus_name, bus_address: bus_address) do
      {:ok, transport} -> BMP3XX.Comm.sensor_type(transport)
      _error -> {:error, :device_not_found}
    end
  end

  @impl GenServer
  def init(args) do
    bus_name = Access.get(args, :bus_name, "i2c-1")
    bus_address = Access.get(args, :bus_address, @default_bus_address)
    sea_level_pa = Access.get(args, :sea_level_pa, @sea_level_pa)
    Logger.info(
      "[BMP3XX] Starting on bus #{bus_name} at address #{inspect(bus_address, base: :hex)}"
    )
    # Open the transport and identify the part; hardware initialization and
    # the first reading are deferred to handle_continue/2 so init/1 stays fast.
    with {:ok, transport} <-
           transport_mod().open(bus_name: bus_name, bus_address: bus_address),
         {:ok, sensor_mod} <- BMP3XX.Comm.sensor_type(transport) do
      state = %BMP3XX.Sensor{
        calibration: nil,
        last_measurement: nil,
        sea_level_pa: sea_level_pa,
        sensor_mod: sensor_mod,
        transport: transport
      }
      {:ok, state, {:continue, :start_measuring}}
    else
      _error -> {:stop, :device_not_found}
    end
  end

  @impl GenServer
  def handle_continue(:start_measuring, state) do
    Logger.info("[BMP3XX] Initializing sensor type #{state.sensor_mod}")
    # Initialize the hardware, take the first reading, then start polling.
    new_state = state |> init_sensor() |> read_and_put_new_measurement()
    Process.send_after(self(), :schedule_measurement, @polling_interval_ms)
    {:noreply, new_state}
  end

  @impl GenServer
  def handle_call(:measure, _from, state) do
    # Serve the cached measurement; no I2C transaction happens here.
    if state.last_measurement do
      {:reply, {:ok, state.last_measurement}, state}
    else
      {:reply, {:error, :no_measurement}, state}
    end
  end

  def handle_call(:sensor_mod, _from, state) do
    {:reply, state.sensor_mod, state}
  end

  def handle_call({:update_sea_level, new_estimate}, _from, state) do
    {:reply, :ok, %{state | sea_level_pa: new_estimate}}
  end

  def handle_call({:force_altitude, altitude_m}, _from, state) do
    # Back-compute the sea level pressure from the known altitude and the
    # last pressure sample.
    if state.last_measurement do
      sea_level = BMP3XX.Calc.sea_level_pressure(state.last_measurement.pressure_pa, altitude_m)
      {:reply, :ok, %{state | sea_level_pa: sea_level}}
    else
      {:reply, {:error, :no_measurement}, state}
    end
  end

  @impl GenServer
  def handle_info(:schedule_measurement, state) do
    # Self-rescheduling poll loop.
    Process.send_after(self(), :schedule_measurement, @polling_interval_ms)
    {:noreply, read_and_put_new_measurement(state)}
  end

  # Initializes the hardware; raises (crashing the GenServer, so the
  # supervisor can restart it) if initialization fails.
  defp init_sensor(state) do
    case state.sensor_mod.init(state) do
      {:ok, state} -> state
      _error -> raise("Error initializing sensor")
    end
  end

  defp read_sensor(state) do
    state.sensor_mod.read(state)
  end

  # Reads the sensor and caches the result; read failures are logged and
  # the previous measurement is kept.
  defp read_and_put_new_measurement(state) do
    case read_sensor(state) do
      {:ok, measurement} ->
        %{state | last_measurement: measurement}
      {:error, reason} ->
        Logger.error("[BMP3XX] Error reading measurement: #{inspect(reason)}")
        state
    end
  end

  # Overridable in app config so tests can substitute a mock transport.
  defp transport_mod() do
    Application.get_env(:bmp3xx, :transport_mod, BMP3XX.Transport.I2C)
  end
end
|
lib/bmp3xx.ex
| 0.928999
| 0.545588
|
bmp3xx.ex
|
starcoder
|
defmodule Elsa do
  @moduledoc """
  Provides public api to Elsa. Top-level short-cuts to sub-module functions
  for performing basic interactions with Kafka including listing, creating,
  deleting, and validating topics. Also provides a function for one-off
  produce_sync of message(s) to a topic.
  """
  @typedoc "named connection, must be an atom"
  @type connection :: atom
  @type hostname :: atom | String.t()
  @type portnum :: pos_integer
  @typedoc "endpoints to connect to kafka brokers"
  @type endpoints :: [{hostname, portnum}]
  @type topic :: String.t()
  @type partition :: non_neg_integer

  # Convenience delegates to the topic/producer/fetch sub-modules.
  defdelegate list_topics(endpoints), to: Elsa.Topic, as: :list
  defdelegate topic?(endpoints, topic), to: Elsa.Topic, as: :exists?
  defdelegate create_topic(endpoints, topic, opts \\ []), to: Elsa.Topic, as: :create
  defdelegate delete_topic(endpoints, topic), to: Elsa.Topic, as: :delete
  defdelegate produce(endpoints_or_connection, topic, messages, opts \\ []), to: Elsa.Producer
  defdelegate fetch(endpoints, topic, opts \\ []), to: Elsa.Fetch

  @doc """
  Define a default client name for establishing persistent connections to
  the Kafka cluster by producers and consumers. Useful for optimizing
  interactions by passing the identifier of a standing connection instead
  of instantiating a new one at each interaction, but when only a single connection
  is required, alleviating the need for the caller to differentiate and pass
  around a name.
  """
  @spec default_client() :: atom()
  def default_client(), do: :elsa_default_client

  defmodule Message do
    @moduledoc """
    Defines the structure of a Kafka message provided by the Elsa library and
    the function to construct the message struct.
    """
    import Record, only: [defrecord: 2, extract: 2]

    # Imports the :kafka_message record definition from the kafka_protocol
    # library so brod-supplied records can be pattern-matched in new/2.
    defrecord :kafka_message, extract(:kafka_message, from_lib: "kafka_protocol/include/kpro_public.hrl")

    @type kafka_message :: record(:kafka_message, key: term(), value: term(), offset: integer(), ts: integer())
    @type elsa_message :: %Elsa.Message{
            topic: Elsa.topic(),
            partition: Elsa.partition(),
            offset: integer,
            key: term,
            value: term,
            generation_id: integer | nil,
            headers: list
          }

    defstruct [
      :topic,
      :partition,
      :offset,
      :key,
      :value,
      :timestamp,
      :generation_id,
      :headers
    ]

    @doc """
    Constructs a message struct from the imported definition of a kafka_message as
    defined by the brod library with the addition of the topic and partition the message
    was read from as well as the optional generation id as defined by the message's relationship
    to a consumer group. Generation id defaults to `nil` in the event the message is retrieved
    outside of the context of a consumer group.
    """
    @spec new(kafka_message(), keyword()) :: elsa_message()
    def new(kafka_message(offset: offset, key: key, value: value, ts: timestamp, headers: headers), attributes) do
      %Message{
        topic: Keyword.fetch!(attributes, :topic),
        partition: Keyword.fetch!(attributes, :partition),
        offset: offset,
        key: key,
        value: value,
        timestamp: timestamp,
        generation_id: Keyword.get(attributes, :generation_id),
        headers: headers
      }
    end
  end

  # Raised when a connection to the Kafka cluster cannot be established.
  defmodule ConnectError do
    defexception [:message]
  end
end
|
lib/elsa.ex
| 0.821868
| 0.522446
|
elsa.ex
|
starcoder
|
defmodule Noizu.EmailService.Email.Binding.Substitution.Dynamic.Effective do
  @moduledoc """
  Tracks the effective set of dynamic template bindings: the selectors that
  must be bound (`bind`), the values actually bound (`bound`), and which
  optional/required selectors remain unbound after evaluation (`unbound`).
  """
  @vsn 1.0
  alias Noizu.EmailService.Email.Binding.Substitution.Dynamic.Selector
  alias Noizu.EmailService.Email.Binding.Substitution.Dynamic.Section
  alias Noizu.EmailService.Email.Binding.Substitution.Dynamic.Formula

  @type t :: %__MODULE__{
               bind: [Selector.t],
               bound: Map.t,
               unbound: %{:optional => [Selector.t], :required => [Selector.t]},
               outcome: tuple | :ok,
               meta: Map.t,
               vsn: float
             }

  defstruct [
    bind: [],
    bound: %{},
    unbound: %{
      optional: [],
      required: []
    },
    outcome: :ok,
    meta: %{},
    vsn: @vsn
  ]

  @doc """
  Inserts a selector's path into the book-keeping tree used by
  `interstitial_map/4` and `build_output/4`. Each node carries a type
  (:trace interior / :scalar existence check / :copy full value), an
  index_size for list nodes, and a children map.
  """
  def insert_path(blob, path, selector, acc \\ [])

  # Terminal path segment: ensure the node exists, bump the parent's
  # index_size for {:at, _} segments, then mark the leaf as :copy (full
  # value) or :scalar (existence/primitive check).
  #
  # FIX: the original bound the updated tree to `block` but applied the
  # index bump to the stale `blob` and never used that result, silently
  # discarding index_size updates for terminal list elements. All updates
  # are now threaded through a single variable.
  def insert_path(blob, [h], selector, p) do
    p = p ++ [h]
    blob = update_in(blob, p, &(&1 || %{type: :trace, index_size: 0, selector: nil, children: %{}}))
    # Track indexes so list nodes know how many slots to emit.
    blob =
      case h do
        {:at, v} ->
          parent_index_size_path = Enum.slice(p, 0 .. -3) ++ [:index_size]
          index_size = get_in(blob, parent_index_size_path)
          cond do
            v + 1 >= index_size -> put_in(blob, parent_index_size_path, v + 1)
            :else -> blob
          end
        _else -> blob
      end
    # Set to :copy if terminal full-value insert, or :scalar for existence
    # checks that don't require the full body to be included in the payload.
    # An existing :copy marking always wins.
    type = get_in(blob, p ++ [:type])
    scalar? = Selector.scalar?(selector)
    cond do
      type == :copy -> blob
      scalar? ->
        blob
        |> put_in(p ++ [:type], :scalar)
        |> put_in(p ++ [:selector], selector)
      :else ->
        blob
        |> put_in(p ++ [:type], :copy)
        |> put_in(p ++ [:selector], selector)
    end
  end

  # Intermediate path segment: ensure the node exists, maintain the parent's
  # index_size for {:at, _} segments, then recurse into the node's children.
  def insert_path(blob, [h|t], selector, p) do
    p = p ++ [h]
    blob = update_in(blob, p, &(&1 || %{type: :trace, index_size: 0, selector: nil, children: %{}}))
    # Track indexes
    blob =
      case h do
        {:at, v} ->
          parent_index_size_path = Enum.slice(p, 0 .. -3) ++ [:index_size]
          index_size = get_in(blob, parent_index_size_path)
          cond do
            v + 1 >= index_size -> put_in(blob, parent_index_size_path, v + 1)
            :else -> blob
          end
        _else -> blob
      end
    insert_path(blob, t, selector, p ++ [:children])
  end

  #----------------------
  # interstitial_map
  #----------------------
  @doc """
  Builds the book-keeping tree for this binding's selectors.
  """
  def interstitial_map(%__MODULE__{} = this, _state, _context, _options) do
    # FIX: the original sorted with `&(&1.selector < &1.selector)` -- a
    # comparator of each value against itself, always false, so no sorting
    # ever happened. Sort by the selector path itself.
    this.bind
    |> Enum.uniq()
    |> Enum.sort_by(& &1.selector)
    |> Enum.reduce(%{}, fn(selector, acc) ->
      insert_path(acc, Selector.path(selector), selector)
    end)
  end

  @doc """
  Walks the book-keeping tree and builds the output map of bound values.
  Only {:select, name} roots contribute top-level keys.
  """
  def build_output(v = %{}, state, context, options) do
    Enum.reduce(v, {%{}, state}, fn({k, v}, {acc, s}) ->
      {snippet, s} = build_output_inner({k, v}, %{}, s, context, options)
      case snippet do
        {:value, value} ->
          case k do
            {:select, name} -> {put_in(acc, [name], value), s}
          end
        _else -> {acc, s}
      end
    end)
  end

  # Recursively renders one book-keeping node. Returns {{:value, v} | nil,
  # state}; nil means the path produced nothing and is left barren.
  defp build_output_inner({_k, v}, _blob, state, context, options) do
    cond do
      v.type == :copy ->
        # Full-value copy: execute the selector and pass its value through.
        {value, s} = Noizu.RuleEngine.ScriptProtocol.execute!(v.selector, state, context, options)
        case value do
          {:value, nil} ->
            # consider a copy field a non-result to force required bind error.
            {nil, s}
          _ -> {value, s}
        end
      v.type == :scalar ->
        # Existence/primitive check: containers collapse to `true` so the
        # full body need not be shipped in the payload.
        {value, s} = Noizu.RuleEngine.ScriptProtocol.execute!(v.selector, state, context, options)
        case value do
          {:value, value} ->
            cond do
              # Path existed (e.g. map.key) but response was null; include it.
              # A missing value returns nil instead, to avoid improperly
              # triggering conditionals higher in the formula tree.
              value == nil -> {{:value, value}, s}
              is_integer(value) || is_float(value) || is_atom(value) -> {{:value, value}, s}
              is_map(value) || is_list(value) || is_tuple(value) -> {{:value, true}, s}
              :else -> {{:value, value}, s}
            end
          _else -> {nil, s}
        end
      v.type == :trace ->
        # Interior node: build a map (index_size == 0) or an index_size-slot
        # list from the children.
        cond do
          v.index_size == 0 ->
            snippet = %{}
            {snippet, state} = Enum.reduce(v.children, {snippet, state}, fn({k2, v2}, {acc_snippet, acc_state}) ->
              {sv, s} = build_output_inner({k2, v2}, acc_snippet, acc_state, context, options)
              case sv do
                {:value, value} ->
                  case k2 do
                    {:key, name} -> {put_in(acc_snippet, [name], value), s}
                    {:select, name} -> {put_in(acc_snippet, [name], value), s}
                  end
                _else -> {acc_snippet, s}
              end
            end)
            cond do
              # Value never reached; leave path barren (e.g. path ended in a
              # scalar request that was nil).
              snippet == %{} -> {nil, state}
              :else -> {{:value, snippet}, state}
            end
          :else ->
            snippet = Enum.map(0 .. v.index_size - 1, fn(_) -> nil end)
            {hit?, snippet, state} = Enum.reduce(v.children, {false, snippet, state}, fn({k2, v2}, {acc_hit?, acc_snippet, acc_state}) ->
              {sv, s} = build_output_inner({k2, v2}, acc_snippet, acc_state, context, options)
              case sv do
                {:value, value} ->
                  case k2 do
                    {:at, index} -> {true, put_in(acc_snippet, [Access.at(index)], value), s}
                  end
                _else -> {acc_hit?, acc_snippet, s}
              end
            end)
            cond do
              # No slot was ever populated; leave path barren.
              !hit? -> {nil, state}
              :else -> {{:value, snippet}, state}
            end
        end
    end
  end

  #----------------------
  # finalize
  #----------------------
  @doc """
  Builds the bound output and records any selectors that remain unbound,
  split into optional (scalar existence checks) and required.
  """
  def finalize(%__MODULE__{} = this, state, context, options) do
    book_keeping = interstitial_map(this, state, context, options)
    {output, state} = build_output(book_keeping, state, context, options)
    this = %__MODULE__{this | bound: output}
    # Walk the binds to verify everything resolved; only non-scalars are
    # treated as hard requirements.
    Enum.reduce(this.bind, {this, state}, fn(selector, {this, state}) ->
      {bound?, _val, state} = Selector.bound_inner(selector, output, state, context, options)
      cond do
        bound? -> {this, state}
        Selector.scalar?(selector) -> {update_in(this, [Access.key(:unbound), Access.key(:optional)], &((&1 || []) ++ [selector])), state}
        :else -> {update_in(this, [Access.key(:unbound), Access.key(:required)], &((&1 || []) ++ [selector])), state}
      end
    end)
  end

  #----------------------
  # new
  #----------------------
  @doc """
  Builds a new effective binding from a Section, Formula node, selector
  list, or nil.
  """
  def new(%Section{} = section, state, _context, _options) do
    {%__MODULE__{bind: Enum.uniq(section.bind)}, state}
  end
  # Conditionals only need existence checks for their selectors.
  def new(%Formula.IfThen{condition_clause: _formula} = section, state, _context, _options) do
    selectors = Formula.selectors(section)
                |> Enum.map(&(Selector.exists(&1)))
                |> Enum.uniq()
    {%__MODULE__{bind: selectors}, state}
  end
  def new(%Formula.Each{}, state, _context, _options) do
    {%__MODULE__{}, state}
  end
  def new(bind, state, _context, _options) when is_list(bind) do
    {%__MODULE__{bind: Enum.uniq(bind)}, state}
  end
  def new(nil, state, _context, _options) do
    {%__MODULE__{bind: []}, state}
  end

  #----------------------
  # set_wildcard_hint
  #----------------------
  @doc """
  Records a wildcard resolution hint (list index or map key) both in the
  rule-engine state and in this binding's meta.
  """
  def set_wildcard_hint(%__MODULE__{} = this, %Selector{} = selector, :list, {index, value}, state, context, _options) do
    {r, s} = Noizu.RuleEngine.StateProtocol.get!(state, :wildcards, context)
    r = put_in(r || %{}, [selector.selector], %{index: index, value: value, type: :list})
    s = Noizu.RuleEngine.StateProtocol.put!(s, :wildcards, r, context)
    # NOTE(review): unlike the :kv clause, this clause does not record a
    # :last_wildcard entry in state -- confirm the asymmetry is intentional.
    this = put_in(this, [Access.key(:meta), :wildcard], {selector.selector, Selector.set_wildcard_hint(selector, {:at, index})})
    {this, s}
  end

  def set_wildcard_hint(%__MODULE__{} = this, %Selector{} = selector, :kv, {key, value}, state, context, _options) do
    {r, s} = Noizu.RuleEngine.StateProtocol.get!(state, :wildcards, context)
    r = put_in(r || %{}, [selector.selector], %{key: key, value: value, type: :kv})
    s = Noizu.RuleEngine.StateProtocol.put!(s, :wildcards, r, context)
    s = Noizu.RuleEngine.StateProtocol.put!(s, :last_wildcard, {selector.selector, Selector.set_wildcard_hint(selector, {:key, key})}, context)
    this = put_in(this, [Access.key(:meta), :wildcard], {selector.selector, Selector.set_wildcard_hint(selector, {:key, key})})
    {this, s}
  end

  #----------------------
  # clear_wildcard_hint
  #----------------------
  @doc """
  Removes a previously recorded wildcard hint from state and meta.
  """
  def clear_wildcard_hint(%__MODULE__{} = this, %Selector{} = selector, _type, state, context, _options) do
    {r, s} = Noizu.RuleEngine.StateProtocol.get!(state, :wildcards, context)
    r = Map.delete(r || %{}, selector.selector)
    s = Noizu.RuleEngine.StateProtocol.put!(s, :wildcards, r, context)
    {_, this} = pop_in(this, [Access.key(:meta), :wildcard])
    {this, s}
  end

  #----------------------
  # merge
  #----------------------
  @doc """
  Merges two effective bindings. If `bind_a` carries a wildcard hint, any
  selectors in `bind_b` under the wildcard path are rebased onto the
  resolved wildcard selector before being unioned into `bind_a`.
  """
  def merge(%__MODULE__{} = bind_a, %__MODULE__{} = bind_b, state, _context, _options) do
    r = cond do
          bind_a.meta[:wildcard] ->
            {ws, wr} = bind_a.meta[:wildcard]
            wsl = length(ws)
            Enum.map(bind_b.bind || [], fn(b_s) ->
              cond do
                List.starts_with?(b_s.selector, ws) ->
                  # Rebase: swap the wildcard prefix for the resolved one.
                  s = wr.selector ++ Enum.slice(b_s.selector, wsl .. -1)
                  %Selector{b_s | selector: s}
                :else -> b_s
              end
            end)
          :else -> bind_b.bind
        end
        |> Enum.uniq
    {%__MODULE__{bind_a | bind: Enum.uniq(bind_a.bind ++ r)}, state}
  end
  def merge(%__MODULE__{} = bind_a, nil, state, _context, _options), do: {bind_a, state}
  def merge(nil, %__MODULE__{} = bind_b, state, _context, _options) do
    {%__MODULE__{bind_b | bind: Enum.uniq(bind_b.bind)}, state}
  end
end
|
lib/email_service/entities/email/binding/substitution/dynamic/effective.ex
| 0.591605
| 0.440469
|
effective.ex
|
starcoder
|
defmodule Univrse.Alg.AES_CBC_HMAC do
  @moduledoc """
  AES_CBC_HMAC algorithm module.
  Sign and encrypt messages using AES-CBC symetric encryption, with HMAC message
  authentication.
  https://tools.ietf.org/html/rfc7518#section-5.2.2
  """
  alias Univrse.Key

  @doc """
  Decrypts the cyphertext with the key using the specified algorithm.
  Accepted options:
  * `aad` - Additional authenticated data
  * `iv` - Initialization vector
  * `tag` - HMAC authentication tag
  """
  @spec decrypt(String.t, binary, Key.t, keyword) :: {:ok, binary} | {:error, any}
  def decrypt(alg, encrypted, key, opts \\ [])
  # Key length is algorithm-bound: 32 bytes for A128CBC-HS256, 64 for
  # A256CBC-HS512 (each key is half MAC key, half AES key).
  def decrypt(alg, encrypted, %Key{type: "oct", params: %{k: k}}, opts)
    when (alg == "A128CBC-HS256" and byte_size(k) == 32)
    or (alg == "A256CBC-HS512" and byte_size(k) == 64)
  do
    aad = Keyword.get(opts, :aad, "")
    iv = Keyword.get(opts, :iv, "")
    tag = Keyword.get(opts, :tag, "")
    # Split the key: first half is the HMAC (MAC) key, second half the AES key.
    keylen = div(byte_size(k), 2)
    <<m::binary-size(keylen), k::binary-size(keylen)>> = k
    # MAC input is AAD || IV || ciphertext || AL, where AL is the AAD length
    # in bits as a 64-bit big-endian integer (RFC 7518 section 5.2.2).
    macmsg = aad <> iv <> encrypted <> <<bit_size(aad)::big-size(64)>>
    # NOTE(review): the tag check via binary prefix pattern match is not
    # constant-time; a timing-safe comparison would be preferable.
    with <<^tag::binary-size(keylen), _::binary>> <- :crypto.mac(:hmac, hash(alg), m, macmsg),
         result when is_binary(result) <- :crypto.crypto_one_time(cipher(alg), k, iv, encrypted, false)
    do
      {:ok, pkcs7_unpad(result)}
    else
      {:error, _, error} ->
        {:error, error}
      :error ->
        {:error, "Decrypt error"}
      macresult when is_binary(macresult) ->
        {:error, "HMAC validation failed"}
    end
  end
  def decrypt(_alg, _encrypted, _key, _opts),
    do: {:error, :invalid_key}

  @doc """
  Encrypts the message with the key using the specified algorithm. Returns a
  three part tuple containing the encrypted cyphertext and any headers to add to
  the Recipient.
  Accepted options:
  * `aad` - Additional authenticated data
  * `iv` - Initialization vector (randomly generated if not supplied)
  """
  @spec encrypt(String.t, binary, Key.t, keyword) :: {:ok, binary, map} | {:error, any}
  def encrypt(alg, message, key, opts \\ [])
  def encrypt(alg, message, %Key{type: "oct", params: %{k: k}}, opts)
    when (alg == "A128CBC-HS256" and byte_size(k) == 32)
    or (alg == "A256CBC-HS512" and byte_size(k) == 64)
  do
    aad = Keyword.get(opts, :aad, "")
    iv = Keyword.get(opts, :iv, :crypto.strong_rand_bytes(16))
    # Split the key: first half is the HMAC (MAC) key, second half the AES key.
    keylen = div(byte_size(k), 2)
    <<m::binary-size(keylen), k::binary-size(keylen)>> = k
    message = pkcs7_pad(message)
    case :crypto.crypto_one_time(cipher(alg), k, iv, message, true) do
      encrypted when is_binary(encrypted) ->
        macmsg = aad <> iv <> encrypted <> <<bit_size(aad)::big-size(64)>>
        # The tag is the truncated (first keylen bytes) HMAC of macmsg.
        <<tag::binary-size(keylen), _::binary>> = :crypto.mac(:hmac, hash(alg), m, macmsg)
        {:ok, encrypted, %{"iv" => iv, "tag" => tag}}
      {:error, _, error} ->
        {:error, error}
    end
  end
  def encrypt(_alg, _message, _key, _opts),
    do: {:error, :invalid_key}

  # Returns the hash alg for the given algorithm
  defp hash("A128CBC-HS256"), do: :sha256
  defp hash("A256CBC-HS512"), do: :sha512

  # Returns the cipher for the given algorithm
  defp cipher("A128CBC-HS256"), do: :aes_128_cbc
  defp cipher("A256CBC-HS512"), do: :aes_256_cbc

  # Pads the message using PKCS7.
  # NOTE(review): deviates from strict PKCS#7 by adding no padding block
  # when the message is already 16-byte aligned.
  defp pkcs7_pad(message) do
    case rem(byte_size(message), 16) do
      0 -> message
      pad ->
        pad = 16 - pad
        message <> :binary.copy(<<pad>>, pad)
    end
  end

  # Unpads the message using PKCS7.
  # Only pad values 1..15 are stripped (16 is never produced by pkcs7_pad/1
  # above). NOTE(review): a block-aligned plaintext whose last byte happens
  # to be 1..15 is ambiguous and would be over-stripped here -- confirm this
  # is acceptable for the envelope format.
  defp pkcs7_unpad(message) do
    case :binary.last(message) do
      pad when 0 < pad and pad < 16 ->
        :binary.part(message, 0, byte_size(message) - pad)
      _ ->
        message
    end
  end
end
|
lib/univrse/alg/aes_cbc_hmac.ex
| 0.658747
| 0.473475
|
aes_cbc_hmac.ex
|
starcoder
|
defmodule Identicon do
  @moduledoc """
  Generates a 250x250 identicon PNG from an input string. The image is
  derived deterministically from the MD5 hash of the input, so the same
  string always produces the same identicon.
  """

  @doc """
  Runs the full pipeline: hash the input, derive a color and 5x5 grid,
  render the image with :egd, and save it as `<input>.png`.
  """
  def main(input) do
    input
    |> hash_input
    |> pick_color
    |> build_grid
    |> filter_odd_squares
    |> build_pixel_map
    |> draw_image
    |> save_image(input) # input is the second argument; the first comes from the pipe (image)
  end

  # Writes the rendered PNG binary to "<input>.png".
  def save_image(image, input) do
    File.write("#{input}.png", image)
  end

  # Renders a filled 50x50 square for every pixel-map entry using :egd.
  # No `= image` match here since this is the end of the pipeline and we
  # don't need the other struct fields.
  def draw_image(%Identicon.Image{color: color, pixel_map: pixel_map}) do
    image = :egd.create(250, 250)
    fill = :egd.color(color)
    # Enum.each iterates for side effects only (no new collection is built),
    # unlike Enum.map.
    Enum.each pixel_map, fn ({start, stop}) ->
      :egd.filledRectangle(image, start, stop, fill)
    end
    :egd.render(image)
  end

  # Converts each grid index into {top_left, bottom_right} pixel coordinates
  # of its 50x50 cell in the 5-column layout.
  def build_pixel_map(%Identicon.Image{grid: grid} = image) do
    pixel_map = Enum.map grid, fn({_code, index}) ->
      horizontal = rem(index, 5) * 50
      vertical = div(index, 5) * 50
      top_left = {horizontal, vertical}
      bottom_right = {horizontal + 50, vertical + 50}
      {top_left, bottom_right}
    end
    %Identicon.Image{image | pixel_map: pixel_map}
  end

  # Keeps only the even-valued grid cells (those get painted).
  def filter_odd_squares(%Identicon.Image{grid: grid} = image) do
    grid = Enum.filter grid, fn({code, _index}) ->
      rem(code, 2) == 0
    end
    %Identicon.Image{image | grid: grid}
  end

  # Builds the 5x5 grid: chunk the 16 hash bytes into rows of 3, mirror each
  # row to 5 entries, flatten, and attach indexes.
  def build_grid(%Identicon.Image{hex: hex} = image) do
    grid =
      hex # community convention
      # Enum.chunk/2 is deprecated; chunk_every/4 with :discard preserves
      # the original behavior of dropping the leftover 16th byte (a plain
      # chunk_every/2 would keep it and corrupt the grid).
      |> Enum.chunk_every(3, 3, :discard)
      |> Enum.map(&mirror_row/1) # &mirror_row/1 passes a function reference
      |> List.flatten
      |> Enum.with_index
    %Identicon.Image{image | grid: grid}
  end

  # Mirrors [a, b, c] into [a, b, c, b, a].
  def mirror_row(row) do
    [first, second | _tail] = row
    row ++ [second, first] # take row and append the 2 mirrored elements
  end

  # Three equivalent implementations kept for reference; pick_color/1 is the
  # idiomatic one (pattern match directly in the function head).
  def pick_color0(image) do
    %Identicon.Image{hex: hex_list} = image
    [r, g, b | _tail] = hex_list # | _tail matches the rest of the list
    %Identicon.Image{image | color: {r, g, b}}
  end

  def pick_color1(image) do
    %Identicon.Image{hex: [r, g, b | _tail]} = image
    %Identicon.Image{image | color: {r, g, b}}
  end

  def pick_color(%Identicon.Image{hex: [r, g, b | _tail]} = image) do
    %Identicon.Image{image | color: {r, g, b}}
  end

  @doc """
  Creating hash of the string and passing it to Identicon.Image struct.
  ## Examples
  iex> Identicon.hash_input("banana")
  %Identicon.Image{hex: [114, 179, 2, 191, 41, 122, 34, 138, 117, 115, 1, 35, 239, 239, 124, 65]}
  """
  def hash_input(input) do
    # MD5 gives 16 bytes; as a list of integers they seed color and grid.
    hex = :crypto.hash(:md5, input)
    |> :binary.bin_to_list
    %Identicon.Image{hex: hex}
  end
end
|
lib/identicon.ex
| 0.815159
| 0.408395
|
identicon.ex
|
starcoder
|
defmodule Hammox.Protect do
  @moduledoc """
  A `use`able module simplifying protecting functions with Hammox.
  The explicit way is to use `Hammox.protect/3` and friends to generate
  protected versions of functions as anonymous functions. In tests, the most
  convenient way is to generate them once in a setup hook and then resolve
  them from test context. However, this can get quite verbose.
  If you're willing to trade explicitness for some macro magic, doing `use
  Hammox.Protect` in your test module will define functions from the module
  you want to protect in it. The effect is similar to `import`ing the module
  you're testing, but with added benefit of the functions being protected.
  `use Hammox.Protect` supports these options:
  - `:module` (required) — the module you'd like to protect (usually the one
  you're testing in the test module). Equivalent to the first parameter of
  `Hammox.protect/3` in batch usage.
  - `:behaviour` — the behaviour module you'd like to protect the
  implementation module with. Can be skipped if `:module` and `:behaviour`
  are the same module. Equivalent to the second parameter of
  `Hammox.protect/3` in batch usage.
  - `:funs` — An optional explicit list of functions you'd like to protect.
  Equivalent to the third parameter of `Hammox.protect/3` in batch usage.
  """
  alias Hammox.Utils

  defmacro __using__(opts) do
    # Runs in the using module at compile time: extracts and validates the
    # :module / :behaviour / :funs options.
    opts_block =
      quote do
        {module, behaviour, funs} = Hammox.Protect.extract_opts!(unquote(opts))
      end
    # `unquote: false` defers unquoting so `module`, `behaviour` and `funs`
    # below refer to the variables bound by opts_block in the using module,
    # not to variables in this macro's scope.
    funs_block =
      quote unquote: false do
        for {name, arity} <- funs do
          def unquote(name)(
                unquote_splicing(
                  Enum.map(
                    # Zero-arity callbacks get no generated argument variables.
                    case arity do
                      0 -> []
                      arity -> 1..arity
                    end,
                    &Macro.var(:"arg#{&1}", __MODULE__)
                  )
                )
              ) do
            # NOTE(review): the protected fun is rebuilt on every call --
            # presumably acceptable overhead for test code; confirm.
            protected_fun =
              Hammox.Protect.protect(
                {unquote(module), unquote(name), unquote(arity)},
                unquote(behaviour)
              )
            # Forward the generated arguments to the protected version.
            apply(
              protected_fun,
              unquote(
                Enum.map(
                  case arity do
                    0 -> []
                    arity -> 1..arity
                  end,
                  &Macro.var(:"arg#{&1}", __MODULE__)
                )
              )
            )
          end
        end
      end
    quote do
      unquote(opts_block)
      unquote(funs_block)
    end
  end

  @doc false
  # Validates the `use` options; raises unless :module is given and at least
  # one function/callback is found to protect. Returns {module, behaviour,
  # funs} where funs is a list of {name, arity} pairs.
  def extract_opts!(opts) do
    module = Keyword.get(opts, :module)
    behaviour = Keyword.get(opts, :behaviour)
    if is_nil(module) do
      raise ArgumentError,
        message: """
        Please specify :module to protect with Hammox.Protect.
        Example:
        use Hammox.Protect, module: ModuleToProtect
        """
    end
    # When no explicit :behaviour is given, :module itself must define the
    # callbacks.
    module_with_callbacks = behaviour || module
    funs = Keyword.get_lazy(opts, :funs, fn -> get_funs!(module_with_callbacks) end)
    if funs == [] do
      raise ArgumentError,
        message:
          "The module #{inspect(module_with_callbacks)} does not contain any callbacks. Please use a behaviour with at least one callback."
    end
    {module, behaviour, funs}
  end

  @doc false
  # Dispatches to the 1- or 2-arity form of Hammox.protect depending on
  # whether a behaviour was supplied.
  def protect(mfa, nil), do: Hammox.protect(mfa)
  def protect(mfa, behaviour), do: Hammox.protect(mfa, behaviour)

  # Reads the callback list ({name, arity} pairs) from the module's compiled
  # typespec chunk; raises via Utils if the module doesn't exist.
  defp get_funs!(module) do
    Utils.check_module_exists(module)
    {:ok, callbacks} = Code.Typespec.fetch_callbacks(module)
    Enum.map(callbacks, fn {callback, _typespecs} ->
      callback
    end)
  end
end
|
lib/hammox/protect.ex
| 0.791096
| 0.667185
|
protect.ex
|
starcoder
|
defmodule SFTPToolkit.Recursive do
  @moduledoc """
  Module containing functions that allow to do recursive
  operations on the directories.
  """

  # Per-operation SFTP timeout in milliseconds, used whenever the caller
  # does not pass :operation_timeout.
  @default_operation_timeout 5000

  @doc """
  Recursively creates a directory over existing SFTP channel.
  ## Arguments
  Expects the following arguments:
  * `sftp_channel_pid` - PID of the already opened SFTP channel,
  * `path` - path to create,
  * `options` - additional options, see below.
  ## Options
  * `operation_timeout` - SFTP operation timeout (it is a timeout
  per each SFTP operation, not total timeout), defaults to 5000 ms.
  ## Limitations
  This function will not follow symbolic links. If it is going
  to encounter a symbolic link while evaluating existing path
  components, even if it points to a directory, it will return
  an error.
  ## Return values
  On success returns `:ok`.
  On error returns `{:error, reason}`, where `reason` might be one
  of the following:
  * `{:invalid_path, path}` - given path is invalid,
  * `{:make_dir, info}` - `:ssh_sftp.make_dir/3` failed and `info`
  contains the underlying error returned from it,
  * `{:file_info, path, info}` - `:ssh_sftp.read_file_info/3` failed and
  `info` contains the underlying error returned from it,
  * `{:invalid_type, path, type}` - one of the components of the
  path to create, specified as `path` is not a directory, and
  its actual type is specified as `type`,
  * `{:invalid_access, path, access}` - one of the components of
  the path to create, specified as `path` is a directory, but
  its access is invalid and its actual access mode is
  specified as `access`.
  ## Notes
  ### Implementation details
  If we're using SFTP version 3 we get just `:failure` when trying
  to create a directory that already exists, so we have no clear
  error code to distinguish a real error from a case where directory
  just exists and we can proceed.
  Moreover, the path component may exist but it can be a regular file
  which will prevent us from creating a subdirectory.
  Due to these limitations we're checking if directory exists
  prior to each creation of a directory as we can't rely on the
  returned error reasons, even if we use newer versions of SFTP they
  tend to return more fine-grained information.
  ### Timeouts
  It was observed in the wild that underlying `:ssh_sftp.list_dir/3`
  and `:ssh_sftp.read_file_info/3` always returned `{:error, :timeout}`
  with some servers when SFTP version being used was greater than 3,
  at least with Elixir 1.7.4 and Erlang 21.0. If you encounter such
  issues try passing `{:sftp_vsn, 3}` option while creating a SFTP
  channel.
  """
  @spec make_dir_recursive(pid, Path.t(), operation_timeout: timeout) :: :ok | {:error, any}
  def make_dir_recursive(sftp_channel_pid, path, options \\ []) do
    case Path.split(path) do
      [] ->
        {:error, {:invalid_path, path}}

      path_splitted ->
        do_make_dir_recursive(sftp_channel_pid, path_splitted, options, [])
    end
  end

  # Walks the path components left to right; `acc` holds the already-visited
  # components in reverse order so the current prefix can be rebuilt cheaply.
  defp do_make_dir_recursive(_sftp_channel_pid, [], _options, _acc), do: :ok

  defp do_make_dir_recursive(sftp_channel_pid, [head | tail], options, acc) do
    path = Path.join(Enum.reverse([head | acc]))

    case :ssh_sftp.read_file_info(
           sftp_channel_pid,
           path,
           Keyword.get(options, :operation_timeout, @default_operation_timeout)
         ) do
      {:ok,
       {:file_info, _size, :directory, access, _atime, _mtime, _ctime, _mode, _links,
        _major_device, _minor_device, _inode, _uid, _gid}}
      when access in [:write, :read_write] ->
        # Directory already exists and we have right permissions, skip creation
        do_make_dir_recursive(sftp_channel_pid, tail, options, [head | acc])

      {:ok,
       {:file_info, _size, :directory, access, _atime, _mtime, _ctime, _mode, _links,
        _major_device, _minor_device, _inode, _uid, _gid}} ->
        # Directory already exists but we have invalid access mode, error
        {:error, {:invalid_access, path, access}}

      {:ok,
       {:file_info, _size, type, _access, _atime, _mtime, _ctime, _mode, _links, _major_device,
        _minor_device, _inode, _uid, _gid}} ->
        # Path component already exists but it is not a directory
        {:error, {:invalid_type, path, type}}

      {:error, :no_such_file} ->
        # There's no such directory, try to create it
        case :ssh_sftp.make_dir(
               sftp_channel_pid,
               path,
               Keyword.get(options, :operation_timeout, @default_operation_timeout)
             ) do
          :ok ->
            # We made it, recurse
            do_make_dir_recursive(sftp_channel_pid, tail, options, [head | acc])

          {:error, reason} ->
            # Directory creation failed, error
            {:error, {:make_dir, reason}}
        end

      {:error, other} ->
        # File info read failed, error. Include the path so the error shape
        # matches the documented `{:file_info, path, info}` format used by
        # the other functions in this module.
        {:error, {:file_info, path, other}}
    end
  end

  @doc """
  Recursively lists files in a given directory over existing SFTP
  channel.
  ## Arguments
  Expects the following arguments:
  * `sftp_channel_pid` - PID of already opened SFTP channel,
  * `path` - path to list, defaults to empty string, which will map into
  SFTP server's default directory,
  * `options` - additional options, see below.
  ## Options
  * `operation_timeout` - SFTP operation timeout (it is a timeout
  per each SFTP operation, not total timeout), defaults to 5000 ms,
  * `included_types` - which file types should be included in the
  result, defaults to `[:regular]`. See the `:file.file_info`
  typespec for list of all valid values,
  * `result_format` - can be one of `:path` or `:file_info`.
  If you pass `:path`, the result will be a list of strings containing
  file names.
  If you pass `:file_info`, the result will be a list of `{path, file_info}`
  tuples, where `file_info` will have the same format as
  `:file.file_info`. Please note that if you return `:skip_but_include`
  from the `iterate_callback` the `file_info` will be `:undefined`.
  Defaults to `:path`.
  * `recurse_callback` - optional function that will be called
  before recursing to the each subdirectory that is found. It will
  get one argument that is a path currently being evaluated and should
  return one of `:skip`, `:skip_but_include` or `:ok`.
  If it will return `:skip`, the whole tree, including the path passed
  as an argument to the function will be skipped and they won't be
  included in the final result.
  If it will return `:skip_but_include`, the underlying tree, except
  the path passed as an argument to the function will be skipped and
  won't be included in the final result but the path itself will,
  as long as its type is within included_types.
  If it will return `:ok`, it will recurse, and this is also
  the default behaviour if function is not passed.
  * `iterate_callback` - optional function that will be called
  before evaluating each file that is found whether it is a directory.
  It will get one argument that is a path currently being evaluated
  and should return one of `:skip`, `:skip_but_include` or `:ok`.
  If it will return `:skip`, the file will not be evaluated for its
  type and it will not be included in the final result.
  If it will return `:skip_but_include`, the file will not be evaluated
  for its type but it will be always included in the final result.
  If it will return `:ok`, it will evaluate file's type and try recurse
  if it's directory, and this is also the default behaviour if function
  is not passed.
  The `recurse_callback` and `iterate_callback` options are useful if you
  traverse a large tree and you can determine that only certain parts of it
  are meaningful solely from the paths or file names. For example if your
  directories are created programatically, and you know that files with
  the `.pdf` extension are always regular files and by no means they
  are directories you can instruct this function that it's pointless to
  read their file information. Thanks to this you can limit amount of
  calls to `:ssh_sftp.read_file_info/3` just by checking if given path
  has an appropriate suffix and returning the appropriate value.
  ## Limitations
  It will ignore symbolic links. They will not be followed.
  It will ignore directories without proper access and recurse only
  to these that provide at least read access.
  ## Return values
  On success returns `{:ok, list_of_files}`.
  On error returns `{:error, reason}`, where `reason` might be one
  of the following:
  * `{:invalid_type, path, type}` - given path is not a directory and
  its actual type is specified as `type`,
  * `{:invalid_access, path, access}` - given path is a directory, but
  its access is invalid and its actual access mode is specified
  as `access`.
  * `{:list_dir, path, info}` - `:ssh_sftp.list_dir/3` failed and `info`
  contains the underlying error returned from it,
  * `{:file_info, path, info}` - `:ssh_sftp.read_file_info/3` failed and
  `info` contains the underlying error returned from it.
  ## Notes
  ### Timeouts
  It was observed in the wild that underlying `:ssh_sftp.list_dir/3`
  and `:ssh_sftp.read_file_info/3` always returned `{:error, :timeout}`
  with some servers when SFTP version being used was greater than 3,
  at least with Elixir 1.7.4 and Erlang 21.0. If you encounter such
  issues try passing `{:sftp_vsn, 3}` option while creating a SFTP
  channel.
  """
  @spec list_dir_recursive(pid, Path.t(),
          operation_timeout: timeout,
          result_format: :path | :file_info,
          included_types: [:device | :directory | :other | :regular | :symlink | :undefined],
          recurse_callback: nil | (Path.t() -> :skip | :skip_but_include | :ok),
          iterate_callback: nil | (Path.t() -> :skip | :skip_but_include | :ok)
        ) :: {:ok, [] | [Path.t() | {Path.t(), :file.file_info()}]} | {:error, any}
  def list_dir_recursive(sftp_channel_pid, path \\ "", options \\ []) do
    case :ssh_sftp.read_file_info(
           sftp_channel_pid,
           path,
           Keyword.get(options, :operation_timeout, @default_operation_timeout)
         ) do
      {:ok,
       {:file_info, _size, :directory, access, _atime, _mtime, _ctime, _mode, _links,
        _major_device, _minor_device, _inode, _uid, _gid}}
      when access in [:read, :read_write] ->
        # Given path is a directory and we have right permissions, recurse
        do_list_dir_recursive(sftp_channel_pid, path, options, [])

      {:ok,
       {:file_info, _size, :directory, access, _atime, _mtime, _ctime, _mode, _links,
        _major_device, _minor_device, _inode, _uid, _gid}} ->
        # Given path is a directory but we do not have the right permissions, error
        {:error, {:invalid_access, path, access}}

      {:ok,
       {:file_info, _size, type, _access, _atime, _mtime, _ctime, _mode, _links, _major_device,
        _minor_device, _inode, _uid, _gid}} ->
        # Given path is not a directory, error
        {:error, {:invalid_type, path, type}}

      {:error, reason} ->
        {:error, {:file_info, path, reason}}
    end
  end

  # Lists a single directory and delegates per-entry handling to
  # do_list_dir_iterate/5; `acc` carries results found so far.
  defp do_list_dir_recursive(sftp_channel_pid, path, options, acc) do
    case :ssh_sftp.list_dir(
           sftp_channel_pid,
           path,
           Keyword.get(options, :operation_timeout, @default_operation_timeout)
         ) do
      {:ok, files} ->
        case do_list_dir_iterate(sftp_channel_pid, path, files, options, acc) do
          {:ok, files} ->
            {:ok, files}

          {:error, reason} ->
            {:error, reason}
        end

      {:error, reason} ->
        # List dir failed, error
        {:error, {:list_dir, path, reason}}
    end
  end

  defp do_list_dir_iterate(_sftp_channel_pid, _path, [], _options, acc) do
    {:ok, acc}
  end

  # :ssh_sftp.list_dir/3 returns charlists; skip the "." and ".." entries.
  defp do_list_dir_iterate(sftp_channel_pid, path, [head | tail], options, acc)
       when head in ['.', '..'] do
    do_list_dir_iterate(sftp_channel_pid, path, tail, options, acc)
  end

  defp do_list_dir_iterate(sftp_channel_pid, path, [head | tail], options, acc) do
    path_full = Path.join(path, head)
    included_types = Keyword.get(options, :included_types, [:regular])
    iterate_callback = Keyword.get(options, :iterate_callback, nil)
    recurse_callback = Keyword.get(options, :recurse_callback, nil)

    # Call iterate_callback function only once, if it's present and store the return value
    iterate_callback_result =
      if !is_nil(iterate_callback) do
        iterate_callback.(path_full)
      end

    # If we're allowed to read file info, do this
    if is_nil(iterate_callback) or iterate_callback_result == :ok do
      case :ssh_sftp.read_file_info(
             sftp_channel_pid,
             path_full,
             Keyword.get(options, :operation_timeout, @default_operation_timeout)
           ) do
        {:ok,
         {:file_info, _size, :directory, access, _atime, _mtime, _ctime, _mode, _links,
          _major_device, _minor_device, _inode, _uid, _gid} = file_info}
        when access in [:read, :read_write] ->
          # Directory already exists and we have right permissions
          # Determine what data should be added to the result
          result_item =
            case Keyword.get(options, :result_format, :path) do
              :path ->
                path_full

              :file_info ->
                {path_full, file_info}
            end

          # Store it if :directory was listed in the included_types
          acc =
            if :directory in included_types do
              [result_item | acc]
            else
              acc
            end

          # Call recurse_callback function only once, if it's present and store the return value
          recurse_callback_result =
            if !is_nil(recurse_callback) do
              recurse_callback.(path_full)
            end

          # If we're allowed to recurse, do this
          if is_nil(recurse_callback) or recurse_callback_result == :ok do
            case do_list_dir_recursive(sftp_channel_pid, path_full, options, acc) do
              {:ok, acc} ->
                do_list_dir_iterate(sftp_channel_pid, path, tail, options, acc)

              {:error, reason} ->
                {:error, reason}
            end
          else
            # If we're not allowed to recurse, honour instructions received from the recurse_callback function
            case recurse_callback_result do
              :skip_but_include ->
                do_list_dir_iterate(sftp_channel_pid, path, tail, options, [result_item | acc])

              :skip ->
                do_list_dir_iterate(sftp_channel_pid, path, tail, options, acc)
            end
          end

        {:ok,
         {:file_info, _size, type, access, _atime, _mtime, _ctime, _mode, _links, _major_device,
          _minor_device, _inode, _uid, _gid} = file_info}
        when access in [:read, :read_write] ->
          # We found a different file than a directory and it is readable
          # Determine what data should be added to the result
          result_item =
            case Keyword.get(options, :result_format, :path) do
              :path ->
                path_full

              :file_info ->
                {path_full, file_info}
            end

          # Add it to the result if its type was listed in the included_types
          acc =
            if type in included_types do
              [result_item | acc]
            else
              acc
            end

          # Proceed
          do_list_dir_iterate(sftp_channel_pid, path, tail, options, acc)

        {:ok,
         {:file_info, _size, _type, _access, _atime, _mtime, _ctime, _mode, _links, _major_device,
          _minor_device, _inode, _uid, _gid}} ->
          # We read something but we have no permissions, ignore that
          do_list_dir_iterate(sftp_channel_pid, path, tail, options, acc)

        {:error, reason} ->
          # File info read failed, error
          {:error, {:file_info, path_full, reason}}
      end
    else
      # If we're not allowed to read file info, honour instructions received from the iterate_callback function
      case iterate_callback_result do
        :skip_but_include ->
          # Determine what data should be added to the result
          result_item =
            case Keyword.get(options, :result_format, :path) do
              :path ->
                path_full

              :file_info ->
                # Since read_file_info was not called, the file info is unknown
                {path_full, :unknown}
            end

          do_list_dir_iterate(sftp_channel_pid, path, tail, options, [result_item | acc])

        :skip ->
          do_list_dir_iterate(sftp_channel_pid, path, tail, options, acc)
      end
    end
  end

  @doc """
  Recursively deletes a given directory over existing SFTP channel.
  ## Arguments
  Expects the following arguments:
  * `sftp_channel_pid` - PID of already opened SFTP channel,
  * `path` - path to delete,
  * `options` - additional options, see below.
  ## Options
  * `operation_timeout` - SFTP operation timeout (it is a timeout
  per each SFTP operation, not total timeout), defaults to 5000 ms.
  ## Limitations
  It will ignore symbolic links. They will not be followed.
  ## Return values
  On success returns `:ok`.
  On error returns `{:error, reason}`, where `reason` might be one
  of the following:
  * `{:invalid_type, path, type}` - given path is not a directory and
  its actual type is specified as `type`,
  * `{:invalid_access, path, access}` - given path is a directory, but
  its access is invalid and its actual access mode is specified
  as `access`,
  * `{:delete, path, info}` - failed to delete file at `path`,
  * `{:del_dir, path, info}` - failed to delete directory at `path`,
  * `{:list_dir, path, info}` - `:ssh_sftp.list_dir/3` failed and `info`
  contains the underlying error returned from it,
  * `{:file_info, path, info}` - `:ssh_sftp.read_file_info/3` failed and
  `info` contains the underlying error returned from it.
  ## Notes
  ### Timeouts
  It was observed in the wild that underlying `:ssh_sftp.list_dir/3`
  and `:ssh_sftp.read_file_info/3` always returned `{:error, :timeout}`
  with some servers when SFTP version being used was greater than 3,
  at least with Elixir 1.7.4 and Erlang 21.0. If you encounter such
  issues try passing `{:sftp_vsn, 3}` option while creating a SFTP
  channel.
  """
  # NOTE: the spec previously declared `{:ok, list}` as the success value,
  # but the function actually returns plain `:ok` (see both success paths
  # below); the spec now matches the implementation and the docs.
  @spec del_dir_recursive(pid, Path.t(), operation_timeout: timeout) :: :ok | {:error, any}
  def del_dir_recursive(sftp_channel_pid, path, options \\ []) do
    operation_timeout = Keyword.get(options, :operation_timeout, @default_operation_timeout)

    case :ssh_sftp.read_file_info(
           sftp_channel_pid,
           path,
           operation_timeout
         ) do
      {:ok,
       {:file_info, _size, :directory, access, _atime, _mtime, _ctime, _mode, _links,
        _major_device, _minor_device, _inode, _uid, _gid}}
      when access in [:write, :read_write] ->
        # Given path is a directory and we have right permissions, recurse
        case do_del_dir_recursive(sftp_channel_pid, path, options) do
          :ok ->
            # After recursion is finished, delete the directory
            case :ssh_sftp.del_dir(sftp_channel_pid, path, operation_timeout) do
              :ok ->
                :ok

              {:error, reason} ->
                {:error, {:del_dir, path, reason}}
            end

          {:error, reason} ->
            {:error, reason}
        end

      {:ok,
       {:file_info, _size, :directory, access, _atime, _mtime, _ctime, _mode, _links,
        _major_device, _minor_device, _inode, _uid, _gid}} ->
        # Given path is a directory but we do not have the right permissions, error
        {:error, {:invalid_access, path, access}}

      {:ok,
       {:file_info, _size, type, _access, _atime, _mtime, _ctime, _mode, _links, _major_device,
        _minor_device, _inode, _uid, _gid}} ->
        # Given path is not a directory, error
        {:error, {:invalid_type, path, type}}

      {:error, reason} ->
        {:error, {:file_info, path, reason}}
    end
  end

  # Lists a single directory and delegates per-entry deletion to
  # do_del_dir_iterate/4; directories themselves are removed by the caller
  # once their contents are gone.
  defp do_del_dir_recursive(sftp_channel_pid, path, options) do
    case :ssh_sftp.list_dir(
           sftp_channel_pid,
           path,
           Keyword.get(options, :operation_timeout, @default_operation_timeout)
         ) do
      {:ok, files} ->
        case do_del_dir_iterate(sftp_channel_pid, path, files, options) do
          :ok ->
            :ok

          {:error, reason} ->
            {:error, reason}
        end

      {:error, reason} ->
        # List dir failed, error
        {:error, {:list_dir, path, reason}}
    end
  end

  defp do_del_dir_iterate(_sftp_channel_pid, _path, [], _options) do
    :ok
  end

  # :ssh_sftp.list_dir/3 returns charlists; skip the "." and ".." entries.
  defp do_del_dir_iterate(sftp_channel_pid, path, [head | tail], options)
       when head in ['.', '..'] do
    do_del_dir_iterate(sftp_channel_pid, path, tail, options)
  end

  defp do_del_dir_iterate(sftp_channel_pid, path, [head | tail], options) do
    path_full = Path.join(path, head)
    operation_timeout = Keyword.get(options, :operation_timeout, @default_operation_timeout)

    case :ssh_sftp.read_file_info(
           sftp_channel_pid,
           path_full,
           operation_timeout
         ) do
      {:ok,
       {:file_info, _size, :directory, access, _atime, _mtime, _ctime, _mode, _links,
        _major_device, _minor_device, _inode, _uid, _gid}}
      when access in [:write, :read_write] ->
        # Directory already exists and we have right permissions, recurse
        case do_del_dir_recursive(sftp_channel_pid, path_full, options) do
          :ok ->
            # After recursion is finished, delete the directory
            case :ssh_sftp.del_dir(sftp_channel_pid, path_full, operation_timeout) do
              :ok ->
                do_del_dir_iterate(sftp_channel_pid, path, tail, options)

              {:error, reason} ->
                {:error, {:del_dir, path_full, reason}}
            end

          {:error, reason} ->
            {:error, reason}
        end

      {:ok,
       {:file_info, _size, _type, access, _atime, _mtime, _ctime, _mode, _links, _major_device,
        _minor_device, _inode, _uid, _gid}}
      when access in [:write, :read_write] ->
        # We found a different file than a directory and it is writable, try to delete it
        case :ssh_sftp.delete(sftp_channel_pid, path_full, operation_timeout) do
          :ok ->
            # Deleted, proceed
            do_del_dir_iterate(sftp_channel_pid, path, tail, options)

          {:error, reason} ->
            {:error, {:delete, path_full, reason}}
        end

      {:ok,
       {:file_info, _size, _type, access, _atime, _mtime, _ctime, _mode, _links, _major_device,
        _minor_device, _inode, _uid, _gid}} ->
        # We read something but we have no permissions, error
        {:error, {:invalid_access, path_full, access}}

      {:error, reason} ->
        # File info read failed, error
        {:error, {:file_info, path_full, reason}}
    end
  end
end
|
lib/sftp_toolkit/recursive.ex
| 0.884794
| 0.625638
|
recursive.ex
|
starcoder
|
defmodule Jumubase.JumuParams do
  import Jumubase.Gettext

  @moduledoc """
  Defines various params inherent to the Jumu institution.
  """

  @doc """
  Returns the year for a given season.
  """
  def year(season), do: season + 1963

  @doc """
  Returns the season for a given year.
  """
  def season(year), do: year - year(0)

  @doc """
  Returns the available competition rounds.
  """
  def rounds do
    # Round 0 is for Kimu contests
    0..2
  end

  @doc """
  Returns the available groupings of hosts & contests.
  """
  def groupings, do: ["1", "2", "3"]

  @doc """
  Returns all possible user roles.
  """
  def user_roles do
    [
      # A "regular" user who organizes contests (typically RW) locally
      "local-organizer",
      # A user organizing LW (2nd round) contests in various countries
      "global-organizer",
      # A user who can view, but not change or delete anything
      "observer",
      # An omnipotent being
      "admin"
    ]
  end

  @doc """
  Returns all possible category genres.
  """
  def genres, do: ~w(classical popular kimu)

  @doc """
  Returns all possible category types.
  """
  def category_types, do: ~w(solo ensemble solo_or_ensemble)

  @doc """
  Returns all possible category groups.
  """
  def category_groups do
    [
      "kimu",
      "piano",
      "strings",
      "wind",
      "plucked",
      "classical_vocals",
      "accordion",
      "harp",
      "organ",
      "percussion",
      "special_lineups",
      "pop_vocals",
      "pop_instrumental"
    ]
  end

  @doc """
  Returns all possible participant roles.
  """
  def participant_roles, do: ~w(soloist accompanist ensemblist)

  @doc """
  Returns all possible piece epochs.
  """
  def epochs, do: ["trad", "a", "b", "c", "d", "e", "f"]

  @doc """
  Returns a description for the given epoch.
  """
  def epoch_description("trad"), do: dgettext("epochs", "Traditional / Folk Music")
  def epoch_description("a"), do: dgettext("epochs", "Renaissance, Early Baroque")
  def epoch_description("b"), do: dgettext("epochs", "Baroque")
  def epoch_description("c"), do: dgettext("epochs", "Early Classical, Classical")
  def epoch_description("d"), do: dgettext("epochs", "Romantic, Impressionist")
  def epoch_description("e"), do: dgettext("epochs", "Modern Classical, Jazz, Pop")
  def epoch_description("f"), do: dgettext("epochs", "Neue Musik")

  @doc """
  Returns the range of possible point values.
  """
  def points, do: 0..25

  @doc """
  Returns the range of points required to advance to the next round.
  """
  def advancing_point_range, do: 23..25

  @doc """
  Returns the range of points required for a WESPE nomination.
  """
  def wespe_nomination_point_range, do: 23..25
end
|
lib/jumubase/jumu_params.ex
| 0.829561
| 0.500122
|
jumu_params.ex
|
starcoder
|
defmodule ABI.TypeDecoder do
@moduledoc """
`ABI.TypeDecoder` is responsible for decoding types to the format
expected by Solidity. We generally take a function selector and binary
data and decode that into the original arguments according to the
specification.
"""
@doc """
Decodes the given data based on the function selector.
Note, we don't currently try to guess the function name?
## Examples
iex> "00000000000000000000000000000000000000000000000000000000000000450000000000000000000000000000000000000000000000000000000000000001"
...> |> Base.decode16!(case: :lower)
...> |> ABI.TypeDecoder.decode(
...> %ABI.FunctionSelector{
...> function: "baz",
...> types: [
...> {:uint, 32},
...> :bool
...> ],
...> returns: :bool
...> }
...> )
[69, true]
iex> "000000000000000000000000000000000000000000000000000000000000000b68656c6c6f20776f726c64000000000000000000000000000000000000000000"
...> |> Base.decode16!(case: :lower)
...> |> ABI.TypeDecoder.decode(
...> %ABI.FunctionSelector{
...> function: nil,
...> types: [
...> :string
...> ]
...> }
...> )
["hello world"]
iex> "00000000000000000000000000000000000000000000000000000000000000110000000000000000000000000000000000000000000000000000000000000001"
...> |> Base.decode16!(case: :lower)
...> |> ABI.TypeDecoder.decode(
...> %ABI.FunctionSelector{
...> function: nil,
...> types: [
...> {:tuple, [{:uint, 32}, :bool]}
...> ]
...> }
...> )
[{17, true}]
iex> "00000000000000000000000000000000000000000000000000000000000000110000000000000000000000000000000000000000000000000000000000000001"
...> |> Base.decode16!(case: :lower)
...> |> ABI.TypeDecoder.decode(
...> %ABI.FunctionSelector{
...> function: nil,
...> types: [
...> {:array, {:uint, 32}, 2}
...> ]
...> }
...> )
[[17, 1]]
iex> "000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000110000000000000000000000000000000000000000000000000000000000000001"
...> |> Base.decode16!(case: :lower)
...> |> ABI.TypeDecoder.decode(
...> %ABI.FunctionSelector{
...> function: nil,
...> types: [
...> {:array, {:uint, 32}}
...> ]
...> }
...> )
[[17, 1]]
iex> "0000000000000000000000000000000000000000000000000000000000000011000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000011020000000000000000000000000000000000000000000000000000000000000"
...> |> Base.decode16!(case: :lower)
...> |> ABI.TypeDecoder.decode(
...> %ABI.FunctionSelector{
...> function: nil,
...> types: [
...> {:array, {:uint, 32}, 2},
...> :bool,
...> {:bytes, 2}
...> ]
...> }
...> )
[[17, 1], true, <<16, 32>>]
iex> "000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000007617765736f6d6500000000000000000000000000000000000000000000000000"
...> |> Base.decode16!(case: :lower)
...> |> ABI.TypeDecoder.decode(
...> %ABI.FunctionSelector{
...> function: nil,
...> types: [
...> {:tuple, [:string, :bool]}
...> ]
...> }
...> )
[{"awesome", true}]
iex> "00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000"
...> |> Base.decode16!(case: :lower)
...> |> ABI.TypeDecoder.decode(
...> %ABI.FunctionSelector{
...> function: nil,
...> types: [
...> {:tuple, [{:array, :address}]}
...> ]
...> }
...> )
[{[]}]
iex> "00000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000c556e617574686f72697a656400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000204a2bf2ff0a4eaf1890c8d8679eaa446fb852c4000000000000000000000000861d9af488d5fa485bb08ab6912fff4f7450849a"
...> |> Base.decode16!(case: :lower)
...> |> ABI.TypeDecoder.decode(
...> %ABI.FunctionSelector{
...> function: nil,
...> types: [{:tuple,[
...> :string,
...> {:array, {:uint, 256}}
...> ]}]
...> }
...> )
[{
"Unauthorized",
[
184341788326688649239867304918349890235378717380,
765664983403968947098136133435535343021479462042,
]
}]
"""
def decode(encoded_data, function_selector) do
decode_raw(encoded_data, function_selector.types)
end
@doc """
Similar to `ABI.TypeDecoder.decode/2` except accepts a list of types instead
of a function selector.
## Examples
iex> "000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000007617765736f6d6500000000000000000000000000000000000000000000000000"
...> |> Base.decode16!(case: :lower)
...> |> ABI.TypeDecoder.decode_raw([{:tuple, [:string, :bool]}])
[{"awesome", true}]
"""
def decode_raw(encoded_data, types) do
do_decode(types, encoded_data, [])
end
@spec do_decode([ABI.FunctionSelector.type], binary(), [any()]) :: [any()]
defp do_decode([], bin, _) when byte_size(bin) > 0, do: raise("Found extra binary data: #{inspect bin}")
defp do_decode([], _, acc), do: Enum.reverse(acc)
defp do_decode([type|remaining_types], data, acc) do
{decoded, remaining_data} = decode_type(type, data)
do_decode(remaining_types, remaining_data, [decoded | acc])
end
@spec decode_type(ABI.FunctionSelector.type, binary()) :: {any(), binary()}
defp decode_type({:uint, size_in_bits}, data) do
decode_uint(data, size_in_bits)
end
defp decode_type(:address, data), do: decode_bytes(data, 20, :left)
defp decode_type(:bool, data) do
{encoded_value, rest} = decode_uint(data, 8)
value = case encoded_value do
1 -> true
0 -> false
end
{value, rest}
end
defp decode_type(:string, data) do
<<_index :: integer-size(256), remaining_data :: binary>> = data
<<string_length_in_bytes :: integer-size(256), string_data :: binary>> = remaining_data
padding_length_in_bytes = 32 - Integer.mod(string_length_in_bytes, 32)
<<string :: bytes-size(string_length_in_bytes), _padding :: bytes-size(padding_length_in_bytes), rest :: binary>> = string_data
{nul_terminate_string(string), rest}
end
defp decode_type(:bytes, data) do
{byte_size, rest} = decode_uint(data, 256)
decode_bytes(rest, byte_size, :right)
end
defp decode_type({:bytes, 0}, data), do: {<<>>, data}
defp decode_type({:bytes, size}, data) when size > 0 and size <= 32 do
decode_bytes(data, size, :right)
end
defp decode_type({:array, type}, data) do
{element_count, rest} = decode_uint(data, 256)
decode_type({:array, type, element_count}, rest)
end
defp decode_type({:array, _type, 0}, data), do: {[], data}
defp decode_type({:array, type, element_count}, data) do
repeated_type = Enum.map(1..element_count, fn _ -> type end)
{tuple, rest} = decode_type({:tuple, repeated_type}, data)
{tuple |> Tuple.to_list, rest}
end
defp decode_type({:tuple, types}, starting_data) do
# First pass, decode static types
{elements, rest} = Enum.reduce(types, {[], starting_data}, fn type, {elements, data} ->
if ABI.FunctionSelector.is_dynamic?(type) do
{tail_position, rest} = decode_type({:uint, 256}, data)
{[{:dynamic, type, tail_position}|elements], rest}
else
{el, rest} = decode_type(type, data)
{[el|elements], rest}
end
end)
# Second pass, decode dynamic types
{elements, rest} = Enum.reduce(elements |> Enum.reverse, {[], rest}, fn el, {elements, data} ->
case el do
{:dynamic, type, _tail_position} ->
{el, rest} = decode_type(type, data)
{[el|elements], rest}
_ ->
{[el|elements], data}
end
end)
{elements |> Enum.reverse |> List.to_tuple, rest}
end
defp decode_type(els, _) do
raise "Unsupported decoding type: #{inspect els}"
end
@spec decode_uint(binary(), integer()) :: {integer(), binary()}
defp decode_uint(data, size_in_bits) do
# TODO: Create `left_pad` repo, err, add to `ExthCrypto.Math`
total_bit_size = size_in_bits + ExthCrypto.Math.mod(256 - size_in_bits, 256)
<<value::integer-size(total_bit_size), rest::binary>> = data
{value, rest}
end
@spec decode_bytes(binary(), integer(), atom()) :: {binary(), binary()}
# Reads `size_in_bytes` bytes of content that were padded out to whole
# 32-byte ABI words, skipping the zero padding on the given side.
def decode_bytes(data, size_in_bytes, padding_direction) do
  padding_bytes = ExthCrypto.Math.mod(32 - size_in_bytes, 32)

  case padding_direction do
    :left ->
      <<_skipped::binary-size(padding_bytes), value::binary-size(size_in_bytes),
        remainder::binary>> = data

      {value, remainder}

    :right ->
      <<value::binary-size(size_in_bytes), _skipped::binary-size(padding_bytes),
        remainder::binary>> = data

      {value, remainder}
  end
end
# Flattens `raw_string` (iodata) to a binary and truncates it at the first
# NUL byte; a NUL-free string is returned unchanged.
defp nul_terminate_string(raw_string) do
  raw_string
  |> :erlang.iolist_to_binary()
  |> :binary.split(<<0>>)
  |> hd()
end
end
|
lib/abi/type_decoder.ex
| 0.915374
| 0.534248
|
type_decoder.ex
|
starcoder
|
defmodule Finitomata.PlantUML do
  # Parser for a minimal subset of PlantUML state-diagram syntax. Each input
  # line `from --> to : event` becomes a `{:transition, [from, to, event]}`
  # tag which is then validated/converted by `Finitomata.validate/1`.
  @moduledoc false

  import NimbleParsec

  alias Finitomata.Transition

  use Boundary, deps: [Finitomata], exports: []

  # Characters allowed in names after the mandatory leading lowercase letter.
  @alphanumeric [?a..?z, ?A..?Z, ?0..?9, ?_]

  # One or more spaces, dropped from the parser output.
  blankspace = ignore(ascii_string([?\s], min: 1))
  transition_op = string("-->")
  event_op = string(":")

  # An event (also a plain state name): a lowercase letter followed by
  # optional alphanumerics/underscores, collapsed into a single binary.
  event =
    ascii_char([?a..?z])
    |> optional(ascii_string(@alphanumeric, min: 1))
    |> reduce({IO, :iodata_to_binary, []})

  # A state is either the PlantUML entry/exit marker `[*]` or a plain name.
  state = choice([string("[*]"), event])

  # One full `from --> to : event` line, terminated by newline(s) or EOS;
  # only the two states and the event survive into the :transition tag.
  plant_line =
    optional(blankspace)
    |> concat(state)
    |> ignore(blankspace)
    |> ignore(transition_op)
    |> ignore(blankspace)
    |> concat(state)
    |> ignore(blankspace)
    |> ignore(event_op)
    |> ignore(blankspace)
    |> concat(event)
    |> optional(blankspace)
    |> ignore(choice([times(string("\n"), min: 1), eos()]))
    |> tag(:transition)

  # Any other (non-conforming) line up to its newline; `abort/5` turns it
  # into a descriptive parse error instead of silently skipping it.
  malformed =
    optional(utf8_string([not: ?\n], min: 1))
    |> string("\n")
    |> pre_traverse(:abort)

  @type parse_error ::
          {:error, String.t(), binary(), map(), {pos_integer(), pos_integer()}, pos_integer()}

  @doc ~S"""
  Parses a single PlantUML transition line.

      iex> {:ok, result, _, _, _, _} = Finitomata.PlantUML.transition("state1 --> state2 : succeeded")
      iex> result
      [transition: ["state1", "state2", "succeeded"]]

      iex> {:error, message, _, _, _, _} = Finitomata.PlantUML.transition("state1 --> State2 : succeeded")
      iex> String.slice(message, 0..14)
      "expected string"
  """
  defparsec :transition, plant_line

  @doc ~S"""
  Parses a whole FSM description, one transition per line.

      iex> {:ok, result, _, _, _, _} = Finitomata.PlantUML.fsm("s1 --> s2 : ok\ns2 --> [*] : ko")
      iex> result
      [transition: ["s1", "s2", "ok"], transition: ["s2", "[*]", "ko"]]
  """
  defparsec :fsm, times(choice([plant_line, malformed]), min: 1)

  @doc ~S"""
  Validates the parsed transitions (e.g. requires an initial `[*]` state).

      iex> {:ok, result, _, _, _, _} = Finitomata.PlantUML.fsm("s1 --> s2 : ok\ns2 --> [*] : ko")
      ...> Finitomata.PlantUML.validate(result)
      {:error, :initial_state}

      iex> {:ok, result, _, _, _, _} = Finitomata.PlantUML.fsm("[*] --> s1 : foo\ns1 --> s2 : ok\ns2 --> [*] : ko")
      ...> Finitomata.PlantUML.validate(result)
      {:ok,
       [
         %Finitomata.Transition{event: :foo, from: :*, to: :s1},
         %Finitomata.Transition{event: :ok, from: :s1, to: :s2},
         %Finitomata.Transition{event: :ko, from: :s2, to: :*}
       ]}
  """
  @spec validate([{:transition, [binary()]}]) ::
          {:ok, [Transition.t()]} | {:error, Finitomata.validation_error()}
  def validate(parsed), do: Finitomata.validate(parsed)

  @doc ~S"""
  Parses and validates a full FSM description in one go.

      iex> Finitomata.PlantUML.parse("[*] --> s1 : ok\ns2 --> [*] : ko")
      {:error, :orphan_from_state}

      iex> Finitomata.PlantUML.parse("[*] --> s1 : foo\ns1 --> s2 : ok\ns2 --> [*] : ko")
      {:ok,
       [
         %Finitomata.Transition{event: :foo, from: :*, to: :s1},
         %Finitomata.Transition{event: :ok, from: :s1, to: :s2},
         %Finitomata.Transition{event: :ko, from: :s2, to: :*}
       ]}
  """
  @spec parse(binary()) ::
          {:ok, [Transition.t()]} | {:error, Finitomata.validation_error()} | parse_error()
  def parse(input) do
    case fsm(input) do
      {:ok, result, _, _, _, _} ->
        validate(result)

      # `abort/5` smuggles the error location through the message as
      # inspect-ed keyword data separated by "|||"; unpack it back into a
      # standard NimbleParsec-shaped error tuple.
      {:error, "[line: " <> _ = msg, _rest, context, _, _} ->
        [numbers, msg] = String.split(msg, "|||")
        {numbers, []} = Code.eval_string(numbers)

        {:error, msg, numbers[:rest], context, {numbers[:line], numbers[:column]},
         numbers[:offset]}

      error ->
        error
    end
  end

  @doc false
  # Wraps a bare FSM description into a complete PlantUML document.
  @spec lint(binary()) :: binary()
  def lint(input), do: "@startuml\n\n" <> input <> "\n@enduml"

  # `pre_traverse` callback for `malformed`: rebuilds the offending line and
  # encodes the location metadata into the error message (see `parse/1`).
  @spec abort(
          String.t(),
          [String.t()],
          map(),
          {non_neg_integer, non_neg_integer},
          non_neg_integer
        ) :: {:error, binary()}
  defp abort(rest, content, _context, {line, column}, offset) do
    rest = content |> Enum.reverse() |> Enum.join() |> Kernel.<>(rest)
    meta = inspect(line: line, column: column, offset: offset, rest: rest)
    {:error, meta <> "|||malformed FSM transition, expected `from --> to : event`"}
  end
end
|
lib/finitomata/parsers/plant_uml.ex
| 0.712432
| 0.474266
|
plant_uml.ex
|
starcoder
|
defmodule Dust.Requests.Proxy do
  @moduledoc """
  Proxy configuration struct.

  Proxy address can start with `http/s` or `socks5`. It is also possible to
  only specify the `address` field; when `Proxy.get_config/1` is called, the
  URI is parsed to figure out the proxy type and auth details, and default
  values are applied.

  ```elixir
  %Proxy{
    address: "socks5://user:pass@awesome.host:port",
    username: "user",
    password: "pass"
  }
  ```
  """
  use TypedStruct

  alias __MODULE__

  @typedoc "Proxy"
  typedstruct do
    field :address, String.t()
    field :username, String.t()
    field :password, String.t()
  end

  @doc """
  Prepare proxy configuration for `HTTPoison`.

  Accepts:
    * `nil` — no proxy, returns `[]`,
    * a keyword list — assumed to be ready-made options, passed through,
    * a `%Proxy{}` — `:address` is parsed to derive type and credentials.
  """
  # FIX: @doc/@spec used to sit between the clauses of get_config/1 (attached
  # to a non-first clause), which the compiler warns about and which hides the
  # docs for the nil/list clauses. They now precede the first clause.
  @spec get_config(Proxy.t() | Keyword.t() | nil) :: Keyword.t()
  def get_config(nil), do: []

  def get_config(proxy) when is_list(proxy), do: proxy

  def get_config(%Proxy{} = proxy) do
    prepare_proxy(URI.parse(proxy.address), proxy)
  end

  # SOCKS5 proxies use the `{:socks5, host, port}` tuple plus socks5_* auth.
  defp prepare_proxy(%URI{scheme: "socks5"} = uri, %Proxy{} = proxy) do
    auth = get_auth(:socks, proxy, uri)
    Keyword.put(auth, :proxy, {:socks5, to_charlist(uri.host), uri.port})
  end

  # Any other scheme is treated as an HTTP(S) proxy.
  # NOTE(review): unlike the socks5 branch, the port from `uri` is dropped
  # here — confirm whether `proxy: host` without a port is intended.
  defp prepare_proxy(%URI{} = uri, %Proxy{} = proxy) do
    auth = get_auth(:http, proxy, uri)
    Keyword.put(auth, :proxy, to_charlist(uri.host))
  end

  # Builds the scheme-specific auth options from the resolved credentials;
  # returns [] when no credentials are available.
  defp get_auth(:socks, %Proxy{} = proxy, %URI{} = uri) do
    case credentials(proxy, uri) do
      nil -> []
      {username, password} -> [socks5_user: username, socks5_pass: password]
    end
  end

  defp get_auth(:http, %Proxy{} = proxy, %URI{} = uri) do
    case credentials(proxy, uri) do
      nil -> []
      {username, password} -> [proxy_auth: {username, password}]
    end
  end

  # Resolves the credential pair: explicit struct fields win, otherwise fall
  # back to the `user:pass` userinfo embedded in the address, if any.
  defp credentials(%Proxy{username: username, password: password}, %URI{userinfo: userinfo}) do
    cond do
      username && password ->
        {username, password}

      userinfo ->
        # NOTE(review): raises a MatchError when the password itself contains
        # a colon — confirm that cannot occur for supported proxies.
        [user, pass] = String.split(userinfo, ":")
        {user, pass}

      true ->
        nil
    end
  end
end
|
lib/dust/requests/proxy.ex
| 0.740831
| 0.631779
|
proxy.ex
|
starcoder
|
defmodule Rir.Api do
  @moduledoc """
  Functions that update a given context with datasets by querying the RIPEstat API

  Notes:
  - AS nrs are strings, without the AS prefix, e.g. AS42 -> "42"
  - Each API endpoint has a map stored under its own key in the context
  - Each API call has its results stored under the relevant key(s)
  - API results themselves, are always represented as a map
  """

  alias Rir.Stat

  # Helpers

  # todo
  # - [ ] decoders must check the version of the reply given
  # - [ ] decoders must check call status

  # Each decode/1 clause turns the raw `Rir.Stat.get/1` reply for one API
  # endpoint into a plain map; the catch-all clauses at the bottom surface
  # missing decoders and transport errors as `%{error: ..., call: ...}` maps.
  @spec decode(tuple) :: map
  defp decode({:ok, {%{name: "announced-prefixes", status: :ok}, data}}) do
    # Announced-prefixes is a list of prefixes
    %{prefixes: data["prefixes"] |> Enum.map(&Map.get(&1, "prefix"))}
  end

  defp decode({:ok, {%{name: "as-routing-consistency", status: :ok}, data}}) do
    # notes
    # - irr_sources is a "-" and not a list, if the prefix is not in whois
    # AS-routing-consistency
    # peers => %{peer => {:imports, bgp?, whois?, :exports, bgp?, whois?}}
    # prefixes => %{prefix => {bgp?, whois?, authorities}
    prefixes = map_tuples(data["prefixes"], ["prefix", "in_bgp", "in_whois", "irr_sources"])
    exports = map_tuples(data["exports"], ["peer", "in_bgp", "in_whois"])
    imports = map_tuples(data["imports"], ["peer", "in_bgp", "in_whois"])

    # NOTE(review): the merge fun only runs for peers present in BOTH maps;
    # a peer seen only in imports or only in exports keeps its bare
    # `{bgp?, whois?}` tuple — confirm downstream handles both shapes.
    peers =
      Map.merge(imports, exports, fn _peer, {a, b}, {c, d} -> {:imports, a, b, :exports, c, d} end)

    %{prefixes: prefixes, peers: peers}
  end

  defp decode({:ok, {%{name: "as-overview", status: :ok}, data}}) do
    # As-overiew: passed through untouched.
    data
  end

  defp decode({:ok, {%{name: "bgp-state", status: :ok}, data}}) do
    # Bgp-state
    # Maps each announced prefix to the unique ASNs seen second-to-last on
    # its AS paths (the upstream neighbors of the origin; for a length-1
    # path this is the origin itself).
    data["bgp_state"]
    |> list_tuples(["target_prefix", "path"])
    |> Enum.map(fn {pfx, as_path} -> {pfx, Enum.take(as_path, -2) |> hd()} end)
    |> Enum.uniq()
    |> Enum.reduce(%{}, fn {pfx, asn}, acc ->
      Map.update(acc, pfx, [asn], fn asns -> [asn | asns] end)
    end)
  end

  defp decode({:ok, {%{name: "network-info", status: :ok}, data}}) do
    # Network-info: `asn` is just the first entry of `asns`.
    asn = data["asns"] |> List.first()
    %{asn: asn, asns: data["asns"], prefix: data["prefix"]}
  end

  defp decode({:ok, {%{name: "ris-prefixes", status: :ok}, data}}) do
    # Ris-prefixes: merge v4 and v6 lists per category.
    prefixes = data["prefixes"]

    %{
      originating: prefixes["v4"]["originating"] ++ prefixes["v6"]["originating"],
      transiting: prefixes["v4"]["transiting"] ++ prefixes["v6"]["transiting"]
    }
  end

  defp decode({:ok, {%{name: "rpki-validation", status: :ok}, data}}) do
    # Rpki-validation
    with status <- data["status"],
         roas <- data["validating_roas"] do
      %{
        status: Stat.to_atom(status),
        roas: list_tuples(roas, ["origin", "prefix", "max_length", "validity"])
      }
    end
  end

  defp decode({:ok, {%{name: "whois", status: :ok}, data}}) do
    # Whois: each record becomes a list of {key, value} tuples.
    records = Enum.map(data["records"], fn l -> list_tuples(l, ["key", "value"]) end)
    irr = Enum.map(data["irr_records"], fn l -> list_tuples(l, ["key", "value"]) end)

    # NOTE(review): `autorities` is a misspelling of "authorities", but it is
    # the public key of this result map (and documented in `whois/2`) —
    # renaming it requires a coordinated change with all callers.
    %{
      autorities: data["authorities"],
      records: records,
      irr: irr
    }
  end

  # Successful call for an endpoint without a decode clause above.
  defp decode({:ok, {%{name: name, status: :ok} = call, _data}}),
    # Missing decode handler
    do: %{call: call, error: "missing Rir.Api.decode/2 for api endpoint #{inspect(name)}"}

  # Transport or API-level failure.
  defp decode({:error, {call, reason}}),
    # Api Error
    do: %{error: reason, call: call}

  defp list_tuples(list, keys) do
    # turn a list of maps, into a list of tuples for selected keys
    list
    |> Enum.map(fn m -> for(k <- keys, do: Map.get(m, k)) end)
    |> Enum.map(fn l -> List.to_tuple(l) end)
  end

  defp map_tuples(list, [primary | keys]) do
    # turn a list of maps, into a map of tuples
    # - primary is the unique key, different for each map
    # - keys is the list of keys whose values are presented as a tuple
    list
    |> Enum.map(fn m -> {Map.get(m, primary), for(key <- keys, do: Map.get(m, key))} end)
    |> Enum.map(fn {k, l} -> {k, List.to_tuple(l)} end)
    |> Enum.into(%{})
  end

  defp store(data, ctx, api_call, resource) do
    # store resulting data in context under resource for given API endpoint name
    Map.get(ctx, api_call, %{})
    |> Map.put(resource, data)
    |> then(fn updated -> Map.put(ctx, api_call, updated) end)
  end

  # API

  @doc """
  Stores the [announced](https://stat.ripe.net/docs/02.data-api/announced-prefixes.html)
  prefixes for given `asn`, under `ctx.announced["asn"].prefixes` as a map
  (with only one key).

  ```elixir
  %{ announced: %{
       "asn" => %{
         prefixes: ["prefix1", "prefix2", ..]
       }
  }
  ```
  """
  @spec announced(map, binary) :: map
  def announced(ctx, asn) do
    Stat.url("announced-prefixes", resource: asn)
    |> Stat.get()
    |> decode()
    |> store(ctx, :announced, asn)
  end

  @doc """
  Stores the [as-overview](https://stat.ripe.net/docs/02.data-api/as-overview.html)
  for given `asn`, under `ctx.as_overview["asn"]` as a map.

  ```
  %{ as_overview: %{
      "asn" => %{
        "announced" => boolean,
        "block" => %{
          "desc" => "...",
          "name" => "...",
          "resource" => "xxx-yyy"
        },
        "holder" => "name of organisation",
        "resource" => "number",
        "type" => "as"
      }
  }}
  ```
  """
  @spec as_overview(map, binary) :: map
  def as_overview(ctx, asn) do
    Stat.url("as-overview", resource: asn)
    |> Stat.get()
    |> decode()
    |> store(ctx, :as_overview, asn)
  end

  @doc """
  Stores the [bgp state](https://stat.ripe.net/docs/02.data-api/bgp-state.html)
  results under the `:bgp_state` key in given `ctx` for given `resource`.

  The results are processed into a map where the list of upstream neighbors seen
  in BGP are stored under the `prefix` key.
  """
  @spec bgp_state(map, binary) :: map
  def bgp_state(ctx, resource) do
    Stat.url("bgp-state", resource: resource)
    |> Stat.get()
    |> decode()
    |> store(ctx, :bgp_state, resource)
  end

  @doc """
  Stores the
  [as-routing-consistency](https://stat.ripe.net/docs/02.data-api/as-routing-consistency.html)
  for given `asn`, under `ctx.consistency["asn"]` as a map.

  ```
  %{
    consistency: %{
      "asn" => %{
        peers: %{
          asn1 => {:imports, bgp?, whois?, :exports, bgp?, whois?},
          asn2 => {:imports, bgp?, whois?, :exports, bgp?, whois?},
          ...
        },
        prefixes: %{
          "prefix/len" => {bgp?, whois?, ["authority", ...]},
          ...
        }
      }
    }
  }
  ```
  """
  @spec consistency(map, binary) :: map
  def consistency(ctx, asn) do
    # This endpoint is flaky, hence the retries.
    Stat.url("as-routing-consistency", resource: asn)
    |> Stat.get(retry: 4)
    |> decode()
    |> store(ctx, :consistency, asn)
  end

  @doc """
  Stores the
  [network-info](https://stat.ripe.net/docs/02.data-api/network-info.html) for
  given `prefix` under `ctx.network["prefix"]` as a map.

  ```
  %{
    network: %{
      "prefix" => %{asn: "number", asns: ["number", ..], prefix: "matching-prefix"}
    }
  }
  ```

  The `prefix` given can be an address or a real prefix and the `matching-prefix`
  is the most specific match found.

  Note that the `asn` field in the map is just the first "asn" from the list of
  `asns` returned.
  """
  @spec network(map, binary) :: map
  def network(ctx, prefix) do
    Stat.url("network-info", resource: prefix)
    |> Stat.get()
    |> decode()
    |> store(ctx, :network, prefix)
  end

  @doc """
  Stores the [ris-prefixes]() for given `asn` under `:ris_prefixes`
  in given `ctx`.
  """
  @spec ris_prefixes(map, binary) :: map
  def ris_prefixes(ctx, asn) do
    Stat.url("ris-prefixes", resource: asn, list_prefixes: "true")
    |> Stat.get()
    |> decode()
    |> store(ctx, :ris_prefixes, asn)
  end

  @doc """
  Stores the
  [rpki-validation](https://stat.ripe.net/docs/02.data-api/rpki-validation.html)
  status for the given `asn` and `prefix` under `ctx.roa[{asn, prefix}]` as a
  map.

  ```elixir
  %{
    roa: %{
      {"asn", "prefix"} => %{
        roas: [{"asn", "matching-prefix", max_len, "status"}],
        status: :valid | :invalid
      }
    }
  }
  ```

  Where the "status" string can be:
  - "valid"
  - "invalid_as"
  - "invalid_len"
  - "unknown"
  """
  @spec roa(map, binary, binary) :: map
  def roa(ctx, asn, prefix) do
    Stat.url("rpki-validation", resource: asn, prefix: prefix)
    |> Stat.get()
    |> decode()
    |> store(ctx, :roa, {asn, prefix})
  end

  @doc """
  Stores the [whois](https://stat.ripe.net/docs/02.data-api/whois.html)
  information for given `resource` under `ctx.whois[resource]` as a map.

  The `resource` can be either a ASN number, IP address or IP prefix. The
  whois records are transformed into a list of two-element tuples in the form
  of `{key, value}` without any other transformation. Depending on the registry
  the information came from, different `{key, value}`-pairs may be listed for an
  object.

  ```elixir
  %{
    whois: %{
      "resource" => %{
        autorities: ["authority", ..],
        irr: [
          [
            {key, value},
            ...
          ],
          ...
        ],
        records: [
          [
            {key, value},
            ...
            {"source", "authority"}
          ],
          ...
        ]
      }
    }
  }
  ```
  """
  @spec whois(map, binary) :: map
  def whois(ctx, resource) do
    Stat.url("whois", resource: resource)
    |> Stat.get()
    |> decode()
    |> store(ctx, :whois, resource)
  end
end
|
lib/rir/api.ex
| 0.52902
| 0.663941
|
api.ex
|
starcoder
|
defmodule RpiBacklight.AutoDimmer do
  @moduledoc """
  A simple automatic screen blanker.

  Start `AutoDimmer` under a supervision tree and it takes care of dimming
  and powering off the display. By default the timeout is 10 seconds and the
  brightness level is 255, the maximum allowed.

  It is the user's responsibility to call `activate/0` to keep the light
  active, for example on input events from keyboard or mouse. Every call to
  `activate/0` resets the timeout.

  `AutoDimmer` accepts optional parameters:

    * `:timeout` - the blank timeout in seconds, 10 by default.
    * `:brightness` - the brightness when active, from 0 to 255, 255 by default.

  For example, `RpiBacklight.AutoDimmer.start_link(timeout: 30, brightness: 127)`
  will kick in after 30 seconds using half of the maximum brightness.
  """
  use GenServer

  require Logger

  defmodule State do
    @moduledoc false
    # :brightness - level restored on activation
    # :tref       - pending blank-timer reference, nil while blanked
    # :timeout    - inactivity timeout in milliseconds
    defstruct brightness: 255,
              tref: nil,
              timeout: 10_000
  end

  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  @doc """
  Signal the dimmer to activate the backlight.

  If backlight is already on, restarts the timer.
  """
  def activate do
    GenServer.cast(__MODULE__, {:activate})
  end

  @impl true
  def init(opts) do
    state = build_state(opts)

    Logger.info(
      "Starting backlight controller with #{state.brightness} brightness and #{state.timeout}msec interval"
    )

    RpiBacklight.brightness(state.brightness)
    {:ok, schedule_blank(state)}
  end

  # Backlight is currently blanked (no pending timer): restore brightness,
  # power the panel back on and arm a fresh blank timer.
  @impl true
  def handle_cast({:activate}, %State{tref: nil} = state) do
    RpiBacklight.brightness(state.brightness)
    RpiBacklight.on()
    {:noreply, schedule_blank(state)}
  end

  # Backlight is already on: just push the blank deadline forward.
  def handle_cast({:activate}, %State{tref: tref} = state) do
    Process.cancel_timer(tref)
    {:noreply, schedule_blank(state)}
  end

  # Blank timer fired: fade brightness down to zero, then cut power.
  @impl true
  def handle_info({:blank}, state) do
    for level <- state.brightness..0 do
      RpiBacklight.brightness(level)
      :timer.sleep(10)
    end

    RpiBacklight.off()
    {:noreply, %{state | tref: nil}}
  end

  # Arms the blank timer and records its reference in the state.
  defp schedule_blank(%State{} = state) do
    %{state | tref: Process.send_after(self(), {:blank}, state.timeout)}
  end

  # Builds the initial state from the options; :timeout is given in seconds
  # by the caller but kept in milliseconds internally.
  defp build_state(opts) do
    defaults = %State{}
    seconds = Keyword.get(opts, :timeout, div(defaults.timeout, 1000))

    %State{
      defaults
      | timeout: seconds * 1000,
        brightness: Keyword.get(opts, :brightness, defaults.brightness)
    }
  end
end
|
lib/rpi_backlight/auto_dimmer.ex
| 0.910344
| 0.537223
|
auto_dimmer.ex
|
starcoder
|
defmodule Plenario.Etl.FieldGuesser do
  @moduledoc """
  Guesses field names and column types for a data set — from Socrata view
  metadata when available, otherwise by sampling rows of the downloaded
  source document and taking the most frequent type guess per column.
  """
  require Logger

  import Plenario.Utils, only: [parse_timestamp: 1]

  alias Plenario.DataSet
  alias Plenario.Etl.Downloader
  alias Socrata.Client

  # Maximum number of chunks pulled when downloading a web-resource sample.
  @download_limit 10 # chunks

  # Sample size: header row plus 1_000 data rows.
  @num_rows 1_001

  # Socrata data type name -> our column type; unknown types become "text".
  @soc_types %{
    "calendar_date" => "timestamp",
    "checkbox" => "boolean",
    "double" => "float",
    "floating_timestamp" => "timestamp",
    "line" => "geometry",
    "location" => "jsonb",
    "money" => "text",
    "multiline" => "geometry",
    "multipoint" => "geometry",
    "multipolygon" => "geometry",
    "number" => "integer",
    "point" => "geometry",
    "polygon" => "geometry",
    "text" => "text"
  }

  @doc """
  Returns field descriptors for the given data set.

  Socrata data sets yield `%{name: _, type: _, description: _}` maps built
  from the view metadata; web resources yield `%{name: _, type: _}` maps
  with types guessed from sampled rows (see `make_guess/1`).
  """
  # socrata
  def guess(%DataSet{soc_domain: domain, soc_4x4: fourby, socrata?: true}) do
    %HTTPoison.Response{body: body} = Client.new(domain) |> Client.get_view(fourby)
    res = Jason.decode!(body)

    fields =
      res["columns"]
      |> Enum.map(fn col -> [col["fieldName"], col["dataTypeName"], col["description"]] end)
      # Socrata computed system columns (":@…") are internal; drop them.
      |> Enum.reject(fn [key, _, _] -> String.starts_with?(key, ":@") end)
      |> Enum.map(fn [col, type, desc] -> [col, Map.get(@soc_types, type, "text"), desc] end)

    # Well-known Socrata metadata columns, always present on every record.
    fields =
      fields ++
        [
          [":id", "text", "The internal Socrata record ID"],
          [":created_at", "timestamp", "The timestamp of when the record was first created"],
          [":updated_at", "timestamp", "The timestamp of when the record was last updated"]
        ]

    fields
    |> Enum.map(& Enum.zip(~w|name type description|a, &1))
    |> Enum.map(& Enum.into(&1, %{}))
  end

  # web resource
  def guess(%DataSet{src_type: type, socrata?: false} = ds) do
    source_doc = Downloader.download(ds, @download_limit)

    csv_opts =
      case type do
        "csv" -> [headers: true]
        "tsv" -> [headers: true, separator: ?\t]
      end

    rows =
      File.stream!(source_doc)
      |> CSV.decode!(csv_opts)
      |> Enum.take(@num_rows)

    # One {column, guessed_type} entry per sampled cell.
    guesses =
      rows
      |> Enum.map(fn row_map ->
        Enum.map(row_map, fn {key, value} ->
          {key, make_guess(value)}
        end)
      end)
      |> List.flatten()

    # Tally how often each {column, type} guess occurred.
    counts =
      guesses
      |> Enum.reduce(%{}, fn key_guess, acc ->
        Map.update(acc, key_guess, 1, & &1 + 1)
      end)
      |> Enum.into([])

    # Per column, the highest count seen across all guessed types.
    maxes =
      counts
      |> Enum.reduce(%{}, fn {{key, _type?}, count}, acc ->
        current_max = Map.get(acc, key, 0)

        case count > current_max do
          false -> acc
          true -> Map.put(acc, key, count)
        end
      end)

    maxes
    |> Enum.reduce(%{}, fn {col, count}, acc ->
      # this is kind of convoluted, but we need to match the column
      # and the max count to the previous `counts` map to ensure we
      # are setting the correct type
      # FIX: this pattern used to read `{{_col, type,}, _count}` — the
      # trailing comma inside the tuple is a syntax error in Elixir.
      {{_col, type}, _count} =
        Enum.find(counts, fn {{col?, _type}, count?} -> col? == col and count? == count end)

      Map.put(acc, col, type)
    end)
    |> Enum.map(fn {name, type} -> [name: name, type: type] end)
    |> Enum.map(& Enum.into(&1, %{}))
  end

  @doc """
  Guesses a column type for a single cell value.

  Checks in order: boolean, integer, float, timestamp, JSON, WKT geometry;
  anything else falls back to "text".
  """
  def make_guess(value) do
    cond do
      boolean?(value) -> "boolean"
      integer?(value) -> "integer"
      float?(value) -> "float"
      date?(value) -> "timestamp"
      json?(value) -> "jsonb"
      geometry?(value) -> "geometry"
      true -> "text"
    end
  end

  def boolean?(value) when is_boolean(value), do: true
  def boolean?(value) when is_binary(value), do: Regex.match?(~r/^(t|true|f|false)$/i, value)
  def boolean?(_), do: false

  def integer?(value) when is_integer(value), do: true
  def integer?(value) when is_binary(value), do: Regex.match?(~r/^-?\d+$/, value)
  def integer?(_), do: false

  def float?(value) when is_float(value), do: true
  def float?(value) when is_binary(value), do: Regex.match?(~r/^-?\d+\.\d+$/, value)
  def float?(_), do: false

  def date?(value) when is_binary(value), do: parse_timestamp(value) != :error
  def date?(_), do: false

  # Matches WKT-style geometry literals, e.g. "POINT (1 2)".
  def geometry?(value) when is_binary(value), do: Regex.match?(~r/^(multi)?(point|polygon|linestring)\s?\(.*$/i, value)
  def geometry?(_), do: false

  def json?(value) when is_map(value) or is_list(value), do: true

  def json?(value) when is_binary(value) do
    case Jason.decode(value) do
      {:ok, _} -> true
      {:error, _} -> false
    end
  end

  def json?(_), do: false
end
|
lib/plenario/etl/field_guesser.ex
| 0.642657
| 0.437884
|
field_guesser.ex
|
starcoder
|
defmodule ExHashRing.Node do
  @moduledoc """
  Types and Functions for working with Ring Nodes and their Replicas
  """
  alias ExHashRing.Hash

  @typedoc """
  Nodes are uniquely identified in the ring by their name.
  """
  @type name :: binary()

  @typedoc """
  Replicas is a count of how many times a Node should be placed into a Ring.

  Negative replica counts will result in an ArgumentError when expanded
  """
  @type replicas :: non_neg_integer()

  @typedoc """
  Nodes are properly specified as a tuple of their name and their number of replicas
  """
  @type t :: {name(), replicas()}

  @typedoc """
  Nodes can be defined by either using a bare name or using a fully specified node. When using a
  bare name the definition will have to be converted into a fully specified node, see
  `normalize/2`.
  """
  @type definition :: name() | t()

  @typedoc """
  Nodes are expanded into multiple virtual nodes.
  """
  @type virtual :: {Hash.t(), name()}

  @doc """
  Expands a list of nodes into a list of virtual nodes.
  """
  @spec expand([t()]) :: [virtual()]
  def expand([]), do: []

  def expand(nodes) do
    nodes
    |> Enum.reduce([], &place_replicas/2)
    |> sort_by_hash()
  end

  @spec expand([t()], replicas()) :: [virtual()]
  def expand(nodes, replicas) do
    nodes
    |> normalize(replicas)
    |> expand()
  end

  @doc """
  Converts definitions into fully specified nodes.

  A single definition or a list of defintions can be normalized by this function.
  """
  @spec normalize([definition()], replicas()) :: [t()]
  def normalize(nodes, replicas) when is_list(nodes) do
    Enum.map(nodes, &normalize(&1, replicas))
  end

  # Already fully specified: the default replica count is ignored.
  @spec normalize(t(), replicas()) :: t()
  def normalize({_name, _replicas} = normalized, _default_replicas) do
    normalized
  end

  # A bare name picks up the default replica count.
  @spec normalize(name(), replicas()) :: t()
  def normalize(name, replicas) do
    {name, replicas}
  end

  ## Private

  # Prepends one virtual node per replica of `node` onto the accumulator.
  # Zero replicas contribute nothing; negative counts are rejected.
  @spec place_replicas(node :: t, acc :: [virtual()]) :: [virtual()]
  defp place_replicas({_name, 0}, acc) do
    acc
  end

  defp place_replicas({name, replicas}, acc) when replicas > 0 do
    Enum.reduce(0..(replicas - 1), acc, fn index, acc ->
      [{Hash.of("#{name}#{index}"), name} | acc]
    end)
  end

  defp place_replicas({name, replicas}, _acc) do
    raise ArgumentError, "#{name} has #{replicas} replicas, replicas must be non-negative"
  end

  # Orders virtual nodes by ascending hash value.
  @spec sort_by_hash([virtual()]) :: [virtual()]
  defp sort_by_hash(virtual_nodes) do
    Enum.sort(virtual_nodes, fn {left, _}, {right, _} -> left < right end)
  end
end
|
lib/ex_hash_ring/node.ex
| 0.887525
| 0.710829
|
node.ex
|
starcoder
|
defmodule Gradient.ElixirFmt do
  @moduledoc """
  Module that handles formatting and printing error messages produced by Gradualizer in Elixir.

  Options:
  - `ex_colors`: list of color options:
    - {`use_colors`, boolean()}: - whether to use the colors, default: true
    - {`expression`, ansicode()}: color of the expressions, default: :yellow
    - {`type`, ansicode()}: color of the types, default: :cyan
    - {`underscored_line`, ansicode()}: color of the underscored line pointing at the error in code, default: :red
  - `ex_fmt_expr_fun`: function to pretty print an expression AST in Elixir `(abstract_expr()) -> iodata()`.
  - `ex_fmt_type_fun`: function to pretty print an type AST in Elixir `(abstract_type() -> iodata())`.
  - `{fancy, boolean()}`: do not use fancy error messages, default: true
  - Gradualizer options, but some of them are overwritten by Gradient.
  """
  @behaviour Gradient.Fmt

  alias :gradualizer_fmt, as: FmtLib
  alias Gradient.ElixirType
  alias Gradient.ElixirExpr
  alias Gradient.Types

  @type colors_opts() :: [
          use_colors: boolean(),
          expression: IO.ANSI.ansicode(),
          type: IO.ANSI.ansicode(),
          underscored_line: IO.ANSI.ansicode()
        ]

  @type options() :: [
          ex_colors: colors_opts(),
          ex_fmt_type_fun: (Types.abstract_type() -> iodata()),
          ex_fmt_expr_fun: (Types.abstract_expr() -> iodata())
        ]

  # Colors used when the caller supplies no `ex_colors` option.
  @default_colors [use_colors: true, expression: :yellow, type: :cyan, underscored_line: :red]

  # Prints every error of every file, tagging each with its filename.
  def print_errors(errors, opts) do
    for {file, e} <- errors do
      opts = Keyword.put(opts, :filename, file)
      print_error(e, opts)
    end
  end

  # Prints a single formatted error, prefixed with the filename when known
  # (no space after the colon in :brief mode).
  def print_error(error, opts) do
    file = Keyword.get(opts, :filename)
    fmt_loc = Keyword.get(opts, :fmt_location, :verbose)

    case file do
      nil -> :ok
      _ when fmt_loc == :brief -> :io.format("~s:", [file])
      _ -> :io.format("~s: ", [file])
    end

    :io.put_chars(format_error(error, opts))
  end

  # Formats an error, installing the Elixir pretty-printers as defaults and
  # disabling Gradualizer's own (Erlang-oriented) colorization.
  def format_error(error, opts) do
    opts = Keyword.put_new(opts, :color, false)
    opts = Keyword.put_new(opts, :fmt_type_fun, pp_type_fun(opts))
    opts = Keyword.put_new(opts, :fmt_expr_fun, pp_expr_fun(opts))
    format_type_error(error, opts)
  end

  @impl Gradient.Fmt
  def format_type_error({:type_error, expression, actual_type, expected_type}, opts)
      when is_tuple(expression) do
    format_expr_type_error(expression, actual_type, expected_type, opts)
  end

  def format_type_error({:nonexhaustive, anno, example}, opts) do
    # A single example stays as-is; a list of examples is joined with
    # newline + tab separators (foldl prepends, hence the final reverse).
    formatted_example =
      case example do
        [x | xs] ->
          :lists.foldl(
            fn a, acc ->
              [pp_expr(a, opts), "\n\t" | acc]
            end,
            [pp_expr(x, opts)],
            xs
          )
          |> Enum.reverse()

        x ->
          pp_expr(x, opts)
      end

    :io_lib.format(
      "~sNonexhaustive patterns~s~s",
      [
        format_location(anno, :brief, opts),
        format_location(anno, :verbose, opts),
        case :proplists.get_value(:fmt_location, opts, :verbose) do
          :brief ->
            # NOTE(review): `:io_lib.format/2` expects a LIST of arguments;
            # here `formatted_example` is passed bare rather than as
            # `[formatted_example]`, which likely crashes for multi-element
            # examples in :brief mode — confirm.
            :io_lib.format(": ~s\n", formatted_example)

          :verbose ->
            :io_lib.format("\nExample values which are not covered:~n\t~s~n", [formatted_example])
        end
      ]
    )
  end

  # Spec errors: name/arity mismatch between a @spec and its function.
  def format_type_error(
        {:spec_error, :wrong_spec_name, anno, name, arity},
        opts
      ) do
    :io_lib.format(
      "~sThe spec ~p/~p~s doesn't match the function name/arity~n",
      [
        format_location(anno, :brief, opts),
        name,
        arity,
        format_location(anno, :verbose, opts)
      ]
    )
  end

  def format_type_error({:spec_error, :mixed_specs, anno, name, arity}, opts) do
    :io_lib.format(
      "~sThe spec ~p/~p~s follows a spec with different name/arity~n",
      [
        format_location(anno, :brief, opts),
        name,
        arity,
        format_location(anno, :verbose, opts)
      ]
    )
  end

  def format_type_error({:call_undef, anno, module, func, arity}, opts) do
    :io_lib.format(
      "~sCall to undefined function ~s~p/~p~s~n",
      [
        format_location(anno, :brief, opts),
        parse_module(module),
        func,
        arity,
        format_location(anno, :verbose, opts)
      ]
    )
  end

  # Undefined record, qualified by module.
  def format_type_error({:undef, :record, anno, {module, recName}}, opts) do
    :io_lib.format(
      "~sUndefined record ~p:~p~s~n",
      [
        format_location(anno, :brief, opts),
        module,
        recName,
        format_location(anno, :verbose, opts)
      ]
    )
  end

  # Undefined record, local.
  def format_type_error({:undef, :record, anno, recName}, opts) do
    :io_lib.format(
      "~sUndefined record ~p~s~n",
      [format_location(anno, :brief, opts), recName, format_location(anno, :verbose, opts)]
    )
  end

  # Undefined record field; `fieldName` is an AST node carrying its own
  # location, hence it is used for both the text and the location.
  def format_type_error({:undef, :record_field, fieldName}, opts) do
    :io_lib.format(
      "~sUndefined record field ~s~s~n",
      [
        format_location(fieldName, :brief, opts),
        pp_expr(fieldName, opts),
        format_location(fieldName, :verbose, opts)
      ]
    )
  end

  def format_type_error({:undef, :user_type, anno, {name, arity}}, opts) do
    :io_lib.format(
      "~sUndefined type ~p/~p~s~n",
      [format_location(anno, :brief, opts), name, arity, format_location(anno, :verbose, opts)]
    )
  end

  def format_type_error({:undef, type, anno, {module, name, arity}}, opts)
      when type in [:user_type, :remote_type] do
    type =
      case type do
        :user_type -> "type"
        :remote_type -> "remote type"
      end

    module = "#{inspect(module)}"

    :io_lib.format(
      "~sUndefined ~s ~s.~p/~p~s~n",
      [
        format_location(anno, :brief, opts),
        type,
        module,
        name,
        arity,
        format_location(anno, :verbose, opts)
      ]
    )
  end

  # Everything else falls back to Gradualizer's Erlang formatter; the
  # trailing `'\n'` is an Erlang charlist appended to its iolist result.
  def format_type_error(error, opts) do
    :gradualizer_fmt.format_type_error(error, opts) ++ '\n'
  end

  # Formats a `{:type_error, ...}`, preferring the fancy in-context display
  # (source excerpt with the offending line underscored) and falling back to
  # the inline pretty-printed expression when the source is unavailable.
  def format_expr_type_error(expression, actual_type, expected_type, opts) do
    {inline_expr, fancy_expr} =
      case try_highlight_in_context(expression, opts) do
        {:error, _e} -> {[" " | pp_expr(expression, opts)], ""}
        {:ok, fancy} -> {"", fancy}
      end

    :io_lib.format(
      "~sThe ~s~ts~s is expected to have type ~ts but it has type ~ts~n~ts~n~n",
      [
        format_location(expression, :brief, opts),
        describe_expr(expression),
        inline_expr,
        format_location(expression, :verbose, opts),
        pp_type(expected_type, opts),
        pp_type(actual_type, opts),
        fancy_expr
      ]
    )
  end

  # Emits the location only when the requested style matches the configured
  # `:fmt_location` option (so each call site renders at most once).
  def format_location(expression, fmt_type, opts \\ []) do
    case Keyword.get(opts, :fmt_location, :verbose) do
      ^fmt_type -> FmtLib.format_location(expression, fmt_type)
      _ -> ""
    end
  end

  # Builds the expression pretty-printer closure: user-supplied formatter
  # (or the Elixir default) wrapped in the configured ANSI color.
  def pp_expr_fun(opts) do
    fmt = Keyword.get(opts, :ex_fmt_expr_fun, &ElixirExpr.pp_expr_format/1)
    colors = get_colors_with_default(opts)
    {:ok, use_colors} = Keyword.fetch(colors, :use_colors)
    {:ok, expr_color} = Keyword.fetch(colors, :expression)

    fn expression ->
      IO.ANSI.format([expr_color, fmt.(expression)], use_colors)
    end
  end

  # Same as pp_expr_fun/1 but for types (note: result is wrapped in a list).
  def pp_type_fun(opts) do
    fmt = Keyword.get(opts, :ex_fmt_type_fun, &ElixirType.pp_type_format/1)
    colors = get_colors_with_default(opts)
    {:ok, use_colors} = Keyword.fetch(colors, :use_colors)
    {:ok, type_color} = Keyword.fetch(colors, :type)

    fn type ->
      [IO.ANSI.format([type_color, fmt.(type)], use_colors)]
    end
  end

  # User colors take precedence; ++ keeps the defaults as fallbacks since
  # Keyword.fetch/2 returns the first match.
  def get_colors_with_default(opts) do
    case Keyword.fetch(opts, :ex_colors) do
      {:ok, colors} ->
        colors ++ @default_colors

      _ ->
        @default_colors
    end
  end

  def pp_expr(expression, opts) do
    pp_expr_fun(opts).(expression)
  end

  def pp_type(type, opts) do
    pp_type_fun(opts).(type)
  end

  # Attempts the fancy display: requires fancy mode on, a real location on
  # the expression, and a readable source file recovered from the forms.
  @spec try_highlight_in_context(Types.abstract_expr(), options()) ::
          {:ok, iodata()} | {:error, term()}
  def try_highlight_in_context(expression, opts) do
    with :ok <- print_fancy?(opts),
         :ok <- has_location?(expression),
         {:ok, path} <- get_ex_file_path(opts[:forms]),
         {:ok, code} <- File.read(path) do
      code_lines = String.split(code, ~r/\R/)
      {:ok, highlight_in_context(expression, code_lines, opts)}
    end
  end

  def print_fancy?(opts) do
    if Keyword.get(opts, :fancy, true) do
      :ok
    else
      {:error, "The fancy mode is turn off"}
    end
  end

  # Line 0 means "no location" in the abstract format.
  def has_location?(expression) do
    if elem(expression, 1) == 0 do
      {:error, "The location is missing in the expression"}
    else
      :ok
    end
  end

  # Renders the source excerpt: two lines of context around the expression's
  # line, with the offending line underscored/colored.
  @spec highlight_in_context(tuple(), [String.t()], options()) :: iodata()
  def highlight_in_context(expression, context, opts) do
    line = elem(expression, 1)

    context
    |> Enum.with_index(1)
    |> filter_context(line, 2)
    |> underscore_line(line, opts)
    |> Enum.join("\n")
  end

  # Keeps only the lines within `ctx_size` of the annotated line.
  def filter_context(lines, loc, ctx_size \\ 1) do
    line = :erl_anno.line(loc)
    range = (line - ctx_size)..(line + ctx_size)

    Enum.filter(lines, fn {_, number} -> number in range end)
  end

  # Prefixes each context line with its number; the target line additionally
  # gets underline + the configured color.
  def underscore_line(lines, line, opts) do
    Enum.map(lines, fn {str, n} ->
      if(n == line) do
        colors = get_colors_with_default(opts)
        {:ok, use_colors} = Keyword.fetch(colors, :use_colors)
        {:ok, color} = Keyword.fetch(colors, :underscored_line)
        line_str = to_string(n) <> " " <> str

        [
          IO.ANSI.underline(),
          IO.ANSI.format_fragment([color, line_str], use_colors),
          IO.ANSI.reset()
        ]
      else
        to_string(n) <> " " <> str
      end
    end)
  end

  # Recovers the source path from the leading :file attribute of the forms.
  def get_ex_file_path([{:attribute, 1, :file, {path, 1}} | _]), do: {:ok, path}
  def get_ex_file_path(_), do: {:error, :not_found}

  # Renders a module for display: Elixir modules lose their prefix, Erlang
  # modules get the leading colon; the builtin :elixir module is hidden.
  @spec parse_module(atom()) :: String.t()
  def parse_module(:elixir), do: ""

  def parse_module(mod) do
    case Atom.to_string(mod) do
      "Elixir." <> mod_str -> mod_str <> "."
      mod -> ":" <> mod <> "."
    end
  end

  # Human-readable noun for an abstract-format expression node.
  @spec describe_expr(:gradualizer_type.abstract_expr()) :: binary()
  def describe_expr({:atom, _, _}), do: "atom"
  def describe_expr({:bc, _, _, _}), do: "binary comprehension"
  def describe_expr({:bin, _, _}), do: "bit expression"
  def describe_expr({:block, _, _}), do: "block"
  def describe_expr({:char, _, _}), do: "character"
  def describe_expr({:call, _, _, _}), do: "function call"
  def describe_expr({:catch, _, _}), do: "catch expression"
  def describe_expr({:case, _, _, _}), do: "case expression"
  def describe_expr({:cons, _, _, _}), do: "list"
  def describe_expr({:float, _, _}), do: "float"
  def describe_expr({:fun, _, _}), do: "fun expression"
  def describe_expr({:integer, _, _}), do: "integer"
  def describe_expr({:if, _, _}), do: "if expression"
  def describe_expr({:lc, _, _, _}), do: "list comprehension"
  def describe_expr({:map, _, _}), do: "map"
  def describe_expr({:map, _, _, _}), do: "map update"
  def describe_expr({:match, _, _, _}), do: "match"
  def describe_expr({:named_fun, _, _, _}), do: "named fun expression"
  def describe_expr({nil, _}), do: "empty list"
  # NOTE(review): abstract-format operators are atoms (:not, :-), so these
  # two charlist patterns ('not', '-') can never match and unary negations
  # fall through to the catch-all "expression" clause — confirm and fix.
  def describe_expr({:op, _, 'not', _}), do: "negation"
  def describe_expr({:op, _, '-', _}), do: "negation"
  def describe_expr({:op, _, op, _, _}), do: to_string(:io_lib.format("~w expression", [op]))
  def describe_expr({:record, _, _, _}), do: "record"
  def describe_expr({:receive, _, _, _, _}), do: "receive expression"
  def describe_expr({:record, _, _, _, _}), do: "record update"
  def describe_expr({:record_field, _, _, _, _}), do: "record field"
  def describe_expr({:record_index, _, _, _}), do: "record index"
  def describe_expr({:string, _, _}), do: "string"
  def describe_expr({:tuple, _, _}), do: "tuple"
  def describe_expr({:try, _, _, _, _, _}), do: "try expression"
  def describe_expr({:var, _, _}), do: "variable"
  def describe_expr(_), do: "expression"
end
|
lib/gradient/elixir_fmt.ex
| 0.82748
| 0.54468
|
elixir_fmt.ex
|
starcoder
|
defmodule Membrane.Core.Element.Toilet do
  @moduledoc false
  # Toilet is an entity that can be filled and drained. If it's not drained on
  # time and exceeds its capacity, it overflows by logging an error and killing
  # the responsible process (passed on the toilet creation).

  require Membrane.Logger

  # {module tag, atomics counter holding the current fill level, capacity,
  #  process to kill on overflow}
  @opaque t :: {__MODULE__, :atomics.atomics_ref(), pos_integer, Process.dest()}

  # Multiplier applied to the demand unit's approximate buffer size when no
  # explicit capacity is provided.
  @default_capacity_factor 200

  @doc """
  Creates a new toilet. When `capacity` is `nil`, a default is derived from
  the demand unit's typical buffer size times `@default_capacity_factor`.
  """
  @spec new(pos_integer() | nil, Membrane.Buffer.Metric.unit_t(), Process.dest()) :: t
  def new(capacity, demand_unit, responsible_process) do
    default_capacity =
      Membrane.Buffer.Metric.from_unit(demand_unit).buffer_size_approximation() *
        @default_capacity_factor

    capacity = capacity || default_capacity
    # A 1-slot atomics array acts as a lock-free shared counter.
    {__MODULE__, :atomics.new(1, []), capacity, responsible_process}
  end

  @doc """
  Adds `amount` to the toilet. If the new level exceeds the capacity, the
  responsible process is killed and `:overflow` is returned.
  """
  @spec fill(t, non_neg_integer) :: :ok | :overflow
  def fill({__MODULE__, atomic, capacity, responsible_process}, amount) do
    # add_get is atomic, so concurrent fills cannot lose updates.
    size = :atomics.add_get(atomic, 1, amount)

    if size > capacity do
      overflow(size, capacity, responsible_process)
      :overflow
    else
      :ok
    end
  end

  @doc """
  Removes `amount` from the toilet.
  """
  @spec drain(t, non_neg_integer) :: :ok
  def drain({__MODULE__, atomic, _capacity, _responsible_process}, amount) do
    :atomics.sub(atomic, 1, amount)
  end

  # Logs the overflow (ASCII art at debug_verbose level, explanation at error
  # level) and kills the process responsible for draining the toilet.
  defp overflow(size, capacity, responsible_process) do
    Membrane.Logger.debug_verbose(~S"""
    Toilet overflow
    ` ' `
    .'''. ' .'''.
    .. ' ' ..
    ' '.'.' '
    .'''.'.'''.
    ' .''.'.''. '
    ;------ ' ------;
    | ~~ .--'--// |
    | / ' \ |
    | / ' \ |
    | | ' | | ,----.
    | \ , ' , / | =|____|=
    '---,###'###,---' (---(
    /## ' ##\ )---)
    |##, ' ,##| (---(
    \'#####'/ `---`
    \`"#"`/
    |`"`|
    .-| |-.
    jgs / ' ' \
    '---------'
    """)

    Membrane.Logger.error("""
    Toilet overflow.
    Reached the size of #{inspect(size)}, which is above toilet capacity (#{inspect(capacity)})
    when storing data from output working in push mode. It means that some element in the pipeline
    processes the stream too slow or doesn't process it at all.
    To have control over amount of buffers being produced, consider using output in pull mode
    (see `Membrane.Pad.mode_t`).
    You can also try changing the `toilet_capacity` in `Membrane.ParentSpec.via_in/3`.
    """)

    Process.exit(responsible_process, :kill)
  end
end
|
lib/membrane/core/element/toilet.ex
| 0.683947
| 0.481759
|
toilet.ex
|
starcoder
|
defmodule Dynamo.HTTP.Case do
  @moduledoc ~S"""
  A bunch of helpers to make it easy to test Dynamos and routers.
  By default, these helpers are macros that dispatch directly
  to the registered endpoint. Here is an example:
  defmodule MyAppTest do
  use ExUnit.Case
  use Dynamo.HTTP.Case
  test :root_route do
  conn = get("/")
  assert conn.sent_body =~ %r/somevalue/
  end
  end
  The default dynamo used in tests is `Dynamo.under_test`.
  This can be changed in a specific test case using `@endpoint`:
  defmodule CustomRouterTest do
  use ExUnit.Case
  use Dynamo.HTTP.Case
  @endpoint CustomRouter
  test :route do
  conn = get("/route")
  assert conn.sent_body =~ %r/somevalue/
  end
  end
  The connection used in such tests is the `Dynamo.Connection.Test`
  which provides some test specific function.
  ## Testing with sequential requests
  In some cases, the same test may request different endpoints:
  test :session do
  conn = get("/put_session")
  assert conn.sent_body =~ %r/somevalue/
  conn = get(conn, "/set_session")
  assert conn.sent_body =~ %r/othervalue/
  end
  The example above will automatically work, since
  `get`/`post`/`put`/`patch`/`delete`/`options` recycles the connection before
  each request.
  When recycled, all response information previously set in
  the connection is cleaned and all cookies are moved from
  the response to the request. This allows state to be passed
  in between the different requests.
  Notice though that recycling will clean up any information
  set in the connection:
  test :session do
  conn = get("/put_session")
  assert conn.sent_body =~ %r/somevalue/
  conn = conn.assign(:foo, :bar)
  conn = get(conn, "/set_session")
  assert conn.sent_body =~ %r/othervalue/
  end
  In the example above, the assign `:foo` set before the request
  won't be visible in the endpoint since it will be cleaned up.
  This can be fixed by explicitly cleaning up the request:
  conn = conn.recycle.assign(:foo, :bar)
  If the connection was already recycled, it won't be recycled once again.
  Finally, notice that all `get`/`post`/`put`/`patch`/`delete`/`options` macros
  are simply a proxy to `process/4`. So in case you want to dispatch
  to different dynamos at the same time, `process/4` may be useful.
  """

  @doc false
  defmacro __using__(_) do
    quote do
      @endpoint Dynamo.under_test
      import unquote(__MODULE__)
      import Dynamo.HTTP.Cookies
      import Dynamo.HTTP.Session
    end
  end

  @doc """
  Returns a connection built with the given method, path and body.
  """
  def conn(method, path, body \\ "") do
    Dynamo.Connection.Test.new(method, path, body)
  end

  @doc """
  Does a GET request to the given path:
  get("/foo")
  get(conn, "/foo")
  """
  defmacro get(arg1, arg2 \\ nil) do
    do_method :GET, arg1, arg2
  end

  @doc """
  Does a POST request to the given path and optionally body:
  post("/foo")
  post(conn, "/foo")
  post(conn, "/foo", "test body") # POSTs to `/foo` with `test body` body
  post(conn, "/foo", [{"foo", "bar"}]) # POSTs to `/foo` with `foo=bar` body
  """
  defmacro post(arg1, arg2 \\ nil, arg3 \\ nil) do
    do_method_with_body :POST, arg1, arg2, arg3
  end

  @doc """
  Does a PUT request to the given path:
  put("/foo")
  put(conn, "/foo")
  put(conn, "/foo", "test body")
  put(conn, "/foo", [{"foo", "bar"}])
  """
  defmacro put(arg1, arg2 \\ nil, arg3 \\ nil) do
    # Bug fix: a leading `do_method :PUT, arg1, arg2` call was dead code —
    # its quoted result was discarded, since only the last expression of a
    # macro is expanded. PUT supports bodies, so only the body form is kept.
    do_method_with_body :PUT, arg1, arg2, arg3
  end

  @doc """
  Does a PATCH request to the given path:
  patch("/foo")
  patch(conn, "/foo")
  patch(conn, "/foo", "test body")
  patch(conn, "/foo", [{"foo", "bar"}])
  """
  defmacro patch(arg1, arg2 \\ nil, arg3 \\ nil) do
    # Bug fix: the method atom was misspelled `:PATH`, so PATCH requests were
    # dispatched with the wrong HTTP method.
    do_method_with_body :PATCH, arg1, arg2, arg3
  end

  @doc """
  Does a DELETE request to the given path:
  delete("/foo")
  delete(conn, "/foo")
  """
  defmacro delete(arg1, arg2 \\ nil) do
    do_method :DELETE, arg1, arg2
  end

  @doc """
  Does a OPTIONS request to the given path:
  options("/foo")
  options(conn, "/foo")
  """
  defmacro options(arg1, arg2 \\ nil) do
    do_method :OPTIONS, arg1, arg2
  end

  # A keyword-list body in the 2-arg form is encoded as a query string.
  defp do_method_with_body(method, arg1, arg2, nil) when is_list(arg2) do
    do_method method, arg1, URI.encode_query(arg2)
  end

  defp do_method_with_body(method, arg1, arg2, nil) do
    do_method method, arg1, arg2
  end

  defp do_method_with_body(method, arg1, arg2, arg3) when is_list(arg3) do
    do_method method, arg1, arg2, URI.encode_query(arg3)
  end

  defp do_method_with_body(method, arg1, arg2, arg3) do
    do_method(method, arg1, arg2, arg3)
  end

  # get("/foo") — no connection given, build one from method + path.
  defp do_method(method, arg1, nil) do
    quote do
      unquote(__MODULE__).process @endpoint, unquote(method), unquote(arg1)
    end
  end

  # Either get(conn, "/foo") or post("/foo", body): `arg1` is placed before
  # the method on purpose — the matching `process/5` clause is selected at
  # runtime by whether `arg1` is a connection tuple or a binary path.
  defp do_method(method, arg1, arg2) do
    quote do
      unquote(__MODULE__).process @endpoint, unquote(arg1), unquote(method), unquote(arg2)
    end
  end

  # post(conn, "/foo", body) — full form with connection, path and body.
  defp do_method(method, conn, path, body) do
    quote do
      unquote(__MODULE__).process @endpoint, unquote(conn), unquote(method), unquote(path), unquote(body)
    end
  end

  @doc """
  Writes a session cookie according to the current store to
  be used in the next request. This is the preferred way to
  set the session before a request.
  """
  def put_session_cookie(conn, session) do
    config = conn.main.config[:dynamo]
    store = config[:session_store]
    opts = store.setup config[:session_options]
    value = store.put_session(nil, session, opts)
    conn.put_req_cookie(opts[:key], value)
  end

  @doc """
  Requests the given `endpoint` with the given `method` and `path`.
  And verifies if the endpoint returned a valid connection.
  ## Examples
  process MyDynamo, :get, "/foo"
  process MyDynamo, conn, :get, "/foo"
  """
  def process(endpoint, conn, method, path \\ nil, body \\ nil)

  def process(endpoint, method, path, nil, nil) do
    do_process endpoint, Dynamo.Connection.Test.new(method, path)
  end

  def process(endpoint, conn, method, path, nil) when is_tuple(conn) do
    # Recycle only once a response has been sent, so explicit assigns made
    # after a manual recycle survive.
    conn = if conn.sent_body, do: conn.recycle, else: conn
    do_process endpoint, conn.req(method, path)
  end

  def process(endpoint, path, method, body, nil) when is_binary(path) do
    do_process endpoint, conn(method, path, body)
  end

  def process(endpoint, conn, method, path, body) do
    conn = if conn.sent_body, do: conn.recycle, else: conn
    do_process endpoint, conn.req(method, path, body)
  end

  # Dispatches the connection through the endpoint and sanity-checks the
  # returned value, raising descriptive errors for common mistakes.
  defp do_process(endpoint, conn) do
    conn = endpoint.service(conn)

    if not is_tuple(conn) or not function_exported?(elem(conn, 0), :state, 1) do
      raise "#{inspect endpoint}.service did not return a connection, got #{inspect conn}"
    end

    if conn.state == :unset and conn.already_sent? do
      raise "#{inspect endpoint}.service sent a response back but there was an exception and the response was lost (the exception was logged)"
    end

    conn
  end
end
|
lib/dynamo/http/case.ex
| 0.892639
| 0.593609
|
case.ex
|
starcoder
|
defmodule SiteWeb.ScheduleView.StopList do
  alias SiteWeb.ViewHelpers
  alias Site.StopBubble

  @doc """
  Link to expand or collapse a route branch.
  Note: The target element (with id `"target_id"`) must also have class `"collapse stop-list"`
  for the javascript to appropriately modify the button and the dotted/solid line
  """
  @spec view_branch_link(String.t(), map, String.t(), String.t()) :: Phoenix.HTML.Safe.t()
  # No branch name means there is nothing to expand — render nothing.
  def view_branch_link(nil, _assigns, _target_id, _branch_display), do: []

  def view_branch_link(branch_name, assigns, target_id, branch_display) do
    SiteWeb.ScheduleView.render(
      "_stop_list_expand_link.html",
      Map.merge(
        assigns,
        %{
          branch_name: branch_name,
          branch_display: branch_display,
          target_id: target_id,
          # The link is rendered in its "expanded" state only when this
          # branch is the currently expanded one.
          expanded: assigns.expanded == branch_name
        }
      )
    )
  end

  @spec display_expand_link?([{String.t(), StopBubble.Params.t()}]) :: boolean
  @doc "Determine if the expansion link should be shown"
  # Only show the link when there is more than one intermediate stop.
  def display_expand_link?([_, _ | _]), do: true
  def display_expand_link?(_), do: false

  @spec step_bubble_attributes([{String.t(), StopBubble.Params.t()}], String.t(), boolean) ::
          Keyword.t()
  @doc "Returns the html attributes to be used when rendering the intermediate steps"
  def step_bubble_attributes(step_bubble_params, target_id, expanded) do
    # "in" is the Bootstrap class marking a collapse section as open.
    case {display_expand_link?(step_bubble_params), expanded} do
      {true, true} -> [id: target_id, class: "collapse stop-list in"]
      {true, _} -> [id: target_id, class: "collapse stop-list"]
      _ -> []
    end
  end

  @spec stop_bubble_row_params(map(), boolean) :: [StopBubble.Params.t()]
  # Builds one StopBubble.Params struct per bubble in the row; `first_stop?`
  # influences line drawing and checkmark display for terminus bubbles.
  def stop_bubble_row_params(assigns, first_stop? \\ true) do
    for {{bubble_branch, bubble_type}, index} <- Enum.with_index(assigns.bubbles) do
      indent = merge_indent(bubble_type, assigns[:direction_id], index)

      %StopBubble.Params{
        render_type: rendered_bubble_type(bubble_type, index),
        class: Atom.to_string(bubble_type),
        direction_id: assigns[:direction_id],
        merge_indent: indent,
        route_id: bubble_branch,
        route_type: assigns.route.type,
        show_line?: show_line?(bubble_type, indent, first_stop?),
        vehicle_tooltip: vehicle_tooltip(bubble_type, bubble_branch, assigns.vehicle_tooltip),
        content: bubble_content(bubble_branch),
        bubble_branch: bubble_branch,
        show_checkmark?: show_checkmark?(assigns[:show_checkmark?], first_stop?, bubble_type)
      }
    end
  end

  # When the caller didn't specify, show a checkmark on the final terminus.
  defp show_checkmark?(nil, first_stop?, bubble_type) do
    !first_stop? and bubble_type == :terminus
  end

  # An explicit value from assigns always wins.
  defp show_checkmark?(show_checkmark?, _first_stop?, _bubble_type) do
    show_checkmark?
  end

  # Indent direction of the second (:merge, index 1) bubble depends on travel
  # direction; all other bubbles get no indent.
  defp merge_indent(bubble_type, direction_id, index)
  defp merge_indent(:merge, 0, 1), do: :above
  defp merge_indent(:merge, 1, 1), do: :below
  defp merge_indent(_, _, _), do: nil

  # Clause order matters: :empty/:line/:below are checked before the
  # terminus rule, and anything else shows a line.
  defp show_line?(bubble_type, indent, first_stop?)
  defp show_line?(:empty, _, _), do: false
  defp show_line?(:line, _, _), do: true
  defp show_line?(_, :below, _), do: true
  defp show_line?(:terminus, _, first_stop?), do: first_stop? == true
  defp show_line?(_, _, _), do: true

  # Green-line tooltips are only shown on the bubble matching the vehicle's
  # own branch (note the repeated `bubble_branch` binding in the head, which
  # requires the tooltip's route_id to equal the bubble's branch).
  defp vehicle_tooltip(bubble_type, bubble_branch, tooltip)
  defp vehicle_tooltip(:line, _, _), do: nil

  defp vehicle_tooltip(
         _,
         "Green" <> _ = bubble_branch,
         %VehicleTooltip{vehicle: %Vehicles.Vehicle{route_id: bubble_branch}} = tooltip
       ),
       do: tooltip

  defp vehicle_tooltip(_, "Green" <> _, _), do: nil
  defp vehicle_tooltip(_, _, tooltip), do: tooltip

  # A :line bubble and the second :merge bubble render as empty space.
  defp rendered_bubble_type(bubble_type, index)
  defp rendered_bubble_type(:line, _), do: :empty
  defp rendered_bubble_type(:merge, 1), do: :empty
  defp rendered_bubble_type(bubble_type, _), do: bubble_type

  # Green line bubbles display their branch letter; others are blank.
  defp bubble_content(route_id)
  defp bubble_content("Green-" <> letter), do: letter
  defp bubble_content(_), do: ""

  @doc """
  Formats a Schedules.Departures.t to a human-readable time range.
  """
  @spec display_departure_range(Schedules.Departures.t()) :: iodata
  def display_departure_range(:no_service) do
    "No Service"
  end

  def display_departure_range(%Schedules.Departures{first_departure: nil, last_departure: nil}) do
    "No Service"
  end

  def display_departure_range(%Schedules.Departures{} = departures) do
    [
      ViewHelpers.format_schedule_time(departures.first_departure),
      "-",
      ViewHelpers.format_schedule_time(departures.last_departure)
    ]
  end

  @doc """
  Displays a schedule period.
  """
  @spec schedule_period(atom) :: String.t()
  def schedule_period(:week), do: "Monday to Friday"

  def schedule_period(period) do
    period
    |> Atom.to_string()
    |> String.capitalize()
  end

  @spec display_map_link?(integer) :: boolean
  # only show for ferry
  def display_map_link?(type), do: type == 4
end
|
apps/site/lib/site_web/views/schedule/stop_list.ex
| 0.809953
| 0.412974
|
stop_list.ex
|
starcoder
|
defmodule Mix.Tasks.Authority.Gen.Context do
  use Mix.Task
  alias Mix.Authority.Ecto.Context

  @shortdoc "Generate a context with Authority"

  @moduledoc """
  Generates a new context with Authority ready to go.
  mix authority.gen.context Accounts
  The following files will be created (assuming the provided context name is `Accounts`):
  * `lib/<your app>/accounts/accounts.ex`
  * `test/<your app>/accounts/accounts_test.exs`
  * `lib/<your app>/accounts/user.ex`
  * `test/<your app>/accounts/user_test.exs`
  * `lib/<your app>/accounts/token.ex`
  * `test/<your app>/accounts/token_test.exs`
  * `lib/<your app>/accounts/lock.ex`
  * `test/<your app>/accounts/lock_test.exs`
  * `lib/<your app>/accounts/attempt.ex`
  * `test/<your app>/accounts/attempt_test.exs`
  * `priv/repo/migrations/<timestamp>_authority_ecto.ex`
  The generated files expect the following modules to exist (where
  `MyApp` is the top-level namespace for your application):
  * `MyApp.Repo`
  * `MyApp.DataCase`
  If you created your application using `mix phx.new`, these modules where
  already defined for you.
  ## Options
  * `--no-locking` - do not generate files for locking accounts
  after a number of failed attempts
  * `--no-recovery` - do not generate files for password resets
  * `--no-tokenization` - do not generate files for creating tokens. When
  choosing this option, you must also provide `--no-recovery`
  * `--no-registration` - do not generate files for creating/updating users
  """

  # Location of the EEx templates inside the authority_ecto application.
  @context_template "priv/templates/authority.gen.context"

  # Recognized command-line switches (all boolean, negatable via --no-*).
  @switches [
    tokenization: :boolean,
    locking: :boolean,
    recovery: :boolean,
    registration: :boolean
  ]

  # Every feature is enabled unless explicitly disabled on the command line.
  @default_options [
    authentication: true,
    tokenization: true,
    recovery: true,
    registration: true,
    locking: true
  ]

  @doc """
  Generate a new context with Authority.Ecto.Template preconfigured.
  """
  def run([name | args]) do
    context = Context.new(name)
    {files, behaviours, config} = build_features(context, args)

    binding = [
      context: context,
      config: config,
      behaviours: Enum.sort_by(behaviours, &behaviour_order/1)
    ]

    for {source, target} <- files do
      Mix.Generator.create_file(target, render(source, binding))
    end

    Mix.shell().info("""
    Remember to update your repository by running migrations:
    $ mix ecto.migrate
    Add a secret to your configuration for storing tokens:
    config #{inspect(context.otp_app)}, #{inspect(context.token.module)}.HMAC,
    secret_key: "some secure value"
    """)
  end

  def run(args) do
    # Bug fix: the error previously named the wrong task
    # ("authority.gen.migration" instead of "authority.gen.context").
    Mix.raise(
      "expected authority.gen.context to receive a name for the new context, " <>
        "got: #{inspect(Enum.join(args, " "))}"
    )
  end

  # Renders one EEx template from the application's priv directory.
  defp render(source, binding) do
    :authority_ecto
    |> Application.app_dir(Path.join(@context_template, source))
    |> EEx.eval_file(binding)
  end

  # Folds enabled features into a {files, behaviours, config} accumulator.
  defp build_features(context, args) do
    {options, _, _} = OptionParser.parse(args, switches: @switches)

    @default_options
    |> Keyword.merge(options)
    |> Enum.reduce({[], [], []}, &build_feature(&2, context, &1))
  end

  defp build_feature(spec, context, {:authentication, true}) do
    spec
    |> put_file("migration.exs", context.migration.file)
    |> put_file("context.ex", context.file)
    |> put_file("context_test.exs", context.test_file)
    |> put_file("user.ex", context.user.file)
    |> put_file("user_test.exs", context.user.test_file)
    |> put_behaviour(Authority.Authentication)
    |> put_config(:repo, context.repo)
    |> put_config(:user_schema, context.user.module)
  end

  defp build_feature(spec, _context, {:registration, true}) do
    put_behaviour(spec, Authority.Registration)
  end

  defp build_feature(spec, context, {:recovery, true}) do
    spec
    |> put_behaviour(Authority.Recovery)
    |> put_config(:recovery_callback, {context.module, :send_forgot_password_email})
  end

  defp build_feature(spec, context, {:tokenization, true}) do
    spec
    |> put_file("token.ex", context.token.file)
    |> put_file("token_test.exs", context.token.test_file)
    |> put_behaviour(Authority.Tokenization)
    |> put_config(:token_schema, context.token.module)
  end

  defp build_feature(spec, context, {:locking, true}) do
    spec
    |> put_file("lock.ex", context.lock.file)
    |> put_file("lock_test.exs", context.lock.test_file)
    |> put_file("attempt.ex", context.attempt.file)
    |> put_file("attempt_test.exs", context.attempt.test_file)
    |> put_behaviour(Authority.Locking)
    |> put_config(:lock_schema, context.lock.module)
    |> put_config(:lock_attempt_schema, context.attempt.module)
  end

  # Disabled features contribute nothing.
  defp build_feature(spec, _context, _), do: spec

  defp put_file({files, behaviours, config}, src, dest) do
    {[{src, dest} | files], behaviours, config}
  end

  defp put_behaviour({files, behaviours, config}, behaviour) do
    {files, [behaviour | behaviours], config}
  end

  defp put_config({files, behaviours, config}, key, value) do
    {files, behaviours, [{key, value} | config]}
  end

  # Fixed ordering so generated `use` lines always appear consistently.
  defp behaviour_order(Authority.Authentication), do: 0
  defp behaviour_order(Authority.Tokenization), do: 1
  defp behaviour_order(Authority.Recovery), do: 2
  defp behaviour_order(Authority.Registration), do: 3
  defp behaviour_order(Authority.Locking), do: 4
end
|
lib/mix/tasks/authority.gen.context.ex
| 0.810741
| 0.406214
|
authority.gen.context.ex
|
starcoder
|
defmodule Exred.Library.NodePrototype do
  @moduledoc """
  ### Basic Example
  ```elixir
  defmodule Exred.Node.HelloWorld do
  @moduledoc \"""
  Sends "<NAME>" or any other configured greeting as payload
  when it receives a message.
  **Incoming message format**
  Anything / ignored
  **Outgoing message format**
  msg = %{
  payload :: string
  }
  \"""
  @name "Greeter"
  @category "output"
  @info @moduledoc
  @config %{
  name: %{
  info: "Visible node name",
  value: @name,
  type: "string",
  attrs: %{max: 20}
  },
  greeting: %{
  info: "Greeting to be sent",
  value: "<NAME>",
  type: "string",
  attrs: %{max: 40}
  }
  }
  @ui_attributes %{
  left_icon: "face"
  }
  use Exred.Library.NodePrototype
  @impl true
  def handle_msg(msg, state) do
  out = Map.put(msg, :payload, state.config.greeting.value)
  {out, state}
  end
  end
  ```
  ### Module Attributes
  __@name :: string__
  Name of the node. This will be the visible name in the UI.
  __@category :: string__
  Name of the category the node is in. Categories are the panel headers in the node selector on the left side of the UI.
  The name of the category also determines the color of the node given in `exred/ui/app/styles/app.scss` (see .exred-category-function, etc. CSS classes)
  __@info :: string__
  Description of the node. This is displayed in the Info tab in the UI.
  It is usually a longer multi-line string and it is interpreted as markdown text.
  __@config :: map__
  This is a map of configurable values for the node.
  These are displayed in the Config tab in the UI and the values are accessible in the `state` argument in the node.
  There are different types of config entries represented by different widgets in the UI.
  Each type has its unique attributes.
  Node types match the UI components in `exred/ui/app/templates/components/x-config-tab/`
  Keys in the map are:
  - type: type of the config item
  - info: short descritpion of the config item (will be displayed as a tooltip)
  - value: default value for the config item
  - attrs :: map : map of attributes based on the type
  #### type: "string"
  ```elixir
  config_item: %{
  type: "string",
  info: "Short description",
  value: "default value",
  attrs: %{max: 50}
  }
  ```
  Attributes:
  - max : maximum length of the string
  #### type: "number"
  ```elixir
  config_item: %{
  type: "number",
  info: "Short description",
  value: 22,
  attrs: %{min: 10, max: 50}
  }
  ```
  Attributes:
  - max : maximum length of the string
  #### type: "select"
  ```elixir
  config_item: %{
  type: "select",
  info: "Short description",
  value: "GET",
  attrs: %{options: ["GET", "POST", "PATCH"]}
  }
  ```
  Attributes:
  - options : list of options for the selector
  #### type: "list-singleselect"
  ```elixir
  config_item: %{
  info: "Short description",
  type: "list-singleselect",
  value: [],
  attrs: %{items: ["Display 1", "Display 2"]}
  },
  ```
  Attributes:
  - items : list of items to select from
  #### type: "list-multiselect"
  ```elixir
  config_item: %{
  type: "list-multiselect",
  info: "Short description",
  value: [],
  attrs: %{items: ["channel1", "channel2", "channel3"]}
  }
  ```
  Attributes:
  - items : list of items to select from
  #### type: "codeblock"
  ```elixir
  config_item: %{
  type: "codeblock",
  info: "Short description",
  value: "default value"
  }
  ```
  No attributes
  #### Example (@config map):
  ```elixir
  @config %{
  name: %{
  info: "Visible node name",
  value: "GPIO In",
  type: "string",
  attrs: %{max: 20}
  },
  pin_number: %{
  info: "GPIO pin number that the node will read",
  value: 0,
  type: "number",
  attrs: %{min: 0}
  },
  mode: %{
  info: "read_on_message or monitor",
  type: "list-singleselect",
  value: nil,
  attrs: %{items: ["read_on_message", "monitor"]}
  },
  monitored_transition: %{
  info: "send message in rising and/or falling edge",
  type: "list-multiselect",
  value: [],
  attrs: %{items: ["rising", "falling"]}
  }
  }
  ```
  __@ui_attributes__
  Additional UI attributes for the node.
  ```elixir
  @ui_attributes %{
  fire_button: false,
  left_icon: nil,
  right_icon: "send",
  config_order: [:name,:pin_number,:mode]
  }
  ```
  - fire_button :: boolean : clickable button on the node in the UI. Sends a fire message to the node's gen_server
  - left_icon :: string, right_icon :: string : material design icon name (see [Material Design Icons](https://material.io/tools/icons/?style=baseline))
  - config_order :: list : list of the config items in the order we want to display them in the UI
  ### Module Callbacks
  #### node_init(state)
  ```elixir
  node_init(state :: map) :: map | {map, integer}
  ```
  This is called as the last step of the node's init function.
  Needs to return a new state or a {state, timeout} tuple.
  (see GenServer documentation)
  #### handle_msg(msg, state)
  ```elixir
  handle_msg(msg :: map, state :: map) :: {map | nil, map}
  ```
  Handles incoming messages sent to the node from another node.
  Returns the outgoing message and a state.
  If the outgoing message is `nil` then no message will be sent from the node.
  #### fire(state)
  ```elixir
  fire(state :: map) :: map`
  ```
  Called when the fire button is pressed on the node in the UI.
  Needs to return the updated state.
  If the fire action needs to send an outgoing message from the node that can be done with the standard `send/2` function.
  `state.out_nodes` is a list of node PIDs that are connected to this node with outgoing edges.
  Example:
  ```elixir
  def fire(state) do
  Enum.each state.out_nodes, & send(&1, %{payload: "hello"})
  state
  end
  ```
  ### Complete Node Example
  [Link to node in github]()
  """

  # Default attribute values; nodes using this prototype override them.
  @name "NodePrototype"
  @category "Undefined"
  @config %{}
  @info @moduledoc
  # icon names are the standard material design icons
  @ui_attributes %{fire_button: false, left_icon: "thumb_up", right_icon: nil}

  # TODO: prepare/0 is not used, get rid of it?
  @doc """
  This gets called when the module is loaded.
  It should set up set up services that the node needs
  (autheticate with an API or set up database access)
  """
  # @callback prepare() :: list

  @doc """
  Initialize node.
  This is called as the last step of the node's init function.
  Needs to return a new state or a {state, timeout} tuple.
  (see GenServer documentation)
  """
  @callback node_init(state :: map) :: map | {map, integer}
  @callback handle_msg(msg :: map, state :: map) :: {map | nil, map}
  @callback fire(state :: map) :: map

  # Injects the full node implementation (attribute accessors, default
  # callback implementations, and a complete GenServer) into the using module.
  defmacro __using__(_opts) do
    quote do
      IO.inspect("Compiling node prototype: #{__MODULE__}")
      require Logger
      @behaviour Exred.Library.NodePrototype

      # Exposes the node's compile-time attributes to the runtime/UI.
      def attributes do
        # @config may be declared as a keyword list or a map; derive the
        # display order accordingly.
        config_order =
          if Keyword.keyword?(@config) do
            Keyword.keys(@config)
          else
            Map.keys(@config)
          end

        %{
          name: @name,
          category: @category,
          info: @info,
          config: Enum.into(@config, %{}),
          ui_attributes: Enum.into(@ui_attributes, %{config_order: config_order})
        }
      end

      # Default (overridable) implementations of the prototype callbacks.
      def prepare(), do: [prepare: :done]
      def node_init(state), do: state
      def handle_msg(msg, state), do: {msg, state}

      def fire(state) do
        IO.puts("#{inspect(self())} firing: #{inspect(state.node_id)}")
        state
      end

      defoverridable prepare: 0, node_init: 1, handle_msg: 2, fire: 1

      use GenServer

      # API
      def start_link([node_id, node_config, send_event]) do
        Logger.debug("node: #{node_id} #{get_in(node_config, [:name, :value])} START_LINK")
        GenServer.start_link(__MODULE__, [node_id, node_config, send_event], name: node_id)
      end

      def get_state(pid) do
        GenServer.call(pid, :get_state)
      end

      def set_out_nodes(pid, out_nodes) do
        GenServer.call(pid, {:set_out_nodes, out_nodes})
      end

      def add_out_node(pid, new_out) do
        GenServer.call(pid, {:add_out_node, new_out})
      end

      def get_name, do: @name
      def get_category, do: @category
      def get_default_config, do: @config

      # Callbacks
      @impl true
      def init([node_id, node_config, send_event]) do
        Logger.debug("node: #{node_id} #{get_in(node_config, [:name, :value])} INIT")
        # trap exits to make sure terminate/2 gets called by GenServer
        Process.flag(:trap_exit, true)

        default_state = %{
          node_id: node_id,
          config: node_config,
          node_data: %{},
          out_nodes: [],
          send_event: send_event
        }

        # node_init/1 may return either a bare state or {state, timeout}.
        case node_init(default_state) do
          {state, timeout} ->
            Logger.debug(
              "node: #{node_id} #{get_in(node_config, [:name, :value])} INIT timeout: #{
                inspect(timeout)
              }"
            )

            {:ok, state, timeout}

          state ->
            Logger.debug(
              "node: #{node_id} #{get_in(node_config, [:name, :value])} INIT no timeout"
            )

            {:ok, state}
        end
      end

      @impl true
      def handle_call(:get_state, _from, state) do
        {:reply, state, state}
      end

      def handle_call({:set_out_nodes, out_nodes}, _from, state) do
        {:reply, :ok, state |> Map.put(:out_nodes, out_nodes)}
      end

      def handle_call({:add_out_node, new_out}, _from, %{out_nodes: out_nodes} = state) do
        {:reply, :ok, %{state | out_nodes: [new_out | out_nodes]}}
      end

      def handle_call(:fire, _from, state) do
        {:reply, :ok, fire(state)}
      end

      # Every incoming message is passed to the node's handle_msg/2; a
      # non-nil result is fanned out to all connected downstream nodes.
      @impl true
      def handle_info(msg, state) do
        Logger.debug(
          "node: #{state.node_id} #{get_in(state.config, [:name, :value])} GOT: #{inspect(msg)}"
        )

        {msg_out, new_state} = handle_msg(msg, state)

        if msg_out != nil do
          Enum.each(state.out_nodes, &send(&1, msg_out))
        end

        {:noreply, new_state}
      end

      # NOTE(review): this clause is unreachable — the handle_info/2 clause
      # above has no guard and matches every message, so the compiler will
      # warn that this clause can never match. Consider constraining the
      # first clause or removing this one.
      def handle_info(msg, state) do
        Logger.debug(
          "UNHANDLED msg: #{state.node_id} #{get_in(state.config, [:name, :value])} GOT: #{
            inspect(msg)
          }"
        )

        {:noreply, state}
      end

      @impl true
      def terminate(reason, state) do
        event = "notification"
        debug_data = %{exit_reason: Exception.format_exit(reason)}
        payload = %{node_id: state.node_id, node_name: @name, debug_data: debug_data}

        Logger.error(
          "node: #{state.node_id} #{get_in(state.config, [:name, :value])} TERMINATING due to: #{
            inspect(debug_data)
          }"
        )

        # Notify the UI/event channel that the node died.
        # EventChannel.send(event, payload)
        state.send_event.(event, payload)
        :return_value_ignored
      end
    end
  end
end
|
lib/exred_library/node_prototype.ex
| 0.827967
| 0.778733
|
node_prototype.ex
|
starcoder
|
defmodule Structex.Hysteresis.InsertedWoodenSidingWall do
@moduledoc """
Calculates rigidity and ultimate strength of the inserted wooden siding walls.
Depending the following study.
https://www.jstage.jst.go.jp/article/aijs/76/659/76_659_97/_article/-char/ja/
"""
@doc """
Initial slip deformation angle R0 of the wall.

* wall inner length - inner width of the frame
* wall inner height - inner height of the frame
* horizontal clearance - clearance between the columns and the board ends
* vertical clearance - clearance between the beams and the board long edges
"""
@spec first_slip_deformation_angle(
        wall_inner_length :: number,
        wall_inner_height :: number,
        horizontal_clearance :: number,
        virtical_clearance :: number
      ) :: float
def first_slip_deformation_angle(len, height, h_clearance, v_clearance)
    when len > 0 and height > 0 and h_clearance >= 0 and v_clearance >= 0 do
  horizontal_slip = (len - h_clearance * 0.5) * h_clearance
  vertical_slip = (height - v_clearance * 0.5) * v_clearance
  (horizontal_slip + vertical_slip) / (len * height)
end
@doc """
Inverse of the rigidity Kd contributed by the shear connecters (dowels),
taking friction between boards into account.

* single connecter rigidity - shear rigidity of one dowel
* connecter number - dowels per board row
* siding width - board width
* wall inner height / width - inner frame dimensions
* friction coefficient - friction between adjacent boards
"""
@spec shear_connecter_inverted_rigidity_with_friction(
        single_connecter_rigidity :: number,
        connecter_number :: pos_integer,
        siding_width :: number,
        wall_inner_height :: number,
        wall_inner_width :: number,
        friction_coefficient :: number
      ) :: float
# When friction alone resists the full shear (h * fc / l >= 1), the dowels
# contribute no flexibility.
def shear_connecter_inverted_rigidity_with_friction(kd, nd, w, h, l, fc)
    when kd > 0 and is_integer(nd) and nd > 0 and w > 0 and h > 0 and l > 0 and fc >= 0 and
           h * fc / l >= 1 do
  0.0
end

def shear_connecter_inverted_rigidity_with_friction(kd, nd, w, h, l, fc)
    when kd > 0 and is_integer(nd) and nd > 0 and w > 0 and h > 0 and l > 0 and fc >= 0 do
  joint_rows = floor(h / w) - 1
  effective_slope = 1 / h - fc / l
  joint_rows * effective_slope / nd / kd
end
@doc """
Inverse of the shear rigidity Ks of the siding board itself.

* shear modulus - shear elastic modulus of the board material
* thickness - board thickness
* wall inner length - inner width of the frame
"""
@spec siding_inverted_rigidity(
        shear_modulus :: number,
        thickness :: number,
        wall_inner_length :: number
      ) :: float
def siding_inverted_rigidity(shear_modulus, thickness, len)
    when shear_modulus > 0 and thickness > 0 and len > 0 do
  1 / shear_modulus / len / thickness
end
@doc """
Inverse of the rigidity Ka contributed by shortening of the boards'
compression-diagonal zone.

* fiber direction elasticity - Young's modulus along the grain
* elasticity ratio - E parallel / E perpendicular
* thickness - board thickness
* wall inner length / height - inner frame dimensions
"""
@spec diagonal_siding_zone_inverted_rigidity(
        fiber_direction_elasticity :: number,
        elasticity_ratio :: number,
        thickness :: number,
        wall_inner_length :: number,
        wall_inner_height :: number
      ) :: float
def diagonal_siding_zone_inverted_rigidity(eh, er, t, l, h)
    when eh > 0 and er > 0 and t > 0 and l > 0 and h > 0 do
  # Operation order is kept identical to the published formula so floating
  # point results match exactly.
  diagonal_sq = l * l + h * h
  geometry_factor = 4 * :math.log(l) - :math.log(diagonal_sq) + h * h / l / l - 1
  geometry_factor * (l * l + h * h * er) / diagonal_sq / eh / l / t
end
@doc """
Inverse of the stiffness Kc due to embedment of the board ends into the
columns.

fiber_orthogonal_direction_elasticity - full-surface transverse-compression Young's modulus of the column
column_depth - face width of the column
column_width - depth width of the column
substitution_coefficient - substitution coefficient of the fiber-orthogonal direction relative to the fiber direction
thickness - board thickness
siding_width - board width
wall_inner_height - clear (inside) height of the frame
"""
@spec column_side_inverted_rigidity(
        fiber_orthogonal_direction_elasticity :: number,
        column_depth :: number,
        column_width :: number,
        substitution_coefficient :: number,
        thickness :: number,
        siding_width :: number,
        wall_inner_height :: number
      ) :: float
# Guard b >= t: the column must be at least as deep as the board is thick
# for the embedment model (dent_coefficient) to apply.
def column_side_inverted_rigidity(ec, dc, b, n, t, w, h)
    when ec > 0 and dc > 0 and b >= t and n > 0 and t > 0 and w > 0 and h > 0 do
  4 * dc / w / h / t / dent_coefficient(dc, b, t, n) / ec
end
@doc """
Inverse of the stiffness Kc due to embedment of the board ends into the
columns (when the boards are fastened to the horizontal members with
dowels).

fiber_orthogonal_direction_elasticity - full-surface transverse-compression Young's modulus of the column
column_depth - face width of the column
column_width - depth width of the column
substitution_coefficient - substitution coefficient of the fiber-orthogonal direction relative to the fiber direction
thickness - board thickness
siding_width - board width
wall_inner_height - clear (inside) height of the frame
wall_inner_length - clear (inside) width of the frame
connecter_number - number of dowels per row
single_connecter_rigidity - shear stiffness of a single dowel
friction_coefficient - friction coefficient between board and horizontal member
"""
@spec column_side_inverted_rigidity_with_shear_connecters(
        fiber_orthogonal_direction_elasticity :: number,
        column_depth :: number,
        column_width :: number,
        substitution_coefficient :: number,
        thickness :: number,
        siding_width :: number,
        wall_inner_height :: number,
        wall_inner_length :: number,
        connecter_number :: pos_integer,
        single_connecter_rigidity :: number,
        friction_coefficient :: number
      ) :: float
def column_side_inverted_rigidity_with_shear_connecters(ec, dc, b, n, t, w, h, l, nd, kd, fc)
    when ec > 0 and dc > 0 and b >= t and n > 0 and t > 0 and w > 0 and h > 0 and l > 0 and
           is_integer(nd) and nd > 0 and kd > 0 and fc >= 0 do
  # Embedment and dowel stiffnesses act in parallel (denominator sum); the
  # trailing (1 - h / l * fc) factor is the friction reduction.
  4 * dc / h / (w * t * dent_coefficient(dc, b, t, n) * ec + 2 * dc * nd * kd) *
    (1 - h / l * fc)
end
@doc """
Inverse of the stiffness Kc due to embedment of the board ends into the
beams.

fiber_orthogonal_direction_elasticity - full-surface transverse-compression Young's modulus of the beam
beam_depth - face width of the beam
beam_width - depth width of the beam
substitution_coefficient - substitution coefficient of the fiber-orthogonal direction relative to the fiber direction
thickness - board thickness
wall_inner_length - clear (inside) width of the frame
wall_inner_height - clear (inside) height of the frame
"""
# NOTE(review): the @spec parameter names below say column_* although, per
# the docs and the call site in inverted_rigidity/1, these are beam
# dimensions — consider renaming in a behavior-neutral cleanup.
@spec beam_side_inverted_rigidity(
        fiber_orthogonal_direction_elasticity :: number,
        column_depth :: number,
        column_width :: number,
        substitution_coefficient :: number,
        thickness :: number,
        wall_inner_length :: number,
        wall_inner_height :: number
      ) :: float
def beam_side_inverted_rigidity(eb, d, b, n, t, l, h)
    when eb > 0 and d > 0 and b >= t and n > 0 and t > 0 and l > 0 and h > 0 do
  108 / 7 * h * d / l / l / l / t / dent_coefficient(d, b, t, n) / eb
end
@spec dent_coefficient(number, number, number, number) :: float
# Embedment ("dent") amplification factor shared by the Kc formulas.
# The exponential term decays as the member depth b approaches the board
# thickness t; with b == t the factor is exactly 1.0.
defp dent_coefficient(d, b, t, n) when d > 0 and b > 0 and t > 0 and n > 0 do
  exponent = -3 / 4 * n * (b - t) / d
  1 + 4 / 3 * d * (1 - :math.exp(exponent)) / n / t
end
@doc """
Inverse of the overall wall stiffness K — the sum of the component
flexibilities computed by the functions above.

Accepted parameters:
shear_connecter_rigidity - shear stiffness of a single dowel
number_of_shear_connecters - number of dowels per board row
siding_width - board width
frame_inner_height - clear (inside) height of the frame
frame_inner_width - clear (inside) width of the frame
friction_coefficient - friction coefficient
shear_modulus - shear modulus of the board material
siding_thickness - board thickness
siding_fiber_direction_elasticity - fiber-direction Young's modulus of the board
elasticity_ratio - ratio of the board's fiber-orthogonal to fiber-direction Young's modulus
column_fiber_orthogonal_direction_elasticity - full-surface transverse-compression Young's modulus of the column
beam_fiber_orthogonal_direction_elasticity - full-surface transverse-compression Young's modulus of the beam
column_depth - face width of the column
beam_height - beam depth
column_width - depth width of the column
beam_width - beam width
column_substitution_coefficient - substitution coefficient (fiber-orthogonal vs. fiber direction) for the column
beam_substitution_coefficient - substitution coefficient (fiber-orthogonal vs. fiber direction) for the beam

The optional truthy key `:shear_connecters_between_beams_and_sidings`
selects the Kc variant that accounts for dowels between the boards and the
horizontal members.
"""
@spec inverted_rigidity(map | keyword) :: float
def inverted_rigidity(params) when is_list(params) do
  # Keyword lists are normalized to a map so both input shapes share one body.
  inverted_rigidity(Enum.into(params, %{}))
end

def inverted_rigidity(%{} = params) do
  # Required keys — `params.key` raises KeyError on a missing key.
  kd = params.shear_connecter_rigidity
  nd = params.number_of_shear_connecters
  w = params.siding_width
  h = params.frame_inner_height
  l = params.frame_inner_width
  fc = params.friction_coefficient
  g = params.shear_modulus
  t = params.siding_thickness
  eh = params.siding_fiber_direction_elasticity
  er = params.elasticity_ratio
  ec = params.column_fiber_orthogonal_direction_elasticity
  eb = params.beam_fiber_orthogonal_direction_elasticity
  dc = params.column_depth
  d = params.beam_height
  b = params.column_width
  bb = params.beam_width
  nc = params.column_substitution_coefficient
  nb = params.beam_substitution_coefficient

  # Kc variant selection: with dowels between boards and horizontal members
  # the friction-aware formula applies.
  column_side_inverted_rigidity =
    if Map.get(params, :shear_connecters_between_beams_and_sidings) do
      column_side_inverted_rigidity_with_shear_connecters(ec, dc, b, nc, t, w, h, l, nd, kd, fc)
    else
      column_side_inverted_rigidity(ec, dc, b, nc, t, w, h)
    end

  # 1/K = 1/Kd + 1/Ks + 1/Ka + 1/Kc(columns) + 1/Kc(beams)
  shear_connecter_inverted_rigidity_with_friction(kd, nd, w, h, l, fc) +
    siding_inverted_rigidity(g, t, l) +
    diagonal_siding_zone_inverted_rigidity(eh, er, t, l, h) +
    column_side_inverted_rigidity +
    beam_side_inverted_rigidity(eb, d, bb, nb, t, l, h)
end
@doc """
Resistance Pud at which the dowels yield in shear.

number_of_shear_connecters - number of dowels per board row
single_connecter_yield_resistance - yield shear resistance of a single dowel
frame_inner_width - clear (inside) width of the frame
frame_inner_height - clear (inside) height of the frame
friction_coefficient - friction coefficient between boards
"""
@spec shear_connecter_yield_resistance(
        number_of_shear_connecters :: pos_integer,
        single_connecter_yield_resistance :: number,
        frame_inner_width :: number,
        frame_inner_height :: number,
        friction_coefficient :: number
      ) :: float
def shear_connecter_yield_resistance(nd, py, l, h, fc)
    when is_integer(nd) and nd > 0 and py > 0 and l > 0 and h > 0 and fc >= 0 and
           h / l * fc < 1 do
  # Friction amplifies the dowel capacity; the guard h / l * fc < 1 keeps the
  # denominator strictly positive.
  friction_term = h * fc / l
  nd * py / (1 - friction_term)
end
@doc """
Ultimate resistance Pua when the top and bottom regions of the board's
compression zone reach full plastic compression.

fiber_direction_compressive_strength - compressive strength in the fiber direction
fiber_orthogonal_direction_compressive_strength - compressive strength perpendicular to the fiber
thickness - board thickness
frame_inner_width - clear (inside) width of the frame
frame_inner_height - clear (inside) height of the frame
yield_judgement_ratio - ratio of the fully plastic region to the diagonal length
"""
@spec diagonal_siding_zone_yield_resistance(
        fiber_direction_compressive_strength :: number,
        fiber_orthogonal_direction_compressive_strength :: number,
        thickness :: number,
        frame_inner_width :: number,
        frame_inner_height :: number,
        yield_judgement_ratio :: number
      ) :: float
def diagonal_siding_zone_yield_resistance(fc, fcv, t, l, h, r)
    when fc > 0 and fcv > 0 and t > 0 and l > 0 and h > 0 and r >= 0 and r <= 1 do
  # diag_sq is the squared diagonal length l^2 + h^2 of the panel.
  diag_sq = l * l + h * h
  r * t * fc * fcv * diag_sq * diag_sq / (fc * h * h + fcv * l * l) / h
end
@doc """
Ultimate resistance Pu of the board wall — the lesser of the dowel-shear
capacity (Pud) and the diagonal compression-zone capacity (Pua).

Accepted parameters:
number_of_shear_connecters - number of dowels per board row
single_connecter_yield_resistance - yield shear resistance of a single dowel
fiber_direction_compressive_strength - compressive strength in the fiber direction
fiber_orthogonal_direction_compressive_strength - compressive strength perpendicular to the fiber
thickness - board thickness
frame_inner_width - clear (inside) width of the frame
frame_inner_height - clear (inside) height of the frame
yield_judgement_ratio - ratio of the fully plastic region to the diagonal length (default 0.05)
friction_coefficient - friction coefficient between boards (default 0)
"""
@spec yield_resistance(map | keyword) :: float
def yield_resistance(params) when is_list(params) do
  # Keyword lists are normalized to a map so both input shapes share one body.
  yield_resistance(Enum.into(params, %{}))
end

def yield_resistance(%{} = params) do
  nd = params.number_of_shear_connecters
  py = params.single_connecter_yield_resistance
  l = params.frame_inner_width
  h = params.frame_inner_height
  # Optional keys with documented defaults.
  c = Map.get(params, :friction_coefficient, 0)
  fc = params.fiber_direction_compressive_strength
  fcv = params.fiber_orthogonal_direction_compressive_strength
  t = params.thickness
  r = Map.get(params, :yield_judgement_ratio, 0.05)

  # The wall yields at whichever mechanism gives the smaller capacity.
  min(
    shear_connecter_yield_resistance(nd, py, l, h, c),
    diagonal_siding_zone_yield_resistance(fc, fcv, t, l, h, r)
  )
end
end
|
lib/structex/hysteresis/inserted_wooden_siding_wall.ex
| 0.551574
| 0.476519
|
inserted_wooden_siding_wall.ex
|
starcoder
|
defmodule Rps do
  @moduledoc """
  The `RPS` app is composed by 4 main components:

  * `Rps.Games.Fsm` - This might be the main component, since it is the one
    that implements the logic of the game itself; it handles the game session.
    When a match is created, a new FSM (or session) instance can be created
    and then the game starts. The FSM handles the whole logic, controls every
    round and updates all needed information in the database. When the game
    finishes, the FSM updates the match with the final result and the winner
    user to increment the score. The FSM instances are supervised by
    `Rps.Games.Fsm.Supervisor`, which implements a `:simple_one_for_one`
    strategy, so the FSM instances are added and deleted dynamically
    (on-demand). Check the `Rps.Games.Fsm` module for more info.

  * `Rps.Games.Leaderboard` - This module holds the leaderboard or ranking
    about the game. It is implemented as a `GenServer` in order to provide an
    owner for the ETS tables (according to the ETS ownership best practices),
    but the access itself is done directly against the tables. The leaderboard
    itself is implemented using two ETS tables. For more information about
    its implementation check the `Rps.Games.Leaderboard` module doc.

  * REST API - The functionality of the game will be exposed via HTTP, using
    a REST API. There are several controllers, but the relevant ones are:
    * `RpsWeb.SessionController` to request access tokens (authentication).
    * `RpsWeb.MatchController` to create games and also join to existing
      games.
    * `RpsWeb.RoundController` to play, this controller allows players to
      send their moves but associated to a particular game.
    * `RpsWeb.LeaderboardController` to show the leaderboard or ranking.

  * Notifications - The notifications are implemented using Phoenix Channels,
    which uses WebSocket internally. The channel `RpsWeb.RoomChannel` allows
    users to subscribe to a particular topic, and the topic in this case is
    the `id` of the game (or match). Once the players are subscribed to the
    game (topic), they will receive notifications about the result of each
    round (`Rps.Games.Round`) and once the game finishes they will also
    receive the info about the game (`Rps.Games.Match`). The module in charge
    of sending these notifications is the `Rps.Games.Fsm`, since it is the one
    that controls the logic of the game (check this module, the private
    function `push` and the places where it is being invoked).

  ## Game Options

  The following options can be defined in your config file:

  * `:match_rounds` - max number of rounds per match. Default is `10`.
  * `:round_timeout` - timeout per round. Default is `10000`.
  """
end
|
lib/rps.ex
| 0.826081
| 0.715821
|
rps.ex
|
starcoder
|
defmodule TubeStreamer.Metrics do
  @moduledoc """
  Exometer metric subscriptions and BEAM statistics probes.

  `subscribe/2` translates a metric name/type pair into the subscription
  tuple(s) consumed by the metrics reporter; the remaining public functions
  are sampling callbacks backed by `:erlang` statistics.
  """
  require Logger
  alias :exometer, as: Exometer

  # The BEAM start time is written once in init/0 and never changes, so it is
  # deliberately excluded from periodic reporting.
  # FIX: this clause previously matched [:erlang, :beam, :star_time] (typo),
  # which never matched the real :start_time metric, so the skip was a no-op.
  def subscribe([:erlang, :beam, :start_time], _), do: []

  # Request counters share one series name; the HTTP status (4th element of
  # the metric name) becomes an influx tag.
  def subscribe([:api, :request, :counter, _status] = metric, :counter), do:
    {metric, :value, get_interval(), [series_name: "api.request.counter",
                                      tags: [status: {:from_name, 4}]]}

  def subscribe(metric, type) when type in [:gauge, :counter], do:
    {metric, :value, get_interval(), [series_name: format(metric)]}

  # Histograms report the 95th/99th percentiles and the max datapoint.
  def subscribe(metric, :histogram), do:
    for dp <- [95, 99, :max], do:
      {metric, dp, get_interval(), [series_name: format(metric)]}

  # Function-backed metrics expose dynamic datapoints; subscribe to each one
  # the metric currently reports.
  def subscribe(metric, :function) do
    case Exometer.get_value(metric) do
      {:ok, [_head | _] = values} ->
        for {datapoint, _} <- values, do:
          {metric, datapoint, get_interval(), [series_name: format(metric)]}
      _ ->
        Logger.error("unexpected error on metric subscribe: #{inspect metric}")
        []
    end
  end

  # Anything else is not reportable.
  def subscribe(_, _), do: []

  @doc """
  Records the BEAM start time and registers the meta-cache size probe.
  """
  def init() do
    Exometer.update([:erlang, :beam, :start_time], timestamp())
    Exometer.update_or_create([:cache, :meta, :size], 0,
      {:function, __MODULE__, :get_meta_cache_size, [], :value, [:value]},
      [])
  end

  # Reporting interval (ms) from app config, defaulting to one minute.
  defp get_interval() do
    default = [influx: [interval: 60_000]]
    Application.get_env(:tube_stream, :metrics, default)
    |> Keyword.get(:influx)
    |> Keyword.get(:interval)
  end

  # Joins a metric name list into a dotted series name, e.g.
  # [:a, :b, :c] -> "a.b.c".
  defp format([head | tail]) do
    to_string(head) <> to_string(for atom <- tail, do: "." <> to_string(atom))
  end

  # Probe callback registered in init/0.
  def get_meta_cache_size(), do: [value: TubeStreamer.Stream.MetaCache.size()]

  @doc """
  Samples cumulative garbage-collection statistics from the VM.
  """
  def garbage_collection do
    {number_of_gcs, words_reclaimed, _} = :erlang.statistics(:garbage_collection)
    [number_of_gcs: number_of_gcs, words_reclaimed: words_reclaimed]
  end

  @doc """
  Samples cumulative port I/O byte counts from the VM.
  """
  def io do
    {{:input, input}, {:output, output}} = :erlang.statistics(:io)
    [input: input, output: output]
  end

  @doc """
  Samples process count/limit and the scheduler run-queue length.
  """
  def process_info do
    process_count = :erlang.system_info(:process_count)
    process_limit = :erlang.system_info(:process_limit)
    run_queue_size = :erlang.statistics(:run_queue)
    [process_count: process_count, process_limit: process_limit, run_queue_size: run_queue_size]
  end

  @doc """
  Computes uptime (ms) from the start time recorded in init/0.
  """
  def update_uptime do
    {:ok, [{:value, start_time}, _ ]} = Exometer.get_value([:erlang, :beam, :start_time])
    uptime = timestamp() - start_time
    [value: round(uptime)]
  end

  @doc """
  Current system time in milliseconds.
  """
  def timestamp(), do: :erlang.system_time(:milli_seconds)
end
|
lib/tube_streamer/metrics.ex
| 0.784071
| 0.415225
|
metrics.ex
|
starcoder
|
defmodule BitFlagger do
  @moduledoc """
  A set of functions that manipulate bit flags.
  """
  use Bitwise, only_operators: true

  @type state :: non_neg_integer | binary
  @type index :: non_neg_integer

  @doc """
  Converts state to a list of boolean values.
  ## Examples
      iex> parse(0b1010, 4)
      [false, true, false, true]
      iex> parse(<<0b1010>>, 4)
      [false, true, false, true]
  """
  @spec parse(state, pos_integer) :: list(boolean)
  def parse(state, size) when (is_integer(state) or is_binary(state)) and is_integer(size) do
    # One boolean per bit position, least-significant bit first.
    Enum.map(0..(size - 1), &on?(state, &1))
  end

  @doc """
  Checks if the flag is turned on at a specified index.
  ## Examples
      iex> on?(0b0010, 1)
      true
      iex> on?(0b0010, 3)
      false
      iex> on?(<<0b0010>>, 1)
      true
      iex> on?(<<0b0010>>, 3)
      false
  """
  @spec on?(state, index) :: boolean
  def on?(state, index) when is_integer(state) and is_integer(index) do
    (state >>> index &&& 1) === 1
  end

  def on?(state, index) when is_binary(state) do
    on?(:binary.decode_unsigned(state), index)
  end

  @doc """
  Checks if the flag is turned off at a specified index.
  ## Examples
      iex> off?(0b0001, 1)
      true
      iex> off?(0b0001, 0)
      false
      iex> off?(<<0b0001>>, 1)
      true
      iex> off?(<<0b0001>>, 0)
      false
  """
  @spec off?(state, index) :: boolean
  def off?(state, index), do: not on?(state, index)

  @doc """
  Turns on the flag at a specified index.
  ## Examples
      iex> on(0b0000, 2)
      0b0100
      iex> on(<<0b0000>>, 2)
      <<0b0100>>
  """
  @spec on(state, index) :: state
  def on(state, index) when is_integer(state) and is_integer(index) do
    state ||| 1 <<< index
  end

  def on(state, index) when is_binary(state) do
    :binary.encode_unsigned(on(:binary.decode_unsigned(state), index))
  end

  @doc """
  Turns off the flag at a specified index.
  ## Examples
      iex> off(0b1111, 2)
      0b1011
      iex> off(<<0b1111>>, 2)
      <<0b1011>>
  """
  @spec off(state, index) :: state
  def off(state, index) when is_integer(state) and is_integer(index) do
    state &&& ~~~(1 <<< index)
  end

  def off(state, index) when is_binary(state) do
    :binary.encode_unsigned(off(:binary.decode_unsigned(state), index))
  end
end
|
lib/bit_flagger.ex
| 0.814385
| 0.502441
|
bit_flagger.ex
|
starcoder
|
defmodule Transpose do
  @moduledoc false

  # Transposes a matrix (list of rows) by folding each row into a growing
  # column-wise accumulator.
  def transpose(matrix), do: attach_row(matrix, [])

  @doc """
  Folds the rows of the original matrix into the result, one at a time:
  each row is distributed across the result's columns by `make_column/3`.
  When no rows remain, the accumulated columns are each reversed to restore
  their original order.
  """
  def attach_row([], result), do: reverse_rows(result, [])

  def attach_row([first_row | other_rows], result) do
    attach_row(other_rows, make_column(first_row, result, []))
  end

  @doc """
  Prepends each element of a row onto the corresponding column of the result
  matrix, building the new state in `acc`:

  * when the row is exhausted, the (reversed) accumulator is the new state;
  * when the result is still empty, each row element seeds a new singleton
    column;
  * otherwise each row element is pushed onto the front of its column.
  """
  def make_column([], _result, acc), do: Enum.reverse(acc)

  def make_column([head | tail], [], acc) do
    make_column(tail, [], [[head] | acc])
  end

  def make_column([head | tail], [column | rest_columns], acc) do
    make_column(tail, rest_columns, [[head | column] | acc])
  end

  @doc """
  Reverses every row of a matrix, then reverses the matrix itself (rows were
  accumulated in reverse while being built).
  """
  def reverse_rows([], result), do: Enum.reverse(result)

  def reverse_rows([first | rest], result) do
    reverse_rows(rest, [Enum.reverse(first) | result])
  end
end
|
lib/transpose.ex
| 0.770422
| 0.879716
|
transpose.ex
|
starcoder
|
defmodule Flexto do
  @moduledoc """
  Configuration-driven Ecto Schemata.
  """

  @doc """
  Adds additional associations dynamically from app config.

  Reads config for the given OTP application, under the name of the
  current module. Each key maps to an Ecto.Schema function:

  * `belongs_to`
  * `field`
  * `has_many`
  * `has_one`
  * `many_to_many`

  Each of these keys should map to a keyword list where the key is the
  name of the field or association and the value is one of:

  * A type
  * A tuple of type and options (keyword list)

  Example Schema:

  ```
  defmodule My.Schema do
    use Ecto.Schema
    import Flexto, only: [flex_schema: 1]

    schema "my_table" do
      field :name, :string # just normal schema things
      flex_schema(:my_app) # boom! give me the stuff
    end
  end
  ```

  Example configuration:

  ```
  config :my_app, My.Schema,
    belongs_to: [
      foo: Foo,                             # belongs_to :foo, Foo
      bar: {Bar, type: :integer},           # belongs_to :bar, Bar, type: :integer
    ],
    field: [
      foo: :string,                         # field :foo, :string
      bar: {:integer, default: 4},          # field :foo, :integer, default: 4
    ],
    has_one: [
      foo: Foo,                             # has_one :foo, Foo
      bar: {Bar, foreign_key: :the_bar_id}, # has_one :bar, Bar, foreign_key: :the_bar_id
    ]
    has_many: [
      foo: Foo,                             # has_many :foo, Foo
      bar: {Bar, foreign_key: :the_bar_id}, # has_many :bar, Bar, foreign_key: :the_bar_id
    ]
    many_to_many: [
      foo: Foo,                             # many_to_many :foo, Foo
      bar: {Bar, join_through: FooBar},     # many_to_many :bar, Bar, :join_through: FooBar
    ]
  ```

  This one won't work very well because we define `foo` and `bar` 5
  times each, but I think you get the point.

  Reading of configuration is done during compile time. The relations
  will be baked in during compilation, thus:

  * Do not expect this to work in release config.
  * You will need to rebuild all dependencies which use this macro
    when you change their configuration.
  """
  defmacro flex_schema(otp_app) when is_atom(otp_app) do
    module = __CALLER__.module
    # Compile-time config read — see the caveats in the @doc above.
    config = Application.get_env(otp_app, module, [])
    # Each config entry expands to zero or more Ecto.Schema macro calls,
    # spliced into the caller's schema block.
    code = Enum.flat_map(config, &flex_category/1)

    quote do
      unquote_splicing(code)
    end
  end

  # flex_schema impl
  @cats [:belongs_to, :field, :has_one, :has_many, :many_to_many]

  # A recognized category expands each of its items into one schema call.
  defp flex_category({cat, items}) when cat in @cats and is_list(items),
    do: Enum.map(items, &flex_association(cat, &1))

  defp flex_category(_), do: [] # skip over anything else, they might use it!

  # name: Type              => rel(name, Type, [])
  defp flex_association(rel, {name, type})
       when is_atom(name) and is_atom(type),
       do: flex_association(rel, name, type, [])

  # name: [opts]            => rel(name, opts)
  defp flex_association(rel, {name, opts})
       when is_atom(name) and is_list(opts),
       do: flex_association(rel, name, opts)

  # name: {Type, opts}      => rel(name, Type, opts)
  defp flex_association(rel, {name, {type, opts}})
       when is_atom(name) and is_atom(type) and is_list(opts),
       do: flex_association(rel, name, type, opts)

  # name: {[opts]}          => rel(name, opts) — 1-tuple wrapper accepted.
  defp flex_association(rel, {name, {opts}})
       when is_atom(name) and is_list(opts),
       do: flex_association(rel, name, opts)

  # Emits the 2-argument schema macro call, e.g. `has_many :foo, opts`.
  defp flex_association(rel, name, opts) do
    quote do
      unquote(rel)(unquote(name), unquote(opts))
    end
  end

  # Emits the 3-argument schema macro call, e.g. `belongs_to :foo, Foo, opts`.
  defp flex_association(rel, name, type, opts) do
    quote do
      unquote(rel)(unquote(name), unquote(type), unquote(opts))
    end
  end
end
|
lib/flexto.ex
| 0.838084
| 0.780662
|
flexto.ex
|
starcoder
|
defmodule ArangoXEcto.Migration do
  @moduledoc """
  Defines Ecto Migrations for ArangoDB

  **NOTE: ArangoXEcto dynamically creates collections for you and this method is discouraged unless
  you need to define indexes.**

  Migrations must use this module, otherwise migrations will not work. To do this, replace
  `use Ecto.Migration` with `use ArangoXEcto.Migration`.

  Since ArangoDB is schemaless, no fields need to be provided, only the collection name. First create
  a collection struct using the `collection/2` function. Then pass the collection struct to the
  `create/1` function. To create indexes it is a similar process using the `index/3` function.

  **Collections must be created BEFORE indexes.**

  To drop the collection on a migration down, do the same as creation except use the `drop/1` function
  instead of the `create/1` function. Indexes are automatically removed when the collection is removed
  and cannot be deleted using the `drop/1` function.

  ## Example

      defmodule MyProject.Repo.Migrations.CreateUsers do
        use ArangoXEcto.Migration

        def up do
          create(collection(:users))
          create(index("users", [:email]))
        end

        def down do
          drop(collection(:users))
        end
      end
  """

  @type index_option ::
          {:type, atom}
          | {:unique, boolean}
          | {:sparse, boolean}
          | {:deduplication, boolean}
          | {:minLength, integer}
          | {:geoJson, boolean}
          | {:expireAfter, integer}

  defmodule Collection do
    @moduledoc false
    # type is the ArangoDB collection type code: 2 = document, 3 = edge.
    defstruct [:name, :type]

    @type t :: %__MODULE__{}
  end

  defmodule Index do
    @moduledoc false
    defstruct [
      :collection_name,
      :fields,
      :sparse,
      :unique,
      :deduplication,
      :minLength,
      type: :hash
    ]

    @type t :: %__MODULE__{}
  end

  defmacro __using__(_) do
    # Init conn
    quote do
      import ArangoXEcto.Migration
    end
  end

  @doc """
  Creates a collection struct

  Used in functions that perform actions on the database.

  Accepts a collection type parameter that can either be `:document` or `:edge`, otherwise it will
  raise an error. The default option is `:document`.

  ## Examples

      iex> collection("users")
      %Collection{name: "users", type: 2}

      iex> collection("users", :edge)
      %Collection{name: "users", type: 3}
  """
  @spec collection(String.t(), atom()) :: Collection.t()
  def collection(collection_name, type \\ :document) do
    %Collection{name: collection_name, type: collection_type(type)}
  end

  @doc """
  Creates an edge collection struct

  Same as passing `:edge` as the second parameter to `collection/2`.
  """
  @spec edge(String.t()) :: Collection.t()
  def edge(edge_name), do: collection(edge_name, :edge)

  @doc """
  Creates an index struct

  Default index type is a hash. To change this pass the `:type` option in options.

  ## Options

  Options only apply to the creation of indexes and has no effect when using the `drop/1` function.

  - `:type` - The type of index to create
    - Accepts: `:fulltext`, `:geo`, `:hash`, `:persistent`, `:skiplist` or `:ttl`
  - `:unique` - If the index should be unique, defaults to false (hash, persistent & skiplist only)
  - `:sparse` - If index should be sparse, defaults to false (hash, persistent & skiplist only)
  - `:deduplication` - If duplication of array values should be turned off, defaults to true (hash & skiplist only)
  - `:minLength` - Minimum character length of words to index (fulltext only)
  - `:geoJson` - If a geo-spatial index on a location is constructed and geoJson is true, then the order
    within the array is longitude followed by latitude (geo only)
  - `:expireAfter` - Time in seconds after a document's creation it should count as `expired` (ttl only)

  ## Examples

  Create index on email field

      iex> index("users", [:email])
      %Index{collection_name: "users", fields: [:email]}

  Create dual index on email and ph_number fields

      iex> index("users", [:email, :ph_number])
      %Index{collection_name: "users", fields: [:email, :ph_number]}

  Create unique email index

      iex> index("users", [:email], unique: true)
      %Index{collection_name: "users", fields: [:email], unique: true}
  """
  @spec index(String.t(), [String.t()], [index_option]) :: Index.t()
  def index(collection_name, fields, opts \\ []) do
    keys =
      [collection_name: collection_name, fields: fields]
      |> Keyword.merge(opts)

    struct(Index, keys)
  end

  @doc """
  Creates an object

  Will create the passed object, either a collection or an index.

  ## Examples

  Create a collection

      iex> create(collection("users"))
      :ok

  Create an index

      iex> create(index("users", [:email]))
      :ok
  """
  @spec create(%Collection{} | %Index{}) :: :ok | {:error, binary()}
  def create(%Collection{} = collection) do
    {:ok, conn} = get_db_conn()

    case Arangox.post(conn, "/_api/collection", Map.from_struct(collection)) do
      {:ok, _} -> :ok
      {:error, %{status: status, message: message}} -> {:error, "#{status} - #{message}"}
    end
  end

  def create(%Index{collection_name: collection_name} = index) do
    {:ok, conn} = get_db_conn()

    case Arangox.post(
           conn,
           "/_api/index?collection=" <> get_collection_name(collection_name),
           Map.from_struct(index)
         ) do
      {:ok, _} -> :ok
      {:error, %{status: status, message: message}} -> {:error, "#{status} - #{message}"}
    end
  end

  @doc """
  Deletes an object

  Will delete an object passed, can only be a collection, indexes cannot be deleted here.

  ## Example

      iex> drop(collection("users"))
      :ok
  """
  @spec drop(%Collection{}) :: :ok | {:error, binary()}
  def drop(%Collection{name: collection_name}) do
    {:ok, conn} = get_db_conn()

    case Arangox.delete(conn, "/_api/collection/" <> get_collection_name(collection_name)) do
      {:ok, _} -> :ok
      {:error, %{status: status, message: message}} -> {:error, "#{status} - #{message}"}
    end
  end

  # Starts a fresh Arangox connection using the default repo's config with a
  # single-connection pool.
  defp get_db_conn do
    config(pool_size: 1)
    |> Arangox.start_link()
  end

  # Resolves the app's default Ecto repo; raises when none is configured.
  defp get_default_repo! do
    case Mix.Ecto.parse_repo([])
         |> List.first() do
      nil -> raise "No Default Repo Found"
      repo -> repo
    end
  end

  defp config(opts) do
    get_default_repo!().config()
    |> Keyword.merge(opts)
  end

  # ArangoDB collection type codes.
  defp collection_type(:document), do: 2
  defp collection_type(:edge), do: 3

  defp get_collection_name(name) when is_atom(name), do: Atom.to_string(name)
  defp get_collection_name(name) when is_binary(name), do: name
end
|
lib/arangox_ecto/migration.ex
| 0.874694
| 0.520557
|
migration.ex
|
starcoder
|
defmodule EnumParser do
@moduledoc """
EnumParser transform your Enum.
You must consider, transform enums have a significant cost
"""
@doc ~S"""
Transform map's keys to atom
# Example:
iex> EnumParser.to_atom_key(%{"key" => "value"})
%{key: "value"}
"""
def to_atom_key(enum) when is_map(enum) do
Map.new(enum, &to_atom_key/1)
end
@doc ~S"""
Tranform complex map's key to atom
# Example:
iex> EnumParser.to_atom_key(%{"key" => %{"key2" => "value"}})
%{key: %{key2: "value"}}
"""
def to_atom_key({k, v}) when is_map(v) do
cond do
is_bitstring(k) -> {String.to_atom(k), Map.new(v, &to_atom_key/1)}
is_atom(k) -> {k, Map.new(v, &to_atom_key/1)}
is_integer(k) -> {String.to_atom("#{k}"), Map.new(v, &to_atom_key/1)}
end
end
@doc ~S"""
Transform list's keys to atom
# Example:
iex> EnumParser.to_atom_key([%{"key" => "value"}, "key2"])
[%{key: "value"}, :key2]
"""
def to_atom_key(enum) when is_list(enum) do
Enum.map(enum, &to_atom_key/1)
end
@doc ~S"""
Transform a tuple {"key", "value"} to {:key, "value"}
# Example:
iex> EnumParser.to_atom_key({"key", "value"})
{:key, "value"}
"""
def to_atom_key({k, v}) when is_bitstring(k) do
{String.to_atom(k), v}
end
@doc ~S"""
Transform a tuple {1, "value"} to {:"1", "value"}
# Example:
iex> EnumParser.to_atom_key({1, "value"})
{:"1", "value"}
"""
def to_atom_key({k, v}) when is_number(k) do
{:"#{k}", v}
end
@doc ~S"""
Allow a tuple {:key, "value"}
# Example:
iex> EnumParser.to_atom_key({:key, "value"})
{:key, "value"}
"""
def to_atom_key({k, v}) when is_atom(k) do
{k, v}
end
@doc ~S"""
Transform the given string to atom
# Example
iex> EnumParser.to_atom_key("hello_world")
:hello_world
"""
def to_atom_key(k) when is_bitstring(k) do
String.to_atom(k)
end
@doc ~S"""
Transform enum's key to atom
# Examples:
iex> EnumParser.to_atom_key(%{"key" => %{"key2" => "value"}})
%{key: %{key2: "value"}}
iex> EnumParser.to_atom_key([%{"key" => "value"}, "key2"])
[%{key: "value"}, :key2]
"""
def to_atom_key(k) when is_atom(k) do
k
end
@doc ~S"""
Transform map's keys to string
# Example:
iex> EnumParser.to_string_key(%{key: "value"})
%{"key" => "value"}
"""
def to_string_key(enum) when is_map(enum) do
Map.new(enum, &to_string_key/1)
end
@doc ~S"""
Tranform complex map's key to string
# Example:
iex> EnumParser.to_atom_key(%{key: %{key2: "value"}})
%{"key" => %{"key2" => "value"}}
"""
def to_string_key({k, v}) when is_map(v) do
cond do
is_bitstring(k) -> {k, Map.new(v, &to_string_key/1)}
is_atom(k) -> {Atom.to_string(k), Map.new(v, &to_string_key/1)}
is_integer(k) -> {"#{k}", Map.new(v, &to_string_key/1)}
end
end
@doc ~S"""
Transform list's keys to string
# Example:
iex> EnumParser.to_string_key([%{key: "value"}, :key2])
[%{"key" => "value"}, "key2"]
"""
def to_string_key(enum) when is_list(enum) do
Enum.map(enum, &to_string_key/1)
end
@doc ~S"""
Transform a tuple {:key, "value"} to {"key", "value"}
# Example:
iex> EnumParser.to_string_key({:key, "value"})
{"key", "value"}
"""
def to_string_key({k, v}) when is_atom(k) do
{Atom.to_string(k), v}
end
@doc ~S"""
Transform a tuple {1, "value"} to {"1", "value"}
# Example:
iex> EnumParser.to_string_key({1, "value"})
{"1", "value"}
"""
def to_string_key({k, v}) when is_number(k) do
{Integer.to_string(k), v}
end
@doc ~S"""
Allow a tuple {"key", "value"}
# Example:
iex> EnumParser.to_string_key({"key", "value"})
{"key", "value"}
"""
def to_string_key({k, v}) when is_bitstring(k) do
{k, v}
end
@doc ~S"""
Transform the given atom to string
# Example
iex> EnumParser.to_string_key("hello_world")
:hello_world
"""
def to_string_key(k) when is_atom(k) do
Atom.to_string(k)
end
@doc ~S"""
Transform enum's key to string
# Examples:
iex> EnumParser.to_atom_key(%{key: %{key2: "value"}})
%{"key" => %{"key2" => "value"}}
iex> EnumParser.to_string_key([%{key: "value"}, :key2])
[%{"key" => "value"}, "key2"]
"""
def to_string_key(k) when is_bitstring(k) do
k
end
end
|
lib/enum_parser.ex
| 0.698535
| 0.430866
|
enum_parser.ex
|
starcoder
|
defmodule Solana.SPL.Governance do
@moduledoc """
Functions for interacting with the [SPL Governance
program](https://github.com/solana-labs/solana-program-library/tree/master/governance#readme).
The governance program aims to provide core building blocks for creating
Decentralized Autonomous Organizations (DAOs).
"""
alias Solana.{SPL.Token, Key, Instruction, Account, SystemProgram}
import Solana.Helpers
# NOTE(review): candidate value sets, presumably consumed by instruction
# builders later in this module (not visible in this chunk) — confirm usage.
@max_vote_weight_sources [:fraction, :absolute]
@vote_weight_sources [:deposit, :snapshot]
@vote_thresholds [:yes, :quorum]
@set_realm_authority_actions [:set, :set_checked, :remove]
@doc """
The Governance program's default instance ID. Organizations can also deploy
their own custom instance if they wish.
"""
# NOTE(review): "<KEY>" looks like a redacted base58 public key; restore the
# real program ID before use.
def id(), do: Solana.pubkey!("<KEY>")

@doc """
The governance program's test instance ID. This can be used to set up test DAOs.
"""
# NOTE(review): "<KEY>" looks like a redacted base58 public key; restore the
# real program ID before use.
def test_id(), do: Solana.pubkey!("<KEY>")
# The find_* helpers below derive deterministic addresses from seed lists.
# find_address/2 is presumably provided by the `import Solana.Helpers` above;
# its definition is not visible in this chunk — confirm the (seeds, program)
# argument order there.
@doc """
Finds the program metadata address for the given governance program. Should
have the seeds: `["metadata"]`
"""
@spec find_metadata_address(program :: Key.t()) :: Key.t()
def find_metadata_address(program), do: find_address(["metadata"], program)

@doc """
Finds the native SOL treasury address for the given `governance` account.
Should have the seeds: `["treasury", governance]`
"""
@spec find_native_treasury_address(program :: Key.t(), governance :: Key.t()) :: Key.t()
def find_native_treasury_address(program, governance) do
  find_address(["treasury", governance], program)
end

@doc """
Finds the realm address for the given `name`. Should have the seeds `["governance", name]`
"""
@spec find_realm_address(program :: Key.t(), name :: String.t()) :: Key.t()
def find_realm_address(program, name) do
  find_address(["governance", name], program)
end

@doc """
Finds a token holding address for the given community/council `mint`. Should
have the seeds: `["governance", realm, mint]`.
"""
@spec find_holding_address(program :: Key.t(), realm :: Key.t(), mint :: Key.t()) :: Key.t()
def find_holding_address(program, realm, mint) do
  find_address(["governance", realm, mint], program)
end
@doc """
Finds the realm config address for the given `realm`. Should have the seeds:
`["realm-config", realm]`.
"""
@spec find_realm_config_address(program :: Key.t(), realm :: Key.t()) :: Key.t()
def find_realm_config_address(program, realm) do
  find_address(["realm-config", realm], program)
end

@doc """
Finds the token owner record address for the given `realm`, `mint`, and
`owner`. Should have the seeds: `["governance", realm, mint, owner]`.
"""
@spec find_owner_record_address(
        program :: Key.t(),
        realm :: Key.t(),
        mint :: Key.t(),
        owner :: Key.t()
      ) :: Key.t()
def find_owner_record_address(program, realm, mint, owner) do
  find_address(["governance", realm, mint, owner], program)
end

@doc """
Finds the vote record address for the given `proposal` and `owner_record`.
Should have the seeds: `["governance", proposal, owner_record]`.
"""
@spec find_vote_record_address(
        program :: Key.t(),
        proposal :: Key.t(),
        owner_record :: Key.t()
      ) :: Key.t()
def find_vote_record_address(program, proposal, owner_record) do
  find_address(["governance", proposal, owner_record], program)
end
@doc """
Finds the account governance address for the given `realm` and `account`.
Should have the seeds: `["account-governance", realm, account]`.
"""
@spec find_account_governance_address(program :: Key.t(), realm :: Key.t(), account :: Key.t()) ::
Key.t()
def find_account_governance_address(program, realm, account) do
find_address(["account-governance", realm, account], program)
end
@doc """
Finds the program governance address for the given `realm` and `governed`
program. Should have the seeds: `["program-governance", realm, governed]`.
"""
@spec find_program_governance_address(program :: Key.t(), realm :: Key.t(), governed :: Key.t()) ::
Key.t()
def find_program_governance_address(program, realm, governed) do
find_address(["program-governance", realm, governed], program)
end
@doc """
Finds the mint governance address for the given `realm` and `mint`.
Should have the seeds: `["mint-governance", realm, mint]`.
"""
@spec find_mint_governance_address(program :: Key.t(), realm :: Key.t(), mint :: Key.t()) ::
Key.t()
def find_mint_governance_address(program, realm, mint) do
find_address(["mint-governance", realm, mint], program)
end
@doc """
Finds the token governance address for the given `realm` and `token`.
Should have the seeds: `["token-governance", realm, token]`.
"""
@spec find_token_governance_address(program :: Key.t(), realm :: Key.t(), token :: Key.t()) ::
Key.t()
def find_token_governance_address(program, realm, token) do
find_address(["token-governance", realm, token], program)
end
@doc """
Finds the `governance` proposal address for the given `mint` and `index`.
Should have the seeds: `["governance", governance, mint, index]`.
"""
@spec find_proposal_address(
program :: Key.t(),
governance :: Key.t(),
mint :: Key.t(),
index :: integer
) ::
Key.t()
def find_proposal_address(program, governance, mint, index) do
find_address(["governance", governance, mint, <<index::size(32)>>], program)
end
@doc """
Finds the `proposal`'s `signatory` record address. Should have the seeds:
`["governance", proposal, signatory]`.
"""
@spec find_signatory_record_address(
program :: Key.t(),
proposal :: Key.t(),
signatory :: Key.t()
) :: Key.t()
def find_signatory_record_address(program, proposal, signatory) do
find_address(["governance", proposal, signatory], program)
end
@doc """
Finds the `proposal`'s transaction address for the given `option` and `index`.
Should have the seeds: `["governance", proposal, option, index]`.
"""
@spec find_transaction_address(
program :: Key.t(),
proposal :: Key.t(),
index :: non_neg_integer,
option :: non_neg_integer
) :: Key.t()
def find_transaction_address(program, proposal, index, option \\ 0) do
find_address(["governance", proposal, <<option::size(8)>>, <<index::size(16)>>], program)
end
# Wraps `Key.find_address/2`, collapsing its `{:ok, address, bump}` success
# shape to `{:ok, address}`; any non-matching (error) result falls through
# the `with` and is returned untouched.
defp find_address(seeds, program) do
  with {:ok, address, _bump} <- Key.find_address(seeds, program) do
    {:ok, address}
  end
end
@doc false
# NimbleOptions custom validator: accepts `{type, value}` where `type` is one
# of @max_vote_weight_sources and `value` is a positive integer.
def validate_max_vote_weight_source({type, value} = source)
    when type in @max_vote_weight_sources and is_integer(value) and value > 0 do
  {:ok, source}
end

def validate_max_vote_weight_source(_), do: {:error, "invalid max vote weight source"}
@doc false
# NimbleOptions custom validator for proposal vote types.
def validate_vote_type(:single), do: {:ok, :single}

def validate_vote_type({:multiple, choices}) when is_integer(choices) and choices > 0 do
  {:ok, {:multiple, choices}}
end

def validate_vote_type(other) do
  {:error, "expected :single or {:multiple, n}, got: #{inspect(other)}"}
end
@doc false
# NimbleOptions custom validator: a vote is `{rank, weight}` with a u8 rank
# and a percentage weight.
def validate_vote({rank, weight}) when rank in 0..255 and weight in 0..100 do
  {:ok, {rank, weight}}
end

def validate_vote(other), do: {:error, "Expected a {rank, weight} tuple, got #{inspect(other)}"}
@doc false
# NimbleOptions custom validator: value must be a `Solana.Account` struct.
def validate_account(%Account{} = account), do: {:ok, account}
def validate_account(other), do: {:error, "expected a Solana.Account, got #{inspect(other)}"}
@doc false
# NimbleOptions custom validator: value must be a `Solana.Instruction` struct.
def validate_instruction(%Instruction{} = ix), do: {:ok, ix}
def validate_instruction(other), do: {:error, "expected a Solana.Instruction, got: #{inspect(other)}"}
@doc false
# NimbleOptions custom validator: a threshold is `{type, percentage}` with
# `type` in @vote_thresholds and a percentage in 1..100.
def validate_threshold({type, pct} = threshold) when type in @vote_thresholds and pct in 1..100,
  do: {:ok, threshold}

def validate_threshold(other) do
  {:error, "expected {:yes, percentage} or {:quorum, percentage}, got: #{inspect(other)}"}
end
# Options schema for create_realm/1; rendered into its docs via NimbleOptions.docs/1.
@create_realm_schema [
  payer: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The account which will pay for the new realm account's creation."
  ],
  authority: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the authority account for the new realm."
  ],
  community_mint: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Community token mint for the new realm."
  ],
  council_mint: [
    type: {:custom, Key, :check, []},
    # doc was a copy-paste of :community_mint's; this is the (optional) council mint
    doc: "Council token mint for the new realm."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ],
  voter_weight_addin: [
    type: {:custom, Key, :check, []},
    doc: "Community voter weight add-in program ID."
  ],
  max_voter_weight_addin: [
    type: {:custom, Key, :check, []},
    doc: "Max Community voter weight add-in program ID."
  ],
  name: [type: :string, required: true, doc: "The name of the new realm."],
  max_vote_weight_source: [
    type: {:custom, __MODULE__, :validate_max_vote_weight_source, []},
    required: true,
    doc: """
    The source of max vote weight used for voting. Values below 100%
    mint supply can be used when the governing token is fully minted but not
    distributed yet.
    """
  ],
  minimum: [
    type: :non_neg_integer,
    required: true,
    doc: "Minimum number of community tokens a user must hold to create a governance."
  ]
]
@doc """
Generates instructions which create a new realm.
## Options
#{NimbleOptions.docs(@create_realm_schema)}
"""
def create_realm(opts) do
with {:ok, %{program: program, name: name} = params} <- validate(opts, @create_realm_schema),
{:ok, realm} <- find_realm_address(program, name),
{:ok, realm_config} <- find_realm_config_address(program, realm),
addin_accts = voter_weight_addin_accounts(params),
{:ok, council_accts} <- council_accounts(Map.put(params, :realm, realm)),
{:ok, holding_address} <- find_holding_address(program, realm, params.community_mint) do
%Instruction{
program: program,
accounts:
List.flatten([
%Account{key: realm, writable?: true},
%Account{key: params.authority},
%Account{key: params.community_mint},
%Account{key: holding_address, writable?: true},
%Account{key: params.payer, signer?: true},
%Account{key: SystemProgram.id()},
%Account{key: Token.id()},
%Account{key: Solana.rent()},
council_accts,
addin_accts,
if(addin_accts != [], do: [%Account{key: realm_config, writable?: true}], else: [])
]),
data:
Instruction.encode_data([0, {byte_size(name), 32}, name | realm_config_data(params)])
}
else
error -> error
end
end
# When the realm has a council mint, contributes that mint plus its (writable)
# holding account to the instruction's account list; otherwise contributes nothing.
defp council_accounts(%{council_mint: mint, realm: realm, program: program}) do
  with {:ok, holding} <- find_holding_address(program, realm, mint) do
    {:ok, [%Account{key: mint}, %Account{key: holding, writable?: true}]}
  end
end

defp council_accounts(_params), do: {:ok, []}
# Builds `%Account{}` entries for whichever voter-weight add-in program IDs
# were supplied in `params` (either, both, or neither).
defp voter_weight_addin_accounts(params) do
  for key <- [:voter_weight_addin, :max_voter_weight_addin],
      Map.has_key?(params, key) do
    %Account{key: Map.fetch!(params, key)}
  end
end
# Serializes the realm-config portion of the CreateRealm instruction data.
# Field order and widths are part of the on-chain layout — do not reorder.
#
# Fix: the original list had a trailing comma after the last element
# (`unary(...),` immediately before `]`), which is a SyntaxError in Elixir.
defp realm_config_data(params) do
  [
    # 1 if a council mint was supplied, else 0 (Option<Pubkey> presence flag)
    unary(Map.has_key?(params, :council_mint)),
    {params.minimum, 64},
    # enum tag of the max-vote-weight source type
    Enum.find_index(
      @max_vote_weight_sources,
      &(&1 == elem(params.max_vote_weight_source, 0))
    ),
    {elem(params.max_vote_weight_source, 1), 64},
    # presence flags for the optional voter-weight add-in program IDs
    unary(Map.has_key?(params, :voter_weight_addin)),
    unary(Map.has_key?(params, :max_voter_weight_addin))
  ]
end
# Options schema for deposit/1; rendered into its docs via NimbleOptions.docs/1.
@deposit_schema [
  owner: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The `from` token account's owner."
  ],
  authority: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The `from` token account's transfer authority."
  ],
  realm: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the realm to deposit user tokens into."
  ],
  mint: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The mint for the token the user wishes to deposit."
  ],
  from: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The user's token account."
  ],
  payer: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: """
    The account which will pay to create the user's token owner record account
    (if necessary).
    """
  ],
  amount: [
    type: :pos_integer,
    required: true,
    doc: "The number of tokens to transfer."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ]
]
@doc """
Generates instructions which deposit governing tokens -- community or council
-- to the given `realm`.
This establishes a user's voter weight to be used when voting within the
`realm`.
Note: If a subsequent (top up) deposit is made, the user's vote weights on
active proposals *won't* be updated automatically. To do this, the user must
relinquish their votes and vote again.
## Options
#{NimbleOptions.docs(@deposit_schema)}
"""
def deposit(opts) do
with {:ok, params} <- validate(opts, @deposit_schema),
%{program: program, realm: realm, mint: mint, owner: owner} = params,
{:ok, holding} <- find_holding_address(program, realm, mint),
{:ok, owner_record} <- find_owner_record_address(program, realm, mint, owner) do
%Instruction{
program: program,
accounts: [
%Account{key: realm},
%Account{key: holding, writable?: true},
%Account{key: params.from, writable?: true},
%Account{key: owner, signer?: true},
%Account{key: params.authority, signer?: true},
%Account{key: owner_record, writable?: true},
%Account{key: params.payer, signer?: true},
%Account{key: SystemProgram.id()},
%Account{key: Token.id()},
%Account{key: Solana.rent()}
],
data: Instruction.encode_data([1, {params.amount, 64}])
}
else
error -> error
end
end
# Options schema for withdraw/1; rendered into its docs via NimbleOptions.docs/1.
@withdraw_schema [
  owner: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The `to` token account's owner."
  ],
  realm: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the realm to withdraw governance tokens from."
  ],
  mint: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The mint for the token the user wishes to withdraw."
  ],
  to: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The user's token account. All tokens will be transferred to this account."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ]
]
@doc """
Generates instructions which withdraw governing tokens -- community or council
-- from the given `realm`.
This downgrades a user's voter weight within the `realm`.
Note: It's only possible to withdraw tokens if the user doesn't have any
outstanding active votes. Otherwise, the user needs to relinquish those
votes before withdrawing their tokens.
## Options
#{NimbleOptions.docs(@withdraw_schema)}
"""
# TODO Create test case
def withdraw(opts) do
with {:ok, params} <- validate(opts, @withdraw_schema),
%{program: program, realm: realm, mint: mint, owner: owner} = params,
{:ok, holding} <- find_holding_address(program, realm, mint),
{:ok, owner_record} <- find_owner_record_address(program, realm, mint, owner) do
%Instruction{
program: program,
accounts: [
%Account{key: realm},
%Account{key: holding, writable?: true},
%Account{key: params.to, writable?: true},
%Account{key: owner, signer?: true},
%Account{key: owner_record, writable?: true},
%Account{key: Token.id()}
],
data: Instruction.encode_data([2])
}
else
error -> error
end
end
# Options schema for delegate/1; rendered into its docs via NimbleOptions.docs/1.
@delegate_schema [
  owner: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The current non-delegated holder of voting rights within the `realm`."
  ],
  record: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The Token Owner Record account for which the `owner` wishes to delegate rights."
  ],
  to: [
    type: {:custom, Key, :check, []},
    doc: """
    The account which will receive voter rights from the `owner` in the given
    `realm`. **Not including this argument will rescind the current delegate's
    voting rights.**
    """
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ]
]
@doc """
Generates instructions which set the new governance delegate for an ownership
account within the given `realm` and `mint`.
The delegate can vote or create Proposals on behalf of the `owner`.
Note: Delegating voting rights doesn't take them away from the original owner.
## Options
#{NimbleOptions.docs(@delegate_schema)}
"""
# TODO Create test case
def delegate(opts) do
case validate(opts, @delegate_schema) do
{:ok, params} ->
%Instruction{
program: params.program,
accounts: [
%Account{key: params.owner, signer?: true},
%Account{key: params.record, writable?: true}
],
data: Instruction.encode_data([3 | delegate_data(params)])
}
error ->
error
end
end
# Encodes the optional new delegate as an Option-style payload:
# `[1, key]` when a `:to` delegate is present, `[0]` (rescind) when absent.
defp delegate_data(params) do
  case params do
    %{to: new_delegate} -> [1, new_delegate]
    _ -> [0]
  end
end
# Nested options schema describing a GovernanceConfig; used as the `:config`
# option of the create_*_governance builders below.
@governance_config_schema [
  threshold: [
    type: {:custom, __MODULE__, :validate_threshold, []},
    required: true,
    doc: "The type of vote threshold used to resolve a Proposal vote."
  ],
  vote_weight_source: [
    type: {:in, @vote_weight_sources},
    default: :deposit,
    doc: "The source of vote weight for voters."
  ],
  minimum_community: [
    type: :non_neg_integer,
    default: 1,
    doc: "The minimum number of community tokens an owner must have to create a proposal."
  ],
  minimum_council: [
    type: :non_neg_integer,
    default: 1,
    doc: "The minimum number of council tokens an owner must have to create a proposal."
  ],
  duration: [
    type: :non_neg_integer,
    required: true,
    doc: "Time limit (in seconds) for a proposal to be open for voting."
  ],
  cooldown: [
    type: :non_neg_integer,
    default: 0,
    doc: """
    The time period (in seconds) within which a proposal can still be cancelled after voting has
    ended.
    """
  ],
  delay: [
    type: :non_neg_integer,
    default: 0,
    doc: """
    Minimum wait time (in seconds) after a proposal has been voted on before an instruction can
    be executed.
    """
  ]
]
# Serializes a validated governance config (see @governance_config_schema)
# into the on-chain GovernanceConfig field order. Field order and integer
# widths are part of the wire format — do not reorder.
defp governance_config_data(config) do
  [
    # threshold: enum tag (index into @vote_thresholds) then its percentage
    Enum.find_index(@vote_thresholds, &(&1 == elem(config.threshold, 0))),
    elem(config.threshold, 1),
    {config.minimum_community, 64},
    {config.delay, 32},
    {config.duration, 32},
    # vote weight source as its enum tag
    Enum.find_index(@vote_weight_sources, &(&1 == config.vote_weight_source)),
    {config.cooldown, 32},
    {config.minimum_council, 64}
  ]
end
# Options schema for create_account_governance/1.
@create_account_governance_schema [
  payer: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The account which will pay for the new Account Governance account's creation."
  ],
  owner_record: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The address of the governing Token Owner Record."
  ],
  authority: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance authority."
  ],
  realm: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the realm the created Governance belongs to."
  ],
  governed: [
    type: {:custom, Key, :check, []},
    required: true,
    # fixed typo: "goverened" -> "governed"
    doc: "The account which will be governed by the newly created governance."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ],
  voter_weight_record: [
    type: {:custom, Key, :check, []},
    doc: "Public key of the voter weight record account."
  ],
  max_voter_weight_record: [
    type: {:custom, Key, :check, []},
    doc: "Public key of the max voter weight record account."
  ],
  config: [
    type: {:custom, Solana.Helpers, :validate, [@governance_config_schema]},
    required: true,
    doc: """
    The desired governance configuration.
    ### Options
    #{NimbleOptions.docs(@governance_config_schema)}
    """
  ]
]
@doc """
Generates instructions which create an Account Governance account, used to
govern an arbitrary account.
## Options
#{NimbleOptions.docs(@create_account_governance_schema)}
"""
# TODO Create test case
def create_account_governance(opts) do
with {:ok, params} <- validate(opts, @create_account_governance_schema),
%{program: program, realm: realm, governed: governed} = params,
{:ok, account_governance} <- find_account_governance_address(program, realm, governed),
{:ok, voter_weight_accts} <- voter_weight_accounts(params) do
%Instruction{
program: program,
accounts: [
%Account{key: realm},
%Account{key: account_governance, writable?: true},
%Account{key: governed},
%Account{key: params.owner_record},
%Account{key: params.payer, signer?: true},
%Account{key: SystemProgram.id()},
%Account{key: Solana.rent()},
%Account{key: params.authority, signer?: true}
| voter_weight_accts
],
data: Instruction.encode_data([4 | governance_config_data(params.config)])
}
else
error -> error
end
end
# Options schema for create_program_governance/1.
@create_program_governance_schema [
  payer: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The account which will pay for the new Program Governance account's creation."
  ],
  owner_record: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The address of the governing Token Owner Record."
  ],
  authority: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance authority."
  ],
  realm: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the realm the created Governance belongs to."
  ],
  governed: [
    type: {:custom, Key, :check, []},
    required: true,
    # fixed typo: "goverened" -> "governed"
    doc: "The program which will be governed by the newly created governance."
  ],
  upgrade_authority: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The current upgrade authority of the `governed` program."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ],
  voter_weight_record: [
    type: {:custom, Key, :check, []},
    doc: "Public key of the voter weight record account."
  ],
  max_voter_weight_record: [
    type: {:custom, Key, :check, []},
    doc: "Public key of the max voter weight record account."
  ],
  transfer_upgrade_authority?: [
    type: :boolean,
    default: false,
    doc: """
    Whether or not the `governed` program's upgrade authority should be
    transferred to the governance PDA. This can also be done later.
    """
  ],
  config: [
    type: {:custom, Solana.Helpers, :validate, [@governance_config_schema]},
    required: true,
    doc: """
    The desired governance configuration.
    ### Options
    #{NimbleOptions.docs(@governance_config_schema)}
    """
  ]
]
@doc """
Generates instructions which create an Program Governance account, used to
govern an upgradable Solana program.
## Options
#{NimbleOptions.docs(@create_program_governance_schema)}
"""
# TODO Create test case
def create_program_governance(opts) do
with {:ok, params} <- validate(opts, @create_program_governance_schema),
%{program: program, realm: realm, governed: governed} = params,
{:ok, program_governance} <- find_program_governance_address(program, realm, governed),
{:ok, program_data} <- find_program_data(program),
{:ok, voter_weight_accts} <- voter_weight_accounts(params) do
%Instruction{
program: program,
accounts: [
%Account{key: realm},
%Account{key: program_governance, writable?: true},
%Account{key: governed},
%Account{key: program_data, writable?: true},
%Account{key: params.upgrade_authority, signer?: true},
%Account{key: params.owner_record},
%Account{key: params.payer, signer?: true},
%Account{key: bpf_loader()},
%Account{key: SystemProgram.id()},
%Account{key: Solana.rent()},
%Account{key: params.authority, signer?: true}
| voter_weight_accts
],
data:
Instruction.encode_data(
List.flatten([
5,
governance_config_data(params.config),
unary(params.transfer_upgrade_authority?)
])
)
}
else
error -> error
end
end
# The upgradeable BPF loader's program ID.
defp bpf_loader(), do: Solana.pubkey!("BPFLoaderUpgradeab1e11111111111111111111111")
# Derives the ProgramData PDA for an upgradeable program: the program's own
# key seeded under the upgradeable BPF loader.
defp find_program_data(program), do: find_address([program], bpf_loader())
# Options schema for create_mint_governance/1.
@create_mint_governance_schema [
  payer: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The account which will pay for the new Mint Governance account's creation."
  ],
  owner_record: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The address of the governing Token Owner Record."
  ],
  authority: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance authority."
  ],
  realm: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the realm the created Governance belongs to."
  ],
  governed: [
    type: {:custom, Key, :check, []},
    required: true,
    # fixed typo: "goverened" -> "governed"
    doc: "The mint which will be governed by the newly created governance."
  ],
  mint_authority: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The current mint authority of the `mint` to be governed."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ],
  voter_weight_record: [
    type: {:custom, Key, :check, []},
    doc: "Public key of the voter weight record account."
  ],
  max_voter_weight_record: [
    type: {:custom, Key, :check, []},
    doc: "Public key of the max voter weight record account."
  ],
  config: [
    type: {:custom, Solana.Helpers, :validate, [@governance_config_schema]},
    required: true,
    doc: """
    The desired governance configuration.
    ### Options
    #{NimbleOptions.docs(@governance_config_schema)}
    """
  ],
  transfer_mint_authority?: [
    type: :boolean,
    default: false,
    doc: """
    Whether or not the `governed` mint's authority should be transferred to
    the governance PDA. This can also be done later.
    """
  ]
]
@doc """
Generates instructions which create an Mint Governance account, used to
govern a token mint.
## Options
#{NimbleOptions.docs(@create_mint_governance_schema)}
"""
def create_mint_governance(opts) do
with {:ok, params} <- validate(opts, @create_mint_governance_schema),
%{program: program, realm: realm, governed: mint} = params,
{:ok, mint_governance} <- find_mint_governance_address(program, realm, mint),
{:ok, voter_weight_accts} <- voter_weight_accounts(params) do
%Instruction{
program: program,
accounts: [
%Account{key: realm},
%Account{key: mint_governance, writable?: true},
%Account{key: mint, writable?: true},
%Account{key: params.mint_authority, signer?: true},
%Account{key: params.owner_record},
%Account{key: params.payer, signer?: true},
%Account{key: Token.id()},
%Account{key: SystemProgram.id()},
%Account{key: params.authority, signer?: true}
| voter_weight_accts
],
data:
Instruction.encode_data(
List.flatten([
17,
governance_config_data(params.config),
unary(params.transfer_mint_authority?)
])
)
}
else
error -> error
end
end
# Options schema for create_token_governance/1.
@create_token_governance_schema [
  payer: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The account which will pay for the new Token Governance account's creation."
  ],
  owner_record: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The address of the governing Token Owner Record."
  ],
  authority: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance authority."
  ],
  realm: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the realm the created Governance belongs to."
  ],
  governed: [
    type: {:custom, Key, :check, []},
    required: true,
    # fixed typo: "goverened" -> "governed"
    doc: "The account which will be governed by the newly created governance."
  ],
  owner: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The current owner of the `governed` token account."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ],
  voter_weight_record: [
    type: {:custom, Key, :check, []},
    doc: "Public key of the voter weight record account."
  ],
  max_voter_weight_record: [
    type: {:custom, Key, :check, []},
    doc: "Public key of the max voter weight record account."
  ],
  config: [
    type: {:custom, Solana.Helpers, :validate, [@governance_config_schema]},
    required: true,
    doc: """
    The desired governance configuration.
    ### Options
    #{NimbleOptions.docs(@governance_config_schema)}
    """
  ],
  transfer_ownership?: [
    type: :boolean,
    default: false,
    doc: """
    Whether or not the `governed` token's ownership should be transferred to
    the governance PDA. This can also be done later.
    """
  ]
]
@doc """
Generates instructions which create a Token Governance account, used to
govern a token account.
## Options
#{NimbleOptions.docs(@create_token_governance_schema)}
"""
# TODO Create test case
def create_token_governance(opts) do
with {:ok, params} <- validate(opts, @create_token_governance_schema),
%{program: program, realm: realm, governed: governed} = params,
{:ok, token_governance} <- find_token_governance_address(program, realm, governed),
{:ok, voter_weight_accts} <- voter_weight_accounts(params) do
%Instruction{
program: program,
accounts: [
%Account{key: realm},
%Account{key: token_governance, writable?: true},
%Account{key: governed, writable?: true},
%Account{key: params.owner, signer?: true},
%Account{key: params.owner_record},
%Account{key: params.payer, signer?: true},
%Account{key: Token.id()},
%Account{key: SystemProgram.id()},
%Account{key: Solana.rent()},
%Account{key: params.authority, signer?: true}
| voter_weight_accts
],
data:
Instruction.encode_data(
List.flatten([
18,
governance_config_data(params.config),
unary(params.transfer_ownership?)
])
)
}
else
error -> error
end
end
# Options schema for create_proposal/1.
@create_proposal_schema [
  payer: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The account which will pay for the new Proposal account's creation."
  ],
  owner: [
    type: {:custom, Key, :check, []},
    required: true,
    # fixed typo: "propsal" -> "proposal"
    doc: "Public key of the token owner who is making the proposal."
  ],
  authority: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance authority."
  ],
  mint: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The governing token mint."
  ],
  governance: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The governance account for which this proposal is made."
  ],
  realm: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the realm the created Governance belongs to."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ],
  voter_weight_record: [
    type: {:custom, Key, :check, []},
    doc: "Public key of the voter weight record account."
  ],
  max_voter_weight_record: [
    type: {:custom, Key, :check, []},
    doc: "Public key of the max voter weight record account."
  ],
  name: [type: :string, required: true, doc: "The proposal name."],
  description: [type: :string, required: true, doc: "The proposal explanation."],
  vote_type: [
    type: {:custom, __MODULE__, :validate_vote_type, []},
    required: true,
    doc: "The proposal's vote type."
  ],
  options: [type: {:list, :string}, required: true, doc: "Proposal options."],
  has_deny_option?: [
    type: :boolean,
    default: true,
    doc: """
    Indicates whether this proposal has a 'deny' option. Must be `true` if the
    proposal wants to include executable instructions.
    """
  ],
  index: [
    type: :non_neg_integer,
    required: true,
    doc: "The proposal index, i.e. this is the Nth proposal for this governance."
  ]
]
@doc """
Generates instructions which create a Proposal account.
Proposals allow governance token owners to propose governance changes (i.e.
instructions) to an account that will go into effect (i.e. be executed) at
some point in the future.
## Options
#{NimbleOptions.docs(@create_proposal_schema)}
"""
def create_proposal(opts) do
with {:ok, params} <- validate(opts, @create_proposal_schema),
:ok <- check_proposal_options(params),
%{program: program, realm: realm, governance: governance, owner: owner} = params,
{:ok, proposal} <- find_proposal_address(program, governance, params.mint, params.index),
{:ok, owner_record} <- find_owner_record_address(program, realm, params.mint, owner),
{:ok, voter_weight_accts} <- voter_weight_accounts(params) do
%Instruction{
program: program,
accounts: [
%Account{key: realm},
%Account{key: proposal, writable?: true},
%Account{key: governance, writable?: true},
%Account{key: owner_record, writable?: true},
%Account{key: params.mint},
%Account{key: params.authority, signer?: true},
%Account{key: params.payer, signer?: true},
%Account{key: SystemProgram.id()},
%Account{key: Solana.rent()},
%Account{key: clock()}
| voter_weight_accts
],
data:
Instruction.encode_data(
List.flatten([
6,
encode_string(params.name),
encode_string(params.description),
encode_vote_type(params.vote_type),
{length(params.options), 32},
Enum.map(params.options, &encode_string/1),
unary(params.has_deny_option?)
])
)
}
else
error -> error
end
end
# A MultiChoice proposal cannot require more winning choices than it has options.
# https://docs.rs/spl-governance/2.1.4/spl_governance/state/proposal/enum.VoteType.html#variant.MultiChoice
defp check_proposal_options(%{vote_type: {:multiple, n}, options: options})
     when n > length(options) do
  {:error, "number of choices greater than options available"}
end

# Single-choice proposals (and valid multi-choice ones) need no extra check.
defp check_proposal_options(_), do: :ok
# Borsh-style string: u32 length prefix followed by the raw bytes.
# TODO replace with {str, "borsh"} once `solana` package is updated
defp encode_string(str), do: [{byte_size(str), 32}, str]
# VoteType enum: 0 = SingleChoice; 1 = MultiChoice with a u16 choice count.
defp encode_vote_type(:single), do: [0]
defp encode_vote_type({:multiple, n}), do: [1, {n, 16}]
# Options schema for add_signatory/1.
@add_signatory_schema [
  proposal: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "proposal account to add the `signatory` to."
  ],
  signatory: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "the signatory to add to the `proposal`."
  ],
  authority: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance authority (or its delegate)."
  ],
  payer: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The account which will pay for the new signatory record's creation."
  ],
  owner_record: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the `proposal` owner's Token Owner Record account."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ]
]
@doc """
Generates the instructions to add a `signatory` to the `proposal`.
This means that the `proposal` can't leave Draft state until this `signatory`
signs off on it.
## Options
#{NimbleOptions.docs(@add_signatory_schema)}
"""
# TODO create test case
def add_signatory(opts) do
with {:ok, params} <- validate(opts, @add_signatory_schema),
%{program: program, proposal: proposal, signatory: signatory} = params,
{:ok, signatory_record} <- find_signatory_record_address(program, proposal, signatory) do
%Instruction{
program: program,
accounts: [
%Account{key: proposal, writable?: true},
%Account{key: params.owner_record},
%Account{key: params.authority, signer?: true},
%Account{key: signatory_record, writable?: true},
%Account{key: params.payer, signer?: true},
%Account{key: SystemProgram.id()},
%Account{key: Solana.rent()}
],
data: Instruction.encode_data([7, signatory])
}
else
error -> error
end
end
# Options schema for remove_signatory/1.
@remove_signatory_schema [
  proposal: [
    type: {:custom, Key, :check, []},
    required: true,
    # fixed copy-paste from @add_signatory_schema ("add ... to")
    doc: "proposal account to remove the `signatory` from."
  ],
  signatory: [
    type: {:custom, Key, :check, []},
    required: true,
    # fixed copy-paste from @add_signatory_schema ("add ... to")
    doc: "the signatory to remove from the `proposal`."
  ],
  authority: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance authority (or its delegate)."
  ],
  owner_record: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the `proposal` owner's Token Owner Record account."
  ],
  beneficiary: [
    type: {:custom, Key, :check, []},
    required: true,
    doc:
      "Public key of the account to receive the disposed signatory record account's lamports."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ]
]
@doc """
Generates the instructions to remove a `signatory` from the `proposal`.
## Options
#{NimbleOptions.docs(@remove_signatory_schema)}
"""
# TODO create test case
def remove_signatory(opts) do
with {:ok, params} <- validate(opts, @remove_signatory_schema),
%{program: program, proposal: proposal, signatory: signatory} = params,
{:ok, signatory_record} <- find_signatory_record_address(program, proposal, signatory) do
%Instruction{
program: program,
accounts: [
%Account{key: proposal, writable?: true},
%Account{key: params.owner_record},
%Account{key: params.authority, signer?: true},
%Account{key: signatory_record, writable?: true},
%Account{key: params.beneficiary, writable?: true}
],
data: Instruction.encode_data([8, signatory])
}
else
error -> error
end
end
@insert_transaction_schema [
  governance: [type: {:custom, Key, :check, []}, required: true, doc: "The governance account."],
  proposal: [type: {:custom, Key, :check, []}, required: true, doc: "The proposal account."],
  owner_record: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the `proposal` owner's Token Owner Record account."
  ],
  authority: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance authority (or its delegate)."
  ],
  payer: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The account which will pay for the Proposal Instruction account's creation."
  ],
  option: [
    type: :non_neg_integer,
    default: 0,
    doc: "The index of the option the instruction is for."
  ],
  index: [
    type: :non_neg_integer,
    required: true,
    doc: "The index where the `instruction` will be inserted."
  ],
  delay: [
    type: :non_neg_integer,
    default: 0,
    doc: """
    Wait time (in seconds) between the vote period ending and the
    `instruction` being eligible for execution.
    """
  ],
  instructions: [
    type: {:list, {:custom, __MODULE__, :validate_instruction, []}},
    required: true,
    doc: "Data for the instructions to be executed"
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ]
]
@doc """
Generates the instructions to insert a transaction into the proposal at the
given `index`.
New transactions must be inserted at the end of the range indicated by the
proposal's `transaction_next_index` property. If a transaction replaces an
existing transaction at a given `index`, the old one must first be removed by
calling `Solana.SPL.Governance.remove_transaction/1`.
## Options
#{NimbleOptions.docs(@insert_transaction_schema)}
"""
def insert_transaction(opts) do
  # Validate `opts`, derive the transaction PDA for (proposal, index, option),
  # then build the instruction; errors fall through unchanged.
  with {:ok, params} <- validate(opts, @insert_transaction_schema),
       %{program: program, proposal: proposal, index: index, option: option} = params,
       {:ok, transaction} <- find_transaction_address(program, proposal, index, option) do
    %Instruction{
      program: program,
      # Account order and flags must match the on-chain program's layout.
      accounts: [
        %Account{key: params.governance},
        %Account{key: proposal, writable?: true},
        %Account{key: params.owner_record},
        %Account{key: params.authority, signer?: true},
        %Account{key: transaction, writable?: true},
        %Account{key: params.payer, signer?: true},
        %Account{key: SystemProgram.id()},
        %Account{key: Solana.rent()}
      ],
      # 9 is the instruction index; `index` is encoded as a u16 and `delay`
      # as a u32, followed by the serialized instruction list (see tx_data/1).
      data:
        Instruction.encode_data([
          9,
          option,
          {index, 16},
          {params.delay, 32}
          | tx_data(params.instructions)
        ])
    }
  else
    error -> error
  end
end
defp tx_data(ixs), do: List.flatten([{length(ixs), 32} | Enum.map(ixs, &ix_data/1)])
# Serializes a single instruction: program id, u32 account count, each
# account's encoded form, u32 data length, then the raw data bytes.
defp ix_data(%Instruction{} = instruction) do
  encoded_accounts = Enum.map(instruction.accounts, &account_data/1)

  List.flatten([
    instruction.program,
    {length(instruction.accounts), 32},
    encoded_accounts,
    {byte_size(instruction.data), 32},
    instruction.data
  ])
end
# Serializes one account as [pubkey, signer flag, writable flag], with the
# boolean flags encoded as 0/1 via unary/1.
defp account_data(%Account{key: key, signer?: signer?, writable?: writable?}) do
  [key, unary(signer?), unary(writable?)]
end
@remove_transaction_schema [
  proposal: [type: {:custom, Key, :check, []}, required: true, doc: "The proposal account."],
  owner_record: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the `proposal` owner's Token Owner Record account."
  ],
  authority: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance authority (or its delegate)."
  ],
  beneficiary: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the account to receive the disposed instruction account's lamports."
  ],
  transaction: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The Proposal Transaction account indicating the transaction to remove."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ]
]
@doc """
Generates the instructions to remove the Transaction data at the given `index`
from the given `proposal`.
## Options
#{NimbleOptions.docs(@remove_transaction_schema)}
"""
# TODO create test case
def remove_transaction(opts) do
  # No PDA derivation needed here — the caller supplies the transaction
  # account directly, so a plain `case` on validation suffices.
  case validate(opts, @remove_transaction_schema) do
    {:ok, params} ->
      %Instruction{
        program: params.program,
        # Account order and flags must match the on-chain program's layout.
        accounts: [
          %Account{key: params.proposal, writable?: true},
          %Account{key: params.owner_record},
          %Account{key: params.authority, signer?: true},
          %Account{key: params.transaction, writable?: true},
          %Account{key: params.beneficiary, writable?: true}
        ],
        # 10 is the instruction index understood by the governance program.
        data: Instruction.encode_data([10])
      }

    error ->
      error
  end
end
@cancel_proposal_schema [
  governance: [type: {:custom, Key, :check, []}, required: true, doc: "The governance account."],
  proposal: [type: {:custom, Key, :check, []}, required: true, doc: "The proposal account."],
  owner_record: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the `proposal` owner's Token Owner Record account."
  ],
  authority: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance authority (or its delegate)."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ]
]
@doc """
Generates the instructions to cancel the given `proposal`.
## Options
#{NimbleOptions.docs(@cancel_proposal_schema)}
"""
# TODO create test case
def cancel_proposal(opts) do
  case validate(opts, @cancel_proposal_schema) do
    {:ok, params} ->
      %Instruction{
        program: params.program,
        # Account order and flags must match the on-chain program's layout;
        # the clock sysvar is included via the local clock/0 helper.
        accounts: [
          %Account{key: params.proposal, writable?: true},
          %Account{key: params.owner_record, writable?: true},
          %Account{key: params.authority, signer?: true},
          %Account{key: clock()},
          %Account{key: params.governance}
        ],
        # 11 is the instruction index understood by the governance program.
        data: Instruction.encode_data([11])
      }

    error ->
      error
  end
end
@sign_off_proposal_schema [
  proposal: [type: {:custom, Key, :check, []}, required: true, doc: "The proposal account."],
  signatory: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "the signatory signing off on the `proposal`."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ]
]
@doc """
Generates the instructions for a `signatory` to sign off on a `proposal`.
This indicates the `signatory` approves of the `proposal`. When the last
`signatory` signs off, the `proposal` moves to the Voting state.
## Options
#{NimbleOptions.docs(@sign_off_proposal_schema)}
"""
# TODO create test case
def sign_off_proposal(opts) do
  # Validate `opts`, derive the signatory record PDA, then build the
  # instruction; errors fall through unchanged.
  with {:ok, params} <- validate(opts, @sign_off_proposal_schema),
       %{program: program, signatory: signatory, proposal: proposal} = params,
       {:ok, signatory_record} <- find_signatory_record_address(program, proposal, signatory) do
    %Instruction{
      program: program,
      # The signatory itself must sign this instruction.
      accounts: [
        %Account{key: proposal, writable?: true},
        %Account{key: signatory_record, writable?: true},
        %Account{key: signatory, signer?: true},
        %Account{key: clock()}
      ],
      # 12 is the instruction index understood by the governance program.
      data: Instruction.encode_data([12])
    }
  else
    error -> error
  end
end
@cast_vote_schema [
  realm: [type: {:custom, Key, :check, []}, required: true, doc: "The realm account."],
  proposal: [type: {:custom, Key, :check, []}, required: true, doc: "The proposal account."],
  governance: [type: {:custom, Key, :check, []}, required: true, doc: "The governance account."],
  owner_record: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the `proposal` owner's Token Owner Record account."
  ],
  authority: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance authority (or its delegate)."
  ],
  mint: [type: {:custom, Key, :check, []}, required: true, doc: "The governing token mint."],
  payer: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The account which will pay for the Vote Record account's creation."
  ],
  voter_weight_record: [
    type: {:custom, Key, :check, []},
    doc: "Public key of the voter weight record account."
  ],
  max_voter_weight_record: [
    type: {:custom, Key, :check, []},
    doc: "Public key of the max voter weight record account."
  ],
  voter: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the voter's governing token account."
  ],
  vote: [
    type: {:list, {:custom, __MODULE__, :validate_vote, []}},
    required: true,
    doc: "The user's vote. Passing an empty list indicates proposal rejection."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ]
]
@doc """
Generates the instructions for a token owner to cast a vote on the given
`proposal`.
By doing so, the owner indicates they approve or disapprove of running the
`proposal`'s set of instructions.
If this vote causes the proposal to reach a consensus, the instructions can be
run after the configured `delay`.
## Options
#{NimbleOptions.docs(@cast_vote_schema)}
"""
# TODO create test case
def cast_vote(opts) do
  # Validate `opts`, derive the voter's owner record and vote record PDAs,
  # and collect any optional voter-weight add-in accounts; errors from any
  # step fall through unchanged.
  with {:ok, params} <- validate(opts, @cast_vote_schema),
       %{program: program, mint: mint, realm: realm, proposal: proposal} = params,
       {:ok, voter_record} <- find_owner_record_address(program, realm, mint, params.voter),
       {:ok, vote_record} <- find_vote_record_address(program, proposal, voter_record),
       {:ok, voter_weight_accts} <- voter_weight_accounts(params) do
    %Instruction{
      program: program,
      # Fixed accounts first, then the optional voter-weight accounts are
      # appended via the cons (`|`) at the tail of the list.
      accounts: [
        %Account{key: realm},
        %Account{key: params.governance},
        %Account{key: proposal, writable?: true},
        %Account{key: params.owner_record, writable?: true},
        %Account{key: voter_record, writable?: true},
        %Account{key: params.authority, signer?: true},
        %Account{key: vote_record, writable?: true},
        %Account{key: mint},
        %Account{key: params.payer, signer?: true},
        %Account{key: SystemProgram.id()},
        %Account{key: Solana.rent()},
        %Account{key: clock()}
        | voter_weight_accts
      ],
      # 13 is the instruction index; the vote payload is built by vote_data/1.
      data: Instruction.encode_data([13 | vote_data(params.vote)])
    }
  else
    error -> error
  end
end
# Encodes the vote payload. An empty choice list indicates proposal
# rejection (per @cast_vote_schema docs) and is encoded as the bare tag 1;
# otherwise tag 0 is followed by a u32 choice count and each choice tuple's
# elements flattened in order.
defp vote_data([]), do: [1]

defp vote_data(choices) do
  encoded_choices = Enum.map(choices, &Tuple.to_list/1)
  List.flatten([0, {length(choices), 32}, encoded_choices])
end
@finalize_vote_schema [
  realm: [type: {:custom, Key, :check, []}, required: true, doc: "The realm account."],
  proposal: [type: {:custom, Key, :check, []}, required: true, doc: "The proposal account."],
  governance: [type: {:custom, Key, :check, []}, required: true, doc: "The governance account."],
  owner_record: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the `proposal` owner's Token Owner Record account."
  ],
  mint: [type: {:custom, Key, :check, []}, required: true, doc: "The governing token mint."],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ]
]
@doc """
Generates instructions to finalize a vote.
This is available in case the vote was not automatically tipped with the
proposal's `duration`.
## Options
#{NimbleOptions.docs(@finalize_vote_schema)}
"""
# TODO create test case
def finalize_vote(opts) do
  case validate(opts, @finalize_vote_schema) do
    {:ok, params} ->
      %Instruction{
        program: params.program,
        # Account order and flags must match the on-chain program's layout.
        accounts: [
          %Account{key: params.realm},
          %Account{key: params.governance},
          %Account{key: params.proposal, writable?: true},
          %Account{key: params.owner_record, writable?: true},
          %Account{key: params.mint},
          %Account{key: clock()}
        ],
        # 14 is the instruction index understood by the governance program.
        data: Instruction.encode_data([14])
      }

    error ->
      error
  end
end
@relinquish_vote_schema [
  proposal: [type: {:custom, Key, :check, []}, required: true, doc: "The proposal account."],
  governance: [type: {:custom, Key, :check, []}, required: true, doc: "The governance account."],
  owner_record: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the voter's governing Token Owner Record."
  ],
  mint: [type: {:custom, Key, :check, []}, required: true, doc: "The governing token mint."],
  authority: [
    type: {:custom, Key, :check, []},
    doc: """
    Public key of the governance authority (or its delegate). Only required if
    the proposal is still being voted on.
    """
  ],
  beneficiary: [
    type: {:custom, Key, :check, []},
    doc: """
    Public key of the account to receive the disposed vote record account's
    lamports. Only required if the proposal is still being voted on.
    """
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ]
]
@doc """
Generates instructions to relinquish a voter's vote from a proposal.
If the proposal is still being voted on, the voter's weight won't count toward
the outcome. If the proposal is already decided, this instruction has no
effect on the proposal, but allows voters to prune their outstanding votes in
case they want to withdraw governing tokens from the realm.
## Options
#{NimbleOptions.docs(@relinquish_vote_schema)}
"""
# TODO create test case
def relinquish_vote(opts) do
  # Validate `opts` and derive the vote record PDA; errors fall through.
  with {:ok, params} <- validate(opts, @relinquish_vote_schema),
       %{program: program, proposal: proposal, owner_record: owner_record} = params,
       {:ok, vote_record} <- find_vote_record_address(program, proposal, owner_record) do
    %Instruction{
      program: program,
      # `authority`/`beneficiary` are appended only when both were provided
      # (i.e. the proposal is still in voting) — see
      # optional_relinquish_accounts/1.
      accounts: [
        %Account{key: params.governance},
        %Account{key: proposal, writable?: true},
        %Account{key: owner_record, writable?: true},
        %Account{key: vote_record, writable?: true},
        %Account{key: params.mint}
        | optional_relinquish_accounts(params)
      ],
      # 15 is the instruction index understood by the governance program.
      data: Instruction.encode_data([15])
    }
  else
    error -> error
  end
end
# Returns the extra accounts required while a proposal is still being voted
# on. Both :authority and :beneficiary must be present to take the first
# branch; otherwise no extra accounts are appended.
defp optional_relinquish_accounts(params) do
  case params do
    %{authority: authority, beneficiary: beneficiary} ->
      [%Account{key: authority, signer?: true}, %Account{key: beneficiary, writable?: true}]

    _ ->
      []
  end
end
@execute_instruction_schema [
  proposal: [type: {:custom, Key, :check, []}, required: true, doc: "The proposal account."],
  instruction: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The Proposal Instruction account containing the instruction to execute."
  ],
  accounts: [
    type: {:list, {:custom, __MODULE__, :validate_account, []}},
    # Fixed: without a default, NimbleOptions omits the key entirely when the
    # caller doesn't pass it, and `params.accounts` below raised a KeyError.
    default: [],
    doc: "Any extra accounts that are part of the instruction, in order."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ]
]
@doc """
Generates instructions to execute the instruction at index `index` in the
proposal.
Anybody can execute an instruction once the Proposal has been approved and the
instruction's `delay` time has passed.
The instruction being executed will be signed by the Governance PDA the
proposal belongs to, e.g. the Program Governance PDA for program upgrade
instructions.
## Options
#{NimbleOptions.docs(@execute_instruction_schema)}
"""
# TODO create test case
def execute_instruction(opts) do
  case validate(opts, @execute_instruction_schema) do
    {:ok, params} ->
      %Instruction{
        program: params.program,
        # Fixed accounts first; any caller-supplied extra accounts are
        # appended in order at the tail.
        accounts: [
          %Account{key: params.proposal, writable?: true},
          %Account{key: params.instruction, writable?: true},
          %Account{key: clock()}
          | params.accounts
        ],
        # 16 is the instruction index understood by the governance program.
        data: Instruction.encode_data([16])
      }

    error ->
      error
  end
end
@set_governance_config_schema [
  realm: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The realm account the `governance` belongs to."
  ],
  governance: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The governance account to receive the new `config`."
  ],
  config: [
    type: {:custom, Solana.Helpers, :validate, [@governance_config_schema]},
    required: true,
    doc: """
    The desired governance configuration.
    ### Options
    #{NimbleOptions.docs(@governance_config_schema)}
    """
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ]
]
@doc """
Generates the instructions to set a governance's config.
## Options
#{NimbleOptions.docs(@set_governance_config_schema)}
"""
# TODO create test case
def set_governance_config(opts) do
  case validate(opts, @set_governance_config_schema) do
    {:ok, params} ->
      %Instruction{
        program: params.program,
        accounts: [
          %Account{key: params.realm},
          # NOTE(review): the governance PDA is marked as a signer here —
          # presumably it self-signs via CPI; confirm against the on-chain
          # program's expected account metas.
          %Account{key: params.governance, writable?: true, signer?: true}
        ],
        # 19 is the instruction index; the config payload is serialized by
        # governance_config_data/1 (defined elsewhere in this module).
        data: Instruction.encode_data([19 | governance_config_data(params.config)])
      }

    error ->
      error
  end
end
@flag_instruction_error_schema [
  proposal: [type: {:custom, Key, :check, []}, required: true, doc: "The proposal account."],
  instruction: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The Proposal Instruction account to flag."
  ],
  authority: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance authority (or its delegate)."
  ],
  owner_record: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the `proposal` owner's Token Owner Record account."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ]
]
@doc """
Flag an instruction and its parent proposal with "error" status.
## Options
#{NimbleOptions.docs(@flag_instruction_error_schema)}
"""
# TODO create test case
def flag_instruction_error(opts) do
  case validate(opts, @flag_instruction_error_schema) do
    {:ok, params} ->
      %Instruction{
        program: params.program,
        # Account order and flags must match the on-chain program's layout.
        accounts: [
          %Account{key: params.proposal, writable?: true},
          %Account{key: params.owner_record},
          %Account{key: params.authority, signer?: true},
          %Account{key: params.instruction, writable?: true},
          %Account{key: clock()}
        ],
        # 20 is the instruction index understood by the governance program.
        data: Instruction.encode_data([20])
      }

    error ->
      error
  end
end
@set_realm_authority_schema [
  realm: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The realm account to assign a new authority."
  ],
  current: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The current realm authority."
  ],
  new: [
    type: {:custom, Key, :check, []},
    # NOTE(review): `new` is optional in the schema but
    # new_realm_authority_account/1 pattern-matches on it for any action
    # other than :remove — omitting it with :set/:set_checked raises a
    # FunctionClauseError rather than returning {:error, _}. Confirm intent.
    doc: """
    The new realm authority. Must be one of the realm governances.
    """
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ],
  action: [
    type: {:in, @set_realm_authority_actions},
    required: true,
    doc: """
    The action to apply to the current realm authority. `:set` sets the new
    realm authority without checks, `:set_checked` makes sure the new
    authority is one of the realm's governances, and `:remove` removes the
    realm authority.
    """
  ]
]
@doc """
Generates the instructions to set a new realm authority.
## Options
#{NimbleOptions.docs(@set_realm_authority_schema)}
"""
def set_realm_authority(opts) do
  case validate(opts, @set_realm_authority_schema) do
    {:ok, params} ->
      %Instruction{
        program: params.program,
        accounts: [
          %Account{key: params.realm, writable?: true},
          %Account{key: params.current, signer?: true}
          | new_realm_authority_account(params)
        ],
        # 21 is the instruction index; the action is encoded as its position
        # in @set_realm_authority_actions (defined earlier in this module).
        data: Instruction.encode_data([
          21,
          Enum.find_index(@set_realm_authority_actions, &(&1 == params.action))
        ])
      }

    error ->
      error
  end
end
# Returns the new-authority account for the set/set_checked actions; the
# :remove action appends no account. Clause order matters: the :remove
# clause must be tried first.
defp new_realm_authority_account(%{action: :remove}) do
  []
end

defp new_realm_authority_account(%{new: new_authority}) do
  [%Account{key: new_authority}]
end
@set_realm_config_schema [
  realm: [type: {:custom, Key, :check, []}, required: true, doc: "The realm account."],
  authority: [type: {:custom, Key, :check, []}, required: true, doc: "The realm authority."],
  council_mint: [type: {:custom, Key, :check, []}, doc: "The realm's council token mint."],
  payer: [
    type: {:custom, Key, :check, []},
    doc: "The account which will pay for the Realm Config account's creation."
  ],
  voter_weight_addin: [
    type: {:custom, Key, :check, []},
    doc: "Community voter weight add-in program ID."
  ],
  max_voter_weight_addin: [
    type: {:custom, Key, :check, []},
    doc: "Max Community voter weight add-in program ID."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ],
  max_vote_weight_source: [
    type: {:custom, __MODULE__, :validate_max_vote_weight_source, []},
    required: true,
    doc: """
    The source of max vote weight used for voting. Values below 100%
    mint supply can be used when the governing token is fully minted but not
    distributed yet.
    """
  ],
  minimum: [
    type: :non_neg_integer,
    required: true,
    doc: "Minimum number of community tokens a user must hold to create a governance."
  ]
]
@doc """
Generates instructions to set the realm config.
## Options
#{NimbleOptions.docs(@set_realm_config_schema)}
"""
# TODO add test case
def set_realm_config(opts) do
  # Validate `opts`, collect optional add-in accounts, and derive the realm
  # config PDA; errors fall through unchanged. The bare `=` binding for
  # addin_accts always matches and never short-circuits the `with`.
  with {:ok, params} <- validate(opts, @set_realm_config_schema),
       %{program: program, realm: realm} = params,
       addin_accts = voter_weight_addin_accounts(params),
       {:ok, realm_config} <- find_realm_config_address(program, realm) do
    %Instruction{
      program: program,
      # Optional groups (council accounts, add-ins, payer) contribute lists,
      # so the whole account list is flattened once at the end. The payer is
      # only needed when add-in accounts are present (the Realm Config
      # account may have to be created in that case).
      accounts:
        List.flatten([
          %Account{key: realm, writable?: true},
          %Account{key: params.authority, signer?: true},
          council_accounts(params),
          %Account{key: SystemProgram.id()},
          %Account{key: realm_config, writable?: true},
          addin_accts,
          if(addin_accts != [], do: payer_account(params), else: [])
        ]),
      # 22 is the instruction index; the config payload is serialized by
      # realm_config_data/1 (defined elsewhere in this module).
      data: Instruction.encode_data([22 | realm_config_data(params)])
    }
  else
    error -> error
  end
end
# Wraps the optional payer in a single-element signer account list; returns
# an empty list when no payer was provided.
defp payer_account(%{payer: payer}) do
  [%Account{key: payer, signer?: true}]
end

defp payer_account(_params), do: []
@create_owner_record_schema [
  realm: [type: {:custom, Key, :check, []}, required: true, doc: "The realm account."],
  owner: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The governing token owner's account."
  ],
  mint: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The mint for the governing token."
  ],
  payer: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The account which will pay for the Token Owner Record account's creation."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ]
]
@doc """
Generates instructions to create a Token Owner Record with no voter weight (0
deposit).
This is used to register a token owner when the Voter Weight Add-in is used
and the Governance program doesn't take deposits.
## Options
#{NimbleOptions.docs(@create_owner_record_schema)}
"""
# TODO add test case
def create_owner_record(opts) do
  # Validate `opts` and derive the owner record PDA; errors fall through.
  with {:ok, params} <- validate(opts, @create_owner_record_schema),
       %{program: program, realm: realm, mint: mint, owner: owner} = params,
       {:ok, owner_record} <- find_owner_record_address(program, realm, mint, owner) do
    %Instruction{
      program: program,
      # Account order and flags must match the on-chain program's layout.
      accounts: [
        %Account{key: realm},
        %Account{key: owner},
        %Account{key: owner_record, writable?: true},
        %Account{key: mint},
        %Account{key: params.payer, signer?: true},
        %Account{key: SystemProgram.id()}
      ],
      # 23 is the instruction index understood by the governance program.
      data: Instruction.encode_data([23])
    }
  else
    error -> error
  end
end
@update_program_metadata_schema [
  payer: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The account which will pay for the Program Metadata account's creation."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ]
]
@doc """
Generates instructions to update a Program Metadata account.
This dumps information implied by the governance program's code into a
persistent account.
## Options
#{NimbleOptions.docs(@update_program_metadata_schema)}
"""
# TODO add test case
def update_program_metadata(opts) do
  # Validate `opts` and derive the metadata PDA; errors fall through.
  with {:ok, params} <- validate(opts, @update_program_metadata_schema),
       {:ok, metadata} <- find_metadata_address(params.program) do
    %Instruction{
      program: params.program,
      accounts: [
        %Account{key: metadata, writable?: true},
        %Account{key: params.payer, signer?: true},
        %Account{key: SystemProgram.id()}
      ],
      # 24 is the instruction index understood by the governance program.
      data: Instruction.encode_data([24])
    }
  else
    error -> error
  end
end
@create_native_treasury_schema [
  governance: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The governance account associated with the new treasury account."
  ],
  payer: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "The account which will pay for the native treasury account's creation."
  ],
  program: [
    type: {:custom, Key, :check, []},
    required: true,
    doc: "Public key of the governance program instance to use."
  ]
]
@doc """
Generates instructions to create a native SOL treasury account for a
Governance account.
The account has no data and can be used:
- as a payer for instructions signed by governance PDAs
- as a native SOL treasury
## Options
#{NimbleOptions.docs(@create_native_treasury_schema)}
"""
# TODO create test case
def create_native_treasury(opts) do
  # Validate `opts` and derive the treasury PDA; errors fall through.
  with {:ok, params} <- validate(opts, @create_native_treasury_schema),
       %{program: program, governance: governance} = params,
       {:ok, native_treasury} <- find_native_treasury_address(program, governance) do
    %Instruction{
      program: program,
      accounts: [
        %Account{key: governance},
        %Account{key: native_treasury, writable?: true},
        %Account{key: params.payer, signer?: true},
        %Account{key: SystemProgram.id()}
      ],
      # 25 is the instruction index understood by the governance program.
      data: Instruction.encode_data([25])
    }
  else
    error -> error
  end
end
# TODO replace with Solana.clock() once `solana` package is updated
# Pubkey of the Clock sysvar account.
defp clock, do: Solana.pubkey!("SysvarC1ock11111111111111111111111111111111")

# Encodes a boolean flag as 1 (true) or 0 (false/nil) for serialization.
defp unary(flag) do
  if flag, do: 1, else: 0
end
# Builds the optional voter-weight account list for cast_vote/1: the realm
# config PDA followed by whichever of :voter_weight_record /
# :max_voter_weight_record the caller provided, in that order.
defp voter_weight_accounts(%{realm: realm, program: program} = params) do
  case find_realm_config_address(program, realm) do
    {:ok, config} ->
      [:voter_weight_record, :max_voter_weight_record]
      |> Enum.filter(&Map.has_key?(params, &1))
      # BUG FIX: previously `&%Account{key: &1}` put the option ATOM itself
      # (e.g. :voter_weight_record) into the account key instead of the
      # pubkey the caller supplied in `params`.
      |> Enum.map(&%Account{key: Map.fetch!(params, &1)})
      |> then(&{:ok, [%Account{key: config} | &1]})

    error ->
      error
  end
end
end
|
lib/solana/spl/governance.ex
| 0.833934
| 0.432303
|
governance.ex
|
starcoder
|
defmodule Flipay.BestRateFinder do
  @moduledoc """
  Find the best rate according to input/output assets, input amount and exchange's order book.
  """

  @doc """
  Finds the best rate for input request.
  Order book comes from specific exchange and the quotes are sorted by best to worst order.
  Input/output assets and input amount are specified by user.
  ## Examples:
      iex> order_book = %Flipay.Exchanges.OrderBook{
      ...>   exchange: Flipay.Exchanges.Coinbase,
      ...>   exchange_side: "asks",
      ...>   asset: "BTC-USD",
      ...>   quotes: [
      ...>     %Flipay.Exchanges.Quote{
      ...>       price: 5000,
      ...>       size: 2
      ...>     },
      ...>     %Flipay.Exchanges.Quote{
      ...>       price: 6000,
      ...>       size: 1
      ...>     }
      ...>   ]
      ...>}
      iex> {:ok, rate} = Flipay.BestRateFinder.find(%{order_book: order_book, input_amount: 12000})
      iex> rate
      #Decimal<2.333333333333333333333333333>
  """
  def find(%{order_book: order_book, input_amount: input_amount}) do
    cond do
      # Fixed: `Enum.count(quotes) == 0` walked the whole list; an empty
      # list is detected in O(1) by direct comparison.
      order_book.quotes == [] -> {:error, :no_quotes}
      # "asks" means we spend `input_amount` of quote currency buying;
      # "bids" means we sell `input_amount` of base currency.
      order_book.exchange_side == "asks" -> buy_best_rate(order_book.quotes, input_amount, 0)
      order_book.exchange_side == "bids" -> sell_best_rate(order_book.quotes, input_amount, 0)
      true -> {:error, :unexpected}
    end
  end

  @doc """
  Calculates the best selling rate according to quotes and input size.
  ## Examples:
      iex> quotes = [%Flipay.Exchanges.Quote{price: 5000, size: 1}, %Flipay.Exchanges.Quote{price: 4900, size: 2}]
      iex> {:ok, amount} = Flipay.BestRateFinder.sell_best_rate(quotes, 1, 0)
      iex> amount
      #Decimal<5000>
      iex> {:ok, input_size} = Decimal.parse("1.5")
      iex> {:ok, amount} = Flipay.BestRateFinder.sell_best_rate(quotes, input_size, 0)
      iex> amount
      #Decimal<7450.0>
      iex> {:ok, input_size} = Decimal.parse("3.1")
      iex> Flipay.BestRateFinder.sell_best_rate(quotes, input_size, 0)
      {:error, :not_enough_quotes}
  """
  # Ran out of quotes with size still left to sell.
  def sell_best_rate([] = _quotes, _, _), do: {:error, :not_enough_quotes}

  def sell_best_rate([current | rest] = _quotes, remain_size, total_amount) do
    case Decimal.cmp(remain_size, current.size) do
      # This quote can't absorb everything: sell its full size and recurse
      # with the remainder.
      :gt ->
        sell_best_rate(
          rest,
          Decimal.sub(remain_size, current.size),
          Decimal.add(total_amount, Decimal.mult(current.size, current.price))
        )

      # :lt or :eq — the current quote covers the rest of the order.
      _ ->
        {:ok, Decimal.add(total_amount, Decimal.mult(remain_size, current.price))}
    end
  end

  @doc """
  Calculates the best buying rate according to quotes and input amount.
  ## Examples
      iex> order_books = [%Flipay.Exchanges.Quote{price: 5000, size: 1}, %Flipay.Exchanges.Quote{price: 5100, size: 2}]
      iex> {:ok, size} = Flipay.BestRateFinder.buy_best_rate(order_books, 10100, 0)
      iex> size
      #Decimal<2>
      iex> {:ok, size} = Flipay.BestRateFinder.buy_best_rate(order_books, 15200, 0)
      iex> size
      #Decimal<3>
      iex> {:ok, size} = Flipay.BestRateFinder.buy_best_rate(order_books, 15000, 0)
      iex> size
      #Decimal<2.960784313725490196078431373>
      iex> Flipay.BestRateFinder.buy_best_rate(order_books, 16000, 0)
      {:error, :not_enough_quotes}
  """
  # Ran out of quotes with money still left to spend.
  def buy_best_rate([] = _quotes, _, _), do: {:error, :not_enough_quotes}

  def buy_best_rate([current | rest] = _quotes, remain_amount, total_size) do
    # Total cost of taking this quote in full.
    current_amount = Decimal.mult(current.price, current.size)

    case Decimal.cmp(remain_amount, current_amount) do
      # More money remains than this quote costs: take it all and recurse.
      :gt ->
        buy_best_rate(
          rest,
          Decimal.sub(remain_amount, current_amount),
          Decimal.add(total_size, current.size)
        )

      # :lt or :eq — spend the remainder at the current price.
      _ ->
        {:ok, Decimal.add(total_size, Decimal.div(remain_amount, current.price))}
    end
  end
end
|
lib/flipay/best_rate_finder.ex
| 0.846562
| 0.542984
|
best_rate_finder.ex
|
starcoder
|
defmodule PokerValidator do
  @moduledoc """
  This is the main module for validating the hands.
  """
  alias PokerValidator.Combination
  alias PokerValidator.Hand
  alias PokerValidator.Card

  @doc """
  With a given list of cards (Greater or equal than 5), this function evaluates
  the best possible hand, it returns a Hand:
  """
  def hand(cards) when is_list(cards) and length(cards) >= 5 do
    # Evaluate every 5-card combination and keep the one with the highest
    # score. Ties keep the first combination found.
    combinations = Combination.combinations(cards, 5)
    Enum.reduce(combinations, nil, fn(comb, best_hand) ->
      # Cards are sorted ascending by value before matching, so the
      # get_hand/1 clauses below can rely on value1 <= ... <= value5.
      hand = comb
      |> Enum.sort(&(&1.value < &2.value))
      |> get_hand
      if is_nil(best_hand) || hand.score > best_hand.score,
        do: hand, else: best_hand
    end)
  end

  # Straight flush or flush
  # All five cards share one suit (the repeated `suit` binding enforces it).
  # This clause must come before the rank-pattern clauses below.
  defp get_hand([
    %Card{value: value1, suit: suit},
    %Card{value: value2, suit: suit},
    %Card{value: value3, suit: suit},
    %Card{value: value4, suit: suit},
    %Card{value: value5, suit: suit}] = cards) do
    values = [value1, value2, value3, value4, value5]
    case {is_straight?(values), value1, value5} do
      # Lowest card 10 in a suited straight means 10-J-Q-K-A.
      {true, 10, _} ->
        Hand.new_hand(:royal_straight_flush, cards,
          %{highs: [value5], suit: suit})
      # A-2-3-4-5 wheel: the ace plays low, so the high card is 5.
      {true, 2, 14} ->
        Hand.new_hand(:straight_flush, cards, %{highs: [5], suit: suit})
      {true, _, _} ->
        Hand.new_hand(:straight_flush, cards, %{highs: [value5], suit: suit})
      {false, _, _} ->
        Hand.new_hand(:flush, cards, %{highs: [value5], suit: suit})
    end
  end

  # Poker
  # Four of a kind; the repeated `value1`/`value2` bindings detect the
  # quads. Two clauses cover the kicker sorting below or above the quads.
  defp get_hand([
    %Card{value: value1, suit: _},
    %Card{value: value1, suit: _},
    %Card{value: value1, suit: _},
    %Card{value: value1, suit: _},
    %Card{value: value2, suit: _}] = cards) do
    Hand.new_hand(:poker, cards, %{highs: [value1, value2]})
  end
  defp get_hand([
    %Card{value: value1, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value2, suit: _}] = cards) do
    Hand.new_hand(:poker, cards, %{highs: [value2, value1]})
  end

  # Full house
  # highs are always [trips value, pair value].
  defp get_hand([
    %Card{value: value1, suit: _},
    %Card{value: value1, suit: _},
    %Card{value: value1, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value2, suit: _}] = cards) do
    Hand.new_hand(:full_house, cards, %{highs: [value1, value2]})
  end
  defp get_hand([
    %Card{value: value1, suit: _},
    %Card{value: value1, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value2, suit: _}] = cards) do
    Hand.new_hand(:full_house, cards, %{highs: [value2, value1]})
  end

  # set
  # Three of a kind; one clause per position of the trips in the sorted
  # hand. highs are [trips value, higher kicker, lower kicker].
  defp get_hand([
    %Card{value: value1, suit: _},
    %Card{value: value1, suit: _},
    %Card{value: value1, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value3, suit: _}] = cards) do
    Hand.new_hand(:set, cards, %{highs: [value1, value3, value2]})
  end
  defp get_hand([
    %Card{value: value1, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value3, suit: _}] = cards) do
    Hand.new_hand(:set, cards, %{highs: [value2, value3, value1]})
  end
  defp get_hand([
    %Card{value: value1, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value3, suit: _},
    %Card{value: value3, suit: _},
    %Card{value: value3, suit: _}] = cards) do
    Hand.new_hand(:set, cards, %{highs: [value3, value2, value1]})
  end

  # two pairs
  # One clause per position of the unpaired kicker. highs are
  # [higher pair, lower pair, kicker].
  defp get_hand([
    %Card{value: value1, suit: _},
    %Card{value: value1, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value3, suit: _}] = cards) do
    Hand.new_hand(:two_pairs, cards, %{highs: [value2, value1, value3]})
  end
  defp get_hand([
    %Card{value: value1, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value3, suit: _},
    %Card{value: value3, suit: _}] = cards) do
    Hand.new_hand(:two_pairs, cards, %{highs: [value3, value2, value1]})
  end
  defp get_hand([
    %Card{value: value1, suit: _},
    %Card{value: value1, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value3, suit: _},
    %Card{value: value3, suit: _}] = cards) do
    Hand.new_hand(:two_pairs, cards, %{highs: [value3, value1, value2]})
  end

  # pair
  # One clause per position of the pair. highs are
  # [pair value, kickers from high to low].
  defp get_hand([
    %Card{value: value1, suit: _},
    %Card{value: value1, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value3, suit: _},
    %Card{value: value4, suit: _}] = cards) do
    Hand.new_hand(:pair, cards, %{highs: [value1, value4, value3, value2]})
  end
  defp get_hand([
    %Card{value: value1, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value3, suit: _},
    %Card{value: value4, suit: _}] = cards) do
    Hand.new_hand(:pair, cards, %{highs: [value2, value4, value3, value1]})
  end
  defp get_hand([
    %Card{value: value1, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value3, suit: _},
    %Card{value: value3, suit: _},
    %Card{value: value4, suit: _}] = cards) do
    Hand.new_hand(:pair, cards, %{highs: [value3, value4, value2, value1]})
  end
  defp get_hand([
    %Card{value: value1, suit: _},
    %Card{value: value2, suit: _},
    %Card{value: value3, suit: _},
    %Card{value: value4, suit: _},
    %Card{value: value4, suit: _}] = cards) do
    Hand.new_hand(:pair, cards, %{highs: [value4, value3, value2, value1]})
  end

  # Stright or high card
  # Fallback clause: no flush, no repeated values.
  defp get_hand(cards) do
    values = cards |> Enum.map(&(&1.value))
    case {is_straight?(values), values} do
      # A-2-3-4-5 wheel: ace plays low, high card is 5.
      {true, [2, _, _, _, 14]} ->
        Hand.new_hand(:straight, cards, %{highs: [5]})
      {true, [_, _, _, _, value5]} ->
        Hand.new_hand(:straight, cards, %{highs: [value5]})
      _ ->
        # High card: all five values, highest first.
        Hand.new_hand(:high_card, cards, %{highs: values |> Enum.reverse})
    end
  end

  # Verifies if a list of 5 cards is a straight
  # The A-2-3-4-5 wheel is special-cased since ace sorts as 14.
  defp is_straight?([2,3,4,5,14]), do: true
  defp is_straight?(values) do
    # Walk the ascending values and stop early at the first gap.
    [h | t] = values
    {is_straight?, _} = Enum.reduce_while(t, {true, h},
      fn(value, {_straight?, prev_value}) ->
        if value == prev_value + 1,
          do: {:cont, {true, value}},
          else: {:halt, {false, value}}
      end
    )
    is_straight?
  end
end
|
lib/poker_validator.ex
| 0.739422
| 0.454351
|
poker_validator.ex
|
starcoder
|
defmodule EVM.Memory do
  @moduledoc """
  Functions to help us handle memory operations
  in the MachineState of the VM.
  """

  alias EVM.MachineState

  # VM memory is modeled as a single flat binary, grown on demand.
  @type t :: binary()

  # NOTE(review): several doctest examples below contain `<<fd00:...>>`-style
  # tokens that are not valid Elixir. They look like artifacts of a text
  # extraction/anonymization pass and should be restored from version control
  # before being relied upon.
  @doc """
  Reads a word out of memory, and also decides whether or not
  we should increment number of active words in our machine state.
  ## Examples
  iex> EVM.Memory.read(%EVM.MachineState{memory: <<fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b, fc00:db20:35b:7399::5, fc00:e968:6179::de52:7100, 4::256>>, active_words: 0}, 0, 0)
  {<<>>, %EVM.MachineState{memory: <<fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b, fc00:db20:35b:7399::5, fc00:e968:6179::de52:7100, 4::256>>, active_words: 0}}
  iex> EVM.Memory.read(%EVM.MachineState{memory: <<fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b, fc00:db20:35b:7399::5, fc00:e968:6179::de52:7100, 4::256>>, active_words: 0}, 0, 30)
  {<<fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b>>, %EVM.MachineState{memory: <<fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b, fc00:db20:35b:7399::5, fc00:e968:6179::de52:7100, 4::256>>, active_words: 1}}
  iex> EVM.Memory.read(%EVM.MachineState{memory: <<fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b, fc00:db20:35b:7399::5, fc00:e968:6179::de52:7100, 4::256>>, active_words: 0}, 0, 35)
  {<<fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b, fc00:e968:6179::de52:7100>>, %EVM.MachineState{memory: <<fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b, fc00:db20:35b:7399::5, fc00:e968:6179::de52:7100, 4::256>>, active_words: 2}}
  iex> EVM.Memory.read(%EVM.MachineState{memory: <<fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b, fc00:db20:35b:7399::5, fc00:e968:6179::de52:7100, 4::256>>, active_words: 0}, 32, 35)
  {<<fc00:db20:35b:7399::5, fc00:e968:6179::de52:7100>>, %EVM.MachineState{memory: <<fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b, fc00:db20:35b:7399::5, fc00:e968:6179::de52:7100, 4::256>>, active_words: 3}}
  iex> EVM.Memory.read(%EVM.MachineState{memory: <<1::256>>, active_words: 0}, 0, 35)
  {<<fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b, fc00:e968:6179::de52:7100>>, %EVM.MachineState{memory: <<1::256>>, active_words: 2}}
  """
  @spec read(MachineState.t(), EVM.val(), EVM.val()) :: {binary(), MachineState.t()}
  def read(machine_state, offset, bytes \\ EVM.word_size()) do
    data = read_zeroed_memory(machine_state.memory, offset, bytes)
    # A zero-byte read touches no memory, so it must not raise the word count.
    active_words = if data == <<>>, do: 0, else: get_active_words(offset + bytes)
    {data, machine_state |> MachineState.maybe_set_active_words(active_words)}
  end

  @doc """
  Writes data to memory, and also decides whether or not
  we should increment number of active words in our machine state.
  Note: we will fill in zeros if the memory extends beyond our previous memory
  bounds. This could (very easily) overflow our memory by making a single byte
  write to a far-away location. The gas might be high, but it's still not desirable
  to have a system crash. The easiest mitigation will likely be to load in pages of
  memory as needed. These pages could have an offset and thus a far away page will
  only add a few bytes of memory.
  For now, we'll simply extend our memory and perform a simple write operation.
  Note: we also may just use a different data structure all-together for this.
  ## Examples
  iex> EVM.Memory.write(%EVM.MachineState{memory: <<>>, active_words: 0}, 5, <<1, 1>>)
  %EVM.MachineState{memory: <<0, 0, 0, 0, 0, 1, 1>>, active_words: 1}
  iex> EVM.Memory.write(%EVM.MachineState{memory: <<0, 1, 2, 3, 4>>, active_words: 0}, 1, <<6, 6>>)
  %EVM.MachineState{memory: <<0, 6, 6, 3, 4>>, active_words: 1}
  iex> EVM.Memory.write(%EVM.MachineState{memory: <<0, 1, 2, 3, 4>>, active_words: 0}, 0, <<10, 11, 12, 13, 14, 15>>)
  %EVM.MachineState{memory: <<10, 11, 12, 13, 14, 15>>, active_words: 1}
  iex> EVM.Memory.write(%EVM.MachineState{memory: <<1, 1, 1>>, active_words: 0}, 5, <<1::80>>)
  %EVM.MachineState{memory: <<1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1>>, active_words: 1}
  """
  @spec write(MachineState.t(), EVM.val(), binary() | integer(), integer() | nil) ::
          MachineState.t()
  def write(machine_state, offset_bytes, original_data, size \\ nil)

  # Integer payloads are serialized to their unsigned big-endian bytes first.
  def write(machine_state, offset_bytes, data, size) when is_integer(data),
    do: write(machine_state, offset_bytes, :binary.encode_unsigned(data), size)

  def write(machine_state = %MachineState{}, offset_bytes, original_data, size) do
    # When a size is given, the data is reduced modulo `size * word_size`.
    # NOTE(review): truncating the payload to `size` bytes would normally be
    # `rem(2^(size * 8))`; confirm this modulus against the callers.
    data =
      if size do
        original_data
        |> :binary.decode_unsigned()
        |> rem(size * EVM.word_size())
        |> :binary.encode_unsigned()
      else
        original_data
      end

    memory_size = byte_size(machine_state.memory)
    data_size = byte_size(data)
    final_pos = offset_bytes + data_size
    # Zero-fill any gap between the current end of memory and the write's end.
    padding_bits = max(final_pos - memory_size, 0) * 8
    # Bytes of pre-existing memory that survive after the written region.
    final_memory_byte = max(memory_size - final_pos, 0)
    memory = machine_state.memory <> <<0::size(padding_bits)>>

    # Splice: the prefix up to the offset, the data, then the surviving tail.
    updated_memory =
      :binary.part(memory, 0, offset_bytes) <>
        data <> :binary.part(memory, final_pos, final_memory_byte)

    %{machine_state | memory: updated_memory}
    |> MachineState.maybe_set_active_words(get_active_words(offset_bytes + byte_size(data)))
  end

  @doc """
  Read zeroed memory will read bytes from a certain offset in the memory
  binary. Any bytes extending beyond memory's size will be defaulted to zero.
  ## Examples
  iex> EVM.Memory.read_zeroed_memory(nil, 1, 4)
  <<0, 0, 0, 0>>
  iex> EVM.Memory.read_zeroed_memory(<<1, 2, 3>>, 1, 4)
  <<2, 3, 0, 0>>
  iex> EVM.Memory.read_zeroed_memory(<<1, 2, 3>>, 1, 2)
  <<2, 3>>
  iex> EVM.Memory.read_zeroed_memory(<<16, 17, 18, 19>>, 100, 1)
  <<0>>
  """
  @spec read_zeroed_memory(binary(), EVM.val(), EVM.val()) :: binary()
  # Integer "memory" is first serialized to its unsigned big-endian bytes.
  def read_zeroed_memory(memory, offset, bytes) when is_integer(memory),
    do: read_zeroed_memory(:binary.encode_unsigned(memory), offset, bytes)

  def read_zeroed_memory(memory, offset, bytes) do
    if memory == nil || offset > byte_size(memory) do
      # We're totally out of memory, let's just drop zeros
      bytes_in_bits = bytes * 8
      <<0::size(bytes_in_bits)>>
    else
      memory_size = byte_size(memory)
      final_pos = offset + bytes
      # Clamp the read to the end of real memory; the remainder is zero-padded.
      memory_bytes_final_pos = min(final_pos, memory_size)
      padding = (final_pos - memory_bytes_final_pos) * 8
      :binary.part(memory, offset, memory_bytes_final_pos - offset) <> <<0::size(padding)>>
    end
  end

  @doc """
  Returns the highest active word from the given inputs.
  ## Examples
  iex> EVM.Memory.get_active_words(0) # TODO: We may actually want this to start at 1, even for zero bytes read
  0
  iex> EVM.Memory.get_active_words(80)
  3
  iex> EVM.Memory.get_active_words(321)
  11
  """
  # Ceiling division of a byte count by the 32-byte EVM word size.
  def get_active_words(bytes) do
    # note: round has no effect due to ceil, just being used for float to int conversion
    :math.ceil(bytes / 32) |> round
  end

  @doc """
  When calling instructions, we may adjust the number
  of active words in the machine state. These functions
  provide a simple way to determine the number of words
  after an instruction would be called. This wraps anywhere
  you might see `μ'_i` in the Yellow Paper.
  """
  @spec active_words_after(
          Operation.operation(),
          list(EVM.val()),
          MachineState.t(),
          EVM.ExecEnv.t()
        ) :: integer()
  # Default: instructions leave the active word count unchanged.
  def active_words_after(_instruction, _state, machine_state, _exec_env),
    do: machine_state.active_words
end
|
apps/evm/lib/evm/memory.ex
| 0.799599
| 0.464476
|
memory.ex
|
starcoder
|
defmodule Solana.SPL.Token do
  @moduledoc """
  Functions for interacting with Solana's [Token
  Program](https://spl.solana.com/token).
  """

  alias Solana.{Instruction, Account, SystemProgram}
  import Solana.Helpers

  @typedoc "Token account metadata."
  @type t :: %__MODULE__{
          mint: Solana.key(),
          owner: Solana.key(),
          amount: non_neg_integer,
          delegate: Solana.key() | nil,
          delegated_amount: non_neg_integer,
          initialized?: boolean,
          frozen?: boolean,
          native?: boolean,
          rent_exempt_reserve: non_neg_integer | nil,
          close_authority: Solana.key() | nil
        }

  # Order matters: `set_authority/1` encodes a type as its index in this list.
  @authority_types [:mint, :freeze, :owner, :close]

  defstruct [
    :mint,
    :owner,
    :amount,
    :delegate,
    :rent_exempt_reserve,
    :close_authority,
    delegated_amount: 0,
    initialized?: false,
    frozen?: false,
    native?: false
  ]

  @doc """
  The Token Program's ID.
  """
  @spec id() :: binary
  # NOTE(review): "<KEY>" is a redaction placeholder, not a valid base58 key.
  # This should be the SPL Token program ID; restore it before use.
  def id(), do: Solana.pubkey!("<KEY>")

  @doc """
  The size of a serialized token account.
  """
  @spec byte_size() :: pos_integer
  def byte_size(), do: 165

  @doc """
  Translates the result of a `Solana.RPC.Request.get_account_info/2` into a
  `t:Solana.SPL.Token.t/0`.
  """
  @spec from_account_info(info :: map) :: t | :error
  def from_account_info(info)

  def from_account_info(%{"data" => %{"parsed" => %{"info" => info}}}) do
    # Build the base struct from the required fields, then layer on optional
    # fields (state, delegate, ...) from whatever else the RPC response holds.
    case from_token_account_info(info) do
      :error -> :error
      token -> Enum.reduce(info, token, &add_info/2)
    end
  end

  def from_account_info(_), do: :error

  # Decodes the required fields of a parsed token account; :error if any are missing.
  defp from_token_account_info(%{
         "isNative" => native?,
         "mint" => mint,
         "owner" => owner,
         "tokenAmount" => %{"amount" => amount}
       }) do
    %__MODULE__{
      native?: native?,
      mint: B58.decode58!(mint),
      owner: B58.decode58!(owner),
      amount: String.to_integer(amount)
    }
  end

  defp from_token_account_info(_), do: :error

  # Optional field handlers; unrecognized keys fall through unchanged.
  defp add_info({"state", "initialized"}, token) do
    %{token | initialized?: true}
  end

  defp add_info({"state", "frozen"}, token) do
    # A frozen account is necessarily initialized.
    %{token | initialized?: true, frozen?: true}
  end

  defp add_info({"delegate", delegate}, token) do
    %{token | delegate: B58.decode58!(delegate)}
  end

  defp add_info({"delegatedAmount", %{"amount" => amount}}, token) do
    %{token | delegated_amount: String.to_integer(amount)}
  end

  defp add_info(_, token), do: token

  @init_schema [
    payer: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The account that will pay for the token account creation"
    ],
    balance: [
      type: :non_neg_integer,
      required: true,
      doc: "The lamport balance the token account should have"
    ],
    mint: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The mint of the newly-created token account"
    ],
    owner: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The owner of the newly-created token account"
    ],
    new: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The public key of the newly-created token account"
    ]
  ]

  @doc """
  Creates the instructions which initialize a new account to hold tokens.
  If this account is associated with the native mint then the token balance of
  the initialized account will be equal to the amount of SOL in the account. If
  this account is associated with another mint, that mint must be initialized
  before this command can succeed.
  All instructions must be executed as part of the same transaction. Otherwise
  another party can acquire ownership of the uninitialized account.
  ## Options
  #{NimbleOptions.docs(@init_schema)}
  """
  def init(opts) do
    case validate(opts, @init_schema) do
      {:ok, params} ->
        # Two instructions: allocate the account, then initialize it as a
        # token account. They must run in the same transaction (see @doc).
        [
          SystemProgram.create_account(
            lamports: params.balance,
            space: byte_size(),
            from: params.payer,
            new: params.new,
            program_id: id()
          ),
          initialize_ix(params)
        ]

      error ->
        error
    end
  end

  defp initialize_ix(params) do
    %Instruction{
      program: id(),
      accounts: [
        %Account{key: params.new, writable?: true},
        %Account{key: params.mint},
        %Account{key: params.owner},
        %Account{key: Solana.rent()}
      ],
      # 1 = InitializeAccount instruction index.
      data: Instruction.encode_data([1])
    }
  end

  @transfer_schema [
    from: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The account to send tokens from"
    ],
    to: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The account to receive tokens"
    ],
    owner: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The owner of `from`"
    ],
    multi_signers: [
      type: {:list, {:custom, Solana.Key, :check, []}},
      doc: "signing accounts if the `owner` is a `Solana.SPL.Token.MultiSig` account"
    ],
    amount: [
      type: :pos_integer,
      required: true,
      doc: "The number of tokens to send"
    ],
    checked?: [
      type: :boolean,
      default: false,
      doc: """
      whether or not to check the token mint and decimals; may be useful
      when creating transactions offline or within a hardware wallet.
      """
    ],
    decimals: [
      type: {:in, 0..255},
      doc: "The number of decimals in the `amount`. Only used if `checked?` is true."
    ],
    mint: [
      type: {:custom, Solana.Key, :check, []},
      doc: "The mint account for `from` and `to`. Only used if `checked?` is true."
    ]
  ]

  @doc """
  Creates an instruction to transfer tokens from one account to another either
  directly or via a delegate.
  If this account is associated with the native mint then equal amounts of SOL
  and Tokens will be transferred to the destination account.
  If you want to check the token's `mint` and `decimals`, set the `checked?`
  option to `true` and provide the `mint` and `decimals` options.
  ## Options
  #{NimbleOptions.docs(@transfer_schema)}
  """
  def transfer(opts) do
    case validate(opts, @transfer_schema) do
      {:ok, params = %{checked?: true, mint: mint, decimals: decimals}} ->
        %Instruction{
          program: id(),
          accounts: [
            %Account{key: params.from, writable?: true},
            %Account{key: mint},
            %Account{key: params.to, writable?: true}
            | signer_accounts(params)
          ],
          # 12 = TransferChecked instruction index; amount is a u64.
          data: Instruction.encode_data([12, {params.amount, 64}, decimals])
        }

      {:ok, params = %{checked?: false}} ->
        %Instruction{
          program: id(),
          accounts: [
            %Account{key: params.from, writable?: true},
            %Account{key: params.to, writable?: true}
            | signer_accounts(params)
          ],
          # 3 = Transfer instruction index.
          data: Instruction.encode_data([3, {params.amount, 64}])
        }

      # checked?: true but mint/decimals missing.
      {:ok, _} ->
        {:error, :invalid_checked_params}

      error ->
        error
    end
  end

  @approve_schema [
    source: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The account to send tokens from"
    ],
    delegate: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The account authorized to perform a transfer of tokens from `source`"
    ],
    owner: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The account which owns `source`"
    ],
    multi_signers: [
      type: {:list, {:custom, Solana.Key, :check, []}},
      doc: "signing accounts if the `owner` is a `Solana.SPL.Token.MultiSig` account"
    ],
    amount: [
      type: :pos_integer,
      required: true,
      doc: "The maximum number of tokens that `delegate` can send on behalf of `source`"
    ],
    checked?: [
      type: :boolean,
      default: false,
      doc: """
      whether or not to check the token mint and decimals; may be useful
      when creating transactions offline or within a hardware wallet.
      """
    ],
    decimals: [
      type: {:in, 0..255},
      doc: "The number of decimals in the `amount`. Only used if `checked?` is true."
    ],
    mint: [
      type: {:custom, Solana.Key, :check, []},
      doc: "The mint account for `from` and `to`. Only used if `checked?` is true."
    ]
  ]

  @doc """
  Creates an instruction to approves a delegate.
  A delegate is given the authority over tokens on behalf of the source
  account's owner.
  If you want to check the token's `mint` and `decimals`, set the `checked?`
  option to `true` and provide the `mint` and `decimals` options.
  ## Options
  #{NimbleOptions.docs(@approve_schema)}
  """
  def approve(opts) do
    case validate(opts, @approve_schema) do
      {:ok, params = %{checked?: true, mint: mint, decimals: decimals}} ->
        %Instruction{
          program: id(),
          accounts: [
            %Account{key: params.source, writable?: true},
            %Account{key: mint},
            %Account{key: params.delegate}
            | signer_accounts(params)
          ],
          # 13 = ApproveChecked instruction index.
          data: Instruction.encode_data([13, {params.amount, 64}, decimals])
        }

      {:ok, params = %{checked?: false}} ->
        %Instruction{
          program: id(),
          accounts: [
            %Account{key: params.source, writable?: true},
            %Account{key: params.delegate}
            | signer_accounts(params)
          ],
          # 4 = Approve instruction index.
          data: Instruction.encode_data([4, {params.amount, 64}])
        }

      # checked?: true but mint/decimals missing.
      {:ok, _} ->
        {:error, :invalid_checked_params}

      error ->
        error
    end
  end

  @revoke_schema [
    source: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The account to send tokens from"
    ],
    owner: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The account which owns `source`"
    ],
    multi_signers: [
      type: {:list, {:custom, Solana.Key, :check, []}},
      doc: "signing accounts if the `owner` is a `Solana.SPL.Token.MultiSig` account"
    ]
  ]

  @doc """
  Creates an instruction to revoke a previously approved delegate's authority to
  make transfers.
  ## Options
  #{NimbleOptions.docs(@revoke_schema)}
  """
  def revoke(opts) do
    case validate(opts, @revoke_schema) do
      {:ok, params} ->
        %Instruction{
          program: id(),
          accounts: [
            %Account{key: params.source, writable?: true}
            | signer_accounts(params)
          ],
          # 5 = Revoke instruction index.
          data: Instruction.encode_data([5])
        }

      error ->
        error
    end
  end

  @set_authority_schema [
    account: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The account which will change authority, either a mint or token account"
    ],
    authority: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "the current authority for `mint_or_token`"
    ],
    new_authority: [
      type: {:custom, Solana.Key, :check, []},
      doc: "the new authority for `mint_or_token`"
    ],
    type: [
      type: {:in, @authority_types},
      required: true,
      doc: "type of authority to set"
    ],
    multi_signers: [
      type: {:list, {:custom, Solana.Key, :check, []}},
      doc: "signing accounts if the `authority` is a `Solana.SPL.Token.MultiSig` account"
    ]
  ]

  @doc """
  Creates an instruction to set a new authority for a mint or account.
  ## Options
  #{NimbleOptions.docs(@set_authority_schema)}
  """
  def set_authority(opts) do
    case validate(opts, @set_authority_schema) do
      {:ok, params} ->
        %Instruction{
          program: id(),
          accounts: [
            %Account{key: params.account, writable?: true}
            | signer_accounts(params)
          ],
          data:
            # 6 = SetAuthority; authority type is its index in @authority_types.
            Instruction.encode_data([
              6,
              Enum.find_index(@authority_types, &(&1 == params.type))
              | add_new_authority(params)
            ])
        }

      error ->
        error
    end
  end

  # COption<Pubkey> encoding: [1, key] = Some(key); [0, 32 zero bytes] = None
  # (omitting `new_authority` clears the authority).
  defp add_new_authority(%{new_authority: new_authority}) do
    [1, new_authority]
  end

  defp add_new_authority(_params), do: [0, <<0::32*8>>]

  @mint_to_schema [
    token: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The token account which will receive the minted tokens"
    ],
    mint: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The mint account which will mint the tokens"
    ],
    authority: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "the current mint authority"
    ],
    amount: [
      type: :pos_integer,
      required: true,
      doc: "amount of tokens to mint"
    ],
    multi_signers: [
      type: {:list, {:custom, Solana.Key, :check, []}},
      doc: "signing accounts if the `authority` is a `Solana.SPL.Token.MultiSig` account"
    ],
    checked?: [
      type: :boolean,
      default: false,
      doc: """
      whether or not to check the token mint and decimals; may be useful
      when creating transactions offline or within a hardware wallet.
      """
    ],
    decimals: [
      type: {:in, 0..255},
      doc: "The number of decimals in the `amount`. Only used if `checked?` is true."
    ]
  ]

  @doc """
  Creates an instruction to mints new tokens to an account.
  The native mint does not support minting.
  If you want to check the token's `mint` and `decimals`, set the `checked?`
  option to `true` and provide the `decimals` option.
  ## Options
  #{NimbleOptions.docs(@mint_to_schema)}
  """
  def mint_to(opts) do
    case validate(opts, @mint_to_schema) do
      {:ok, params} ->
        %Instruction{
          program: id(),
          accounts: [
            %Account{key: params.mint, writable?: true},
            %Account{key: params.token, writable?: true}
            | signer_accounts(params)
          ]
        }
        |> add_mint_to_data(params)

      error ->
        error
    end
  end

  # 14 = MintToChecked; 7 = MintTo. checked? without decimals is an error.
  defp add_mint_to_data(ix, %{checked?: true, decimals: decimals, amount: amount}) do
    %{ix | data: Instruction.encode_data([14, {amount, 64}, decimals])}
  end

  defp add_mint_to_data(ix, %{checked?: false, amount: amount}) do
    %{ix | data: Instruction.encode_data([7, {amount, 64}])}
  end

  defp add_mint_to_data(_, _), do: {:error, :invalid_checked_params}

  @burn_schema [
    token: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The token account which will have its tokens burned"
    ],
    mint: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The mint account which will burn the tokens"
    ],
    owner: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "the owner of `token`"
    ],
    amount: [
      type: :pos_integer,
      required: true,
      doc: "amount of tokens to burn"
    ],
    multi_signers: [
      type: {:list, {:custom, Solana.Key, :check, []}},
      doc: "signing accounts if the `owner` is a `Solana.SPL.Token.MultiSig` account"
    ],
    checked?: [
      type: :boolean,
      default: false,
      doc: """
      whether or not to check the token mint and decimals; may be useful
      when creating transactions offline or within a hardware wallet.
      """
    ],
    decimals: [
      type: {:in, 0..255},
      doc: "The number of decimals in the `amount`. Only used if `checked?` is true."
    ]
  ]

  @doc """
  Creates an instruction to burn tokens by removing them from an account.
  `burn/1` does not support accounts associated with the native mint, use
  `close_account/1` instead.
  If you want to check the token's `mint` and `decimals`, set the `checked?`
  option to `true` and provide the `decimals` option.
  ## Options
  #{NimbleOptions.docs(@burn_schema)}
  """
  def burn(opts) do
    case validate(opts, @burn_schema) do
      {:ok, params} ->
        %Instruction{
          program: id(),
          accounts: [
            %Account{key: params.token, writable?: true},
            %Account{key: params.mint, writable?: true}
            | signer_accounts(params)
          ]
        }
        |> add_burn_data(params)

      error ->
        error
    end
  end

  # 15 = BurnChecked; 8 = Burn. checked? without decimals is an error.
  defp add_burn_data(ix, %{checked?: true, decimals: decimals, amount: amount}) do
    %{ix | data: Instruction.encode_data([15, {amount, 64}, decimals])}
  end

  defp add_burn_data(ix, %{checked?: false, amount: amount}) do
    %{ix | data: Instruction.encode_data([8, {amount, 64}])}
  end

  defp add_burn_data(_, _), do: {:error, :invalid_checked_params}

  @close_account_schema [
    to_close: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The account to close"
    ],
    destination: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The account which will receive the remaining balance of `to_close`"
    ],
    authority: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "the `account close` authority for `to_close`"
    ],
    multi_signers: [
      type: {:list, {:custom, Solana.Key, :check, []}},
      doc: "signing accounts if the `authority` is a `Solana.SPL.Token.MultiSig` account"
    ]
  ]

  @doc """
  Creates an instruction to close an account by transferring all its SOL to the
  `destination` account.
  A non-native account may only be closed if its token amount is zero.
  ## Options
  #{NimbleOptions.docs(@close_account_schema)}
  """
  def close_account(opts) do
    case validate(opts, @close_account_schema) do
      {:ok, params} ->
        %Instruction{
          program: id(),
          accounts: [
            %Account{key: params.to_close, writable?: true},
            %Account{key: params.destination, writable?: true}
            | signer_accounts(params)
          ],
          # 9 = CloseAccount instruction index.
          data: Instruction.encode_data([9])
        }

      error ->
        error
    end
  end

  @freeze_schema [
    to_freeze: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The account to freeze"
    ],
    mint: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The mint account for `to_freeze`"
    ],
    authority: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "the `freeze` authority for `mint`"
    ],
    multi_signers: [
      type: {:list, {:custom, Solana.Key, :check, []}},
      doc: "signing accounts if the `authority` is a `Solana.SPL.Token.MultiSig` account"
    ]
  ]

  @doc """
  Creates an instruction to freeze an initialized account using the mint's
  `freeze_authority` (if set).
  ## Options
  #{NimbleOptions.docs(@freeze_schema)}
  """
  def freeze(opts) do
    case validate(opts, @freeze_schema) do
      {:ok, params} ->
        %Instruction{
          program: id(),
          accounts: [
            %Account{key: params.to_freeze, writable?: true},
            %Account{key: params.mint, writable?: true}
            | signer_accounts(params)
          ],
          # 10 = FreezeAccount instruction index.
          data: Instruction.encode_data([10])
        }

      error ->
        error
    end
  end

  @thaw_schema [
    to_thaw: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The account to thaw"
    ],
    mint: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "The mint account for `to_thaw`"
    ],
    authority: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "the `freeze` authority for `mint`"
    ],
    multi_signers: [
      type: {:list, {:custom, Solana.Key, :check, []}},
      doc: "signing accounts if the `authority` is a `Solana.SPL.Token.MultiSig` account"
    ]
  ]

  @doc """
  Creates an instruction to thaw a frozen account using the mint's
  `freeze_authority` (if set).
  ## Options
  #{NimbleOptions.docs(@thaw_schema)}
  """
  def thaw(opts) do
    case validate(opts, @thaw_schema) do
      {:ok, params} ->
        %Instruction{
          program: id(),
          accounts: [
            %Account{key: params.to_thaw, writable?: true},
            %Account{key: params.mint, writable?: true}
            | signer_accounts(params)
          ],
          # 11 = ThawAccount instruction index.
          data: Instruction.encode_data([11])
        }

      error ->
        error
    end
  end

  # Schemas that use `owner` are normalized to the `authority` shape below.
  defp signer_accounts(params = %{owner: owner}) do
    params
    |> Map.delete(:owner)
    |> Map.put(:authority, owner)
    |> signer_accounts()
  end

  # Multisig case: the multisig account itself is not a signer; its
  # constituent `multi_signers` sign instead.
  defp signer_accounts(%{multi_signers: signers, authority: authority}) do
    [
      %Account{key: authority}
      | Enum.map(signers, &%Account{key: &1, signer?: true})
    ]
  end

  defp signer_accounts(%{authority: authority}) do
    [%Account{key: authority, signer?: true}]
  end
end
|
lib/solana/spl/token.ex
| 0.907838
| 0.423041
|
token.ex
|
starcoder
|
defmodule Scenic.Primitive.Style.Paint.Image do
  alias Scenic.Assets.Static

  @moduledoc """
  Fill a primitive with an image from Scenic.Assets.Static
  ### Data Format
  `{:image, id}`
  Fill with the static image indicated by `id`
  The `id` can be either the name of the file when the static assets library was built
  or an alias that you set up in your config. The following example has the alias
  `:parrot` pointing to the `"images/parrot.png"` file, so both fills are identical.
  ```elixir
  Graph.build()
  |> rect({100, 50}, fill: {:image, "images/parrot.jpg"})
  |> rect({100, 50}, fill: {:image, :parrot})
  ```
  Note that this is a fill, and the images will repeat automatically if the primitive
  being filled is larger than the source image.
  If you want more control, such as no repeats, a subsection of the image, or scaling
  the image up or down when you draw it, use the `Sprites` primitive.
  """

  # Validates `{:image, id}` paint data against the static asset library.
  # Returns {:ok, {:image, id}} when `id` resolves to an image asset, and a
  # descriptive {:error, message} tuple otherwise.
  @doc false
  def validate({:image, id}) do
    case Static.meta(id) do
      {:ok, {Static.Image, _}} -> {:ok, {:image, id}}
      # The id resolved to an asset, but that asset is a font, not an image.
      {:ok, {Static.Font, _}} -> err_is_a_font(id)
      _ -> err_missing(id)
    end
  end

  def validate(data), do: err_invalid(data)

  # Error: the id names a font asset.
  defp err_is_a_font(_) do
    {
      :error,
      """
      This is a font!!
      #{IO.ANSI.yellow()}
      Image fills must be an id that names an image in your Scenic.Assets.Static library.#{IO.ANSI.default_color()}
      """
    }
  end

  # Error: the id does not resolve to any asset in the library.
  defp err_missing(id) do
    {
      :error,
      """
      The asset #{inspect(id)} could not be found.
      #{IO.ANSI.yellow()}
      Image fills must be an id that names an image in your Scenic.Assets.Static library.
      To resolve this do the following checks.
      1) Confirm that the file exists in your assets folder.
      2) Make sure the image file is being compiled into your asset library.
      If this file is new, you may need to "touch" your asset library module to cause it to recompile.
      Maybe somebody will help add a filesystem watcher to do this automatically. (hint hint...)
      3) Check that the asset module is defined in your config.
      config :scenic, :assets,
      module: MyApplication.Assets #{IO.ANSI.default_color()}
      """
    }
  end

  # Error: the paint data is not an `{:image, id}` tuple at all.
  defp err_invalid(_) do
    {
      :error,
      """
      #{IO.ANSI.yellow()}
      Image fills must be an id that names an image in your Scenic.Assets.Static library.
      Valid image ids can be the path or an alias to a file in your assets library.
      Examples:
      fill: {:image, "images/parrot.jpg"}
      fill: {:image, :parrot}#{IO.ANSI.default_color()}
      """
    }
  end
end
|
lib/scenic/primitive/style/paint/image.ex
| 0.898023
| 0.867429
|
image.ex
|
starcoder
|
defmodule Reductions do
  @moduledoc """
  Utility functions for reducing lists.
  """

  defmacro __using__(_opts) do
    quote do
      import Reductions
    end
  end

  use InliningTools

  @compile :inline_list_funcs

  @doc ~S"""
  Returns a list of the intermediate values of the reduction of `list` with `fun`.

      iex> Reductions.reductions([1, 2, 3, 4], &+/2)
      [1, 3, 6, 10]

      iex> Reductions.reductions([], &+/2)
      []
  """
  def reductions(list, fun)

  # An empty list has no intermediate values. Previously this raised a
  # FunctionClauseError; returning [] is the natural generalization.
  def reductions([], _fun), do: []

  def reductions([x | xs], fun) do
    # Accumulate the running results in reverse (O(1) prepends: each step is
    # [fun(item, latest_result) | acc]), then flip the list once at the end.
    :lists.reverse(:lists.foldl(fn item, acc -> [fun.(item, hd(acc)) | acc] end, [x], xs))
  end

  @inlines {:reductions, 2}

  @doc ~S"""
  Run `fun` on `item` `count` times.

      iex> Reductions.reduce_times(1, &(&1 + 1), 3)
      4
  """
  @spec reduce_times(item_type, (item_type -> item_type), non_neg_integer()) :: item_type
        when item_type: var
  def reduce_times(item, fun, count)

  # Zero (or negative) repetitions: the item passes through untouched.
  def reduce_times(item, _fun, count) when count <= 0, do: item

  def reduce_times(item, fun, 1) do
    fun.(item)
  end

  def reduce_times(item, fun, count) do
    reduce_times(fun.(item), fun, count - 1)
  end

  @inlines {:reduce_times, 3}

  @doc ~S"""
  `index_of_probability` returns the index of a random element from `probabilities`.
  `probabilities` should sum to 1. Example: if an element at idx 4 of `probabilities` is `0.5`,
  `index_of_probability` will have a 50% chance of returning `4`
  """
  @spec index_of_probability([float()]) :: non_neg_integer()
  def index_of_probability(probabilities) do
    prob = :rand.uniform()
    # Walk the cumulative sums; the appended 1 guarantees a match even when
    # float round-off leaves the final cumulative sum slightly below 1 (in
    # that case length(probabilities) may be returned).
    index_of_probability_(reductions(probabilities, &+/2) ++ [1], prob, 0)
  end

  @inlines {:index_of_probability, 1}

  @spec index_of_probability_(list(float()), float(), non_neg_integer()) :: non_neg_integer()
  defp index_of_probability_([head_prob | _], prob, idx) when prob <= head_prob do
    idx
  end

  defp index_of_probability_([_ | rest_probs], prob, idx) do
    index_of_probability_(rest_probs, prob, idx + 1)
  end

  # Only reachable if called directly with an empty list of cumulative sums.
  defp index_of_probability_([], _p, idx) do
    idx
  end
end
|
lib/mulix/genome/reductions.ex
| 0.739611
| 0.613975
|
reductions.ex
|
starcoder
|
defmodule Sippet.Message.RequestLine do
  @moduledoc """
  A SIP Request-Line struct, composed by the Method, Request-URI and
  SIP-Version.
  The `start_line` of requests are represented by this struct. The RFC 3261
  represents the Request-Line as:
  Request-Line = Method SP Request-URI SP SIP-Version CRLF
  The above `Method` is represented by atoms, when the method is a known one,
  or by binaries, when the method is unknown. Known ones are `:ack`, `:invite`,
  `:register`, `:cancel`, `:message` and all others returned by the function
  `Sippet.Message.known_methods/0`.
  The `Request-URI` is represented by a `Sippet.URI` struct, breaking down the
  SIP-URI in more useful parts for processing.
  The `SIP-Version` is a `{major, minor}` tuple, which assumes the value
  `{2, 0}` in standard implementations.
  """

  alias Sippet.URI, as: URI

  defstruct [
    method: nil,
    request_uri: nil,
    version: nil
  ]

  @type method :: Sippet.Message.method
  @type uri :: URI.t
  @type version :: {integer, integer}

  @type t :: %__MODULE__{
    method: method,
    request_uri: uri,
    version: version
  }

  @doc """
  Creates a Request-Line struct.
  The version will assume the default value `{2, 0}`.
  """
  @spec new(method, uri | binary) :: t
  def new(method, %URI{} = request_uri)
      when is_atom(method) or is_binary(method) do
    %__MODULE__{
      method: method,
      request_uri: request_uri,
      # {2, 0} renders as "SIP/2.0", the standard protocol version.
      version: {2, 0}
    }
  end

  # Binary URIs are parsed first; URI.parse!/1 raises on malformed input.
  def new(method, request_uri)
      when is_binary(request_uri) do
    new(method, URI.parse!(request_uri))
  end

  @doc """
  Returns a binary which corresponds to the text representation of the given
  Request-Line.
  It does not include an ending line CRLF.
  """
  @spec to_string(t) :: binary
  defdelegate to_string(value), to: String.Chars.Sippet.Message.RequestLine

  @doc """
  Returns an iodata which corresponds to the text representation of the given
  Request-Line.
  It does not include an ending line CRLF.
  """
  @spec to_iodata(t) :: iodata
  # Builds `METHOD SP Request-URI SP SIP/major.minor` as iodata (no CRLF).
  def to_iodata(%Sippet.Message.RequestLine{version: {major, minor},
      request_uri: uri, method: method}) do
    # Atom methods are stored lowercase; the wire format is uppercase.
    [if(is_atom(method), do: String.upcase(Atom.to_string(method)), else: method),
     " ", Sippet.URI.to_string(uri),
     " SIP/", Integer.to_string(major), ".", Integer.to_string(minor)]
  end
end
defimpl String.Chars, for: Sippet.Message.RequestLine do
  alias Sippet.Message.RequestLine, as: RequestLine

  # Render through the iodata form, then flatten into a single binary.
  def to_string(%RequestLine{} = request_line) do
    IO.iodata_to_binary(RequestLine.to_iodata(request_line))
  end
end
|
lib/sippet/message/request_line.ex
| 0.922752
| 0.475179
|
request_line.ex
|
starcoder
|
defmodule SecureX.Helper do
@moduledoc false
@spec keys_to_atoms(any()) :: map()
# Recursively converts string keys to atom keys in maps (and maps inside
# lists). Calendar structs are kept whole instead of being walked into.
# SECURITY NOTE(review): String.to_atom/1 on externally supplied keys can
# exhaust the atom table; consider String.to_existing_atom/1 upstream.
def keys_to_atoms(string_key_map) when is_map(string_key_map) do
  for {key, val} <- string_key_map, into: %{} do
    cond do
      is_struct(val) and val.__struct__ in [DateTime, NaiveDateTime, Date, Time] ->
        {String.to_atom(key), val}

      true ->
        {String.to_atom(key), keys_to_atoms(val)}
    end
  end
end

def keys_to_atoms(string_key_list) when is_list(string_key_list) do
  Enum.map(string_key_list, &keys_to_atoms/1)
end

# Scalars (and anything else) pass through unchanged.
def keys_to_atoms(value), do: value
@spec trimmed_downcase(String.t()) :: String.t()
# Trims surrounding whitespace before normalizing via downcase/1.
def trimmed_downcase(str) do
  str
  |> String.trim()
  |> downcase()
end
@spec downcase(String.t()) :: String.t()
# Lowercases the string and replaces spaces with underscores (slug-like form).
def downcase(str) do
  String.replace(String.downcase(str), " ", "_")
end
@spec abort(any(), any(), any()) :: atom()
def abort(_, _, _), do: :abort
@spec default_resp(any(), Keyword.t()) :: tuple()
def default_resp(result, opts \\ [])
@doc """
sends response for the Ecto.all, and list responses.
In mode: :reverse we are doing to inverse of response like '[]' will indicate success and '[struct]' will indicate error.
##Examples
`some_query |> repo.all`
this will return `[]` or `[structs]` so we can handle that response as
'if '[]' or '[structs]', do:some_query |> repo.all |> default_resp()'
'if '[]', do: some_query |> repo.all |> default_resp(msg: error_msg)'
'if '[]', do: some_query |> repo.all |> default_resp(mode: :reverse,msg: success_msg)'
'if '[structs]', do: some_query |> repo.all |> default_resp(mode: :reverse,msg: error_msg)'
"""
def default_resp([], mode: :reverse, msg: msg), do: ok(msg)
def default_resp([], msg: err), do: err |> error()
def default_resp([], _), do: error()
def default_resp(result, mode: :reverse, msg: err) when is_list(result), do: err |> error()
def default_resp(result, _) when is_list(result), do: ok(result)
@doc """
merges transactions of a sage.
##Examples
'defp create_employee_sage(input) do
new()
|> run(:employee, &create_employee/2, &abort/3)
|> run(:salary, &create_salary/2, &abort/3)
|> transaction(MyApp.Repo, input)
end'
and we like to merge two transactions like employee data and its salary as
'{:ok, _, result} |> default_resp(in: salary, [employee: employee])'
"""
def default_resp({:ok, _, result}, in: in_, keys: keys) when is_map(result) do
in_ = result[in_]
case is_map(in_) do
true ->
Enum.reduce(keys, in_, fn {key, value}, acc ->
Map.put(acc, value, result[key])
end)
|> ok()
false ->
result[in_]
end
end
@doc """
gets data from transactions of a sage.
##Examples
'defp create_employee_sage(input) do
new()
|> run(:employee, &create_employee/2, &abort/3)
|> run(:salary, &create_salary/2, &abort/3)
|> transaction(MyApp.Repo, input)
end'
and we can to get employee as
'{:ok, _, result} |> default_resp(key: employee)'
"""
def default_resp({:ok, _, result}, key: key) when is_map(result),
do: result |> Map.get(key) |> ok()
@doc """
sends response for the Ecto.insert_all,Ecto.insert_all and Ecto.insert_all.
In mode: :reverse we are doing to inverse of response like '{integer,nil}' will indicate success and '{integer,[structs]}' will indicate error.
##Examples
`some_query |> repo.insert_all`
this will return `{integer,nil}` or `{integer,[structs]}` so we can handle that response as
'if '{integer,nil} or {integer,[structs]}', do:some_query |> repo.insert_all |> default_resp()'
'if '{integer,nil}', do: some_query |> repo.insert_all |> default_resp(msg: error_msg)'
'if '{integer,nil}', do: some_query |> repo.insert_all |> default_resp(mode: :reverse,msg: success_msg)'
'if '{integer,[structs]}', do: some_query |> repo.insert_all |> default_resp(mode: :reverse,msg: error_msg)'
"""
def default_resp({_, nil}, mode: :reverse, msg: msg), do: ok(msg)
def default_resp({_, nil}, msg: err), do: err |> error()
def default_resp({_, nil}, _), do: error()
def default_resp({_, result}, msg: msg) when is_list(result), do: ok(msg)
def default_resp({_, result}, _) when is_list(result), do: ok(result)
@doc """
sends response for the changeset errors in functions Ecto.insert , Ecto.update,Ecto.delete.
##Examples
`some_query |> repo.insert`
this will return `{:ok,struct}` or `{:error,changeset}` so we can handle that response as
'if '{:error,changeset}', do: some_query |> repo.insert |> default_resp()'
"""
def default_resp({:error, changeset}, _), do: changeset_error(changeset)
@doc """
sends response for the Ecto.get, Ecto.get_by,Ecto.one and functions that will return nil or struct.
Also works for Ecto.insert ,Ecto.update and Ecto.delete.
In mode: :reverse we are doing to inverse of response like 'nil' will indicate success and 'struct' will indicate error.
##Examples
`some_query |> repo.get`
this will return `nil` or `struct` so we can handle that response as
'if 'nil or struct', do:some_query |> repo.get |> default_resp()'
'if 'nil or struct', do: some_query |> repo.insert_all |> default_resp(mode: :reverse)'
`some_query |> repo.create`
this will return `{:ok,struct}` or ;{:error,changeset}' so we can handle that response as
'some_query |> repo.insert |> default_resp()'
'some_query |> repo.update |> default_resp()'
'some_query |> repo.delete |> default_resp()'
default_resp returns tuple as
'result |> default_resp()' Returns {:ok,result}
'default_resp(mode: :reverse,msg: error)' Returns {:error,error}
'params |> default_resp()' Returns {:ok, params}
"""
def default_resp(result, _) when is_tuple(result), do: result
def default_resp(result, mode: :reverse) when is_nil(result), do: ok(result)
def default_resp(result, _) when is_nil(result), do: error(result)
def default_resp(_, mode: :reverse, msg: err), do: err |> error()
def default_resp(result, mode: :reverse), do: result |> error()
def default_resp(result, _), do: result |> ok
@spec changeset_error(struct()) :: tuple()
def changeset_error(%Ecto.Changeset{errors: errors}) do
{key, {msg, _}} = List.first(errors)
{:error, "#{key} #{msg}"}
end
def changeset_error(err), do: err |> error
@doc """
sends ok tuple.
##Examples
'result |> ok()' Returns {:ok, result}
"""
@spec ok(any()) :: tuple()
def ok(data) when is_tuple(data), do: data
def ok(data), do: {:ok, data}
@doc """
sends error tuple.
##Examples
'error |> error()' Returns {:error,error}
"""
@spec error(any()) :: tuple()
def error(data \\ "Doesn't Exist!")
def error(data) when is_tuple(data), do: data
def error(nil), do: {:error, "Doesn't Exist!"}
def error(err), do: {:error, err}
end
|
lib/utils/helper.ex
| 0.806815
| 0.405743
|
helper.ex
|
starcoder
|
defmodule Bintreeviz.Node do
  @moduledoc """
  Bintreeviz.Node describes a single Node in the graph and contains the functions
  to manipulate said Nodes.
  """

  # Horizontal padding (in characters) added around a node's label when
  # computing its rendered width.
  @padding 4

  alias __MODULE__

  @type t() :: %Node{
          label: String.t(),
          x: non_neg_integer(),
          y: non_neg_integer(),
          offset: integer(),
          left_child: Node.t(),
          right_child: Node.t()
        }

  defstruct label: nil,
            x: 0,
            y: 0,
            offset: 0,
            left_child: nil,
            right_child: nil

  @doc "new/1 takes a string label and returns a new %Node{}"
  @spec new(String.t()) :: Node.t()
  def new(label) when is_binary(label) do
    %Node{label: label}
  end

  @doc """
  new/2 takes a string label and a Keyword list containing left and right
  children and returns a new %Node{}.

  Note: the options are pattern-matched, so both keys must be present and
  given in `left_child:`, `right_child:` order.
  """
  @type node_children :: [
          left_child: Node.t() | nil,
          right_child: Node.t() | nil
        ]
  @spec new(String.t(), node_children()) :: Node.t()
  def new(label, options)

  def new(label, left_child: %Node{} = left_child, right_child: %Node{} = right_child)
      when is_binary(label) do
    label
    |> new()
    |> set_left_child(left_child)
    |> set_right_child(right_child)
  end

  def new(label, left_child: nil, right_child: %Node{} = right_child) when is_binary(label) do
    label
    |> new()
    |> set_right_child(right_child)
  end

  def new(label, left_child: %Node{} = left_child, right_child: nil) when is_binary(label) do
    label
    |> new()
    |> set_left_child(left_child)
  end

  # Guard added for consistency with the sibling clauses: a label must
  # always be a binary.
  def new(label, left_child: nil, right_child: nil) when is_binary(label), do: new(label)

  @doc "set_left_child/2 assigns the passed in node as the left_child to the node."
  @spec set_left_child(Node.t(), Node.t()) :: Node.t()
  def set_left_child(%Node{} = self, %Node{} = child) do
    %Node{self | left_child: child}
  end

  @doc "set_right_child/2 assigns the passed in node as the right_child to the node."
  @spec set_right_child(Node.t(), Node.t()) :: Node.t()
  def set_right_child(%Node{} = self, %Node{} = child) do
    %Node{self | right_child: child}
  end

  @doc """
  width/1 returns the width of the node. Width of the node is determined by the
  length of the label (in graphemes) plus the configured padding for the nodes.
  """
  @spec width(Node.t()) :: non_neg_integer()
  def width(%Node{label: label}) do
    String.length(label) + @padding
  end

  @doc """
  is_leaf/1 returns true if the node has no left_child and no right_child.
  """
  @spec is_leaf?(Node.t()) :: boolean()
  def is_leaf?(%Node{left_child: nil, right_child: nil}), do: true
  def is_leaf?(%Node{} = _root), do: false
end
|
lib/node.ex
| 0.841598
| 0.643336
|
node.ex
|
starcoder
|
defmodule Hive do
  @moduledoc """
  Efficient in-memory fleet state management where each vehicle is a
  separate process which manages its own state and provides APIs to
  access and use it.

  The public API is exposed under the `Hive` module.
  """
  use Hive.Base

  import Hive.Vehicle.Helpers

  @doc """
  Infleets a vehicle by `vehicle_id`. When infleeted only by `vehicle_id`,
  the `VehicleWorker` state is initialized as `%Vehicle{id: vehicle_id}`;
  pass a `%Vehicle{}` struct instead of a binary to attach more metadata.

  Returns `{:error, {:already_started, pid}}` when already infleeted.
  """
  def infleet(vehicle_id) when is_binary(vehicle_id),
    do: VehicleSupervisor.infleet(%Vehicle{id: vehicle_id})

  @doc """
  Infleets the given `Vehicle`.

  Returns `{:error, {:already_started, pid}}` when already infleeted.
  """
  def infleet(%Vehicle{} = vehicle), do: VehicleSupervisor.infleet(vehicle)

  @doc """
  Defleets a vehicle by `vehicle_id`; if the vehicle is not known,
  `{:error, :not_found}` is returned.
  """
  def defleet(vehicle_id) when is_binary(vehicle_id),
    do: VehicleSupervisor.defleet(%Vehicle{id: vehicle_id})

  @doc """
  Defleets the given `Vehicle`; if the vehicle is not known,
  `{:error, :not_found}` is returned.
  """
  def defleet(%Vehicle{} = vehicle), do: VehicleSupervisor.defleet(vehicle)

  @doc """
  Updates the position of the vehicle identified by `vehicle_id`.
  """
  def update_position(vehicle_id, %GeoPosition{} = position),
    do: VehicleWorker.update(:position, %Vehicle{id: vehicle_id}, position)

  @doc """
  Gets the position of the vehicle identified by `vehicle_id`.
  """
  def get_position(vehicle_id),
    do: %Vehicle{id: vehicle_id} |> proc_name() |> GenServer.call(:position)

  @doc """
  Gets the full vehicle state for `vehicle_id`.
  """
  def get_vehicle(vehicle_id),
    do: %Vehicle{id: vehicle_id} |> proc_name() |> GenServer.call(:get)

  @doc """
  Sets the vehicle online.
  """
  def set_online(vehicle_id), do: VehicleWorker.set_online(%Vehicle{id: vehicle_id})

  @doc """
  Sets the vehicle offline.
  """
  def set_offline(vehicle_id), do: VehicleWorker.set_offline(%Vehicle{id: vehicle_id})

  @doc """
  Sets the has_passengers state to true.
  """
  def pickup(vehicle_id), do: VehicleWorker.pickup(%Vehicle{id: vehicle_id})

  @doc """
  Sets the has_passengers state to false.
  """
  def dropoff(vehicle_id), do: VehicleWorker.dropoff(%Vehicle{id: vehicle_id})

  @doc """
  Checks whether the vehicle is known and supervised.
  """
  def alive?(vehicle_id),
    do: %Vehicle{id: vehicle_id} |> make_name() |> VehicleSupervisor.alive?()
end
|
lib/hive.ex
| 0.713232
| 0.456046
|
hive.ex
|
starcoder
|
defmodule ExQueb do
  @moduledoc """
  Build Ecto filter Queries.
  """
  import Ecto.Query

  @doc """
  Create the filter.

  Uses the :q query parameter (configurable via the `:ex_queb` /
  `:filter_param` application env) to build the filter.
  """
  def filter(query, params) do
    filters =
      params[Application.get_env(:ex_queb, :filter_param, :q)]
      |> params_to_filters()

    if filters do
      query
      |> ExQueb.StringFilters.string_filters(filters)
      |> integer_filters(filters)
      |> date_filters(filters)
      |> boolean_filters(filters)
    else
      query
    end
  end

  @doc """
  Normalizes the raw filter params into `{string_key, value}` pairs,
  dropping blank values (`""`, `nil`, `[]`).
  """
  def params_to_filters(nil), do: nil

  def params_to_filters(q) do
    # NOTE(review): assumes the filter map has atom keys — string keys would
    # raise in Atom.to_string/1. Confirm against the web layer.
    q
    |> Map.to_list()
    |> Enum.filter(fn {_k, v} -> v not in ["", nil, []] end)
    |> Enum.map(fn {k, v} -> {Atom.to_string(k), v} end)
  end

  defp integer_filters(builder, filters) do
    builder
    |> build_integer_filters(filters, :eq)
    |> build_integer_filters(filters, :lt)
    |> build_integer_filters(filters, :gt)
    |> build_integer_filters(filters, :in)
  end

  defp date_filters(builder, filters) do
    builder
    |> build_date_filters(filters, :gte)
    |> build_date_filters(filters, :lte)
  end

  defp boolean_filters(builder, filters) do
    builder
    |> build_boolean_filters(filters, :is)
  end

  defp build_integer_filters(builder, filters, condition) do
    map_filters(builder, filters, condition, &_build_integer_filter/4)
  end

  defp _build_integer_filter(query, fld, value, :eq) do
    where(query, [q], field(q, ^fld) == ^value)
  end

  defp _build_integer_filter(query, fld, value, :lt) do
    where(query, [q], field(q, ^fld) < ^value)
  end

  # NOTE(review): the :gte/:lte clauses are not reachable from
  # integer_filters/2 (which only applies :eq, :lt, :gt and :in);
  # kept for completeness.
  defp _build_integer_filter(query, fld, value, :gte) do
    where(query, [q], field(q, ^fld) >= ^value)
  end

  defp _build_integer_filter(query, fld, value, :lte) do
    where(query, [q], field(q, ^fld) <= ^value)
  end

  defp _build_integer_filter(query, fld, value, :gt) do
    where(query, [q], field(q, ^fld) > ^value)
  end

  defp _build_integer_filter(query, fld, value, :in) do
    # "1, 2,3" -> [1, 2, 3]
    value_list =
      value
      |> String.split(",")
      |> Enum.map(&String.trim/1)
      |> Enum.map(&String.to_integer/1)

    where(query, [q], field(q, ^fld) in ^value_list)
  end

  defp build_date_filters(builder, filters, condition) do
    map_filters(builder, filters, condition, &_build_date_filter/4)
  end

  defp _build_date_filter(query, fld, value, :lte) do
    where(query, [q], field(q, ^fld) <= ^cast_date_time(value, :lte))
  end

  defp _build_date_filter(query, fld, value, :gte) do
    where(query, [q], field(q, ^fld) >= ^cast_date_time(value, :gte))
  end

  defp build_boolean_filters(builder, filters, condition) do
    map_filters(builder, filters, condition, &_build_boolean_filter/4)
  end

  defp _build_boolean_filter(query, fld, "not_null", :is) do
    case ExQueb.Utils.get_entry_type(query, fld) do
      :assoc ->
        from(
          m in query,
          as: :query,
          where: exists(ExQueb.Utils.build_exists_subquery(query, fld, :query))
        )

      :field ->
        where(query, [q], not is_nil(field(q, ^fld)))

      nil ->
        query
    end
  end

  defp _build_boolean_filter(query, fld, "null", :is) do
    case ExQueb.Utils.get_entry_type(query, fld) do
      :assoc ->
        from(
          m in query,
          as: :query,
          where: not exists(ExQueb.Utils.build_exists_subquery(query, fld, :query))
        )

      :field ->
        where(query, [q], is_nil(field(q, ^fld)))

      nil ->
        query
    end
  end

  # Expands a date-only filter value to the inclusive end/start of that day.
  defp cast_date_time(value, :lte) do
    NaiveDateTime.from_iso8601!("#{value} 23:59:59")
  end

  defp cast_date_time(value, :gte) do
    NaiveDateTime.from_iso8601!("#{value} 00:00:00")
  end

  # Selects the filters whose key ends with "_<condition>", strips that
  # suffix to recover the field name, and folds them into the query builder.
  #
  # Bug fix: the suffix is now removed with String.replace_suffix/3. The
  # previous String.replace/3 call removed *every* occurrence of
  # "_<condition>", so a field such as "price_in_usd" filtered with :in
  # ("price_in_usd_in") was mangled to "price_usd".
  #
  # NOTE(review): String.to_atom/1 on filter keys can create unbounded atoms
  # if the :q params come straight from user input — consider
  # String.to_existing_atom/1 upstream.
  defp map_filters(builder, filters, condition, reduce_fn, map_value_fn \\ fn v -> v end) do
    suffix = "_#{condition}"

    filters
    |> Enum.filter(fn {key, _value} -> String.ends_with?(key, suffix) end)
    |> Enum.map(fn {key, value} -> {String.replace_suffix(key, suffix, ""), value} end)
    |> Enum.reduce(builder, fn {key, value}, acc ->
      reduce_fn.(acc, String.to_atom(key), map_value_fn.(value), condition)
    end)
  end

  @doc """
  Build order for a given query.
  """
  def build_order_bys(query, opts, action, params) when action in ~w(index csv)a do
    case Keyword.get(params, :order, nil) do
      nil ->
        build_default_order_bys(query, opts, action, params)

      order ->
        case get_sort_order(order) do
          nil ->
            build_default_order_bys(query, opts, action, params)

          {name, sort_order} ->
            # to_existing_atom: the field name must already be a schema atom.
            name_atom = String.to_existing_atom(name)

            if sort_order == "desc" do
              order_by(query, [c], desc: field(c, ^name_atom))
            else
              order_by(query, [c], asc: field(c, ^name_atom))
            end
        end
    end
  end

  def build_order_bys(query, _, _, _), do: query

  # Falls back to the configured (or primary-key) ordering when the query
  # has no order_bys of its own.
  defp build_default_order_bys(query, opts, action, _params) when action in ~w(index csv)a do
    case query.order_bys do
      [] ->
        index_opts = Map.get(opts, action, []) |> Enum.into(%{})
        {order, primary_key} = get_default_order_by_field(query, index_opts)
        order_by(query, [c], [{^order, field(c, ^primary_key)}])

      _ ->
        query
    end
  end

  defp build_default_order_bys(query, _opts, _action, _params), do: query

  @doc """
  Get the sort order for a params entry.

  Returns `{field_name, "asc" | "desc"}` or `nil`.
  """
  def get_sort_order(nil), do: nil

  def get_sort_order(order) do
    case Regex.scan(~r/(.+)_(desc|asc)$/, order) do
      [] -> nil
      [[_, name, sort_order]] -> {name, sort_order}
    end
  end

  # Resolution order for the default sort: :default_sort, then
  # :default_sort_order, then :default_sort_field, then the primary key.
  defp get_default_order_by_field(_query, %{default_sort: [{order, field}]}) do
    {order, field}
  end

  defp get_default_order_by_field(query, %{default_sort_order: order}) do
    {order, get_default_order_by_field(query)}
  end

  defp get_default_order_by_field(_query, %{default_sort_field: field}) do
    {:desc, field}
  end

  defp get_default_order_by_field(query, _) do
    {:desc, get_default_order_by_field(query)}
  end

  defp get_default_order_by_field(query) do
    case query do
      %Ecto.Query{} = q ->
        mod = ExQueb.Utils.query_to_module(q)

        case mod.__schema__(:primary_key) do
          [name | _] -> name
          _ -> mod.__schema__(:fields) |> List.first()
        end

      _ ->
        :id
    end
  end
end
|
lib/ex_queb.ex
| 0.719482
| 0.418459
|
ex_queb.ex
|
starcoder
|
defmodule ExAws.ACM do
  @moduledoc """
  Operations for AWS Certificate Manager.

  ## Basic Usage

  ```elixir
  ExAws.ACM.request_certificate("helloworld.example.com", validation_method: "DNS") |> ExAws.request!()
  ```
  """

  # JSON-RPC target prefix used in the "x-amz-target" header.
  @namespace "CertificateManager"

  @type certificate_arn ::
          String.t()

  @type certificate_options ::
          [
            certificate_transparency_logging_preference: String.t()
          ]

  @type domain_validation_option ::
          %{
            domain_name: String.t(),
            validation_domain: String.t()
          }

  @type filter ::
          %{
            extended_key_usage: [String.t()],
            key_types: [String.t()],
            key_usage: [String.t()]
          }

  @type import_certificate_opts ::
          [
            certificate_arn: certificate_arn,
            certificate_chain: binary,
            tags: [tag]
          ]

  @type list_certificates_opts ::
          [
            certificate_statuses: String.t(),
            includes: filter,
            max_items: pos_integer,
            next_token: String.t()
          ]

  @type request_certificate_opts ::
          [
            certificate_authority_arn: String.t(),
            domain_validation_options: domain_validation_option,
            idempotency_token: String.t(),
            options: certificate_options,
            subject_alternative_names: [String.t()],
            tags: [tag],
            validation_method: String.t()
          ]

  @type tag ::
          %{key: String.t(), value: String.t()}

  @doc """
  Adds one or more tags to an ACM certificate.
  """
  @spec add_tags_to_certificate(certificate_arn, [tag]) :: ExAws.Operation.JSON.t()
  def add_tags_to_certificate(certificate_arn, tags) do
    request(:add_tags_to_certificate, [CertificateArn: certificate_arn, Tags: tags])
  end

  @doc """
  Deletes a certificate and its associated private key.
  """
  @spec delete_certificate(certificate_arn) :: ExAws.Operation.JSON.t()
  def delete_certificate(certificate_arn) do
    request(:delete_certificate, [CertificateArn: certificate_arn])
  end

  @doc """
  Returns detailed metadata about the specified ACM certificate.
  """
  @spec describe_certificate(certificate_arn) :: ExAws.Operation.JSON.t()
  def describe_certificate(certificate_arn) do
    request(:describe_certificate, [CertificateArn: certificate_arn])
  end

  @doc """
  Exports a private certificate issued by a private certificate authority (CA).
  """
  @spec export_certificate(certificate_arn, binary) :: ExAws.Operation.JSON.t()
  def export_certificate(certificate_arn, passphrase) do
    request(:export_certificate, [CertificateArn: certificate_arn, Passphrase: passphrase])
  end

  @doc """
  Retrieves an Amazon-issued certificate and its certificate chain.
  """
  @spec get_certificate(certificate_arn) :: ExAws.Operation.JSON.t()
  def get_certificate(certificate_arn) do
    request(:get_certificate, [CertificateArn: certificate_arn])
  end

  @doc """
  Imports a certificate into AWS Certificate Manager (ACM) to use with services
  that are integrated with ACM.
  """
  @spec import_certificate(binary, binary, import_certificate_opts) :: ExAws.Operation.JSON.t()
  def import_certificate(certificate, private_key, opts \\ []) do
    # opts win on key collision, matching the previous Keyword.merge order.
    params = Keyword.merge([Certificate: certificate, PrivateKey: private_key], opts)
    request(:import_certificate, params)
  end

  @doc """
  Retrieves a list of certificate ARNs and domain names.
  """
  @spec list_certificates(list_certificates_opts) :: ExAws.Operation.JSON.t()
  def list_certificates(opts \\ []) do
    request(:list_certificates, opts)
  end

  @doc """
  Lists the tags that have been applied to the ACM certificate.
  """
  @spec list_tags_for_certificate(certificate_arn) :: ExAws.Operation.JSON.t()
  def list_tags_for_certificate(certificate_arn) do
    request(:list_tags_for_certificate, [CertificateArn: certificate_arn])
  end

  @doc """
  Remove one or more tags from an ACM certificate.
  """
  @spec remove_tags_from_certificate(certificate_arn, [tag]) :: ExAws.Operation.JSON.t()
  def remove_tags_from_certificate(certificate_arn, tags) do
    request(:remove_tags_from_certificate, [CertificateArn: certificate_arn, Tags: tags])
  end

  @doc """
  Renews an eligible ACM certificate. At this time, only exported private
  certificates can be renewed with this operation.
  """
  @spec renew_certificate(String.t()) :: ExAws.Operation.JSON.t()
  def renew_certificate(certificate_arn) do
    request(:renew_certificate, [CertificateArn: certificate_arn])
  end

  @doc """
  Requests an ACM certificate for use with other AWS services.
  """
  @spec request_certificate(certificate_arn, request_certificate_opts) :: ExAws.Operation.JSON.t()
  def request_certificate(domain_name, opts \\ []) do
    request(:request_certificate, Keyword.merge([DomainName: domain_name], opts))
  end

  @doc """
  Resends the email that requests domain ownership validation.
  """
  @spec resend_validation_email(certificate_arn, String.t(), String.t()) :: ExAws.Operation.JSON.t()
  def resend_validation_email(certificate_arn, domain, validation_domain) do
    request(:resend_validation_email, [
      CertificateArn: certificate_arn,
      Domain: domain,
      ValidationDomain: validation_domain
    ])
  end

  @doc false
  @spec target(String.t()) :: String.t()
  def target(name) do
    "#{@namespace}.#{name}"
  end

  @doc """
  Updates a certificate.
  """
  @spec update_certificate_options(certificate_arn, certificate_options) :: ExAws.Operation.JSON.t()
  def update_certificate_options(certificate_arn, options) do
    request(:update_certificate_options, [CertificateArn: certificate_arn, Options: options])
  end

  # Builds the ExAws JSON operation for the given ACM action.
  defp request(operation, params, opts \\ %{}) do
    data = ExAws.Utils.camelize_keys(params, deep: true)
    opts = Map.merge(%{data: data, headers: headers(operation)}, opts)
    ExAws.Operation.JSON.new(:acm, opts)
  end

  # Builds the JSON-1.1 headers, including the camelized "x-amz-target".
  defp headers(operation) do
    target =
      operation
      |> Atom.to_string()
      |> Macro.camelize()

    [
      {"content-type", "application/x-amz-json-1.1"},
      {"x-amz-target", target(target)}
    ]
  end
end
|
lib/ex_aws/acm.ex
| 0.792504
| 0.770119
|
acm.ex
|
starcoder
|
defmodule StarkInfra.PixRequest do
alias __MODULE__, as: PixRequest
alias StarkInfra.User.Organization
alias StarkInfra.User.Project
alias StarkInfra.Utils.Parse
alias StarkInfra.Utils.Check
alias StarkInfra.Utils.Rest
alias StarkInfra.Error
@moduledoc """
Groups PixRequest related functions
"""
@doc """
PixRequests are used to receive or send instant payments to accounts
hosted in any Pix participant.
When you initialize a PixRequest, the entity will not be automatically
created in the Stark Infra API. The 'create' function sends the structs
to the Stark Infra API and returns the list of created structs.
## Parameters (required):
- `:amount` [integer]: amount in cents to be transferred. ex: 11234 (= R$ 112.34)
- `:external_id` [string]: string that must be unique among all your PixRequests. Duplicated external IDs will cause failures. By default, this parameter will block any PixRequests that repeats amount and receiver information on the same date. ex: "my-internal-id-123456"
- `:sender_name` [string]: sender's full name. ex: "<NAME>"
- `:sender_tax_id` [string]: sender's tax ID (CPF or CNPJ) with or without formatting. ex: "01234567890" or "20.018.183/0001-80"
- `:sender_branch_code` [string]: sender's bank account branch code. Use '-' in case there is a verifier digit. ex: "1357-9"
- `:sender_account_number` [string]: sender's bank account number. Use '-' before the verifier digit. ex: "876543-2"
- `:sender_account_type` [string, default "checking"]: sender's bank account type. ex: "checking", "savings", "salary" or "payment"
- `:receiver_name` [string]: receiver's full name. ex: "<NAME>"
- `:receiver_tax_id` [string]: receiver's tax ID (CPF or CNPJ) with or without formatting. ex: "01234567890" or "20.018.183/0001-80"
- `:receiver_bank_code` [string]: receiver's bank institution code in Brazil. ex: "20018183"
- `:receiver_account_number` [string]: receiver's bank account number. Use '-' before the verifier digit. ex: "876543-2"
- `:receiver_branch_code` [string]: receiver's bank account branch code. Use '-' in case there is a verifier digit. ex: "1357-9"
- `:receiver_account_type` [string]: receiver's bank account type. ex: "checking", "savings", "salary" or "payment"
- `:end_to_end_id` [string]: central bank's unique transaction ID. ex: "E79457883202101262140HHX553UPqeq"
## Parameters (optional):
- `:receiver_key_id` [string, default nil]: receiver's dict key. ex: "20.018.183/0001-80"
- `:description` [string, default nil]: optional description to override default description to be shown in the bank statement. ex: "Payment for service #1234"
- `:reconciliation_id` [string, default nil]: Reconciliation ID linked to this payment. ex: "b77f5236-7ab9-4487-9f95-66ee6eaf1781"
- `:initiator_tax_id` [string, default nil]: Payment initiator's tax id (CPF/CNPJ). ex: "01234567890" or "20.018.183/0001-80"
- `:cash_amount` [integer, default nil]: Amount to be withdrawal from the cashier in cents. ex: 1000 (= R$ 10.00)
- `:cashier_bank_code` [string, default nil]: Cashier's bank code. ex: "00000000"
- `:cashier_type` [string, default nil]: Cashier's type. ex: [merchant, other, participant]
- `:tags` [list of strings, default nil]: list of strings for reference when searching for PixRequests. ex: ["employees", "monthly"]
- `:method` [string, default nil]: execution method for thr creation of the PIX. ex: "manual", "payerQrcode", "dynamicQrcode".
## Attributes (return-only):
- `:id` [string]: unique id returned when the PixRequest is created. ex: "5656565656565656"
- `:fee` [integer]: fee charged when PixRequest is paid. ex: 200 (= R$ 2.00)
- `:status` [string]: current PixRequest status. Options: “created”, “processing”, “success”, “failed”
- `:flow` [string]: direction of money flow. ex: "in" or "out"
- `:sender_bank_code` [string]: sender's bank institution code in Brazil. ex: "20018183"
- `:created` [DateTime]: creation datetime for the PixRequest. ex: ~U[2020-03-10 10:30:0:0]
- `:updated` [DateTime]: latest update datetime for the PixRequest. ex: ~U[2020-03-10 10:30:0:0]
"""
  # Keys that must be provided when building a PixRequest struct.
  @enforce_keys [
    :amount,
    :external_id,
    :sender_name,
    :sender_tax_id,
    :sender_branch_code,
    :sender_account_number,
    :sender_account_type,
    :receiver_name,
    :receiver_tax_id,
    :receiver_bank_code,
    :receiver_account_number,
    :receiver_branch_code,
    :receiver_account_type,
    :end_to_end_id,
  ]
  # Required keys above, followed by the optional creation parameters and the
  # return-only attributes (see the moduledoc for each field's meaning).
  defstruct [
    :amount,
    :external_id,
    :sender_name,
    :sender_tax_id,
    :sender_branch_code,
    :sender_account_number,
    :sender_account_type,
    :receiver_name,
    :receiver_tax_id,
    :receiver_bank_code,
    :receiver_account_number,
    :receiver_branch_code,
    :receiver_account_type,
    :end_to_end_id,
    :receiver_key_id,
    :description,
    :reconciliation_id,
    :initiator_tax_id,
    :cash_amount,
    :cashier_bank_code,
    :cashier_type,
    :tags,
    :method,
    :id,
    :fee,
    :status,
    :flow,
    :sender_bank_code,
    :created,
    :updated
  ]
  @type t() :: %__MODULE__{}
@doc """
Send a list of PixRequest structs for creation in the Stark Infra API
## Parameters (required):
- `:requests` [list of PixRequest structs]: list of PixRequest structs to be created in the API
## Options:
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- list of PixRequest structs with updated attributes
"""
@spec create(
[PixRequest.t() | map()],
user: Project.t() | Organization.t() | nil
)::
{:ok, [PixRequest.t()]} |
{:error, [error: Error.t()]}
def create(requests, options \\ []) do
Rest.post(
resource(),
requests,
options
)
end
@doc """
Same as create(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec create!(
[PixRequest.t() | map()],
user: Project.t() | Organization.t() | nil
):: any
def create!(requests, options \\ []) do
Rest.post!(
resource(),
requests,
options
)
end
@doc """
Receive a single PixRequest struct previously created in the Stark Infra API by its id
## Parameters (required):
- `:id` [string]: struct unique id. ex: "5656565656565656"
## Options:
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- PixRequest struct with updated attributes
"""
@spec get(
id: binary,
user: Project.t() | Organization.t() | nil
)::
{:ok, PixRequest.t()} |
{:error, [error: Error.t()]}
def get(id, options \\ []) do
Rest.get_id(
resource(),
id,
options
)
end
@doc """
Same as get(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec get!(
id: binary,
user: Project.t() | Organization.t() | nil
):: any
def get!(id, options \\ []) do
Rest.get_id!(
resource(),
id,
options
)
end
@doc """
Receive a stream of PixRequest structs previously created in the Stark Infra API
## Options:
- `:limit` [integer, default nil]: maximum number of structs to be retrieved. Unlimited if nil. ex: 35
- `:after` [Date or string, default nil]: date filter for structs created after a specified date. ex: ~D[2020, 3, 10]
- `:before` [Date or string, default nil]: date filter for structs created before a specified date. ex: ~D[2020, 3, 10]
- `:status` [list of strings, default nil]: filter for status of retrieved structs. Options: “created”, “processing”, “success”, “failed”
- `:tags` [list of strings, default nil]: tags to filter retrieved structs. ex: ["tony", "stark"]
- `:ids` [list of strings, default nil]: list of ids to filter retrieved structs. ex: ["5656565656565656", "4545454545454545"]
- `:end_to_end_ids` [list of strings, default nil]: central bank's unique transaction IDs. ex: ["E79457883202101262140HHX553UPqeq", "E79457883202101262140HHX553UPxzx"]
- `:external_ids` [list of strings, default nil]: url safe strings that must be unique among all your PixRequests. Duplicated external IDs will cause failures. By default, this parameter will block any PixRequests that repeats amount and receiver information on the same date. ex: ["my-internal-id-123456", "my-internal-id-654321"]
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- stream of PixRequest structs with updated attributes
"""
@spec query(
limit: integer,
after: Date.t() | binary,
before: Date.t() | binary,
status: [binary],
tags: [binary],
ids: [binary],
end_to_end_ids: [binary],
external_ids: [binary],
user: Project.t() | Organization.t() | nil
)::
{:ok, [PixRequest.t()]} |
{:error, [error: Error.t()]}
def query(options \\ []) do
Rest.get_list(
resource(),
options
)
end
@doc """
Same as query(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec query!(
limit: integer,
after: Date.t() | binary,
before: Date.t() | binary,
status: [binary],
tags: [binary],
ids: [binary],
end_to_end_ids: [binary],
external_ids: [binary],
user: Project.t() | Organization.t() | nil
):: any
def query!(options \\ []) do
Rest.get_list!(
resource(),
options
)
end
@doc """
Receive a list of up to 100 PixRequest structs previously created in the Stark Infra API and the cursor to the next page.
Use this function instead of query if you want to manually page your requests.
## Options:
- `:cursor` [string, default nil]: cursor returned on the previous page function call
- `:limit` [integer, default 100]: maximum number of structs to be retrieved. Max = 100. ex: 35
- `:after` [Date or string, default nil]: date filter for structs created after a specified date. ex: ~D[2020, 3, 10]
- `:before` [Date or string, default nil]: date filter for structs created before a specified date. ex: ~D[2020, 3, 10]
- `:status` [list of strings, default nil]: filter for status of retrieved structs. Options: “created”, “processing”, “success”, “failed”
- `:tags` [list of strings, default nil]: tags to filter retrieved structs. ex: ["tony", "stark"]
- `:ids` [list of strings, default nil]: list of ids to filter retrieved structs. ex: ["5656565656565656", "4545454545454545"]
- `:end_to_end_ids` [list of strings, default nil]: central bank's unique transaction IDs. ex: ["E79457883202101262140HHX553UPqeq", "E79457883202101262140HHX553UPxzx"]
- `:external_ids` [list of strings, default nil]: url safe strings that must be unique among all your PixRequests. Duplicated external IDs will cause failures. By default, this parameter will block any PixRequests that repeats amount and receiver information on the same date. ex: ["my-internal-id-123456", "my-internal-id-654321"]
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- list of PixRequest structs with updated attributes
- cursor to retrieve the next page of PixRequest structs
"""
@spec page(
cursor: binary,
limit: integer,
after: Date.t() | binary,
before: Date.t() | binary,
status: [binary],
tags: [binary],
ids: [binary],
end_to_end_ids: [binary],
external_ids: [binary],
user: Project.t() | Organization.t() | nil
) ::
{:ok, {[PixRequest.t()], cursor: binary}} |
{:error, [error: Error.t()]}
def page(options \\ []) do
Rest.get_page(
resource(),
options
)
end
@doc """
Same as page(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec page!(
cursor: binary,
limit: integer,
after: Date.t() | binary,
before: Date.t() | binary,
status: [binary],
tags: [binary],
ids: [binary],
end_to_end_ids: [binary],
external_ids: [binary],
user: Project.t() | Organization.t() | nil
) :: any
def page!(options \\ []) do
Rest.get_page!(
resource(),
options
)
end
@doc """
Create a single PixRequest struct from a content string received from a handler listening at the request url.
If the provided digital signature does not check out with the StarkInfra public key, a
starkinfra.error.InvalidSignatureError will be raised.
## Parameters (required):
- `:content` [string]: response content from request received at user endpoint (not parsed)
- `:signature` [string]: base-64 digital signature received at response header "Digital-Signature"
## Options:
- `cache_pid` [PID, default nil]: PID of the process that holds the public key cache, returned on previous parses. If not provided, a new cache process will be generated.
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- Parsed PixRequest object
"""
@spec parse(
content: binary,
signature: binary,
cache_pid: PID,
user: Project.t() | Organization.t()
)::
{:ok, PixRequest.t()} |
{:error, [error: Error.t()]}
def parse(options) do
%{content: content, signature: signature, cache_pid: cache_pid, user: user} =
Enum.into(
options |> Check.enforced_keys([:content, :signature]),
%{cache_pid: nil, user: nil}
)
Parse.parse_and_verify(
content: content,
signature: signature,
cache_pid: cache_pid,
key: nil,
resource_maker: &resource_maker/1,
user: user
)
end
@doc """
Same as parse(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec parse!(
content: binary,
signature: binary,
cache_pid: PID,
user: Project.t() | Organization.t()
) :: any
def parse!(options \\ []) do
%{content: content, signature: signature, cache_pid: cache_pid, user: user} =
Enum.into(
options |> Check.enforced_keys([:content, :signature]),
%{cache_pid: nil, user: nil}
)
Parse.parse_and_verify!(
content: content,
signature: signature,
cache_pid: cache_pid,
key: nil,
resource_maker: &resource_maker/1,
user: user
)
end
@doc false
# Resource descriptor consumed by the Rest helpers: API name plus decoder fun.
def resource do
  {"PixRequest", &resource_maker/1}
end
@doc false
# Builds a %PixRequest{} struct from a decoded JSON map with atom keys.
# Every field is copied verbatim except :created and :updated, which are
# converted to DateTime values via Check.datetime/1.
def resource_maker(json) do
  %PixRequest{
    amount: json[:amount],
    external_id: json[:external_id],
    sender_name: json[:sender_name],
    sender_tax_id: json[:sender_tax_id],
    sender_branch_code: json[:sender_branch_code],
    sender_account_number: json[:sender_account_number],
    sender_account_type: json[:sender_account_type],
    receiver_name: json[:receiver_name],
    receiver_tax_id: json[:receiver_tax_id],
    receiver_bank_code: json[:receiver_bank_code],
    receiver_account_number: json[:receiver_account_number],
    receiver_branch_code: json[:receiver_branch_code],
    receiver_account_type: json[:receiver_account_type],
    end_to_end_id: json[:end_to_end_id],
    receiver_key_id: json[:receiver_key_id],
    description: json[:description],
    reconciliation_id: json[:reconciliation_id],
    initiator_tax_id: json[:initiator_tax_id],
    cash_amount: json[:cash_amount],
    cashier_bank_code: json[:cashier_bank_code],
    cashier_type: json[:cashier_type],
    tags: json[:tags],
    method: json[:method],
    id: json[:id],
    fee: json[:fee],
    status: json[:status],
    flow: json[:flow],
    sender_bank_code: json[:sender_bank_code],
    created: json[:created] |> Check.datetime(),
    updated: json[:updated] |> Check.datetime()
  }
end
end
|
lib/pix_request/pix_request.ex
| 0.851999
| 0.51812
|
pix_request.ex
|
starcoder
|
defmodule Family do
  alias Family.Individual

  @moduledoc """
  Family module is to be used in order to parse GEDCOM 5.5 files.
  """

  @individual_tag "INDI"
  @name_tag "NAME"
  @gender_tag "SEX"
  @birthday_tag "BIRT"
  @given_name_tag "GIVN"
  @surname_tag "SURN"
  @family_tag "FAM"
  # Consistency fix: these tags were previously hard-coded as string literals
  # ("INDI", "DATE", "DEAT") inside regexes while every other tag had an attribute.
  @date_tag "DATE"
  @death_tag "DEAT"

  @doc """
  Returns a list of families
  """
  def get_families(file_path) do
    file_path
    |> parse
    |> Enum.filter(fn(row) -> Regex.match?(~r/0 @.+@ #{@family_tag}/, row) end)
  end

  @doc """
  Returns a list of Individuals
  """
  def get_individuals(file_path) do
    file_path
    |> parse
    |> Enum.filter(fn(row) -> Regex.match?(~r/0 @.+@ #{@individual_tag}/, row) end)
    |> Enum.map(fn(row) ->
      # Extract the cross-reference id between the "@" delimiters.
      ~r/0 @(?<id>.+)@ #{@individual_tag}/
      |> Regex.named_captures(row)
      |> Map.get("id")
    end)
    |> Enum.map(fn(row) -> get_individual(file_path, row) end)
  end

  @doc """
  Returns a single Individual via it's ID
  """
  def get_individual(file_path, individual_id) do
    file_path
    |> parse
    # Skip everything up to the record header of the requested individual.
    |> Enum.drop_while(fn(row) ->
      !Regex.match?(~r/0 @#{individual_id}@ #{@individual_tag}/, row)
    end)
    |> Enum.reduce_while(%Individual{id: individual_id}, fn(row, acc) ->
      cond do
        # A new level-0 INDI record means we walked past our individual: stop.
        Regex.match?(~r/0 .+ #{@individual_tag}/, row) && !Regex.match?(~r/0 @#{individual_id}@ #{@individual_tag}/, row) ->
          {:halt, acc}

        # A BIRT line (handled below) marks date_of_birth with "", so the very
        # next row is read as the DATE line.
        # NOTE(review): if the row following "1 BIRT" is not a "2 DATE" line,
        # Regex.named_captures/2 returns nil and Map.get/2 raises — confirm
        # whether the input files guarantee that ordering.
        Map.get(acc, :date_of_birth) == "" ->
          date_of_birth = parse_value(2, @date_tag, row)
          {:cont, Map.put(acc, :date_of_birth, date_of_birth)}

        Regex.match?(~r/1 #{@gender_tag}/, row) ->
          gender = parse_value(1, @gender_tag, row)
          {:cont, Map.put(acc, :sex, gender)}

        Regex.match?(~r/1 #{@name_tag}/, row) ->
          name = parse_value(1, @name_tag, row)
          {:cont, Map.put(acc, :name, name)}

        Regex.match?(~r/2 #{@given_name_tag}/, row) ->
          name = parse_value(2, @given_name_tag, row)
          {:cont, Map.put(acc, :given_name, name)}

        Regex.match?(~r/2 #{@surname_tag}/, row) ->
          name = parse_value(2, @surname_tag, row)
          {:cont, Map.put(acc, :surname, name)}

        # Sentinel: flag that the next row carries the birth date.
        Regex.match?(~r/1 #{@birthday_tag}/, row) ->
          {:cont, Map.put(acc, :date_of_birth, "")}

        true ->
          {:cont, acc}
      end
    end)
  end

  @doc """
  Returns the number of individuals
  """
  def individual_count(file_path) do
    file_path
    |> get_individuals
    |> Enum.count
  end

  @doc """
  Returns the number of families
  """
  def family_count(file_path) do
    file_path
    |> get_families
    |> Enum.count
  end

  @doc """
  Returns the number of living
  """
  def living_count(file_path) do
    individual_count(file_path) - deceased_count(file_path)
  end

  @doc """
  Returns the number of deceased individuals (rows carrying a DEAT tag)
  """
  def deceased_count(file_path) do
    file_path
    |> parse
    |> Enum.filter(fn(row) -> Regex.match?(~r/1 #{@death_tag}/, row) end)
    |> Enum.count
  end

  # Extracts the value after "<depth> <TAG> " using a named capture group.
  # Returns nil-unsafe result: raises if the row does not match.
  defp parse_value(depth, tag, row) do
    lowercased_tag = String.downcase(tag)

    ~r/#{depth} #{tag} (?<#{lowercased_tag}>.+)/
    |> Regex.named_captures(row)
    |> Map.get(lowercased_tag)
  end

  @doc """
  Reads the file at `file_path` and returns its non-empty lines.
  """
  def parse(file_path) do
    {:ok, data} = File.read(file_path)
    data |> String.split("\n", trim: true)
  end
end
|
lib/family.ex
| 0.681515
| 0.437763
|
family.ex
|
starcoder
|
defmodule Cashtrail.Users do
  @moduledoc """
  The Users context manages the users data of one entity and performs user
  authentication.
  See `Cashtrail.Users.User` for more details about the user schema.
  """

  import Ecto.Query, warn: false
  alias Cashtrail.Repo

  alias Cashtrail.{Paginator, Users}
  alias Cashtrail.Users.PasswordHash

  import Cashtrail.QueryBuilder, only: [build_search: 3]

  @type user() :: Users.User.t()

  @doc """
  Lists users as a `%Cashtrail.Paginator.Page{}` struct (users in `:entries`).

  ## Expected arguments

  * options - A `keyword` list of the following options:
    * `:search` - search users by `:first_name`, `:last_name` or `:email`.
    * See `Cashtrail.Paginator.paginate/2` for the pagination options.

  ## Examples

      iex> list_users()
      %Cashtrail.Paginator{entries: [%Users.User{}, ...]}

      iex> list_users(search: "my")
      %Cashtrail.Paginator{entries: [%Users.User{first_name: "<NAME>"}, ...]}
  """
  @spec list_users(keyword) :: Cashtrail.Paginator.Page.t(user)
  def list_users(options \\ []) do
    search_term = Keyword.get(options, :search)

    Users.User
    |> build_search(search_term, [:first_name, :last_name, :email])
    |> Paginator.paginate(options)
  end

  @doc """
  Fetches a single user by id, raising `Ecto.NoResultsError` when none exists.

  ## Expected Arguments

  * id - A `string` that is the unique id of the user to be found.

  ## Examples

      iex> get_user!(123)
      %Users.User{}

      iex> get_user!(456)
      ** (Ecto.NoResultsError)
  """
  @spec get_user!(Ecto.UUID.t() | String.t()) :: user()
  def get_user!(id) do
    Repo.get!(Users.User, id)
  end

  @doc """
  Fetches a single user matching the given params, or nil when none exists.

  ## Expected Arguments

  * params - A `keyword` or a `map` with the attributes of the user to be found.

  ## Examples

      iex> get_user_by(email: "<EMAIL>")
      %Users.User{}

      iex> get_user_by(email: "noexists')
      nil
  """
  @spec get_user_by(keyword | map) :: nil | {:error, :invalid_email} | user()
  def get_user_by(email: nil), do: {:error, :invalid_email}

  def get_user_by(params) do
    Repo.get_by(Users.User, params)
  end

  @doc """
  Authenticates a user by email and password.

  ## Returns

  * `{:ok, user}` if the user is found and the passwords match.
  * `{:error, :unauthorized}` if the passwords do not match.
  * `{:error, :not_found}` if the email is not found.

  ## Examples

      iex> authenticate(email, password)
      {:ok, %Users.User{}}

      iex> authenticate(email, wrong_pass)
      {:error, :unauthorized}

      iex> authenticate(wrong_email, password)
      {:error, :not_found}
  """
  @spec authenticate(String.t(), String.t()) ::
          {:ok, user()} | {:error, :not_found | :unauthorized}
  def authenticate(email, password) do
    case get_user_by(email: email) do
      %Users.User{} = user ->
        verify_password_hash(user, password)

      nil ->
        # Run a dummy hash so response timing does not leak account existence.
        PasswordHash.no_user_verify()
        {:error, :not_found}
    end
  end

  defp verify_password_hash(%Users.User{password_hash: stored_hash} = user, password) do
    if PasswordHash.verify_pass(password, stored_hash),
      do: {:ok, user},
      else: {:error, :unauthorized}
  end

  @doc """
  Creates a user.

  ## Expected Arguments

  * params - A `map` with the params of the user to be created:
    * `:email` (required) - valid email, unique in the application.
    * `:first_name` (required) - first name of the user.
    * `:last_name` - last name of the user.
    * `:password` (required) - min 6 characters, at least one letter and one number.
    * `:password_confirmation` (required) - must equal `:password`.

  ## Returns

  * `{:ok, %Cashtrail.Users.User{}}` in case of success.
  * `{:error, %Ecto.Changeset{}}` in case of error.

  ## Examples

      iex> create_user(%{field: value})
      {:ok, %Cashtrail.Users.User{}}

      iex> create_user(%{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  @spec create_user(map) ::
          {:ok, user()} | {:error, Ecto.Changeset.t(user())}
  def create_user(attrs) do
    Repo.insert(Users.User.changeset(%Users.User{}, attrs))
  end

  @doc """
  Updates a user. See `create_user/1` for the accepted params.

  ## Returns

  * `{:ok, %Cashtrail.Users.User{}}` in case of success.
  * `{:error, %Ecto.Changeset{}}` in case of error.

  ## Examples

      iex> update_user(user, %{field: new_value})
      {:ok, %Users.User{}}

      iex> update_user(user, %{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  @spec update_user(user(), map) :: {:ok, user()} | {:error, Ecto.Changeset.t(user())}
  def update_user(%Users.User{} = user, attrs) do
    Repo.update(Users.User.changeset(user, attrs))
  end

  @doc """
  Deletes a user.

  ## Examples

      iex> delete_user(user)
      {:ok, %Users.User{}}

      iex> delete_user(user)
      {:error, %Ecto.Changeset{}}
  """
  @spec delete_user(user()) :: {:ok, user()} | {:error, Ecto.Changeset.t(user())}
  def delete_user(%Users.User{} = user), do: Repo.delete(user)

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking user changes.

  ## Examples

      iex> change_user(user)
      %Ecto.Changeset{source: %Cashtrail.Users.User{}}
  """
  @spec change_user(user()) :: Ecto.Changeset.t(user())
  def change_user(%Users.User{} = user), do: Users.User.changeset(user, %{})
end
|
apps/cashtrail/lib/cashtrail/users.ex
| 0.843219
| 0.421254
|
users.ex
|
starcoder
|
defmodule Xfighter.Orderbook do
  import Xfighter.API, only: [decode_response: 2, request: 2]

  @type entry :: %{price: non_neg_integer, qty: non_neg_integer, isBuy: boolean}
  @type t :: %__MODULE__{
              ok: boolean,
              venue: String.t,
              symbol: String.t,
              bids: [__MODULE__.entry],
              asks: [__MODULE__.entry],
              ts: String.t
            }

  defstruct ok: false,
            venue: "",
            symbol: "",
            bids: [],
            asks: [],
            ts: ""

  @doc """
  Fetch the orderbook for a stock on a venue, wrapped in an ok/error tuple.

  ## Examples:

  ```
  iex> Xfighter.Orderbook.state("FOOBAR", "TESTEX")
  {:ok,
    %Xfighter.Orderbook{
      asks: [%{isBuy: false, price: 6000, qty: 3320},
        %{isBuy: false, price: 6000, qty: 5000}],
      bids: [%{isBuy: true, price: 5850, qty: 2554654},
        %{isBuy: true, price: 5000, qty: 375879}],
      ok: true, symbol: "FOOBAR", ts: "2015-12-17T23:30:37.455298328Z",
      venue: "TESTEX"}}

  iex> Xfighter.Orderbook.state("FOOBAR", "TEST")
  {:error, {:request, "Error 404: No venue exists with the symbol TEST"}}
  ```
  """
  @spec state(String.t, String.t) :: {:ok, __MODULE__.t} | {:error, tuple}
  def state(stock, venue) when is_bitstring(stock) and is_bitstring(venue) do
    # Delegate to the raising variant; translate known exceptions to tagged tuples.
    {:ok, state!(stock, venue)}
  rescue
    error in RequestError -> {:error, {:request, RequestError.message(error)}}
    error in ConnectionError -> {:error, {:connection, ConnectionError.message(error)}}
    error in InvalidJSON -> {:error, {:json, InvalidJSON.message(error)}}
  end

  @doc """
  Fetch the orderbook for a stock on a venue, raising on failure.

  Raises `RequestError` if the venue could not be found or the stock is not
  traded on the venue, `ConnectionError` if a connection attempt to the venue
  failed, `UnhandledAPIResponse` on unexpected events, and `InvalidJSON` if the
  response is not a valid JSON.

  ## Examples:

  ```
  iex> Xfighter.Orderbook.state!("FOOBAR", "TESTEX")
  %Xfighter.Orderbook{
    asks: [%{isBuy: false, price: 6000, qty: 3320},
      %{isBuy: false, price: 6000, qty: 5000}],
    bids: [%{isBuy: true, price: 5850, qty: 2554654},
      %{isBuy: true, price: 5000, qty: 375879}],
    ok: true, symbol: "FOOBAR", ts: "2015-12-17T23:30:37.455298328Z",
    venue: "TESTEX"}

  iex> Xfighter.Orderbook.state!("FOOBAR", "TEST")
  ** (RequestError) Error 404: No venue exists with the symbol TEST
  ```
  """
  @spec state!(String.t, String.t) :: __MODULE__.t
  def state!(stock, venue) when is_bitstring(stock) and is_bitstring(venue) do
    :get
    |> request("/venues/#{venue}/stocks/#{stock}")
    |> decode_response(as: __MODULE__)
  end
end
|
lib/xfighter/orderbook.ex
| 0.826151
| 0.867878
|
orderbook.ex
|
starcoder
|
defmodule RDF.Turtle.Encoder do
  @moduledoc """
  An encoder for Turtle serializations of RDF.ex data structures.
  As for all encoders of `RDF.Serialization.Format`s, you normally won't use these
  functions directly, but via one of the `write_` functions on the `RDF.Turtle`
  format module or the generic `RDF.Serialization` module.
  ## Options
  - `:base`: : Allows to specify the base URI to be used for a `@base` directive.
    If not specified the one from the given graph is used or if there is also none
    specified for the graph the `RDF.default_base_iri/0`.
  - `:prefixes`: Allows to specify the prefixes to be used as a `RDF.PrefixMap` or
    anything from which a `RDF.PrefixMap` can be created with `RDF.PrefixMap.new/1`.
    If not specified the ones from the given graph are used or if these are also not
    present the `RDF.default_prefixes/0`.
  - `:only`: Allows to specify which parts of a Turtle document should be generated.
    Possible values: `:base`, `:prefixes`, `:directives` (means the same as `[:base, :prefixes]`),
    `:triples` or a list with any combination of these values.
  - `:indent`: Allows to specify the number of spaces the output should be indented.
  """
  use RDF.Serialization.Encoder

  alias RDF.Turtle.Encoder.State
  alias RDF.{BlankNode, Dataset, Description, Graph, IRI, XSD, Literal, LangString, PrefixMap}

  import RDF.NTriples.Encoder, only: [escape_string: 1]

  # Default document layout; the :only option may select a subset of these parts.
  @document_structure [
    :base,
    :prefixes,
    :triples
  ]

  # One nesting level is rendered as 4 spaces.
  @indentation_char " "
  @indentation 4

  # Literal datatypes with a native Turtle lexical form (no quoting/"^^" needed
  # when the literal is valid).
  @native_supported_datatypes [
    XSD.Boolean,
    XSD.Integer,
    XSD.Double,
    XSD.Decimal
  ]
  @rdf_type RDF.Utils.Bootstrapping.rdf_iri("type")
  @rdf_nil RDF.Utils.Bootstrapping.rdf_iri("nil")

  # Defines rdf:type of subjects to be serialized at the beginning of the encoded graph
  @top_classes [RDF.Utils.Bootstrapping.rdfs_iri("Class")]

  # Defines order of predicates at the beginning of a resource description
  @predicate_order [
    @rdf_type,
    RDF.Utils.Bootstrapping.rdfs_iri("label"),
    RDF.iri("http://purl.org/dc/terms/title")
  ]
  @ordered_properties MapSet.new(@predicate_order)

  @impl RDF.Serialization.Encoder
  @spec encode(RDF.Data.t(), keyword) :: {:ok, String.t()} | {:error, any}
  def encode(data, opts \\ []) do
    # :base falls back to the legacy :base_iri option, then to the graph/default.
    base =
      Keyword.get(opts, :base, Keyword.get(opts, :base_iri))
      |> base_iri(data)
      |> init_base_iri()

    prefixes =
      Keyword.get(opts, :prefixes)
      |> prefixes(data)

    # State is a separate process holding the data plus bnode/list bookkeeping;
    # always stop it, even if compilation raises.
    {:ok, state} = State.start_link(data, base, prefixes)

    try do
      State.preprocess(state)

      {:ok,
       (Keyword.get(opts, :only) || @document_structure)
       |> compile(base, prefixes, state, opts)}
    after
      State.stop(state)
    end
  end

  # compile/5 renders one document element — or a list of elements — to a binary.
  defp compile(:base, base, _, _, opts), do: base_directive(base, opts)
  defp compile(:prefixes, _, prefixes, _, opts), do: prefix_directives(prefixes, opts)
  defp compile(:triples, _, _, state, opts), do: graph_statements(state, opts)

  # :directives is shorthand for [:base, :prefixes].
  defp compile(:directives, base, prefixes, state, opts),
    do: [:base, :prefixes] |> compile(base, prefixes, state, opts)

  defp compile(elements, base, prefixes, state, opts) when is_list(elements) do
    Enum.map_join(elements, &compile(&1, base, prefixes, state, opts))
  end

  defp compile(element, _, _, _, _) do
    raise "unknown Turtle document element: #{inspect(element)}"
  end

  # Resolve the effective base IRI: explicit option > graph's base > app default.
  defp base_iri(nil, %Graph{base_iri: base_iri}) when not is_nil(base_iri), do: base_iri
  defp base_iri(nil, _), do: RDF.default_base_iri()
  defp base_iri(base_iri, _), do: IRI.coerce_base(base_iri)

  defp init_base_iri(nil), do: nil
  defp init_base_iri(base_iri), do: {:ok, to_string(base_iri)}

  # Resolve the effective prefix map: explicit option > graph/dataset > defaults.
  defp prefixes(nil, %Graph{prefixes: prefixes}) when not is_nil(prefixes), do: prefixes

  defp prefixes(nil, %Dataset{} = dataset) do
    prefixes = Dataset.prefixes(dataset)

    if Enum.empty?(prefixes) do
      RDF.default_prefixes()
    else
      prefixes
    end
  end

  defp prefixes(nil, _), do: RDF.default_prefixes()
  defp prefixes(prefixes, _), do: PrefixMap.new(prefixes)

  # Renders "@base <...> ." (or SPARQL-style "BASE <...>") followed by a blank line.
  defp base_directive(nil, _), do: ""

  defp base_directive({_, base}, opts) do
    indent(opts) <>
      case Keyword.get(opts, :directive_style) do
        :sparql -> "BASE <#{base}>"
        _ -> "@base <#{base}> ."
      end <> "\n\n"
  end

  defp prefix_directive({prefix, ns}, opts) do
    indent(opts) <>
      case Keyword.get(opts, :directive_style) do
        :sparql -> "PREFIX #{prefix}: <#{to_string(ns)}>\n"
        _ -> "@prefix #{prefix}: <#{to_string(ns)}> .\n"
      end
  end

  defp prefix_directives(prefixes, opts) do
    case Enum.map(prefixes, &prefix_directive(&1, opts)) do
      [] -> ""
      prefixes -> Enum.join(prefixes, "") <> "\n"
    end
  end

  # Serializes all descriptions of the graph, one statement group per subject.
  defp graph_statements(state, opts) do
    indent = indent(opts)

    State.data(state)
    |> RDF.Data.descriptions()
    |> order_descriptions(state)
    |> Enum.map(&description_statements(&1, state, Keyword.get(opts, :indent, 0)))
    # nil entries are blank nodes that get inlined elsewhere (ref_count == 1).
    |> Enum.reject(&is_nil/1)
    |> Enum.map_join("\n", &(indent <> &1))
  end

  # Orders descriptions for output: the base-IRI subject first, then instances
  # of @top_classes (in that order), then everything else; within each group
  # IRI subjects sort before blank nodes (see sort_description_group/1).
  defp order_descriptions(descriptions, state) do
    base_iri = State.base_iri(state)

    group =
      Enum.group_by(descriptions, fn
        %Description{subject: ^base_iri} ->
          :base

        description ->
          with types when not is_nil(types) <- description.predications[@rdf_type] do
            Enum.find(@top_classes, :other, fn top_class ->
              Map.has_key?(types, top_class)
            end)
          else
            _ -> :other
          end
      end)

    ordered_descriptions =
      (@top_classes
       |> Stream.map(fn top_class -> group[top_class] end)
       |> Stream.reject(&is_nil/1)
       |> Stream.map(&sort_description_group/1)
       |> Enum.reduce([], fn class_group, ordered_descriptions ->
         ordered_descriptions ++ class_group
       end)) ++ (group |> Map.get(:other, []) |> sort_description_group())

    case group[:base] do
      [base] -> [base | ordered_descriptions]
      _ -> ordered_descriptions
    end
  end

  # IRI subjects before blank-node subjects; ties broken by string order.
  defp sort_description_group(descriptions) do
    Enum.sort(descriptions, fn
      %Description{subject: %IRI{}}, %Description{subject: %BlankNode{}} ->
        true

      %Description{subject: %BlankNode{}}, %Description{subject: %IRI{}} ->
        false

      %Description{subject: s1}, %Description{subject: s2} ->
        to_string(s1) < to_string(s2)
    end)
  end

  # Blank-node subjects referenced fewer than 2 times get special treatment
  # (inlined as [...] or (...) syntax); everything else is a full description.
  defp description_statements(description, state, nesting) do
    with %BlankNode{} <- description.subject,
         ref_count when ref_count < 2 <-
           State.bnode_ref_counter(state, description.subject) do
      unrefed_bnode_subject_term(description, ref_count, state, nesting)
    else
      _ -> full_description_statements(description, state, nesting)
    end
  end

  # Arity-4 variant takes an already-rendered subject term.
  defp full_description_statements(subject, description, state, nesting) do
    nesting = nesting + @indentation
    subject <> newline_indent(nesting) <> predications(description, state, nesting) <> " .\n"
  end

  defp full_description_statements(description, state, nesting) do
    term(description.subject, state, :subject, nesting)
    |> full_description_statements(description, state, nesting)
  end

  # Renders an anonymous blank node as "[ ...predications... ]".
  defp blank_node_property_list(description, state, nesting) do
    indented = nesting + @indentation

    "[" <>
      newline_indent(indented) <>
      predications(description, state, indented) <>
      newline_indent(nesting) <> "]"
  end

  defp predications(description, state, nesting) do
    description.predications
    |> order_predications()
    |> Enum.map(&predication(&1, state, nesting))
    |> Enum.join(" ;" <> newline_indent(nesting))
  end

  # @predicate_order entries come first (rdf:type, rdfs:label, dct:title),
  # remaining predicates keep their map order.
  @dialyzer {:nowarn_function, order_predications: 1}
  defp order_predications(predications) do
    sorted_predications =
      @predicate_order
      |> Enum.map(fn predicate -> {predicate, predications[predicate]} end)
      |> Enum.reject(fn {_, objects} -> is_nil(objects) end)

    unsorted_predications =
      Enum.reject(predications, fn {predicate, _} ->
        MapSet.member?(@ordered_properties, predicate)
      end)

    sorted_predications ++ unsorted_predications
  end

  defp predication({predicate, objects}, state, nesting) do
    term(predicate, state, :predicate, nesting) <>
      " " <>
      (objects
       |> Enum.map(fn {object, _} -> term(object, state, :object, nesting) end)
       # TODO: split if the line gets too long
       |> Enum.join(", "))
  end

  # A blank-node subject with ref_count 0 is serialized standalone; with
  # ref_count 1 it returns nil because it will be inlined at its single
  # reference site. Valid RDF lists use (...) syntax, others [...] syntax.
  defp unrefed_bnode_subject_term(bnode_description, ref_count, state, nesting) do
    if valid_list_node?(bnode_description.subject, state) do
      case ref_count do
        0 ->
          bnode_description.subject
          |> list_term(state, nesting)
          |> full_description_statements(
            list_subject_description(bnode_description),
            state,
            nesting
          )

        1 ->
          nil

        _ ->
          raise "Internal error: This shouldn't happen. Please raise an issue in the RDF.ex project with the input document causing this error."
      end
    else
      case ref_count do
        0 ->
          blank_node_property_list(bnode_description, state, nesting) <> " .\n"

        1 ->
          nil

        _ ->
          raise "Internal error: This shouldn't happen. Please raise an issue in the RDF.ex project with the input document causing this error."
      end
    end
  end

  @dialyzer {:nowarn_function, list_subject_description: 1}
  defp list_subject_description(description) do
    description = Description.delete_predicates(description, [RDF.first(), RDF.rest()])

    if Enum.count(description.predications) == 0 do
      # since the Turtle grammar doesn't allow bare lists, we add a statement
      description |> RDF.type(RDF.List)
    else
      description
    end
  end

  # Inlines a singly-referenced blank node at its object position.
  defp unrefed_bnode_object_term(bnode, ref_count, state, nesting) do
    if valid_list_node?(bnode, state) do
      list_term(bnode, state, nesting)
    else
      if ref_count == 1 do
        State.data(state)
        |> RDF.Data.description(bnode)
        |> blank_node_property_list(state, nesting)
      else
        raise "Internal error: This shouldn't happen. Please raise an issue in the RDF.ex project with the input document causing this error."
      end
    end
  end

  # True if the preprocessing step recognized this bnode as a well-formed list head.
  defp valid_list_node?(bnode, state) do
    MapSet.member?(State.list_nodes(state), bnode)
  end

  defp list_term(head, state, nesting) do
    head
    |> State.list_values(state)
    |> term(state, :list, nesting)
  end

  # term/4 renders a single RDF term for a given syntactic position.
  # rdf:type in predicate position becomes the "a" keyword; rdf:nil becomes "()".
  defp term(@rdf_type, _, :predicate, _), do: "a"
  defp term(@rdf_nil, _, _, _), do: "()"

  # IRIs: try a base-relative form, then a prefixed name, else the full <...> form.
  defp term(%IRI{} = iri, state, _, _) do
    based_name(iri, State.base(state)) ||
      prefixed_name(iri, State.prefixes(state)) ||
      "<#{to_string(iri)}>"
  end

  defp term(%BlankNode{} = bnode, state, position, nesting)
       when position in ~w[object list]a do
    if (ref_count = State.bnode_ref_counter(state, bnode)) <= 1 do
      unrefed_bnode_object_term(bnode, ref_count, state, nesting)
    else
      to_string(bnode)
    end
  end

  defp term(%BlankNode{} = bnode, _, _, _),
    do: to_string(bnode)

  defp term(%Literal{literal: %LangString{} = lang_string}, _, _, _) do
    quoted(lang_string.value) <> "@" <> lang_string.language
  end

  defp term(%Literal{literal: %XSD.String{}} = literal, _, _, _) do
    literal |> Literal.lexical() |> quoted()
  end

  # Valid booleans/numbers are emitted bare in their canonical form;
  # invalid ones fall back to the quoted "^^datatype" form.
  defp term(%Literal{literal: %datatype{}} = literal, state, _, nesting)
       when datatype in @native_supported_datatypes do
    if Literal.valid?(literal) do
      Literal.canonical_lexical(literal)
    else
      typed_literal_term(literal, state, nesting)
    end
  end

  defp term(%Literal{} = literal, state, _, nesting),
    do: typed_literal_term(literal, state, nesting)

  # An Elixir list (produced by State.list_values/2) renders as a Turtle collection.
  defp term(list, state, _, nesting) when is_list(list) do
    "(" <>
      (list
       |> Enum.map(&term(&1, state, :list, nesting))
       |> Enum.join(" ")) <>
      ")"
  end

  # Renders an IRI relative to the base, when it is a prefix of the IRI.
  defp based_name(%IRI{} = iri, base), do: based_name(to_string(iri), base)

  defp based_name(iri, {:ok, base}) do
    if String.starts_with?(iri, base) do
      # NOTE(review): `String.length(base)..-1` relies on a negative range step,
      # deprecated in newer Elixir in favor of `..-1//1` — confirm target version.
      "<#{String.slice(iri, String.length(base)..-1)}>"
    end
  end

  defp based_name(_, _), do: nil

  defp typed_literal_term(%Literal{} = literal, state, nesting) do
    ~s["#{Literal.lexical(literal)}"^^#{literal |> Literal.datatype_id() |> term(state, :datatype, nesting)}]
  end

  # Public because other encoders reuse it; returns nil when no prefix matches
  # or the local name is not serializable as a PN_LOCAL.
  def prefixed_name(iri, prefixes) do
    case PrefixMap.prefix_name_pair(prefixes, iri) do
      {prefix, name} -> if valid_pn_local?(name), do: prefix <> ":" <> name
      _ -> nil
    end
  end

  # Simplified PN_LOCAL check (alphanumerics, "_" and ":" only) — stricter than
  # the full Turtle grammar, so some legal names fall back to <...> form.
  defp valid_pn_local?(name) do
    String.match?(name, ~r/^([[:alpha:]]|[[:digit:]]|_|:)*$/u)
  end

  # Strings with newlines use the long quote form so they need no escaping.
  # NOTE(review): a value containing `"""` would break the long form — confirm
  # upstream guarantees, or escape.
  defp quoted(string) do
    if String.contains?(string, ["\n", "\r"]) do
      ~s["""#{string}"""]
    else
      ~s["#{escape_string(string)}"]
    end
  end

  defp newline_indent(nesting),
    do: "\n" <> String.duplicate(@indentation_char, nesting)

  # Document-level indentation from the :indent option (count of spaces).
  defp indent(opts) when is_list(opts), do: opts |> Keyword.get(:indent) |> indent()
  defp indent(nil), do: ""
  defp indent(count), do: String.duplicate(" ", count)
end
|
lib/rdf/serializations/turtle_encoder.ex
| 0.876489
| 0.642545
|
turtle_encoder.ex
|
starcoder
|
defmodule Homework.Merchants do
  @moduledoc """
  The Merchants context.
  """

  import Ecto.Query, warn: false
  alias Homework.Repo

  alias Homework.Pagination
  alias Homework.Merchants.Merchant

  @doc """
  Returns the list of merchants.

  ## Examples

      iex> list_merchants(%{limit: 10, offset: 0})
      [%Merchant{}, ...]
  """
  def list_merchants(args) do
    query = from m in Merchant

    query
    |> Pagination.add_limit_and_offset(args.limit, args.offset)
    |> Repo.all()
    |> put_total_rows(query)
  end

  @doc """
  Gets all merchants that have a name that fuzzy matches the given name by 5 or less string distance

  ## Examples

      iex> fuzzy_search_merchants_by_name(%{name: "Connelly"})
      [%Merchant{name: "Connelly", ...}, %Merchant{name: "Connelley", ...}, ...]
  """
  def fuzzy_search_merchants_by_name(args) do
    query = from m in Merchant, where: fragment("levenshtein(?, ?)", m.name, ^args.name) <= ^args.string_difference

    query
    |> Pagination.add_limit_and_offset(args.limit, args.offset)
    |> Repo.all()
    |> put_total_rows(query)
  end

  # Annotates each merchant with the total (unpaginated) row count.
  # Perf fix: get_total_rows/1 was previously executed once per merchant inside
  # the comprehension, issuing N count queries; it is now run exactly once per
  # page (and not at all when the page is empty).
  defp put_total_rows([], _query), do: []

  defp put_total_rows(merchants, query) do
    total_rows = Pagination.get_total_rows(query)
    Enum.map(merchants, &Map.put_new(&1, :total_rows, total_rows))
  end

  @doc """
  Gets a single merchant.

  Raises `Ecto.NoResultsError` if the Merchant does not exist.

  ## Examples

      iex> get_merchant!(123)
      %Merchant{}

      iex> get_merchant!(456)
      ** (Ecto.NoResultsError)
  """
  def get_merchant!(id), do: Repo.get!(Merchant, id)

  @doc """
  Creates a merchant.

  ## Examples

      iex> create_merchant(%{field: value})
      {:ok, %Merchant{}}

      iex> create_merchant(%{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def create_merchant(attrs \\ %{}) do
    %Merchant{}
    |> Merchant.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a merchant.

  ## Examples

      iex> update_merchant(merchant, %{field: new_value})
      {:ok, %Merchant{}}

      iex> update_merchant(merchant, %{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def update_merchant(%Merchant{} = merchant, attrs) do
    merchant
    |> Merchant.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a merchant.

  ## Examples

      iex> delete_merchant(merchant)
      {:ok, %Merchant{}}

      iex> delete_merchant(merchant)
      {:error, %Ecto.Changeset{}}
  """
  def delete_merchant(%Merchant{} = merchant) do
    Repo.delete(merchant)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking merchant changes.

  ## Examples

      iex> change_merchant(merchant)
      %Ecto.Changeset{data: %Merchant{}}
  """
  def change_merchant(%Merchant{} = merchant, attrs \\ %{}) do
    Merchant.changeset(merchant, attrs)
  end
end
|
elixir/lib/homework/merchants.ex
| 0.819026
| 0.403508
|
merchants.ex
|
starcoder
|
defmodule Numy.Lapack.Vector do
@moduledoc """
LAPACK Vector.
Implements protocols: `Numy.Vc`, `Numy.Vcm`
## Example of mutating `add!`
iex(7)> v = Numy.Lapack.Vector.new([1,2,3])
%Numy.Lapack.Vector{lapack: #Numy.Lapack<shape: [...], ...>, nelm: 3}
iex(8)> Numy.Vcm.add!(v,v)
:ok
iex(9)> Numy.Lapack.data(v.lapack)
[2.0, 4.0, 6.0]
## Example of non-mutating `add`
iex(3)> v = Numy.Lapack.Vector.new([1,2,3])
%Numy.Lapack.Vector{lapack: #Numy.Lapack<shape: [...], ...>, nelm: 3}
iex(4)> Numy.Vc.add(v,v)
[1.0, 2.0, 3.0]
"""
@enforce_keys [:nelm]
defstruct [
:nelm, # length of the vector
:lapack # %Numy.Lapack structure
]
alias Numy.Lapack.Vector, as: LVec
# Create a new uninitialized Vector of `nelm` elements backed by a LAPACK tensor.
def new(nelm) when is_integer(nelm) do
  %Numy.Lapack.Vector{nelm: nelm, lapack: Numy.Lapack.new_tensor([nelm])}
end

# Create a new Vector initialized from an Elixir list; elements are coerced to
# floats via Numy.Enumy.all_to_float/1.
# NOTE(review): returns nil when the backing tensor is nil — presumably NIF
# allocation failure; confirm new_tensor/1's failure contract.
def new(list) when is_list(list) do
  nelm = length(list)
  v = %Numy.Lapack.Vector{nelm: nelm, lapack: Numy.Lapack.new_tensor([nelm])}
  cond do
    v.lapack == nil -> nil
    true ->
      Numy.Lapack.assign(v.lapack, Numy.Enumy.all_to_float(list))
      v
  end
end

@doc "Create new Vector as a copy of other Vector"
def new(%Numy.Lapack.Vector{nelm: sz, lapack: lpk} = _other_vec) do
  new_vec = Numy.Lapack.Vector.new(sz)
  Numy.Lapack.copy(new_vec.lapack, lpk)
  new_vec
end

@doc "Create new Vector from Elixir Range"
def new(%Range{} = range) do
  new(Enum.to_list(range))
end
@doc """
Create new Vector as a concatination of 2 other vectors
## Examples
iex(1)> v1 = Numy.Lapack.Vector.new([1,2,3])
iex(2)> v2 = Numy.Lapack.Vector.new([4,5,6])
iex(3)> v3 = Numy.Lapack.Vector.new(v1,v2)
iex(4)> Numy.Vc.data(v3)
[1.0, 2.0, 3.0, 4.0, 5.0, 6.0]
"""
def new(%Numy.Lapack.Vector{nelm: sz1, lapack: lpk1} = _v1,
%Numy.Lapack.Vector{nelm: sz2, lapack: lpk2} = _v2) do
new_vec = Numy.Lapack.Vector.new(sz1 + sz2)
Numy.Lapack.copy(new_vec.lapack, lpk1)
Numy.Lapack.vector_copy_range(new_vec.lapack.nif_resource, lpk2.nif_resource,
sz2, sz1, 0, 1, 1)
new_vec
end
@doc """
Wraps an existing NIF tensor resource in a `%Numy.Lapack.Vector{}`, reading
its element count from the resource.
"""
def make_from_nif_res(res) do
  # (removed a stray trailing ";" — a no-op statement separator in Elixir)
  nrelm = Numy.Lapack.tensor_nrelm(res)
  %Numy.Lapack.Vector{nelm: nrelm, lapack: %Numy.Lapack{nif_resource: res, shape: [nrelm]}}
end
@doc "Write the vector's on-disk representation to `filename` via the tensor NIF."
def save_to_file(v, filename) when is_map(v) do
  Numy.Lapack.tensor_save_to_file(v.lapack.nif_resource, filename)
end
@doc """
Load a vector previously written with `save_to_file/2`.

## Examples

    iex(4)> v = Numy.Lapack.Vector.new(1..100)
    #Vector<size=100, [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, ...]>
    iex(5)> Numy.Lapack.Vector.save_to_file(v, 'vec.numy.bin')
    :ok
    iex(6)> Numy.Lapack.Vector.load_from_file('vec.numy.bin')
    #Vector<size=100, [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, ...]>
"""
def load_from_file(filename) do
  res = Numy.Lapack.tensor_load_from_file(filename)
  make_from_nif_res(res)
end
# Non-mutating vector protocol implementation. Most functions delegate
# straight to LAPACK NIF calls on the backing tensor; the arithmetic
# operations (add/2, sub/2, mul/2, div/2, scale/2, ...) clone `v1` first and
# run the mutating `Numy.Vcm` counterpart on the copy, so the arguments are
# left untouched.
defimpl Numy.Vc do
  @doc "Make a clone"
  def clone(%Numy.Lapack.Vector{} = v) do
    Numy.Lapack.Vector.new(v)
  end

  # NOTE(review): the assign_* functions mutate `v` in place through the NIF
  # even though they live in the non-mutating protocol; they return `v` so
  # calls can be chained.
  def assign_all(v, val) when is_map(v) and is_number(val) do
    Numy.Lapack.vector_assign_all(v.lapack.nif_resource, val)
    v
  end

  def assign_zeros(v) when is_map(v) do
    Numy.Lapack.vector_assign_all(v.lapack.nif_resource, 0.0)
    v
  end

  def assign_ones(v) when is_map(v) do
    Numy.Lapack.vector_assign_all(v.lapack.nif_resource, 1.0)
    v
  end

  def assign_random(v) when is_map(v) do
    Numy.Lapack.assign(v.lapack, Numy.Float.make_list_randoms(v.nelm))
    v
  end

  # Return the first `nelm` elements as a list of floats.
  def data(v, nelm) when is_map(v) and is_integer(nelm) do
    Numy.Lapack.data(v.lapack, nelm)
  end

  # Out-of-bounds indices return `default` rather than raising.
  def at(%Numy.Lapack.Vector{nelm: nelm}, index, default) when index < 0 or index >= nelm, do: default

  def at(v, index, _default) when is_map(v) and is_integer(index) do
    Numy.Lapack.vector_get_at(v.lapack.nif_resource, index)
  end

  def empty?(v) when is_map(v) do
    v.nelm == 0
  end

  @doc "Return size/length of the vector."
  def size(v) when is_map(v) do
    v.nelm
  end

  def equal?(v1, v2) when is_map(v1) and is_map(v2) do
    Numy.Lapack.vector_equal(v1.lapack.nif_resource, v2.lapack.nif_resource)
  end

  @doc """
  Add two vectors element-wise without mutating the arguments:
  `v1` is cloned and the mutating `add!` runs on the copy.
  """
  def add(v1, v2) when is_map(v1) and is_map(v2) do
    Numy.Vcm.add!(LVec.new(v1), v2)
  end

  def sub(v1, v2) when is_map(v1) and is_map(v2) do
    Numy.Vcm.sub!(LVec.new(v1), v2)
  end

  def mul(v1, v2) when is_map(v1) and is_map(v2) do
    Numy.Vcm.mul!(LVec.new(v1), v2)
  end

  def div(v1, v2) when is_map(v1) and is_map(v2) do
    Numy.Vcm.div!(LVec.new(v1), v2)
  end

  def scale(v, factor) when is_map(v) and is_number(factor) do
    Numy.Vcm.scale!(LVec.new(v), factor)
  end

  def offset(v, off) when is_map(v) and is_number(off) do
    Numy.Vcm.offset!(LVec.new(v), off)
  end

  def negate(v) when is_map(v) do
    Numy.Vcm.negate!(LVec.new(v))
  end

  # Dot product, ∑aᵢbᵢ.
  def dot(v1, v2) when is_map(v1) and is_map(v2) do
    Numy.Lapack.vector_dot(v1.lapack.nif_resource, v2.lapack.nif_resource)
  end

  @doc "Sum of all elements, ∑aᵢ"
  def sum(v) do
    Numy.Lapack.vector_sum(v.lapack.nif_resource)
  end

  @doc "Average (∑aᵢ)/length"
  def mean(%Numy.Lapack.Vector{nelm: nelm, lapack: _}) when nelm == 0 do
    raise "empty vector";
  end

  def mean(%Numy.Lapack.Vector{nelm: nelm, lapack: _} = v) do
    Numy.Vc.sum(v) / nelm
  end

  @doc "Return max value"
  def max(v) when is_map(v) do
    Numy.Lapack.vector_max(v.lapack.nif_resource)
  end

  @doc "Return min value"
  def min(v) when is_map(v) do
    Numy.Lapack.vector_min(v.lapack.nif_resource)
  end

  @doc "Return index of max value"
  def max_index(v) when is_map(v) do
    Numy.Lapack.vector_max_index(v.lapack.nif_resource)
  end

  @doc "Return index of min value"
  def min_index(v) when is_map(v) do
    Numy.Lapack.vector_min_index(v.lapack.nif_resource)
  end

  @doc "Step function, aᵢ ← 0 if aᵢ < 0 else 1"
  def apply_heaviside(v, cutoff \\ 0.0) when is_map(v) and is_number(cutoff) do
    Numy.Vcm.apply_heaviside!(LVec.new(v), cutoff)
  end

  @doc "f(x) = 1/(1 + e⁻ˣ)"
  def apply_sigmoid(v) when is_map(v) do
    Numy.Vcm.apply_sigmoid!(LVec.new(v))
  end

  def sort(v) when is_map(v) do
    Numy.Vcm.sort!(LVec.new(v))
  end

  def reverse(v) when is_map(v) do
    Numy.Vcm.reverse!(LVec.new(v))
  end

  @doc "Concatenate 2 vectors"
  def concat(v1, v2) when is_map(v1) and is_map(v2) do
    Numy.Lapack.Vector.new(v1, v2)
  end

  # Find the index of `val`; the NIF returns -1 when absent (see contains?/2).
  def find(v, val) when is_map(v) and is_number(val) do
    Numy.Lapack.vector_find(v.lapack.nif_resource, val)
  end

  def contains?(v, val) when is_map(v) and is_number(val) do
    Numy.Vc.find(v, val) != -1
  end

  def abs(v) when is_map(v) do
    Numy.Vcm.abs!(LVec.new(v))
  end

  def pow2(v) when is_map(v) do
    Numy.Vcm.pow2!(LVec.new(v))
  end

  def pow(v, p) when is_map(v) do
    Numy.Vcm.pow!(LVec.new(v), p)
  end

  # Euclidean (L2) norm.
  def norm2(v) when is_map(v) do
    Numy.Lapack.vector_norm2(v.lapack.nif_resource)
  end
end # defimpl Numy.Vc
# Mutating vector protocol implementation: every operation rewrites the
# contents of its first argument in place through a LAPACK NIF call.
# On success the mutated vector is returned (so calls can be chained);
# if the NIF call raises, `:error` is returned instead.
defimpl Numy.Vcm do
  # Shared wrapper around the NIF calls: run `nif_fun`, return `vec` on
  # success and `:error` if anything was raised (NIFs raise on bad arguments).
  defp run_nif(vec, nif_fun) do
    try do
      nif_fun.()
      vec
    rescue
      _ -> :error
    end
  end

  def add!(v1, v2) when is_map(v1) and is_map(v2) do
    run_nif(v1, fn ->
      Numy.Lapack.vector_add(v1.lapack.nif_resource, v2.lapack.nif_resource)
    end)
  end

  def sub!(v1, v2) when is_map(v1) and is_map(v2) do
    run_nif(v1, fn ->
      Numy.Lapack.vector_sub(v1.lapack.nif_resource, v2.lapack.nif_resource)
    end)
  end

  def mul!(v1, v2) when is_map(v1) and is_map(v2) do
    run_nif(v1, fn ->
      Numy.Lapack.vector_mul(v1.lapack.nif_resource, v2.lapack.nif_resource)
    end)
  end

  def div!(v1, v2) when is_map(v1) and is_map(v2) do
    run_nif(v1, fn ->
      Numy.Lapack.vector_div(v1.lapack.nif_resource, v2.lapack.nif_resource)
    end)
  end

  def scale!(v, factor) when is_map(v) and is_number(factor) do
    run_nif(v, fn -> Numy.Lapack.vector_scale(v.lapack.nif_resource, factor) end)
  end

  # FIX: the original @spec claimed the return type was only `:error`;
  # on success the vector itself is returned, like every other function here.
  @spec offset!(map, number) :: map | :error
  def offset!(v, factor) when is_map(v) and is_number(factor) do
    run_nif(v, fn -> Numy.Lapack.vector_offset(v.lapack.nif_resource, factor) end)
  end

  def negate!(v) when is_map(v) do
    run_nif(v, fn -> Numy.Lapack.vector_negate(v.lapack.nif_resource) end)
  end

  def apply_heaviside!(v, cutoff \\ 0.0) when is_map(v) and is_number(cutoff) do
    run_nif(v, fn -> Numy.Lapack.vector_heaviside(v.lapack.nif_resource, cutoff) end)
  end

  def apply_sigmoid!(v) when is_map(v) do
    run_nif(v, fn -> Numy.Lapack.vector_sigmoid(v.lapack.nif_resource) end)
  end

  def sort!(v) when is_map(v) do
    run_nif(v, fn -> Numy.Lapack.vector_sort(v.lapack.nif_resource) end)
  end

  def reverse!(v) when is_map(v) do
    run_nif(v, fn -> Numy.Lapack.vector_reverse(v.lapack.nif_resource) end)
  end

  def set_at!(v, pos, val) when is_map(v) and is_integer(pos) and is_number(val) do
    run_nif(v, fn -> Numy.Lapack.vector_set_at(v.lapack.nif_resource, pos, val) end)
  end

  # BUGFIX: the second resource argument was `v1.lapack.nif_resource` twice,
  # so `v2` was silently ignored. Presumably BLAS-style axpby
  # (v1 ← f1*v1 + f2*v2) — confirm against the vector_axpby NIF.
  def axpby!(v1, v2, f1, f2)
      when is_map(v1) and is_map(v2) and is_number(f1) and is_number(f2) do
    run_nif(v1, fn ->
      Numy.Lapack.vector_axpby(v1.lapack.nif_resource, v2.lapack.nif_resource, f1, f2)
    end)
  end

  def abs!(v) when is_map(v) do
    run_nif(v, fn -> Numy.Lapack.vector_abs(v.lapack.nif_resource) end)
  end

  def pow2!(v) when is_map(v) do
    run_nif(v, fn -> Numy.Lapack.vector_pow2(v.lapack.nif_resource) end)
  end

  def pow!(v, p) when is_map(v) and is_number(p) do
    run_nif(v, fn -> Numy.Lapack.vector_pow(v.lapack.nif_resource, p) end)
  end
end # defimpl Numy.Vcm
@doc """
Swap `nelm` elements between two vectors, starting at offset `off_a` in `v1`
and offset `off_b` in `v2` (both vectors are mutated in place).

## Examples

    iex(1)> alias Numy.Lapack.Vector
    Numy.Lapack.Vector
    iex(2)> a = Vector.new(1..10)
    #Vector<size=10, [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0]>
    iex(3)> b = Vector.new(1..10)
    #Vector<size=10, [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0]>
    iex(4)> Vector.swap_ranges(a,b,3,2,7)
    :ok
    iex(5)> a
    #Vector<size=10, [1.0, 2.0, 8.0, 9.0, 10.0, 6.0, 7.0, 8.0, 9.0, 10.0]>
    iex(6)> b
    #Vector<size=10, [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 3.0, 4.0, 5.0]>
"""
def swap_ranges(v1, v2, nelm, off_a, off_b) do
  Numy.Lapack.vector_swap_ranges(v1.lapack.nif_resource, v2.lapack.nif_resource,
    nelm, off_a, off_b)
end
end
defimpl Inspect, for: Numy.Lapack.Vector do
  import Inspect.Algebra

  # Render as #Vector<size=N, [elements...]>, capping the printed list at
  # 10 elements regardless of the caller's inspect options.
  def inspect(vector, opts) do
    capped_opts = %{opts | limit: 10}

    concat([
      "#Vector<size=",
      to_doc(vector.nelm, capped_opts),
      ", ",
      to_doc(Numy.Vc.data(vector), capped_opts),
      ">"
    ])
  end
end
defimpl Numy.Set, for: Numy.Lapack.Vector do
  # All four set operations share the same shape: run the NIF-level set_op on
  # the two backing tensors and wrap the resulting resource in a new Vector.
  defp nif_set_op(a, b, op) do
    a.lapack.nif_resource
    |> Numy.Lapack.set_op(b.lapack.nif_resource, op)
    |> Numy.Lapack.Vector.make_from_nif_res()
  end

  @doc """
  The union of two sets is formed by the elements that are present
  in either one of the sets, or in both.
  C = A ∪ B = {x : x ∈ A or x ∈ B}
  """
  def union(a, b) when is_map(a) and is_map(b), do: nif_set_op(a, b, :union)

  @doc """
  The intersection of two sets is formed only by the elements
  that are present in both sets.
  C = A ∩ B = {x : x ∈ A and x ∈ B}
  """
  def intersection(a, b) when is_map(a) and is_map(b), do: nif_set_op(a, b, :intersection)

  @doc """
  The difference of two sets is formed by the elements
  that are present in the first set, but not in the second one.
  """
  def diff(a, b) when is_map(a) and is_map(b), do: nif_set_op(a, b, :diff)

  @doc """
  The symmetric difference of two sets is formed by the elements
  that are present in one of the sets, but not in the other.
  """
  def symm_diff(a, b) when is_map(a) and is_map(b), do: nif_set_op(a, b, :symm_diff)

  @doc """
  The [Jaccard index](https://en.wikipedia.org/wiki/Jaccard_index)
  (also known as similarity coefficient)
  measures similarity between finite sample sets, and is defined as
  the size of the intersection divided by the size of the union of the sample sets.
  """
  def jaccard_index(a, b) when is_map(a) and is_map(b) do
    union_size = Numy.Set.union(a, b) |> Numy.Vc.size()
    intersection_size = Numy.Set.intersection(a, b) |> Numy.Vc.size()

    if union_size == 0 do
      raise ArgumentError, message: "divide by 0"
    else
      intersection_size / union_size
    end
  end
end
|
lib/lapack/lapack_vector.ex
| 0.862004
| 0.627252
|
lapack_vector.ex
|
starcoder
|
defmodule DaySeventeen do
  @moduledoc """
  Advent of Code 2020, day 17: Conway Cubes (3-D cellular automaton).

  The grid is stored sparsely as a map of `{x, y, z} => "#" | "."`.
  """

  @doc """
  Parse `input`, run the automaton for `:cycles` steps (default 6) and
  return the number of active ("#") cubes.
  """
  def solve(input, opts \\ []) do
    cycles = Keyword.get(opts, :cycles, 6)

    input
    |> format_input()
    |> step(cycles)
    |> count_active()
  end

  @doc "Split the raw puzzle input into single-character strings and build the grid."
  def format_input(input) do
    input
    |> String.split("", trim: true)
    |> build_map()
  end

  @doc """
  Build the initial `{x, y, 0} => sym` map from a list of characters;
  a newline character starts the next row (y + 1) at x = 0.
  """
  def build_map(input) do
    input
    |> Enum.reduce({[], {0, 0}}, fn
      "\n", {acc, {_, y}} -> {acc, {0, y + 1}}
      c, {acc, {x, y}} -> {[{{x, y, 0}, c} | acc], {x + 1, y}}
    end)
    |> elem(0)
    |> Map.new()
  end

  @doc """
  Advance the automaton `cycles` steps. Each step tallies, for every cube,
  how many *active* neighbours it has (only active cubes emit counts), keeps
  the cubes with 2 or 3 active neighbours, and recomputes their symbol with
  `updated_sym/2`. Cubes never seen before read as `nil` from the old state.
  """
  def step(state, 0), do: state

  def step(state, cycles) do
    neighbour_counts =
      state
      |> Map.keys()
      |> Enum.reduce(%{}, fn pos, acc ->
        add_to_neighbours(acc, pos, Map.get(state, pos))
      end)

    new_state =
      neighbour_counts
      |> Enum.filter(fn {_, n} -> n == 2 or n == 3 end)
      |> Enum.map(fn {pos, count} ->
        {pos, updated_sym(Map.get(state, pos), count)}
      end)
      |> Map.new()

    step(new_state, cycles - 1)
  end

  @doc """
  Conway rules: an active cube stays active with 2 or 3 active neighbours;
  an inactive (or previously unseen, i.e. `nil`) cube activates with exactly 3.
  """
  def updated_sym(old_sym, count)
  def updated_sym("#", 2), do: "#"
  def updated_sym(_, 3), do: "#"
  def updated_sym(_, _), do: "."

  @doc "For an active cube, add +1 to the neighbour-count of each of its 26 neighbours."
  def add_to_neighbours(map, pos = {_, _, _}, "#") do
    pos
    |> neighbours()
    |> Enum.reduce(map, fn neighbour, acc ->
      Map.update(acc, neighbour, 1, fn n -> n + 1 end)
    end)
  end

  def add_to_neighbours(map, _, "."), do: map

  @doc "Count how many of `pos`'s 26 neighbours are active in `map`."
  def count_active_neighbours(map, pos) do
    neighbours = neighbours(pos)

    map
    |> Enum.filter(fn {p, _} -> p in neighbours end)
    |> count_active()
  end

  @doc "The 26 positions adjacent to `{x, y, z}`, excluding the position itself."
  def neighbours({x, y, z}) do
    for dx <- -1..1,
        dy <- -1..1,
        dz <- -1..1,
        {dx, dy, dz} != {0, 0, 0},
        do: {x + dx, y + dy, z + dz}
  end

  @doc """
  Count active cells. Accepts the grid map or any enumerable of
  `{position, symbol}` pairs.

  BUGFIX: the previous implementation called `Map.values/1`, which raised
  `BadMapError` when `count_active_neighbours/2` piped in a filtered *list*
  of pairs. `Enum.count/2` over the pairs handles both shapes.
  """
  def count_active(cells) do
    Enum.count(cells, fn {_pos, sym} -> sym == "#" end)
  end
end
|
adv_2020/lib/day_17.ex
| 0.554832
| 0.611817
|
day_17.ex
|
starcoder
|
defmodule Advent20.Docking do
  @moduledoc """
  Day 14: Docking Data
  """

  # Parse each program line into {:mask, mask_string} or {:mem, address, value}.
  defp parse(input) do
    input
    |> String.split("\n", trim: true)
    |> Enum.map(&String.split(&1, " = "))
    |> Enum.map(fn
      ["mem" <> bracketed_address, raw_value] ->
        address =
          bracketed_address
          |> String.replace(["[", "]"], "")
          |> String.to_integer()

        {:mem, address, String.to_integer(raw_value)}

      ["mask", mask] ->
        {:mask, mask}
    end)
  end

  @doc """
  Part 1: Execute the initialization program. What is the sum of all
  values left in memory after it completes?
  """
  def part_1(input) do
    input
    |> parse()
    |> Enum.reduce(%{bitmask: nil, memory: %{}}, &exec/2)
    |> sum_memory()
  end

  # Sum every value currently stored in memory.
  defp sum_memory(state) do
    state.memory
    |> Map.values()
    |> Enum.sum()
  end

  defp exec({:mask, new_mask}, state), do: %{state | bitmask: new_mask}

  # v1 decoder: mask bits "0"/"1" overwrite the value bits, "X" passes through.
  defp exec({:mem, address, value}, state) do
    masked_value =
      state.bitmask
      |> String.codepoints()
      |> Enum.zip(to_padded_bits(value))
      |> Enum.map(fn
        {"X", bit} -> bit
        {"0", _} -> "0"
        {"1", _} -> "1"
      end)
      |> Enum.join()
      |> String.to_integer(2)

    %{state | memory: Map.put(state.memory, address, masked_value)}
  end

  # 36-character binary representation of n, as a list of "0"/"1" strings.
  defp to_padded_bits(n) do
    n
    |> Integer.to_string(2)
    |> String.pad_leading(36, "0")
    |> String.codepoints()
  end

  @doc """
  Part 2: Execute the initialization program using an emulator for a version 2
  decoder chip. What is the sum of all values left in memory after it completes?
  """
  def part_2(input) do
    input
    |> parse()
    |> Enum.reduce(%{bitmask: nil, memory: %{}}, &exec_v2/2)
    |> sum_memory()
  end

  defp exec_v2({:mask, new_mask}, state), do: %{state | bitmask: new_mask}

  # v2 decoder: mask "0" keeps the address bit, "1" forces 1, "X" floats —
  # every floating bit doubles the set of addresses written to.
  defp exec_v2({:mem, address, value}, state) do
    state.bitmask
    |> String.codepoints()
    |> Enum.zip(to_padded_bits(address))
    |> Enum.map(fn
      {"0", bit} -> bit
      {"1", _} -> "1"
      {"X", _} -> :floating
    end)
    |> Enum.reduce([""], fn
      :floating, prefixes ->
        Enum.map(prefixes, &(&1 <> "0")) ++ Enum.map(prefixes, &(&1 <> "1"))

      char, prefixes ->
        Enum.map(prefixes, &(&1 <> char))
    end)
    |> Enum.map(&String.to_integer(&1, 2))
    |> Enum.reduce(state, fn addr, acc ->
      %{acc | memory: Map.put(acc.memory, addr, value)}
    end)
  end
end
|
lib/advent20/14_docking.ex
| 0.808219
| 0.548915
|
14_docking.ex
|
starcoder
|
defmodule LineBot.Message.Action do
  @moduledoc """
  Represents any one of the possible [Action objects](https://developers.line.biz/en/reference/messaging-api/#action-objects).
  """

  # Union of every concrete action struct defined in this file.
  # FIX: `LineBot.Message.Action.Postback.t()` was missing from the union even
  # though the Postback action module is defined alongside the others.
  @type t() ::
          LineBot.Message.Action.Postback.t()
          | LineBot.Message.Action.Message.t()
          | LineBot.Message.Action.URI.t()
          | LineBot.Message.Action.DateTimePicker.t()
          | LineBot.Message.Action.Camera.t()
          | LineBot.Message.Action.CameraRoll.t()
          | LineBot.Message.Action.Location.t()
end
defmodule LineBot.Message.Action.Postback do
  @moduledoc """
  Represents a [Postback action](https://developers.line.biz/en/reference/messaging-api/#postback-action).
  """
  use LineBot.Message

  @type t :: %__MODULE__{
          label: String.t(),
          data: String.t(),
          displayText: String.t(),
          type: :postback
        }

  # :data is the only required field; it is echoed back in the postback event.
  @enforce_keys [:data]
  defstruct [:label, :data, :displayText, type: :postback]
end
defmodule LineBot.Message.Action.Message do
  @moduledoc """
  Represents a [Message action](https://developers.line.biz/en/reference/messaging-api/#message-action).
  """
  use LineBot.Message

  @type t :: %__MODULE__{
          label: String.t(),
          text: String.t(),
          type: :message
        }

  # :text is the only required field.
  @enforce_keys [:text]
  defstruct [:label, :text, type: :message]
end
defmodule LineBot.Message.Action.URI do
  @moduledoc """
  Represents a [URI action](https://developers.line.biz/en/reference/messaging-api/#uri-action).
  """
  # NOTE(review): the link previously pointed at #postback-action — corrected
  # to the URI-action anchor of the LINE Messaging API reference.
  use LineBot.Message

  @type t :: %__MODULE__{
          label: String.t(),
          uri: String.t(),
          altUri: %{desktop: :http | :https | :line | :tel},
          type: :uri
        }

  # :uri is the only required field.
  @enforce_keys [:uri]
  defstruct [:label, :uri, :altUri, type: :uri]
end
defmodule LineBot.Message.Action.DateTimePicker do
  @moduledoc """
  Represents a [Datetime picker action](https://developers.line.biz/en/reference/messaging-api/#datetime-picker-action).
  """
  # NOTE(review): the link previously pointed at #postback-action — corrected
  # to the datetime-picker anchor of the LINE Messaging API reference.
  use LineBot.Message

  @type t :: %__MODULE__{
          label: String.t(),
          data: String.t(),
          mode: :date | :time | :datetime,
          initial: String.t(),
          max: String.t(),
          min: String.t(),
          type: :datetimepicker
        }

  # :data and :mode are required; :initial/:max/:min constrain the picker.
  @enforce_keys [:data, :mode]
  defstruct [:label, :data, :mode, :initial, :max, :min, type: :datetimepicker]
end
defmodule LineBot.Message.Action.Camera do
  @moduledoc """
  Represents a [Camera action](https://developers.line.biz/en/reference/messaging-api/#camera-action).
  """
  # NOTE(review): the link previously pointed at #postback-action — corrected
  # to the camera-action anchor of the LINE Messaging API reference.
  @derive Jason.Encoder

  @type t :: %__MODULE__{
          label: String.t(),
          type: :camera
        }

  @enforce_keys [:label]
  defstruct [:label, type: :camera]
end
defmodule LineBot.Message.Action.CameraRoll do
  @moduledoc """
  Represents a [Camera roll action](https://developers.line.biz/en/reference/messaging-api/#camera-roll-action).
  """
  # NOTE(review): the link previously pointed at #postback-action — corrected
  # to the camera-roll anchor of the LINE Messaging API reference.
  @derive Jason.Encoder

  @type t :: %__MODULE__{
          label: String.t(),
          type: :cameraRoll
        }

  @enforce_keys [:label]
  defstruct [:label, type: :cameraRoll]
end
defmodule LineBot.Message.Action.Location do
  @moduledoc """
  Represents a [Location action](https://developers.line.biz/en/reference/messaging-api/#location-action).
  """
  @derive Jason.Encoder

  @type t :: %__MODULE__{
          label: String.t(),
          type: :location
        }

  @enforce_keys [:label]
  defstruct [:label, type: :location]
end
|
lib/line_bot/message/action.ex
| 0.877503
| 0.5
|
action.ex
|
starcoder
|
defmodule Wavexfront.Proxy.Worker do
  @moduledoc """
  This is the actual connection to the proxy and handles all the TCP
  aspects of sending messages to the proxy.
  """
  use Connection

  require Logger

  alias Wavexfront.Item

  @initial_state %{socket: nil}

  def start_link(opts) do
    state = Map.merge(@initial_state, Enum.into(opts, %{}))
    Connection.start_link(__MODULE__, state, [])
  end

  def init(state) do
    {:connect, nil, state}
  end

  @doc "Send an `Item` over the connection; returns `:ok` or `{:error, reason}`."
  def send(conn, data), do: Connection.call(conn, {:send, data})

  @doc "Receive `bytes` bytes from the socket, waiting up to `timeout` ms."
  def recv(conn, bytes, timeout \\ 3000) do
    Connection.call(conn, {:recv, bytes, timeout})
  end

  def close(conn), do: Connection.call(conn, :close)

  def connect(_info, state) do
    opts = [:binary, active: false]

    case :gen_tcp.connect(to_charlist(state[:host]), state[:port], opts) do
      {:ok, socket} ->
        {:ok, %{state | socket: socket}}

      {:error, reason} ->
        # FIXME: try again in one second. Might want to make this exponential
        # and configurable
        :error_logger.format("Connection error: ~s for ~s:~B ", [
          reason,
          state[:host],
          state[:port]
        ])

        {:backoff, 1000, state}
    end
  end

  def disconnect(info, %{socket: sock} = s) do
    :ok = :gen_tcp.close(sock)

    case info do
      {:close, from} ->
        Connection.reply(from, :ok)

      {:error, :closed} ->
        :error_logger.format("Connection closed for ~s:~B ~n", [s[:host], s[:port]])

      {:error, reason} ->
        reason = :inet.format_error(reason)
        :error_logger.format("Connection error: ~s for ~s:~B ", [reason, s[:host], s[:port]])
    end

    {:connect, :reconnect, %{s | socket: nil}}
  end

  # BUGFIX: this clause matched on a non-existent `:sock` key (the state key is
  # `:socket`), so it never fired and calls while disconnected crashed instead
  # of returning {:error, :closed}.
  def handle_call(_, _, %{socket: nil} = s) do
    {:reply, {:error, :closed}, s}
  end

  def handle_call({:send, item}, _, %{socket: sock} = s) do
    case :gen_tcp.send(sock, Item.to_text(item)) do
      :ok ->
        {:reply, :ok, s}

      {:error, _} = error ->
        {:disconnect, error, error, s}
    end
  end

  # BUGFIX: matched `%{sock: sock}` (wrong key) — {:recv, ...} requests could
  # never match this clause and raised FunctionClauseError.
  def handle_call({:recv, bytes, timeout}, _, %{socket: sock} = s) do
    case :gen_tcp.recv(sock, bytes, timeout) do
      {:ok, _} = ok ->
        {:reply, ok, s}

      {:error, :timeout} = timeout ->
        {:reply, timeout, s}

      {:error, _} = error ->
        {:disconnect, error, error, s}
    end
  end

  def handle_call(:close, from, s) do
    {:disconnect, {:close, from}, s}
  end
end
|
lib/wavexfront/proxy/worker.ex
| 0.503418
| 0.413418
|
worker.ex
|
starcoder
|
defmodule Kira.RuntimeState do
  require Kira.Branch, as: Branch
  require Kira.BranchState, as: BranchState
  require Kira.Progress, as: Progress
  require Kira.Util, as: Util

  @moduledoc false
  # Bookkeeping for one run of Kira branches: per-branch task state, which
  # pid is executing which branch, and the overall apply/unapply progress.

  defstruct [:config, :branch_states, :running, :timeout, :progress]

  @type branches :: %{required(atom()) => BranchState.t()}
  @type running :: %{required(pid) => atom}
  @type t() :: %__MODULE__{
          config: any,
          branch_states: branches(),
          timeout: timeout(),
          running: running,
          progress: Progress.t()
        }

  @spec create(config :: any, branch_list :: [Branch.t()], timeout :: timeout) :: Util.result(t)
  def create(config, branch_list, timeout) do
    branches =
      for b <- branch_list,
          into: %{},
          do: {b.name, b}

    # If the name-keyed map is smaller than the list, two branches shared a name.
    if Enum.count(branch_list) == Enum.count(branches) do
      branch_states =
        for b <- branch_list,
            into: %{},
            do: {b.name, BranchState.create(b, branches)}

      {:ok,
       %__MODULE__{
         config: config,
         timeout: timeout,
         running: %{},
         progress: Progress.create(length(branch_list)),
         branch_states: branch_states
       }}
    else
      {:error, :duplicate_branch_names}
    end
  end

  @spec get_branch(state :: t(), branch_name :: atom) :: Util.result(BranchState.t())
  def get_branch(state, branch_name) do
    fetch_error = runtime_state_error({:failed_to_find_branch, branch_name})
    Util.fetch(state.branch_states, branch_name, fetch_error)
  end

  @spec get_branch_from_pid(state :: t(), pid :: pid) :: Util.result(BranchState.t())
  def get_branch_from_pid(state, pid) do
    fetch_error = runtime_state_error({:failed_to_find_branch_from, pid})

    with {:ok, branch_name} <- Util.fetch(state.running, pid, fetch_error) do
      get_branch(state, branch_name)
    end
  end

  # Names of all branches whose dependencies are satisfied and can start applying.
  @spec find_apply_ready(state :: t()) :: MapSet.t(atom())
  def find_apply_ready(state) do
    for {t, state} <- state.branch_states,
        BranchState.apply_ready?(state),
        into: MapSet.new(),
        do: t
  end

  # Names of all branches that are ready to start rolling back.
  @spec find_unapply_ready(state :: t()) :: MapSet.t(atom())
  def find_unapply_ready(state) do
    for {t, state} <- state.branch_states,
        BranchState.unapply_ready?(state),
        into: MapSet.new(),
        do: t
  end

  # Collect the completed results of every dependency of `branch_name`
  # into a map keyed by dependency name.
  @spec resolve_dependencies_of(state :: t, branch_name :: atom) :: Util.result(map)
  def resolve_dependencies_of(state, branch_name) do
    with {:ok, b} <- get_branch(state, branch_name) do
      dependencies = b.branch.dependencies

      Util.result_reduce(dependencies, %{}, fn requirement_name, collected ->
        with {:ok, branch} <- get_branch(state, requirement_name),
             {:ok, value} <- BranchState.get_completed(branch) do
          {:ok, Map.put(collected, requirement_name, value)}
        end
      end)
    end
  end

  @doc """
  Records the fact that the process has started running, and also prevents
  any of the branches that depend on it from starting to roll back.
  """
  @spec mark_as_applying(state :: t, branch_name :: atom, pid :: pid) :: Util.result(t)
  def mark_as_applying(state, branch_name, pid) do
    # Ensures none of its dependencies can start rolling back while it's running.
    updated_unapply_state = fn dependent_name, state ->
      with {:ok, branch} <- get_branch(state, dependent_name) do
        awaiting_unapply = MapSet.put(branch.awaiting_unapply, branch_name)
        branch = %{branch | awaiting_unapply: awaiting_unapply}
        branches = Map.put(state.branch_states, dependent_name, branch)
        {:ok, %{state | branch_states: branches}}
      end
    end

    with {:ok, branch} <- get_branch(state, branch_name),
         errors = BranchState.get_errors(branch),
         {:ok, state} <- set_branch_task(state, branch_name, {:running_apply, pid, errors}),
         {:ok, state} <- Util.result_reduce(branch.blocking_unapply, state, updated_unapply_state) do
      # record the fact that this pid is associated with this branch name
      running = Map.put(state.running, pid, branch_name)
      progress = Progress.record_apply_start(state.progress, branch_name)
      {:ok, %{state | running: running, progress: progress}}
    end
  end

  @spec mark_as_applied(state :: t, branch_name :: atom, value :: any) :: Util.result(t)
  def mark_as_applied(state = %__MODULE__{}, branch_name, value) do
    # Tell every branch blocked on us that this dependency is now satisfied.
    updated_blocked_state = fn blocked_name, state ->
      with {:ok, branch} <- get_branch(state, blocked_name) do
        branch = %{branch | awaiting: MapSet.delete(branch.awaiting, branch_name)}
        branches = Map.put(state.branch_states, blocked_name, branch)
        {:ok, %{state | branch_states: branches}}
      end
    end

    with {:ok, pid} <- get_branch_pid(state, branch_name),
         {:ok, state} <- set_branch_task(state, branch_name, {:done_applied, value}),
         {:ok, branch} <- get_branch(state, branch_name),
         {:ok, state} <- Util.result_reduce(branch.blocking, state, updated_blocked_state) do
      running = Map.drop(state.running, [pid])
      progress = Progress.record_apply_done(state.progress, branch_name)
      {:ok, %{state | running: running, progress: progress}}
    end
  end

  @spec mark_as_unapplied(state :: t, branch_name :: atom) :: Util.result(t)
  def mark_as_unapplied(state = %__MODULE__{}, branch_name) do
    # tells the branches which this branch was dependent on, that
    # it has finished unapplying
    updated_blocked_state = fn blocked_name, state ->
      with {:ok, branch} <- get_branch(state, blocked_name) do
        branch = %{
          branch
          | awaiting_unapply: MapSet.delete(branch.awaiting_unapply, branch_name)
        }

        branches = Map.put(state.branch_states, blocked_name, branch)
        {:ok, %{state | branch_states: branches}}
      end
    end

    with {:ok, pid} <- get_branch_pid(state, branch_name),
         {:ok, state} <- set_branch_task(state, branch_name, :done_unapplied),
         {:ok, branch} <- get_branch(state, branch_name),
         {:ok, state} <- Util.result_reduce(branch.blocking_unapply, state, updated_blocked_state) do
      running = Map.drop(state.running, [pid])
      progress = Progress.record_unapply_done(state.progress, branch_name)
      {:ok, %{state | running: running, progress: progress}}
    end
  end

  @doc """
  Ensure anything that would be awaiting for this task to be rolled back is no
  longer blocked by the task associated with this branch_name.
  """
  @spec unblock_dependend_unapplys(state :: t, branch_name :: atom) :: Util.result(t)
  def unblock_dependend_unapplys(state, branch_name) do
    updated_unapply_state = fn blocked_name, state ->
      with {:ok, branch} <- get_branch(state, blocked_name) do
        awaiting_unapply = MapSet.delete(branch.awaiting_unapply, branch_name)
        branch = %{branch | awaiting_unapply: awaiting_unapply}
        branches = Map.put(state.branch_states, blocked_name, branch)
        {:ok, %{state | branch_states: branches}}
      end
    end

    with {:ok, branch_s} <- get_branch(state, branch_name) do
      Util.result_reduce(branch_s.blocking_unapply, state, updated_unapply_state)
    end
  end

  @doc """
  When you record a failure with a branch it doesn't necessarily mean this process
  has completely failed, as there is an opportunity for it to be reattempted. But
  it is important for the exception to be recorded, so if a rollback is necessary
  we can identify the culprit that made the rollback necessary.
  """
  @spec record_failure(state :: t, branch_name :: atom, error :: any) :: Util.result(t)
  def record_failure(state = %__MODULE__{}, branch_name, error) do
    with {:ok, branch} <- get_branch(state, branch_name) do
      # Errors are timestamped so get_errors/1 can sort them chronologically.
      error_pair = {error, DateTime.utc_now()}
      errors = [error_pair | BranchState.get_errors(branch)]
      set_branch_task(state, branch_name, {:failed, errors})
    end
  end

  @spec record_apply_retry(state :: t, branch_name :: atom, pid :: pid) :: Util.result(t)
  def record_apply_retry(state = %__MODULE__{}, branch_name, pid) do
    with {:ok, branch} <- get_branch(state, branch_name) do
      errors = BranchState.get_errors(branch)
      set_branch_task(state, branch_name, {:running_apply_retry, pid, errors})
    end
  end

  @spec get_branch_pid(state :: t, branch_name :: atom) :: Util.result(pid)
  def get_branch_pid(state, branch_name) do
    with {:ok, branch} <- get_branch(state, branch_name) do
      BranchState.get_task_pid(branch)
    end
  end

  @spec set_branch_task(state :: t, branch_name :: atom, task :: BranchState.task()) ::
          Util.result(t)
  def set_branch_task(state, branch_name, task) do
    with {:ok, branch} <- get_branch(state, branch_name) do
      branch = BranchState.set_task(branch, task)
      {:ok, %{state | branch_states: Map.put(state.branch_states, branch_name, branch)}}
    end
  end

  # All recorded errors across every branch, sorted by when they occurred.
  @spec get_errors(state :: t) :: [any]
  def get_errors(state) do
    append_if_error = fn b, acc -> BranchState.get_errors(b) ++ acc end
    time_as_unix = fn {_, t} -> DateTime.to_unix(t) end

    state.branch_states
    |> Map.values()
    |> Enum.reduce([], append_if_error)
    |> Enum.sort_by(time_as_unix)
  end

  # Map of branch name => applied value, for every branch that finished applying.
  @spec get_done(state :: t) :: %{required(atom) => any}
  def get_done(state) do
    pairs = Map.to_list(state.branch_states)

    Enum.reduce(pairs, %{}, fn {k, branch_s}, acc ->
      case branch_s.task do
        {:done_applied, value} -> Map.put(acc, k, value)
        _anything_else -> acc
      end
    end)
  end

  @spec apply_done?(state :: t) :: boolean()
  def apply_done?(state), do: Progress.apply_done?(state.progress)

  @spec unapply_done?(state :: t) :: boolean()
  def unapply_done?(state), do: Progress.unapply_done?(state.progress)

  def runtime_state_error(error), do: {:error, {:runtime_state_error, error}}
end
|
lib/kira/runtime_state.ex
| 0.746509
| 0.408808
|
runtime_state.ex
|
starcoder
|
defmodule Cased.Sensitive.Handler do
  @moduledoc """
  Behaviour used to identify sensitive data.

  Implementing custom handlers only requires two functions:

  - `c:new/2`, which is called by `from_spec/1`, passing any custom configuration.
  - `c:ranges/3`, which is called for each value in an audit event by `Cased.Sensitive.Processor.process/2`.

  See `Cased.Sensitive.RegexHandler` and tests for example implementations.
  """

  @type handler_module :: module()

  @typedoc """
  A tuple structure used to declare the options for a handler.

  ## Examples

  Configuring a `Cased.Sensitive.RegexHandler` to detect `@`-prefixed usernames:

  ```
  {Cased.Sensitive.RegexHandler, :username, ~r/@\w+/}
  ```

  For your own, custom defined handlers:

  ```
  {MyApp.CustomHandler, :custom_label_for_handler, custom_configuration_for_handler}
  ```
  """
  @type spec :: {
          module :: handler_module(),
          label :: atom(),
          config :: any()
        }

  @type t :: %{
          :__struct__ => handler_module(),
          :label => atom(),
          optional(atom()) => any()
        }

  @doc """
  Create a handler with a given label and custom configuration.
  """
  @callback new(label :: atom(), config :: any()) :: t()

  @doc """
  Extract `Cased.Sensitive.Range` structs for a given `value` at `key`.

  Note that `value` can be of any type; your implementation should return an
  empty list for any unsupported values.
  """
  @callback ranges(
              handler :: t(),
              audit_event :: map(),
              {
                key :: Cased.Sensitive.Range.key(),
                value :: any()
              }
            ) :: [Cased.Sensitive.Range.t()]

  @doc """
  Create a handler from a handler specification (commonly loaded from application config).

  ## Examples

  Creating a `Cased.Sensitive.RegexHandler` from the tuple specification:

  ```
  iex> handler_spec = {Cased.Sensitive.RegexHandler, :username, ~r/@\w+/}
  iex> Cased.Sensitive.Handler.from_spec(handler_spec)
  %Cased.Sensitive.RegexHandler{label: :username, regex: ~r/@\w+/}
  ```
  """
  # NOTE(review): the doctest previously called `from_spec(handler)` while
  # binding `handler_spec` — fixed to use the bound variable.
  @spec from_spec(raw_handler :: spec()) :: t()
  # An already-built handler struct (any map) passes through unchanged ...
  def from_spec(%{} = handler), do: handler
  # ... while a {module, label, config} tuple is built via the module's new/2.
  def from_spec({module, label, config}), do: module.new(label, config)
end
|
lib/cased/sensitive/handler.ex
| 0.930954
| 0.810629
|
handler.ex
|
starcoder
|
import Kernel, except: [length: 1]
defmodule String do
@moduledoc ~S"""
A String in Elixir is a UTF-8 encoded binary.
## Codepoints and graphemes
The functions in this module act according to the Unicode
Standard, version 6.3.0. As per the standard, a codepoint is
an Unicode Character, which may be represented by one or more
bytes. For example, the character "é" is represented with two
bytes:
iex> byte_size("é")
2
However, this module returns the proper length:
iex> String.length("é")
1
Furthermore, this module also presents the concept of
graphemes, which are multiple characters that may be
"perceived as a single character" by readers. For example,
the same "é" character written above could be represented
by the letter "e" followed by the accent ́:
iex> string = "\u0065\u0301"
iex> byte_size(string)
3
iex> String.length(string)
1
Although the example above is made of two characters, it is
perceived by users as one.
Graphemes can also be two characters that are interpreted
as one by some languages. For example, some languages may
consider "ch" as a grapheme. However, since this information
depends on the locale, it is not taken into account by this
module.
In general, the functions in this module rely on the Unicode
Standard, but do not contain any of the locale specific behaviour.
More information about graphemes can be found in the [Unicode
Standard Annex #29](http://www.unicode.org/reports/tr29/).
This current Elixir version implements Extended Grapheme Cluster
algorithm.
## String and binary operations
To act accordingly to the Unicode Standard, many functions
in this module run in linear time, as they need to traverse
the whole string considering the proper Unicode codepoints.
For example, `String.length/1` is going to take longer as
the input grows. On the other hand, `Kernel.byte_size/1` always runs
in constant time (i.e. regardless of the input size).
This means often there are performance costs in using the
functions in this module, compared to the more low-level
operations that work directly with binaries:
* `Kernel.binary_part/3` - retrieves part of the binary
* `Kernel.bit_size/1` and `Kernel.byte_size/1` - size related functions
* `Kernel.is_bitstring/1` and `Kernel.is_binary/1` - type checking function
* Plus a number of functions for working with binaries (bytes)
[in the `:binary` module](http://www.erlang.org/doc/man/binary.html)
There are many situations where using the `String` module can
be avoided in favor of binary functions or pattern matching.
For example, imagine you have a string `prefix` and you want to
remove this prefix from another string named `full`.
One may be tempted to write:
iex> take_prefix = fn full, prefix ->
...> base = String.length(prefix)
...> String.slice(full, base, String.length(full) - base)
...> end
iex> take_prefix.("Mr. John", "Mr. ")
"John"
Although the function above works, it performs poorly. To
calculate the length of the string, we need to traverse it
fully, so we traverse both `prefix` and `full` strings, then
slice the `full` one, traversing it again.
A first attempt at improving it could be with ranges:
iex> take_prefix = fn full, prefix ->
...> base = String.length(prefix)
...> String.slice(full, base..-1)
...> end
iex> take_prefix.("Mr. John", "Mr. ")
"John"
While this is much better (we don't traverse `full` twice),
it could still be improved. In this case, since we want to
extract a substring from a string, we can use `byte_size/1`
and `binary_part/3` as there is no chance we will slice in
the middle of a codepoint made of more than one byte:
iex> take_prefix = fn full, prefix ->
...> base = byte_size(prefix)
...> binary_part(full, base, byte_size(full) - base)
...> end
iex> take_prefix.("Mr. John", "Mr. ")
"John"
Or simply use pattern matching:
iex> take_prefix = fn full, prefix ->
...> base = byte_size(prefix)
...> <<_ :: binary-size(base), rest :: binary>> = full
...> rest
...> end
iex> take_prefix.("Mr. John", "Mr. ")
"John"
On the other hand, if you want to dynamically slice a string
based on an integer value, then using `String.slice/3` is the
best option as it guarantees we won't incorrectly split a valid
codepoint in multiple bytes.
## Integer codepoints
Although codepoints could be represented as integers, this
module represents all codepoints as strings. For example:
iex> String.codepoints("olá")
["o", "l", "á"]
There are a couple of ways to retrieve a character integer
codepoint. One may use the `?` construct:
iex> ?o
111
iex> ?á
225
Or also via pattern matching:
iex> << eacute :: utf8 >> = "á"
iex> eacute
225
As we have seen above, codepoints can be inserted into
a string by their hexadecimal code:
"ol\u0061\u0301" #=>
"olá"
## Self-synchronization
The UTF-8 encoding is self-synchronizing. This means that
if malformed data (i.e., data that is not possible according
to the definition of the encoding) is encountered, only one
codepoint needs to be rejected.
This module relies on this behaviour to ignore such invalid
characters. For example, `length/1` is going to return
a correct result even if an invalid codepoint is fed into it.
In other words, this module expects invalid data to be detected
when retrieving data from the external source. For example, a
driver that reads strings from a database will be the one
responsible to check the validity of the encoding.
## Patterns
Many functions in this module work with patterns. For example,
String.split/2 can split a string into multiple parts given
a pattern. This pattern can be a string, a list of strings or
a compiled pattern:
iex> String.split("foo bar", " ")
["foo", "bar"]
iex> String.split("foo bar!", [" ", "!"])
["foo", "bar", ""]
iex> pattern = :binary.compile_pattern([" ", "!"])
iex> String.split("foo bar!", pattern)
["foo", "bar", ""]
The compiled pattern is useful when the same match will
be done over and over again. Note though the compiled
pattern cannot be stored in a module attribute as the pattern
is generated at runtime and does not survive compile time.
"""
@type t :: binary
@type codepoint :: t
@type grapheme :: t
@type pattern :: t | [t] | :binary.cp
@doc """
Checks if a string contains only printable characters, assuming it
is encoded as UTF-8. Returns `true` if so, `false` otherwise.
## Examples
    iex> String.printable?("abc")
    true
"""
@spec printable?(t) :: boolean
def printable?(string)
# Printable escape sequences are accepted one codepoint at a time.
# (None of these fall inside the printable ranges below, so clause
# order relative to the range clause is irrelevant.)
def printable?(<<?\a, rest :: binary>>), do: printable?(rest)
def printable?(<<?\b, rest :: binary>>), do: printable?(rest)
def printable?(<<?\t, rest :: binary>>), do: printable?(rest)
def printable?(<<?\n, rest :: binary>>), do: printable?(rest)
def printable?(<<?\v, rest :: binary>>), do: printable?(rest)
def printable?(<<?\f, rest :: binary>>), do: printable?(rest)
def printable?(<<?\r, rest :: binary>>), do: printable?(rest)
def printable?(<<?\e, rest :: binary>>), do: printable?(rest)
def printable?(<<?\d, rest :: binary>>), do: printable?(rest)
# Any codepoint inside the printable Unicode ranges keeps the scan going.
def printable?(<<codepoint :: utf8, rest :: binary>>)
    when codepoint in 0x20..0x7E
    when codepoint in 0xA0..0xD7FF
    when codepoint in 0xE000..0xFFFD
    when codepoint in 0x10000..0x10FFFF do
  printable?(rest)
end
# The empty string is trivially printable; any other binary
# (invalid UTF-8 or a non-printable codepoint) is not.
def printable?(<<>>), do: true
def printable?(binary) when is_binary(binary), do: false
@doc """
Divides a string into substrings at each Unicode whitespace
occurrence with leading and trailing whitespace ignored.
## Examples
iex> String.split("foo bar")
["foo", "bar"]
iex> String.split("foo" <> <<194, 133>> <> "bar")
["foo", "bar"]
iex> String.split(" foo bar ")
["foo", "bar"]
"""
@spec split(t) :: [t]
# Delegated to String.Unicode, which implements the Unicode-aware
# whitespace rules for the no-pattern case.
defdelegate split(binary), to: String.Unicode
@doc ~S"""
Divides a string into substrings based on a pattern.
Returns a list of these substrings. The pattern can
be a string, a list of strings or a regular expression.
The string is split into as many parts as possible by
default, but can be controlled via the `parts: num` option.
If you pass `parts: :infinity`, it will return all possible parts
(being this one the default behaviour).
Empty strings are only removed from the result if the
`trim` option is set to `true` (default is `false`).
## Examples
Splitting with a string pattern:
iex> String.split("a,b,c", ",")
["a", "b", "c"]
iex> String.split("a,b,c", ",", parts: 2)
["a", "b,c"]
iex> String.split(" a b c ", " ", trim: true)
["a", "b", "c"]
A list of patterns:
iex> String.split("1,2 3,4", [" ", ","])
["1", "2", "3", "4"]
A regular expression:
iex> String.split("a,b,c", ~r{,})
["a", "b", "c"]
iex> String.split("a,b,c", ~r{,}, parts: 2)
["a", "b,c"]
iex> String.split(" a b c ", ~r{\s}, trim: true)
["a", "b", "c"]
Splitting on empty patterns returns codepoints:
iex> String.split("abc", ~r{})
["a", "b", "c", ""]
iex> String.split("abc", "")
["a", "b", "c", ""]
iex> String.split("abc", "", trim: true)
["a", "b", "c"]
iex> String.split("abc", "", parts: 2)
["a", "bc"]
A precompiled pattern can also be given:
iex> pattern = :binary.compile_pattern([" ", ","])
iex> String.split("1,2 3,4", pattern)
["1", "2", "3", "4"]
"""
@spec split(t, pattern | Regex.t) :: [t]
@spec split(t, pattern | Regex.t, Keyword.t) :: [t]
def split(string, pattern, options \\ [])
# Regexes are handed off entirely to the Regex module.
def split(string, %Regex{} = pattern, options) do
Regex.split(pattern, string, options)
end
# Fast path: no options and a non-empty pattern maps directly onto
# :binary.split/3, avoiding the option handling below.
def split(string, pattern, []) when pattern != "" do
:binary.split(string, pattern, [:global])
end
# General path: walk the string with do_splitter/3 (defined with
# splitter/3 below), honoring :parts and :trim.
def split(string, pattern, options) do
parts = Keyword.get(options, :parts, :infinity)
trim = Keyword.get(options, :trim, false)
pattern = maybe_compile_pattern(pattern)
split_each(string, pattern, trim, parts_to_index(parts))
end
# 0 is the "no limit" sentinel: split_each/4 only stops early when the
# countdown reaches exactly 1, which never happens starting from 0.
defp parts_to_index(:infinity), do: 0
defp parts_to_index(n) when is_integer(n) and n > 0, do: n
# With one part left, the remainder is emitted unsplit.
defp split_each(string, _pattern, _trim, 1) when is_binary(string), do: [string]
defp split_each(string, pattern, trim, count) do
case do_splitter(string, pattern, trim) do
{h, t} -> [h|split_each(t, pattern, trim, count - 1)]
nil -> []
end
end
@doc """
Splits a string on demand.
Returns an enumerable that splits the string on
demand, instead of splitting all data upfront.
Note splitter does not support regular expressions
(as it is often more efficient to have the regular
expressions traverse the string at once than in
multiple passes).
## Options
* :trim - when true, does not emit empty patterns
"""
@spec splitter(t, pattern, Keyword.t) :: Enumerable.t
def splitter(string, pattern, options \\ []) do
pattern = maybe_compile_pattern(pattern)
trim = Keyword.get(options, :trim, false)
Stream.unfold(string, &do_splitter(&1, pattern, trim))
end
# do_splitter/3 is an unfold step: it returns {part, rest} or nil when done.
# :nomatch is used as a sentinel "rest" meaning the final part has already
# been emitted, so the next step terminates the stream.
defp do_splitter(:nomatch, _pattern, _), do: nil
# Trailing empty part: dropped when trimming, emitted once otherwise.
defp do_splitter("", _pattern, true), do: nil
defp do_splitter("", _pattern, false), do: {"", :nomatch}
# Empty pattern: emit one grapheme at a time.
defp do_splitter(bin, "", _trim) do
next_grapheme(bin)
end
defp do_splitter(bin, pattern, trim) do
case :binary.match(bin, pattern) do
# A match at offset 0 would yield an empty part; skip it when trimming.
{0, length} when trim ->
do_splitter(:binary.part(bin, length, byte_size(bin) - length), pattern, trim)
{pos, length} ->
final = pos + length
{:binary.part(bin, 0, pos),
:binary.part(bin, final, byte_size(bin) - final)}
:nomatch ->
{bin, :nomatch}
end
end
# "" cannot be compiled by :binary.compile_pattern/1 and is special-cased
# above, so it is passed through untouched.
defp maybe_compile_pattern(""), do: ""
defp maybe_compile_pattern(pattern), do: :binary.compile_pattern(pattern)
@doc """
Splits a string into two at the specified offset. When the offset given is
negative, location is counted from the end of the string.
The offset is capped to the length of the string. Returns a tuple with
two elements.
Note: keep in mind this function splits on graphemes and for such it
has to linearly traverse the string. If you want to split a string or
a binary based on the number of bytes, use `Kernel.binary_part/3`
instead.
## Examples
    iex> String.split_at "sweetelixir", 5
    {"sweet", "elixir"}
    iex> String.split_at "sweetelixir", -6
    {"sweet", "elixir"}
    iex> String.split_at "abc", 0
    {"", "abc"}
    iex> String.split_at "abc", 1000
    {"abc", ""}
    iex> String.split_at "abc", -1000
    {"", "abc"}
"""
@spec split_at(t, integer) :: {t, t}
def split_at(string, position)
def split_at(string, position) when is_integer(position) and position >= 0 do
  do_split_at(string, position)
end
def split_at(string, position) when is_integer(position) and position < 0 do
  # Translate the negative offset into one counted from the front;
  # anything before the start of the string yields the whole string as tail.
  offset = length(string) + position
  if offset >= 0 do
    do_split_at(string, offset)
  else
    {"", string}
  end
end
defp do_split_at(string, offset) do
  # Graphemes.split_at returns the byte width of the first `offset`
  # graphemes plus the remaining binary (nil when past the end).
  {prefix_bytes, rest} = String.Graphemes.split_at(string, offset)
  {binary_part(string, 0, prefix_bytes), rest || ""}
end
@doc """
Converts all characters in the given string to uppercase.
## Examples
iex> String.upcase("abcd")
"ABCD"
iex> String.upcase("ab 123 xpto")
"AB 123 XPTO"
iex> String.upcase("olá")
"OLÁ"
"""
@spec upcase(t) :: t
# Unicode-aware case mapping is implemented in String.Unicode.
defdelegate upcase(binary), to: String.Unicode
@doc """
Converts all characters in the given string to lowercase.
## Examples
iex> String.downcase("ABCD")
"abcd"
iex> String.downcase("AB 123 XPTO")
"ab 123 xpto"
iex> String.downcase("OLÁ")
"olá"
"""
@spec downcase(t) :: t
# Unicode-aware case mapping is implemented in String.Unicode.
defdelegate downcase(binary), to: String.Unicode
@doc """
Converts the first character in the given string to
uppercase and the remainder to lowercase.
This relies on the titlecase information provided
by the Unicode Standard. Note this function makes
no attempt to capitalize all words in the string
(usually known as titlecase).
## Examples
    iex> String.capitalize("abcd")
    "Abcd"
    iex> String.capitalize("fin")
    "Fin"
    iex> String.capitalize("olá")
    "Olá"
"""
@spec capitalize(t) :: t
def capitalize(string) when is_binary(string) do
  # Titlecase the leading grapheme, then lowercase everything after it.
  {first, rest} = String.Unicode.titlecase_once(string)
  first <> downcase(rest)
end
@doc """
Returns a string where trailing Unicode whitespace
has been removed.
## Examples
iex> String.rstrip(" abc ")
" abc"
"""
@spec rstrip(t) :: t
# Unicode whitespace handling is implemented in String.Unicode.
defdelegate rstrip(binary), to: String.Unicode
@doc """
Returns a string where trailing `char` have been removed.
## Examples
iex> String.rstrip(" abc _", ?_)
" abc "
"""
@spec rstrip(t, char) :: t
def rstrip("", _char), do: ""
# Do a quick check before we traverse the whole
# binary. :binary.last is a fast operation (it
# does not traverse the whole binary).
# This fast path only applies to ASCII chars, where the codepoint is a
# single byte and thus directly comparable with :binary.last/1.
def rstrip(string, char) when char in 0..127 do
if :binary.last(string) == char do
rstrip(binary_part(string, 0, byte_size(string) - 1), char)
else
string
end
end
# Multi-byte chars require a full traversal via do_rstrip/3.
def rstrip(string, char) when is_integer(char) do
do_rstrip(string, "", char)
end
# do_rstrip/3 buffers a pending run of the target char; repeating `char`
# in the head pins the first argument to the target.
# If the string ends while buffering, the run was trailing and is dropped.
defp do_rstrip(<<char :: utf8, string :: binary>>, buffer, char) do
<<do_rstrip(string, <<char :: utf8, buffer :: binary>>, char) :: binary>>
end
# A different char flushes the buffered run back into the output and resets.
defp do_rstrip(<<char :: utf8, string :: binary>>, buffer, another_char) do
<<buffer :: binary, char :: utf8, do_rstrip(string, "", another_char) :: binary>>
end
# End of input: any buffered run is discarded (it was trailing).
defp do_rstrip(<<>>, _, _) do
<<>>
end
@doc """
Returns a string where leading Unicode whitespace
has been removed.
## Examples
iex> String.lstrip(" abc ")
"abc "
"""
# Unicode whitespace handling is implemented in String.Unicode.
defdelegate lstrip(binary), to: String.Unicode
@doc """
Returns a string where leading `char` have been removed.
## Examples
    iex> String.lstrip("_ abc _", ?_)
    " abc _"
"""
@spec lstrip(t, char) :: t
def lstrip(string, char)
# Repeating `char` in the head pins the leading codepoint to the target;
# keep dropping codepoints while they match.
def lstrip(<<char :: utf8, tail :: binary>>, char) when is_integer(char) do
  lstrip(tail, char)
end
# First codepoint differs (or the string is empty): nothing left to strip.
def lstrip(string, char) when is_integer(char) do
  string
end
@doc """
Returns a string where leading and trailing Unicode whitespace
has been removed.
## Examples
    iex> String.strip(" abc ")
    "abc"
"""
@spec strip(t) :: t
def strip(string) do
  string |> lstrip() |> rstrip()
end
@doc """
Returns a string where leading and trailing `char` have been
removed.
## Examples
    iex> String.strip("a abc a", ?a)
    " abc "
"""
@spec strip(t, char) :: t
def strip(string, char) do
  string |> lstrip(char) |> rstrip(char)
end
@doc ~S"""
Returns a new string of length `len` with `subject` right justified and
padded with `padding`. If `padding` is not present, it defaults to
whitespace. When `len` is less than the length of `subject`, `subject` is
returned.
## Examples
iex> String.rjust("abc", 5)
" abc"
iex> String.rjust("abc", 5, ?-)
"--abc"
"""
@spec rjust(t, non_neg_integer) :: t
@spec rjust(t, non_neg_integer, char) :: t
# Pads on the left; the fill logic is shared with ljust/3 in justify/4.
def rjust(subject, len, pad \\ ?\s) when is_integer(pad) and is_integer(len) and len >= 0 do
justify(subject, len, pad, :right)
end
@doc ~S"""
Returns a new string of length `len` with `subject` left justified and padded
with `padding`. If `padding` is not present, it defaults to whitespace. When
`len` is less than the length of `subject`, `subject` is returned.
## Examples
iex> String.ljust("abc", 5)
"abc "
iex> String.ljust("abc", 5, ?-)
"abc--"
"""
@spec ljust(t, non_neg_integer) :: t
@spec ljust(t, non_neg_integer, char) :: t
# Pads on the right; the fill logic is shared with rjust/3 in justify/4.
def ljust(subject, len, pad \\ ?\s) when is_integer(pad) and is_integer(len) and len >= 0 do
justify(subject, len, pad, :left)
end
# Shared implementation of rjust/3 and ljust/3: pads `subject` with the
# `padding` codepoint on the side given by `type` up to `len` graphemes.
defp justify(subject, 0, _pad, _type), do: subject
defp justify(subject, len, padding, type) do
  # Grapheme length, not bytes — padding is counted in visible characters.
  subject_len = length(subject)
  if subject_len >= len do
    subject
  else
    fill = duplicate(<<padding :: utf8>>, len - subject_len)
    case type do
      :left -> subject <> fill
      :right -> fill <> subject
    end
  end
end
@doc ~S"""
Returns a new binary created by replacing occurrences of `pattern` in
`subject` with `replacement`.
By default, it replaces all occurrences, except if the `global` option is
set to `false`.
A `pattern` may be a string or a regular expression.
## Examples
    iex> String.replace("a,b,c", ",", "-")
    "a-b-c"
    iex> String.replace("a,b,c", ",", "-", global: false)
    "a-b,c"
The pattern can also be a regular expression. In those cases, one can give `\N` or
`\g{N}` in the `replacement` string to access a specific capture in the
regex:
    iex> String.replace("a,b,c", ~r/,(.)/, ",\\1\\1")
    "a,bb,cc"
Notice we had to escape the escape character `\`. By giving `\0`,
one can inject the whole matched pattern in the replacement string.
When strings are used as a pattern, a developer can also use the
replaced part inside the `replacement` via the `:insert_replaced` option:
    iex> String.replace("a,b,c", "b", "[]", insert_replaced: 1)
    "a,[b],c"
    iex> String.replace("a,b,c", ",", "[]", insert_replaced: 2)
    "a[],b[],c"
    iex> String.replace("a,b,c", ",", "[]", insert_replaced: [1, 1])
    "a[,,]b[,,]c"
"""
@spec replace(t, pattern | Regex.t, t, Keyword.t) :: t
def replace(subject, pattern, replacement, options \\ []) when is_binary(replacement) do
  case Regex.regex?(pattern) do
    true ->
      # Regexes go through the Regex module, which understands \N captures.
      Regex.replace(pattern, subject, replacement, global: options[:global])
    false ->
      # Plain/binary patterns are handled by :binary.replace/4.
      :binary.replace(subject, pattern, replacement, translate_replace_options(options))
  end
end
# Converts String.replace/4 options into the option list expected by
# :binary.replace/4: `global: false` limits replacement to the first match
# (replacing all matches is the default); a truthy `:insert_replaced` value
# is passed through verbatim.
#
# Note: the original rebound `opts` inside `if` and discarded the result,
# relying on pre-Elixir-1.2 scope leaking; this form is equivalent on old
# versions and correct on modern ones.
defp translate_replace_options(options) do
  opts = if Keyword.get(options, :global) != false, do: [:global], else: []
  insert = Keyword.get(options, :insert_replaced)
  if insert, do: [{:insert_replaced, insert} | opts], else: opts
end
@doc """
Reverses the given string. Works on graphemes.
## Examples
    iex> String.reverse("abcd")
    "dcba"
    iex> String.reverse("hello world")
    "dlrow olleh"
    iex> String.reverse("hello ∂og")
    "go∂ olleh"
"""
@spec reverse(t) :: t
def reverse(string) do
  do_reverse(next_grapheme(string), [])
end
# Prepend each grapheme to the accumulator, then flatten once at the end.
defp do_reverse(nil, acc), do: IO.iodata_to_binary(acc)
defp do_reverse({grapheme, rest}, acc) do
  do_reverse(next_grapheme(rest), [grapheme | acc])
end
@doc """
Returns a binary `subject` duplicated `n` times.
## Examples
    iex> String.duplicate("abc", 0)
    ""
    iex> String.duplicate("abc", 1)
    "abc"
    iex> String.duplicate("abc", 2)
    "abcabc"
"""
@spec duplicate(t, non_neg_integer) :: t
# Works on raw bytes: duplicating is codepoint-agnostic, so :binary.copy/2
# is both correct and efficient here.
def duplicate(subject, n) when is_integer(n) and n >= 0, do: :binary.copy(subject, n)
@doc """
Returns all codepoints in the string.
## Examples
iex> String.codepoints("olá")
["o", "l", "á"]
iex> String.codepoints("оптими зации")
["о", "п", "т", "и", "м", "и", " ", "з", "а", "ц", "и", "и"]
iex> String.codepoints("ἅἪῼ")
["ἅ", "Ἢ", "ῼ"]
"""
@spec codepoints(t) :: [codepoint]
# Each codepoint is returned as a one-codepoint binary, not an integer.
defdelegate codepoints(string), to: String.Unicode
@doc """
Returns the next codepoint in a String.
The result is a tuple with the codepoint and the
remainder of the string or `nil` in case
the string reached its end.
As with other functions in the String module, this
function does not check for the validity of the codepoint.
That said, if an invalid codepoint is found, it will
be returned by this function.
## Examples
iex> String.next_codepoint("olá")
{"o", "lá"}
"""
# Inlined because this is a hot path for codepoint traversal.
@compile {:inline, next_codepoint: 1}
@spec next_codepoint(t) :: {codepoint, t} | nil
defdelegate next_codepoint(string), to: String.Unicode
@doc ~S"""
Checks whether `str` contains only valid characters.
## Examples
iex> String.valid?("a")
true
iex> String.valid?("ø")
true
iex> String.valid?(<<0xffff :: 16>>)
false
iex> String.valid?("asd" <> <<0xffff :: 16>>)
false
"""
@spec valid?(t) :: boolean
def valid?(string)
# Module-body variable consumed at compile time by the `for` below:
# the Unicode "noncharacter" codepoints, which are never valid.
noncharacters = Enum.to_list(0xFDD0..0xFDEF) ++
[0x0FFFE, 0x0FFFF, 0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF,
0x3FFFE, 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF,
0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE, 0x8FFFF,
0x9FFFE, 0x9FFFF, 0x10FFFE, 0x10FFFF]
# Compile-time codegen: one rejecting clause per noncharacter codepoint.
for noncharacter <- noncharacters do
def valid?(<< unquote(noncharacter) :: utf8, _ :: binary >>), do: false
end
# Any other well-formed UTF-8 codepoint is accepted; recurse on the rest.
def valid?(<<_ :: utf8, t :: binary>>), do: valid?(t)
def valid?(<<>>), do: true
# Reaching here means the leading bytes were not valid UTF-8.
def valid?(_), do: false
@doc ~S"""
Checks whether `string` is a valid character.
All characters are codepoints, but some codepoints
are not valid characters. They may be reserved, private,
or other.
More info at: https://en.wikipedia.org/wiki/Universal_Character_Set_characters#Non-characters
## Examples
    iex> String.valid_character?("a")
    true
    iex> String.valid_character?("ø")
    true
    iex> String.valid_character?("\uFFFF")
    false
"""
@spec valid_character?(t) :: boolean
def valid_character?(string) do
  case string do
    # Exactly one UTF-8 codepoint: defer to valid?/1 for the character check.
    <<_ :: utf8>> -> valid?(string)
    _ -> false
  end
end
@doc ~S"""
Splits the string into chunks of characters that share a common trait.
The trait can be one of two options:
* `:valid` – the string is split into chunks of valid and invalid character
sequences
* `:printable` – the string is split into chunks of printable and
non-printable character sequences
Returns a list of binaries each of which contains only one kind of
characters.
If the given string is empty, an empty list is returned.
## Examples
iex> String.chunk(<<?a, ?b, ?c, 0>>, :valid)
["abc\0"]
iex> String.chunk(<<?a, ?b, ?c, 0, 0x0ffff::utf8>>, :valid)
["abc\0", <<0x0ffff::utf8>>]
iex> String.chunk(<<?a, ?b, ?c, 0, 0x0ffff::utf8>>, :printable)
["abc", <<0, 0x0ffff::utf8>>]
"""
@spec chunk(t, :valid | :printable) :: [t]
def chunk(string, trait)
def chunk("", _), do: []
def chunk(str, trait) when trait in [:valid, :printable] do
# Seed the flag with the predicate result for the first codepoint, so
# the first chunk starts in the right "kind".
{cp, _} = next_codepoint(str)
pred_fn = make_chunk_pred(trait)
do_chunk(str, pred_fn.(cp), pred_fn)
end
defp do_chunk(str, flag, pred_fn), do: do_chunk(str, [], <<>>, flag, pred_fn)
# acc holds completed chunks in reverse; chunk is the one being built.
defp do_chunk(<<>>, acc, <<>>, _, _), do: Enum.reverse(acc)
defp do_chunk(<<>>, acc, chunk, _, _), do: Enum.reverse(acc, [chunk])
defp do_chunk(str, acc, chunk, flag, pred_fn) do
{cp, rest} = next_codepoint(str)
# A predicate flip closes the current chunk and starts a new one.
if pred_fn.(cp) != flag do
do_chunk(rest, [chunk|acc], cp, not flag, pred_fn)
else
do_chunk(rest, acc, chunk <> cp, flag, pred_fn)
end
end
defp make_chunk_pred(:valid), do: &valid?/1
defp make_chunk_pred(:printable), do: &printable?/1
@doc """
Returns Unicode graphemes in the string as per Extended Grapheme
Cluster algorithm outlined in the [Unicode Standard Annex #29,
Unicode Text Segmentation](http://www.unicode.org/reports/tr29/).
## Examples
iex> String.graphemes("Ńaïve")
["Ń", "a", "ï", "v", "e"]
"""
@spec graphemes(t) :: [grapheme]
# Grapheme segmentation lives in String.Graphemes.
defdelegate graphemes(string), to: String.Graphemes
# Both grapheme-stepping functions are hot paths; inline them.
@compile {:inline, next_grapheme: 1, next_grapheme_size: 1}
@doc """
Returns the next grapheme in a string.
The result is a tuple with the grapheme and the
remainder of the string or `nil` in case
the String reached its end.
## Examples
iex> String.next_grapheme("olá")
{"o", "lá"}
"""
@spec next_grapheme(t) :: {grapheme, t} | nil
def next_grapheme(binary) do
# Implemented on top of next_grapheme_size/1: slice the grapheme's bytes
# off the front of the binary.
case next_grapheme_size(binary) do
{size, rest} -> {:binary.part(binary, 0, size), rest}
nil -> nil
end
end
@doc """
Returns the size of the next grapheme.
The result is a tuple with the next grapheme size and
the remainder of the string or `nil` in case the string
reached its end.
## Examples
iex> String.next_grapheme_size("olá")
{1, "lá"}
"""
@spec next_grapheme_size(t) :: {pos_integer, t} | nil
# Grapheme segmentation lives in String.Graphemes.
defdelegate next_grapheme_size(string), to: String.Graphemes
@doc """
Returns the first grapheme from a utf8 string,
`nil` if the string is empty.
## Examples
    iex> String.first("elixir")
    "e"
    iex> String.first("եոգլի")
    "ե"
"""
@spec first(t) :: grapheme | nil
def first(string) do
  case next_grapheme(string) do
    nil -> nil
    {grapheme, _rest} -> grapheme
  end
end
@doc """
Returns the last grapheme from a utf8 string,
`nil` if the string is empty.
## Examples
    iex> String.last("elixir")
    "r"
    iex> String.last("եոգլի")
    "ի"
"""
@spec last(t) :: grapheme | nil
def last(string) do
  do_last(next_grapheme(string), nil)
end
# Walk the whole string, remembering the most recent grapheme seen.
defp do_last(nil, latest), do: latest
defp do_last({grapheme, rest}, _latest) do
  do_last(next_grapheme(rest), grapheme)
end
@doc """
Returns the number of Unicode graphemes in a utf8 string.
## Examples
iex> String.length("elixir")
6
iex> String.length("եոգլի")
5
"""
@spec length(t) :: non_neg_integer
# Linear time: counting graphemes requires traversing the whole string
# (see the "String and binary operations" section of the moduledoc).
defdelegate length(string), to: String.Graphemes
@doc """
Returns the grapheme in the `position` of the given utf8 `string`.
If `position` is greater than `string` length, then it returns `nil`.
## Examples
    iex> String.at("elixir", 0)
    "e"
    iex> String.at("elixir", 1)
    "l"
    iex> String.at("elixir", 10)
    nil
    iex> String.at("elixir", -1)
    "r"
    iex> String.at("elixir", -10)
    nil
"""
@spec at(t, integer) :: grapheme | nil
def at(string, position) when is_integer(position) and position >= 0 do
  do_at(string, position)
end
def at(string, position) when is_integer(position) and position < 0 do
  # Translate the negative index into one counted from the front.
  offset = length(string) + position
  if offset >= 0, do: do_at(string, offset), else: nil
end
defp do_at(string, position) do
  # Graphemes.split_at returns a nil remainder when position is past the end.
  case String.Graphemes.split_at(string, position) do
    {_, nil} -> nil
    {_, rest} -> first(rest)
  end
end
@doc """
Returns a substring starting at the offset `start`, and of
length `len`.
If the offset is greater than string length, then it returns `""`.
Remember this function works with Unicode codepoints and considers
the slices to represent codepoint offsets. If you want to split
on raw bytes, check `Kernel.binary_part/3` instead.
## Examples
iex> String.slice("elixir", 1, 3)
"lix"
iex> String.slice("elixir", 1, 10)
"lixir"
iex> String.slice("elixir", 10, 3)
""
iex> String.slice("elixir", -4, 4)
"ixir"
iex> String.slice("elixir", -10, 3)
""
iex> String.slice("a", 0, 1500)
"a"
iex> String.slice("a", 1, 1500)
""
iex> String.slice("a", 2, 1500)
""
"""
@spec slice(t, integer, integer) :: grapheme
def slice(_, _, 0) do
""
end
def slice(string, start, len) when start >= 0 and len >= 0 do
# Graphemes.split_at yields the byte width of the skipped prefix;
# a nil remainder means `start` is past the end of the string.
case String.Graphemes.split_at(string, start) do
{_, nil} -> ""
{start_bytes, rest} ->
# Measure (in bytes) how far `len` graphemes reach into the rest,
# then slice the original binary directly.
{len_bytes, _} = String.Graphemes.split_at(rest, len)
binary_part(string, start_bytes, len_bytes)
end
end
def slice(string, start, len) when start < 0 and len >= 0 do
# Negative start counts from the end; out-of-range yields "".
start = length(string) + start
case start >= 0 do
true -> slice(string, start, len)
false -> ""
end
end
@doc """
Returns a substring from the offset given by the start of the
range to the offset given by the end of the range.
If the start of the range is not a valid offset for the given
string or if the range is in reverse order, returns `""`.
If the start or end of the range is negative, the whole string
is traversed first in order to convert the negative indices into
positive ones.
Remember this function works with Unicode codepoints and considers
the slices to represent codepoints offsets. If you want to split
on raw bytes, check `Kernel.binary_part/3` instead.
## Examples
    iex> String.slice("elixir", 1..3)
    "lix"
    iex> String.slice("elixir", 1..10)
    "lixir"
    iex> String.slice("elixir", 10..3)
    ""
    iex> String.slice("elixir", -4..-1)
    "ixir"
    iex> String.slice("elixir", 2..-1)
    "ixir"
    iex> String.slice("elixir", -4..6)
    "ixir"
    iex> String.slice("elixir", -1..-4)
    ""
    iex> String.slice("elixir", -10..-7)
    ""
    iex> String.slice("a", 0..1500)
    "a"
    iex> String.slice("a", 1..1500)
    ""
"""
@spec slice(t, Range.t) :: t
def slice(string, range)
def slice("", _.._), do: ""
# Fast path for `first..-1`: everything from `first` to the end, without
# measuring the total grapheme length.
def slice(string, first..-1) when first >= 0 do
  case String.Graphemes.split_at(string, first) do
    {_, nil} ->
      ""
    {start_bytes, _} ->
      binary_part(string, start_bytes, byte_size(string) - start_bytes)
  end
end
# Both bounds non-negative: delegate to slice/3.
def slice(string, first..last) when first >= 0 and last >= 0 do
  if last >= first do
    slice(string, first, last - first + 1)
  else
    ""
  end
end
# At least one negative bound: a full traversal is needed to learn the
# grapheme length before the bounds can be normalized.
def slice(string, first..last) do
  {bytes, length} = do_acc_bytes(next_grapheme_size(string), [], 0)
  # Normalize negative indices against the measured length.
  # NOTE: the original rebound `first`/`last` inside `if` and discarded the
  # result, relying on pre-Elixir-1.2 scope leaking; these explicit
  # rebindings are equivalent there and correct on modern Elixir.
  first = if first < 0, do: length + first, else: first
  last = if last < 0, do: length + last, else: last
  if first < 0 or first > last or first > length do
    ""
  else
    # `bytes` holds grapheme byte-sizes in reverse order; drop the sizes
    # past `last`, then split off the slice's sizes to compute the byte
    # offset and byte length for a single binary_part/3 call.
    last = min(last + 1, length)
    bytes = Enum.drop(bytes, length - last)
    first = last - first
    {length_bytes, start_bytes} = Enum.split(bytes, first)
    binary_part(string, Enum.sum(start_bytes), Enum.sum(length_bytes))
  end
end
# Accumulates each grapheme's byte size (in reverse) along with the total
# grapheme count.
defp do_acc_bytes({size, rest}, bytes, length) do
  do_acc_bytes(next_grapheme_size(rest), [size | bytes], length + 1)
end
defp do_acc_bytes(nil, bytes, length) do
  {bytes, length}
end
@doc """
Returns `true` if `string` starts with any of the prefixes given, otherwise
returns `false`. `prefixes` can be either a single prefix or a list of prefixes.
## Examples
    iex> String.starts_with? "elixir", "eli"
    true
    iex> String.starts_with? "elixir", ["erlang", "elixir"]
    true
    iex> String.starts_with? "elixir", ["erlang", "ruby"]
    false
"""
@spec starts_with?(t, t | [t]) :: boolean
# TODO: Remove me by 1.3
def starts_with?(_string, "") do
  IO.puts :stderr, "[deprecation] Calling String.starts_with?/2 with an empty string is deprecated and " <>
    "will fail in the future\n" <> Exception.format_stacktrace()
  true
end
def starts_with?(_string, []) do
  false
end
def starts_with?(string, prefix) when is_list(prefix) or is_binary(prefix) do
  # A match at byte offset 0 means some prefix is at the very start.
  case :binary.match(string, prefix) do
    {0, _length} -> true
    _ -> false
  end
end
@doc """
Returns `true` if `string` ends with any of the suffixes given, otherwise
returns `false`. `suffixes` can be either a single suffix or a list of suffixes.
## Examples
    iex> String.ends_with? "language", "age"
    true
    iex> String.ends_with? "language", ["youth", "age"]
    true
    iex> String.ends_with? "language", ["youth", "elixir"]
    false
"""
@spec ends_with?(t, t | [t]) :: boolean
# TODO: Remove me by 1.3
def ends_with?(_string, "") do
  IO.puts :stderr, "[deprecation] Calling String.ends_with?/2 with an empty string is deprecated and " <>
    "will fail in the future\n" <> Exception.format_stacktrace()
  true
end
def ends_with?(string, suffixes) when is_list(suffixes) do
  Enum.any?(suffixes, &do_ends_with(string, &1))
end
def ends_with?(string, suffix), do: do_ends_with(string, suffix)
defp do_ends_with(string, suffix) when is_binary(suffix) do
  string_size = byte_size(string)
  suffix_size = byte_size(suffix)
  # Restrict the search to exactly the last suffix_size bytes: a match
  # there means the string ends with the suffix. The size guard also
  # protects against a negative scope offset.
  suffix_size <= string_size and
    :binary.match(string, suffix, scope: {string_size - suffix_size, suffix_size}) != :nomatch
end
@doc """
Checks if `string` matches the given regular expression.
## Examples
iex> String.match?("foo", ~r/foo/)
true
iex> String.match?("bar", ~r/foo/)
false
"""
@spec match?(t, Regex.t) :: boolean
# Thin wrapper around Regex.match?/2 with the arguments flipped so the
# string comes first (pipe-friendly).
def match?(string, regex) do
Regex.match?(regex, string)
end
@doc """
Checks if `string` contains any of the given `contents`.
`contents` can be either a single string or a list of strings.
## Examples
    iex> String.contains? "elixir of life", "of"
    true
    iex> String.contains? "elixir of life", ["life", "death"]
    true
    iex> String.contains? "elixir of life", ["death", "mercury"]
    false
The argument can also be a precompiled pattern:
    iex> pattern = :binary.compile_pattern(["life", "death"])
    iex> String.contains? "elixir of life", pattern
    true
"""
@spec contains?(t, pattern) :: boolean
# TODO: Remove me by 1.3
def contains?(_string, "") do
  IO.puts :stderr, "[deprecation] Calling String.contains?/2 with an empty string is deprecated and " <>
    "will fail in the future\n" <> Exception.format_stacktrace()
  true
end
def contains?(_string, []) do
  false
end
def contains?(string, contents) do
  # Any match position at all means the content is present.
  case :binary.match(string, contents) do
    :nomatch -> false
    _found -> true
  end
end
@doc """
Converts a string into a char list.

Specifically, this function takes a UTF-8 encoded binary and returns a list
of its integer codepoints. It is similar to `codepoints/1` except that the
latter returns a list of codepoints as strings.

In case you need to work with bytes, take a look at the
[`:binary` module](http://www.erlang.org/doc/man/binary.html).

## Examples

    iex> String.to_char_list("æß")
    'æß'

"""
@spec to_char_list(t) :: char_list
def to_char_list(string) when is_binary(string) do
  # :unicode.characters_to_list/1 either succeeds with a list or reports the
  # prefix it managed to decode plus the offending remainder.
  case :unicode.characters_to_list(string) do
    {:error, encoded, rest} ->
      raise UnicodeConversionError, encoded: encoded, rest: rest, kind: :invalid
    {:incomplete, encoded, rest} ->
      raise UnicodeConversionError, encoded: encoded, rest: rest, kind: :incomplete
    result when is_list(result) ->
      result
  end
end
@doc """
Converts a string to an atom.

Currently Elixir does not support the conversion of strings
that contain Unicode codepoints greater than 0xFF.

Inlined by the compiler.

## Examples

    iex> String.to_atom("my_atom")
    :my_atom

"""
@spec to_atom(String.t) :: atom
def to_atom(string), do: :erlang.binary_to_atom(string, :utf8)
@doc """
Converts a string to an existing atom.

Currently Elixir does not support the conversion of strings
that contain Unicode codepoints greater than 0xFF.

Inlined by the compiler.

## Examples

    iex> _ = :my_atom
    iex> String.to_existing_atom("my_atom")
    :my_atom

    iex> String.to_existing_atom("this_atom_will_never_exist")
    ** (ArgumentError) argument error

"""
@spec to_existing_atom(String.t) :: atom
def to_existing_atom(string), do: :erlang.binary_to_existing_atom(string, :utf8)
@doc """
Returns an integer whose text representation is `string`.

Inlined by the compiler.

## Examples

    iex> String.to_integer("123")
    123

"""
@spec to_integer(String.t) :: integer
def to_integer(string), do: :erlang.binary_to_integer(string)
@doc """
Returns an integer whose text representation is `string` in base `base`.

Inlined by the compiler.

## Examples

    iex> String.to_integer("3FF", 16)
    1023

"""
@spec to_integer(String.t, 2..36) :: integer
def to_integer(string, base), do: :erlang.binary_to_integer(string, base)
@doc """
Returns a float whose text representation is `string`.

`string` must be the string representation of a float.
If a string representation of an integer wants to be used,
then `Float.parse/1` should be used instead,
otherwise an argument error will be raised.

Inlined by the compiler.

## Examples

    iex> String.to_float("2.2017764e+0")
    2.2017764

    iex> String.to_float("3.0")
    3.0

"""
@spec to_float(String.t) :: float
def to_float(string), do: :erlang.binary_to_float(string)
@doc """
Returns a float value between 0 (equates to no similarity) and 1 (is an
exact match) representing the
[Jaro](https://en.wikipedia.org/wiki/Jaro–Winkler_distance)
distance between `str1` and `str2`.

The Jaro distance metric is designed and best suited for short strings such
as person names.

## Examples

    iex> String.jaro_distance("dwayne", "duane")
    0.8222222222222223

    iex> String.jaro_distance("even", "odd")
    0.0

"""
@spec jaro_distance(t, t) :: 0..1
def jaro_distance(str1, str2)

# Identical strings are a perfect match; an empty string on either side can
# never match anything.
def jaro_distance(str, str), do: 1.0
def jaro_distance(_str, ""), do: 0.0
def jaro_distance("", _str), do: 0.0

def jaro_distance(str1, str2) do
  {chars1, len1} = decompose(str1)
  {chars2, len2} = decompose(str2)

  case match(chars1, len1, chars2, len2) do
    {0, _transpositions} ->
      0.0

    {common, transpositions} ->
      # Standard Jaro formula: average of the two match ratios and the
      # transposition-adjusted ratio.
      (common / len1 + common / len2 + (common - transpositions) / common) / 3
  end
end
@compile {:inline, decompose: 1}
# Splits a string into its grapheme list paired with that list's length, so
# callers walk the string only once. Kernel.length/1 is called explicitly
# because a bare `length/1` would resolve to the local String.length/1.
defp decompose(string) do
  grapheme_list = graphemes(string)
  {grapheme_list, Kernel.length(grapheme_list)}
end
# Ensures the shorter grapheme list is walked first and derives the Jaro
# matching window (half the longer length, minus one) from the longer list.
defp match(chars1, len1, chars2, len2) do
  {shorter, longer, longest_len} =
    if len1 < len2 do
      {chars1, chars2, len2}
    else
      {chars2, chars1, len1}
    end

  match(shorter, longer, div(longest_len, 2) - 1)
end
# Seeds the matching recursion: window {0, lim} (lookbehind grows up to
# `lim`), counters {matches, transpositions, last_match_index = -1},
# starting at index 0.
defp match(chars1, chars2, lim) do
match(chars1, chars2, {0, lim}, {0, 0, -1}, 0)
end
# Walks the shorter list one grapheme at a time, sliding the Jaro window
# over the longer list.
#
# `range` is {pre, lim}: how far behind/ahead of the current index a
# grapheme may sit and still count as a match. Until `pre` has grown to
# `lim` only the leading edge of the window expands; once full, the window
# slides by dropping the head of `chars` with tl/1.
#
# `state` is {matches, transpositions, last_match_index}; `idx` is the
# current position in the shorter list.
defp match([char | rest], chars, range, state, idx) do
{chars, state} = submatch(char, chars, range, state, idx)
case range do
# Window full: advance the trailing edge too.
{lim, lim} -> match(rest, tl(chars), range, state, idx + 1)
# Window still growing: widen the lookbehind side.
{pre, lim} -> match(rest, chars, {pre + 1, lim}, state, idx + 1)
end
end
# Shorter list exhausted: report match and transposition counts.
defp match([], _, _, {comm, trans, _}, _), do: {comm, trans}
# Looks for `char` inside the current window of `chars`. On a hit, the
# returned list has the matched grapheme blanked out (so it cannot match
# twice) and the counters advance with the match's absolute position.
defp submatch(char, chars, {pre, _} = range, state, idx) do
  case detect(char, chars, range) do
    nil ->
      {chars, state}

    {offset, remaining} ->
      # `idx - pre + offset` converts the window-relative offset back to an
      # absolute index in the longer list.
      {remaining, proceed(state, idx - pre + offset)}
  end
end
# Translates the window {pre, lim} into an absolute scan budget of
# pre + 1 + lim graphemes before giving up.
defp detect(char, chars, {pre, lim}), do: detect(char, chars, pre + 1 + lim, 0, [])
# Scan budget exhausted without a hit.
defp detect(_char, _chars, 0, _idx, _acc), do: nil
# Ran off the end of the longer list.
defp detect(_char, [], _lim, _idx, _acc), do: nil
# Hit: return the offset plus the list with the matched grapheme replaced by
# nil so it cannot be matched a second time.
defp detect(char, [char | rest], _lim, idx, acc),
do: {idx, Enum.reverse(acc, [nil | rest])}
# Miss: keep scanning, accumulating skipped graphemes in reverse.
defp detect(char, [other | rest], lim, idx, acc),
do: detect(char, rest, lim - 1, idx + 1, [other | acc])
# Bumps the match counter; a match landing before the previous match's
# position is a transposition, so that counter is bumped as well.
defp proceed({comm, trans, former}, current) when current < former do
  {comm + 1, trans + 1, current}
end

defp proceed({comm, trans, _former}, current) do
  {comm + 1, trans, current}
end
end
|
lib/elixir/lib/string.ex
| 0.848376
| 0.695041
|
string.ex
|
starcoder
|
defmodule Rubbergloves.Mapper do
  @moduledoc """
  The core module to convert your input into a struct using the previously
  defined structure mappings.

  ### Usage

  ```
  Rubbergloves.Mapper.map(LoginRequest, params)
  ```
  """

  defmodule Override do
    @moduledoc "Per-field override: a custom key resolver and/or value transformer."
    defstruct key: :default, value: :default
  end

  defmodule Options do
    @moduledoc "Mapping options: the key-resolution function and per-field overrides."
    defstruct keys: &Rubbergloves.Mapper.DynamicKeyResolver.resolve/2, overrides: %{}
  end

  @doc """
  Maps `map` onto the given struct (or onto the struct of the given module).

  When a module is given and `opts` is explicitly `nil`, the module's own
  `mappings/0` are used as options. Keys absent from `map` keep the struct's
  current (default) values.
  """
  def map(structOrModule, map, opts \\ %Options{})
  def map(module, map, nil) when is_atom(module), do: map(struct(module), map, module.mappings)
  def map(module, map, opts) when is_atom(module), do: map(struct(module), map, opts)
  def map(struct, map, opts) do
    Enum.reduce(keys(struct), struct, fn key, struct ->
      case fetch(map, key, opts) do
        {:ok, v} -> %{struct | key => value(key, v, opts)}
        :error -> %{struct | key => Map.get(struct, key)}
      end
    end)
  end

  # Fetch either uses the hard-coded key override or the resolver function.
  defp fetch(map, key, %Options{keys: key_fun} = options) when is_function(key_fun),
    do: fetch(map, key, key_fun, options)

  defp fetch(map, key, fun, %Options{overrides: overrides}) when is_function(fun) do
    case Map.get(overrides, key) do
      nil -> Map.fetch(map, fun.(key, map))
      %Override{key: :default} -> Map.fetch(map, fun.(key, map))
      %Override{key: key_fn} when is_function(key_fn) -> Map.fetch(map, key_fn.(key, map))
      %Override{key: new_key} -> Map.fetch(map, new_key)
    end
  end

  # Applies the value override (if any) to a fetched value; with no override
  # the value passes through unchanged.
  defp value(key, value, %Options{overrides: overrides}) do
    case Map.get(overrides, key) do
      nil -> value
      %Override{value: :default} -> value
      %Override{value: value_fn} when is_function(value_fn) -> value_fn.(value)
      _ -> raise "Expected override for value of #{key} to be a function"
    end
  end

  # All struct keys except the :__struct__ marker.
  # (The unused private default/3 helper from the original was removed; it
  # was never referenced and only produced a compiler warning.)
  defp keys(map) do
    map
    |> Map.keys()
    |> Enum.filter(fn key -> key != :__struct__ end)
  end
end
|
lib/mapper/mapper.ex
| 0.773901
| 0.854156
|
mapper.ex
|
starcoder
|
defprotocol PhStTransform do
@moduledoc """
The `PhStTransform` protocol will convert any Elixir data structure
using a given transform into a new data structure.
The `transform/3` function takes the data structure and
a map of transformation functions and a depth list. It
then does a depth-first recursion through the structure,
applying the transformation functions for all
data types found in the data structure.
The transform map has data types as keys and anonymous functions
as values. The anonymous functions have the data item and
optionally a recursion depth list as inputs and can
return anything. These maps of transform functions
are referred to as potions.
The `transmogrify/3` function is similar except that it allows
the functions to modify the potion map as the transform is
in progress and it returns a tuple consisting of the
transformed data and potion.
## Example: Convert all atoms to strings
atom_to_string_potion = %{ Atom => fn(atom) -> Atom.to_string(atom) end }
PhStTransform.transform(data, atom_to_string_potion)
The potion map should have Elixir Data types as keys and anonymous functions
of either `fn(x)` or `fn(x, depth)` arity. You can supply nearly any kind of map
as an argument however, since the `PhStTransform.Potion.brew` function will strip
out any invalid values. The valid keys are all of the standard Protocol types:
[Atom, Integer, Float, BitString, Regex, PID, Function, Reference, Port, Tuple, List, Map]
plus `Keyword` and the name of any defined Structs (e.g. `Range`)
There is also the special type `Any`, this is the default function applied
when there is no function for the type listed in the potion. By default
this is set to the identity function `fn(x, _d) -> x end`, but can be overridden
in the initial map.
The depth argument should always be left at the default value when using
this protocol. For the anonymous functions in the potion map, they can use
the depth list to know which kind of data structure contains the current
data type.
## Example: Capitalize all strings in the UserName struct, normalize all other strings.
user_potion = %{ BitString => fn(str, depth) ->
if(List.first(depth) == UserName, do: String.capitalize(str), else: String.downcase(str)) end}
PhStTransform.transform(data, user_potion)
## Example: Parse a list of strings input from a CSV file, into a list of maps.
csv_potion = %{ BitString => fn(str, potion) ->
keys = String.split(str, ",")
new_potion = Map.put(potion, BitString, fn(str, potion) ->
{ String.split(str,",")
|> Enum.zip(keys)
|> Enum.reduce( %{}, fn(tuple, map) ->
{v, k} = tuple
Map.put(map,k,v) end),
potion }
end )
{keys, new_potion}
end }
csv_strings = File.stream!("file.csv") |> Enum.into([])
{[keys | maps ], new_potion } = PhStTransform.transmogrify(csv_strings, csv_potion)
"""
# Handle structs in Any
@fallback_to_any true
@doc """
Uses the given function_map to transform any Elixir data structure.
`function_map` should contain keys that correspond to the data types
to be transformed. Each key must map to a function that takes that data
type and optionally the depth list as arguments.
`depth` should always be left at the default value since it is meant for
internal recursion.
## Examples
iex> atom_to_string_potion = %{ Atom => fn(atom) -> Atom.to_string(atom) end }
iex> PhStTransform.transform([[:a], :b, {:c, :e}], atom_to_string_potion)
[["a"], "b", {"c", "e"}]
"""
def transform(data_structure, function_map, depth \\ [])
@doc """
Works similarly to transform, but returns a tuple consisting
of {result, potion} allowing self modifying potions.
## Examples
iex> atom_first = %{ Atom => fn(atom, potion) ->
old = atom
{ atom, Map.put(potion, Atom, fn(atom, potion) ->
{old, potion} end )} end }
iex> PhStTransform.transmogrify([:a, :b, :c, :d], atom_first)
{[:a, :a, :a, :a], %{Atom => #Function<12.54118792/2 in :erl_eval.expr/5>} }
"""
def transmogrify(data_structure, function_map, depth \\ [])
end
defimpl PhStTransform, for: Atom do
  # Atoms are leaves: brew the potion for this depth and apply the Atom
  # transform directly.
  def transform(atom, function_map, depth \\ []) do
    brewed = PhStTransform.Potion.brew(function_map, depth)
    PhStTransform.Potion.distill(Atom, brewed).(atom, depth)
  end

  # Same idea with a self-modifying potion, which is threaded through the
  # transform call.
  def transmogrify(atom, function_map, depth \\ []) do
    concocted = PhStTransform.Potion.concoct(function_map, depth)
    PhStTransform.Potion.distill(Atom, concocted).(atom, concocted, depth)
  end
end
defimpl PhStTransform, for: BitString do
  # Strings/bitstrings are leaves: apply the BitString transform from the
  # brewed potion directly.
  def transform(bitstring, function_map, depth \\ []) do
    brewed = PhStTransform.Potion.brew(function_map, depth)
    PhStTransform.Potion.distill(BitString, brewed).(bitstring, depth)
  end

  # Self-modifying variant: the concocted potion is threaded through.
  def transmogrify(bitstring, function_map, depth \\ []) do
    concocted = PhStTransform.Potion.concoct(function_map, depth)
    PhStTransform.Potion.distill(BitString, concocted).(bitstring, concocted, depth)
  end
end
defimpl PhStTransform, for: Integer do
  # Integers are leaves: apply the Integer transform from the brewed potion.
  def transform(integer, function_map, depth \\ []) do
    brewed = PhStTransform.Potion.brew(function_map, depth)
    PhStTransform.Potion.distill(Integer, brewed).(integer, depth)
  end

  # Self-modifying variant: the concocted potion is threaded through.
  def transmogrify(integer, function_map, depth \\ []) do
    concocted = PhStTransform.Potion.concoct(function_map, depth)
    PhStTransform.Potion.distill(Integer, concocted).(integer, concocted, depth)
  end
end
defimpl PhStTransform, for: Float do
  # Floats are leaves: apply the Float transform from the brewed potion.
  def transform(float, function_map, depth \\ []) do
    brewed = PhStTransform.Potion.brew(function_map, depth)
    PhStTransform.Potion.distill(Float, brewed).(float, depth)
  end

  # Self-modifying variant: the concocted potion is threaded through.
  def transmogrify(float, function_map, depth \\ []) do
    concocted = PhStTransform.Potion.concoct(function_map, depth)
    PhStTransform.Potion.distill(Float, concocted).(float, concocted, depth)
  end
end
defimpl PhStTransform, for: List do
  # Keyword lists are dispatched under the Keyword potion key; plain lists
  # under List.
  def transform(list, function_map, depth \\ []) do
    brewed = PhStTransform.Potion.brew(function_map, depth)

    if Keyword.keyword?(list) do
      keyword_transform(list, brewed, depth)
    else
      list_transform(list, brewed, depth)
    end
  end

  # Transforms each element with List pushed onto the depth, then applies
  # the List transform to the rebuilt list.
  defp list_transform(list, potion, depth) do
    inner_depth = [List | depth]
    transformed = Enum.map(list, &PhStTransform.transform(&1, potion, inner_depth))
    PhStTransform.Potion.distill(List, potion).(transformed, depth)
  end

  # Transforms each keyword value (keys untouched) with Keyword pushed onto
  # the depth, then applies the Keyword transform.
  defp keyword_transform(klist, potion, depth) do
    inner_depth = [Keyword | depth]

    transformed =
      Enum.map(klist, fn {key, value} ->
        {key, PhStTransform.transform(value, potion, inner_depth)}
      end)

    PhStTransform.Potion.distill(Keyword, potion).(transformed, depth)
  end

  def transmogrify(list, function_map, depth \\ []) do
    concocted = PhStTransform.Potion.concoct(function_map, depth)

    if Keyword.keyword?(list) do
      keyword_transmogrify(list, concocted, depth)
    else
      list_transmogrify(list, concocted, depth)
    end
  end

  # Threads the (possibly self-modifying) potion through every element, then
  # distills the final List transform from the resulting potion.
  defp list_transmogrify(list, potion, depth) do
    inner_depth = [List | depth]

    {reversed, final_potion} =
      Enum.reduce(list, {[], potion}, fn item, {acc, acc_potion} ->
        {new_item, new_potion} = PhStTransform.transmogrify(item, acc_potion, inner_depth)
        {[new_item | acc], new_potion}
      end)

    PhStTransform.Potion.distill(List, final_potion).(Enum.reverse(reversed), final_potion, depth)
  end

  # Keyword variant of list_transmogrify/3: only values are transmogrified.
  defp keyword_transmogrify(klist, potion, depth) do
    inner_depth = [Keyword | depth]

    {reversed, final_potion} =
      Enum.reduce(klist, {[], potion}, fn {key, value}, {acc, acc_potion} ->
        {new_value, new_potion} = PhStTransform.transmogrify(value, acc_potion, inner_depth)
        {[{key, new_value} | acc], new_potion}
      end)

    PhStTransform.Potion.distill(Keyword, final_potion).(Enum.reverse(reversed), final_potion, depth)
  end
end
defimpl PhStTransform, for: Tuple do
  # Transforms a tuple by converting it to a list, transforming each element
  # with Tuple pushed onto the depth, and rebuilding the tuple before
  # applying the Tuple transform itself.
  def transform(tuple, function_map, depth \\ []) do
    potion = PhStTransform.Potion.brew(function_map, depth)
    new_tuple = tuple
    |> Tuple.to_list
    |> Enum.map(fn(x) -> PhStTransform.transform(x, potion, [Tuple | depth]) end)
    |> List.to_tuple
    trans = PhStTransform.Potion.distill(Tuple, potion)
    trans.(new_tuple, depth)
  end

  def transmogrify(tuple, function_map, depth \\ []) do
    potion = PhStTransform.Potion.concoct(function_map, depth)
    new_depth = [Tuple | depth]
    {new_tuple_list, next_potion} = tuple
    |> Tuple.to_list
    |> Enum.reduce({[], potion}, fn(item, {l, potion}) ->
      {new_item, new_potion} = PhStTransform.transmogrify(item, potion, new_depth)
      {[new_item | l], new_potion} end)
    new_tuple = List.to_tuple(:lists.reverse(new_tuple_list))
    # Consistency fix: distill from the potion produced by the element pass
    # (next_potion), as the List implementation does. The original used the
    # stale pre-traversal potion, discarding any self-modifications made
    # while transmogrifying the elements.
    trans = PhStTransform.Potion.distill(Tuple, next_potion)
    trans.(new_tuple, next_potion, depth)
  end
end
defimpl PhStTransform, for: Map do
  # Bug fix: the default depth here was `0`, unlike every other
  # implementation (which uses `[]`); `[Map | 0]` builds an improper list
  # and breaks any depth-aware transform function. The default is now the
  # empty depth list.
  def transform(map, function_map, depth \\ []) do
    potion = PhStTransform.Potion.brew(function_map, depth)
    new_map = for {key, val} <- map, into: %{}, do: {key, PhStTransform.transform(val, potion, [Map | depth])}
    trans = PhStTransform.Potion.distill(Map, potion)
    trans.(new_map, depth)
  end

  def transmogrify(map, function_map, depth \\ []) do
    potion = PhStTransform.Potion.concoct(function_map, depth)
    new_depth = [Map | depth]
    {new_map, next_potion} = Enum.reduce(map, {%{}, potion}, fn({key, value}, {bmap, potion}) ->
      {new_value, new_potion} = PhStTransform.transmogrify(value, potion, new_depth)
      {Map.put(bmap, key, new_value), new_potion} end)
    # Consistency fix: distill from next_potion (as the List implementation
    # does) so self-modifications made during the value pass are honored.
    trans = PhStTransform.Potion.distill(Map, next_potion)
    trans.(new_map, next_potion, depth)
  end
end
defimpl PhStTransform, for: Regex do
  # Regexes are leaves: apply the Regex transform from the brewed potion.
  def transform(regex, function_map, depth \\ []) do
    brewed = PhStTransform.Potion.brew(function_map, depth)
    PhStTransform.Potion.distill(Regex, brewed).(regex, depth)
  end

  # Self-modifying variant: the concocted potion is threaded through.
  def transmogrify(regex, function_map, depth \\ []) do
    concocted = PhStTransform.Potion.concoct(function_map, depth)
    PhStTransform.Potion.distill(Regex, concocted).(regex, concocted, depth)
  end
end
defimpl PhStTransform, for: Function do
  # Functions are leaves: apply the Function transform from the brewed potion.
  def transform(function, function_map, depth \\ []) do
    brewed = PhStTransform.Potion.brew(function_map, depth)
    PhStTransform.Potion.distill(Function, brewed).(function, depth)
  end

  # Self-modifying variant: the concocted potion is threaded through.
  def transmogrify(function, function_map, depth \\ []) do
    concocted = PhStTransform.Potion.concoct(function_map, depth)
    PhStTransform.Potion.distill(Function, concocted).(function, concocted, depth)
  end
end
defimpl PhStTransform, for: PID do
  # PIDs are leaves: apply the PID transform from the brewed potion.
  def transform(pid, function_map, depth \\ []) do
    brewed = PhStTransform.Potion.brew(function_map, depth)
    PhStTransform.Potion.distill(PID, brewed).(pid, depth)
  end

  # Self-modifying variant: the concocted potion is threaded through.
  def transmogrify(pid, function_map, depth \\ []) do
    concocted = PhStTransform.Potion.concoct(function_map, depth)
    PhStTransform.Potion.distill(PID, concocted).(pid, concocted, depth)
  end
end
defimpl PhStTransform, for: Port do
  # Ports are leaves: apply the Port transform from the brewed potion.
  def transform(port, function_map, depth \\ []) do
    brewed = PhStTransform.Potion.brew(function_map, depth)
    PhStTransform.Potion.distill(Port, brewed).(port, depth)
  end

  # Self-modifying variant: the concocted potion is threaded through.
  def transmogrify(port, function_map, depth \\ []) do
    concocted = PhStTransform.Potion.concoct(function_map, depth)
    PhStTransform.Potion.distill(Port, concocted).(port, concocted, depth)
  end
end
defimpl PhStTransform, for: Reference do
  # References are leaves: apply the Reference transform from the brewed potion.
  def transform(reference, function_map, depth \\ []) do
    brewed = PhStTransform.Potion.brew(function_map, depth)
    PhStTransform.Potion.distill(Reference, brewed).(reference, depth)
  end

  # Self-modifying variant: the concocted potion is threaded through.
  def transmogrify(reference, function_map, depth \\ []) do
    concocted = PhStTransform.Potion.concoct(function_map, depth)
    PhStTransform.Potion.distill(Reference, concocted).(reference, concocted, depth)
  end
end
# Fallback implementation (via @fallback_to_any): primarily handles structs.
# A genuine struct is unwrapped to a plain map, transformed under its module
# name on the depth list, then rebuilt; anything that merely carries a
# :__struct__ key falls back to the Map implementation.
defimpl PhStTransform, for: Any do
def transform(%{__struct__: struct_name} = map, function_map, depth \\ []) do
potion = PhStTransform.Potion.brew(function_map, depth)
try do
struct_name.__struct__
rescue
# :__struct__ does not name a real struct module: treat as a plain map.
_ -> PhStTransform.Map.transform(map, potion, depth)
else
default_struct ->
# Only treat as a struct when the key sets match the module's default
# struct exactly; otherwise fall back to the Map implementation.
if :maps.keys(default_struct) == :maps.keys(map) do
data = Map.from_struct(map)
# replace any Map transforms from the potion with the identity map
new_potion = Map.put(potion, Map, fn(x, _d) -> x end)
new_data = PhStTransform.Map.transform(data, new_potion, [struct_name | depth])
new_struct = struct(struct_name, new_data)
# The struct-level transform is looked up under the struct's own module name.
trans = PhStTransform.Potion.distill(struct_name, potion)
trans.(new_struct, depth)
else
PhStTransform.Map.transform(map, potion, depth)
end
end
end
# Same flow as transform/3, but threading a self-modifying potion through
# the Map pass and the final struct-level transform.
def transmogrify(%{__struct__: struct_name} = map, function_map, depth \\ []) do
potion = PhStTransform.Potion.concoct(function_map, depth)
try do
struct_name.__struct__
rescue
_ -> PhStTransform.Map.transmogrify(map, potion, depth)
else
default_struct ->
if :maps.keys(default_struct) == :maps.keys(map) do
data = Map.from_struct(map)
# replace any Map transforms from the potion with the identity map
new_potion = Map.put(potion, Map, fn(x, p, _d) -> {x, p} end)
{ new_data, next_potion } = PhStTransform.Map.transmogrify(data, new_potion, [struct_name | depth])
new_struct = struct(struct_name, new_data)
trans = PhStTransform.Potion.distill(struct_name, potion)
trans.(new_struct, next_potion, depth)
else
PhStTransform.Map.transmogrify(map, potion, depth)
end
end
end
end
|
lib/phst_transform.ex
| 0.915752
| 0.905615
|
phst_transform.ex
|
starcoder
|
defmodule Differ do
  alias Differ.Diffable
  alias Differ.Patchable

  @moduledoc """
  Module that computes `diff` for terms
  # Using with structs
  It is possible to use `Differ` with structs, you need to derive default implementation
  for `Differ.Diffable` and `Differ.Patchable` protocols:
  ```elixir
  defmodule User do
    @derive [Differ.Diffable, Differ.Patchable]
    defstruct name: "", age: 21
  end
  ```
  And now you can call `Differ.diff/2` with your structs:
      iex> Differ.diff(%User{name: "John"}, %User{name: "John Smith"})
      [{:name, :diff, [eq: "John", ins: " Smith"]}, {:age, :eq, 21}]
  You can skip some fields as well (e.g. timestamps, id), by using `skip` option, when deriving default implementation
  ```elixir
  @derive [{Differ.Diffable, skip: [:updated_at, :diffs]}, Differ.Patchable]
  schema "posts" do
    field :content, :string
    field :tags, {:array, :string}
    field :title, :string
    field :diffs, {:array, Diff}, default: []
    timestamps()
  end
  ```
  """

  @doc """
  Returns diff between 2 terms that implement `Differ.Diffable` protocol
  Diff here is *edit script*, that should be compatible with `List.myers_difference/3`
  ## Examples
      iex> Differ.diff(%{key: "value"}, %{key: "value"})
      [eq: %{key: "value"}]
      iex> Differ.diff("Hello!", "Hey!")
      [eq: "He", del: "llo", ins: "y", eq: "!"]
  """
  @spec diff(Diffable.t(), Diffable.t()) :: Diffable.diff()
  def diff(old, new) do
    Diffable.diff(old, new)
  end

  @doc """
  Applies diff and returns patched value
  ## Examples
      iex> old_list = ["22", "1"]
      iex> diff = Differ.diff(old_list, ["2", "1", "3"])
      iex> Differ.patch(old_list, diff)
      {:ok, ["2", "1", "3"]}
  """
  @spec patch(Patchable.t(), Diffable.diff()) :: {:ok, Patchable.t()} | {:error, String.t()}
  def patch(obj, diff) do
    apply_diff(obj, diff, false, nil)
  end

  @doc "Same as `Differ.patch/2`, but returns value and raises on error"
  @spec patch!(Patchable.t(), Diffable.diff()) :: Patchable.t()
  def patch!(obj, diff) do
    case patch(obj, diff) do
      {:ok, val} -> val
      # Fix: the original case only matched {:ok, val}, so an error result
      # surfaced as an opaque CaseClauseError; raise a descriptive error
      # instead (covers {:error, reason} and {:conflict, reason}).
      {_error_or_conflict, reason} -> raise "patch failed: #{inspect(reason)}"
    end
  end

  @doc """
  Reverts diff and returns patched value
  ## Examples
      iex> old_list = ["22", "1"]
      iex> new_list = ["2", "1", "3"]
      iex> diff = Differ.diff(old_list, new_list)
      iex> Differ.revert(new_list, diff)
      {:ok, ["22", "1"]}
  """
  @spec revert(Patchable.t(), Diffable.diff()) :: {:ok, Patchable.t()} | {:error, String.t()}
  def revert(obj, diff) do
    apply_diff(obj, diff, true, nil)
  end

  @doc "Same as `Differ.revert/2`, but returns value and raises on error"
  @spec revert!(Patchable.t(), Diffable.diff()) :: Patchable.t()
  def revert!(obj, diff) do
    case revert(obj, diff) do
      {:ok, val} -> val
      # Fix: same non-exhaustive case as patch!/2 — raise descriptively.
      {_error_or_conflict, reason} -> raise "revert failed: #{inspect(reason)}"
    end
  end

  # Optimizes a single operation, recursing into nested diffs; other shapes
  # are delegated to the Diffable implementation for the value.
  defp optimize_op(op, level) do
    case op do
      {:remove, _val} ->
        op
      {:skip, _val} ->
        op
      {:diff, diff} ->
        new_op = optimize(diff, level)
        {:diff, new_op}
      {key, :diff, diff} ->
        new_op = optimize(diff, level)
        {key, :diff, new_op}
      {_key, a, _val} when is_atom(a) ->
        Diffable.optimize_op(%{}, op, level)
      {key, val} when is_atom(key) ->
        Diffable.optimize_op(val, op, level)
    end
  end

  @doc """
  Optimizes diff size
  Optimizes size by removing data that is not relevant for change.
  There is 3 levels of optimization:
  1. Safe - can have conflicts, can be reverted
  2. Safe-ish - you lose ability to get conflicts, but still can be reverted
  3. Un-safe - no conflicts and no reverting
  ## Examples
      iex> regular_diff = Differ.diff(%{"same" => "same"}, %{"same" => "same", "new" => "val"})
      [{"same", :eq, "same"}, {"new", :ins, "val"}]
      iex> Differ.optimize(regular_diff)
      [{"new", :ins, "val"}]
      iex> diff = Differ.diff("Somewhat long string with a litle change athere", "Somewhat long string with a litle change here")
      [eq: "Somewhat long string with a litle change ", del: "at", eq: "here"]
      iex> Differ.optimize(diff, 2)
      [skip: 41, del: "at", skip: 4]
      iex> Differ.optimize(diff, 3)
      [skip: 41, remove: 2, skip: 4]
  """
  @spec optimize(Diffable.diff(), Diffable.level()) :: Diffable.diff()
  def optimize(diff, level \\ 1) do
    Enum.reduce(diff, [], fn operation, new_diff ->
      case optimize_op(operation, level) do
        nil -> new_diff
        opt -> [opt | new_diff]
      end
    end)
    |> Enum.reverse()
  end

  @doc since: "0.1.1"
  @deprecated "Use Differ.explain/3 instead"
  def show_diff(term, diff, cb, opts \\ []) do
    {:ok, explain(term, diff, cb, opts)}
  end

  @doc since: "0.1.1"
  @doc """
  Allows to visualize diff
  Applies diff to a `term` and calls `cb` on each operation,
  result of `cb` will be used to construct new value for `term`
  ## Options
  - `revert` - reverts term with given diff, before apply (default `true`)
  ## Examples
      iex> Differ.explain("qwerty", [eq: "qwer", del: "123", ins: "ty"],
      ...> fn {op, val} ->
      ...>   case op do
      ...>     :del -> "--" <> val
      ...>     :ins -> "++" <> val
      ...>     _ -> val
      ...>   end
      ...> end)
      "qwer--123++ty"
  """
  @spec explain(Patchable.t(), Diffable.diff(), (Diffable.operation() -> String.t()), revert: true) ::
          String.t()
  def explain(term, diff, cb, opts \\ []) do
    revert? = Keyword.get(opts, :revert, true)
    term = if revert?, do: revert!(term, diff), else: term
    {res, _} =
      Enum.reduce(diff, {"", 0}, fn op, acc ->
        {:ok, acc} = Patchable.explain(term, op, acc, cb)
        acc
      end)
    res
  end

  # Dispatches the result of a single Patchable.perform/3 call: continue the
  # reduce, recurse into a nested diff, or halt with the failure as-is.
  defp match_res(res, old_val, acc, revert, cb) do
    case res do
      {:ok, new_acc} ->
        {:cont, new_acc}
      {:diff, diff, old, op} ->
        diff_res = apply_diff(old, diff, revert, cb)
        case diff_res do
          {:ok, val} ->
            new_op = Tuple.append(op, val)
            Patchable.perform(old_val, new_op, acc)
            |> match_res(old_val, acc, revert, cb)
          _ ->
            {:halt, diff_res}
        end
      _ ->
        {:halt, res}
    end
  end

  # nil term or nil diff are no-ops.
  defp apply_diff(nil, _, _, _), do: {:ok, nil}
  defp apply_diff(v, nil, _, _), do: {:ok, v}
  defp apply_diff(old_val, diff, revert, cb) do
    result =
      Enum.reduce_while(diff, {old_val, 0}, fn op, acc ->
        # When reverting, each operation is first inverted via the Patchable
        # implementation before being performed.
        op = if revert, do: Patchable.revert_op(old_val, op), else: {:ok, op}
        case op do
          {:ok, op} ->
            Patchable.perform(old_val, op, acc)
            |> match_res(old_val, acc, revert, cb)
          _ ->
            {:halt, op}
        end
      end)
    case result do
      {:error, _} -> result
      {:conflict, _} -> result
      {val, _} -> {:ok, val}
    end
  end
end
|
lib/differ.ex
| 0.904033
| 0.87289
|
differ.ex
|
starcoder
|
defmodule Alods.Queue do
  @moduledoc """
  This module takes care of starting a DETS store which will hold the message to be delivered.
  """
  import Ex2ms
  use Alods.DETS, "queue"

  @valid_methods [:get, :post]
  @valid_statuses [:pending, :processing]

  @doc """
  Returns all entries of which their timestamp are smaller then the current time
  """
  @spec get_pending_entries :: {:ok, [{String.t, atom, String.t, map, number, atom}]}
  def get_pending_entries, do: GenServer.call(__MODULE__, {:get_pending_entries})

  @doc """
  Schedules a record for a later retry, recording the failure reason.
  """
  @spec retry_later(%Alods.Record{}, any) :: :ok | {:error, any}
  def retry_later(record, reason), do: GenServer.call(__MODULE__, {:retry_later, record.id, reason})

  @doc """
  Pushes a record into the store.
  """
  @spec push(atom, String.t, map | list | tuple, function | nil) :: {:ok, String.t} | {:error, any}
  def push(method, url, data, callback \\ nil)
  # Fix: the original guard read `method in @valid_methods and is_map(data)
  # or is_tuple(data)`; because `and` binds tighter than `or`, ANY method was
  # accepted as long as the data was a tuple. The parentheses restore the
  # intended "valid method AND (map or tuple) data".
  def push(method, url, data, callback)
      when method in @valid_methods and (is_map(data) or is_tuple(data)) do
    with {:ok, record} <- Alods.Record.create(%{method: method, url: url, data: data, callback: callback}),
         true <- :dets.insert_new(__MODULE__, {record.id, record})
      do
      {:ok, record.id}
    else
      error -> error
    end
  end
  # Keyword/property lists are normalized to maps first.
  def push(method, url, data, callback) when is_list(data) do
    push(method, url, Enum.into(data, %{}), callback)
  end
  # Invalid method: with the guard fix above, map AND tuple payloads now both
  # land here instead of being silently accepted.
  def push(method, _url, data, _callback) when is_map(data) or is_tuple(data) do
    {:error, "#{method} is not valid, must be one of #{Enum.join(@valid_methods, ", ")}"}
  end
  # Raw binary payloads are wrapped in a {:raw, data} tuple.
  def push(method, url, data, callback) when is_binary(data) do
    push(method, url, {:raw, data}, callback)
  end

  @doc """
  Claims pending work: first resets entries stuck in :processing, then marks
  every pending entry as :processing and returns the claimed records.
  """
  def get_work do
    reset_entries_stuck_in_processing()
    get_pending_entries()
    |> Enum.map(&mark_entry_as_processing/1)
    |> Enum.filter(&(&1 != nil))
  end

  # Returns the record on success, nil when the status update failed.
  defp mark_entry_as_processing(entry) do
    case update_status(entry, :processing) do
      {:ok, record} -> record
      _ -> nil
    end
  end

  defp update_status(%Alods.Record{} = record, status) when status in @valid_statuses,
       do: GenServer.call(__MODULE__, {:update_status, record, status})

  # Entries stuck in :processing longer than the configured window (default
  # 60s) are flipped back to :pending so they can be retried.
  defp reset_entries_stuck_in_processing do
    seconds = Application.get_env(:alods, :reset_after_processing_in_seconds, 60)
    __MODULE__
    |> :dets.select(select_processing_longer_than_or_equal_to_seconds(seconds))
    |> Enum.each(&(update_status(&1, :pending)))
  end

  # NOTE(review): not referenced anywhere in this module; presumably used by
  # code injected by `use Alods.DETS` — confirm before removing.
  @spec select_all :: list
  defp select_all do
    fun do {id, record} when id != nil -> record end
  end

  @spec select_pending_older_than_or_equal_to_now :: list
  defp select_pending_older_than_or_equal_to_now do
    now = :os.system_time(:seconds)
    Ex2ms.fun do
      {_id, %{timestamp: timestamp, status: status}} = record when timestamp <= ^now and status == "pending" -> record
    end
  end

  @spec select_processing_longer_than_or_equal_to_seconds(non_neg_integer) :: list
  defp select_processing_longer_than_or_equal_to_seconds(seconds) do
    time = :os.system_time(:seconds) - seconds
    Ex2ms.fun do
      {_id, %{timestamp: timestamp, status: status}} = record
      when timestamp <= ^time and status == "processing" -> record
    end
  end

  def handle_call({:retry_later, id, reason}, _caller, state) do
    {:ok, record} = find(id)
    # Linear backoff: 2 seconds per prior retry, capped at one hour.
    delay = (2 * record.retries)
    capped_delay = if delay > 3600, do: 3600, else: delay
    retry_at = :os.system_time(:seconds) + capped_delay
    updated_record = Alods.Record.update!(
      record,
      %{timestamp: retry_at, status: :pending, retries: (record.retries + 1), reason: reason}
    )
    :ok = :dets.insert(__MODULE__, {updated_record.id, updated_record})
    {:reply, {:ok, record.id}, state}
  end

  def handle_call({:get_pending_entries}, _caller, state) do
    records = __MODULE__
    |> :dets.select(select_pending_older_than_or_equal_to_now())
    |> Enum.map(fn {_id, record} -> record end)
    {:reply, records, state}
  end

  def handle_call({:update_status, record, status}, _caller, state) when status in @valid_statuses do
    record = Alods.Record.update!(record, %{status: status})
    :ok = :dets.insert(__MODULE__, {record.id, record})
    {:reply, {:ok, record}, state}
  end
end
|
lib/alods/queue.ex
| 0.799638
| 0.412412
|
queue.ex
|
starcoder
|
defmodule Elibuf.Primitives do
alias Elibuf.Primitives.Base
@moduledoc """
Protobuf primitive types
* Double, Float
* Int32, Int64
* Uint32, Uint64
* Sint32, Sint64
* Fixed32, Fixed64
* Sfixed32, Sfixed64
* Bool
* String
* Bytes
* Enum
"""
@doc """
Double type
"""
def double() do
%Base{type: :double, repeating: false, default: nil}
end
@doc """
Float type
"""
def float() do
%Base{type: :float, repeating: false, default: nil}
end
@doc """
Int32 type
* Notes: Uses variable-length encoding. Inefficient for encoding negative numbers – if your field is likely to have negative values, use sint32 instead.
"""
def int32() do
%Base{type: :int32, repeating: false, default: nil}
end
@doc """
Int64 type
* Notes: Uses variable-length encoding. Inefficient for encoding negative numbers – if your field is likely to have negative values, use sint64 instead.
"""
def int64() do
%Base{type: :int64, repeating: false, default: nil}
end
@doc """
Uint32 type
* Notes: Uses variable-length encoding.
"""
def uint32() do
%Base{type: :uint32, repeating: false, default: nil}
end
@doc """
Uint64 type
* Notes: Uses variable-length encoding.
"""
def uint64() do
%Base{type: :uint64, repeating: false, default: nil}
end
@doc """
Sint32 type
* Notes: Uses variable-length encoding. Signed int value. These more efficiently encode negative numbers than regular int32s.
"""
def sint32() do
%Base{type: :sint32, repeating: false, default: nil}
end
@doc """
Sin64 type
* Notes: Uses variable-length encoding. Signed int value. These more efficiently encode negative numbers than regular int64s.
"""
def sint64() do
%Base{type: :sint64, repeating: false, default: nil}
end
@doc """
Fixed32 type
* Notes: Always four bytes. More efficient than uint32 if values are often greater than 2^28.
"""
def fixed32() do
%Base{type: :fixed32, repeating: false, default: nil}
end
@doc """
Fixed64 type
* Notes: Always eight bytes. More efficient than uint64 if values are often greater than 2^56.
"""
def fixed64() do
%Base{type: :fixed64, repeating: false, default: nil}
end
@doc """
Sfixed32 type
* Notes: Always four bytes.
"""
def sfixed32() do
%Base{type: :sfixed32, repeating: false, default: nil}
end
@doc """
Sfixed64 type
* Notes: Always eight bytes.
"""
def sfixed64() do
%Base{type: :sfixed64, repeating: false, default: nil}
end
@doc """
Bool type
"""
def bool() do
%Base{type: :bool, repeating: false, default: nil}
end
@doc """
Builds a non-repeating `string` scalar type.

Contents must always be UTF-8 encoded or 7-bit ASCII text.
"""
def string do
  %Base{default: nil, repeating: false, type: :string}
end
@doc """
Builds a non-repeating `bytes` scalar type.

May hold any arbitrary sequence of bytes.
"""
def bytes do
  %Base{default: nil, repeating: false, type: :bytes}
end
@doc """
Builds a non-repeating `enum` type.
"""
def enum do
  %Base{default: nil, repeating: false, type: :enum}
end
@doc """
Timestamp type
"""
# Fix: the original read `def timestamp()do` (missing space before `do`).
def timestamp() do
  %Base{type: :timestamp, repeating: false, default: nil}
end
@doc """
Builds a non-repeating `duration` type.
"""
def duration do
  %Base{default: nil, repeating: false, type: :duration}
end
end
|
lib/primitives/primitives.ex
| 0.806662
| 0.470615
|
primitives.ex
|
starcoder
|
defmodule Ecto.Adapters.SQL.Connection do
  @moduledoc """
  Specifies the behaviour to be implemented by all SQL connections.
  """

  @typedoc "The prepared query which is an SQL command"
  @type prepared :: String.t

  @typedoc "The cache query which is a DBConnection Query"
  @type cached :: map

  @doc """
  Receives options and returns `DBConnection` supervisor child
  specification.
  """
  @callback child_spec(options :: Keyword.t) :: {module, Keyword.t}

  @doc """
  Prepares and executes the given query with `DBConnection`.
  """
  @callback prepare_execute(connection :: DBConnection.t, name :: String.t, prepared, params :: [term], options :: Keyword.t) ::
              {:ok, query :: map, term} | {:error, Exception.t}

  @doc """
  Executes the given prepared query with `DBConnection`.
  """
  @callback execute(connection :: DBConnection.t, prepared_query :: prepared, params :: [term], options :: Keyword.t) ::
              {:ok, term} | {:error, Exception.t}

  # Second form of execute/4: takes a cached DBConnection query instead of a
  # raw SQL string and may additionally tag the error as :reset — presumably so
  # the caller can invalidate the cached query; verify against adapter usage.
  @callback execute(connection :: DBConnection.t, prepared_query :: cached, params :: [term], options :: Keyword.t) ::
              {:ok, term} | {:error | :reset, Exception.t}

  @doc """
  Returns a stream that prepares and executes the given query with
  `DBConnection`.
  """
  @callback stream(connection :: DBConnection.conn, prepared_query :: prepared, params :: [term], options :: Keyword.t) ::
              Enum.t

  @doc """
  Receives the exception returned by `query/4`.
  The constraints are in the keyword list and must return the
  constraint type, like `:unique`, and the constraint name as
  a string, for example:
      [unique: "posts_title_index"]
  Must return an empty list if the error does not come
  from any constraint.
  """
  @callback to_constraints(exception :: Exception.t) :: Keyword.t

  ## Queries

  @doc """
  Receives a query and must return a SELECT query.
  """
  @callback all(query :: Ecto.Query.t) :: iodata

  @doc """
  Receives a query and values to update and must return an UPDATE query.
  """
  @callback update_all(query :: Ecto.Query.t) :: iodata

  @doc """
  Receives a query and must return a DELETE query.
  """
  @callback delete_all(query :: Ecto.Query.t) :: iodata

  @doc """
  Returns an INSERT for the given `rows` in `table` returning
  the given `returning`.
  """
  # Fix: original spec read `prefix ::String.t` (missing space after `::`).
  @callback insert(prefix :: String.t, table :: String.t,
                   header :: [atom], rows :: [[atom | nil]],
                   on_conflict :: Ecto.Adapter.on_conflict, returning :: [atom]) :: iodata

  @doc """
  Returns an UPDATE for the given `fields` in `table` filtered by
  `filters` returning the given `returning`.
  """
  @callback update(prefix :: String.t, table :: String.t, fields :: [atom],
                   filters :: [atom], returning :: [atom]) :: iodata

  @doc """
  Returns a DELETE for the `filters` returning the given `returning`.
  """
  @callback delete(prefix :: String.t, table :: String.t,
                   filters :: [atom], returning :: [atom]) :: iodata

  ## DDL

  @doc """
  Receives a DDL command and returns a query that executes it.
  """
  @callback execute_ddl(command :: Ecto.Adapter.Migration.command) :: String.t | [iodata]
end
|
lib/ecto/adapters/sql/connection.ex
| 0.903145
| 0.431045
|
connection.ex
|
starcoder
|
defmodule Math.Enum do
  @moduledoc """
  Math.Enum defines Math-functions that work on any collection extending the Enumerable protocol.
  This means Maps, Lists, Sets, etc., and any custom collection types as well.
  """
  require Integer

  @doc """
  Calculates the product, obtained by multiplying all elements in *collection* with each other.

  Raises `Enum.EmptyError` when *collection* is empty (the reduction has no
  initial accumulator, so the first element seeds the product).

  ## Examples

      iex> Math.Enum.product [1,2,3]
      6
      iex> Math.Enum.product 1..10
      3628800
      iex> Math.Enum.product [1,2,3,4,5, -100]
      -12000
  """
  @spec product(Enum.t()) :: number
  def product(collection)

  # General implementation for any enumerable.
  def product(collection) do
    Enum.reduce(collection, &*/2)
  end

  @doc """
  Calculates the mean of a collection of numbers.
  This is the sum, divided by the amount of elements in the collection.
  If the collection is empty, returns `nil`
  Also see `Math.Enum.median/1`

  ## Examples

      iex> Math.Enum.mean [1,2,3]
      2.0
      iex> Math.Enum.mean 1..10
      5.5
      iex> Math.Enum.mean [1,2,3,4,5, -100]
      -14.166666666666666
      iex> Math.Enum.mean []
      nil
  """
  # Fix: the spec previously claimed `number`, but the empty collection
  # legitimately returns nil (as documented and doctested above).
  @spec mean(Enum.t()) :: number | nil
  def mean(collection)

  def mean(collection) do
    count = Enum.count(collection)

    case count do
      0 -> nil
      _ -> Enum.sum(collection) / count
    end
  end

  @doc """
  Calculates the median of a given collection of numbers.
  - If the collection has an odd number of elements, this will be the middle-most element of the (sorted) collection.
  - If the collection has an even number of elements, this will be mean of the middle-most two elements of the (sorted) collection.
  If the collection is empty, returns `nil`
  Also see `Math.Enum.mean/1`

  ## Examples

      iex> Math.Enum.median [1,2,3]
      2
      iex> Math.Enum.median 1..10
      5.5
      iex> Math.Enum.median [1,2,3,4,5, -100]
      2.5
      iex> Math.Enum.median [1,2]
      1.5
      iex> Math.Enum.median []
      nil
  """
  @spec median(Enum.t()) :: number | nil
  def median(collection)

  def median(collection) do
    count = Enum.count(collection)
    mid_point = div(count, 2)

    cond do
      count == 0 ->
        nil

      # Middle element exists
      Integer.is_odd(count) ->
        collection
        |> Enum.sort()
        |> Enum.fetch!(mid_point)

      # Even count: average the two middle-most elements.
      true ->
        collection
        |> Enum.sort()
        |> Enum.slice((mid_point - 1)..mid_point)
        |> Math.Enum.mean()
    end
  end

  @doc """
  Calculates the mode of a given collection of numbers.
  Always returns a list. An empty input results in an empty list. Supports bimodal/multimodal collections by returning a list with multiple values.

  ## Examples

      iex> Math.Enum.mode [1, 2, 3, 4, 1]
      [1]
      iex> Math.Enum.mode [1, 2, 3, 2, 3]
      [2, 3]
      iex> Math.Enum.mode []
      []
  """
  @spec mode(Enum.t()) :: Enum.t()
  def mode(collection)

  def mode(collection) do
    collection
    # Build a frequency map: element => occurrence count.
    |> Enum.reduce(%{}, fn k, acc -> Map.update(acc, k, 0, &(&1 + 1)) end)
    # Invert: occurrence count => [elements with that count].
    |> Enum.group_by(&elem(&1, 1), &elem(&1, 0))
    # Pick the highest count; default handles the empty collection.
    |> Enum.max_by(&elem(&1, 0), fn -> {nil, []} end)
    |> elem(1)
  end
end
|
lib/math/enum.ex
| 0.919742
| 0.583144
|
enum.ex
|
starcoder
|
defmodule Wobserver2.Util.Process do
  @moduledoc ~S"""
  Process and pid handling.
  """

  import Wobserver2.Util.Helper, only: [string_to_module: 1, format_function: 1]

  # Keys requested from :erlang.process_info/2 for the compact summary view.
  @process_summary [
    :registered_name,
    :initial_call,
    :memory,
    :reductions,
    :current_function,
    :message_queue_len,
    :dictionary
  ]

  # Keys requested for the detailed (full) process view.
  @process_full [
    :registered_name,
    :priority,
    :trap_exit,
    :initial_call,
    :current_function,
    :message_queue_len,
    :error_handler,
    :group_leader,
    :links,
    :memory,
    :total_heap_size,
    :heap_size,
    :stack_size,
    :min_heap_size,
    :garbage_collection,
    :status,
    :dictionary
  ]

  # Keys requested for the meta view (classification + status).
  @process_meta [
    :initial_call,
    :current_function,
    :status,
    :dictionary
  ]

  @doc ~S"""
  Turns the argument `pid` into a pid or if not possible returns `nil`.
  It will accept:
  - pids
  - atoms / module names (registered processes)
  - single integers
  - a list of 3 integers
  - a tuple of 3 integers
  - a charlist in the format: `'<0.0.0>'`
  - a String in the following formats:
    - `"#PID<0.0.0>"`
    - `"<0.0.0>"`
    - atom / module name
  Example:
  ```bash
  iex> Wobserver2.Util.Process.pid pid(0, 33, 0)
  #PID<0.33.0>
  ```
  ```bash
  iex> Wobserver2.Util.Process.pid :cowboy_sup
  #PID<0.253.0>
  ```
  ```bash
  iex> Wobserver2.Util.Process.pid Logger
  #PID<0.213.0>
  ```
  ```bash
  iex> Wobserver2.Util.Process.pid 33
  #PID<0.33.0>
  ```
  ```bash
  iex> Wobserver2.Util.Process.pid [0, 33, 0]
  #PID<0.33.0>
  ```
  ```bash
  iex> Wobserver2.Util.Process.pid '<0.33.0>'
  #PID<0.33.0>
  ```
  ```bash
  iex> Wobserver2.Util.Process.pid {0, 33, 0}
  #PID<0.33.0>
  ```
  ```bash
  iex> Wobserver2.Util.Process.pid "#PID<0.33.0>"
  #PID<0.33.0>
  ```
  ```bash
  iex> Wobserver2.Util.Process.pid "<0.33.0>"
  #PID<0.33.0>
  ```
  ```bash
  iex> Wobserver2.Util.Process.pid "cowboy_sup"
  #PID<0.253.0>
  ```
  ```bash
  iex> Wobserver2.Util.Process.pid "Logger"
  #PID<0.213.0>
  ```
  ```bash
  iex> Wobserver2.Util.Process.pid 4.5
  nil
  ```
  """
  @spec pid(pid :: pid | atom | list | binary | integer | {integer, integer, integer}) ::
          pid | nil
  def pid(pid)
  # Already a pid: pass through unchanged.
  def pid(pid) when is_pid(pid), do: pid
  # Atom: look up a registered process (nil when not registered).
  def pid(pid) when is_atom(pid), do: Process.whereis(pid)
  # Bare integer n is treated as the middle component: <0.n.0>.
  def pid(pid) when is_integer(pid), do: pid("<0.#{pid}.0>")
  # Three-element list / tuple of pid components.
  def pid([a, b, c]), do: pid("<#{a}.#{b}.#{c}>")
  # Any other list is assumed to be a charlist such as '<0.33.0>'.
  # NOTE(review): :erlang.list_to_pid raises on malformed charlists rather
  # than returning nil as the @doc promises — confirm intended behavior.
  def pid(pid) when is_list(pid), do: :erlang.list_to_pid(pid)
  def pid({a, b, c}), do: pid("<#{a}.#{b}.#{c}>")
  # Strip the "#PID" prefix, then reparse the remaining "<a.b.c>".
  def pid("#PID" <> pid), do: pid |> String.to_charlist() |> pid()
  def pid(pid = "<" <> _), do: pid |> String.to_charlist() |> pid()
  # Any other binary: resolve as a module/registered-process name.
  def pid(pid) when is_binary(pid), do: pid |> string_to_module() |> pid()
  # Anything else (floats, etc.) cannot be converted.
  def pid(_), do: nil

  @doc ~S"""
  Turns the argument `pid` into a pid or if not possible raises error.
  For example see: `Wobserver2.Util.Process.pid/1`.
  """
  @spec pid!(pid :: pid | list | binary | integer | {integer, integer, integer}) :: pid
  def pid!(pid) do
    case pid(pid) do
      nil ->
        raise ArgumentError, message: "Can not convert #{inspect(pid)} to pid."

      p ->
        p
    end
  end

  @doc ~S"""
  Creates a complete overview of process stats based on the given `pid`.
  Including but not limited to:
  - `id`, the process pid
  - `name`, the registered name or `nil`.
  - `init`, initial function or name.
  - `current`, current function.
  - `memory`, the total amount of memory used by the process.
  - `reductions`, the amount of reductions.
  - `message_queue_length`, the amount of unprocessed messages for the process.,
  """
  @spec info(pid :: pid | list | binary | integer | {integer, integer, integer}) :: :error | map
  def info(pid) do
    pid
    |> pid()
    |> process_info(@process_full, &structure_full/2)
  end

  @doc ~S"""
  Retreives a list of process summaries.
  Every summary contains:
  - `id`, the process pid.
  - `name`, the registered name or `nil`.
  - `init`, initial function or name.
  - `current`, current function.
  - `memory`, the total amount of memory used by the process.
  - `reductions`, the amount of reductions.
  - `message_queue_length`, the amount of unprocessed messages for the process.
  """
  @spec list :: list(map)
  def list do
    :erlang.processes()
    |> Enum.map(&summary/1)
  end

  @doc ~S"""
  Creates formatted meta information about the process based on the given `pid`.
  The information contains:
  - `init`, initial function or name.
  - `current`, current function.
  - `status`, process status.
  """
  @spec meta(pid :: pid) :: map
  def meta(pid),
    do: pid |> process_info(@process_meta, &structure_meta/2)

  @doc ~S"""
  Creates formatted summary about the process based on the given `pid`.
  Every summary contains:
  - `id`, the process pid.
  - `name`, the registered name or `nil`.
  - `init`, initial function or name.
  - `current`, current function.
  - `memory`, the total amount of memory used by the process.
  - `reductions`, the amount of reductions.
  - `message_queue_length`, the amount of unprocessed messages for the process.
  """
  @spec summary(pid :: pid) :: map
  def summary(pid),
    do: pid |> process_info(@process_summary, &structure_summary/2)

  # Helpers

  # Fetch the requested process_info keys and shape them with the given
  # structurer; :error for unresolved (nil) or dead (:undefined) processes.
  defp process_info(nil, _, _), do: :error

  defp process_info(pid, information, structurer) do
    case :erlang.process_info(pid, information) do
      :undefined -> :error
      data -> structurer.(data, pid)
    end
  end

  # Ask the OTP process for its callback module via :sys.get_status/2 with a
  # 100ms timeout. Non-OTP or unresponsive processes exit/throw here, hence
  # the function-level catch returning :unknown.
  defp process_status_module(pid) do
    {:status, ^pid, {:module, class}, _} = :sys.get_status(pid, 100)

    class
  catch
    _, _ -> :unknown
  end

  # Fetch the process state (100ms timeout); :unknown when the process does
  # not speak the :sys protocol or does not reply in time.
  defp state(pid) do
    :sys.get_state(pid, 100)
  catch
    _, _ -> :unknown
  end

  @doc false
  # Prefer the "$initial_call" stashed in the process dictionary (set by OTP
  # behaviours) over the raw :initial_call, which is often just proc_lib init.
  @spec initial_call(data :: keyword) :: {atom, atom, integer} | atom
  def initial_call(data) do
    dictionary_init =
      data
      |> Keyword.get(:dictionary, [])
      |> Keyword.get(:"$initial_call", nil)

    case dictionary_init do
      nil ->
        Keyword.get(data, :initial_call, nil)

      call ->
        call
    end
  end

  # Structurers

  # Shape raw process_info data into the summary map returned by summary/1.
  defp structure_summary(data, pid) do
    # :registered_name is [] for unregistered processes; normalize to nil.
    process_name =
      case Keyword.get(data, :registered_name, []) do
        [] -> nil
        name -> name
      end

    %{
      pid: pid,
      name: process_name,
      init: format_function(initial_call(data)),
      current: format_function(Keyword.get(data, :current_function, nil)),
      memory: Keyword.get(data, :memory, 0),
      reductions: Keyword.get(data, :reductions, 0),
      message_queue_length: Keyword.get(data, :message_queue_len, 0)
    }
  end

  # Shape raw process_info data into the detailed map returned by info/1.
  defp structure_full(data, pid) do
    gc = Keyword.get(data, :garbage_collection, [])
    # NOTE(review): no default here — if :dictionary were ever missing, the
    # "$ancestors" lookup below would raise; presumably process_info always
    # returns it for live processes. Confirm.
    dictionary = Keyword.get(data, :dictionary)

    %{
      pid: pid,
      registered_name: Keyword.get(data, :registered_name, nil),
      priority: Keyword.get(data, :priority, :normal),
      trap_exit: Keyword.get(data, :trap_exit, false),
      message_queue_len: Keyword.get(data, :message_queue_len, 0),
      error_handler: Keyword.get(data, :error_handler, :none),
      relations: %{
        group_leader: Keyword.get(data, :group_leader, nil),
        ancestors: Keyword.get(dictionary, :"$ancestors", []),
        links: Keyword.get(data, :links, nil)
      },
      memory: %{
        total: Keyword.get(data, :memory, 0),
        stack_and_heap: Keyword.get(data, :total_heap_size, 0),
        heap_size: Keyword.get(data, :heap_size, 0),
        stack_size: Keyword.get(data, :stack_size, 0),
        gc_min_heap_size: Keyword.get(gc, :min_heap_size, 0),
        gc_full_sweep_after: Keyword.get(gc, :fullsweep_after, 0)
      },
      meta: structure_meta(data, pid),
      # Render the state term as a string ("~tp" handles unicode terms).
      state: to_string(:io_lib.format("~tp", [state(pid)]))
    }
  end

  # Shape raw process_info data into the meta map (init/current/status/class).
  defp structure_meta(data, pid) do
    init = initial_call(data)

    # Classify by initial call; anything else is asked via :sys.get_status.
    class =
      case init do
        {:supervisor, _, _} -> :supervisor
        {:application_master, _, _} -> :application
        _ -> process_status_module(pid)
      end

    %{
      init: format_function(init),
      current: format_function(Keyword.get(data, :current_function)),
      status: Keyword.get(data, :status),
      class: class
    }
  end
end
|
lib/wobserver2/util/process.ex
| 0.796846
| 0.800653
|
process.ex
|
starcoder
|
defmodule AWS.Workspaces do
  @moduledoc """
  Amazon WorkSpaces Service
  Amazon WorkSpaces enables you to provision virtual, cloud-based Microsoft
  Windows and Amazon Linux desktops for your users.
  """

  # Every public function below is a thin wrapper that forwards its `input`
  # map to the WorkSpaces JSON API via request/4 with the matching action name.

  @doc """
  Associates the specified IP access control group with the specified
  directory.
  """
  def associate_ip_groups(client, input, options \\ []) do
    request(client, "AssociateIpGroups", input, options)
  end

  @doc """
  Adds one or more rules to the specified IP access control group.
  This action gives users permission to access their WorkSpaces from the CIDR
  address ranges specified in the rules.
  """
  def authorize_ip_rules(client, input, options \\ []) do
    request(client, "AuthorizeIpRules", input, options)
  end

  @doc """
  Creates an IP access control group.
  An IP access control group provides you with the ability to control the IP
  addresses from which users are allowed to access their WorkSpaces. To
  specify the CIDR address ranges, add rules to your IP access control group
  and then associate the group with your directory. You can add rules when
  you create the group or at any time using `AuthorizeIpRules`.
  There is a default IP access control group associated with your directory.
  If you don't associate an IP access control group with your directory, the
  default group is used. The default group includes a default rule that
  allows users to access their WorkSpaces from anywhere. You cannot modify
  the default IP access control group for your directory.
  """
  def create_ip_group(client, input, options \\ []) do
    request(client, "CreateIpGroup", input, options)
  end

  @doc """
  Creates the specified tags for the specified WorkSpace.
  """
  def create_tags(client, input, options \\ []) do
    request(client, "CreateTags", input, options)
  end

  @doc """
  Creates one or more WorkSpaces.
  This operation is asynchronous and returns before the WorkSpaces are
  created.
  """
  def create_workspaces(client, input, options \\ []) do
    request(client, "CreateWorkspaces", input, options)
  end

  @doc """
  Deletes the specified IP access control group.
  You cannot delete an IP access control group that is associated with a
  directory.
  """
  def delete_ip_group(client, input, options \\ []) do
    request(client, "DeleteIpGroup", input, options)
  end

  @doc """
  Deletes the specified tags from the specified WorkSpace.
  """
  def delete_tags(client, input, options \\ []) do
    request(client, "DeleteTags", input, options)
  end

  @doc """
  Deletes the specified image from your account. To delete an image, you must
  first delete any bundles that are associated with the image.
  """
  def delete_workspace_image(client, input, options \\ []) do
    request(client, "DeleteWorkspaceImage", input, options)
  end

  @doc """
  Retrieves a list that describes the configuration of bring your own license
  (BYOL) for the specified account.
  """
  def describe_account(client, input, options \\ []) do
    request(client, "DescribeAccount", input, options)
  end

  @doc """
  Retrieves a list that describes modifications to the configuration of bring
  your own license (BYOL) for the specified account.
  """
  def describe_account_modifications(client, input, options \\ []) do
    request(client, "DescribeAccountModifications", input, options)
  end

  @doc """
  Retrieves a list that describes one or more specified Amazon WorkSpaces
  clients.
  """
  def describe_client_properties(client, input, options \\ []) do
    request(client, "DescribeClientProperties", input, options)
  end

  @doc """
  Describes one or more of your IP access control groups.
  """
  def describe_ip_groups(client, input, options \\ []) do
    request(client, "DescribeIpGroups", input, options)
  end

  @doc """
  Describes the specified tags for the specified WorkSpace.
  """
  def describe_tags(client, input, options \\ []) do
    request(client, "DescribeTags", input, options)
  end

  @doc """
  Retrieves a list that describes the available WorkSpace bundles.
  You can filter the results using either bundle ID or owner, but not both.
  """
  def describe_workspace_bundles(client, input, options \\ []) do
    request(client, "DescribeWorkspaceBundles", input, options)
  end

  @doc """
  Describes the available AWS Directory Service directories that are
  registered with Amazon WorkSpaces.
  """
  def describe_workspace_directories(client, input, options \\ []) do
    request(client, "DescribeWorkspaceDirectories", input, options)
  end

  @doc """
  Retrieves a list that describes one or more specified images, if the image
  identifiers are provided. Otherwise, all images in the account are
  described.
  """
  def describe_workspace_images(client, input, options \\ []) do
    request(client, "DescribeWorkspaceImages", input, options)
  end

  @doc """
  Describes the specified WorkSpaces.
  You can filter the results by using the bundle identifier, directory
  identifier, or owner, but you can specify only one filter at a time.
  """
  def describe_workspaces(client, input, options \\ []) do
    request(client, "DescribeWorkspaces", input, options)
  end

  @doc """
  Describes the connection status of the specified WorkSpaces.
  """
  def describe_workspaces_connection_status(client, input, options \\ []) do
    request(client, "DescribeWorkspacesConnectionStatus", input, options)
  end

  @doc """
  Disassociates the specified IP access control group from the specified
  directory.
  """
  def disassociate_ip_groups(client, input, options \\ []) do
    request(client, "DisassociateIpGroups", input, options)
  end

  @doc """
  Imports the specified Windows 7 or Windows 10 bring your own license (BYOL)
  image into Amazon WorkSpaces. The image must be an already licensed EC2
  image that is in your AWS account, and you must own the image.
  """
  def import_workspace_image(client, input, options \\ []) do
    request(client, "ImportWorkspaceImage", input, options)
  end

  @doc """
  Retrieves a list of IP address ranges, specified as IPv4 CIDR blocks, that
  you can use for the network management interface when you enable bring your
  own license (BYOL).
  The management network interface is connected to a secure Amazon WorkSpaces
  management network. It is used for interactive streaming of the WorkSpace
  desktop to Amazon WorkSpaces clients, and to allow Amazon WorkSpaces to
  manage the WorkSpace.
  """
  def list_available_management_cidr_ranges(client, input, options \\ []) do
    request(client, "ListAvailableManagementCidrRanges", input, options)
  end

  @doc """
  Modifies the configuration of bring your own license (BYOL) for the
  specified account.
  """
  def modify_account(client, input, options \\ []) do
    request(client, "ModifyAccount", input, options)
  end

  @doc """
  Modifies the properties of the specified Amazon WorkSpaces clients.
  """
  def modify_client_properties(client, input, options \\ []) do
    request(client, "ModifyClientProperties", input, options)
  end

  @doc """
  Modifies the specified WorkSpace properties.
  """
  def modify_workspace_properties(client, input, options \\ []) do
    request(client, "ModifyWorkspaceProperties", input, options)
  end

  @doc """
  Sets the state of the specified WorkSpace.
  To maintain a WorkSpace without being interrupted, set the WorkSpace state
  to `ADMIN_MAINTENANCE`. WorkSpaces in this state do not respond to requests
  to reboot, stop, start, or rebuild. An AutoStop WorkSpace in this state is
  not stopped. Users can log into a WorkSpace in the `ADMIN_MAINTENANCE`
  state.
  """
  def modify_workspace_state(client, input, options \\ []) do
    request(client, "ModifyWorkspaceState", input, options)
  end

  @doc """
  Reboots the specified WorkSpaces.
  You cannot reboot a WorkSpace unless its state is `AVAILABLE` or
  `UNHEALTHY`.
  This operation is asynchronous and returns before the WorkSpaces have
  rebooted.
  """
  def reboot_workspaces(client, input, options \\ []) do
    request(client, "RebootWorkspaces", input, options)
  end

  @doc """
  Rebuilds the specified WorkSpace.
  You cannot rebuild a WorkSpace unless its state is `AVAILABLE`, `ERROR`, or
  `UNHEALTHY`.
  Rebuilding a WorkSpace is a potentially destructive action that can result
  in the loss of data. For more information, see [Rebuild a
  WorkSpace](https://docs.aws.amazon.com/workspaces/latest/adminguide/reset-workspace.html).
  This operation is asynchronous and returns before the WorkSpaces have been
  completely rebuilt.
  """
  def rebuild_workspaces(client, input, options \\ []) do
    request(client, "RebuildWorkspaces", input, options)
  end

  @doc """
  Removes one or more rules from the specified IP access control group.
  """
  def revoke_ip_rules(client, input, options \\ []) do
    request(client, "RevokeIpRules", input, options)
  end

  @doc """
  Starts the specified WorkSpaces.
  You cannot start a WorkSpace unless it has a running mode of `AutoStop` and
  a state of `STOPPED`.
  """
  def start_workspaces(client, input, options \\ []) do
    request(client, "StartWorkspaces", input, options)
  end

  @doc """
  Stops the specified WorkSpaces.
  You cannot stop a WorkSpace unless it has a running mode of `AutoStop` and
  a state of `AVAILABLE`, `IMPAIRED`, `UNHEALTHY`, or `ERROR`.
  """
  def stop_workspaces(client, input, options \\ []) do
    request(client, "StopWorkspaces", input, options)
  end

  @doc """
  Terminates the specified WorkSpaces.
  Terminating a WorkSpace is a permanent action and cannot be undone. The
  user's data is destroyed. If you need to archive any user data, contact
  Amazon Web Services before terminating the WorkSpace.
  You can terminate a WorkSpace that is in any state except `SUSPENDED`.
  This operation is asynchronous and returns before the WorkSpaces have been
  completely terminated.
  """
  def terminate_workspaces(client, input, options \\ []) do
    request(client, "TerminateWorkspaces", input, options)
  end

  @doc """
  Replaces the current rules of the specified IP access control group with
  the specified rules.
  """
  def update_rules_of_ip_group(client, input, options \\ []) do
    request(client, "UpdateRulesOfIpGroup", input, options)
  end

  # Signs the request with SigV4 and POSTs it to the WorkSpaces endpoint,
  # routing by the "X-Amz-Target" action header (AWS JSON 1.1 protocol).
  # NOTE(review): the error branch below returns {:error, {exception, message}},
  # which this @spec does not cover — the spec or the return shape looks stale;
  # verify against callers.
  @spec request(map(), binary(), map(), list()) ::
          {:ok, Poison.Parser.t | nil, Poison.Response.t} |
          {:error, Poison.Parser.t} |
          {:error, HTTPoison.Error.t}
  defp request(client, action, input, options) do
    client = %{client | service: "workspaces"}
    host = get_host("workspaces", client)
    url = get_url(host, client)

    headers = [{"Host", host},
               {"Content-Type", "application/x-amz-json-1.1"},
               {"X-Amz-Target", "WorkspacesService.#{action}"}]

    payload = Poison.Encoder.encode(input, [])
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)

    case HTTPoison.post(url, payload, headers, options) do
      # Empty 200 body: success with no decodable payload.
      {:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
        {:ok, nil, response}

      {:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
        {:ok, Poison.Parser.parse!(body), response}

      # Non-200: decode the AWS error envelope ("__type" / "message").
      {:ok, _response=%HTTPoison.Response{body: body}} ->
        error = Poison.Parser.parse!(body)
        exception = error["__type"]
        message = error["message"]
        {:error, {exception, message}}

      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # "local" region targets a localhost test endpoint instead of AWS.
  defp get_host(endpoint_prefix, client) do
    if client.region == "local" do
      "localhost"
    else
      "#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
    end
  end

  defp get_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end
end
|
lib/aws/workspaces.ex
| 0.869798
| 0.431884
|
workspaces.ex
|
starcoder
|
defmodule Util.EqualityOperator do
  @moduledoc """
  The `equalif*` method is designed to allow for unordered equality comparison between lists.
  In an ordered comparison, `[1,2,3]` is not considered equal to `[3,2,1]`, but `equali/fy?|form?` would consider those two lists to be equal.
  `equalify|equaliform?` will accept inputs other than maps, tuples, or lists:
  ### Examples
      iex> Morphix.equaliform?(1,1)
      true
      iex> Morphix.equaliform?(DateTime.utc_now(), DateTime.utc_now())
      false
  But it is designed for situations where you have two Enumerables, and you want to see if they have the same elements.
  """
  defmacro __using__(_opts) do
    quote do
      @doc """
      Takes two elements and returns `true` if they are equal, ignoring order for Enumerables.
      Order is also ignored for nested Enumerables.
      ### Examples
          iex> Morphix.equaliform?([1, ["two", :three], %{a: 1, c: "three", e: %{d: 4, b: 2}}], [["two", :three], 1, %{c: "three", a: 1, e: %{b: 2, d: 4}}])
          true
          iex> Morphix.equaliform?([1, "two", :three, %{a: 1, c: "three", e: %{g: 4, b: 2}}], ["two", :three, 1, %{c: "three", a: 1, e: %{b: 2, d: 4}}])
          false
      """
      @spec equaliform?(any(), any()) :: boolean
      def equaliform?(any1, any2) when is_tuple(any1) and is_tuple(any2) do
        equaliform?(Tuple.to_list(any1), Tuple.to_list(any2))
      end

      def equaliform?(any1, any2) do
        equaliform?(both_enumerables?(any1, any2), any1, any2)
      end

      # Deep comparison: recursively sort nested enumerables before comparing.
      defp equaliform?(true, enum1, enum2), do: sort_elem(enum1) == sort_elem(enum2)
      defp equaliform?(false, any1, any2), do: any1 == any2

      @doc """
      Takes two elements and returns `true` if they are equal, ignoring order for Enumerables.
      Order is not ignored for nested Enumerables.
      ### Examples
          iex> Morphix.equalify?([1, ["two", :three], %{a: 1, c: "three", e: %{d: 4, b: 2}}], [["two", :three], 1, %{c: "three", a: 1, e: %{b: 2, d: 4}}])
          true
          iex> Morphix.equalify?([1, ["two", :three], %{a: 1, c: "three", e: %{d: 4, b: 2}}], [[:three, "two"], 1, %{c: "three", a: 1, e: %{b: 2, d: 4}}])
          false
      """
      @spec equalify?(any(), any()) :: boolean
      def equalify?(any1, any2) when is_tuple(any1) and is_tuple(any2) do
        equalify?(Tuple.to_list(any1), Tuple.to_list(any2))
      end

      def equalify?(any1, any2) do
        equalify?(both_enumerables?(any1, any2), any1, any2)
      end

      # Shallow comparison: only the top level is order-insensitive.
      defp equalify?(true, enum1, enum2), do: Enum.sort(enum1) == Enum.sort(enum2)
      defp equalify?(false, any1, any2), do: any1 == any2

      # True when both arguments implement Enumerable.
      defp both_enumerables?(any1, any2) do
        case Enumerable.impl_for(any1) && Enumerable.impl_for(any2) do
          nil -> false
          _ -> true
        end
      end

      # Lists are dispatched on keyword-ness; see sort_elem/2 below.
      defp sort_elem(list) when is_list(list) do
        list
        |> Keyword.keyword?()
        |> sort_elem(list)
      end

      # Maps: recursively sort values, then sort the {k, v} pairs.
      defp sort_elem(map) when is_map(map) do
        map
        |> Enum.reduce(%{}, fn {k, v}, acc -> Map.put(acc, k, sort_elem(v)) end)
        |> Enum.sort()
      end

      # Scalars and anything non-enumerable compare as-is.
      defp sort_elem(elem), do: elem

      # Keyword lists: sort the recursively-sorted VALUES only.
      # NOTE(review): the keys are dropped here, so [a: 1] and [b: 1] deep-compare
      # equal — presumably intentional, but verify against callers.
      # Fix: the original bound `k` without using it (compiler warning);
      # reduce-append also replaced with the equivalent Enum.map/2.
      defp sort_elem(true, list) do
        list
        |> Enum.map(fn {_k, v} -> sort_elem(v) end)
        |> Enum.sort()
      end

      # Plain lists: recursively sort each element, then sort the list.
      defp sort_elem(false, list) do
        list
        |> Enum.map(&sort_elem/1)
        |> Enum.sort()
      end
    end
  end
end
|
lib/util/equality_operator.ex
| 0.909058
| 0.620118
|
equality_operator.ex
|
starcoder
|
defmodule Benchmarks.Proto2.GoogleMessage1 do
  @moduledoc false
  # Benchmark message schema (proto2 syntax). The field numbers, types, and
  # defaults below look machine-generated from a benchmark .proto definition —
  # presumably regenerate rather than hand-edit; verify against the source proto.
  use Protobuf, syntax: :proto2

  field :field1, 1, required: true, type: :string
  field :field9, 9, optional: true, type: :string
  field :field18, 18, optional: true, type: :string
  field :field80, 80, optional: true, type: :bool, default: false
  field :field81, 81, optional: true, type: :bool, default: true
  field :field2, 2, required: true, type: :int32
  field :field3, 3, required: true, type: :int32
  field :field280, 280, optional: true, type: :int32
  field :field6, 6, optional: true, type: :int32, default: 0
  field :field22, 22, optional: true, type: :int64
  field :field4, 4, optional: true, type: :string
  field :field5, 5, repeated: true, type: :fixed64
  field :field59, 59, optional: true, type: :bool, default: false
  field :field7, 7, optional: true, type: :string
  field :field16, 16, optional: true, type: :int32
  field :field130, 130, optional: true, type: :int32, default: 0
  field :field12, 12, optional: true, type: :bool, default: true
  field :field17, 17, optional: true, type: :bool, default: true
  field :field13, 13, optional: true, type: :bool, default: true
  field :field14, 14, optional: true, type: :bool, default: true
  field :field104, 104, optional: true, type: :int32, default: 0
  field :field100, 100, optional: true, type: :int32, default: 0
  field :field101, 101, optional: true, type: :int32, default: 0
  field :field102, 102, optional: true, type: :string
  field :field103, 103, optional: true, type: :string
  field :field29, 29, optional: true, type: :int32, default: 0
  field :field30, 30, optional: true, type: :bool, default: false
  field :field60, 60, optional: true, type: :int32, default: -1
  field :field271, 271, optional: true, type: :int32, default: -1
  field :field272, 272, optional: true, type: :int32, default: -1
  field :field150, 150, optional: true, type: :int32
  field :field23, 23, optional: true, type: :int32, default: 0
  field :field24, 24, optional: true, type: :bool, default: false
  field :field25, 25, optional: true, type: :int32, default: 0
  field :field15, 15, optional: true, type: Benchmarks.Proto2.GoogleMessage1SubMessage
  field :field78, 78, optional: true, type: :bool
  field :field67, 67, optional: true, type: :int32, default: 0
  field :field68, 68, optional: true, type: :int32
  field :field128, 128, optional: true, type: :int32, default: 0
  field :field129, 129, optional: true, type: :string, default: "xxxxxxxxxxxxxxxxxxxxx"
  field :field131, 131, optional: true, type: :int32, default: 0
end
defmodule Benchmarks.Proto2.GoogleMessage1SubMessage do
  @moduledoc false
  # Nested benchmark message referenced by GoogleMessage1.field15 (proto2
  # syntax). Looks machine-generated — presumably regenerate rather than
  # hand-edit; verify against the source proto.
  use Protobuf, syntax: :proto2

  field :field1, 1, optional: true, type: :int32, default: 0
  field :field2, 2, optional: true, type: :int32, default: 0
  field :field3, 3, optional: true, type: :int32, default: 0
  field :field15, 15, optional: true, type: :string
  field :field12, 12, optional: true, type: :bool, default: true
  field :field13, 13, optional: true, type: :int64
  field :field14, 14, optional: true, type: :int64
  field :field16, 16, optional: true, type: :int32
  field :field19, 19, optional: true, type: :int32, default: 2
  field :field20, 20, optional: true, type: :bool, default: true
  field :field28, 28, optional: true, type: :bool, default: true
  field :field21, 21, optional: true, type: :fixed64
  field :field22, 22, optional: true, type: :int32
  field :field23, 23, optional: true, type: :bool, default: false
  field :field206, 206, optional: true, type: :bool, default: false
  field :field203, 203, optional: true, type: :fixed32
  field :field204, 204, optional: true, type: :int32
  field :field205, 205, optional: true, type: :string
  field :field207, 207, optional: true, type: :uint64
  field :field300, 300, optional: true, type: :uint64
end
|
bench/lib/datasets/google_message1/proto2/benchmark_message1_proto2.pb.ex
| 0.590543
| 0.457016
|
benchmark_message1_proto2.pb.ex
|
starcoder
|
defmodule ExAws.CloudwatchLogs do
@moduledoc """
Documentation for ExAwsCloudwatchLogs.
"""
import ExAws.Utils, only: [camelize_keys: 1]
@namespace "Logs_20140328"
@doc """
Create a log group with the given name.

## Examples

    ExAws.CloudwatchLogs.create_log_group("my-group")
    |> ExAws.request
"""
def create_log_group(group_name) do
  payload = %{"logGroupName" => group_name}
  request(:create_log_group, payload)
end
@doc """
Create a log stream with the given name for the given log group.
## Examples
ExAws.CloudwatchLogs.create_log_stream("my-group", "my-stream")
|> ExAws.request
"""
def create_log_stream(group_name, stream_name) do
data = %{"logGroupName" => group_name, "logStreamName" => stream_name}
request(:create_log_stream, data)
end
@doc """
Delete the log group with the given name as well as all related log streams
and log events.
## Examples
ExAws.CloudwatchLogs.delete_log_group("my-group")
|> ExAws.request
"""
def delete_log_group(group_name) do
request(:delete_log_group, %{"logGroupName" => group_name})
end
@doc """
Delete the log stream with the given name in the given group.
## Examples
ExAws.CloudwatchLogs.delete_log_stream("my-group", "my-stream")
|> ExAws.request
"""
def delete_log_stream(group_name, stream_name) do
data = %{"logGroupName" => group_name, "logStreamName" => stream_name}
request(:delete_log_stream, data)
end
@doc """
List log groups.
## Examples
ExAws.CloudwatchLogs.describe_log_groups()
|> ExAws.request
"""
def describe_log_groups(opts \\ []) do
data =
opts
|> Map.new
|> camelize_keys
request(:describe_log_groups, data)
end
@doc """
List the log streams for the given log group.
## Examples
ExAws.CloudwatchLogs.describe_log_streams("my-group")
|> ExAws.request
"""
def describe_log_streams(group_name, opts \\ %{}) do
data =
opts
|> Map.new
|> camelize_keys
|> Map.merge(%{"logGroupName" => group_name})
request(:describe_log_streams, data)
end
@doc """
List the log events from the given log stream.
## Examples
ExAws.CloudwatchLogs.get_log_events("my-group", "my-stream")
|> ExAws.request
"""
def get_log_events(group_name, stream_name, opts \\ []) do
data =
opts
|> camelize_keys
|> Map.merge(%{"logGroupName" => group_name, "logStreamName" => stream_name})
request(:get_log_events, data)
end
@doc """
Write the log events to the given log stream.
## Examples
ExAws.CloudwatchLogs.put_log_events("my-group", "my-stream", events)
|> ExAws.request
"""
def put_log_events(group_name, stream_name, events, opts \\ []) do
data =
opts
|> Map.new
|> camelize_keys
|> Map.merge(%{"logEvents" => events, "logGroupName" => group_name, "logStreamName" => stream_name})
request(:put_log_events, data)
end
defp request(action, data, opts \\ %{}) do
operation =
action
|> Atom.to_string
|> Macro.camelize
ExAws.Operation.JSON.new(:logs, %{
data: data,
headers: [
{"x-amz-target", "#{@namespace}.#{operation}"},
{"content-type", "application/x-amz-json-1.1"}
]
} |> Map.merge(opts))
end
end
|
lib/ex_aws_cloudwatch_logs.ex
| 0.752831
| 0.437643
|
ex_aws_cloudwatch_logs.ex
|
starcoder
|
defmodule Readability do
  @moduledoc """
  Readability library for extracting & curating articles.
  ## Example
  ```elixir
  @type html :: binary
  # Just pass url
  %Readability.Summary{title: title, authors: authors, article_html: article} = Readability.summarize(url)
  # Extract title
  Readability.title(html)
  # Extract authors.
  Readability.authors(html)
  # Extract only text from article
  article = html
  |> Readability.article
  |> Readability.readable_text
  # Extract article with transformed html
  article = html
  |> Readability.article
  |> Readability.raw_html
  ```
  """
  alias Readability.TitleFinder
  alias Readability.AuthorFinder
  alias Readability.ArticleBuilder
  alias Readability.Summary
  alias Readability.Helper

  @default_options [
    retry_length: 250,
    min_text_length: 25,
    remove_unlikely_candidates: true,
    weight_classes: true,
    clean_conditionally: true,
    remove_empty_nodes: true,
    min_image_width: 130,
    min_image_height: 80,
    ignore_image_format: [],
    blacklist: nil,
    whitelist: nil,
    page_url: nil
  ]

  @regexes [
    unlikely_candidate:
      ~r/combx|comment|community|disqus|extra|foot|header|hidden|lightbox|modal|menu|meta|nav|remark|rss|shoutbox|sidebar|sponsor|ad-break|agegate|pagination|pager|popup/i,
    ok_maybe_its_a_candidate: ~r/and|article|body|column|main|shadow/i,
    positive: ~r/article|body|content|entry|hentry|main|page|pagination|post|text|blog|story/i,
    negative:
      ~r/hidden|^hid|combx|comment|com-|contact|foot|footer|footnote|link|masthead|media|meta|outbrain|promo|related|scroll|shoutbox|sidebar|sponsor|shopping|tags|tool|utility|widget/i,
    div_to_p_elements: ~r/<(a|blockquote|dl|div|img|ol|p|pre|table|ul)/i,
    replace_brs: ~r/(<br[^>]*>[ \n\r\t]*){2,}/i,
    replace_fonts: ~r/<(\/?)font[^>]*>/i,
    replace_xml_version: ~r/<\?xml.*\?>/i,
    normalize: ~r/\s{2,}/,
    video: ~r/\/\/(www\.)?(dailymotion|youtube|youtube-nocookie|player\.vimeo)\.com/i,
    protect_attrs: ~r/^(?!id|rel|for|summary|title|href|src|alt|srcdoc)/i,
    img_tag_src: ~r/(<img.*src=['"])([^'"]+)(['"][^>]*>)/Ui
  ]

  # MIME types considered markup (HTML/XML variants), optionally with charset.
  @markup_mimes ~r/^(application|text)\/[a-z\-_\.\+]+ml(;\s*charset=.*)?$/i

  @type html_tree :: tuple | list
  @type raw_html :: binary
  @type url :: binary
  @type options :: list
  @type headers :: list[tuple]

  @doc """
  summarize the primary readable content of a webpage.
  """
  @spec summarize(url, options) :: {atom(), Summary.t() | HTTPoison.Error.t()}
  def summarize(url, opts \\ []) when is_bitstring(url) do
    opts = Keyword.merge(opts, page_url: url)
    httpoison_options = Application.get_env(:readability, :httpoison_options, [])

    HTTPoison.get(url, [], httpoison_options)
    |> parse_markup(opts)
  end

  defp parse_markup({:error, error}, _opts) do
    {:error, error}
  end

  defp parse_markup({:ok, response}, opts) do
    %{status_code: _, body: raw, headers: headers} = response
    # FIX: was `[page_url: url] = opts`, which raised MatchError whenever a
    # caller passed any option besides the URL to `summarize/2` (the keyword
    # list then had more than one entry). Fetch the key instead.
    url = Keyword.fetch!(opts, :page_url)

    case is_response_markup(headers) do
      true ->
        html_tree =
          raw
          |> Helper.normalize(url: url)

        article_tree =
          html_tree
          |> ArticleBuilder.build(opts)

        {:ok,
         %Summary{
           title: title(html_tree),
           authors: authors(html_tree),
           description: description(html_tree),
           article_html: readable_html(article_tree),
           article_text: readable_text(article_tree)
         }}

      _ ->
        # Non-markup response: return the raw body untouched.
        {:ok, %Summary{title: nil, authors: nil, article_html: nil, article_text: raw}}
    end
  end

  @doc """
  Summarize an HTML document that has already been fetched.
  """
  @spec summarize_html(binary, url, options) :: Summary.t()
  def summarize_html(html_body, url, opts \\ []) do
    html_tree =
      html_body
      |> Helper.normalize(url: url)

    article_tree =
      html_tree
      |> ArticleBuilder.build(opts)

    %Summary{
      title: title(html_tree),
      authors: authors(html_tree),
      description: description(html_tree),
      article_html: readable_html(article_tree),
      article_text: readable_text(article_tree)
    }
  end

  @doc """
  Extract MIME Type from headers
  ## Example
  iex> mime = Readability.mime(headers_list)
  "text/html"
  """
  @spec mime(headers) :: String.t()
  def mime(headers \\ []) do
    headers
    |> Enum.find(
      # default
      {"Content-Type", "text/plain"},
      fn {key, _} -> String.downcase(key) == "content-type" end
    )
    |> elem(1)
  end

  @doc """
  Return true if Content-Type in provided headers list is a markup type,
  else false
  ## Example
  iex> Readability.is_response_markup?([{"Content-Type", "text/html"}])
  true
  """
  @spec is_response_markup(headers) :: boolean
  def is_response_markup(headers) do
    mime(headers) =~ @markup_mimes
  end

  @doc """
  Extract title
  ## Example
  iex> title = Readability.title(html_str)
  "Some title in html"
  """
  @spec title(binary | html_tree) :: binary
  def title(raw_html) when is_binary(raw_html) do
    raw_html
    |> Helper.normalize()
    |> title
  end

  def title(html_tree), do: TitleFinder.title(html_tree)

  @doc """
  Extract the document description (e.g. meta description).
  """
  def description(raw_html) when is_binary(raw_html) do
    raw_html
    |> Helper.normalize()
    |> description
  end

  def description(html_tree), do: Readability.DescriptionFinder.description(html_tree)

  @doc """
  Extract authors
  ## Example
  iex> authors = Readability.authors(html_str)
  ["<NAME>", "chrismccord"]
  """
  @spec authors(binary | html_tree) :: list[binary]
  def authors(html) when is_binary(html), do: html |> parse |> authors
  def authors(html_tree), do: AuthorFinder.find(html_tree)

  @doc """
  Using a variety of metrics (content score, classname, element types), find the content that is
  most likely to be the stuff a user wants to read
  ## Example
  iex> article_tree = Readability.article(html_str)
  # returns article that is tuple
  """
  @spec article(binary, options) :: html_tree
  def article(raw_html, opts \\ []) do
    opts = Keyword.merge(@default_options, opts)

    raw_html
    |> Helper.normalize()
    |> ArticleBuilder.build(opts)
  end

  @doc """
  return attributes, tags cleaned html
  """
  @spec readable_html(html_tree) :: binary
  def readable_html(html_tree) do
    html_tree
    |> Helper.remove_attrs(regexes(:protect_attrs))
    |> raw_html
  end

  @doc """
  return only text binary from html_tree
  """
  @spec readable_text(html_tree) :: binary
  def readable_text(html_tree) do
    # TODO: Remove image caption when extract only text
    tags_to_br = ~r/<\/(p|div|article|h\d)/i
    html_str = html_tree |> raw_html

    Regex.replace(tags_to_br, html_str, &"\n#{&1}")
    |> Floki.parse_document!()
    |> Floki.text()
    |> String.trim()
  end

  @doc """
  return raw html binary from html_tree
  """
  @spec raw_html(html_tree) :: binary
  def raw_html(html_tree) do
    html_tree |> Floki.raw_html(encode: false)
  end

  # Parse a raw HTML binary into a Floki document tree.
  def parse(raw_html) when is_binary(raw_html), do: Floki.parse_document!(raw_html)

  # Look up one of the precompiled regexes by key.
  def regexes(key), do: @regexes[key]

  def default_options, do: @default_options
end
|
lib/readability.ex
| 0.818156
| 0.654177
|
readability.ex
|
starcoder
|
defmodule CCSP.Chapter5.ListCompression do
  alias __MODULE__, as: T

  @moduledoc """
  Corresponds to CCSP in Python, Chapter 5, titled "Genetic Algorithms"
  NOTE: This is very slow to find a solution... along the lines of a couple of hours.
  It does not terminate early and runs through all generations. Maybe an implementation issue?
  One potential solution:
  ["Narine","Melanie","Daniel","Michael","Joshua","Lisa","Dean","Brian","Murat","David","Sajid","Wei","Sarah"]
  """

  @type t :: %T{
          lst: list(any)
        }

  defstruct [
    :lst
  ]

  # Fixed set of names whose ordering the genetic algorithm tries to optimize
  # for maximum compressibility.
  @people ["Michael", "Sarah", "Joshua", "Narine", "David",
    "Sajid", "Melanie", "Daniel", "Wei", "Dean", "Brian", "Murat", "Lisa"]

  @doc "Wrap a list in a ListCompression struct."
  @spec new(list(any)) :: t
  def new(items), do: %T{lst: items}

  @doc "Build an instance holding the people list in a random order."
  @spec random_instance() :: t
  def random_instance do
    @people
    |> Enum.shuffle()
    |> new()
  end

  @doc "Size in bytes of the zlib-compressed external-term encoding of the list."
  @spec bytes_compressed(t) :: non_neg_integer
  def bytes_compressed(%T{lst: items}) do
    items
    |> :erlang.term_to_binary()
    |> :zlib.compress()
    |> byte_size()
  end
end
defimpl CCSP.Chapter5.Chromosome, for: CCSP.Chapter5.ListCompression do
alias CCSP.Chapter5.ListCompression
# NOTE(review): `__MODULE__.t()` points at this defimpl module, which declares
# no `t` type; presumably `ListCompression.t()` was intended — confirm.
@type t :: __MODULE__.t()
# Fitness is the reciprocal of the compressed byte size, so orderings that
# compress smaller score higher.
@spec fitness(t) :: float
def fitness(lc) do
1 / ListCompression.bytes_compressed(lc)
end
# Crossover: pick two random indices; take lc1's element at idx1 (l1) and
# lc2's element at idx2 (l2). In each child, first overwrite the existing
# occurrence of the incoming element (so the list stays a permutation with no
# duplicates), then place the incoming element at the chosen index. The
# statement order matters: the find_index/replace_at repair must run before
# the final replace_at.
@spec crossover(t, t) :: {t, t}
def crossover(lc1, lc2) do
[idx1, idx2] = Enum.take_random(0..(length(lc1.lst) - 1), 2)
l1 = Enum.at(lc1.lst, idx1)
l2 = Enum.at(lc2.lst, idx2)
new_lc1_lst =
lc1.lst
|> List.replace_at(
Enum.find_index(lc1.lst, &(&1 == l2)),
Enum.at(lc1.lst, idx2)
)
|> List.replace_at(idx2, l2)
new_lc2_lst =
lc2.lst
|> List.replace_at(
Enum.find_index(lc2.lst, &(&1 == l1)),
Enum.at(lc2.lst, idx1)
)
|> List.replace_at(idx1, l1)
{
%ListCompression{lc1 | :lst => new_lc1_lst},
%ListCompression{lc2 | :lst => new_lc2_lst}
}
end
# Mutation: swap the elements at two random indices, preserving the
# permutation property.
@spec mutate(t) :: t
def mutate(lc) do
[idx1, idx2] = Enum.take_random(0..(length(lc.lst) - 1), 2)
new_lst =
lc.lst
|> List.replace_at(idx1, Enum.at(lc.lst, idx2))
|> List.replace_at(idx2, Enum.at(lc.lst, idx1))
%ListCompression{lc | :lst => new_lst}
end
end
defimpl Inspect, for: CCSP.Chapter5.ListCompression do
  alias CCSP.Chapter5.ListCompression

  # Render the candidate ordering and its compressed size.
  def inspect(lc, _opts) do
    # FIX: interpolating the list directly ("#{lc.lst}") concatenated the
    # names with no separator (and raises for non-chardata elements);
    # join them explicitly instead.
    "Order: #{Enum.join(lc.lst, ", ")} Bytes: #{ListCompression.bytes_compressed(lc)}"
  end
end
defimpl String.Chars, for: CCSP.Chapter5.ListCompression do
  alias CCSP.Chapter5.ListCompression

  # Render the candidate ordering and its compressed size.
  def to_string(lc) do
    # FIX: interpolating the list directly ("#{lc.lst}") concatenated the
    # names with no separator (and raises for non-chardata elements);
    # join them explicitly instead.
    "Order: #{Enum.join(lc.lst, ", ")} Bytes: #{ListCompression.bytes_compressed(lc)}"
  end
end
|
lib/ccsp/chapter5/list_compression.ex
| 0.7865
| 0.535402
|
list_compression.ex
|
starcoder
|
defmodule ReviewScraper.DealerRater.Scraper do
  @moduledoc """
  Module responsible for parsing the HTML document.
  """
  alias ReviewScraper.DealerRater.Review

  @doc """
  Find the reviews in an HTML document and parse into the `ReviewScraper.DealerRater.Review` struct.
  """
  @spec get_reviews(String.t()) :: [Review.t()]
  def get_reviews(document) do
    document
    |> Floki.parse_document!()
    |> Floki.find("#reviews .review-entry")
    |> Enum.map(&parse_review/1)
  end

  # Assemble one Review struct from a single `.review-entry` subtree.
  defp parse_review(review_html_tree) do
    optional_ratings = parse_optional_ratings(review_html_tree)

    %Review{
      title: parse_title(review_html_tree),
      description: parse_description(review_html_tree),
      reviewer_name: parse_reviewer_name(review_html_tree),
      date: parse_review_date(review_html_tree),
      dealership_rating: parse_dealership_rating(review_html_tree),
      # FIX(idiom): `Map.get/3` already defaults to nil — the explicit nil
      # third argument was redundant.
      customer_service_rating: Map.get(optional_ratings, "Customer Service"),
      friendliness_rating: Map.get(optional_ratings, "Friendliness"),
      pricing_rating: Map.get(optional_ratings, "Pricing"),
      overall_experience_rating: Map.get(optional_ratings, "Overall Experience"),
      recommend_dealer?: parse_dealer_recommendation(review_html_tree)
    }
  end

  # Review headline, with surrounding quote characters stripped.
  defp parse_title(review_html_tree) do
    review_html_tree
    |> Floki.find("h3")
    |> Floki.text()
    |> String.replace("\"", "")
  end

  # Review body text, with quotes and newlines removed.
  defp parse_description(review_html_tree) do
    review_html_tree
    |> Floki.find("p")
    |> Floki.text()
    |> String.replace(["\"", "\n", "\r"], "")
    |> String.trim()
  end

  # Reviewer display name; the page prefixes it with "- ".
  defp parse_reviewer_name(review_html_tree) do
    review_html_tree
    |> Floki.find("span.notranslate")
    |> Floki.text()
    |> String.replace("- ", "")
    |> String.trim()
  end

  defp parse_review_date(review_html_tree) do
    review_html_tree
    |> Floki.find(".review-date div:first-child")
    |> Floki.text()
    |> String.trim()
  end

  # Overall dealership score, encoded in a `rating-N` CSS class.
  defp parse_dealership_rating(review_html_tree) do
    review_html_tree
    |> Floki.find(".review-date .dealership-rating .rating-static:first-child")
    |> Floki.attribute("class")
    |> extract_rating_from_css_classes()
  end

  # Build a %{"Rating Name" => integer | nil} map from the optional
  # per-category rating rows.
  defp parse_optional_ratings(review_html_tree) do
    review_html_tree
    |> Floki.find(".review-ratings-all .table .tr")
    |> Map.new(fn rating_html ->
      rating_name =
        Floki.find(rating_html, "div.small-text")
        |> Floki.text()
        |> String.trim()

      rating_value =
        Floki.find(rating_html, "div.rating-static-indv")
        |> Floki.attribute("class")
        |> extract_rating_from_css_classes()

      {rating_name, rating_value}
    end)
  end

  # True when the last ratings row contains "YES" (recommend dealer).
  defp parse_dealer_recommendation(review_html_tree) do
    review_html_tree
    |> Floki.find(".review-ratings-all .table .tr:last-child")
    |> Floki.text()
    |> String.trim()
    |> String.upcase()
    |> String.contains?("YES")
  end

  # Accept either a list of class strings or a single joined string.
  defp extract_rating_from_css_classes(css_classes) when is_list(css_classes) do
    css_classes
    |> Enum.join()
    |> extract_rating_from_css_classes()
  end

  # Pull the digit out of a `rating-N` class; nil when absent.
  defp extract_rating_from_css_classes(css_classes) do
    ~r/rating-(\d)/
    |> Regex.run(css_classes, capture: :all_but_first)
    |> case do
      nil -> nil
      [rating] -> String.to_integer(rating)
    end
  end
end
|
lib/dealer_rater/scraper.ex
| 0.722918
| 0.41941
|
scraper.ex
|
starcoder
|
defmodule FIQLEx do
  @moduledoc """
  [FIQL](http://tools.ietf.org/html/draft-nottingham-atompub-fiql-00) (Feed Item Query Language)
  is a URI-friendly syntax for expressing filters.
  FIQL looks like this:
  ```
  fiql = "author.age=ge=25;author.name==*Doe"
  ```
  Using this module you will be able to parse a FIQL string and to build a query for any
  system (SQL, Elasticsearch, etc...) from it.
  Given a FIQL string like:
  ```
  fiql = "author.age=ge=25;author.name==*Doe"
  ```
  Pass it to the `parse/1` or `parse!/1` functions to retrieve an AST of the FIQL string:
  ```
  {:ok, ast} = FIQLEx.parse(fiql)
  ```
  Then you can use this AST to build you own query for your system or use our built-in
  query builders like `FIQLEx.QueryBuilders.SQLQueryBuilder`:
  ```
  {:ok, sql_query} = FIQLEx.build_query(ast, FIQLEx.QueryBuilders.SQLQueryBuilder, table: "author")
  ```
  Here, `sql_query` is `SELECT * FROM author WHERE (author.age >= 25 AND author.name LIKE '%Doe')`.
  You can use your own query builder by providing your own module that uses `FIQLEx.QueryBuilder`
  as second argument of `build_query/3`.
  """

  @type ast() :: any()

  @doc """
  Parses the FIQL string and returns an AST representation of the query to be built to
  any other query (SQL, Elasticsearch) with the `build_query/3` function.
  Returns `{:ok, ast}` if everything is fine and `{:error, reason}` in case of error in the
  FIQL.
  """
  @spec parse(binary) :: {:ok, ast()} | {:error, any()}
  def parse(str) do
    with {:ok, tokens, _end_line} <- str |> to_charlist() |> :fiql_lexer.string(),
         {:ok, ast} <- :fiql_parser.parse(tokens) do
      {:ok, ast}
    else
      # Lexer errors come back as 3-tuples; normalize both shapes to {:error, reason}.
      {_, reason, _} ->
        {:error, reason}

      {:error, reason} ->
        {:error, reason}
    end
  end

  @doc """
  Same as `parse/1` but returns the AST or raises an exception.
  """
  @spec parse!(binary) :: ast()
  def parse!(str) do
    case parse(str) do
      {:ok, ast} -> ast
      # NOTE(review): this throws rather than raises, despite the doc — kept
      # as-is because callers may `catch` it; confirm before changing.
      {:error, err} -> throw(err)
    end
  end

  @doc """
  Use an AST to build a query in the way you want. For instance you could create a
  query for Elasticsearch from a FIQL AST, or use the `FIQLEx.QueryBuilders.SQLQueryBuilder` module
  to build an SQL query from a FIQL AST.
  Parameters are:
  * `ast`: The AST to transform to a query for another system
  * `module`: The module to use for the AST traversal
  * `opts`: Options you want to pass to the `init/2` function of your `module`
  This function returns `{:ok, query}` with your created query if everything is fine, or
  `{:error, reason}` if there is something wrong.
  ```
  query = "author.age=ge=25;author.name==*Doe"
  {:ok, ast} = FIQLEx.parse(query)
  {:ok, query} = FIQLEx.build_query(ast, MyQueryBuilder)
  ```
  See the documentation of the `FIQLEx.QueryBuilder` module to learn more about the AST
  traversal.
  """
  @spec build_query(ast(), atom(), Keyword.t()) :: {:ok, any()} | {:error, any()}
  def build_query(ast, module, opts \\ []) do
    # FIX(idiom): the callback module and function names are statically known,
    # so plain qualified calls replace `apply/3` throughout this module.
    state = module.init(ast, opts)

    with {:ok, state} <- run_ast(ast, ast, module, state) do
      module.build(ast, state)
    else
      {:error, err} -> {:error, err}
    end
  end

  @doc """
  This function will go deeper in the ast traversal.
  Parameters are:
  * `curr_ast`: The AST we want to go deeper with
  * `ast`: The global AST
  * `module`: The module to use for the traversal
  * `state`: The current state of your query builder
  The function returns `{:ok, state}` if everything is fine, and `{:error, reason}`
  if there is an error
  """
  @spec handle_ast(ast(), ast(), atom(), any()) :: {:ok, any()} | {:error, any()}
  def handle_ast(curr_ast, ast, module, state) do
    run_ast(curr_ast, ast, module, state)
  end

  @doc """
  Same as `handle_ast/4` but returns the `state` or raises an exception.
  """
  @spec handle_ast!(ast(), ast(), atom(), any()) :: any()
  def handle_ast!(curr_ast, ast, module, state) do
    case handle_ast(curr_ast, ast, module, state) do
      {:ok, result} -> result
      # NOTE(review): throws rather than raises (see parse!/1).
      {:error, err} -> throw(err)
    end
  end

  # Dispatch each AST node shape to the corresponding builder callback.
  defp run_ast({:or_op, exp1, exp2}, ast, module, state) do
    module.handle_or_expression(exp1, exp2, ast, state)
  end

  defp run_ast({:and_op, exp1, exp2}, ast, module, state) do
    module.handle_and_expression(exp1, exp2, ast, state)
  end

  defp run_ast({:op, exp}, ast, module, state) do
    module.handle_expression(exp, ast, state)
  end

  defp run_ast({:selector, selector_name}, ast, module, state) do
    module.handle_selector(selector_name, ast, state)
  end

  defp run_ast({:selector_and_value, selector_name, :equal, value}, ast, module, state) do
    module.handle_selector_and_value(selector_name, :equal, value, ast, state)
  end

  defp run_ast({:selector_and_value, selector_name, :not_equal, value}, ast, module, state) do
    module.handle_selector_and_value(selector_name, :not_equal, value, ast, state)
  end

  defp run_ast(
         {:selector_and_value, selector_name, {:comparison, comparison}, value},
         ast,
         module,
         state
       ) do
    module.handle_selector_and_value_with_comparison(
      selector_name,
      comparison,
      value,
      ast,
      state
    )
  end
end
|
lib/fiql_ex.ex
| 0.863909
| 0.904777
|
fiql_ex.ex
|
starcoder
|
defmodule AWS.IoT do
@moduledoc """
IoT
IoT provides secure, bi-directional communication between Internet-connected
devices (such as sensors, actuators, embedded devices, or smart appliances) and
the Amazon Web Services cloud.
You can discover your custom IoT-Data endpoint to communicate with, configure
rules for data processing and integration with other services, organize
resources associated with each device (Registry), configure logging, and create
and manage policies and credentials to authenticate devices.
The service endpoints that expose this API are listed in [Amazon Web Services IoT Core Endpoints and
Quotas](https://docs.aws.amazon.com/general/latest/gr/iot-core.html). You must
use the endpoint for the region that has the resources you want to access.
The service name used by [Amazon Web Services Signature Version 4](https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html) to
sign the request is: *execute-api*.
For more information about how IoT works, see the [Developer Guide](https://docs.aws.amazon.com/iot/latest/developerguide/aws-iot-how-it-works.html).
For information about how to use the credentials provider for IoT, see
[Authorizing Direct Calls to Amazon Web Services Services](https://docs.aws.amazon.com/iot/latest/developerguide/authorizing-direct-aws.html).
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2015-05-28",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "iot",
global?: false,
protocol: "rest-json",
service_id: "IoT",
signature_version: "v4",
signing_name: "execute-api",
target_prefix: nil
}
end
@doc """
Accepts a pending certificate transfer.
The default state of the certificate is INACTIVE.
To check for pending certificate transfers, call `ListCertificates` to enumerate
your certificates.
Requires permission to access the
[AcceptCertificateTransfer](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def accept_certificate_transfer(%Client{} = client, certificate_id, input, options \\ []) do
url_path = "/accept-certificate-transfer/#{AWS.Util.encode_uri(certificate_id)}"
headers = []
{query_params, input} =
[
{"setAsActive", "setAsActive"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:patch,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Adds a thing to a billing group.
Requires permission to access the
[AddThingToBillingGroup](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def add_thing_to_billing_group(%Client{} = client, input, options \\ []) do
url_path = "/billing-groups/addThingToBillingGroup"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Adds a thing to a thing group.
Requires permission to access the
[AddThingToThingGroup](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def add_thing_to_thing_group(%Client{} = client, input, options \\ []) do
url_path = "/thing-groups/addThingToThingGroup"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Associates a group with a continuous job.
The following criteria must be met:
* The job must have been created with the `targetSelection` field
set to "CONTINUOUS".
* The job status must currently be "IN_PROGRESS".
* The total number of targets associated with a job must not exceed
100.
Requires permission to access the
[AssociateTargetsWithJob](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def associate_targets_with_job(%Client{} = client, job_id, input, options \\ []) do
url_path = "/jobs/#{AWS.Util.encode_uri(job_id)}/targets"
headers = []
{query_params, input} =
[
{"namespaceId", "namespaceId"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Attaches the specified policy to the specified principal (certificate or other
credential).
Requires permission to access the
[AttachPolicy](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def attach_policy(%Client{} = client, policy_name, input, options \\ []) do
url_path = "/target-policies/#{AWS.Util.encode_uri(policy_name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Attaches the specified policy to the specified principal (certificate or other
credential).
**Note:** This action is deprecated. Please use `AttachPolicy` instead.
Requires permission to access the
[AttachPrincipalPolicy](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def attach_principal_policy(%Client{} = client, policy_name, input, options \\ []) do
url_path = "/principal-policies/#{AWS.Util.encode_uri(policy_name)}"
{headers, input} =
[
{"principal", "x-amzn-iot-principal"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Associates a Device Defender security profile with a thing group or this
account.
Each thing group or account can have up to five security profiles associated
with it.
Requires permission to access the
[AttachSecurityProfile](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def attach_security_profile(%Client{} = client, security_profile_name, input, options \\ []) do
url_path = "/security-profiles/#{AWS.Util.encode_uri(security_profile_name)}/targets"
headers = []
{query_params, input} =
[
{"securityProfileTargetArn", "securityProfileTargetArn"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Attaches the specified principal to the specified thing.
A principal can be X.509 certificates, IAM users, groups, and roles, Amazon
Cognito identities or federated identities.
Requires permission to access the
[AttachThingPrincipal](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def attach_thing_principal(%Client{} = client, thing_name, input, options \\ []) do
url_path = "/things/#{AWS.Util.encode_uri(thing_name)}/principals"
{headers, input} =
[
{"principal", "x-amzn-principal"}
]
|> Request.build_params(input)
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Cancels a mitigation action task that is in progress.
If the task is not in progress, an InvalidRequestException occurs.
Requires permission to access the
[CancelAuditMitigationActionsTask](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def cancel_audit_mitigation_actions_task(%Client{} = client, task_id, input, options \\ []) do
url_path = "/audit/mitigationactions/tasks/#{AWS.Util.encode_uri(task_id)}/cancel"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Cancels an audit that is in progress.
The audit can be either scheduled or on demand. If the audit isn't in progress,
an "InvalidRequestException" occurs.
Requires permission to access the
[CancelAuditTask](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def cancel_audit_task(%Client{} = client, task_id, input, options \\ []) do
url_path = "/audit/tasks/#{AWS.Util.encode_uri(task_id)}/cancel"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Cancels a pending transfer for the specified certificate.
**Note** Only the transfer source account can use this operation to cancel a
transfer. (Transfer destinations can use `RejectCertificateTransfer` instead.)
After transfer, IoT returns the certificate to the source account in the
INACTIVE state. After the destination account has accepted the transfer, the
transfer cannot be cancelled.
After a certificate transfer is cancelled, the status of the certificate changes
from PENDING_TRANSFER to INACTIVE.
Requires permission to access the
[CancelCertificateTransfer](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def cancel_certificate_transfer(%Client{} = client, certificate_id, input, options \\ []) do
url_path = "/cancel-certificate-transfer/#{AWS.Util.encode_uri(certificate_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:patch,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Cancels a Device Defender ML Detect mitigation action.
Requires permission to access the
[CancelDetectMitigationActionsTask](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def cancel_detect_mitigation_actions_task(%Client{} = client, task_id, input, options \\ []) do
url_path = "/detect/mitigationactions/tasks/#{AWS.Util.encode_uri(task_id)}/cancel"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Cancels a job.
Requires permission to access the
[CancelJob](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def cancel_job(%Client{} = client, job_id, input, options \\ []) do
url_path = "/jobs/#{AWS.Util.encode_uri(job_id)}/cancel"
headers = []
{query_params, input} =
[
{"force", "force"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Cancels the execution of a job for a given thing.
Requires permission to access the
[CancelJobExecution](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def cancel_job_execution(%Client{} = client, job_id, thing_name, input, options \\ []) do
url_path =
"/things/#{AWS.Util.encode_uri(thing_name)}/jobs/#{AWS.Util.encode_uri(job_id)}/cancel"
headers = []
{query_params, input} =
[
{"force", "force"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Clears the default authorizer.
Requires permission to access the
[ClearDefaultAuthorizer](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def clear_default_authorizer(%Client{} = client, input, options \\ []) do
url_path = "/default-authorizer"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Confirms a topic rule destination.
When you create a rule requiring a destination, IoT sends a confirmation message
to the endpoint or base address you specify. The message includes a token which
you pass back when calling `ConfirmTopicRuleDestination` to confirm that you own
or have access to the endpoint.
Requires permission to access the
[ConfirmTopicRuleDestination](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def confirm_topic_rule_destination(%Client{} = client, confirmation_token, options \\ []) do
url_path = "/confirmdestination/#{AWS.Util.encode_multi_segment_uri(confirmation_token)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Creates a Device Defender audit suppression.
Requires permission to access the
[CreateAuditSuppression](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_audit_suppression(%Client{} = client, input, options \\ []) do
  # POST /audit/suppressions/create with the suppression definition as the body.
  Request.request_rest(client, metadata(), :post, "/audit/suppressions/create", [], [], input, options, nil)
end
@doc """
Creates an authorizer.
Requires permission to access the
[CreateAuthorizer](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_authorizer(%Client{} = client, authorizer_name, input, options \\ []) do
  # POST /authorizer/{authorizerName} with the authorizer config as the body.
  path = "/authorizer/#{AWS.Util.encode_uri(authorizer_name)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Creates a billing group.
Requires permission to access the
[CreateBillingGroup](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_billing_group(%Client{} = client, billing_group_name, input, options \\ []) do
  # POST /billing-groups/{billingGroupName} with the group properties as the body.
  path = "/billing-groups/#{AWS.Util.encode_uri(billing_group_name)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Creates an X.509 certificate using the specified certificate signing request.
**Note:** The CSR must include a public key that is either an RSA key with a
length of at least 2048 bits or an ECC key from NIST P-256, NIST P-384, or NIST
P-512 curves. For supported certificates, consult [ Certificate signing algorithms supported by
IoT](https://docs.aws.amazon.com/iot/latest/developerguide/x509-client-certs.html#x509-cert-algorithms).
**Note:** Reusing the same certificate signing request (CSR) results in a
distinct certificate.
Requires permission to access the
[CreateCertificateFromCsr](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
You can create multiple certificates in a batch by creating a directory, copying
multiple .csr files into that directory, and then specifying that directory on
the command line. The following commands show how to create a batch of
certificates given a batch of CSRs.
Assuming a set of CSRs are located inside of the directory my-csr-directory:
On Linux and OS X, the command is:
$ ls my-csr-directory/ | xargs -I {} aws iot create-certificate-from-csr
--certificate-signing-request file://my-csr-directory/{}
This command lists all of the CSRs in my-csr-directory and pipes each CSR file
name to the aws iot create-certificate-from-csr Amazon Web Services CLI command
to create a certificate for the corresponding CSR.
The aws iot create-certificate-from-csr part of the command can also be run in
parallel to speed up the certificate creation process:
$ ls my-csr-directory/ | xargs -P 10 -I {} aws iot create-certificate-from-csr
--certificate-signing-request file://my-csr-directory/{}
On Windows PowerShell, the command to create certificates for all CSRs in
my-csr-directory is:
> ls -Name my-csr-directory | %{aws iot create-certificate-from-csr
--certificate-signing-request file://my-csr-directory/$_}
On a Windows command prompt, the command to create certificates for all CSRs in
my-csr-directory is:
> forfiles /p my-csr-directory /c "cmd /c aws iot create-certificate-from-csr
--certificate-signing-request file://@path"
"""
def create_certificate_from_csr(%Client{} = client, input, options \\ []) do
  # POST /certificates — "setAsActive" travels as a query parameter; the CSR
  # itself stays in the request body.
  {query, body} = Request.build_params([{"setAsActive", "setAsActive"}], input)

  Request.request_rest(client, metadata(), :post, "/certificates", query, [], body, options, nil)
end
@doc """
Use this API to define a Custom Metric published by your devices to Device
Defender.
Requires permission to access the
[CreateCustomMetric](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_custom_metric(%Client{} = client, metric_name, input, options \\ []) do
  # POST /custom-metric/{metricName} with the metric definition as the body.
  path = "/custom-metric/#{AWS.Util.encode_uri(metric_name)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Create a dimension that you can use to limit the scope of a metric used in a
security profile for IoT Device Defender.
For example, using a `TOPIC_FILTER` dimension, you can narrow down the scope of
the metric only to MQTT topics whose name match the pattern specified in the
dimension.
Requires permission to access the
[CreateDimension](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_dimension(%Client{} = client, name, input, options \\ []) do
  # POST /dimensions/{name} with the dimension definition as the body.
  path = "/dimensions/#{AWS.Util.encode_uri(name)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Creates a domain configuration.
Requires permission to access the
[CreateDomainConfiguration](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_domain_configuration(%Client{} = client, domain_configuration_name, input, options \\ []) do
  # POST /domainConfigurations/{domainConfigurationName} with the config as the body.
  path = "/domainConfigurations/#{AWS.Util.encode_uri(domain_configuration_name)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Creates a dynamic thing group.
Requires permission to access the
[CreateDynamicThingGroup](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_dynamic_thing_group(%Client{} = client, thing_group_name, input, options \\ []) do
  # POST /dynamic-thing-groups/{thingGroupName} with the group spec as the body.
  path = "/dynamic-thing-groups/#{AWS.Util.encode_uri(thing_group_name)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Creates a fleet metric.
Requires permission to access the
[CreateFleetMetric](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_fleet_metric(%Client{} = client, metric_name, input, options \\ []) do
  # PUT /fleet-metric/{metricName} with the metric definition as the body.
  path = "/fleet-metric/#{AWS.Util.encode_uri(metric_name)}"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Creates a job.
Requires permission to access the
[CreateJob](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_job(%Client{} = client, job_id, input, options \\ []) do
  # PUT /jobs/{jobId} with the job document/targets as the body.
  path = "/jobs/#{AWS.Util.encode_uri(job_id)}"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Creates a job template.
Requires permission to access the
[CreateJobTemplate](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_job_template(%Client{} = client, job_template_id, input, options \\ []) do
  # PUT /job-templates/{jobTemplateId} with the template definition as the body.
  path = "/job-templates/#{AWS.Util.encode_uri(job_template_id)}"

  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Creates a 2048-bit RSA key pair and issues an X.509 certificate using the issued
public key.
You can also call `CreateKeysAndCertificate` over MQTT from a device, for more
information, see [Provisioning MQTT API](https://docs.aws.amazon.com/iot/latest/developerguide/provision-wo-cert.html#provision-mqtt-api).
**Note** This is the only time IoT issues the private key for this certificate,
so it is important to keep it in a secure location.
Requires permission to access the
[CreateKeysAndCertificate](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_keys_and_certificate(%Client{} = client, input, options \\ []) do
  # POST /keys-and-certificate — "setAsActive" is sent as a query parameter.
  {query, body} = Request.build_params([{"setAsActive", "setAsActive"}], input)

  Request.request_rest(client, metadata(), :post, "/keys-and-certificate", query, [], body, options, nil)
end
@doc """
Defines an action that can be applied to audit findings by using
StartAuditMitigationActionsTask.
Only certain types of mitigation actions can be applied to specific check names.
For more information, see [Mitigation actions](https://docs.aws.amazon.com/iot/latest/developerguide/device-defender-mitigation-actions.html).
Each mitigation action can apply only one type of change.
Requires permission to access the
[CreateMitigationAction](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_mitigation_action(%Client{} = client, action_name, input, options \\ []) do
  # POST /mitigationactions/actions/{actionName} with the action params as the body.
  path = "/mitigationactions/actions/#{AWS.Util.encode_uri(action_name)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Creates an IoT OTA update on a target group of things or groups.
Requires permission to access the
[CreateOTAUpdate](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_ota_update(%Client{} = client, ota_update_id, input, options \\ []) do
  # POST /otaUpdates/{otaUpdateId} with the OTA update definition as the body.
  path = "/otaUpdates/#{AWS.Util.encode_uri(ota_update_id)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Creates an IoT policy.
The created policy is the default version for the policy. This operation creates
a policy version with a version identifier of **1** and sets **1** as the
policy's default version.
Requires permission to access the
[CreatePolicy](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_policy(%Client{} = client, policy_name, input, options \\ []) do
  # POST /policies/{policyName} with the policy document as the body.
  path = "/policies/#{AWS.Util.encode_uri(policy_name)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Creates a new version of the specified IoT policy.
To update a policy, create a new policy version. A managed policy can have up to
five versions. If the policy has five versions, you must use
`DeletePolicyVersion` to delete an existing version before you create a new one.
Optionally, you can set the new version as the policy's default version. The
default version is the operative version (that is, the version that is in effect
for the certificates to which the policy is attached).
Requires permission to access the
[CreatePolicyVersion](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_policy_version(%Client{} = client, policy_name, input, options \\ []) do
  # POST /policies/{policyName}/version — "setAsDefault" goes into the query
  # string; the policy document stays in the body.
  path = "/policies/#{AWS.Util.encode_uri(policy_name)}/version"
  {query, body} = Request.build_params([{"setAsDefault", "setAsDefault"}], input)

  Request.request_rest(client, metadata(), :post, path, query, [], body, options, nil)
end
@doc """
Creates a provisioning claim.
Requires permission to access the
[CreateProvisioningClaim](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_provisioning_claim(%Client{} = client, template_name, input, options \\ []) do
  # POST /provisioning-templates/{templateName}/provisioning-claim.
  path = "/provisioning-templates/#{AWS.Util.encode_uri(template_name)}/provisioning-claim"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Creates a fleet provisioning template.
Requires permission to access the
[CreateProvisioningTemplate](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_provisioning_template(%Client{} = client, input, options \\ []) do
  # POST /provisioning-templates with the template definition as the body.
  Request.request_rest(client, metadata(), :post, "/provisioning-templates", [], [], input, options, nil)
end
@doc """
Creates a new version of a fleet provisioning template.
Requires permission to access the
[CreateProvisioningTemplateVersion](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_provisioning_template_version(%Client{} = client, template_name, input, options \\ []) do
  # POST /provisioning-templates/{templateName}/versions — "setAsDefault" is a
  # query parameter; the template body stays in the payload.
  path = "/provisioning-templates/#{AWS.Util.encode_uri(template_name)}/versions"
  {query, body} = Request.build_params([{"setAsDefault", "setAsDefault"}], input)

  Request.request_rest(client, metadata(), :post, path, query, [], body, options, nil)
end
@doc """
Creates a role alias.
Requires permission to access the
[CreateRoleAlias](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_role_alias(%Client{} = client, role_alias, input, options \\ []) do
  # POST /role-aliases/{roleAlias} with the alias configuration as the body.
  path = "/role-aliases/#{AWS.Util.encode_uri(role_alias)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Creates a scheduled audit that is run at a specified time interval.
Requires permission to access the
[CreateScheduledAudit](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_scheduled_audit(%Client{} = client, scheduled_audit_name, input, options \\ []) do
  # POST /audit/scheduledaudits/{scheduledAuditName} with the schedule as the body.
  path = "/audit/scheduledaudits/#{AWS.Util.encode_uri(scheduled_audit_name)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Creates a Device Defender security profile.
Requires permission to access the
[CreateSecurityProfile](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_security_profile(%Client{} = client, security_profile_name, input, options \\ []) do
  # POST /security-profiles/{securityProfileName} with the profile as the body.
  path = "/security-profiles/#{AWS.Util.encode_uri(security_profile_name)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Creates a stream for delivering one or more large files in chunks over MQTT.
A stream transports data bytes in chunks or blocks packaged as MQTT messages
from a source like S3. You can have one or more files associated with a stream.
Requires permission to access the
[CreateStream](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_stream(%Client{} = client, stream_id, input, options \\ []) do
  # POST /streams/{streamId} with the stream definition as the body.
  path = "/streams/#{AWS.Util.encode_uri(stream_id)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Creates a thing record in the registry.
If this call is made multiple times using the same thing name and configuration,
the call will succeed. If this call is made with the same thing name but
different configuration a `ResourceAlreadyExistsException` is thrown.
This is a control plane operation. See
[Authorization](https://docs.aws.amazon.com/iot/latest/developerguide/iot-authorization.html) for information about authorizing control plane actions.
Requires permission to access the
[CreateThing](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_thing(%Client{} = client, thing_name, input, options \\ []) do
  # POST /things/{thingName} with the thing attributes as the body.
  path = "/things/#{AWS.Util.encode_uri(thing_name)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Create a thing group.
This is a control plane operation. See
[Authorization](https://docs.aws.amazon.com/iot/latest/developerguide/iot-authorization.html) for information about authorizing control plane actions.
Requires permission to access the
[CreateThingGroup](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_thing_group(%Client{} = client, thing_group_name, input, options \\ []) do
  # POST /thing-groups/{thingGroupName} with the group properties as the body.
  path = "/thing-groups/#{AWS.Util.encode_uri(thing_group_name)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Creates a new thing type.
Requires permission to access the
[CreateThingType](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_thing_type(%Client{} = client, thing_type_name, input, options \\ []) do
  # POST /thing-types/{thingTypeName} with the type properties as the body.
  path = "/thing-types/#{AWS.Util.encode_uri(thing_type_name)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Creates a rule.
Creating rules is an administrator-level action. Any user who has permission to
create rules will be able to access data processed by the rule.
Requires permission to access the
[CreateTopicRule](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_topic_rule(%Client{} = client, rule_name, input, options \\ []) do
  # POST /rules/{ruleName} — "tags" is promoted out of the input map into the
  # "x-amz-tagging" HTTP header; the rule payload remains the body.
  path = "/rules/#{AWS.Util.encode_uri(rule_name)}"
  {headers, body} = Request.build_params([{"tags", "x-amz-tagging"}], input)

  Request.request_rest(client, metadata(), :post, path, [], headers, body, options, nil)
end
@doc """
Creates a topic rule destination.
The destination must be confirmed prior to use.
Requires permission to access the
[CreateTopicRuleDestination](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def create_topic_rule_destination(%Client{} = client, input, options \\ []) do
  # POST /destinations with the destination configuration as the body.
  Request.request_rest(client, metadata(), :post, "/destinations", [], [], input, options, nil)
end
@doc """
Restores the default settings for Device Defender audits for this account.
Any configuration data you entered is deleted and all audit checks are reset to
disabled.
Requires permission to access the
[DeleteAccountAuditConfiguration](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_account_audit_configuration(%Client{} = client, input, options \\ []) do
  # DELETE /audit/configuration — "deleteScheduledAudits" rides the query string.
  {query, body} = Request.build_params([{"deleteScheduledAudits", "deleteScheduledAudits"}], input)

  Request.request_rest(client, metadata(), :delete, "/audit/configuration", query, [], body, options, nil)
end
@doc """
Deletes a Device Defender audit suppression.
Requires permission to access the
[DeleteAuditSuppression](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_audit_suppression(%Client{} = client, input, options \\ []) do
  # POST /audit/suppressions/delete — deletion is expressed as a POST here,
  # with the suppression selector in the body.
  Request.request_rest(client, metadata(), :post, "/audit/suppressions/delete", [], [], input, options, nil)
end
@doc """
Deletes an authorizer.
Requires permission to access the
[DeleteAuthorizer](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_authorizer(%Client{} = client, authorizer_name, input, options \\ []) do
  # DELETE /authorizer/{authorizerName}.
  path = "/authorizer/#{AWS.Util.encode_uri(authorizer_name)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Deletes the billing group.
Requires permission to access the
[DeleteBillingGroup](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_billing_group(%Client{} = client, billing_group_name, input, options \\ []) do
  # DELETE /billing-groups/{billingGroupName} — optimistic-locking
  # "expectedVersion" is carried as a query parameter.
  path = "/billing-groups/#{AWS.Util.encode_uri(billing_group_name)}"
  {query, body} = Request.build_params([{"expectedVersion", "expectedVersion"}], input)

  Request.request_rest(client, metadata(), :delete, path, query, [], body, options, nil)
end
@doc """
Deletes a registered CA certificate.
Requires permission to access the
[DeleteCACertificate](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_ca_certificate(%Client{} = client, certificate_id, input, options \\ []) do
  # DELETE /cacertificate/{certificateId}.
  path = "/cacertificate/#{AWS.Util.encode_uri(certificate_id)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Deletes the specified certificate.
A certificate cannot be deleted if it has a policy or IoT thing attached to it
or if its status is set to ACTIVE. To delete a certificate, first use the
`DetachPolicy` action to detach all policies. Next, use the `UpdateCertificate`
action to set the certificate to the INACTIVE status.
Requires permission to access the
[DeleteCertificate](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_certificate(%Client{} = client, certificate_id, input, options \\ []) do
  # DELETE /certificates/{certificateId} — "forceDelete" is a query parameter.
  path = "/certificates/#{AWS.Util.encode_uri(certificate_id)}"
  {query, body} = Request.build_params([{"forceDelete", "forceDelete"}], input)

  Request.request_rest(client, metadata(), :delete, path, query, [], body, options, nil)
end
@doc """
Deletes a Device Defender detect custom metric.
Requires permission to access the
[DeleteCustomMetric](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions) action.
Before you can delete a custom metric, you must first remove the custom metric
from all security profiles it's a part of. The security profile associated with
the custom metric can be found using the
[ListSecurityProfiles](https://docs.aws.amazon.com/iot/latest/apireference/API_ListSecurityProfiles.html)
API with `metricName` set to your custom metric name.
"""
def delete_custom_metric(%Client{} = client, metric_name, input, options \\ []) do
  # DELETE /custom-metric/{metricName}.
  path = "/custom-metric/#{AWS.Util.encode_uri(metric_name)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Removes the specified dimension from your Amazon Web Services accounts.
Requires permission to access the
[DeleteDimension](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_dimension(%Client{} = client, name, input, options \\ []) do
  # DELETE /dimensions/{name}.
  path = "/dimensions/#{AWS.Util.encode_uri(name)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Deletes the specified domain configuration.
Requires permission to access the
[DeleteDomainConfiguration](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_domain_configuration(%Client{} = client, domain_configuration_name, input, options \\ []) do
  # DELETE /domainConfigurations/{domainConfigurationName}.
  path = "/domainConfigurations/#{AWS.Util.encode_uri(domain_configuration_name)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Deletes a dynamic thing group.
Requires permission to access the
[DeleteDynamicThingGroup](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_dynamic_thing_group(%Client{} = client, thing_group_name, input, options \\ []) do
  # DELETE /dynamic-thing-groups/{thingGroupName} — "expectedVersion" is a
  # query parameter used for optimistic locking.
  path = "/dynamic-thing-groups/#{AWS.Util.encode_uri(thing_group_name)}"
  {query, body} = Request.build_params([{"expectedVersion", "expectedVersion"}], input)

  Request.request_rest(client, metadata(), :delete, path, query, [], body, options, nil)
end
@doc """
Deletes the specified fleet metric.
Returns successfully with no error if the deletion is successful or you specify
a fleet metric that doesn't exist.
Requires permission to access the
[DeleteFleetMetric](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_fleet_metric(%Client{} = client, metric_name, input, options \\ []) do
  # DELETE /fleet-metric/{metricName} — "expectedVersion" is a query parameter.
  path = "/fleet-metric/#{AWS.Util.encode_uri(metric_name)}"
  {query, body} = Request.build_params([{"expectedVersion", "expectedVersion"}], input)

  Request.request_rest(client, metadata(), :delete, path, query, [], body, options, nil)
end
@doc """
Deletes a job and its related job executions.
Deleting a job may take time, depending on the number of job executions created
for the job and various other factors. While the job is being deleted, the
status of the job will be shown as "DELETION_IN_PROGRESS". Attempting to delete
or cancel a job whose status is already "DELETION_IN_PROGRESS" will result in an
error.
Only 10 jobs may have status "DELETION_IN_PROGRESS" at the same time, or a
LimitExceededException will occur.
Requires permission to access the
[DeleteJob](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_job(%Client{} = client, job_id, input, options \\ []) do
  # DELETE /jobs/{jobId} — "force" and "namespaceId" travel as query parameters.
  path = "/jobs/#{AWS.Util.encode_uri(job_id)}"

  {query, body} =
    Request.build_params([{"force", "force"}, {"namespaceId", "namespaceId"}], input)

  Request.request_rest(client, metadata(), :delete, path, query, [], body, options, nil)
end
@doc """
Deletes a job execution.
Requires permission to access the
[DeleteJobExecution](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_job_execution(%Client{} = client, execution_number, job_id, thing_name, input, options \\ []) do
  # DELETE /things/{thingName}/jobs/{jobId}/executionNumber/{executionNumber} —
  # "force" and "namespaceId" travel as query parameters.
  path =
    "/things/#{AWS.Util.encode_uri(thing_name)}/jobs/#{AWS.Util.encode_uri(job_id)}/executionNumber/#{AWS.Util.encode_uri(execution_number)}"

  {query, body} =
    Request.build_params([{"force", "force"}, {"namespaceId", "namespaceId"}], input)

  Request.request_rest(client, metadata(), :delete, path, query, [], body, options, nil)
end
@doc """
Deletes the specified job template.
"""
def delete_job_template(%Client{} = client, job_template_id, input, options \\ []) do
  # DELETE /job-templates/{jobTemplateId}.
  path = "/job-templates/#{AWS.Util.encode_uri(job_template_id)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Deletes a defined mitigation action from your Amazon Web Services accounts.
Requires permission to access the
[DeleteMitigationAction](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_mitigation_action(%Client{} = client, action_name, input, options \\ []) do
  # DELETE /mitigationactions/actions/{actionName}.
  path = "/mitigationactions/actions/#{AWS.Util.encode_uri(action_name)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Delete an OTA update.
Requires permission to access the
[DeleteOTAUpdate](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_ota_update(%Client{} = client, ota_update_id, input, options \\ []) do
  # DELETE /otaUpdates/{otaUpdateId} — "deleteStream" and "forceDeleteAWSJob"
  # are sent as query parameters.
  path = "/otaUpdates/#{AWS.Util.encode_uri(ota_update_id)}"

  {query, body} =
    Request.build_params(
      [{"deleteStream", "deleteStream"}, {"forceDeleteAWSJob", "forceDeleteAWSJob"}],
      input
    )

  Request.request_rest(client, metadata(), :delete, path, query, [], body, options, nil)
end
@doc """
Deletes the specified policy.
A policy cannot be deleted if it has non-default versions or it is attached to
any certificate.
To delete a policy, use the `DeletePolicyVersion` action to delete all
non-default versions of the policy; use the `DetachPolicy` action to detach the
policy from any certificate; and then use the DeletePolicy action to delete the
policy.
When a policy is deleted using DeletePolicy, its default version is deleted with
it.
Because of the distributed nature of Amazon Web Services, it can take up to five
minutes after a policy is detached before it's ready to be deleted.
Requires permission to access the
[DeletePolicy](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_policy(%Client{} = client, policy_name, input, options \\ []) do
  # DELETE /policies/{policyName}.
  path = "/policies/#{AWS.Util.encode_uri(policy_name)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Deletes the specified version of the specified policy.
You cannot delete the default version of a policy using this action. To delete
the default version of a policy, use `DeletePolicy`. To find out which version
of a policy is marked as the default version, use ListPolicyVersions.
Requires permission to access the
[DeletePolicyVersion](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_policy_version(%Client{} = client, policy_name, policy_version_id, input, options \\ []) do
  # DELETE /policies/{policyName}/version/{policyVersionId}.
  path =
    "/policies/#{AWS.Util.encode_uri(policy_name)}/version/#{AWS.Util.encode_uri(policy_version_id)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Deletes a fleet provisioning template.
Requires permission to access the
[DeleteProvisioningTemplate](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_provisioning_template(%Client{} = client, template_name, input, options \\ []) do
  # DELETE /provisioning-templates/{templateName}.
  path = "/provisioning-templates/#{AWS.Util.encode_uri(template_name)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Deletes a fleet provisioning template version.

Requires permission to access the
[DeleteProvisioningTemplateVersion](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_provisioning_template_version(%Client{} = client, template_name, version_id, input, options \\ []) do
  path =
    "/provisioning-templates/#{AWS.Util.encode_uri(template_name)}/versions/#{AWS.Util.encode_uri(version_id)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Deletes a CA certificate registration code.

Requires permission to access the
[DeleteRegistrationCode](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_registration_code(%Client{} = client, input, options \\ []) do
  # Fixed path; the registration code is account-scoped.
  Request.request_rest(client, metadata(), :delete, "/registrationcode", [], [], input, options, nil)
end
@doc """
Deletes a role alias.

Requires permission to access the
[DeleteRoleAlias](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_role_alias(%Client{} = client, role_alias, input, options \\ []) do
  path = "/role-aliases/#{AWS.Util.encode_uri(role_alias)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Deletes a scheduled audit.

Requires permission to access the
[DeleteScheduledAudit](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_scheduled_audit(%Client{} = client, scheduled_audit_name, input, options \\ []) do
  path = "/audit/scheduledaudits/#{AWS.Util.encode_uri(scheduled_audit_name)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Deletes a Device Defender security profile.

Requires permission to access the
[DeleteSecurityProfile](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_security_profile(%Client{} = client, security_profile_name, input, options \\ []) do
  path = "/security-profiles/#{AWS.Util.encode_uri(security_profile_name)}"

  # Lift the optional expectedVersion out of the input and into the query string.
  {query, body} = Request.build_params([{"expectedVersion", "expectedVersion"}], input)

  Request.request_rest(client, metadata(), :delete, path, query, [], body, options, nil)
end
@doc """
Deletes a stream.

Requires permission to access the
[DeleteStream](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_stream(%Client{} = client, stream_id, input, options \\ []) do
  path = "/streams/#{AWS.Util.encode_uri(stream_id)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Deletes the specified thing.

Returns successfully with no error if the deletion is successful or you specify
a thing that doesn't exist.

Requires permission to access the
[DeleteThing](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_thing(%Client{} = client, thing_name, input, options \\ []) do
  path = "/things/#{AWS.Util.encode_uri(thing_name)}"

  # Lift the optional expectedVersion out of the input and into the query string.
  {query, body} = Request.build_params([{"expectedVersion", "expectedVersion"}], input)

  Request.request_rest(client, metadata(), :delete, path, query, [], body, options, nil)
end
@doc """
Deletes a thing group.

Requires permission to access the
[DeleteThingGroup](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_thing_group(%Client{} = client, thing_group_name, input, options \\ []) do
  path = "/thing-groups/#{AWS.Util.encode_uri(thing_group_name)}"

  # Lift the optional expectedVersion out of the input and into the query string.
  {query, body} = Request.build_params([{"expectedVersion", "expectedVersion"}], input)

  Request.request_rest(client, metadata(), :delete, path, query, [], body, options, nil)
end
@doc """
Deletes the specified thing type.

You cannot delete a thing type if it has things associated with it. To delete a
thing type, first mark it as deprecated by calling `DeprecateThingType`, then
remove any associated things by calling `UpdateThing` to change the thing type
on any associated thing, and finally use `DeleteThingType` to delete the thing
type.

Requires permission to access the
[DeleteThingType](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_thing_type(%Client{} = client, thing_type_name, input, options \\ []) do
  path = "/thing-types/#{AWS.Util.encode_uri(thing_type_name)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Deletes the rule.

Requires permission to access the
[DeleteTopicRule](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_topic_rule(%Client{} = client, rule_name, input, options \\ []) do
  path = "/rules/#{AWS.Util.encode_uri(rule_name)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Deletes a topic rule destination.

Requires permission to access the
[DeleteTopicRuleDestination](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_topic_rule_destination(%Client{} = client, arn, input, options \\ []) do
  # The ARN may contain slashes, so it is encoded as a multi-segment path component.
  path = "/destinations/#{AWS.Util.encode_multi_segment_uri(arn)}"

  Request.request_rest(client, metadata(), :delete, path, [], [], input, options, nil)
end
@doc """
Deletes a logging level.

Requires permission to access the
[DeleteV2LoggingLevel](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def delete_v2_logging_level(%Client{} = client, input, options \\ []) do
  # Move the target identifiers from the input into the query string.
  {query, body} =
    Request.build_params(
      [{"targetName", "targetName"}, {"targetType", "targetType"}],
      input
    )

  Request.request_rest(client, metadata(), :delete, "/v2LoggingLevel", query, [], body, options, nil)
end
@doc """
Deprecates a thing type.

You cannot associate new things with a deprecated thing type.

Requires permission to access the
[DeprecateThingType](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def deprecate_thing_type(%Client{} = client, thing_type_name, input, options \\ []) do
  path = "/thing-types/#{AWS.Util.encode_uri(thing_type_name)}/deprecate"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Gets information about the Device Defender audit settings for this account.

Settings include how audit notifications are sent and which audit checks are
enabled or disabled.

Requires permission to access the
[DescribeAccountAuditConfiguration](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_account_audit_configuration(%Client{} = client, options \\ []) do
  # GET with no body, query parameters, or headers.
  Request.request_rest(client, metadata(), :get, "/audit/configuration", [], [], nil, options, nil)
end
@doc """
Gets information about a single audit finding.

Properties include the reason for noncompliance, the severity of the issue, and
the start time of the audit that returned the finding.

Requires permission to access the
[DescribeAuditFinding](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_audit_finding(%Client{} = client, finding_id, options \\ []) do
  path = "/audit/findings/#{AWS.Util.encode_uri(finding_id)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Gets information about an audit mitigation task that is used to apply mitigation
actions to a set of audit findings.

Properties include the actions being applied, the audit checks to which they're
being applied, the task status, and aggregated task statistics.
"""
def describe_audit_mitigation_actions_task(%Client{} = client, task_id, options \\ []) do
  path = "/audit/mitigationactions/tasks/#{AWS.Util.encode_uri(task_id)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Gets information about a Device Defender audit suppression.
"""
def describe_audit_suppression(%Client{} = client, input, options \\ []) do
  # POST: the suppression selector is carried in the request body.
  Request.request_rest(
    client,
    metadata(),
    :post,
    "/audit/suppressions/describe",
    [],
    [],
    input,
    options,
    nil
  )
end
@doc """
Gets information about a Device Defender audit.

Requires permission to access the
[DescribeAuditTask](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_audit_task(%Client{} = client, task_id, options \\ []) do
  path = "/audit/tasks/#{AWS.Util.encode_uri(task_id)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Describes an authorizer.

Requires permission to access the
[DescribeAuthorizer](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_authorizer(%Client{} = client, authorizer_name, options \\ []) do
  path = "/authorizer/#{AWS.Util.encode_uri(authorizer_name)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Returns information about a billing group.

Requires permission to access the
[DescribeBillingGroup](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_billing_group(%Client{} = client, billing_group_name, options \\ []) do
  path = "/billing-groups/#{AWS.Util.encode_uri(billing_group_name)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Describes a registered CA certificate.

Requires permission to access the
[DescribeCACertificate](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_ca_certificate(%Client{} = client, certificate_id, options \\ []) do
  path = "/cacertificate/#{AWS.Util.encode_uri(certificate_id)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Gets information about the specified certificate.

Requires permission to access the
[DescribeCertificate](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_certificate(%Client{} = client, certificate_id, options \\ []) do
  path = "/certificates/#{AWS.Util.encode_uri(certificate_id)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Gets information about a Device Defender detect custom metric.

Requires permission to access the
[DescribeCustomMetric](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_custom_metric(%Client{} = client, metric_name, options \\ []) do
  path = "/custom-metric/#{AWS.Util.encode_uri(metric_name)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Describes the default authorizer.

Requires permission to access the
[DescribeDefaultAuthorizer](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_default_authorizer(%Client{} = client, options \\ []) do
  Request.request_rest(client, metadata(), :get, "/default-authorizer", [], [], nil, options, nil)
end
@doc """
Gets information about a Device Defender ML Detect mitigation action.

Requires permission to access the
[DescribeDetectMitigationActionsTask](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_detect_mitigation_actions_task(%Client{} = client, task_id, options \\ []) do
  path = "/detect/mitigationactions/tasks/#{AWS.Util.encode_uri(task_id)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Provides details about a dimension that is defined in your Amazon Web Services
accounts.

Requires permission to access the
[DescribeDimension](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_dimension(%Client{} = client, name, options \\ []) do
  path = "/dimensions/#{AWS.Util.encode_uri(name)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Gets summary information about a domain configuration.

Requires permission to access the
[DescribeDomainConfiguration](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_domain_configuration(%Client{} = client, domain_configuration_name, options \\ []) do
  path = "/domainConfigurations/#{AWS.Util.encode_uri(domain_configuration_name)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Returns a unique endpoint specific to the Amazon Web Services account making the
call.

Requires permission to access the
[DescribeEndpoint](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_endpoint(%Client{} = client, endpoint_type \\ nil, options \\ []) do
  # endpointType is only added to the query string when the caller provided it.
  query = if is_nil(endpoint_type), do: [], else: [{"endpointType", endpoint_type}]

  Request.request_rest(client, metadata(), :get, "/endpoint", query, [], nil, options, nil)
end
@doc """
Describes event configurations.

Requires permission to access the
[DescribeEventConfigurations](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_event_configurations(%Client{} = client, options \\ []) do
  Request.request_rest(client, metadata(), :get, "/event-configurations", [], [], nil, options, nil)
end
@doc """
Gets information about the specified fleet metric.

Requires permission to access the
[DescribeFleetMetric](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_fleet_metric(%Client{} = client, metric_name, options \\ []) do
  path = "/fleet-metric/#{AWS.Util.encode_uri(metric_name)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Describes a search index.

Requires permission to access the
[DescribeIndex](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_index(%Client{} = client, index_name, options \\ []) do
  path = "/indices/#{AWS.Util.encode_uri(index_name)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Describes a job.

Requires permission to access the
[DescribeJob](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_job(%Client{} = client, job_id, options \\ []) do
  path = "/jobs/#{AWS.Util.encode_uri(job_id)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Describes a job execution.

Requires permission to access the
[DescribeJobExecution](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_job_execution(%Client{} = client, job_id, thing_name, execution_number \\ nil, options \\ []) do
  path = "/things/#{AWS.Util.encode_uri(thing_name)}/jobs/#{AWS.Util.encode_uri(job_id)}"

  # executionNumber is only added to the query string when the caller provided it.
  query = if is_nil(execution_number), do: [], else: [{"executionNumber", execution_number}]

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Returns information about a job template.
"""
def describe_job_template(%Client{} = client, job_template_id, options \\ []) do
  path = "/job-templates/#{AWS.Util.encode_uri(job_template_id)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
View details of a managed job template.
"""
def describe_managed_job_template(%Client{} = client, template_name, template_version \\ nil, options \\ []) do
  path = "/managed-job-templates/#{AWS.Util.encode_uri(template_name)}"

  # templateVersion is only added to the query string when the caller provided it.
  query = if is_nil(template_version), do: [], else: [{"templateVersion", template_version}]

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Gets information about a mitigation action.

Requires permission to access the
[DescribeMitigationAction](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_mitigation_action(%Client{} = client, action_name, options \\ []) do
  path = "/mitigationactions/actions/#{AWS.Util.encode_uri(action_name)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Returns information about a fleet provisioning template.

Requires permission to access the
[DescribeProvisioningTemplate](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_provisioning_template(%Client{} = client, template_name, options \\ []) do
  path = "/provisioning-templates/#{AWS.Util.encode_uri(template_name)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Returns information about a fleet provisioning template version.

Requires permission to access the
[DescribeProvisioningTemplateVersion](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_provisioning_template_version(%Client{} = client, template_name, version_id, options \\ []) do
  path =
    "/provisioning-templates/#{AWS.Util.encode_uri(template_name)}/versions/#{AWS.Util.encode_uri(version_id)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Describes a role alias.

Requires permission to access the
[DescribeRoleAlias](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_role_alias(%Client{} = client, role_alias, options \\ []) do
  path = "/role-aliases/#{AWS.Util.encode_uri(role_alias)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Gets information about a scheduled audit.

Requires permission to access the
[DescribeScheduledAudit](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_scheduled_audit(%Client{} = client, scheduled_audit_name, options \\ []) do
  path = "/audit/scheduledaudits/#{AWS.Util.encode_uri(scheduled_audit_name)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Gets information about a Device Defender security profile.

Requires permission to access the
[DescribeSecurityProfile](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_security_profile(%Client{} = client, security_profile_name, options \\ []) do
  path = "/security-profiles/#{AWS.Util.encode_uri(security_profile_name)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Gets information about a stream.

Requires permission to access the
[DescribeStream](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_stream(%Client{} = client, stream_id, options \\ []) do
  path = "/streams/#{AWS.Util.encode_uri(stream_id)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Gets information about the specified thing.

Requires permission to access the
[DescribeThing](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_thing(%Client{} = client, thing_name, options \\ []) do
  path = "/things/#{AWS.Util.encode_uri(thing_name)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Describe a thing group.

Requires permission to access the
[DescribeThingGroup](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_thing_group(%Client{} = client, thing_group_name, options \\ []) do
  path = "/thing-groups/#{AWS.Util.encode_uri(thing_group_name)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Describes a bulk thing provisioning task.

Requires permission to access the
[DescribeThingRegistrationTask](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_thing_registration_task(%Client{} = client, task_id, options \\ []) do
  path = "/thing-registration-tasks/#{AWS.Util.encode_uri(task_id)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Gets information about the specified thing type.

Requires permission to access the
[DescribeThingType](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def describe_thing_type(%Client{} = client, thing_type_name, options \\ []) do
  path = "/thing-types/#{AWS.Util.encode_uri(thing_type_name)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Detaches a policy from the specified target.

Because of the distributed nature of Amazon Web Services, it can take up to five
minutes after a policy is detached before it's ready to be deleted.

Requires permission to access the
[DetachPolicy](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def detach_policy(%Client{} = client, policy_name, input, options \\ []) do
  path = "/target-policies/#{AWS.Util.encode_uri(policy_name)}"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Removes the specified policy from the specified certificate.

This action is deprecated. Please use `DetachPolicy` instead.

Requires permission to access the
[DetachPrincipalPolicy](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def detach_principal_policy(%Client{} = client, policy_name, input, options \\ []) do
  path = "/principal-policies/#{AWS.Util.encode_uri(policy_name)}"

  # The principal is sent as the x-amzn-iot-principal header, not in the body.
  {headers, body} = Request.build_params([{"principal", "x-amzn-iot-principal"}], input)

  Request.request_rest(client, metadata(), :delete, path, [], headers, body, options, nil)
end
@doc """
Disassociates a Device Defender security profile from a thing group or from this
account.

Requires permission to access the
[DetachSecurityProfile](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def detach_security_profile(%Client{} = client, security_profile_name, input, options \\ []) do
  path = "/security-profiles/#{AWS.Util.encode_uri(security_profile_name)}/targets"

  # The target ARN travels in the query string rather than the body.
  {query, body} =
    Request.build_params([{"securityProfileTargetArn", "securityProfileTargetArn"}], input)

  Request.request_rest(client, metadata(), :delete, path, query, [], body, options, nil)
end
@doc """
Detaches the specified principal from the specified thing.

A principal can be X.509 certificates, IAM users, groups, and roles, Amazon
Cognito identities or federated identities.

This call is asynchronous. It might take several seconds for the detachment to
propagate.

Requires permission to access the
[DetachThingPrincipal](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def detach_thing_principal(%Client{} = client, thing_name, input, options \\ []) do
  path = "/things/#{AWS.Util.encode_uri(thing_name)}/principals"

  # The principal is sent as the x-amzn-principal header, not in the body.
  {headers, body} = Request.build_params([{"principal", "x-amzn-principal"}], input)

  Request.request_rest(client, metadata(), :delete, path, [], headers, body, options, nil)
end
@doc """
Disables the rule.

Requires permission to access the
[DisableTopicRule](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def disable_topic_rule(%Client{} = client, rule_name, input, options \\ []) do
  path = "/rules/#{AWS.Util.encode_uri(rule_name)}/disable"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Enables the rule.

Requires permission to access the
[EnableTopicRule](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def enable_topic_rule(%Client{} = client, rule_name, input, options \\ []) do
  path = "/rules/#{AWS.Util.encode_uri(rule_name)}/enable"

  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Returns a Device Defender's ML Detect Security Profile training model's status.

Requires permission to access the
[GetBehaviorModelTrainingSummaries](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def get_behavior_model_training_summaries(
      %Client{} = client,
      max_results \\ nil,
      next_token \\ nil,
      security_profile_name \\ nil,
      options \\ []
    ) do
  path = "/behavior-model-training/summaries"

  # Only include the query parameters that were actually supplied.
  query =
    [
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"securityProfileName", security_profile_name}
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)

  Request.request_rest(client, metadata(), :get, path, query, [], nil, options, nil)
end
@doc """
Aggregates on indexed data with search queries pertaining to particular fields.

Requires permission to access the
[GetBucketsAggregation](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def get_buckets_aggregation(%Client{} = client, input, options \\ []) do
  Request.request_rest(client, metadata(), :post, "/indices/buckets", [], [], input, options, nil)
end
@doc """
Returns the approximate count of unique values that match the query.

Requires permission to access the
[GetCardinality](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def get_cardinality(%Client{} = client, input, options \\ []) do
  Request.request_rest(client, metadata(), :post, "/indices/cardinality", [], [], input, options, nil)
end
@doc """
Gets a list of the policies that have an effect on the authorization behavior of
the specified device when it connects to the IoT device gateway.

Requires permission to access the
[GetEffectivePolicies](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def get_effective_policies(%Client{} = client, input, options \\ []) do
  # thingName travels in the query string; the rest of the input stays in the body.
  {query, body} = Request.build_params([{"thingName", "thingName"}], input)

  Request.request_rest(client, metadata(), :post, "/effective-policies", query, [], body, options, nil)
end
@doc """
Gets the indexing configuration.

Requires permission to access the
[GetIndexingConfiguration](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def get_indexing_configuration(%Client{} = client, options \\ []) do
  Request.request_rest(client, metadata(), :get, "/indexing/config", [], [], nil, options, nil)
end
@doc """
Gets a job document.

Requires permission to access the
[GetJobDocument](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def get_job_document(%Client{} = client, job_id, options \\ []) do
  path = "/jobs/#{AWS.Util.encode_uri(job_id)}/job-document"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Gets the logging options.
NOTE: use of this command is not recommended. Use `GetV2LoggingOptions` instead.
Requires permission to access the
[GetLoggingOptions](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def get_logging_options(%Client{} = client, options \\ []) do
  Request.request_rest(client, metadata(), :get, "/loggingOptions", [], [], nil, options, nil)
end
@doc """
Gets an OTA update.
Requires permission to access the
[GetOTAUpdate](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def get_ota_update(%Client{} = client, ota_update_id, options \\ []) do
  path = "/otaUpdates/#{AWS.Util.encode_uri(ota_update_id)}"
  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Groups the aggregated values that match the query into percentile groupings.
The default percentile groupings are: 1,5,25,50,75,95,99, although you can
specify your own when you call `GetPercentiles`. This function returns a value
for each percentile group specified (or the default percentile groupings). The
percentile group "1" contains the aggregated field value that occurs in
approximately one percent of the values that match the query. The percentile
group "5" contains the aggregated field value that occurs in approximately five
percent of the values that match the query, and so on. The result is an
approximation, the more values that match the query, the more accurate the
percentile values.
Requires permission to access the
[GetPercentiles](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def get_percentiles(%Client{} = client, input, options \\ []) do
  # POST the aggregation query as the request body; no headers or query string.
  Request.request_rest(client, metadata(), :post, "/indices/percentiles", [], [], input, options, nil)
end
@doc """
Gets information about the specified policy with the policy document of the
default version.
Requires permission to access the
[GetPolicy](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def get_policy(%Client{} = client, policy_name, options \\ []) do
  path = "/policies/#{AWS.Util.encode_uri(policy_name)}"
  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Gets information about the specified policy version.
Requires permission to access the
[GetPolicyVersion](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def get_policy_version(%Client{} = client, policy_name, policy_version_id, options \\ []) do
  path =
    "/policies/#{AWS.Util.encode_uri(policy_name)}/version/#{AWS.Util.encode_uri(policy_version_id)}"

  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Gets a registration code used to register a CA certificate with IoT.
Requires permission to access the
[GetRegistrationCode](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def get_registration_code(%Client{} = client, options \\ []) do
  Request.request_rest(client, metadata(), :get, "/registrationcode", [], [], nil, options, nil)
end
@doc """
Returns the count, average, sum, minimum, maximum, sum of squares, variance, and
standard deviation for the specified aggregated field.
If the aggregation field is of type `String`, only the count statistic is
returned.
Requires permission to access the
[GetStatistics](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def get_statistics(%Client{} = client, input, options \\ []) do
  # POST the aggregation query as the request body; no headers or query string.
  Request.request_rest(client, metadata(), :post, "/indices/statistics", [], [], input, options, nil)
end
@doc """
Gets information about the rule.
Requires permission to access the
[GetTopicRule](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def get_topic_rule(%Client{} = client, rule_name, options \\ []) do
  path = "/rules/#{AWS.Util.encode_uri(rule_name)}"
  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Gets information about a topic rule destination.
Requires permission to access the
[GetTopicRuleDestination](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def get_topic_rule_destination(%Client{} = client, arn, options \\ []) do
  # The ARN may contain slashes, so it is encoded as a multi-segment path component.
  path = "/destinations/#{AWS.Util.encode_multi_segment_uri(arn)}"
  Request.request_rest(client, metadata(), :get, path, [], [], nil, options, nil)
end
@doc """
Gets the fine grained logging options.
Requires permission to access the
[GetV2LoggingOptions](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def get_v2_logging_options(%Client{} = client, options \\ []) do
  Request.request_rest(client, metadata(), :get, "/v2LoggingOptions", [], [], nil, options, nil)
end
@doc """
Lists the active violations for a given Device Defender security profile.
Requires permission to access the
[ListActiveViolations](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_active_violations(
      %Client{} = client,
      behavior_criteria_type \\ nil,
      list_suppressed_alerts \\ nil,
      max_results \\ nil,
      next_token \\ nil,
      security_profile_name \\ nil,
      thing_name \\ nil,
      verification_state \\ nil,
      options \\ []
    ) do
  # Send only the filters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"behaviorCriteriaType", behavior_criteria_type},
        {"listSuppressedAlerts", list_suppressed_alerts},
        {"maxResults", max_results},
        {"nextToken", next_token},
        {"securityProfileName", security_profile_name},
        {"thingName", thing_name},
        {"verificationState", verification_state}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, "/active-violations", query_params, [], nil, options, nil)
end
@doc """
Lists the policies attached to the specified thing group.
Requires permission to access the
[ListAttachedPolicies](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_attached_policies(%Client{} = client, target, input, options \\ []) do
  path = "/attached-policies/#{AWS.Util.encode_uri(target)}"

  # Lift paging and recursion members out of the input body into the query string.
  {query_params, input} =
    Request.build_params(
      [{"marker", "marker"}, {"pageSize", "pageSize"}, {"recursive", "recursive"}],
      input
    )

  Request.request_rest(client, metadata(), :post, path, query_params, [], input, options, nil)
end
@doc """
Lists the findings (results) of a Device Defender audit or of the audits
performed during a specified time period.
(Findings are retained for 90 days.)
Requires permission to access the
[ListAuditFindings](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_audit_findings(%Client{} = client, input, options \\ []) do
  # Filter criteria travel in the POST body; no headers or query string.
  Request.request_rest(client, metadata(), :post, "/audit/findings", [], [], input, options, nil)
end
@doc """
Gets the status of audit mitigation action tasks that were executed.
Requires permission to access the
[ListAuditMitigationActionsExecutions](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_audit_mitigation_actions_executions(
      %Client{} = client,
      action_status \\ nil,
      finding_id,
      max_results \\ nil,
      next_token \\ nil,
      task_id,
      options \\ []
    ) do
  # Send only the parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"actionStatus", action_status},
        {"findingId", finding_id},
        {"maxResults", max_results},
        {"nextToken", next_token},
        {"taskId", task_id}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(
    client,
    metadata(),
    :get,
    "/audit/mitigationactions/executions",
    query_params,
    [],
    nil,
    options,
    nil
  )
end
@doc """
Gets a list of audit mitigation action tasks that match the specified filters.
Requires permission to access the
[ListAuditMitigationActionsTasks](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_audit_mitigation_actions_tasks(
      %Client{} = client,
      audit_task_id \\ nil,
      end_time,
      finding_id \\ nil,
      max_results \\ nil,
      next_token \\ nil,
      start_time,
      task_status \\ nil,
      options \\ []
    ) do
  # Send only the filters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"auditTaskId", audit_task_id},
        {"endTime", end_time},
        {"findingId", finding_id},
        {"maxResults", max_results},
        {"nextToken", next_token},
        {"startTime", start_time},
        {"taskStatus", task_status}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(
    client,
    metadata(),
    :get,
    "/audit/mitigationactions/tasks",
    query_params,
    [],
    nil,
    options,
    nil
  )
end
@doc """
Lists your Device Defender audit listings.
Requires permission to access the
[ListAuditSuppressions](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_audit_suppressions(%Client{} = client, input, options \\ []) do
  # Filter criteria travel in the POST body; no headers or query string.
  Request.request_rest(client, metadata(), :post, "/audit/suppressions/list", [], [], input, options, nil)
end
@doc """
Lists the Device Defender audits that have been performed during a given time
period.
Requires permission to access the
[ListAuditTasks](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_audit_tasks(
      %Client{} = client,
      end_time,
      max_results \\ nil,
      next_token \\ nil,
      start_time,
      task_status \\ nil,
      task_type \\ nil,
      options \\ []
    ) do
  # Send only the filters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"endTime", end_time},
        {"maxResults", max_results},
        {"nextToken", next_token},
        {"startTime", start_time},
        {"taskStatus", task_status},
        {"taskType", task_type}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, "/audit/tasks", query_params, [], nil, options, nil)
end
@doc """
Lists the authorizers registered in your account.
Requires permission to access the
[ListAuthorizers](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_authorizers(
      %Client{} = client,
      ascending_order \\ nil,
      marker \\ nil,
      page_size \\ nil,
      status \\ nil,
      options \\ []
    ) do
  # Send only the parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"isAscendingOrder", ascending_order},
        {"marker", marker},
        {"pageSize", page_size},
        {"status", status}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, "/authorizers/", query_params, [], nil, options, nil)
end
@doc """
Lists the billing groups you have created.
Requires permission to access the
[ListBillingGroups](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_billing_groups(
      %Client{} = client,
      max_results \\ nil,
      name_prefix_filter \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  # Send only the parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"maxResults", max_results},
        {"namePrefixFilter", name_prefix_filter},
        {"nextToken", next_token}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, "/billing-groups", query_params, [], nil, options, nil)
end
@doc """
Lists the CA certificates registered for your Amazon Web Services account.
The results are paginated with a default page size of 25. You can use the
returned marker to retrieve additional results.
Requires permission to access the
[ListCACertificates](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_ca_certificates(
      %Client{} = client,
      ascending_order \\ nil,
      marker \\ nil,
      page_size \\ nil,
      options \\ []
    ) do
  # Send only the parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"isAscendingOrder", ascending_order},
        {"marker", marker},
        {"pageSize", page_size}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, "/cacertificates", query_params, [], nil, options, nil)
end
@doc """
Lists the certificates registered in your Amazon Web Services account.
The results are paginated with a default page size of 25. You can use the
returned marker to retrieve additional results.
Requires permission to access the
[ListCertificates](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_certificates(
      %Client{} = client,
      ascending_order \\ nil,
      marker \\ nil,
      page_size \\ nil,
      options \\ []
    ) do
  # Send only the parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"isAscendingOrder", ascending_order},
        {"marker", marker},
        {"pageSize", page_size}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, "/certificates", query_params, [], nil, options, nil)
end
@doc """
List the device certificates signed by the specified CA certificate.
Requires permission to access the
[ListCertificatesByCA](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_certificates_by_ca(
      %Client{} = client,
      ca_certificate_id,
      ascending_order \\ nil,
      marker \\ nil,
      page_size \\ nil,
      options \\ []
    ) do
  path = "/certificates-by-ca/#{AWS.Util.encode_uri(ca_certificate_id)}"

  # Send only the parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"isAscendingOrder", ascending_order},
        {"marker", marker},
        {"pageSize", page_size}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, path, query_params, [], nil, options, nil)
end
@doc """
Lists your Device Defender detect custom metrics.
Requires permission to access the
[ListCustomMetrics](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_custom_metrics(
      %Client{} = client,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  # Send only the paging parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [{"maxResults", max_results}, {"nextToken", next_token}],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, "/custom-metrics", query_params, [], nil, options, nil)
end
@doc """
Lists mitigation actions executions for a Device Defender ML Detect Security
Profile.
Requires permission to access the
[ListDetectMitigationActionsExecutions](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_detect_mitigation_actions_executions(
      %Client{} = client,
      end_time \\ nil,
      max_results \\ nil,
      next_token \\ nil,
      start_time \\ nil,
      task_id \\ nil,
      thing_name \\ nil,
      violation_id \\ nil,
      options \\ []
    ) do
  # Send only the filters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"endTime", end_time},
        {"maxResults", max_results},
        {"nextToken", next_token},
        {"startTime", start_time},
        {"taskId", task_id},
        {"thingName", thing_name},
        {"violationId", violation_id}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(
    client,
    metadata(),
    :get,
    "/detect/mitigationactions/executions",
    query_params,
    [],
    nil,
    options,
    nil
  )
end
@doc """
List of Device Defender ML Detect mitigation actions tasks.
Requires permission to access the
[ListDetectMitigationActionsTasks](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_detect_mitigation_actions_tasks(
      %Client{} = client,
      end_time,
      max_results \\ nil,
      next_token \\ nil,
      start_time,
      options \\ []
    ) do
  # Send only the parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"endTime", end_time},
        {"maxResults", max_results},
        {"nextToken", next_token},
        {"startTime", start_time}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(
    client,
    metadata(),
    :get,
    "/detect/mitigationactions/tasks",
    query_params,
    [],
    nil,
    options,
    nil
  )
end
@doc """
List the set of dimensions that are defined for your Amazon Web Services
accounts.
Requires permission to access the
[ListDimensions](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_dimensions(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
  # Send only the paging parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [{"maxResults", max_results}, {"nextToken", next_token}],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, "/dimensions", query_params, [], nil, options, nil)
end
@doc """
Gets a list of domain configurations for the user.
This list is sorted alphabetically by domain configuration name.
Requires permission to access the
[ListDomainConfigurations](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_domain_configurations(
      %Client{} = client,
      marker \\ nil,
      page_size \\ nil,
      service_type \\ nil,
      options \\ []
    ) do
  # Send only the parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"marker", marker},
        {"pageSize", page_size},
        {"serviceType", service_type}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, "/domainConfigurations", query_params, [], nil, options, nil)
end
@doc """
Lists all your fleet metrics.
Requires permission to access the
[ListFleetMetrics](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_fleet_metrics(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
  # Send only the paging parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [{"maxResults", max_results}, {"nextToken", next_token}],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, "/fleet-metrics", query_params, [], nil, options, nil)
end
@doc """
Lists the search indices.
Requires permission to access the
[ListIndices](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_indices(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
  # Send only the paging parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [{"maxResults", max_results}, {"nextToken", next_token}],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, "/indices", query_params, [], nil, options, nil)
end
@doc """
Lists the job executions for a job.
Requires permission to access the
[ListJobExecutionsForJob](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_job_executions_for_job(
      %Client{} = client,
      job_id,
      max_results \\ nil,
      next_token \\ nil,
      status \\ nil,
      options \\ []
    ) do
  path = "/jobs/#{AWS.Util.encode_uri(job_id)}/things"

  # Send only the parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"maxResults", max_results},
        {"nextToken", next_token},
        {"status", status}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, path, query_params, [], nil, options, nil)
end
@doc """
Lists the job executions for the specified thing.
Requires permission to access the
[ListJobExecutionsForThing](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_job_executions_for_thing(
      %Client{} = client,
      thing_name,
      job_id \\ nil,
      max_results \\ nil,
      namespace_id \\ nil,
      next_token \\ nil,
      status \\ nil,
      options \\ []
    ) do
  path = "/things/#{AWS.Util.encode_uri(thing_name)}/jobs"

  # Send only the filters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"jobId", job_id},
        {"maxResults", max_results},
        {"namespaceId", namespace_id},
        {"nextToken", next_token},
        {"status", status}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, path, query_params, [], nil, options, nil)
end
@doc """
Returns a list of job templates.
Requires permission to access the
[ListJobTemplates](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_job_templates(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
  # Send only the paging parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [{"maxResults", max_results}, {"nextToken", next_token}],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, "/job-templates", query_params, [], nil, options, nil)
end
@doc """
Lists jobs.
Requires permission to access the
[ListJobs](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_jobs(
      %Client{} = client,
      max_results \\ nil,
      namespace_id \\ nil,
      next_token \\ nil,
      status \\ nil,
      target_selection \\ nil,
      thing_group_id \\ nil,
      thing_group_name \\ nil,
      options \\ []
    ) do
  # Send only the filters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"maxResults", max_results},
        {"namespaceId", namespace_id},
        {"nextToken", next_token},
        {"status", status},
        {"targetSelection", target_selection},
        {"thingGroupId", thing_group_id},
        {"thingGroupName", thing_group_name}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, "/jobs", query_params, [], nil, options, nil)
end
@doc """
Returns a list of managed job templates.
"""
def list_managed_job_templates(
      %Client{} = client,
      max_results \\ nil,
      next_token \\ nil,
      template_name \\ nil,
      options \\ []
    ) do
  # Send only the parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"maxResults", max_results},
        {"nextToken", next_token},
        {"templateName", template_name}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, "/managed-job-templates", query_params, [], nil, options, nil)
end
@doc """
Gets a list of all mitigation actions that match the specified filter criteria.
Requires permission to access the
[ListMitigationActions](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_mitigation_actions(
      %Client{} = client,
      action_type \\ nil,
      max_results \\ nil,
      next_token \\ nil,
      options \\ []
    ) do
  # Send only the parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"actionType", action_type},
        {"maxResults", max_results},
        {"nextToken", next_token}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, "/mitigationactions/actions", query_params, [], nil, options, nil)
end
@doc """
Lists OTA updates.
Requires permission to access the
[ListOTAUpdates](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_ota_updates(
      %Client{} = client,
      max_results \\ nil,
      next_token \\ nil,
      ota_update_status \\ nil,
      options \\ []
    ) do
  # Send only the parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"maxResults", max_results},
        {"nextToken", next_token},
        {"otaUpdateStatus", ota_update_status}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, "/otaUpdates", query_params, [], nil, options, nil)
end
@doc """
Lists certificates that are being transferred but not yet accepted.
Requires permission to access the
[ListOutgoingCertificates](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_outgoing_certificates(
      %Client{} = client,
      ascending_order \\ nil,
      marker \\ nil,
      page_size \\ nil,
      options \\ []
    ) do
  # Send only the parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"isAscendingOrder", ascending_order},
        {"marker", marker},
        {"pageSize", page_size}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, "/certificates-out-going", query_params, [], nil, options, nil)
end
@doc """
Lists your policies.
Requires permission to access the
[ListPolicies](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_policies(
      %Client{} = client,
      ascending_order \\ nil,
      marker \\ nil,
      page_size \\ nil,
      options \\ []
    ) do
  # Send only the parameters the caller actually supplied.
  query_params =
    Enum.reject(
      [
        {"isAscendingOrder", ascending_order},
        {"marker", marker},
        {"pageSize", page_size}
      ],
      fn {_name, value} -> is_nil(value) end
    )

  Request.request_rest(client, metadata(), :get, "/policies", query_params, [], nil, options, nil)
end
@doc """
Lists the principals associated with the specified policy.
**Note:** This action is deprecated. Please use `ListTargetsForPolicy` instead.
Requires permission to access the
[ListPolicyPrincipals](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_policy_principals(
%Client{} = client,
ascending_order \\ nil,
marker \\ nil,
page_size \\ nil,
policy_name,
options \\ []
) do
url_path = "/policy-principals"
headers = []
headers =
if !is_nil(policy_name) do
[{"x-amzn-iot-policy", policy_name} | headers]
else
headers
end
query_params = []
query_params =
if !is_nil(page_size) do
[{"pageSize", page_size} | query_params]
else
query_params
end
query_params =
if !is_nil(marker) do
[{"marker", marker} | query_params]
else
query_params
end
query_params =
if !is_nil(ascending_order) do
[{"isAscendingOrder", ascending_order} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists the versions of the specified policy and identifies the default version.
Requires permission to access the
[ListPolicyVersions](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_policy_versions(%Client{} = client, policy_name, options \\ []) do
url_path = "/policies/#{AWS.Util.encode_uri(policy_name)}/version"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists the policies attached to the specified principal.
If you use an Cognito identity, the ID must be in [AmazonCognito Identity format](https://docs.aws.amazon.com/cognitoidentity/latest/APIReference/API_GetCredentialsForIdentity.html#API_GetCredentialsForIdentity_RequestSyntax).
**Note:** This action is deprecated. Please use `ListAttachedPolicies` instead.
Requires permission to access the
[ListPrincipalPolicies](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_principal_policies(
%Client{} = client,
ascending_order \\ nil,
marker \\ nil,
page_size \\ nil,
principal,
options \\ []
) do
url_path = "/principal-policies"
headers = []
headers =
if !is_nil(principal) do
[{"x-amzn-iot-principal", principal} | headers]
else
headers
end
query_params = []
query_params =
if !is_nil(page_size) do
[{"pageSize", page_size} | query_params]
else
query_params
end
query_params =
if !is_nil(marker) do
[{"marker", marker} | query_params]
else
query_params
end
query_params =
if !is_nil(ascending_order) do
[{"isAscendingOrder", ascending_order} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists the things associated with the specified principal.
A principal can be X.509 certificates, IAM users, groups, and roles, Amazon
Cognito identities or federated identities.
Requires permission to access the
[ListPrincipalThings](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_principal_things(
%Client{} = client,
max_results \\ nil,
next_token \\ nil,
principal,
options \\ []
) do
url_path = "/principals/things"
headers = []
headers =
if !is_nil(principal) do
[{"x-amzn-principal", principal} | headers]
else
headers
end
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
A list of fleet provisioning template versions.
Requires permission to access the
[ListProvisioningTemplateVersions](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_provisioning_template_versions(
%Client{} = client,
template_name,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/provisioning-templates/#{AWS.Util.encode_uri(template_name)}/versions"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists the fleet provisioning templates in your Amazon Web Services account.
Requires permission to access the
[ListProvisioningTemplates](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_provisioning_templates(
%Client{} = client,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/provisioning-templates"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists the role aliases registered in your account.
Requires permission to access the
[ListRoleAliases](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_role_aliases(
%Client{} = client,
ascending_order \\ nil,
marker \\ nil,
page_size \\ nil,
options \\ []
) do
url_path = "/role-aliases"
headers = []
query_params = []
query_params =
if !is_nil(page_size) do
[{"pageSize", page_size} | query_params]
else
query_params
end
query_params =
if !is_nil(marker) do
[{"marker", marker} | query_params]
else
query_params
end
query_params =
if !is_nil(ascending_order) do
[{"isAscendingOrder", ascending_order} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists all of your scheduled audits.
Requires permission to access the
[ListScheduledAudits](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_scheduled_audits(
%Client{} = client,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/audit/scheduledaudits"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists the Device Defender security profiles you've created.
You can filter security profiles by dimension or custom metric.
Requires permission to access the
[ListSecurityProfiles](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
`dimensionName` and `metricName` cannot be used in the same request.
"""
def list_security_profiles(
%Client{} = client,
dimension_name \\ nil,
max_results \\ nil,
metric_name \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/security-profiles"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(metric_name) do
[{"metricName", metric_name} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
query_params =
if !is_nil(dimension_name) do
[{"dimensionName", dimension_name} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists the Device Defender security profiles attached to a target (thing group).
Requires permission to access the
[ListSecurityProfilesForTarget](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_security_profiles_for_target(
%Client{} = client,
max_results \\ nil,
next_token \\ nil,
recursive \\ nil,
security_profile_target_arn,
options \\ []
) do
url_path = "/security-profiles-for-target"
headers = []
query_params = []
query_params =
if !is_nil(security_profile_target_arn) do
[{"securityProfileTargetArn", security_profile_target_arn} | query_params]
else
query_params
end
query_params =
if !is_nil(recursive) do
[{"recursive", recursive} | query_params]
else
query_params
end
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists all of the streams in your Amazon Web Services account.
Requires permission to access the
[ListStreams](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_streams(
%Client{} = client,
ascending_order \\ nil,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/streams"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
query_params =
if !is_nil(ascending_order) do
[{"isAscendingOrder", ascending_order} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists the tags (metadata) you have assigned to the resource.
Requires permission to access the
[ListTagsForResource](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_tags_for_resource(%Client{} = client, next_token \\ nil, resource_arn, options \\ []) do
url_path = "/tags"
headers = []
query_params = []
query_params =
if !is_nil(resource_arn) do
[{"resourceArn", resource_arn} | query_params]
else
query_params
end
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
List targets for the specified policy.
Requires permission to access the
[ListTargetsForPolicy](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_targets_for_policy(%Client{} = client, policy_name, input, options \\ []) do
url_path = "/policy-targets/#{AWS.Util.encode_uri(policy_name)}"
headers = []
{query_params, input} =
[
{"marker", "marker"},
{"pageSize", "pageSize"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Lists the targets (thing groups) associated with a given Device Defender
security profile.
Requires permission to access the
[ListTargetsForSecurityProfile](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_targets_for_security_profile(
%Client{} = client,
security_profile_name,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/security-profiles/#{AWS.Util.encode_uri(security_profile_name)}/targets"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
List the thing groups in your account.
Requires permission to access the
[ListThingGroups](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_thing_groups(
%Client{} = client,
max_results \\ nil,
name_prefix_filter \\ nil,
next_token \\ nil,
parent_group \\ nil,
recursive \\ nil,
options \\ []
) do
url_path = "/thing-groups"
headers = []
query_params = []
query_params =
if !is_nil(recursive) do
[{"recursive", recursive} | query_params]
else
query_params
end
query_params =
if !is_nil(parent_group) do
[{"parentGroup", parent_group} | query_params]
else
query_params
end
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(name_prefix_filter) do
[{"namePrefixFilter", name_prefix_filter} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
List the thing groups to which the specified thing belongs.
Requires permission to access the
[ListThingGroupsForThing](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_thing_groups_for_thing(
%Client{} = client,
thing_name,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/things/#{AWS.Util.encode_uri(thing_name)}/thing-groups"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists the principals associated with the specified thing.
A principal can be X.509 certificates, IAM users, groups, and roles, Amazon
Cognito identities or federated identities.
Requires permission to access the
[ListThingPrincipals](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_thing_principals(
%Client{} = client,
thing_name,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/things/#{AWS.Util.encode_uri(thing_name)}/principals"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Information about the thing registration tasks.
"""
def list_thing_registration_task_reports(
%Client{} = client,
task_id,
max_results \\ nil,
next_token \\ nil,
report_type,
options \\ []
) do
url_path = "/thing-registration-tasks/#{AWS.Util.encode_uri(task_id)}/reports"
headers = []
query_params = []
query_params =
if !is_nil(report_type) do
[{"reportType", report_type} | query_params]
else
query_params
end
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
List bulk thing provisioning tasks.
Requires permission to access the
[ListThingRegistrationTasks](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_thing_registration_tasks(
%Client{} = client,
max_results \\ nil,
next_token \\ nil,
status \\ nil,
options \\ []
) do
url_path = "/thing-registration-tasks"
headers = []
query_params = []
query_params =
if !is_nil(status) do
[{"status", status} | query_params]
else
query_params
end
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists the existing thing types.
Requires permission to access the
[ListThingTypes](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_thing_types(
%Client{} = client,
max_results \\ nil,
next_token \\ nil,
thing_type_name \\ nil,
options \\ []
) do
url_path = "/thing-types"
headers = []
query_params = []
query_params =
if !is_nil(thing_type_name) do
[{"thingTypeName", thing_type_name} | query_params]
else
query_params
end
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists your things.
Use the **attributeName** and **attributeValue** parameters to filter your
things. For example, calling `ListThings` with attributeName=Color and
attributeValue=Red retrieves all things in the registry that contain an
attribute **Color** with the value **Red**.
Requires permission to access the
[ListThings](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
You will not be charged for calling this API if an `Access denied` error is
returned. You will also not be charged if no attributes or pagination token was
provided in request and no pagination token and no results were returned.
"""
def list_things(
%Client{} = client,
attribute_name \\ nil,
attribute_value \\ nil,
max_results \\ nil,
next_token \\ nil,
thing_type_name \\ nil,
use_prefix_attribute_value \\ nil,
options \\ []
) do
url_path = "/things"
headers = []
query_params = []
query_params =
if !is_nil(use_prefix_attribute_value) do
[{"usePrefixAttributeValue", use_prefix_attribute_value} | query_params]
else
query_params
end
query_params =
if !is_nil(thing_type_name) do
[{"thingTypeName", thing_type_name} | query_params]
else
query_params
end
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
query_params =
if !is_nil(attribute_value) do
[{"attributeValue", attribute_value} | query_params]
else
query_params
end
query_params =
if !is_nil(attribute_name) do
[{"attributeName", attribute_name} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists the things you have added to the given billing group.
Requires permission to access the
[ListThingsInBillingGroup](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_things_in_billing_group(
%Client{} = client,
billing_group_name,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/billing-groups/#{AWS.Util.encode_uri(billing_group_name)}/things"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists the things in the specified group.
Requires permission to access the
[ListThingsInThingGroup](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_things_in_thing_group(
%Client{} = client,
thing_group_name,
max_results \\ nil,
next_token \\ nil,
recursive \\ nil,
options \\ []
) do
url_path = "/thing-groups/#{AWS.Util.encode_uri(thing_group_name)}/things"
headers = []
query_params = []
query_params =
if !is_nil(recursive) do
[{"recursive", recursive} | query_params]
else
query_params
end
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists all the topic rule destinations in your Amazon Web Services account.
Requires permission to access the
[ListTopicRuleDestinations](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_topic_rule_destinations(
%Client{} = client,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/destinations"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists the rules for the specific topic.
Requires permission to access the
[ListTopicRules](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_topic_rules(
%Client{} = client,
max_results \\ nil,
next_token \\ nil,
rule_disabled \\ nil,
topic \\ nil,
options \\ []
) do
url_path = "/rules"
headers = []
query_params = []
query_params =
if !is_nil(topic) do
[{"topic", topic} | query_params]
else
query_params
end
query_params =
if !is_nil(rule_disabled) do
[{"ruleDisabled", rule_disabled} | query_params]
else
query_params
end
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists logging levels.
Requires permission to access the
[ListV2LoggingLevels](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_v2_logging_levels(
%Client{} = client,
max_results \\ nil,
next_token \\ nil,
target_type \\ nil,
options \\ []
) do
url_path = "/v2LoggingLevel"
headers = []
query_params = []
query_params =
if !is_nil(target_type) do
[{"targetType", target_type} | query_params]
else
query_params
end
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists the Device Defender security profile violations discovered during the
given time period.
You can use filters to limit the results to those alerts issued for a particular
security profile, behavior, or thing (device).
Requires permission to access the
[ListViolationEvents](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def list_violation_events(
%Client{} = client,
behavior_criteria_type \\ nil,
end_time,
list_suppressed_alerts \\ nil,
max_results \\ nil,
next_token \\ nil,
security_profile_name \\ nil,
start_time,
thing_name \\ nil,
verification_state \\ nil,
options \\ []
) do
url_path = "/violation-events"
headers = []
query_params = []
query_params =
if !is_nil(verification_state) do
[{"verificationState", verification_state} | query_params]
else
query_params
end
query_params =
if !is_nil(thing_name) do
[{"thingName", thing_name} | query_params]
else
query_params
end
query_params =
if !is_nil(start_time) do
[{"startTime", start_time} | query_params]
else
query_params
end
query_params =
if !is_nil(security_profile_name) do
[{"securityProfileName", security_profile_name} | query_params]
else
query_params
end
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
query_params =
if !is_nil(list_suppressed_alerts) do
[{"listSuppressedAlerts", list_suppressed_alerts} | query_params]
else
query_params
end
query_params =
if !is_nil(end_time) do
[{"endTime", end_time} | query_params]
else
query_params
end
query_params =
if !is_nil(behavior_criteria_type) do
[{"behaviorCriteriaType", behavior_criteria_type} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Set a verification state and provide a description of that verification state on
a violation (detect alarm).
"""
def put_verification_state_on_violation(%Client{} = client, violation_id, input, options \\ []) do
url_path = "/violations/verification-state/#{AWS.Util.encode_uri(violation_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Registers a CA certificate with IoT.
This CA certificate can then be used to sign device certificates, which can be
then registered with IoT. You can register up to 10 CA certificates per Amazon
Web Services account that have the same subject field. This enables you to have
up to 10 certificate authorities sign your device certificates. If you have more
than one CA certificate registered, make sure you pass the CA certificate when
you register your device certificates with the `RegisterCertificate` action.
Requires permission to access the
[RegisterCACertificate](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def register_ca_certificate(%Client{} = client, input, options \\ []) do
url_path = "/cacertificate"
headers = []
{query_params, input} =
[
{"allowAutoRegistration", "allowAutoRegistration"},
{"setAsActive", "setAsActive"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Registers a device certificate with IoT.
If you have more than one CA certificate that has the same subject field, you
must specify the CA certificate that was used to sign the device certificate
being registered.
Requires permission to access the
[RegisterCertificate](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def register_certificate(%Client{} = client, input, options \\ []) do
url_path = "/certificate/register"
headers = []
{query_params, input} =
[
{"setAsActive", "setAsActive"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Register a certificate that does not have a certificate authority (CA).
For supported certificates, consult [ Certificate signing algorithms supported by
IoT](https://docs.aws.amazon.com/iot/latest/developerguide/x509-client-certs.html#x509-cert-algorithms).
"""
def register_certificate_without_ca(%Client{} = client, input, options \\ []) do
url_path = "/certificate/register-no-ca"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Provisions a thing in the device registry.
RegisterThing calls other IoT control plane APIs. These calls might exceed your
account level [ IoT Throttling Limits](https://docs.aws.amazon.com/general/latest/gr/aws_service_limits.html#limits_iot)
and cause throttle errors. Please contact [Amazon Web Services Customer Support](https://console.aws.amazon.com/support/home) to raise your throttling
limits if necessary.
Requires permission to access the
[RegisterThing](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def register_thing(%Client{} = client, input, options \\ []) do
url_path = "/things"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Rejects a pending certificate transfer.
After IoT rejects a certificate transfer, the certificate status changes from
**PENDING_TRANSFER** to **INACTIVE**.
To check for pending certificate transfers, call `ListCertificates` to enumerate
your certificates.
This operation can only be called by the transfer destination. After it is
called, the certificate will be returned to the source's account in the INACTIVE
state.
Requires permission to access the
[RejectCertificateTransfer](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def reject_certificate_transfer(%Client{} = client, certificate_id, input, options \\ []) do
url_path = "/reject-certificate-transfer/#{AWS.Util.encode_uri(certificate_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:patch,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Removes the given thing from the billing group.
Requires permission to access the
[RemoveThingFromBillingGroup](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
This call is asynchronous. It might take several seconds for the detachment to
propagate.
"""
def remove_thing_from_billing_group(%Client{} = client, input, options \\ []) do
url_path = "/billing-groups/removeThingFromBillingGroup"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Remove the specified thing from the specified group.
You must specify either a `thingGroupArn` or a `thingGroupName` to identify the
thing group and either a `thingArn` or a `thingName` to identify the thing to
remove from the thing group.
Requires permission to access the
[RemoveThingFromThingGroup](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def remove_thing_from_thing_group(%Client{} = client, input, options \\ []) do
url_path = "/thing-groups/removeThingFromThingGroup"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Replaces the rule.
You must specify all parameters for the new rule. Creating rules is an
administrator-level action. Any user who has permission to create rules will be
able to access data processed by the rule.
Requires permission to access the
[ReplaceTopicRule](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def replace_topic_rule(%Client{} = client, rule_name, input, options \\ []) do
  path = "/rules/#{AWS.Util.encode_uri(rule_name)}"
  # PATCH with no extra headers or query string.
  Request.request_rest(client, metadata(), :patch, path, [], [], input, options, nil)
end
@doc """
The query search index.
Requires permission to access the
[SearchIndex](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def search_index(%Client{} = client, input, options \\ []) do
  # POST with no extra headers or query string.
  Request.request_rest(client, metadata(), :post, "/indices/search", [], [], input, options, nil)
end
@doc """
Sets the default authorizer.
This will be used if a websocket connection is made without specifying an
authorizer.
Requires permission to access the
[SetDefaultAuthorizer](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def set_default_authorizer(%Client{} = client, input, options \\ []) do
  # POST with no extra headers or query string.
  Request.request_rest(client, metadata(), :post, "/default-authorizer", [], [], input, options, nil)
end
@doc """
Sets the specified version of the specified policy as the policy's default
(operative) version.
This action affects all certificates to which the policy is attached. To list
the principals the policy is attached to, use the `ListPrincipalPolicies`
action.
Requires permission to access the
[SetDefaultPolicyVersion](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def set_default_policy_version(%Client{} = client, policy_name, policy_version_id, input, options \\ []) do
  path =
    "/policies/#{AWS.Util.encode_uri(policy_name)}/version/#{AWS.Util.encode_uri(policy_version_id)}"

  # PATCH with no extra headers or query string.
  Request.request_rest(client, metadata(), :patch, path, [], [], input, options, nil)
end
@doc """
Sets the logging options.
NOTE: use of this command is not recommended. Use `SetV2LoggingOptions` instead.
Requires permission to access the
[SetLoggingOptions](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def set_logging_options(%Client{} = client, input, options \\ []) do
  # POST with no extra headers or query string.
  Request.request_rest(client, metadata(), :post, "/loggingOptions", [], [], input, options, nil)
end
@doc """
Sets the logging level.
Requires permission to access the
[SetV2LoggingLevel](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def set_v2_logging_level(%Client{} = client, input, options \\ []) do
  # POST with no extra headers or query string.
  Request.request_rest(client, metadata(), :post, "/v2LoggingLevel", [], [], input, options, nil)
end
@doc """
Sets the logging options for the V2 logging service.
Requires permission to access the
[SetV2LoggingOptions](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def set_v2_logging_options(%Client{} = client, input, options \\ []) do
  # POST with no extra headers or query string.
  Request.request_rest(client, metadata(), :post, "/v2LoggingOptions", [], [], input, options, nil)
end
@doc """
Starts a task that applies a set of mitigation actions to the specified target.
Requires permission to access the
[StartAuditMitigationActionsTask](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def start_audit_mitigation_actions_task(%Client{} = client, task_id, input, options \\ []) do
  path = "/audit/mitigationactions/tasks/#{AWS.Util.encode_uri(task_id)}"
  # POST with no extra headers or query string.
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Starts a Device Defender ML Detect mitigation actions task.
Requires permission to access the
[StartDetectMitigationActionsTask](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def start_detect_mitigation_actions_task(%Client{} = client, task_id, input, options \\ []) do
  path = "/detect/mitigationactions/tasks/#{AWS.Util.encode_uri(task_id)}"
  # PUT with no extra headers or query string.
  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Starts an on-demand Device Defender audit.
Requires permission to access the
[StartOnDemandAuditTask](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def start_on_demand_audit_task(%Client{} = client, input, options \\ []) do
  # POST with no extra headers or query string.
  Request.request_rest(client, metadata(), :post, "/audit/tasks", [], [], input, options, nil)
end
@doc """
Creates a bulk thing provisioning task.
Requires permission to access the
[StartThingRegistrationTask](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def start_thing_registration_task(%Client{} = client, input, options \\ []) do
  # POST with no extra headers or query string.
  Request.request_rest(client, metadata(), :post, "/thing-registration-tasks", [], [], input, options, nil)
end
@doc """
Cancels a bulk thing provisioning task.
Requires permission to access the
[StopThingRegistrationTask](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def stop_thing_registration_task(%Client{} = client, task_id, input, options \\ []) do
  path = "/thing-registration-tasks/#{AWS.Util.encode_uri(task_id)}/cancel"
  # PUT with no extra headers or query string.
  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Adds to or modifies the tags of the given resource.
Tags are metadata which can be used to manage a resource.
Requires permission to access the
[TagResource](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
  # POST with no extra headers or query string.
  Request.request_rest(client, metadata(), :post, "/tags", [], [], input, options, nil)
end
@doc """
Tests if a specified principal is authorized to perform an IoT action on a
specified resource.
Use this to test and debug the authorization behavior of devices that connect to
the IoT device gateway.
Requires permission to access the
[TestAuthorization](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def test_authorization(%Client{} = client, input, options \\ []) do
  # `clientId` is lifted out of the input map into the query string.
  {query, body} = Request.build_params([{"clientId", "clientId"}], input)
  Request.request_rest(client, metadata(), :post, "/test-authorization", query, [], body, options, nil)
end
@doc """
Tests a custom authorization behavior by invoking a specified custom authorizer.
Use this to test and debug the custom authorization behavior of devices that
connect to the IoT device gateway.
Requires permission to access the
[TestInvokeAuthorizer](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def test_invoke_authorizer(%Client{} = client, authorizer_name, input, options \\ []) do
  path = "/authorizer/#{AWS.Util.encode_uri(authorizer_name)}/test"
  # POST with no extra headers or query string.
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
@doc """
Transfers the specified certificate to the specified Amazon Web Services
account.
Requires permission to access the
[TransferCertificate](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
You can cancel the transfer until it is acknowledged by the recipient.
No notification is sent to the transfer destination's account. It is up to the
caller to notify the transfer target.
The certificate being transferred must not be in the ACTIVE state. You can use
the `UpdateCertificate` action to deactivate it.
The certificate must not have any policies attached to it. You can use the
`DetachPolicy` action to detach them.
"""
def transfer_certificate(%Client{} = client, certificate_id, input, options \\ []) do
  path = "/transfer-certificate/#{AWS.Util.encode_uri(certificate_id)}"
  # `targetAwsAccount` is lifted out of the input map into the query string.
  {query, body} = Request.build_params([{"targetAwsAccount", "targetAwsAccount"}], input)
  Request.request_rest(client, metadata(), :patch, path, query, [], body, options, nil)
end
@doc """
Removes the given tags (metadata) from the resource.
Requires permission to access the
[UntagResource](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
  # POST with no extra headers or query string.
  Request.request_rest(client, metadata(), :post, "/untag", [], [], input, options, nil)
end
@doc """
Configures or reconfigures the Device Defender audit settings for this account.
Settings include how audit notifications are sent and which audit checks are
enabled or disabled.
Requires permission to access the
[UpdateAccountAuditConfiguration](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_account_audit_configuration(%Client{} = client, input, options \\ []) do
  # PATCH with no extra headers or query string.
  Request.request_rest(client, metadata(), :patch, "/audit/configuration", [], [], input, options, nil)
end
@doc """
Updates a Device Defender audit suppression.
"""
def update_audit_suppression(%Client{} = client, input, options \\ []) do
  # PATCH with no extra headers or query string.
  Request.request_rest(client, metadata(), :patch, "/audit/suppressions/update", [], [], input, options, nil)
end
@doc """
Updates an authorizer.
Requires permission to access the
[UpdateAuthorizer](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_authorizer(%Client{} = client, authorizer_name, input, options \\ []) do
  path = "/authorizer/#{AWS.Util.encode_uri(authorizer_name)}"
  # PUT with no extra headers or query string.
  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Updates information about the billing group.
Requires permission to access the
[UpdateBillingGroup](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_billing_group(%Client{} = client, billing_group_name, input, options \\ []) do
  path = "/billing-groups/#{AWS.Util.encode_uri(billing_group_name)}"
  # PATCH with no extra headers or query string.
  Request.request_rest(client, metadata(), :patch, path, [], [], input, options, nil)
end
@doc """
Updates a registered CA certificate.
Requires permission to access the
[UpdateCACertificate](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_ca_certificate(%Client{} = client, certificate_id, input, options \\ []) do
  path = "/cacertificate/#{AWS.Util.encode_uri(certificate_id)}"

  # Status fields are lifted out of the input map into the query string.
  {query, body} =
    Request.build_params(
      [{"newAutoRegistrationStatus", "newAutoRegistrationStatus"}, {"newStatus", "newStatus"}],
      input
    )

  Request.request_rest(client, metadata(), :put, path, query, [], body, options, nil)
end
@doc """
Updates the status of the specified certificate.
This operation is idempotent.
Requires permission to access the
[UpdateCertificate](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
Certificates must be in the ACTIVE state to authenticate devices that use a
certificate to connect to IoT.
Within a few minutes of updating a certificate from the ACTIVE state to any
other state, IoT disconnects all devices that used that certificate to connect.
Devices cannot use a certificate that is not in the ACTIVE state to reconnect.
"""
def update_certificate(%Client{} = client, certificate_id, input, options \\ []) do
  path = "/certificates/#{AWS.Util.encode_uri(certificate_id)}"
  # `newStatus` is lifted out of the input map into the query string.
  {query, body} = Request.build_params([{"newStatus", "newStatus"}], input)
  Request.request_rest(client, metadata(), :put, path, query, [], body, options, nil)
end
@doc """
Updates a Device Defender detect custom metric.
Requires permission to access the
[UpdateCustomMetric](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_custom_metric(%Client{} = client, metric_name, input, options \\ []) do
  path = "/custom-metric/#{AWS.Util.encode_uri(metric_name)}"
  # PATCH with no extra headers or query string.
  Request.request_rest(client, metadata(), :patch, path, [], [], input, options, nil)
end
@doc """
Updates the definition for a dimension.
You cannot change the type of a dimension after it is created (you can delete it
and recreate it).
Requires permission to access the
[UpdateDimension](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_dimension(%Client{} = client, name, input, options \\ []) do
  path = "/dimensions/#{AWS.Util.encode_uri(name)}"
  # PATCH with no extra headers or query string.
  Request.request_rest(client, metadata(), :patch, path, [], [], input, options, nil)
end
@doc """
Updates values stored in the domain configuration.
Domain configurations for default endpoints can't be updated.
Requires permission to access the
[UpdateDomainConfiguration](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_domain_configuration(%Client{} = client, domain_configuration_name, input, options \\ []) do
  path = "/domainConfigurations/#{AWS.Util.encode_uri(domain_configuration_name)}"
  # PUT with no extra headers or query string.
  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Updates a dynamic thing group.
Requires permission to access the
[UpdateDynamicThingGroup](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_dynamic_thing_group(%Client{} = client, thing_group_name, input, options \\ []) do
  path = "/dynamic-thing-groups/#{AWS.Util.encode_uri(thing_group_name)}"
  # PATCH with no extra headers or query string.
  Request.request_rest(client, metadata(), :patch, path, [], [], input, options, nil)
end
@doc """
Updates the event configurations.
Requires permission to access the
[UpdateEventConfigurations](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_event_configurations(%Client{} = client, input, options \\ []) do
  # PATCH with no extra headers or query string.
  Request.request_rest(client, metadata(), :patch, "/event-configurations", [], [], input, options, nil)
end
@doc """
Updates the data for a fleet metric.
Requires permission to access the
[UpdateFleetMetric](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_fleet_metric(%Client{} = client, metric_name, input, options \\ []) do
  path = "/fleet-metric/#{AWS.Util.encode_uri(metric_name)}"
  # PATCH with no extra headers or query string.
  Request.request_rest(client, metadata(), :patch, path, [], [], input, options, nil)
end
@doc """
Updates the search configuration.
Requires permission to access the
[UpdateIndexingConfiguration](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_indexing_configuration(%Client{} = client, input, options \\ []) do
  # POST with no extra headers or query string.
  Request.request_rest(client, metadata(), :post, "/indexing/config", [], [], input, options, nil)
end
@doc """
Updates supported fields of the specified job.
Requires permission to access the
[UpdateJob](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_job(%Client{} = client, job_id, input, options \\ []) do
  path = "/jobs/#{AWS.Util.encode_uri(job_id)}"
  # `namespaceId` is lifted out of the input map into the query string.
  {query, body} = Request.build_params([{"namespaceId", "namespaceId"}], input)
  Request.request_rest(client, metadata(), :patch, path, query, [], body, options, nil)
end
@doc """
Updates the definition for the specified mitigation action.
Requires permission to access the
[UpdateMitigationAction](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_mitigation_action(%Client{} = client, action_name, input, options \\ []) do
  path = "/mitigationactions/actions/#{AWS.Util.encode_uri(action_name)}"
  # PATCH with no extra headers or query string.
  Request.request_rest(client, metadata(), :patch, path, [], [], input, options, nil)
end
@doc """
Updates a fleet provisioning template.
Requires permission to access the
[UpdateProvisioningTemplate](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_provisioning_template(%Client{} = client, template_name, input, options \\ []) do
  path = "/provisioning-templates/#{AWS.Util.encode_uri(template_name)}"
  # PATCH with no extra headers or query string.
  Request.request_rest(client, metadata(), :patch, path, [], [], input, options, nil)
end
@doc """
Updates a role alias.
Requires permission to access the
[UpdateRoleAlias](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_role_alias(%Client{} = client, role_alias, input, options \\ []) do
  path = "/role-aliases/#{AWS.Util.encode_uri(role_alias)}"
  # PUT with no extra headers or query string.
  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Updates a scheduled audit, including which checks are performed and how often
the audit takes place.
Requires permission to access the
[UpdateScheduledAudit](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_scheduled_audit(%Client{} = client, scheduled_audit_name, input, options \\ []) do
  path = "/audit/scheduledaudits/#{AWS.Util.encode_uri(scheduled_audit_name)}"
  # PATCH with no extra headers or query string.
  Request.request_rest(client, metadata(), :patch, path, [], [], input, options, nil)
end
@doc """
Updates a Device Defender security profile.
Requires permission to access the
[UpdateSecurityProfile](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_security_profile(%Client{} = client, security_profile_name, input, options \\ []) do
  path = "/security-profiles/#{AWS.Util.encode_uri(security_profile_name)}"
  # `expectedVersion` is lifted out of the input map into the query string.
  {query, body} = Request.build_params([{"expectedVersion", "expectedVersion"}], input)
  Request.request_rest(client, metadata(), :patch, path, query, [], body, options, nil)
end
@doc """
Updates an existing stream.
The stream version will be incremented by one.
Requires permission to access the
[UpdateStream](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_stream(%Client{} = client, stream_id, input, options \\ []) do
  path = "/streams/#{AWS.Util.encode_uri(stream_id)}"
  # PUT with no extra headers or query string.
  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Updates the data for a thing.
Requires permission to access the
[UpdateThing](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_thing(%Client{} = client, thing_name, input, options \\ []) do
  path = "/things/#{AWS.Util.encode_uri(thing_name)}"
  # PATCH with no extra headers or query string.
  Request.request_rest(client, metadata(), :patch, path, [], [], input, options, nil)
end
@doc """
Update a thing group.
Requires permission to access the
[UpdateThingGroup](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_thing_group(%Client{} = client, thing_group_name, input, options \\ []) do
  path = "/thing-groups/#{AWS.Util.encode_uri(thing_group_name)}"
  # PATCH with no extra headers or query string.
  Request.request_rest(client, metadata(), :patch, path, [], [], input, options, nil)
end
@doc """
Updates the groups to which the thing belongs.
Requires permission to access the
[UpdateThingGroupsForThing](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_thing_groups_for_thing(%Client{} = client, input, options \\ []) do
  path = "/thing-groups/updateThingGroupsForThing"
  # PUT with no extra headers or query string.
  Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
end
@doc """
Updates a topic rule destination.
You use this to change the status, endpoint URL, or confirmation URL of the
destination.
Requires permission to access the
[UpdateTopicRuleDestination](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def update_topic_rule_destination(%Client{} = client, input, options \\ []) do
  # PATCH with no extra headers or query string.
  Request.request_rest(client, metadata(), :patch, "/destinations", [], [], input, options, nil)
end
@doc """
Validates a Device Defender security profile behaviors specification.
Requires permission to access the
[ValidateSecurityProfileBehaviors](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awsiot.html#awsiot-actions-as-permissions)
action.
"""
def validate_security_profile_behaviors(%Client{} = client, input, options \\ []) do
  path = "/security-profile-behaviors/validate"
  # POST with no extra headers or query string.
  Request.request_rest(client, metadata(), :post, path, [], [], input, options, nil)
end
end
|
lib/aws/generated/iot.ex
| 0.816113
| 0.459986
|
iot.ex
|
starcoder
|
defmodule Dict.Behaviour do
@moduledoc """
This module makes it easier to create your own `Dict` compliant
module, by providing default implementations for some required functions.
Usage:
defmodule MyDict do
use Dict.Behaviour
# implement required functions (see below)
# override default implementations if needed
end
The client module must contain following functions: `size/1`, `fetch/2`,
`put/3`, `update/4`, `delete/2` and `reduce/3`. All of them are part of
the Dict behaviour, so no extra functions are actually required.
Based on these functions, `Dict.Behaviour` generates default implementations
for other functions such as `drop`, `take`, etc. All of the functions are
defined as overridable, so you can provide your own implementation if
needed.
If you implement `new/0` and `new/1` functions, you can also test your custom
module via `Dict` doctests:
defmodule MyDict do
def new(keywords // []) do
...
end
end
defmodule MyTests do
use ExUnit.Case
doctest Dict
defp dict_impl, do: MyDict
end
"""
# NOTE(review): this file uses the historical `//` default-argument syntax,
# so it targets a pre-1.0 Elixir compiler.
# Injects the default Dict implementations into the `use`-ing module.
# Every generated function is declared overridable so the host module can
# replace any of them with an optimized version.
defmacro __using__(_) do
quote do
@behaviour Dict
# Fetch `key`, returning `default` when absent instead of an error tuple.
def get(dict, key, default // nil) do
case fetch(dict, key) do
{ :ok, value } -> value
:error -> default
end
end
defoverridable get: 2, get: 3
# Like `fetch/2` but raises `KeyError` when the key is missing.
def fetch!(dict, key) do
case fetch(dict, key) do
{ :ok, value } -> value
:error -> raise(KeyError, key: key)
end
end
defoverridable fetch!: 2
# True when `key` is present; built on the required `fetch/2`.
def has_key?(dict, key) do
match? { :ok, _ }, fetch(dict, key)
end
defoverridable has_key?: 2
# Inserts `value` only when `key` is absent: `update/4` uses `value` as the
# initial and the identity function leaves an existing entry untouched.
def put_new(dict, key, value) do
update(dict, key, value, fn(v) -> v end)
end
defoverridable put_new: 3
# Deletes each listed key in turn; missing keys are silently ignored by `delete/2`.
def drop(dict, []), do: dict
def drop(dict, [key|keys]) do
drop(delete(dict, key), keys)
end
defoverridable drop: 2
# Builds a new dict containing only the listed keys.
# NOTE(review): relies on the host module defining `new/0`, which the
# moduledoc lists as optional — confirm callers always provide it.
def take(dict, keys) do
take(dict, keys, new)
end
defoverridable take: 2
defp take(_dict, [], acc), do: acc
defp take(dict, [key|keys], acc) do
case fetch(dict, key) do
{ :ok, value } -> take(dict, keys, put(acc, key, value))
:error -> take(dict, keys, acc)
end
end
# The reduce/reverse pattern keeps each accumulation O(1) per element.
def to_list(dict), do: reduce(dict, [], &[&1|&2]) |> Enum.reverse
defoverridable to_list: 1
def keys(dict), do: reduce(dict, [], fn({k, _}, acc) -> [k | acc] end) |> Enum.reverse
defoverridable keys: 1
def values(dict), do: reduce(dict, [], fn({_, v}, acc) -> [v | acc] end) |> Enum.reverse
defoverridable values: 1
# Two dicts are equal when they have the same size and every entry of the
# first is present (with the same value) in the second. The `throw` gives an
# early exit from `reduce/3` on the first mismatch.
def equal?(dict1, dict2) do
case size(dict1) == size(dict2) do
false -> false
true ->
try do
reduce(dict1, nil, fn({ k, v }, _acc) ->
unless fetch(dict2, k) == { :ok, v }, do: throw(:error)
end)
true
catch
:error -> false
end
end
end
defoverridable equal?: 2
# Merges `enumerable` into `dict`; on key conflict the default callback keeps
# the value from `enumerable` (last write wins).
def merge(dict, enumerable, callback // fn(_k, _v1, v2) -> v2 end) do
Enum.reduce(enumerable, dict, fn({key, value}, acc) ->
update(acc, key, value, fn(v1) -> callback.(key, v1, value) end)
end)
end
defoverridable merge: 2, merge: 3
end
end
end
|
lib/elixir/lib/dict/behaviour.ex
| 0.789721
| 0.642587
|
behaviour.ex
|
starcoder
|
defmodule OMG.Watcher.ExitProcessor do
@moduledoc """
Tracks and handles the exits from the child chain, their validity and challenges.
Keeps a state of exits that are in progress, updates it with news from the root chain contract, compares to the
state of the ledger (`OMG.State`), issues notifications as it finds suitable.
Should manage all kinds of exits allowed in the protocol and handle the interactions between them.
For functional logic and more info see `OMG.Watcher.ExitProcessor.Core`
NOTE: Note that all calls return `db_updates` and relay on the caller to do persistence.
"""
alias OMG.Block
alias OMG.DB
alias OMG.DB.Models.PaymentExitInfo
alias OMG.Eth
alias OMG.Eth.EthereumHeight
alias OMG.Eth.RootChain
alias OMG.State
alias OMG.State.Transaction
alias OMG.Utxo
alias OMG.Watcher.ExitProcessor
alias OMG.Watcher.ExitProcessor.Core
alias OMG.Watcher.ExitProcessor.ExitInfo
alias OMG.Watcher.ExitProcessor.StandardExit
alias OMG.Watcher.ExitProcessor.Tools
use OMG.Utils.LoggerExt
require Utxo
@timeout 60_000
### Client
@doc """
Starts the `GenServer` process with options. For documentation of the options see `init/1`
"""
def start_link(init_args),
  do: GenServer.start_link(__MODULE__, init_args, name: __MODULE__)
@doc """
Accepts events and processes them in the state - new exits are tracked.
Returns `db_updates` to be sent to `OMG.DB` by the caller
"""
# An empty event list short-circuits without a server round-trip.
def new_exits([]), do: {:ok, []}
def new_exits(exit_events), do: GenServer.call(__MODULE__, {:new_exits, exit_events}, @timeout)
@doc """
Accepts events and processes them in the state - new in flight exits are tracked.
Returns `db_updates` to be sent to `OMG.DB` by the caller
"""
# An empty event list short-circuits without a server round-trip.
def new_in_flight_exits([]), do: {:ok, []}

def new_in_flight_exits(ife_started_events),
  do: GenServer.call(__MODULE__, {:new_in_flight_exits, ife_started_events}, @timeout)
@doc """
Accepts events and processes them in the state - finalized exits are untracked _if valid_ otherwise raises alert
Returns `db_updates` to be sent to `OMG.DB` by the caller
"""
# An empty event list short-circuits without a server round-trip.
def finalize_exits([]), do: {:ok, []}
def finalize_exits(events), do: GenServer.call(__MODULE__, {:finalize_exits, events}, @timeout)
@doc """
Accepts events and processes them in the state - new piggybacks are tracked, if invalid raises an alert
Returns `db_updates` to be sent to `OMG.DB` by the caller
"""
# An empty event list short-circuits without a server round-trip.
def piggyback_exits([]), do: {:ok, []}
def piggyback_exits(events), do: GenServer.call(__MODULE__, {:piggyback_exits, events}, @timeout)
@doc """
Accepts events and processes them in the state - challenged exits are untracked
Returns `db_updates` to be sent to `OMG.DB` by the caller
"""
# An empty event list short-circuits without a server round-trip.
def challenge_exits([]), do: {:ok, []}
def challenge_exits(events), do: GenServer.call(__MODULE__, {:challenge_exits, events}, @timeout)
@doc """
Accepts events and processes them in the state.
Marks the challenged IFE as non-canonical and persists information about the competitor and its age.
Competitors are stored for future use (i.e. to challenge an in flight exit).
Returns `db_updates` to be sent to `OMG.DB` by the caller
"""
# An empty event list short-circuits without a server round-trip.
def new_ife_challenges([]), do: {:ok, []}

def new_ife_challenges(events),
  do: GenServer.call(__MODULE__, {:new_ife_challenges, events}, @timeout)
@doc """
Accepts events and processes them in state.
Marks the IFE as canonical and persists information about the inclusion age as responded with in the contract.
Returns `db_updates` to be sent to `OMG.DB` by the caller
"""
# An empty event list short-circuits without a server round-trip.
def respond_to_in_flight_exits_challenges([]), do: {:ok, []}

def respond_to_in_flight_exits_challenges(events),
  do: GenServer.call(__MODULE__, {:respond_to_in_flight_exits_challenges, events}, @timeout)
@doc """
Accepts events and processes them in state.
Returns `db_updates` to be sent to `OMG.DB` by the caller
"""
# An empty event list short-circuits without a server round-trip.
def challenge_piggybacks([]), do: {:ok, []}

def challenge_piggybacks(events),
  do: GenServer.call(__MODULE__, {:challenge_piggybacks, events}, @timeout)
@doc """
Accepts events and processes them in state - finalized outputs are applied to the state.
Returns `db_updates` to be sent to `OMG.DB` by the caller
"""
# An empty event list short-circuits without a server round-trip.
def finalize_in_flight_exits([]), do: {:ok, []}

def finalize_in_flight_exits(events),
  do: GenServer.call(__MODULE__, {:finalize_in_flight_exits, events}, @timeout)
@doc """
Checks validity of all exit-related events and returns the list of actionable items.
Works with `OMG.State` to discern validity.
This function may also update some internal caches to make subsequent calls not redo the work,
but under unchanged conditions, it should have unchanged behavior from POV of an outside caller.
"""
# Single head with a default keeps both check_validity/0 and check_validity/1.
def check_validity(timeout \\ @timeout),
  do: GenServer.call(__MODULE__, :check_validity, timeout)
@doc """
Returns a map of requested in flight exits, keyed by transaction hash
"""
@spec get_active_in_flight_exits() :: {:ok, Core.in_flight_exits_response_t()}
def get_active_in_flight_exits(),
  do: GenServer.call(__MODULE__, :get_active_in_flight_exits, @timeout)
@doc """
Returns all information required to produce a transaction to the root chain contract to present a competitor for
a non-canonical in-flight exit
"""
@spec get_competitor_for_ife(binary()) ::
{:ok, ExitProcessor.Canonicity.competitor_data_t()}
| {:error, :competitor_not_found}
| {:error, :no_viable_competitor_found}
def get_competitor_for_ife(txbytes) do
GenServer.call(__MODULE__, {:get_competitor_for_ife, txbytes}, @timeout)
end
@doc """
Returns all information required to produce a transaction to the root chain contract to present a proof of canonicity
for a challenged in-flight exit
"""
@spec prove_canonical_for_ife(binary()) ::
{:ok, ExitProcessor.Canonicity.prove_canonical_data_t()} | {:error, :no_viable_canonical_proof_found}
def prove_canonical_for_ife(txbytes) do
GenServer.call(__MODULE__, {:prove_canonical_for_ife, txbytes}, @timeout)
end
@doc """
Returns all information required to challenge an invalid input piggyback
"""
@spec get_input_challenge_data(Transaction.Signed.tx_bytes(), Transaction.input_index_t()) ::
{:ok, ExitProcessor.Piggyback.input_challenge_data()}
| {:error, ExitProcessor.Piggyback.piggyback_challenge_data_error()}
def get_input_challenge_data(txbytes, input_index) do
GenServer.call(__MODULE__, {:get_input_challenge_data, txbytes, input_index}, @timeout)
end
@doc """
Returns all information required to challenge an invalid output piggyback
"""
@spec get_output_challenge_data(Transaction.Signed.tx_bytes(), Transaction.input_index_t()) ::
{:ok, ExitProcessor.Piggyback.output_challenge_data()}
| {:error, ExitProcessor.Piggyback.piggyback_challenge_data_error()}
def get_output_challenge_data(txbytes, output_index) do
GenServer.call(__MODULE__, {:get_output_challenge_data, txbytes, output_index}, @timeout)
end
@doc """
Returns challenge for an invalid standard exit
"""
@spec create_challenge(Utxo.Position.t()) ::
{:ok, StandardExit.Challenge.t()} | {:error, :utxo_not_spent | :exit_not_found}
def create_challenge(exiting_utxo_pos) do
GenServer.call(__MODULE__, {:create_challenge, exiting_utxo_pos}, @timeout)
end
### Server
use GenServer
@doc """
Initializes the state of the `ExitProcessor`'s `GenServer`.
Reads the exit data from `OMG.DB`.
Options:
- `exit_processor_sla_margin`: number of blocks after exit start before it's considered late (and potentially:
unchallenged)
- `exit_processor_sla_margin_forced`: if `true` will override the check of `exit_processor_sla_margin` against
`min_exit_period_seconds`
- `min_exit_period_seconds`: should reflect the value of this parameter for the specific child chain watched,
- `ethereum_block_time_seconds`: just to relate blocks to seconds for the `exit_processor_sla_margin` check
- `metrics_collection_interval`: how often are the metrics sent to `telemetry` (in milliseconds)
"""
def init(
exit_processor_sla_margin: exit_processor_sla_margin,
exit_processor_sla_margin_forced: exit_processor_sla_margin_forced,
metrics_collection_interval: metrics_collection_interval,
min_exit_period_seconds: min_exit_period_seconds,
ethereum_block_time_seconds: ethereum_block_time_seconds,
child_block_interval: child_block_interval
) do
{:ok, db_exits} = PaymentExitInfo.all_exit_infos()
{:ok, db_ifes} = PaymentExitInfo.all_in_flight_exits_infos()
{:ok, db_competitors} = DB.competitors_info()
:ok =
Core.check_sla_margin(
exit_processor_sla_margin,
exit_processor_sla_margin_forced,
min_exit_period_seconds,
ethereum_block_time_seconds
)
{:ok, processor} =
Core.init(
db_exits,
db_ifes,
db_competitors,
min_exit_period_seconds,
child_block_interval,
exit_processor_sla_margin
)
{:ok, _} = :timer.send_interval(metrics_collection_interval, self(), :send_metrics)
_ = Logger.info("Initializing with: #{inspect(processor)}")
{:ok, processor}
end
@doc """
See `new_exits/1`. Flow:
- takes a list of standard exit start events from the contract
- fetches the currently observed exit status in the contract (to decide if exits are "inactive on recognition", which
helps cover the case when the Watcher is syncing up)
- updates the `ExitProcessor`'s state
- returns `db_updates`
"""
def handle_call({:new_exits, exits}, _from, state) do
_ = if not Enum.empty?(exits), do: Logger.info("Recognized #{Enum.count(exits)} exits: #{inspect(exits)}")
{:ok, exit_contract_statuses} = Eth.RootChain.get_standard_exit_structs(get_in(exits, [Access.all(), :exit_id]))
exit_maps =
exits
|> Task.async_stream(
fn exit_event ->
put_timestamp_and_sft(exit_event, state.min_exit_period_seconds, state.child_block_interval)
end,
timeout: 50_000,
on_timeout: :exit,
max_concurrency: System.schedulers_online() * 2
)
|> Enum.map(fn {:ok, result} -> result end)
{new_state, db_updates} = Core.new_exits(state, exit_maps, exit_contract_statuses)
{:reply, {:ok, db_updates}, new_state}
end
@doc """
See `new_in_flight_exits/1`. Flow:
- takes a list of IFE exit start events from the contract
- fetches the currently observed exit status in the contract (to decide if exits are "inactive on recognition", which
helps cover the case when the Watcher is syncing up)
- updates the `ExitProcessor`'s state
- returns `db_updates`
"""
def handle_call({:new_in_flight_exits, exits}, _from, state) do
_ = if not Enum.empty?(exits), do: Logger.info("Recognized #{Enum.count(exits)} in-flight exits: #{inspect(exits)}")
contract_ife_ids =
Enum.map(exits, fn %{call_data: %{in_flight_tx: txbytes}} ->
ExPlasma.InFlightExit.txbytes_to_id(txbytes)
end)
# Prepare events data for internal bus
:ok =
exits
|> Enum.map(fn %{call_data: %{input_utxos_pos: inputs}} = event ->
{event, inputs}
end)
|> Tools.to_bus_events_data()
|> publish_internal_bus_events("InFlightExitStarted")
{:ok, statuses} = Eth.RootChain.get_in_flight_exit_structs(contract_ife_ids)
ife_contract_statuses = Enum.zip(statuses, contract_ife_ids)
{new_state, db_updates} = Core.new_in_flight_exits(state, exits, ife_contract_statuses)
{:reply, {:ok, db_updates}, new_state}
end
@doc """
See `finalize_exits/1`. Flow:
- takes a list of standard exit finalization events from the contract
- discovers the `OMG.State`'s native key for the finalizing exits (`utxo_pos`) (`Core.exit_key_by_exit_id/2`)
- marks as spent these UTXOs in `OMG.State` expecting it to tell which of those were valid finalizations (UTXOs exist)
- reflects this result in the `ExitProcessor`'s state
- returns `db_updates`, concatenated with those related to the call to `OMG.State`
"""
def handle_call({:finalize_exits, exits}, _from, state) do
_ = if not Enum.empty?(exits), do: Logger.info("Recognized #{Enum.count(exits)} finalizations: #{inspect(exits)}")
{:ok, db_updates_from_state, validities} =
exits |> Enum.map(&Core.exit_key_by_exit_id(state, &1.exit_id)) |> State.exit_utxos()
{new_state, db_updates} = Core.finalize_exits(state, validities)
{:reply, {:ok, db_updates ++ db_updates_from_state}, new_state}
end
@doc """
See `piggyback_exits/1`. Flow:
- takes a list of IFE piggybacking events from the contract
- updates the `ExitProcessor`'s state
- returns `db_updates`
"""
def handle_call({:piggyback_exits, exits}, _from, state) do
_ = if not Enum.empty?(exits), do: Logger.info("Recognized #{Enum.count(exits)} piggybacks: #{inspect(exits)}")
{new_state, db_updates} = Core.new_piggybacks(state, exits)
:ok =
exits
|> Tools.to_bus_events_data()
|> publish_internal_bus_events("InFlightTxOutputPiggybacked")
{:reply, {:ok, db_updates}, new_state}
end
@doc """
See `challenge_exits/1`. Flow:
- takes a list of standard exit challenge events from the contract
- updates the `ExitProcessor`'s state
- returns `db_updates`
"""
def handle_call({:challenge_exits, exits}, _from, state) do
_ = if not Enum.empty?(exits), do: Logger.info("Recognized #{Enum.count(exits)} challenges: #{inspect(exits)}")
{new_state, db_updates} = Core.challenge_exits(state, exits)
{:reply, {:ok, db_updates}, new_state}
end
@doc """
See `new_ife_challenges/1`. Flow:
- takes a list of IFE exit canonicity challenge events from the contract
- updates the `ExitProcessor`'s state
- returns `db_updates`
"""
def handle_call({:new_ife_challenges, challenges}, _from, state) do
_ =
if not Enum.empty?(challenges),
do: Logger.info("Recognized #{Enum.count(challenges)} ife challenges: #{inspect(challenges)}")
{new_state, db_updates} = Core.new_ife_challenges(state, challenges)
{:reply, {:ok, db_updates}, new_state}
end
@doc """
See `challenge_piggybacks/1`. Flow:
- takes a list of IFE piggyback challenge events from the contract
- updates the `ExitProcessor`'s state
- returns `db_updates`
"""
def handle_call({:challenge_piggybacks, challenges}, _from, state) do
_ =
if not Enum.empty?(challenges),
do: Logger.info("Recognized #{Enum.count(challenges)} piggyback challenges: #{inspect(challenges)}")
{new_state, db_updates} = Core.challenge_piggybacks(state, challenges)
{:reply, {:ok, db_updates}, new_state}
end
@doc """
See `respond_to_in_flight_exits_challenges/1`. Flow:
- takes a list of IFE exit canonicity challenge response events from the contract
- updates the `ExitProcessor`'s state
- returns `db_updates`
"""
def handle_call({:respond_to_in_flight_exits_challenges, responds}, _from, state) do
_ =
if not Enum.empty?(responds),
do: Logger.info("Recognized #{Enum.count(responds)} response to IFE challenge: #{inspect(responds)}")
{new_state, db_updates} = Core.respond_to_in_flight_exits_challenges(state, responds)
{:reply, {:ok, db_updates}, new_state}
end
@doc """
See `finalize_in_flight_exits/1`. Flow:
- takes a list of IFE exit finalization events from the contract
- pulls current information on IFE transaction inclusion
- discovers the `OMG.State`'s native key for the finalizing exits (`utxo_pos`)
(`Core.prepare_utxo_exits_for_in_flight_exit_finalizations/2`)
- marks as spent these UTXOs in `OMG.State` expecting it to tell which of those were valid finalizations (UTXOs exist)
- reflects this result in the `ExitProcessor`'s state
- returns `db_updates`, concatenated with those related to the call to `OMG.State`
"""
def handle_call({:finalize_in_flight_exits, finalizations}, _from, state) do
_ = Logger.info("Recognized #{Enum.count(finalizations)} ife finalizations: #{inspect(finalizations)}")
# necessary, so that the processor knows the current state of inclusion of exiting IFE txs
state2 = update_with_ife_txs_from_blocks(state)
{:ok, exiting_positions, events_with_utxos} =
Core.prepare_utxo_exits_for_in_flight_exit_finalizations(state2, finalizations)
# NOTE: it's not straightforward to track from utxo position returned when exiting utxo in State to ife id
# See issue #671 https://github.com/omisego/elixir-omg/issues/671
{invalidities, state_db_updates} =
Enum.reduce(exiting_positions, {%{}, []}, &collect_invalidities_and_state_db_updates/2)
{:ok, state3, db_updates} = Core.finalize_in_flight_exits(state2, finalizations, invalidities)
:ok =
events_with_utxos
|> Tools.to_bus_events_data()
|> publish_internal_bus_events("InFlightExitOutputWithdrawn")
{:reply, {:ok, state_db_updates ++ db_updates}, state3}
end
@doc """
See `check_validity/0`. Flow:
- pulls current information on IFE transaction inclusion
- gets a list of interesting UTXOs to check for existence in `OMG.State`
- combines this information to discover the state of all the exits to report (mainly byzantine events)
"""
def handle_call(:check_validity, _from, state) do
new_state = update_with_ife_txs_from_blocks(state)
response =
%ExitProcessor.Request{}
|> fill_request_with_spending_data(new_state)
|> Core.check_validity(new_state)
{:reply, response, new_state}
end
@doc """
See `get_active_in_flight_exits/0`.
"""
def handle_call(:get_active_in_flight_exits, _from, state) do
{:reply, {:ok, Core.get_active_in_flight_exits(state)}, state}
end
@doc """
See `get_competitor_for_ife/1`. Flow:
- pulls current information on IFE transaction inclusion
- gets a list of interesting UTXOs to check for existence in `OMG.State`
- combines this information to compose the challenge data
"""
def handle_call({:get_competitor_for_ife, txbytes}, _from, state) do
# TODO: run_status_gets and getting all non-existent UTXO positions imaginable can be optimized out heavily
# only the UTXO positions being inputs to `txbytes` must be looked at, but it becomes problematic as
# txbytes can be invalid so we'd need a with here...
new_state = update_with_ife_txs_from_blocks(state)
competitor_result =
%ExitProcessor.Request{}
|> fill_request_with_spending_data(new_state)
|> Core.get_competitor_for_ife(new_state, txbytes)
{:reply, competitor_result, new_state}
end
@doc """
See `prove_canonical_for_ife/1`. Flow:
- pulls current information on IFE transaction inclusion
- gets a list of interesting UTXOs to check for existence in `OMG.State`
- combines this information to compose the challenge data
"""
def handle_call({:prove_canonical_for_ife, txbytes}, _from, state) do
new_state = update_with_ife_txs_from_blocks(state)
canonicity_result = Core.prove_canonical_for_ife(new_state, txbytes)
{:reply, canonicity_result, new_state}
end
@doc """
See `get_input_challenge_data/2`. Flow:
- gets a list of interesting UTXOs to check for existence in `OMG.State`
- combines this information to compose the challenge data
"""
def handle_call({:get_input_challenge_data, txbytes, input_index}, _from, state) do
response =
%ExitProcessor.Request{}
|> fill_request_with_spending_data(state)
|> Core.get_input_challenge_data(state, txbytes, input_index)
{:reply, response, state}
end
@doc """
See `get_output_challenge_data/2`. Flow:
- pulls current information on IFE transaction inclusion
- gets a list of interesting UTXOs to check for existence in `OMG.State`
- combines this information to compose the challenge data
"""
def handle_call({:get_output_challenge_data, txbytes, output_index}, _from, state) do
new_state = update_with_ife_txs_from_blocks(state)
response =
%ExitProcessor.Request{}
|> fill_request_with_spending_data(new_state)
|> Core.get_output_challenge_data(new_state, txbytes, output_index)
{:reply, response, new_state}
end
@doc """
See `create_challenge/1`. Flow:
- leverages `OMG.State` to quickly learn if the exiting UTXO exists or was spent
- pulls some additional data from `OMG.DB`, if needed
- combines this information to compose the challenge data
"""
def handle_call({:create_challenge, exiting_utxo_pos}, _from, state) do
request = %ExitProcessor.Request{se_exiting_pos: exiting_utxo_pos}
exiting_utxo_exists = State.utxo_exists?(exiting_utxo_pos)
response =
with {:ok, request} <- Core.determine_standard_challenge_queries(request, state, exiting_utxo_exists),
do:
request
|> fill_request_with_standard_challenge_data()
|> Core.create_challenge(state)
{:reply, response, state}
end
# Periodic tick set up by `:timer.send_interval/3` in `init/1`: emits the whole processor
# state as :telemetry metadata under the `[:process, __MODULE__]` event.
def handle_info(:send_metrics, state) do
  :ok = :telemetry.execute([:process, __MODULE__], %{}, state)
  {:noreply, state}
end
# Resolves the blocks that spent the standard-exit positions requested in
# `se_spending_blocks_to_get`, storing them under `se_spending_blocks_result`.
defp fill_request_with_standard_challenge_data(%ExitProcessor.Request{se_spending_blocks_to_get: positions} = request) do
  %ExitProcessor.Request{request | se_spending_blocks_result: do_get_spending_blocks(positions)}
end
# based on the exits being processed, fills the request structure with data required to process queries
@spec fill_request_with_spending_data(ExitProcessor.Request.t(), Core.t()) :: ExitProcessor.Request.t()
defp fill_request_with_spending_data(request, state) do
  request
  # fetch current root-chain height / child block number (only fills when not yet set)
  |> run_status_gets()
  # ask `Core` which UTXO positions matter, then answer existence from `OMG.State`
  |> Core.determine_utxo_existence_to_get(state)
  |> get_utxo_existence()
  # ask `Core` which spends matter, then fetch the spending blocks from `OMG.DB`
  |> Core.determine_spends_to_get(state)
  |> get_spending_blocks()
end
# based on in-flight exiting transactions, updates the state with witnesses of those transactions' inclusions in block
# Returns the (possibly updated) state; called before any query that depends on IFE inclusion.
@spec update_with_ife_txs_from_blocks(Core.t()) :: Core.t()
defp update_with_ife_txs_from_blocks(state) do
  prepared_request =
    %ExitProcessor.Request{}
    |> run_status_gets()
    # To find if IFE was included, see first if its inputs were spent.
    |> Core.determine_ife_input_utxos_existence_to_get(state)
    |> get_ife_input_utxo_existence()
    # Next, check by what transactions they were spent.
    |> Core.determine_ife_spends_to_get(state)
    |> get_ife_input_spending_blocks()

  # Compare found txes with ife.tx.
  # If equal, persist information about position.
  Core.find_ifes_in_blocks(state, prepared_request)
end
# Fetches the current root-chain height and child-chain block number exactly once per request:
# the head only matches a request where both fields are still `nil`.
defp run_status_gets(%ExitProcessor.Request{eth_height_now: nil, blknum_now: nil} = request) do
  {:ok, eth_height_now} = EthereumHeight.get()
  {blknum_now, _} = State.get_status()
  _ = Logger.debug("eth_height_now: #{inspect(eth_height_now)}, blknum_now: #{inspect(blknum_now)}")
  %{request | eth_height_now: eth_height_now, blknum_now: blknum_now}
end
# Answers the request's standard `utxos_to_check` against `OMG.State`.
defp get_utxo_existence(%ExitProcessor.Request{utxos_to_check: positions} = request),
  do: %{request | utxo_exists_result: do_utxo_exists?(positions)}

# Same, but for the IFE-input variant of the request fields.
defp get_ife_input_utxo_existence(%ExitProcessor.Request{ife_input_utxos_to_check: positions} = request),
  do: %{request | ife_input_utxo_exists_result: do_utxo_exists?(positions)}

# Positional existence check — the result list lines up index-by-index with `positions`.
defp do_utxo_exists?(positions) do
  result = Enum.map(positions, &State.utxo_exists?/1)
  _ = Logger.debug("utxos_to_check: #{inspect(positions)}, utxo_exists_result: #{inspect(result)}")
  result
end
# Resolves the blocks spending the positions in `spends_to_get` into `blocks_result`.
defp get_spending_blocks(%ExitProcessor.Request{spends_to_get: positions} = request) do
  %{request | blocks_result: do_get_spending_blocks(positions)}
end

# Same, but for the IFE-input variant of the request fields.
defp get_ife_input_spending_blocks(%ExitProcessor.Request{ife_input_spends_to_get: positions} = request) do
  %{request | ife_input_spending_blocks_result: do_get_spending_blocks(positions)}
end

# For each spent position: look up the spending block's number in `OMG.DB`, then fetch
# and decode the full blocks.
defp do_get_spending_blocks(spent_positions_to_get) do
  blknums = Enum.map(spent_positions_to_get, &do_get_spent_blknum/1)
  _ = Logger.debug("spends_to_get: #{inspect(spent_positions_to_get)}, spent_blknum_result: #{inspect(blknums)}")

  blknums
  |> Core.handle_spent_blknum_result(spent_positions_to_get)
  |> do_get_blocks()
end
# Fetches blocks by number from `OMG.DB` (number -> hash -> block) and decodes each
# from its DB representation.
defp do_get_blocks(blknums) do
  {:ok, hashes} = OMG.DB.block_hashes(blknums)
  _ = Logger.debug("blknums: #{inspect(blknums)}, hashes: #{inspect(hashes)}")
  {:ok, blocks} = OMG.DB.blocks(hashes)
  _ = Logger.debug("blocks_result: #{inspect(blocks)}")
  Enum.map(blocks, &Block.from_db_value/1)
end

# Looks up the number of the block that spent `position`; the raw `OMG.DB.spent_blknum/1`
# return is unwrapped later by `Core.handle_spent_blknum_result/2`.
defp do_get_spent_blknum(position) do
  position |> Utxo.Position.to_input_db_key() |> OMG.DB.spent_blknum()
end
# Reducer over `{ife_id, exiting_positions}` pairs: spends the positions in `OMG.State`,
# logging any invalid finalizations, and accumulates per-IFE invalidities together with
# the `OMG.State` db updates.
defp collect_invalidities_and_state_db_updates(
       {ife_id, exiting_positions},
       {invalidities_by_ife_id, state_db_updates}
     ) do
  {:ok, exits_state_updates, {_, invalidities}} = State.exit_utxos(exiting_positions)

  _ =
    if not Enum.empty?(invalidities), do: Logger.warn("Invalid in-flight exit finalization: #{inspect(invalidities)}")

  invalidities_by_ife_id = Map.put(invalidities_by_ife_id, ife_id, invalidities)
  state_db_updates = exits_state_updates ++ state_db_updates
  {invalidities_by_ife_id, state_db_updates}
end
@spec put_timestamp_and_sft(map(), pos_integer(), pos_integer()) :: map()
# Enriches a standard-exit event with `:block_timestamp` (timestamp of the root-chain block
# the exit started in) and `:scheduled_finalization_time` (via `ExitInfo.calculate_sft/5`).
defp put_timestamp_and_sft(
       %{eth_height: eth_height, call_data: %{utxo_pos: utxo_pos_enc}} = exit_event,
       min_exit_period_seconds,
       child_block_interval
     ) do
  {:utxo_position, blknum, _, _} = Utxo.Position.decode!(utxo_pos_enc)
  # timestamp of the child-chain block that created the exiting UTXO
  {_block_hash, utxo_creation_block_timestamp} = RootChain.blocks(blknum)
  {:ok, exit_block_timestamp} = Eth.get_block_timestamp_by_number(eth_height)

  {:ok, scheduled_finalization_time} =
    ExitInfo.calculate_sft(
      blknum,
      exit_block_timestamp,
      utxo_creation_block_timestamp,
      min_exit_period_seconds,
      child_block_interval
    )

  exit_event
  |> Map.put(:scheduled_finalization_time, scheduled_finalization_time)
  |> Map.put(:block_timestamp, exit_block_timestamp)
end
# No-op for an empty event list; otherwise broadcasts the events on the internal bus
# under the `{:watcher, topic}` topic.
defp publish_internal_bus_events([], _topic), do: :ok

defp publish_internal_bus_events(events_data, topic) when is_list(events_data) and is_binary(topic) do
  event = OMG.Bus.Event.new({:watcher, topic}, :data, events_data)
  OMG.Bus.direct_local_broadcast(event)
end
end
|
apps/omg_watcher/lib/omg_watcher/exit_processor.ex
| 0.864067
| 0.455441
|
exit_processor.ex
|
starcoder
|
defmodule AWS.Amplify do
@moduledoc """
Amplify enables developers to develop and deploy cloud-powered mobile and
web apps. The Amplify Console provides a continuous delivery and hosting
service for web applications. For more information, see the [Amplify
Console User
Guide](https://docs.aws.amazon.com/amplify/latest/userguide/welcome.html).
The Amplify Framework is a comprehensive set of SDKs, libraries, tools, and
documentation for client app development. For more information, see the
[Amplify Framework.](https://docs.amplify.aws/)
"""
@doc """
Creates a new Amplify app.
"""
def create_app(client, input, options \\ []) do
path_ = "/apps"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a new backend environment for an Amplify app.
"""
def create_backend_environment(client, app_id, input, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/backendenvironments"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a new branch for an Amplify app.
"""
def create_branch(client, app_id, input, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/branches"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a deployment for a manually deployed Amplify app. Manually deployed
apps are not connected to a repository.
"""
def create_deployment(client, app_id, branch_name, input, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/branches/#{URI.encode(branch_name)}/deployments"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a new domain association for an Amplify app. This action associates
a custom domain with the Amplify app
"""
def create_domain_association(client, app_id, input, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/domains"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a new webhook on an Amplify app.
"""
def create_webhook(client, app_id, input, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/webhooks"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Deletes an existing Amplify app specified by an app ID.
"""
def delete_app(client, app_id, input, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a backend environment for an Amplify app.
"""
def delete_backend_environment(client, app_id, environment_name, input, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/backendenvironments/#{URI.encode(environment_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a branch for an Amplify app.
"""
def delete_branch(client, app_id, branch_name, input, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/branches/#{URI.encode(branch_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a domain association for an Amplify app.
"""
def delete_domain_association(client, app_id, domain_name, input, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/domains/#{URI.encode(domain_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a job for a branch of an Amplify app.
"""
def delete_job(client, app_id, branch_name, job_id, input, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/branches/#{URI.encode(branch_name)}/jobs/#{URI.encode(job_id)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a webhook.
"""
def delete_webhook(client, webhook_id, input, options \\ []) do
path_ = "/webhooks/#{URI.encode(webhook_id)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Returns the website access logs for a specific time range using a presigned
URL.
"""
def generate_access_logs(client, app_id, input, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/accesslogs"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Returns an existing Amplify app by appID.
"""
def get_app(client, app_id, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns the artifact info that corresponds to an artifact id.
"""
def get_artifact_url(client, artifact_id, options \\ []) do
path_ = "/artifacts/#{URI.encode(artifact_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns a backend environment for an Amplify app.
"""
def get_backend_environment(client, app_id, environment_name, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/backendenvironments/#{URI.encode(environment_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns a branch for an Amplify app.
"""
def get_branch(client, app_id, branch_name, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/branches/#{URI.encode(branch_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns the domain information for an Amplify app.
"""
def get_domain_association(client, app_id, domain_name, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/domains/#{URI.encode(domain_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns a job for a branch of an Amplify app.
"""
def get_job(client, app_id, branch_name, job_id, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/branches/#{URI.encode(branch_name)}/jobs/#{URI.encode(job_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns the webhook information that corresponds to a specified webhook ID.
"""
def get_webhook(client, webhook_id, options \\ []) do
path_ = "/webhooks/#{URI.encode(webhook_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns a list of the existing Amplify apps.
"""
def list_apps(client, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/apps"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns a list of artifacts for a specified app, branch, and job.
"""
def list_artifacts(client, app_id, branch_name, job_id, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/branches/#{URI.encode(branch_name)}/jobs/#{URI.encode(job_id)}/artifacts"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Lists the backend environments for an Amplify app.
"""
def list_backend_environments(client, app_id, environment_name \\ nil, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/backendenvironments"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
query_ = if !is_nil(environment_name) do
[{"environmentName", environment_name} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Lists the branches of an Amplify app.
"""
def list_branches(client, app_id, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/branches"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns the domain associations for an Amplify app.
"""
def list_domain_associations(client, app_id, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/domains"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Lists the jobs for a branch of an Amplify app.
"""
def list_jobs(client, app_id, branch_name, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/branches/#{URI.encode(branch_name)}/jobs"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns a list of tags for a specified Amazon Resource Name (ARN).
"""
def list_tags_for_resource(client, resource_arn, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns a list of webhooks for an Amplify app.
"""
def list_webhooks(client, app_id, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/webhooks"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Starts a deployment for a manually deployed app. Manually deployed apps are
not connected to a repository.
"""
def start_deployment(client, app_id, branch_name, input, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/branches/#{URI.encode(branch_name)}/deployments/start"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Starts a new job for a branch of an Amplify app.
"""
def start_job(client, app_id, branch_name, input, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/branches/#{URI.encode(branch_name)}/jobs"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Stops a job that is in progress for a branch of an Amplify app.
"""
def stop_job(client, app_id, branch_name, job_id, input, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}/branches/#{URI.encode(branch_name)}/jobs/#{URI.encode(job_id)}/stop"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Tags the resource with a tag key and value.
"""
def tag_resource(client, resource_arn, input, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Untags a resource with a specified Amazon Resource Name (ARN).
"""
def untag_resource(client, resource_arn, input, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
{query_, input} =
[
{"tagKeys", "tagKeys"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Updates an existing Amplify app.
"""
def update_app(client, app_id, input, options \\ []) do
path_ = "/apps/#{URI.encode(app_id)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Updates a branch for an Amplify app.
"""
def update_branch(client, app_id, branch_name, input, options \\ []) do
  url_path = "/apps/#{URI.encode(app_id)}/branches/#{URI.encode(branch_name)}"

  # POST with no headers and no query parameters; `input` becomes the JSON body.
  request(client, :post, url_path, [], [], input, options, nil)
end
@doc """
Updates an existing domain association for an Amplify app.
"""
def update_domain_association(client, app_id, domain_name, input, options \\ []) do
  # This is the UpdateDomainAssociation operation (POST to an existing
  # domain's path). The previous doc wrongly said "Creates a new domain
  # association", which is a different operation.
  path_ = "/apps/#{URI.encode(app_id)}/domains/#{URI.encode(domain_name)}"
  headers = []
  query_ = []
  request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Updates a webhook.
"""
def update_webhook(client, webhook_id, input, options \\ []) do
  url_path = "/webhooks/#{URI.encode(webhook_id)}"

  # POST with no headers and no query parameters; `input` becomes the JSON body.
  request(client, :post, url_path, [], [], input, options, nil)
end
# Builds, signs (SigV4) and performs a single Amplify API request.
#
# `success_status_code` of `nil` selects the default success handling in
# `perform_request/6` (200/202/204 accepted); a non-nil value requires that
# exact status code.
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
        {:ok, Poison.Parser.t(), Poison.Response.t()}
        | {:error, Poison.Parser.t()}
        | {:error, HTTPoison.Error.t()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
  client = %{client | service: "amplify"}
  host = build_host("amplify", client)

  url =
    host
    |> build_url(path, client)
    |> add_query(query)

  additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
  headers = AWS.Request.add_headers(additional_headers, headers)

  # The payload must be encoded before signing: SigV4 covers the body hash,
  # so the signature is computed over the exact bytes that will be sent.
  payload = encode_payload(input)
  headers = AWS.Request.sign_v4(client, method, url, headers, payload)
  perform_request(method, url, payload, headers, options, success_status_code)
end
# Performs the HTTP request when no specific success status code is required:
# 200, 202 and 204 all count as success.
#
# NOTE(review): a 200 with an empty body returns a 2-tuple `{:ok, response}`,
# which does not match this module's @spec (3-tuple with a parsed body) and
# differs from the sibling clause below, which returns `{:ok, %{}, response}`
# for an empty body. Callers may rely on the 2-tuple, so it is left as-is —
# confirm before normalizing.
defp perform_request(method, url, payload, headers, options, nil) do
  case HTTPoison.request(method, url, payload, headers, options) do
    {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
      {:ok, response}

    {:ok, %HTTPoison.Response{status_code: status_code, body: body} = response}
    when status_code == 200 or status_code == 202 or status_code == 204 ->
      {:ok, Poison.Parser.parse!(body, %{}), response}

    # Any other status: the body is assumed to be a JSON error document.
    {:ok, %HTTPoison.Response{body: body}} ->
      error = Poison.Parser.parse!(body, %{})
      {:error, error}

    {:error, %HTTPoison.Error{reason: reason}} ->
      {:error, %HTTPoison.Error{reason: reason}}
  end
end
# Performs the HTTP request when the caller pinned an exact success status
# code (`^success_status_code`). An empty success body yields an empty map so
# the return shape is always a 3-tuple.
defp perform_request(method, url, payload, headers, options, success_status_code) do
  case HTTPoison.request(method, url, payload, headers, options) do
    {:ok, %HTTPoison.Response{status_code: ^success_status_code, body: ""} = response} ->
      {:ok, %{}, response}

    {:ok, %HTTPoison.Response{status_code: ^success_status_code, body: body} = response} ->
      {:ok, Poison.Parser.parse!(body, %{}), response}

    # Any other status: the body is assumed to be a JSON error document.
    {:ok, %HTTPoison.Response{body: body}} ->
      error = Poison.Parser.parse!(body, %{})
      {:error, error}

    {:error, %HTTPoison.Error{reason: reason}} ->
      {:error, %HTTPoison.Error{reason: reason}}
  end
end
# The "local" region is a development convenience that targets a local stack.
defp build_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

# Standard AWS host shape: <service>.<region>.<endpoint suffix>.
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
  Enum.join([endpoint_prefix, region, endpoint], ".")
end
# Assembles the full request URL from the client's protocol/port and the path.
defp build_url(host, path, %{proto: proto, port: port}) do
  "#{proto}://#{host}:#{port}#{path}"
end
# Nothing to append when the query list is empty.
defp add_query(url, []), do: url

# Otherwise encode the pairs and attach them as a query string.
defp add_query(url, query) do
  url <> "?" <> AWS.Util.encode_query(query)
end
# A nil input means the request has no body; AWS expects an empty string then.
defp encode_payload(nil), do: ""
defp encode_payload(input), do: Poison.Encoder.encode(input, %{})
end
|
lib/aws/amplify.ex
| 0.711331
| 0.463201
|
amplify.ex
|
starcoder
|
defmodule AbsintheAuth.Policy do
  @moduledoc """
  Helper functions for use in policies.

  ## Usage

  ```
  defmodule MyPolicy do
    use AbsintheAuth.Policy
  end
  ```
  """

  defmacro __using__(_) do
    # Make the helpers below available unqualified inside the policy module.
    quote do
      import unquote(__MODULE__)
    end
  end

  @doc """
  Allows a request

  For example:

  ```
  def check(resolution, _opts) do
    allow!(resolution)
  end
  ```
  """
  @spec allow!(resolution :: Absinthe.Resolution.t) :: Absinthe.Resolution.t
  def allow!(%{private: private} = resolution) do
    # Marking authorisation as :done short-circuits any later policy checks.
    %{resolution | private: Map.put(private, :authorisation, :done)}
  end

  @doc """
  Denies a request

  For example:

  ```
  def check(resolution, _opts) do
    deny!(resolution)
  end
  ```
  """
  @spec deny!(resolution :: Absinthe.Resolution.t) :: Absinthe.Resolution.t
  def deny!(resolution) do
    # Putting a result on the resolution halts further resolution with an error.
    Absinthe.Resolution.put_result(resolution, {:error, "Denied"})
  end

  # TODO: Don't force defer in the middleware (let policy implementors decide)
  @doc """
  Defers a request for a decision in a subsequent policy.
  If no decision is made the request will be denied.

  For example:

  ```
  def check(resolution, _opts) do
    defer(resolution)
  end
  ```
  """
  @spec defer(resolution :: Absinthe.Resolution.t) :: Absinthe.Resolution.t
  def defer(%{private: private} = resolution) do
    # :pending means "no decision yet"; a later policy must allow! or the
    # request is ultimately denied.
    %{resolution | private: Map.put(private, :authorisation, :pending)}
  end

  @doc """
  Returns true if the current request is a mutation
  """
  @spec is_mutation?(resolution :: Absinthe.Resolution.t) :: boolean
  def is_mutation?(resolution) do
    resolution.parent_type.identifier == :mutation
  end

  @doc """
  Fetches an argument from the current resolution.

  Say we have a schema as follows:

  ```
  query do
    field :movie, :movie do
      arg :id, non_null(:id)
      policy MoviePolicy, :view
      resolve &MovieResolver.find_movie/2
    end
  end
  ```

  In our policy we can fetch the `id` that was passed to the request:

  ```
  defmodule MoviePolicy do
    use AbsintheAuth.Policy

    def view(resolution, _) do
      id = arg(resolution, :id)
      SomeModule.that_checks_if_we_can_view_this_movie(id)
    end
  end
  ```
  """
  def arg(resolution, arg) do
    # TODO: Ideally we'd cast the argument to the type defined in the schema
    # (By default we get a string here)
    # List.wrap/1 lets callers pass either a single key or a path of keys.
    get_in(resolution.arguments, List.wrap(arg))
  end
end
|
lib/absinthe_auth/policy.ex
| 0.717507
| 0.807726
|
policy.ex
|
starcoder
|
defmodule EctoTablestore.Schema do
  @moduledoc ~S"""
  Defines a schema for Tablestore.

  Since the atomic increment may need to return the increased value, `EctoTablestore.Schema` module underlying uses `Ecto.Schema`,
  and automatically append all `:integer` field type with `read_after_writes: true` option by default.

  An Ecto schema is used to map any data source into an Elixir struct. The definition of the schema is
  possible through the API: `tablestore_schema/2`.

  `tablestore_schema/2` is typically used to map data from a persisted source, usually a Tablestore table,
  into Elixir structs and vice-versa. For this reason, the first argument of `tablestore_schema/2` is the
  source(table) name. Structs defined with `tablestore_schema/2` also contain a `__meta__` field with metadata holding
  the status of struct, for example, if it has been built, loaded or deleted.

  Since Tablestore is a NoSQL database service, `embedded_schema/1` is not supported so far.

  ## About timestamps

  Since Tablestore's column does not support `DateTime` type, use UTC timestamp (:integer type)
  as `timestamps()` macro for the generated `inserted_at` and `updated_at` fields by default.

  ## About primary key

  * The primary key supports `:id` (integer()) and `:binary_id` (binary()).
  * By default the `:primary_key` option is `false`.
  * The first defined primary key by the written order in the `tablestore_schema` is the partition key.
  * Up to 4 primary key(s), it is limited by TableStore product server side.
  * Up to 1 primary key with `autogenerate: true` option, it is limited by TableStore product server side.
  * The primary key set with `autogenerate: true` will use the TableStore product server's AUTO_INCREMENT feature.
  * If the partition key set as `autogenerate: true` is not allowed to take advantage of the AUTO_INCREMENT feature which it
    is limited by server, but there is a built-in implement to use the `Sequence` to achieve the same atomic increment operation
    in `ecto_tablestore` library.

  ## Example

  ```elixir
  defmodule User do
    use EctoTablestore.Schema

    tablestore_schema "users" do
      field :outer_id, :binary_id, primary_key: true
      field :internal_id, :id, primary_key: true, autogenerate: true
      field :name, :string
      field :desc
    end
  end
  ```

  By default, if not explicitly set field type will process it as `:string` type.
  """

  defmacro __using__(_) do
    quote do
      use Ecto.Schema
      import EctoTablestore.Schema, only: [tablestore_schema: 2]

      # Tablestore requires every primary key to be declared explicitly, so
      # Ecto's implicit :id primary key is disabled.
      @primary_key false

      # Tablestore columns cannot store DateTime values, so timestamps are
      # persisted as UTC unix integers (see `__timestamps__/0`).
      @timestamps_opts [
        type: :integer,
        autogenerate: {EctoTablestore.Schema, :__timestamps__, []}
      ]
    end
  end

  @doc """
  Autogenerate callback for `timestamps/0`: the current UTC time as an integer
  unix timestamp (seconds).
  """
  def __timestamps__() do
    DateTime.utc_now() |> DateTime.to_unix()
  end

  @doc """
  Defines a schema for the Tablestore table `source`.

  See the module documentation for the supported field types and the special
  handling of `:integer` and `:hashids` fields.
  """
  defmacro tablestore_schema(source, do: block) do
    {block, hashids} = check_block(block, __CALLER__.module)

    quote do
      Ecto.Schema.schema(unquote(source), do: unquote(block))
      unquote(generate_hashids_config(hashids))
    end
  end

  # Injects one `hashids/1` clause per configured Hashids primary key so the
  # coder for a field can be looked up at runtime by field name.
  defp generate_hashids_config(hashids) do
    for {key, hashids_item} <- hashids do
      quote location: :keep do
        def hashids(unquote(key)) do
          unquote(hashids_item)
        end
      end
    end
  end

  # Multi-field schema body: rewrite the field definitions and collect the
  # Hashids configuration. Returns `{ast, escaped_hashids}`.
  defp check_block({:__block__, info, fields}, schema_module) do
    {fields, hashids} = supplement_fields(fields, [], [], schema_module)

    {
      {:__block__, info, fields},
      Macro.escape(hashids)
    }
  end

  # Single-field schema body (the `do` block is a bare call, not `:__block__`).
  # Bug fix: the previous fallback returned the bare `block`, which crashed the
  # `{block, hashids} = check_block(...)` match in `tablestore_schema/2`.
  # Running the single field through `supplement_fields/4` also keeps the
  # documented `:integer`/`:hashids` handling consistent with the clause above.
  defp check_block({_, _, _} = field, schema_module) do
    {fields, hashids} = supplement_fields([field], [], [], schema_module)

    {
      {:__block__, [], fields},
      Macro.escape(hashids)
    }
  end

  # Anything else (e.g. an empty body) is passed through unchanged with no
  # Hashids configuration.
  defp check_block(block, _) do
    {block, []}
  end

  # Base case: all fields processed; restore original order.
  defp supplement_fields([], prepared, hashids, _schema_module) do
    {Enum.reverse(prepared), hashids}
  end

  # `field :name, :integer` (no options): replace the type with
  # EctoTablestore.Integer and force `read_after_writes: true` so atomic
  # increments can return the increased value.
  defp supplement_fields(
         [{defined_macro, field_line, [field_name, :integer]} | rest_fields],
         prepared,
         hashids,
         schema_module
       ) do
    update = {
      defined_macro,
      field_line,
      [
        field_name,
        {:__aliases__, field_line, [:EctoTablestore, :Integer]},
        [read_after_writes: true]
      ]
    }

    supplement_fields(rest_fields, [update | prepared], hashids, schema_module)
  end

  # `field :name, :integer, opts`: same rewrite, unless it is a primary key
  # (primary keys are left untouched).
  defp supplement_fields(
         [{defined_macro, field_line, [field_name, :integer, opts]} = field | rest_fields],
         prepared,
         hashids,
         schema_module
       ) do
    field =
      if Keyword.get(opts, :primary_key, false) do
        field
      else
        {
          defined_macro,
          field_line,
          [
            field_name,
            {:__aliases__, field_line, [:EctoTablestore, :Integer]},
            Keyword.put(opts, :read_after_writes, true)
          ]
        }
      end

    supplement_fields(rest_fields, [field | prepared], hashids, schema_module)
  end

  # `field :name, EctoTablestore.Integer` written explicitly (no options):
  # still append `read_after_writes: true`.
  defp supplement_fields(
         [
           {defined_macro, field_line,
            [field_name, {:__aliases__, line, [:EctoTablestore, :Integer]}]}
           | rest_fields
         ],
         prepared,
         hashids,
         schema_module
       ) do
    update = {
      defined_macro,
      field_line,
      [field_name, {:__aliases__, line, [:EctoTablestore, :Integer]}, [read_after_writes: true]]
    }

    supplement_fields(rest_fields, [update | prepared], hashids, schema_module)
  end

  # `field :name, EctoTablestore.Integer, opts`: append `read_after_writes`
  # unless it is a primary key.
  defp supplement_fields(
         [
           {defined_macro, field_line,
            [field_name, {:__aliases__, line, [:EctoTablestore, :Integer]}, opts]} = field
           | rest_fields
         ],
         prepared,
         hashids,
         schema_module
       ) do
    field =
      if Keyword.get(opts, :primary_key, false) do
        field
      else
        {
          defined_macro,
          field_line,
          [
            field_name,
            {:__aliases__, line, [:EctoTablestore, :Integer]},
            Keyword.put(opts, :read_after_writes, true)
          ]
        }
      end

    supplement_fields(rest_fields, [field | prepared], hashids, schema_module)
  end

  # `field :name, :hashids, opts`: only meaningful for primary keys, where the
  # field is rewritten to EctoTablestore.Hashids and a coder is prepared.
  defp supplement_fields(
         [
           {defined_macro, field_line, [field_name, :hashids, opts]} = field
           | rest_fields
         ],
         prepared,
         prepared_hashids,
         schema_module
       ) do
    if Keyword.get(opts, :primary_key, false) do
      {field, new_hashids} =
        supplement_hashids_field(defined_macro, field_line, field_name, opts, schema_module)

      supplement_fields(
        rest_fields,
        [field | prepared],
        [new_hashids | prepared_hashids],
        schema_module
      )
    else
      supplement_fields(rest_fields, [field | prepared], prepared_hashids, schema_module)
    end
  end

  # `field :name, EctoTablestore.Hashids, opts` (or aliased `Hashids`):
  # identical handling to the `:hashids` shorthand above.
  defp supplement_fields(
         [
           {defined_macro, field_line, [field_name, {:__aliases__, _line, type}, opts]} = field
           | rest_fields
         ],
         prepared,
         prepared_hashids,
         schema_module
       )
       when type == [:EctoTablestore, :Hashids]
       when type == [:Hashids] do
    if Keyword.get(opts, :primary_key, false) do
      {field, new_hashids} =
        supplement_hashids_field(defined_macro, field_line, field_name, opts, schema_module)

      supplement_fields(
        rest_fields,
        [field | prepared],
        [new_hashids | prepared_hashids],
        schema_module
      )
    else
      supplement_fields(rest_fields, [field | prepared], prepared_hashids, schema_module)
    end
  end

  # Catch-all: any other field/macro call is kept as written.
  defp supplement_fields(
         [{defined_macro, field_line, field_info} | rest_fields],
         prepared,
         hashids,
         schema_module
       ) do
    supplement_fields(
      rest_fields,
      [{defined_macro, field_line, field_info} | prepared],
      hashids,
      schema_module
    )
  end

  # Builds the rewritten Hashids field AST and its `{field_name, coder}` pair.
  # The coder options come either from the field's `:hashids` option or from
  # the application environment (see `fetch_hashids_opts/2`).
  defp supplement_hashids_field(defined_macro, field_line, field_name, opts, schema_module) do
    hashids_opts = fetch_hashids_opts(opts[:hashids], schema_module)

    hashids =
      hashids_opts
      |> Keyword.take([:salt, :min_len, :alphabet])
      |> Hashids.new()

    field = {
      defined_macro,
      field_line,
      [
        field_name,
        EctoTablestore.Hashids,
        opts
      ]
    }

    new_hashids = {field_name, hashids}
    {field, new_hashids}
  end

  # No per-field options: fall back to the schema module's entry in the
  # `:ecto_tablestore, :hashids` application environment.
  defp fetch_hashids_opts(nil, schema_module) do
    Application.fetch_env!(:ecto_tablestore, :hashids) |> Keyword.get(schema_module, [])
  end

  defp fetch_hashids_opts(opts, _schema_module) when is_list(opts) do
    opts
  end
end
|
lib/ecto_tablestore/schema.ex
| 0.910364
| 0.844601
|
schema.ex
|
starcoder
|
defmodule Ash.Filter.Runtime do
  @moduledoc """
  Checks a record to see if it matches a filter statement.

  We can't always tell if a record matches a filter statement, and as such
  this function may return `:unknown`
  """
  alias Ash.Filter.{Expression, Not, Predicate}

  @doc """
  Returns whether `record` matches `filter`.

  Fields listed in `dirty_fields` cannot be decided and yield `:unknown`.
  Predicates over unloaded relationships trigger a side load through `api`
  and a single retry; if that retry still cannot decide, `false` is returned.
  """
  def matches?(api, record, filter, dirty_fields \\ []) do
    case do_matches?(record, filter, dirty_fields) do
      {:ok, boolean} ->
        boolean

      # Some predicate needed related data that was not loaded: load it and
      # try exactly once more.
      {:side_load, side_loads} ->
        case do_matches?(api.side_load!(record, side_loads), filter, dirty_fields) do
          {:ok, boolean} -> boolean
          _ -> false
        end
    end
  end

  # Walks the filter expression tree.
  #
  # NOTE(review): recursive calls pass sub-expressions (%Not{} contents,
  # %Expression{} sides) back into this function, which then reads
  # `filter.expression` — this assumes those structs also expose an
  # `:expression` field. TODO: confirm against the Ash.Filter struct
  # definitions.
  defp do_matches?(record, filter, dirty_fields, side_loads \\ []) do
    case filter.expression do
      nil ->
        {:ok, true}

      # Predicate on the record itself: dirty fields are undecidable.
      %Predicate{predicate: predicate, relationship_path: [], attribute: attribute} ->
        if attribute.name in dirty_fields do
          {:ok, :unknown}
        else
          {:ok, Predicate.match?(predicate, Map.get(record, attribute.name), attribute.type)}
        end

      # Predicate on a related record: may require a side load.
      %Predicate{predicate: predicate, attribute: attribute, relationship_path: relationship_path} ->
        side_load_predicate_matches(record, relationship_path, predicate, attribute, side_loads)

      %Not{expression: expression} ->
        case do_matches?(record, expression, dirty_fields, side_loads) do
          {:ok, :unknown} ->
            {:ok, :unknown}

          {:ok, match?} ->
            {:ok, !match?}

          {:side_load, side_loads} ->
            {:side_load, side_loads}
        end

      %Expression{op: op, left: left, right: right} ->
        expression_matches(op, left, right, record, dirty_fields, side_loads)
    end
  end

  # Evaluates a relationship predicate against all related records. One
  # matching record is enough; otherwise the accumulated status stays `false`,
  # upgraded to `:unknown` when a record's result cannot be determined.
  defp side_load_predicate_matches(record, relationship_path, predicate, attribute, side_loads) do
    if loaded?(record, relationship_path) do
      records = get_related(record, relationship_path)

      Enum.reduce_while(records, {:ok, false}, fn record, {:ok, status} ->
        case Predicate.match?(predicate, Map.get(record, attribute.name), attribute.type) do
          # Bug fix: Enum.reduce_while/3 requires the fun to return
          # `{:cont, acc}` or `{:halt, acc}` — the previous bare `{:ok, _}`
          # returns raised at runtime for the :unknown and false branches.
          :unknown ->
            if status == false do
              {:cont, {:ok, :unknown}}
            else
              {:cont, {:ok, status}}
            end

          true ->
            {:halt, {:ok, true}}

          false ->
            # Preserve the accumulated status so an earlier :unknown is not
            # silently downgraded back to false.
            {:cont, {:ok, status}}
        end
      end)
    else
      {:side_load, [relationship_path | side_loads]}
    end
  end

  # :and — short-circuits on a definite false; :unknown is sticky.
  defp expression_matches(:and, left, right, record, dirty_fields, side_loads) do
    case do_matches?(record, left, dirty_fields, side_loads) do
      {:ok, true} ->
        do_matches?(record, right, dirty_fields, side_loads)

      {:ok, :unknown} ->
        {:ok, :unknown}

      {:ok, false} ->
        {:ok, false}

      {:side_load, side_loads} ->
        do_matches?(record, right, dirty_fields, side_loads)
    end
  end

  # :or — short-circuits on a definite true; a definite false on the right
  # cannot override an :unknown on the left.
  defp expression_matches(:or, left, right, record, dirty_fields, side_loads) do
    case do_matches?(record, left, dirty_fields, side_loads) do
      {:ok, true} ->
        {:ok, true}

      {:ok, :unknown} ->
        case do_matches?(record, right, dirty_fields, side_loads) do
          {:ok, false} -> {:ok, :unknown}
          other -> other
        end

      {:ok, false} ->
        do_matches?(record, right, dirty_fields, side_loads)

      {:side_load, side_loads} ->
        do_matches?(record, right, dirty_fields, side_loads)
    end
  end

  # Normalizes a single record to a list so the traversal below is uniform.
  defp get_related(record, path) when not is_list(record) do
    get_related([record], path)
  end

  defp get_related(records, []) do
    records
  end

  # Follows the relationship path, flattening to-many relationships and
  # dropping values that are not loaded.
  defp get_related(records, [key | rest]) when is_list(records) do
    Enum.flat_map(records, fn record ->
      case Map.get(record, key) do
        %Ash.NotLoaded{type: :relationship} ->
          []

        value ->
          get_related(value, rest)
      end
    end)
  end

  # True only when every step of the relationship path is loaded on every record.
  defp loaded?(records, path) when is_list(records) do
    Enum.all?(records, &loaded?(&1, path))
  end

  defp loaded?(%Ash.NotLoaded{}, _), do: false
  defp loaded?(_, []), do: true

  defp loaded?(record, [key | rest]) do
    record
    |> Map.get(key)
    |> loaded?(rest)
  end
end
|
lib/ash/filter/runtime.ex
| 0.770033
| 0.574992
|
runtime.ex
|
starcoder
|
defmodule SSD1322.Device do
  @moduledoc """
  This module provides a high-level interface to control and display content on
  a SSD1322 based OLED display.

  Note that this module is stateless - there is no protection here at all for
  concurrent access

  For details regarding the magic values used herein, consult the [SSD1322 datasheet](https://www.newhavendisplay.com/app_notes/SSD1322.pdf)
  """
  defstruct conn: nil, width: 0, height: 0

  alias SSD1322.SPIConnection

  # The SSD1322 controller addresses GDRAM rows of 480 pixels regardless of
  # the attached panel's actual width (see `draw/4`).
  @gdram_row_width 480

  @doc """
  Sets up a connection to an SSD1322. Returns an opaque struct of type
  SSD1322.Device suitable for passing to other functions in this module.
  Options are passed as a keyword list with the following possible values:
  * `spi_connection_opts`: A nested keyword list containing any of the possible
  values below:
  * `spi_dev`: The name of the spi device to connect to. Defaults to `spidev0.0`
  * `dc_pin`: The GPIO pin number of the line to use for D/C select. Defaults to 24
  * `reset_pin`: The GPIO pin number of the line to use for reset. Defaults to 25
  * `conn`: A pre-existing SSD1322.SPIConnection struct, if you already have one
  * `width`: The width of the display in pixels. Must be a multiple of 4. Defaults to 256
  * `height`: The height of the display in pixels. Defaults to 64
  """
  def init(opts \\ []) do
    spi_connection_opts = opts |> Keyword.get(:spi_connection_opts, [])
    session = %__MODULE__{
      conn: Keyword.get(opts, :conn, SPIConnection.init(spi_connection_opts)),
      width: Keyword.get(opts, :width, 256),
      height: Keyword.get(opts, :height, 64)
    }
    # Bring the panel to a known state: reset, blank it, full contrast, then on.
    reset(session)
    clear(session)
    contrast(session, 255)
    display_on(session)
    session
  end

  @doc """
  Issues a reset to the SSD1322 device.
  """
  def reset(%__MODULE__{conn: conn}) do
    # Hardware reset via the reset GPIO, then the init command sequence.
    # Command bytes below follow the SSD1322 datasheet command table; the
    # parameter values are panel-specific tuning (TODO: confirm against the
    # target panel's reference init sequence).
    SPIConnection.reset(conn)
    SPIConnection.command(conn, <<0xFD>>, <<0x12>>)        # set command lock: unlock
    SPIConnection.command(conn, <<0xA4>>)                  # entire display off (all pixels off)
    SPIConnection.command(conn, <<0xB3>>, <<0xF2>>)        # front clock divider / oscillator frequency
    SPIConnection.command(conn, <<0xCA>>, <<0x3F>>)        # multiplex ratio (0x3F = 64MUX)
    SPIConnection.command(conn, <<0xA2>>, <<0x00>>)        # display offset
    SPIConnection.command(conn, <<0xA1>>, <<0x00>>)        # display start line
    SPIConnection.command(conn, <<0xA0>>, <<0x14, 0x11>>)  # remap & dual COM line mode
    SPIConnection.command(conn, <<0xB5>>, <<0x00>>)        # GPIO pins disabled
    SPIConnection.command(conn, <<0xAB>>, <<0x01>>)        # function selection: internal VDD
    SPIConnection.command(conn, <<0xB4>>, <<0xA0, 0xFD>>)  # display enhancement A
    SPIConnection.command(conn, <<0xC7>>, <<0x0F, 0xB9>>)  # master contrast current / greyscale
    SPIConnection.command(conn, <<0xB1>>, <<0xF0>>)        # phase length
    SPIConnection.command(conn, <<0xD1>>, <<0x82, 0x20>>)  # display enhancement B
    SPIConnection.command(conn, <<0xBB>>, <<0x0D>>)        # precharge voltage
    SPIConnection.command(conn, <<0xB6>>, <<0x08>>)        # second precharge period
    SPIConnection.command(conn, <<0xBE>>, <<0x00>>)        # VCOMH voltage
    SPIConnection.command(conn, <<0xA6>>)                  # normal (non-inverted) display mode
    SPIConnection.command(conn, <<0xA9>>)                  # exit partial display mode
  end

  @doc """
  Turns the display on.
  """
  def display_on(%__MODULE__{conn: conn}) do
    # 0xAF: sleep mode off.
    SPIConnection.command(conn, <<0xAF>>)
  end

  @doc """
  Turns the display off.
  """
  def display_off(%__MODULE__{conn: conn}) do
    # 0xAE: sleep mode on.
    SPIConnection.command(conn, <<0xAE>>)
  end

  @doc """
  Sets the contrast of the display. Valid values range from 0 (lowest contrast) to 255 (highest contrast).
  """
  def contrast(%__MODULE__{conn: conn}, contrast) do
    # 0xC1: set contrast current.
    SPIConnection.command(conn, <<0xC1>>, <<contrast::8>>)
  end

  @doc """
  Clears the display to the specified grey level.
  Valid values for `grey` are from 0 (black) to 15 (whatever colour your display is). Defaults to 0.
  """
  def clear(%__MODULE__{width: width, height: height} = device, grey \\ 0) do
    # Two 4-bit pixels per byte, hence width * height / 2 bytes for the frame.
    draw(device, :binary.copy(<<grey::4, grey::4>>, div(width * height, 2)), {0, 0}, {width, height})
  end

  @doc """
  Draws the specified bitmap. The bitmap must be in packed 4-bit greyscale format and the size
  of the full display as configured.
  The pixel format is packed 4-bit greyscale in the following format
  * Row length must be a multiple of 4
  * Each pixel is represented by 4 bits from 0 (fully off) to 15 (fully on)
  * Pixels are packed 2 per byte, in left-to-right order
  * Rows are packed in top-to-bottom order
  This function does not perform any clipping or validation on the given binary other than to
  validate that its x offset and width are both multiples of 4.
  """
  def draw(%__MODULE__{conn: conn, width: display_width}, binary, {x, y}, {width, height}) do
    if rem(x, 4) != 0, do: raise("Cannot draw when x is not divisible by 4 (x=#{x})")
    if rem(width, 4) != 0, do: raise("Cannot draw when width is not divisible by 4 (width=#{width})")
    # Memory row widths don't know anything about the actual number of pixels on the device. The SSD1322 chip
    # supports displays up to 480 pixels wide, and if a display is narrower than that its pixels are centered in the
    # memory row. As a consequence, pixel column 0 on the display is actually (480 - display_width) / 2 bytes into the
    # row.
    display_zero = div(@gdram_row_width - display_width, 2)
    offset = display_zero + x
    # GDRAM addresses 16 bit words, and each pixel is 4 bits, so offsets / widths need to divide by 4
    offset_to_write = div(offset, 4)
    width_to_write = div(width, 4)
    # 0x15: set column address window; 0x75: set row address window;
    # 0x5C: write the pixel data into GDRAM.
    SPIConnection.command(conn, <<0x15>>, <<offset_to_write, offset_to_write + width_to_write - 1>>)
    SPIConnection.command(conn, <<0x75>>, <<y, y + height - 1>>)
    SPIConnection.command(conn, <<0x5C>>, binary)
  end
end
|
lib/ssd1322/device.ex
| 0.846831
| 0.58062
|
device.ex
|
starcoder
|
defmodule StepFlow.WorkerDefinitions do
  @moduledoc """
  The WorkerDefinitions context.
  """

  import Ecto.Query, warn: false
  alias StepFlow.Repo
  alias StepFlow.WorkerDefinitions.WorkerDefinition

  @doc """
  Returns the list of WorkerDefinitions.

  Results are paginated via the string-keyed `"page"` (0-based) and `"size"`
  parameters, ordered by newest first.

  ## Examples

      iex> StepFlow.WorkerDefinitions.list_worker_definitions()
      %{data: [], page: 0, size: 10, total: 0}

  """
  def list_worker_definitions(params \\ %{}) do
    # Params arrive as strings (e.g. from an HTTP layer); force to integers.
    page =
      Map.get(params, "page", 0)
      |> StepFlow.Integer.force()

    size =
      Map.get(params, "size", 10)
      |> StepFlow.Integer.force()

    offset = page * size

    query = from(worker_definition in WorkerDefinition)

    # Separate COUNT query so the total reflects all rows, not just this page.
    total_query = from(item in query, select: count(item.id))

    total =
      Repo.all(total_query)
      |> List.first()

    query =
      from(
        job in query,
        order_by: [desc: :inserted_at],
        offset: ^offset,
        limit: ^size
      )

    jobs = Repo.all(query)

    %{
      data: jobs,
      total: total,
      page: page,
      size: size
    }
  end

  @doc """
  Gets a single WorkerDefinition.

  Raises `Ecto.NoResultsError` if the WorkerDefinition does not exist.
  """
  def get_worker_definition!(id), do: Repo.get!(WorkerDefinition, id)

  @doc """
  Creates a WorkerDefinition.

  Returns `{:ok, worker_definition}` on success or `{:error, changeset}` when
  validation fails.

  ## Examples

      iex> result = StepFlow.WorkerDefinitions.create_worker_definition(%{
      ...>   queue_name: "my_queue",
      ...>   label: "My Queue",
      ...>   version: "1.2.3",
      ...>   short_description: "short description",
      ...>   description: "long description",
      ...>   parameters: %{}
      ...> })
      ...> match?({:ok, %StepFlow.WorkerDefinitions.WorkerDefinition{}}, result)
      true

      iex> result = StepFlow.WorkerDefinitions.create_worker_definition(%{field: :bad_value})
      ...> match?({:error, %Ecto.Changeset{}}, result)
      true

  """
  def create_worker_definition(attrs \\ %{}) do
    %WorkerDefinition{}
    |> WorkerDefinition.changeset(attrs)
    |> Repo.insert()
  end

  # Returns true when a definition with this queue_name + version pair exists.
  # Any params map without both string keys yields false (see the clause below).
  def exists(%{"queue_name" => queue_name, "version" => version}) do
    case Repo.get_by(WorkerDefinition, queue_name: queue_name, version: version) do
      nil -> false
      _ -> true
    end
  end

  def exists(_), do: false
end
|
lib/step_flow/worker_definitions/worker_definitions.ex
| 0.755276
| 0.457864
|
worker_definitions.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.