code
stringlengths
114
1.05M
path
stringlengths
3
312
quality_prob
float64
0.5
0.99
learning_prob
float64
0.2
1
filename
stringlengths
3
168
kind
stringclasses
1 value
defmodule Model.Vehicle do
  @moduledoc """
  Vehicle represents the current status of a vehicle.
  """

  use Recordable, [
    :id,
    :trip_id,
    :stop_id,
    :route_id,
    :direction_id,
    :label,
    :latitude,
    :longitude,
    :bearing,
    :speed,
    :current_status,
    :current_stop_sequence,
    :updated_at,
    :effective_route_id,
    :consist,
    :occupancy_status
  ]

  alias Model.WGS84

  @typedoc """
  Unique ID for vehicle.
  """
  @type id :: String.t()

  @typedoc """
  Status of vehicle relative to the stops.

  | _**Value**_      | _**Description**_                                                                                          |
  |------------------|------------------------------------------------------------------------------------------------------------|
  | `:in_transit_to` | The vehicle has departed the previous stop and is in transit.                                               |
  | `:incoming_at`   | The vehicle is just about to arrive at the stop (on a stop display, the vehicle symbol typically flashes).  |
  | `:stopped_at`    | The vehicle is standing at the stop.                                                                        |
  """
  @type current_status :: :in_transit_to | :incoming_at | :stopped_at

  @type occupancy_status ::
          :empty
          | :many_seats_available
          | :few_seats_available
          | :standing_room_only
          | :crushed_standing_room_only
          | :full
          | :not_accepting_passengers

  @typedoc """
  Meters per second
  """
  @type speed :: float

  @typedoc """
  The current status of a vehicle (bus, ferry, or train).

  * `:id` - unique ID for vehicle. See [GTFS-realtime VehicleDescriptor id](https://github.com/google/transit/blob/master/gtfs-realtime/spec/en/reference.md#message-vehicledescriptor).
  * `:bearing` - in degrees, clockwise from True North, i.e., 0 is North and 90 is East. This can be the compass bearing, or the direction towards the next stop or intermediate location. See [GTFS-realtime Position bearing](https://github.com/google/transit/blob/master/gtfs-realtime/spec/en/reference.md#message-position).
  * `:current_status` - status of vehicle relative to the stops. See [GTFS-realtime VehicleStopStatus](https://github.com/google/transit/blob/master/gtfs-realtime/spec/en/reference.md#enum-vehiclestopstatus).
  * `:current_stop_sequence` - index of current stop along trip. See [GTFS-realtime VehiclePosition current_stop_sequence](https://github.com/google/transit/blob/master/gtfs-realtime/spec/en/reference.md#message-vehicleposition).
  * `:direction_id` - direction of travel of the vehicle.
  * `:effective_route_id` - the `Model.Route.id` of the `Model.Route.t` that the vehicle is _currently_ on. When `trip_id` has only one route, then `effective_route_id` and `route_id` will always match. If `trip_id` has multiple routes, then `effective_route_id` can be any of the routes of `trip_id`.
  * `:label` - user visible label, such as the one on the signage on the vehicle. See [GTFS-realtime VehicleDescriptor label](https://github.com/google/transit/blob/master/gtfs-realtime/spec/en/reference.md#message-vehicledescriptor).
  * `:consist` - set of user visible labels (such as the one on the signage on the vehicle) on individual cars. Only present for light and heavy rail.
  * `:updated_at` - time at which vehicle information was last updated.
  * `:latitude` - latitude of the vehicle's current position. See [GTFS-realtime Position latitude](https://github.com/google/transit/blob/master/gtfs-realtime/spec/en/reference.md#message-position).
  * `:longitude` - longitude of the vehicle's current position. See [GTFS-realtime Position longitude](https://github.com/google/transit/blob/master/gtfs-realtime/spec/en/reference.md#message-position).
  * `:route_id` - the `Model.Route.id` of the primary `Model.Route.t` that the vehicle is on. When `trip_id` has only one route, then `effective_route_id` and `route_id` will always match. If `trip_id` has multiple routes, then `effective_route_id` can be any of the routes of `trip_id`.
  * `:speed` - speed that the vehicle is traveling. See [GTFS-realtime Position speed](https://github.com/google/transit/blob/master/gtfs-realtime/spec/en/reference.md#message-position).
  * `:stop_id` - the `Model.Stop.id` of the `Model.Stop.t` that the vehicle is `:current_status` relative to.
  * `:trip_id` - the `Model.Trip.id` of the `Model.Trip.t` that the vehicle is on.
  """
  @type t :: %__MODULE__{
          id: id | nil,
          bearing: non_neg_integer | nil,
          current_status: current_status,
          current_stop_sequence: non_neg_integer | nil,
          direction_id: Model.Direction.id() | nil,
          effective_route_id: Model.Route.id() | nil,
          label: String.t() | nil,
          updated_at: DateTime.t(),
          latitude: WGS84.latitude() | nil,
          longitude: WGS84.longitude() | nil,
          route_id: Model.Route.id() | nil,
          speed: speed | nil,
          stop_id: Model.Stop.id() | nil,
          trip_id: Model.Trip.id() | nil,
          consist: [String.t()] | nil,
          occupancy_status: occupancy_status() | nil
        }

  @doc """
  Returns `true` when the vehicle is currently on its primary route, i.e. its
  `:effective_route_id` equals its `:route_id`; `false` otherwise.
  """
  def primary?(%__MODULE__{route_id: route_id, effective_route_id: route_id}), do: true
  def primary?(%__MODULE__{}), do: false
end
apps/model/lib/model/vehicle.ex
0.903146
0.655169
vehicle.ex
starcoder
defmodule Exredis do
  @moduledoc """
  Redis client for Elixir
  """

  defmacro __using__(_opts) do
    quote do
      import Exredis
    end
  end

  @type reconnect_sleep :: :no_reconnect | integer
  @type start_link :: {:ok, pid} | {:error, term}

  @doc """
  Connect to the Redis server using a connection string:

  * `start_using_connection_string("redis://user:password@127.0.0.1:6379/0")`
  * `start_using_connection_string("redis://127.0.0.1:6379")`

  Returns pid of the connected client
  """
  @spec start_using_connection_string(binary, reconnect_sleep) :: pid
  def start_using_connection_string(connection_string, reconnect_sleep \\ :no_reconnect) do
    config = Exredis.ConnectionString.parse(connection_string)
    start(config.host, config.port, config.db, config.password, reconnect_sleep)
  end

  @doc """
  Connect to the Redis server:

  * `start`
  * `start("127.0.0.1", 6379)`

  Returns pid of the connected client
  """
  @spec start(binary, integer, integer, binary, reconnect_sleep) :: pid
  def start(host \\ "127.0.0.1", port \\ 6379, database \\ 0, password \\ "",
            reconnect_sleep \\ :no_reconnect) do
    # start_link/5 returns {:ok, pid}; unwrap so callers get the bare pid.
    start_link(host, port, database, password, reconnect_sleep) |> elem(1)
  end

  @doc """
  Connect to the Redis server, erlang way:

  * `start_link`
  * `start_link("127.0.0.1", 6379)`

  Returns tuple { :ok, pid }
  """
  @spec start_link(binary, integer, integer, binary, reconnect_sleep) :: start_link
  def start_link(host \\ "127.0.0.1", port \\ 6379, database \\ 0, password \\ "",
                 reconnect_sleep \\ :no_reconnect) do
    # :eredis is an Erlang library and expects charlists, not Elixir binaries.
    # String.to_charlist/1 replaces the deprecated String.to_char_list/1.
    :eredis.start_link(
      String.to_charlist(host),
      port,
      database,
      String.to_charlist(password),
      reconnect_sleep
    )
  end

  @doc """
  Disconnect from the Redis server:

  `stop client`

  Client is a pid getting from start command
  """
  @spec stop(pid) :: :ok
  def stop(client), do: :eredis.stop(client)

  @doc """
  Make query

  * `query(client, ["SET", "foo", "bar"])`
  * `query(client, ["GET", "foo"])`
  * `query(client, ["MSET" | ["k1", "v1", "k2", "v2", "k3", "v3"]])`
  * `query(client, ["MGET" | ["k1", "k2", "k3"]])`

  See more commands in official Redis documentation
  """
  @spec query(pid, list) :: any
  def query(client, command) when is_pid(client) and is_list(command),
    # :eredis.q/2 returns {:ok, result}; unwrap to return the bare result.
    do: client |> :eredis.q(command) |> elem(1)

  @doc """
  Pipeline query

  ```
  query_pipe(client, [["SET", :a, "1"], ["LPUSH", :b, "3"], ["LPUSH", :b, "2"]])
  ```
  """
  @spec query_pipe(pid, list) :: any
  def query_pipe(client, command) when is_pid(client) and is_list(command),
    do: :eredis.qp(client, command)
end
lib/exredis.ex
0.737064
0.444685
exredis.ex
starcoder
defmodule AWS.ManagedBlockchain do
  @moduledoc """
  <p/> Amazon Managed Blockchain is a fully managed service for creating and
  managing blockchain networks using open source frameworks. Blockchain allows
  you to build applications where multiple parties can securely and
  transparently run transactions and share data without the need for a trusted,
  central authority. Currently, Managed Blockchain supports the Hyperledger
  Fabric open source framework.
  """

  @doc """
  Creates a member within a Managed Blockchain network.
  """
  def create_member(client, network_id, input, options \\ []) do
    path_ = "/networks/#{URI.encode(network_id)}/members"
    request(client, :post, path_, [], [], input, options, nil)
  end

  @doc """
  Creates a new blockchain network using Amazon Managed Blockchain.
  """
  def create_network(client, input, options \\ []) do
    request(client, :post, "/networks", [], [], input, options, nil)
  end

  @doc """
  Creates a peer node in a member.
  """
  def create_node(client, member_id, network_id, input, options \\ []) do
    path_ = "/networks/#{URI.encode(network_id)}/members/#{URI.encode(member_id)}/nodes"
    request(client, :post, path_, [], [], input, options, nil)
  end

  @doc """
  Creates a proposal for a change to the network that other members of the
  network can vote on, for example, a proposal to add a new member to the
  network. Any member can create a proposal.
  """
  def create_proposal(client, network_id, input, options \\ []) do
    path_ = "/networks/#{URI.encode(network_id)}/proposals"
    request(client, :post, path_, [], [], input, options, nil)
  end

  @doc """
  Deletes a member. Deleting a member removes the member and all associated
  resources from the network. `DeleteMember` can only be called for a specified
  `MemberId` if the principal performing the action is associated with the AWS
  account that owns the member. In all other cases, the `DeleteMember` action
  is carried out as the result of an approved proposal to remove a member. If
  `MemberId` is the last member in a network specified by the last AWS account,
  the network is deleted also.
  """
  def delete_member(client, member_id, network_id, input, options \\ []) do
    path_ = "/networks/#{URI.encode(network_id)}/members/#{URI.encode(member_id)}"
    request(client, :delete, path_, [], [], input, options, nil)
  end

  @doc """
  Deletes a peer node from a member that your AWS account owns. All data on the
  node is lost and cannot be recovered.
  """
  def delete_node(client, member_id, network_id, node_id, input, options \\ []) do
    path_ =
      "/networks/#{URI.encode(network_id)}/members/#{URI.encode(member_id)}/nodes/#{
        URI.encode(node_id)
      }"

    request(client, :delete, path_, [], [], input, options, nil)
  end

  @doc """
  Returns detailed information about a member.
  """
  def get_member(client, member_id, network_id, options \\ []) do
    path_ = "/networks/#{URI.encode(network_id)}/members/#{URI.encode(member_id)}"
    request(client, :get, path_, [], [], nil, options, nil)
  end

  @doc """
  Returns detailed information about a network.
  """
  def get_network(client, network_id, options \\ []) do
    path_ = "/networks/#{URI.encode(network_id)}"
    request(client, :get, path_, [], [], nil, options, nil)
  end

  @doc """
  Returns detailed information about a peer node.
  """
  def get_node(client, member_id, network_id, node_id, options \\ []) do
    path_ =
      "/networks/#{URI.encode(network_id)}/members/#{URI.encode(member_id)}/nodes/#{
        URI.encode(node_id)
      }"

    request(client, :get, path_, [], [], nil, options, nil)
  end

  @doc """
  Returns detailed information about a proposal.
  """
  def get_proposal(client, network_id, proposal_id, options \\ []) do
    path_ = "/networks/#{URI.encode(network_id)}/proposals/#{URI.encode(proposal_id)}"
    request(client, :get, path_, [], [], nil, options, nil)
  end

  @doc """
  Returns a listing of all invitations made on the specified network.
  """
  def list_invitations(client, max_results \\ nil, next_token \\ nil, options \\ []) do
    query_ =
      []
      |> add_param("nextToken", next_token)
      |> add_param("maxResults", max_results)

    request(client, :get, "/invitations", query_, [], nil, options, nil)
  end

  @doc """
  Returns a listing of the members in a network and properties of their
  configurations.
  """
  def list_members(client, network_id, is_owned \\ nil, max_results \\ nil, name \\ nil,
        next_token \\ nil, status \\ nil, options \\ []) do
    path_ = "/networks/#{URI.encode(network_id)}/members"

    query_ =
      []
      |> add_param("status", status)
      |> add_param("nextToken", next_token)
      |> add_param("name", name)
      |> add_param("maxResults", max_results)
      |> add_param("isOwned", is_owned)

    request(client, :get, path_, query_, [], nil, options, nil)
  end

  @doc """
  Returns information about the networks in which the current AWS account has
  members.
  """
  def list_networks(client, framework \\ nil, max_results \\ nil, name \\ nil,
        next_token \\ nil, status \\ nil, options \\ []) do
    query_ =
      []
      |> add_param("status", status)
      |> add_param("nextToken", next_token)
      |> add_param("name", name)
      |> add_param("maxResults", max_results)
      |> add_param("framework", framework)

    request(client, :get, "/networks", query_, [], nil, options, nil)
  end

  @doc """
  Returns information about the nodes within a network.
  """
  def list_nodes(client, member_id, network_id, max_results \\ nil, next_token \\ nil,
        status \\ nil, options \\ []) do
    path_ = "/networks/#{URI.encode(network_id)}/members/#{URI.encode(member_id)}/nodes"

    query_ =
      []
      |> add_param("status", status)
      |> add_param("nextToken", next_token)
      |> add_param("maxResults", max_results)

    request(client, :get, path_, query_, [], nil, options, nil)
  end

  @doc """
  Returns the listing of votes for a specified proposal, including the value of
  each vote and the unique identifier of the member that cast the vote.
  """
  def list_proposal_votes(client, network_id, proposal_id, max_results \\ nil,
        next_token \\ nil, options \\ []) do
    path_ = "/networks/#{URI.encode(network_id)}/proposals/#{URI.encode(proposal_id)}/votes"

    query_ =
      []
      |> add_param("nextToken", next_token)
      |> add_param("maxResults", max_results)

    request(client, :get, path_, query_, [], nil, options, nil)
  end

  @doc """
  Returns a listing of proposals for the network.
  """
  def list_proposals(client, network_id, max_results \\ nil, next_token \\ nil, options \\ []) do
    path_ = "/networks/#{URI.encode(network_id)}/proposals"

    query_ =
      []
      |> add_param("nextToken", next_token)
      |> add_param("maxResults", max_results)

    request(client, :get, path_, query_, [], nil, options, nil)
  end

  @doc """
  Rejects an invitation to join a network. This action can be called by a
  principal in an AWS account that has received an invitation to create a
  member and join a network.
  """
  def reject_invitation(client, invitation_id, input, options \\ []) do
    path_ = "/invitations/#{URI.encode(invitation_id)}"
    request(client, :delete, path_, [], [], input, options, nil)
  end

  @doc """
  Updates a member configuration with new parameters.
  """
  def update_member(client, member_id, network_id, input, options \\ []) do
    path_ = "/networks/#{URI.encode(network_id)}/members/#{URI.encode(member_id)}"
    request(client, :patch, path_, [], [], input, options, nil)
  end

  @doc """
  Updates a node configuration with new parameters.
  """
  def update_node(client, member_id, network_id, node_id, input, options \\ []) do
    path_ =
      "/networks/#{URI.encode(network_id)}/members/#{URI.encode(member_id)}/nodes/#{
        URI.encode(node_id)
      }"

    request(client, :patch, path_, [], [], input, options, nil)
  end

  @doc """
  Casts a vote for a specified `ProposalId` on behalf of a member. The member
  to vote as, specified by `VoterMemberId`, must be in the same AWS account as
  the principal that calls the action.
  """
  def vote_on_proposal(client, network_id, proposal_id, input, options \\ []) do
    path_ = "/networks/#{URI.encode(network_id)}/proposals/#{URI.encode(proposal_id)}/votes"
    request(client, :post, path_, [], [], input, options, nil)
  end

  # Prepends `{name, value}` to the query parameter list; `nil` values are
  # skipped so only caller-supplied optional parameters are sent. Because each
  # call prepends, calling in the original assignment order reproduces the
  # exact final parameter order of the previous hand-rolled `if` chains.
  defp add_param(query, _name, nil), do: query
  defp add_param(query, name, value), do: [{name, value} | query]

  # NOTE: `method` is always an atom (:get/:post/:patch/:delete); the previous
  # spec incorrectly declared it as binary().
  @spec request(AWS.Client.t(), atom(), binary(), list(), list(), map(), list(), pos_integer()) ::
          {:ok, Poison.Parser.t(), Poison.Response.t()}
          | {:error, Poison.Parser.t()}
          | {:error, HTTPoison.Error.t()}
  defp request(client, method, path, query, headers, input, options, success_status_code) do
    client = %{client | service: "managedblockchain"}
    host = build_host("managedblockchain", client)

    url =
      host
      |> build_url(path, client)
      |> add_query(query)

    additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
    headers = AWS.Request.add_headers(additional_headers, headers)

    payload = encode_payload(input)
    headers = AWS.Request.sign_v4(client, method, url, headers, payload)
    perform_request(method, url, payload, headers, options, success_status_code)
  end

  # No expected status code: accept any 200/202/204 as success.
  defp perform_request(method, url, payload, headers, options, nil) do
    case HTTPoison.request(method, url, payload, headers, options) do
      # Empty success body: nothing to decode. Previously only 200 was handled
      # here, so a 202/204 with an empty body crashed in Poison.Parser.parse!/2.
      {:ok, %HTTPoison.Response{status_code: status_code, body: ""} = response}
      when status_code in [200, 202, 204] ->
        {:ok, response}

      {:ok, %HTTPoison.Response{status_code: status_code, body: body} = response}
      when status_code in [200, 202, 204] ->
        {:ok, Poison.Parser.parse!(body, %{}), response}

      {:ok, %HTTPoison.Response{body: body}} ->
        {:error, Poison.Parser.parse!(body, %{})}

      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # Caller specified the exact success status code to match on.
  defp perform_request(method, url, payload, headers, options, success_status_code) do
    case HTTPoison.request(method, url, payload, headers, options) do
      {:ok, %HTTPoison.Response{status_code: ^success_status_code, body: ""} = response} ->
        {:ok, %{}, response}

      {:ok, %HTTPoison.Response{status_code: ^success_status_code, body: body} = response} ->
        {:ok, Poison.Parser.parse!(body, %{}), response}

      {:ok, %HTTPoison.Response{body: body}} ->
        {:error, Poison.Parser.parse!(body, %{})}

      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # "local" region targets a local test endpoint.
  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end

  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end

  defp build_url(host, path, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}#{path}"
  end

  defp add_query(url, []) do
    url
  end

  defp add_query(url, query) do
    querystring = AWS.Util.encode_query(query)
    "#{url}?#{querystring}"
  end

  defp encode_payload(input) do
    if input != nil, do: Poison.Encoder.encode(input, %{}), else: ""
  end
end
lib/aws/managed_blockchain.ex
0.836187
0.488466
managed_blockchain.ex
starcoder
defmodule SteamEx.IPlayerService do
  @moduledoc """
  Provides additional methods for interacting with Steam Users. See
  [ISteamUser](https://partner.steamgames.com/doc/webapi/ISteamUser) for the
  primary interface.

  **NOTE:** This is a Service interface, methods in this interface should be
  called with the `input_json` parameter.

  For more info on how to use the Steamworks Web API please see the
  [Web API Overview](https://partner.steamgames.com/doc/webapi_overview).
  """
  import SteamEx.API.Base

  @interface "IPlayerService"

  @doc """
  Tracks playtime for a user when they are offline

  | Name | Type | Required | Description |
  | steamid | uint64 | ✔ | The player we're asking about|
  | ticket | string | ✔ | The users authentication ticket.|
  | play_sessions | {message} | ✔ | The recorded play sessions.|

  See other: [https://partner.steamgames.com/doc/webapi/IPlayerService#RecordOfflinePlaytime](https://partner.steamgames.com/doc/webapi/IPlayerService#RecordOfflinePlaytime)
  """
  def record_offline_playtime(access_key, params \\ %{}, headers \\ %{}) do
    post(@interface <> "/RecordOfflinePlaytime/v1/", access_key, params, headers)
  end

  @doc """
  Gets information about a player's recently played games

  | Name | Type | Required | Description |
  | key | string | ✔ | Steamworks Web API user authentication key.|
  | steamid | uint64 | ✔ | The player we're asking about|
  | count | uint32 | ✔ | The number of games to return (0/unset: all)|

  See other: [https://partner.steamgames.com/doc/webapi/IPlayerService#GetRecentlyPlayedGames](https://partner.steamgames.com/doc/webapi/IPlayerService#GetRecentlyPlayedGames)
  """
  def get_recently_played_games(access_key, params \\ %{}, headers \\ %{}) do
    get(@interface <> "/GetRecentlyPlayedGames/v1/", access_key, params, headers)
  end

  @doc """
  Return a list of games owned by the player

  | Name | Type | Required | Description |
  | key | string | ✔ | Steamworks Web API user authentication key.|
  | steamid | uint64 | ✔ | The player we're asking about|
  | include_appinfo | bool | ✔ | true if we want additional details (name, icon) about each game|
  | include_played_free_games| bool | ✔ | Free games are excluded by default. If this is set, free games the user has played will be returned.|
  | appids_filter | uint32 | ✔ | if set, restricts result set to the passed in apps|

  See other: [https://partner.steamgames.com/doc/webapi/IPlayerService#GetOwnedGames](https://partner.steamgames.com/doc/webapi/IPlayerService#GetOwnedGames)
  """
  def get_owned_games(access_key, params \\ %{}, headers \\ %{}) do
    get(@interface <> "/GetOwnedGames/v1/", access_key, params, headers)
  end

  @doc """
  Returns the Steam Level of a user

  | Name | Type | Required | Description |
  | key | string | ✔ | Steamworks Web API user authentication key.|
  | steamid | uint64 | ✔ | The player we're asking about|

  See other: [https://partner.steamgames.com/doc/webapi/IPlayerService#GetSteamLevel](https://partner.steamgames.com/doc/webapi/IPlayerService#GetSteamLevel)
  """
  def get_steam_level(access_key, params \\ %{}, headers \\ %{}) do
    get(@interface <> "/GetSteamLevel/v1/", access_key, params, headers)
  end

  @doc """
  Gets badges that are owned by a specific user

  | Name | Type | Required | Description |
  | key | string | ✔ | Steamworks Web API user authentication key.|
  | steamid | uint64 | ✔ | The player we're asking about|

  See other: [https://partner.steamgames.com/doc/webapi/IPlayerService#GetBadges](https://partner.steamgames.com/doc/webapi/IPlayerService#GetBadges)
  """
  def get_badges(access_key, params \\ %{}, headers \\ %{}) do
    get(@interface <> "/GetBadges/v1/", access_key, params, headers)
  end

  @doc """
  Gets all the quests needed to get the specified badge, and which are completed

  | Name | Type | Required | Description |
  | key | string | ✔ | Steamworks Web API user authentication key.|
  | steamid | uint64 | ✔ | The player we're asking about|
  | badgeid | int32 | ✔ | The badge we're asking about|

  See other: [https://partner.steamgames.com/doc/webapi/IPlayerService#GetCommunityBadgeProgress](https://partner.steamgames.com/doc/webapi/IPlayerService#GetCommunityBadgeProgress)
  """
  def get_community_badge_progress(access_key, params \\ %{}, headers \\ %{}) do
    get(@interface <> "/GetCommunityBadgeProgress/v1/", access_key, params, headers)
  end

  @doc """
  Returns valid lender SteamID if game currently played is borrowed

  | Name | Type | Required | Description |
  | key | string | ✔ | Steamworks Web API user authentication key.|
  | steamid | uint64 | ✔ | The player we're asking about|
  | appid_playing | uint32 | ✔ | The game player is currently playing|

  See other: [https://partner.steamgames.com/doc/webapi/IPlayerService#IsPlayingSharedGame](https://partner.steamgames.com/doc/webapi/IPlayerService#IsPlayingSharedGame)
  """
  def is_playing_shared_game(access_key, params \\ %{}, headers \\ %{}) do
    get(@interface <> "/IsPlayingSharedGame/v1/", access_key, params, headers)
  end
end
lib/interfaces/i_player_service.ex
0.782704
0.474266
i_player_service.ex
starcoder
defmodule ExAws.S3.Upload do
  @moduledoc """
  Represents an AWS S3 Multipart Upload operation

  ## Examples
  ```
  "path/to/big/file"
  |> S3.Upload.stream_file
  |> S3.upload("my-bucket", "path/on/s3")
  |> ExAws.request! #=> :done
  ```
  """

  @enforce_keys ~w(bucket path src)a
  defstruct [
    :src,
    :bucket,
    :path,
    :upload_id,
    opts: [],
    service: :s3,
  ]

  @type t :: %__MODULE__{
    src: Enumerable.t,
    bucket: binary,
    path: binary,
    upload_id: binary,
    opts: Keyword.t,
    service: :s3
  }

  @doc """
  Completes the multipart upload. `parts` is a list of `{index, etag}` tuples,
  which is sorted by index before being sent to S3.
  """
  def complete!(parts, op, config) do
    ordered = Enum.sort_by(parts, &elem(&1, 0))

    op.bucket
    |> ExAws.S3.complete_multipart_upload(op.path, op.upload_id, ordered)
    |> ExAws.request!(config)
  end

  @doc """
  Initiates the multipart upload and returns the operation with its
  `:upload_id` filled in from the S3 response.
  """
  def initialize!(op, config) do
    %{body: %{upload_id: upload_id}} =
      op.bucket
      |> ExAws.S3.initiate_multipart_upload(op.path, op.opts)
      |> ExAws.request!(config)

    %{op | upload_id: upload_id}
  end

  @doc """
  Open a file stream for use in an upload.

  Chunk size must be at least 5 MiB. Defaults to 5 MiB
  """
  @spec stream_file(path :: binary) :: File.Stream.t
  @spec stream_file(path :: binary, opts :: [chunk_size: pos_integer]) :: File.Stream.t
  def stream_file(path, opts \\ []) do
    chunk_size = opts[:chunk_size] || 5 * 1024 * 1024
    File.stream!(path, [:raw, :read_ahead, :binary], chunk_size)
  end

  @doc """
  Upload a chunk for an operation. The first argument is a tuple with the
  binary contents of the chunk, and a positive integer index indicating which
  chunk it is. It will return this index along with the `etag` response from
  AWS necessary to complete the multipart upload.
  """
  @spec upload_chunk!({binary, pos_integer}, t, ExAws.Config.t) :: {pos_integer, binary}
  def upload_chunk!({chunk, index}, op, config) do
    %{headers: response_headers} =
      op.bucket
      |> ExAws.S3.upload_part(op.path, op.upload_id, index, chunk, op.opts)
      |> ExAws.request!(config)

    # Header name casing varies, so compare case-insensitively.
    {_name, etag} =
      Enum.find(response_headers, fn {name, _value} -> String.downcase(name) == "etag" end)

    {index, etag}
  end
end

defimpl ExAws.Operation, for: ExAws.S3.Upload do
  alias Experimental.Flow
  alias ExAws.S3.Upload

  # Initiate the upload, push every chunk through a Flow, then complete.
  def perform(op, config) do
    initialized = Upload.initialize!(op, config)

    initialized.src
    |> build_producer(initialized.opts)
    |> Flow.map(&Upload.upload_chunk!(&1, initialized, config))
    |> Enum.to_list
    |> Upload.complete!(initialized, config)

    {:ok, :done}
  end

  def stream!(_, _), do: raise "not implemented"

  # A caller-supplied Flow is used as-is.
  defp build_producer(%Flow{} = flow, _opts), do: flow

  # Any other enumerable is indexed (1-based) and wrapped in a Flow with
  # bounded concurrency.
  defp build_producer(source, opts) do
    source
    |> Stream.with_index(1)
    |> Flow.from_enumerable(
      stages: Keyword.get(opts, :max_concurrency, 4),
      max_demand: 2
    )
  end
end
lib/ex_aws/s3/upload.ex
0.858229
0.749706
upload.ex
starcoder
defmodule Mtpo.Guesses.Guess do
  use Ecto.Schema
  import Ecto.Changeset
  alias Mtpo.Guesses.Guess
  alias Mtpo.Rounds

  # Matches times like "1:23.45" or "23.45"; the minutes group is optional.
  @value_regex ~r/^(?<min>\d+)?[\.:]?(?<sec>\d\d)[:\.](?<frac>\d\d)$/

  schema "guesses" do
    field :value, :string
    belongs_to :user, Mtpo.Users.User
    belongs_to :round, Mtpo.Rounds.Round

    timestamps()
  end

  @doc """
  Parse and validate a time-formatted string.

  ## Examples

      iex> check_time("0:40.99")
      {:ok, "0:40.99"}

      iex> check_time("40.99")
      {:ok, "0:40.99"}

      iex> check_time("foo")
      :error
  """
  def check_time(value) do
    case Regex.named_captures(@value_regex, value) do
      %{"min" => "", "sec" => sec, "frac" => frac} ->
        {:ok, "0:" <> sec <> "." <> frac}

      %{"min" => min, "sec" => sec, "frac" => frac} ->
        {:ok, min <> ":" <> sec <> "." <> frac}

      nil ->
        :error
    end
  end

  @doc """
  Parse a time-formatted string into a float number of seconds. A missing
  minutes component (e.g. "40.99") is treated as 0, consistent with
  `check_time/1`.

  ## Examples

      iex> value_seconds("0:40.99")
      40.99
  """
  def value_seconds(value) do
    %{
      "min" => minutes,
      "sec" => seconds,
      "frac" => fraction
    } = Regex.named_captures(@value_regex, value)

    # The minutes group is optional and captures "" when absent, which
    # Float.parse/1 rejects; previously this raised a MatchError for inputs
    # like "40.99". Default missing minutes to 0.0.
    minutes =
      case Float.parse(minutes) do
        {min, ""} -> min
        :error -> 0.0
      end

    {seconds, ""} = Float.parse(seconds <> "." <> fraction)
    minutes * 60.0 + seconds
  end

  @doc """
  Verify that associated rounds are in_progress.
  """
  def validate_round(%Ecto.Changeset{} = changeset) do
    %{changes: changes, errors: errors} = changeset
    value = Map.get(changes, :round_id)

    new =
      if not is_nil(value) and Rounds.get_round!(value).state == :in_progress do
        []
      else
        [{:state, {"round is not in progress", [validation: :round]}}]
      end

    case new do
      [] -> changeset
      [_ | _] -> %{changeset | errors: new ++ errors, valid?: false}
    end
  end

  @doc false
  def changeset(%Guess{} = guess, attrs) do
    guess
    |> cast(attrs, [:round_id, :user_id, :value])
    |> validate_required([:round_id, :user_id, :value])
    |> validate_format(:value, @value_regex)
    |> Guess.validate_round
    |> unique_constraint(:round_id, name: :guesses_user_id_round_id_index)
    |> unique_constraint(:round_id, name: :guesses_value_round_id_index)
  end
end
lib/mtpo/guesses/guess.ex
0.668664
0.402128
guess.ex
starcoder
defmodule X509.RDNSequence do
  @moduledoc """
  Convenience functions for creating `:rdnSequence` tuples, as defined in
  Erlang's `:public_key` module as the `issuer_name()` type, and representing
  the X.509 RDNSequence type. RDNSequences are primarily used for the Subject
  and Issuer fields of certificates, as well as the Subject field of CSRs.

  Note that this module implements a commonly used subset of RDNSequence
  values. It supports only a single attribute type/value pair for each element
  in the sequence, and it implements the attribute types specified in RFC5280,
  section 4.1.2.4, with a few extensions from LDAP:

  * countryName (C)
  * organizationName (O)
  * organizationalUnitName (OU)
  * dnQualifier
  * stateOrProvinceName (ST)
  * commonName (CN)
  * serialNumber
  * localityName (L)
  * title
  * name
  * surname (SN)
  * givenName (GN)
  * initials
  * pseudonym
  * generationQualifier
  * domainComponent (DC)
  * emailAddress (E)

  This module encodes values as UTF8String where possible, or else
  PrintableString/IA5String where required.
  """

  import X509.ASN1

  @typedoc "RDN sequence for use in OTP certificate and CSR records"
  @type t :: :public_key.issuer_name()

  @typedoc """
  RDN type/value pair.

  The first element must be a string (short or long form) or atom (long form)
  matching one of the supported attribute types. The second element is the
  associated string value, which will be encoded as appropriate for the
  attribute type.

  Alternatively, the attribute_type can be specified as an OID, in which case
  the value is passed to Erlang's `:public_key` module as-is. Examples of
  acceptable values include `'character list'` and `{:utf8String, "string"}`.
  """
  # FIX: the OID alternative used to be the 1-tuple `{:public_key.oid() | term()}`;
  # the corresponding new_attr/1 clause matches a 2-tuple {oid, value}.
  @type attr :: {binary() | atom(), binary()} | {:public_key.oid(), term()}

  @typedoc "List of RDN type/value pairs"
  @type attr_list :: [attr]

  @doc """
  Creates a new `:rdnSequence` tuple.

  The value can be specified in one of the following formats:

  * A string specifying the attributes in hierarchical format, e.g.
    "/C=US/ST=NT/L=Springfield/O=ACME Inc."
  * A string specifying a comma-separated list of attributes, e.g.
    "C=US, ST=NT, L=Springfield, O=ACME Inc."
  * An RDN attribute list (see type documentation)

  Note that the string parsers for the first two formats do not (currently)
  recognize escape characters: separator characters ('/' and ',',
  respectively) are not allowed in attribute values.

  The optional second parameter can be used to select the output format:

  * `:plain` - for use in `:Certificate` and `:CertificationRequest` records
    (default)
  * `:otp` - for use in `:OTPCertificate` records (see `X509.Certificate`)

  Raises an error when the given value cannot be parsed, contains unsupported
  attribute types, when attribute values exceed the maximum length ('upper
  bound' in the RFC) or when values cannot be coerced into the expected ASN.1
  type.

  ## Examples:

      iex> X509.RDNSequence.new("/C=US/CN=Bob")
      {:rdnSequence,
       [
         [{:AttributeTypeAndValue, {2, 5, 4, 6}, <<19, 2, 85, 83>>}],
         [{:AttributeTypeAndValue, {2, 5, 4, 3}, <<12, 3, 66, 111, 98>>}]
       ]}

      iex> X509.RDNSequence.new("C=CN, givenName=麗")
      {:rdnSequence,
       [
         [{:AttributeTypeAndValue, {2, 5, 4, 6}, <<19, 2, 67, 78>>}],
         [{:AttributeTypeAndValue, {2, 5, 4, 42}, <<12, 3, 233, 186, 151>>}]
       ]}

      iex> X509.RDNSequence.new(commonName: "Elixir")
      {:rdnSequence,
       [
         [{:AttributeTypeAndValue, {2, 5, 4, 3}, <<12, 6, 69, 108, 105, 120, 105, 114>>}]
       ]}

      iex> X509.RDNSequence.new(language: "Elixir")
      ** (FunctionClauseError) no function clause matching in X509.RDNSequence.new_attr/1

      iex> X509.RDNSequence.new("C=!!")
      ** (ArgumentError) unsupported character(s) in `PrintableString` attribute
  """
  @spec new(String.t() | attr_list(), :plain | :otp) :: t()
  def new(rdn, type \\ :plain)

  def new("/" <> string, type) do
    string
    |> String.split("/")
    |> Enum.map(&split_attr/1)
    |> new(type)
  end

  def new(string, type) when is_binary(string) do
    string
    |> String.split(~r/,\s*/)
    |> Enum.map(&split_attr/1)
    |> new(type)
  end

  def new(list, :plain) do
    # FIXME: avoid calls to undocumented functions in :public_key app
    list
    |> new(:otp)
    |> :pubkey_cert_records.transform(:encode)
  end

  def new(list, :otp) do
    {:rdnSequence, Enum.map(list, &[new_attr(&1)])}
  end

  @doc """
  Converts an `:rdnSequence` tuple to a human readable string, in hierarchical
  format.

  ## Examples:

      iex> X509.RDNSequence.new("C=CN, givenName=麗") |> X509.RDNSequence.to_string
      "/C=CN/GN=麗"
  """
  @spec to_string(t()) :: String.t()
  def to_string({:rdnSequence, sequence}) do
    "/" <> (sequence |> List.flatten() |> Enum.map(&attr_to_string/1) |> Enum.join("/"))
  end

  # Plain (DER-encoded binary) attribute values are decoded to OTP form first.
  defp attr_to_string({:AttributeTypeAndValue, _, value} = attr) when is_binary(value) do
    # FIXME: avoid calls to undocumented functions in :public_key app
    attr
    |> :pubkey_cert_records.transform(:decode)
    |> attr_to_string()
  end

  defp attr_to_string({:AttributeTypeAndValue, oid, value}) do
    attr_oid_to_string(oid) <> "=" <> attr_value_to_string(value)
  end

  defp attr_oid_to_string(oid(:"id-at-countryName")), do: "C"
  defp attr_oid_to_string(oid(:"id-at-organizationName")), do: "O"
  defp attr_oid_to_string(oid(:"id-at-organizationalUnitName")), do: "OU"
  defp attr_oid_to_string(oid(:"id-at-dnQualifier")), do: "dnQualifier"
  defp attr_oid_to_string(oid(:"id-at-stateOrProvinceName")), do: "ST"
  defp attr_oid_to_string(oid(:"id-at-commonName")), do: "CN"
  defp attr_oid_to_string(oid(:"id-at-serialNumber")), do: "serialNumber"
  defp attr_oid_to_string(oid(:"id-at-localityName")), do: "L"
  defp attr_oid_to_string(oid(:"id-at-title")), do: "title"
  defp attr_oid_to_string(oid(:"id-at-name")), do: "name"
  defp attr_oid_to_string(oid(:"id-at-surname")), do: "SN"
  defp attr_oid_to_string(oid(:"id-at-givenName")), do: "GN"
  defp attr_oid_to_string(oid(:"id-at-initials")), do: "initials"
  defp attr_oid_to_string(oid(:"id-at-pseudonym")), do: "pseudonym"
  defp attr_oid_to_string(oid(:"id-at-generationQualifier")), do: "generationQualifier"
  defp attr_oid_to_string(oid(:"id-domainComponent")), do: "DC"
  defp attr_oid_to_string(oid(:"id-emailAddress")), do: "E"

  # Unknown OIDs are rendered in dotted-decimal notation
  defp attr_oid_to_string(oid) do
    oid
    |> Tuple.to_list()
    |> Enum.map_join(".", &Integer.to_string/1)
  end

  defp attr_value_to_string({:utf8String, value}), do: value
  defp attr_value_to_string(value), do: List.to_string(value)

  # Splits an attribute in the form of "type=value" into a {type, value} tuple
  defp split_attr(string) do
    string
    |> String.split("=", parts: 2)
    |> List.to_tuple()
  end

  # From RFC5280, Annex A.1
  @x520name_ub 131_072

  # Short name string mapping
  defp new_attr({"C", value}), do: new_attr({:countryName, value})
  defp new_attr({"O", value}), do: new_attr({:organizationName, value})
  defp new_attr({"OU", value}), do: new_attr({:organizationalUnitName, value})
  defp new_attr({"ST", value}), do: new_attr({:stateOrProvinceName, value})
  defp new_attr({"CN", value}), do: new_attr({:commonName, value})
  defp new_attr({"L", value}), do: new_attr({:localityName, value})
  # FIX: used to forward to :surName, for which no clause exists; every
  # "SN" attribute raised FunctionClauseError. The atom clause is :surname.
  defp new_attr({"SN", value}), do: new_attr({:surname, value})
  defp new_attr({"GN", value}), do: new_attr({:givenName, value})
  defp new_attr({"DC", value}), do: new_attr({:domainComponent, value})
  defp new_attr({"E", value}), do: new_attr({:emailAddress, value})

  # Full name string mapping
  defp new_attr({"countryName", value}), do: new_attr({:countryName, value})
  defp new_attr({"organizationName", value}), do: new_attr({:organizationName, value})

  defp new_attr({"organizationalUnitName", value}),
    do: new_attr({:organizationalUnitName, value})

  defp new_attr({"dnQualifier", value}), do: new_attr({:dnQualifier, value})
  defp new_attr({"stateOrProvinceName", value}), do: new_attr({:stateOrProvinceName, value})
  defp new_attr({"commonName", value}), do: new_attr({:commonName, value})
  defp new_attr({"serialNumber", value}), do: new_attr({:serialNumber, value})
  defp new_attr({"localityName", value}), do: new_attr({:localityName, value})
  defp new_attr({"title", value}), do: new_attr({:title, value})
  defp new_attr({"name", value}), do: new_attr({:name, value})
  defp new_attr({"surname", value}), do: new_attr({:surname, value})
  defp new_attr({"givenName", value}), do: new_attr({:givenName, value})
  defp new_attr({"initials", value}), do: new_attr({:initials, value})
  defp new_attr({"pseudonym", value}), do: new_attr({:pseudonym, value})
  defp new_attr({"generationQualifier", value}), do: new_attr({:generationQualifier, value})
  defp new_attr({"domainComponent", value}), do: new_attr({:domainComponent, value})
  defp new_attr({"emailAddress", value}), do: new_attr({:emailAddress, value})

  defp new_attr({:name, value}) when byte_size(value) <= @x520name_ub do
    attribute_type_and_value(type: oid(:"id-at-name"), value: {:utf8String, value})
  end

  defp new_attr({:surname, value}) when byte_size(value) <= @x520name_ub do
    attribute_type_and_value(type: oid(:"id-at-surname"), value: {:utf8String, value})
  end

  defp new_attr({:givenName, value}) when byte_size(value) <= @x520name_ub do
    attribute_type_and_value(type: oid(:"id-at-givenName"), value: {:utf8String, value})
  end

  defp new_attr({:initials, value}) when byte_size(value) <= @x520name_ub do
    attribute_type_and_value(type: oid(:"id-at-initials"), value: {:utf8String, value})
  end

  defp new_attr({:generationQualifier, value}) when byte_size(value) <= @x520name_ub do
    attribute_type_and_value(type: oid(:"id-at-generationQualifier"), value: {:utf8String, value})
  end

  defp new_attr({:dnQualifier, value}) do
    attribute_type_and_value(type: oid(:"id-at-dnQualifier"), value: printableString(value))
  end

  defp new_attr({:countryName, value}) do
    attribute_type_and_value(type: oid(:"id-at-countryName"), value: printableString(value, 2))
  end

  defp new_attr({:serialNumber, value}) do
    attribute_type_and_value(type: oid(:"id-at-serialNumber"), value: printableString(value, 64))
  end

  defp new_attr({:commonName, value}) when byte_size(value) <= 256 do
    attribute_type_and_value(type: oid(:"id-at-commonName"), value: {:utf8String, value})
  end

  defp new_attr({:localityName, value}) when byte_size(value) <= 256 do
    attribute_type_and_value(type: oid(:"id-at-localityName"), value: {:utf8String, value})
  end

  defp new_attr({:stateOrProvinceName, value}) when byte_size(value) <= 256 do
    attribute_type_and_value(type: oid(:"id-at-stateOrProvinceName"), value: {:utf8String, value})
  end

  defp new_attr({:organizationName, value}) when byte_size(value) <= 256 do
    attribute_type_and_value(type: oid(:"id-at-organizationName"), value: {:utf8String, value})
  end

  defp new_attr({:organizationalUnitName, value}) when byte_size(value) <= 256 do
    attribute_type_and_value(
      type: oid(:"id-at-organizationalUnitName"),
      value: {:utf8String, value}
    )
  end

  defp new_attr({:title, value}) when byte_size(value) <= 256 do
    attribute_type_and_value(type: oid(:"id-at-title"), value: {:utf8String, value})
  end

  defp new_attr({:pseudonym, value}) when byte_size(value) <= 256 do
    attribute_type_and_value(type: oid(:"id-at-pseudonym"), value: {:utf8String, value})
  end

  defp new_attr({:domainComponent, value}) when byte_size(value) <= 63 do
    attribute_type_and_value(type: oid(:"id-domainComponent"), value: ia5String(value))
  end

  defp new_attr({:emailAddress, value}) when byte_size(value) <= 255 do
    attribute_type_and_value(type: oid(:"id-emailAddress"), value: ia5String(value))
  end

  # Opaque values can be specified by OID; the value is not interpreted
  defp new_attr({oid, value}) when is_tuple(oid) do
    attribute_type_and_value(type: oid, value: value)
  end

  @printableString [
                     ?A..?Z |> Enum.into([]),
                     ?a..?z |> Enum.into([]),
                     ?0..?9 |> Enum.into([]),
                     ' \'()+,-./:=?'
                   ]
                   |> List.flatten()

  # Convert a string (or character list) to ASN.1 PrintableString format.
  # Raises ArgumentError if the string contains unsupported characters or
  # exceeds the given maximum length
  defp printableString(string, ub \\ nil)

  defp printableString(string, ub) when is_binary(string) do
    string
    |> String.to_charlist()
    |> printableString(ub)
  end

  defp printableString(charlist, ub) do
    if Enum.all?(charlist, &(&1 in @printableString)) do
      if is_nil(ub) or length(charlist) <= ub do
        charlist
      else
        raise ArgumentError, "attribute value exceeds maximum length"
      end
    else
      raise ArgumentError, "unsupported character(s) in `PrintableString` attribute"
    end
  end

  # Only allow printable IA5 characters
  @ia5String 32..125 |> Enum.into([])

  # Convert a string (or character list) to ASN.1 IA5String format.
  # Raises ArgumentError if the string contains unsupported characters or
  # exceeds the given maximum length
  defp ia5String(string, ub \\ nil)

  defp ia5String(string, ub) when is_binary(string) do
    string
    |> String.to_charlist()
    |> ia5String(ub)
  end

  defp ia5String(charlist, ub) do
    if Enum.all?(charlist, &(&1 in @ia5String)) do
      if is_nil(ub) or length(charlist) <= ub do
        charlist
      else
        raise ArgumentError, "attribute value exceeds maximum length"
      end
    else
      raise ArgumentError, "unsupported character(s) in `IA5String` attribute"
    end
  end
end
lib/x509/rdn_sequence.ex
0.86738
0.581125
rdn_sequence.ex
starcoder
defmodule AutoApi.DashboardLightsState do
  @moduledoc """
  Keeps Dashboard Lights state
  """

  alias AutoApi.{CommonData, State}

  # Generates the struct and the binary parse/serialize helpers
  # (parse_bin_properties/2, parse_state_properties/1) from the spec file.
  use AutoApi.State, spec_file: "dashboard_lights.json"

  # Closed set of dashboard light identifiers, per the dashboard_lights spec.
  @type light_name ::
          :high_beam
          | :low_beam
          | :hazard_warning
          | :brake_failure
          | :hatch_open
          | :fuel_level
          | :engine_coolant_temperature
          | :battery_charging_condition
          | :engine_oil
          | :position_lights
          | :front_fog_light
          | :rear_fog_light
          | :park_heating
          | :engine_indicator
          | :service_call
          | :transmission_fluid_temperature
          | :transmission_failure
          | :anti_lock_brake_failure
          | :worn_brake_linings
          | :windscreen_washer_fluid
          | :tire_failure
          | :engine_oil_level
          | :engine_coolant_level
          | :steering_failure
          | :esc_indication
          | :brake_lights
          | :adblue_level
          | :fuel_filter_diff_pressure
          | :seat_belt
          | :advanced_braking
          | :acc
          | :trailer_connected
          | :airbag
          | :esc_switched_off
          | :lane_departure_warning_off
          | :air_filter_minder
          | :air_suspension_ride_control_fault
          | :all_wheel_drive_disabled
          | :anti_theft
          | :blind_spot_detection
          | :charge_system_fault
          | :check_fuel_cap
          | :check_fuel_fill_inlet
          | :check_fuel_filter
          | :dc_temp_warning
          | :dc_warning_status
          | :diesel_engine_idle_shutdown
          | :diesel_engine_warning
          | :diesel_exhaust_fluid_system_fault
          | :diesel_exhaust_over_temp
          | :diesel_exhaust_fluid_quality
          | :diesel_filter_regeneration
          | :diesel_particulate_filter
          | :diesel_pre_heat
          | :electric_trailer_brake_connection
          | :ev_battery_cell_max_volt_warning
          | :ev_battery_cell_min_volt_warning
          | :ev_battery_charge_energy_storage_warning
          | :ev_battery_high_level_warning
          | :ev_battery_high_temperature_warning
          | :ev_battery_insulation_resist_warning
          | :ev_battery_jump_level_warning
          | :ev_battery_low_level_warning
          | :ev_battery_max_volt_veh_energy_warning
          | :ev_battery_min_volt_veh_energy_warning
          | :ev_battery_over_charge_warning
          | :ev_battery_poor_cell_warning
          | :ev_battery_temp_diff_warning
          | :forward_collision_warning
          | :fuel_door_open
          | :hill_descent_control_fault
          | :hill_start_assist_warning
          | :hv_interlocking_status_warning
          | :lighting_system_failure
          | :malfunction_indicator
          | :motor_controller_temp_warning
          | :park_aid_malfunction
          | :passive_entry_passive_start
          | :powertrain_malfunction
          | :restraints_indicator_warning
          | :start_stop_engine_warning
          | :traction_control_disabled
          | :traction_control_active
          | :traction_motor_temp_warning
          | :tire_pressure_monitor_system_warning
          | :water_in_fuel
          | :tire_warning_front_right
          | :tire_warning_front_left
          | :tire_warning_rear_right
          | :tire_warning_rear_left
          | :tire_warning_system_error
          | :battery_low_warning
          | :brake_fluid_warning

  # One light reading: which light, and whether it is on or off.
  @type dashboard_light :: %{name: light_name, state: CommonData.on_off()}

  @type t :: %__MODULE__{
          dashboard_lights: State.multiple_property(dashboard_light())
        }

  @doc """
  Build state based on binary value

    iex> bin = <<1, 0, 5, 1, 0, 2, 1, 1>>
    iex> AutoApi.DashboardLightsState.from_bin(bin)
    %AutoApi.DashboardLightsState{dashboard_lights: [%AutoApi.Property{data: %{name: :low_beam, state: :on}}]}
  """
  @spec from_bin(binary) :: __MODULE__.t()
  def from_bin(bin) do
    # parse_bin_properties/2 is injected by `use AutoApi.State` above.
    parse_bin_properties(bin, %__MODULE__{})
  end

  @spec to_bin(__MODULE__.t()) :: binary
  @doc """
  Parse state to bin

    iex> state = %AutoApi.DashboardLightsState{dashboard_lights: [%AutoApi.Property{data: %{name: :low_beam, state: :off}}]}
    iex> AutoApi.DashboardLightsState.to_bin(state)
    <<1, 0, 5, 1, 0, 2, 1, 0>>
  """
  def to_bin(%__MODULE__{} = state) do
    # parse_state_properties/1 is injected by `use AutoApi.State` above.
    parse_state_properties(state)
  end
end
lib/auto_api/states/dashboard_lights_state.ex
0.695131
0.404155
dashboard_lights_state.ex
starcoder
defmodule Default do
  @moduledoc """
  Default configuration values for the VM.

  NOTE(review): `attrib_and_getter/2` (from `AttribTools`) presumably defines
  both a module attribute and a public getter function of the same name —
  confirm against AttribTools before relying on this.
  """

  defmodule Types do
    import AttribTools
    @compile :inline_list_funcs

    # Maps stack names (atoms) to the Type.t() of values held on that stack.
    @type stack_to_type :: %{Stack.name() => Type.t()}

    require Type

    @doc """
    The default phenome stack names and types
    """
    @spec phenome_stack_to_type() :: %{Stack.name() => Type.t()}
    attrib_and_getter(
      phenome_stack_to_type,
      %{
        exec: Type.code(),
        code: Type.code(),
        number: Type.number(),
        boolean: Type.boolean(),
        string: Type.string(),
        name: Type.name(),
        error: Type.code(),
        type: Type.type(),
        stack: Type.name(),
        return: Type.code(),
        aux: Type.code()
      }
    )

    @doc """
    The default genome stack names and types
    """
    # Genome stacks are the phenome stacks plus :blocks and :curr_block.
    attrib_and_getter(
      genome_stack_to_type,
      %{blocks: Type.code(), curr_block: Type.code()}
      |> Map.merge(@phenome_stack_to_type)
    )

    @doc """
    `to_stacks` takes a map of stack names to types, and returns a map of stack
    names to empty stacks
    """
    @spec to_stacks(stack_to_type()) :: %{Stack.name() => []}
    def to_stacks(stack_to_type_map),
      do:
        stack_to_type_map
        |> Enum.map(fn {stack, _} -> {stack, []} end)
        |> Enum.into(%{})

    @doc """
    `type_to_stacks_map` takes a map of stack names to types, and returns a map
    of types to stack names. The stack names are in alphabetical order
    """
    @spec type_to_stacks_map(stack_to_type()) :: %{Type.t() => [Stack.name()]}
    def type_to_stacks_map(stack_to_type_map),
      do:
        stack_to_type_map
        |> Enum.into([])
        |> Enum.group_by(&elem(&1, 1))
        |> Enum.map(fn {type, stack_kw} -> {type, Keyword.keys(stack_kw) |> Enum.sort()} end)
        |> Enum.into(%{})

    @doc """
    Type to default to if a type is needed but either the current type isn't
    compatible, or there is no current type
    """
    attrib_and_getter(default_type, Type.code())

    @doc """
    Stack to default to if a stack is needed but either the current stack isn't
    compatible, or there is no current stack
    """
    attrib_and_getter(default_stack, :code)
  end

  import AttribTools
  use Op.Record
  @compile :inline_list_funcs

  # Modules that contribute ops to the defined-names map below.
  attrib_and_getter(op_modules, [Op.Number, Op.Exec, Op.Stacks, Op.Code, Op.Tag])

  # Builds a name => op map by collecting ops() from each module. `oper` is
  # a record pattern provided by `use Op.Record`.
  @spec defined_names_map([module]) :: %{atom => Type.expr()}
  def defined_names_map(modules \\ @op_modules) do
    for(
      op = oper(name: name) <- List.flatten(Enum.map(modules, fn mod -> mod.ops() end)),
      into: %{}
    ) do
      {name, op}
    end
  end

  # TODO(ORBAT): use max_points
  @doc """
  The default maximum number of steps allowed per execution. One instruction or
  literal is one step, and descending into a list (so each `[]`) is one step.
  """
  @spec evalpush_limit() :: integer()
  attrib_and_getter(evalpush_limit, 1000)

  @doc """
  Default maximum number of points. Defaults to 100
  """
  attrib_and_getter(max_points, 100)

  @doc """
  Default bottom type. Defaults to `:code`
  """
  attrib_and_getter(input_bottom_type, :code)

  @doc """
  Maximum `GBTree` `delete` operations in a row before forcing a rebalance
  """
  attrib_and_getter(max_gbtree_deletes, 4)
end
lib/mulix/vm/default.ex
0.654122
0.604749
default.ex
starcoder
defmodule Cldr.Print.Format do
  @moduledoc false

  # Implements printf-style conversion specifiers. Each `format/3` clause
  # handles one conversion character; `format` is a keyword list of parsed
  # flags (:value, :width, :precision, :left_justify, :with_plus, ...).

  import Cldr.Print.Transform
  alias Cldr.Number.Format.Meta

  # %d — decimal, delegated to the CLDR decimal formatter for the backend.
  def format("d" = type, format, options) do
    backend = Keyword.get(options, :backend, Cldr.Print.Backend)
    formatter = Module.concat(backend, Number.Formatter.Decimal)
    meta = meta_from_format(type, format, backend, options)
    value = format[:value]
    options = maybe_set_number_system(format, backend, options)

    case formatter.to_string(value, meta, options) do
      {:error, {exception, reason}} -> raise exception, reason
      string -> maybe_add_padding(string, format[:width], format[:left_justify])
    end
  end

  # %i — integer: truncate the value, force precision 0, then format as %d.
  def format("i", format, options) do
    {_, format} = Keyword.get_and_update(format, :value, fn value -> {value, truncate(value)} end)
    format = Keyword.put(format, :precision, 0)
    format("d", format, options)
  end

  # %f — fixed point: apply default precision handling, then format as %d.
  def format("f", format, options) do
    format = maybe_add_precision(format, format[:precision])
    format("d", format, options)
  end

  # %F — uppercase fixed point.
  # FIX: this clause was missing, so the %G path below raised
  # FunctionClauseError whenever it chose fixed-point notation. Digits are
  # unaffected by upcasing; only alphabetic output (e.g. inf/nan) changes.
  def format("F", format, options) do
    format("f", format, options) |> String.upcase()
  end

  # %u — unsigned: format the absolute value as %d.
  def format("u", format, options) do
    {_, format} = Keyword.get_and_update(format, :value, fn value -> {value, absolute(value)} end)
    format("d", format, options)
  end

  # %e / %E — scientific notation, lower/upper case exponent marker.
  def format("e", format, options) do
    format = Keyword.put(format, :exponent, true)
    format("d", format, options) |> String.downcase()
  end

  def format("E", format, options) do
    format = Keyword.put(format, :exponent, true)
    format("d", format, options) |> String.upcase()
  end

  # %g / %G — shortest of %e/%f (resp. %E/%F), per printf semantics.
  def format("g", format, options) do
    format_e = format("e", format, options)

    if choose_f_or_e(format_e, format, "e") == :e do
      maybe_remove_zero_fraction(format_e)
    else
      format("f", format, options)
    end
  end

  def format("G", format, options) do
    format_e = format("E", format, options)

    if choose_f_or_e(format_e, format, "E") == :e do
      maybe_remove_zero_fraction(format_e)
    else
      format("F", format, options)
    end
  end

  # %s — string: truncate to precision, then pad to width.
  def format("s", format, _options) do
    padding = format[:width] || 0
    precision = format[:precision]
    left_or_right = format[:left_justify]
    value = format[:value]

    value
    |> slice(precision)
    |> justify(padding, left_or_right)
  end

  # %o — octal, with optional "0" prefix and sign.
  def format("o", format, options) do
    {_, format} =
      Keyword.get_and_update(format, :value, fn value ->
        {value, Integer.to_string(truncate(value), 8) |> String.downcase()}
      end)

    format =
      format
      |> maybe_add_zero_x("0", format[:leading_zero_x], options)
      |> maybe_add_plus(format[:with_plus], options)

    format("s", format, options)
  end

  # %x / %X — hexadecimal, lower/upper case, with optional "0x"/"0X" prefix.
  def format("x", format, options) do
    {_, format} =
      Keyword.get_and_update(format, :value, fn value ->
        {value, Integer.to_string(truncate(value), 16) |> String.downcase()}
      end)

    format =
      format
      |> maybe_add_zero_x("0x", format[:leading_zero_x], options)
      |> maybe_add_plus(format[:with_plus], options)

    format("s", format, options)
  end

  def format("X", format, options) do
    {_, format} =
      Keyword.get_and_update(format, :value, fn value ->
        {value, Integer.to_string(truncate(value), 16)}
      end)

    format =
      format
      |> maybe_add_zero_x("0X", format[:leading_zero_x], options)
      |> maybe_add_plus(format[:with_plus], options)

    format("s", format, options)
  end

  # Builds the Meta struct consumed by the backend decimal formatter.
  def meta_from_format("d", format, backend, options) do
    Meta.new()
    |> maybe_add_plus(format[:with_plus])
    |> maybe_add_fraction_digits(format[:precision])
    |> maybe_add_zero_fill(format, format[:left_justify], format[:zero_fill], format[:width])
    |> maybe_add_group(format[:group], backend, options)
    |> maybe_add_exponent(format[:exponent], format[:precision])
  end

  # Truncation toward zero for integers, floats and Decimals.
  defp truncate(number) when is_integer(number) do
    number
  end

  defp truncate(number) when is_float(number) do
    trunc(number)
  end

  defp truncate(%Decimal{} = number) do
    Decimal.round(number, 0)
  end

  def absolute(number) when is_number(number) do
    abs(number)
  end

  def absolute(%Decimal{} = number) do
    Decimal.abs(number)
  end

  # nil precision means "no truncation" for %s.
  defp slice(string, nil) do
    string
  end

  defp slice(string, precision) do
    String.slice(string, 0, precision)
  end

  # Left-justify pads on the right; default (nil) pads on the left.
  defp justify(string, padding, true) do
    String.pad_trailing(string, padding)
  end

  defp justify(string, padding, nil) do
    String.pad_leading(string, padding)
  end

  # Decides between scientific (:e) and fixed (:f) notation for %g/%G based
  # on the exponent of the already-formatted scientific representation.
  defp choose_f_or_e(string, format, type) do
    [_, exponent] = String.split(string, type)
    exponent = String.to_integer(exponent)
    if exponent < -4 || exponent > format[:precision], do: :e, else: :f
  end
end
lib/format.ex
0.591959
0.416945
format.ex
starcoder
defmodule OMG.Eth.RootChain.AbiFunctionSelector do
  @moduledoc """
  We define Solidity Function selectors that help us decode returned values from function calls
  """

  # Each function below returns a hand-built %ABI.FunctionSelector{} for one
  # root-chain contract function. The `method_id` bytes are the first four
  # bytes of the keccak-256 hash of the canonical signature and must not be
  # edited by hand.

  # workaround for https://github.com/omgnetwork/elixir-omg/issues/1632
  def start_exit() do
    %ABI.FunctionSelector{
      function: "startExit",
      input_names: [
        "utxoPosToExit",
        "rlpOutputTxToContract",
        "outputTxToContractInclusionProof",
        "rlpInputCreationTx",
        "inputCreationTxInclusionProof",
        "utxoPosInput"
      ],
      inputs_indexed: nil,
      method_id: <<191, 31, 49, 109>>,
      returns: [],
      type: :function,
      types: [{:uint, 256}, :bytes, :bytes, :bytes, :bytes, {:uint, 256}]
    }
  end

  def start_standard_exit() do
    %ABI.FunctionSelector{
      function: "startStandardExit",
      input_names: ["utxoPos", "rlpOutputTx", "outputTxInclusionProof"],
      inputs_indexed: nil,
      method_id: <<112, 224, 20, 98>>,
      returns: [],
      type: :function,
      types: [tuple: [{:uint, 256}, :bytes, :bytes]]
    }
  end

  def challenge_in_flight_exit_not_canonical() do
    %ABI.FunctionSelector{
      function: "challengeInFlightExitNotCanonical",
      input_names: [
        "inputTx",
        "inputUtxoPos",
        "inFlightTx",
        "inFlightTxInputIndex",
        "competingTx",
        "competingTxInputIndex",
        "competingTxPos",
        "competingTxInclusionProof",
        "competingTxWitness"
      ],
      inputs_indexed: [true, true, true, true, true, true, true, true, true],
      method_id: <<232, 54, 34, 152>>,
      returns: [],
      type: :function,
      types: [
        tuple: [
          :bytes,
          {:uint, 256},
          :bytes,
          {:uint, 16},
          :bytes,
          {:uint, 16},
          {:uint, 256},
          :bytes,
          :bytes
        ]
      ]
    }
  end

  def start_in_flight_exit() do
    %ABI.FunctionSelector{
      function: "startInFlightExit",
      input_names: ["inFlightTx", "inputTxs", "inputUtxosPos", "inputTxsInclusionProofs", "inFlightTxWitnesses"],
      inputs_indexed: nil,
      method_id: <<90, 82, 133, 20>>,
      returns: [],
      type: :function,
      types: [
        tuple: [
          :bytes,
          {:array, :bytes},
          {:array, {:uint, 256}},
          {:array, :bytes},
          {:array, :bytes}
        ]
      ]
    }
  end

  # min_exit_period/0, get_version/0, exit_games/0, vaults/0 are
  # victims of unfortinate bug: https://github.com/poanetwork/ex_abi/issues/25
  # All these selectors were intially pulled in with
  # `ABI.parse_specification(contract_abi_json_decoded,include_events?: true)`
  # and later modified so that `types` hold what `returns` should have because of
  # issue 25.
  # the commented properties of the struct is what it was generated,
  # the new types were added to mitigate the bug.
  def min_exit_period() do
    %ABI.FunctionSelector{
      function: "minExitPeriod",
      input_names: ["min_exit_period"],
      inputs_indexed: nil,
      method_id: <<212, 162, 180, 239>>,
      # returns: [uint: 256],
      type: :function,
      # types: []
      types: [uint: 256]
    }
  end

  def get_version() do
    %ABI.FunctionSelector{
      function: "getVersion",
      input_names: ["version"],
      inputs_indexed: nil,
      method_id: <<13, 142, 110, 44>>,
      # returns: [:string],
      type: :function,
      # types: []
      types: [:string]
    }
  end

  def exit_games() do
    %ABI.FunctionSelector{
      function: "exitGames",
      input_names: ["exit_game_address"],
      inputs_indexed: nil,
      method_id: <<175, 7, 151, 100>>,
      # returns: [:address],
      type: :function,
      # types: [uint: 256]
      types: [:address]
    }
  end

  def vaults() do
    %ABI.FunctionSelector{
      function: "vaults",
      input_names: ["vault_address"],
      inputs_indexed: nil,
      method_id: <<140, 100, 234, 74>>,
      # returns: [:address],
      type: :function,
      # types: [uint: 256]
      types: [:address]
    }
  end

  def child_block_interval() do
    %ABI.FunctionSelector{
      function: "childBlockInterval",
      input_names: ["child_block_interval"],
      inputs_indexed: nil,
      method_id: <<56, 169, 224, 188>>,
      # returns: [uint: 256],
      type: :function,
      # types: []
      types: [uint: 256]
    }
  end

  def next_child_block() do
    %ABI.FunctionSelector{
      function: "nextChildBlock",
      input_names: ["block_number"],
      inputs_indexed: nil,
      method_id: <<76, 168, 113, 79>>,
      # returns: [uint: 256],
      type: :function,
      # types: []
      types: [uint: 256]
    }
  end

  def blocks() do
    %ABI.FunctionSelector{
      function: "blocks",
      input_names: ["block_hash", "block_timestamp"],
      inputs_indexed: nil,
      method_id: <<242, 91, 63, 153>>,
      # returns: [bytes: 32, uint: 256],
      type: :function,
      # types: [uint: 256]
      types: [bytes: 32, uint: 256]
    }
  end

  def standard_exits() do
    %ABI.FunctionSelector{
      function: "standardExits",
      input_names: ["standard_exit_structs"],
      inputs_indexed: nil,
      method_id: <<12, 165, 182, 118>>,
      # returns: [
      #   array: {:tuple, [:bool, {:uint, 256}, {:bytes, 32}, :address, {:uint, 256}, {:uint, 256}]}
      # ],
      type: :function,
      # types: [array: {:uint, 160}]
      types: [
        array: {:tuple, [:bool, {:uint, 256}, {:bytes, 32}, :address, {:uint, 256}, {:uint, 256}]}
      ]
    }
  end

  def in_flight_exits() do
    %ABI.FunctionSelector{
      function: "inFlightExits",
      input_names: ["in_flight_exit_structs"],
      inputs_indexed: nil,
      method_id: <<206, 201, 225, 167>>,
      # returns: [
      #   array: {:tuple,
      #    [
      #      :bool,
      #      {:uint, 64},
      #      {:uint, 256},
      #      {:uint, 256},
      #      {:array, :tuple, 4},
      #      {:array, :tuple, 4},
      #      :address,
      #      {:uint, 256},
      #      {:uint, 256}
      #    ]}
      # ],
      type: :function,
      # types: [array: {:uint, 160}]
      types: [
        {:array, {:tuple, [:bool, {:uint, 64}, {:uint, 256}, {:uint, 256}, :address, {:uint, 256}, {:uint, 256}]}}
      ]
    }
  end
end
apps/omg_eth/lib/omg_eth/root_chain/abi_function_selector.ex
0.630002
0.480479
abi_function_selector.ex
starcoder
defmodule Money.Currency do @moduledoc """ Provides currency support to `Money` Some useful helper methods include: * `get/1` * `get!/1` * `exists?/1` * `to_atom/1` * `name/1` * `name!/1` * `symbol/1` * `symbol!/1` * `all/0` A helper function exists for each currency using the lowercase three-character currency code ## Examples iex> Money.Currency.usd(100) %Money{amount: 100, currency: :USD} """ defp custom_currencies do Enum.into(Application.get_env(:money, :custom_currencies, []), %{}) end @currencies %{ AED: %{name: "UAE Dirham", symbol: "د.إ", exponent: 2, number: 784}, AFN: %{name: "Afghani", symbol: "؋", exponent: 2, number: 971}, ALL: %{name: "Lek", symbol: "Lek", exponent: 2, number: 008}, AMD: %{name: "Armenian Dram", symbol: "AMD", exponent: 2, number: 051}, ANG: %{name: "Netherlands Antillian Guilder", symbol: "ƒ", exponent: 2, number: 532}, AOA: %{name: "Kwanza", symbol: "Kz", exponent: 2, number: 973}, ARS: %{name: "Argentine Peso", symbol: "$", exponent: 2, number: 032}, AUD: %{name: "Australian Dollar", symbol: "$", exponent: 2, number: 036}, AWG: %{name: "Aruban Guilder", symbol: "ƒ", exponent: 2, number: 533}, AZN: %{name: "Azerbaijanian Manat", symbol: "ман", exponent: 2, number: 944}, BAM: %{name: "Convertible Marks", symbol: "KM", exponent: 2, number: 977}, BBD: %{name: "Barbados Dollar", symbol: "$", exponent: 2, number: 052}, BDT: %{name: "Taka", symbol: "৳", exponent: 2, number: 050}, BGN: %{name: "Bulgarian Lev", symbol: "лв", exponent: 2, number: 975}, BHD: %{name: "Bahraini Dinar", symbol: ".د.ب", exponent: 3, number: 048}, BIF: %{name: "Burundi Franc", symbol: "FBu", exponent: 0, number: 108}, BMD: %{name: "Bermudian Dollar (customarily known as Bermuda Dollar)", symbol: "$", exponent: 2, number: 060}, BND: %{name: "Brunei Dollar", symbol: "$", exponent: 2, number: 096}, BOB: %{name: "Boliviano Mvdol", symbol: "$b", exponent: 2, number: 068}, BOV: %{name: "Boliviano Mvdol", symbol: "$b", exponent: 2, number: 984}, BRL: %{name: "Brazilian 
Real", symbol: "R$", exponent: 2, number: 986}, BSD: %{name: "Bahamian Dollar", symbol: "$", exponent: 2, number: 044}, BTN: %{name: "Indian Rupee Ngultrum", symbol: "Nu.", exponent: 2, number: 064}, BWP: %{name: "Pula", symbol: "P", exponent: 2, number: 072}, BYN: %{name: "Belarusian Ruble", symbol: "p.", exponent: 2, number: 933}, BYR: %{name: "Belarusian Ruble", symbol: "p.", exponent: 0, number: 933}, BZD: %{name: "Belize Dollar", symbol: "BZ$", exponent: 2, number: 084}, CAD: %{name: "Canadian Dollar", symbol: "$", exponent: 2, number: 124}, CDF: %{name: "<NAME>", symbol: "CF", exponent: 2, number: 976}, CHF: %{name: "Sw<NAME>", symbol: "CHF", exponent: 2, number: 756}, CLF: %{name: "Chilean Peso Unidades de fomento", symbol: "$", exponent: 4, number: 990}, CLP: %{name: "Chilean Peso Unidades de fomento", symbol: "$", exponent: 0, number: 152}, CNY: %{name: "<NAME>", symbol: "¥", exponent: 2, number: 156}, COP: %{name: "Colombian Peso", symbol: "$", exponent: 2, number: 170}, COU: %{name: "Colombian Peso Unidad de Valor Real", symbol: "$", exponent: 2, number: 970}, CRC: %{name: "Costa R<NAME>", symbol: "₡", exponent: 2, number: 188}, CUC: %{name: "Cuban Peso Peso Convertible", symbol: "₱", exponent: 2, number: 931}, CUP: %{name: "Cuban Peso Peso Convertible", symbol: "₱", exponent: 2, number: 192}, CVE: %{name: "Cape Ver<NAME>", symbol: "$", exponent: 0, number: 132}, CZK: %{name: "Czech Koruna", symbol: "Kč", exponent: 2, number: 203}, DJF: %{name: "<NAME>", symbol: "Fdj", exponent: 0, number: 262}, DKK: %{name: "<NAME>", symbol: "kr.", exponent: 2, number: 208}, DOP: %{name: "Dominican Peso", symbol: "RD$", exponent: 2, number: 214}, DZD: %{name: "Algerian Dinar", symbol: "دج", exponent: 2, number: 012}, EEK: %{name: "Kroon", symbol: "KR", exponent: 2, number: 233}, EGP: %{name: "Egyptian Pound", symbol: "£", exponent: 2, number: 818}, ERN: %{name: "Nakfa", symbol: "Nfk", exponent: 2, number: 232}, ETB: %{name: "Ethiopian Birr", symbol: "Br", exponent: 2, 
number: 230}, EUR: %{name: "Euro", symbol: "€", exponent: 2, number: 978}, FJD: %{name: "Fiji Dollar", symbol: "$", exponent: 2, number: 242}, FKP: %{name: "Falkland Islands Pound", symbol: "£", exponent: 2, number: 238}, GBP: %{name: "Pound Sterling", symbol: "£", exponent: 2, number: 826}, GEL: %{name: "Lari", symbol: "₾", exponent: 2, number: 981}, GHS: %{name: "Cedi", symbol: "GH₵", exponent: 2, number: 936}, GIP: %{name: "Gibraltar Pound", symbol: "£", exponent: 2, number: 292}, GMD: %{name: "Dalasi", symbol: "D", exponent: 2, number: 270}, GNF: %{name: "Guinea Franc", symbol: "FG", exponent: 0, number: 324}, GTQ: %{name: "Quetzal", symbol: "Q", exponent: 2, number: 320}, GYD: %{name: "Guyana Dollar", symbol: "$", exponent: 2, number: 328}, HKD: %{name: "Hong Kong Dollar", symbol: "$", exponent: 2, number: 344}, HNL: %{name: "Lempira", symbol: "L", exponent: 2, number: 340}, HRK: %{name: "Croatian Kuna", symbol: "kn", exponent: 2, number: 191}, HTG: %{name: "Gourde US Dollar", symbol: " ", exponent: 2, number: 332}, HUF: %{name: "Forint", symbol: "Ft", exponent: 2, number: 348}, IDR: %{name: "Rupiah", symbol: "Rp", exponent: 2, number: 360}, ILS: %{name: "New Israeli Sheqel", symbol: "₪", exponent: 2, number: 376}, INR: %{name: "Indian Rupee", symbol: "₹", exponent: 2, number: 356}, IQD: %{name: "Iraqi Dinar", symbol: "‎ع.د", exponent: 3, number: 368}, IRR: %{name: "Iranian Rial", symbol: "﷼", exponent: 2, number: 364}, ISK: %{name: "Iceland Krona", symbol: "kr", exponent: 0, number: 352}, JMD: %{name: "Jamaican Dollar", symbol: "J$", exponent: 2, number: 388}, JOD: %{name: "<NAME>", symbol: "JOD", exponent: 3, number: 400}, JPY: %{name: "Yen", symbol: "¥", exponent: 0, number: 392}, KES: %{name: "<NAME>", symbol: "KSh", exponent: 2, number: 404}, KGS: %{name: "Som", symbol: "лв", exponent: 2, number: 417}, KHR: %{name: "Riel", symbol: "៛", exponent: 2, number: 116}, KMF: %{name: "<NAME>", symbol: "CF", exponent: 0, number: 174}, KPW: %{name: "North Korean 
Won", symbol: "₩", exponent: 2, number: 408}, KRW: %{name: "Won", symbol: "₩", exponent: 0, number: 410}, KWD: %{name: "<NAME>", symbol: "د.ك", exponent: 3, number: 414}, KYD: %{name: "Cayman Islands Dollar", symbol: "$", exponent: 2, number: 136}, KZT: %{name: "Tenge", symbol: "лв", exponent: 2, number: 398}, LAK: %{name: "Kip", symbol: "₭", exponent: 2, number: 418}, LBP: %{name: "Lebanese Pound", symbol: "£", exponent: 2, number: 422}, LKR: %{name: "Sri Lanka Rupee", symbol: "₨", exponent: 2, number: 144}, LRD: %{name: "Liberian Dollar", symbol: "$", exponent: 2, number: 430}, LSL: %{name: "<NAME>", symbol: " ", exponent: 2, number: 426}, LTL: %{name: "Lithuanian Litas", symbol: "Lt", exponent: 2, number: 440}, LVL: %{name: "Latvian Lats", symbol: "Ls", exponent: 2, number: 428}, LYD: %{name: "<NAME>", symbol: "ل.د", exponent: 3, number: 434}, MAD: %{name: "<NAME>", symbol: "د.م.", exponent: 2, number: 504}, MDL: %{name: "<NAME>", symbol: "MDL", exponent: 2, number: 498}, MGA: %{name: "<NAME>", symbol: "Ar", exponent: 2, number: 969}, MKD: %{name: "Denar", symbol: "ден", exponent: 2, number: 807}, MMK: %{name: "Kyat", symbol: "K", exponent: 2, number: 104}, MNT: %{name: "Tugrik", symbol: "₮", exponent: 2, number: 496}, MOP: %{name: "Pataca", symbol: "MOP$", exponent: 2, number: 446}, MRO: %{name: "Ouguiya", symbol: "UM", exponent: 2, number: 478}, MRU: %{name: "Ouguiya", symbol: "UM", exponent: 2, number: 929}, MUR: %{name: "<NAME>", symbol: "₨", exponent: 2, number: 480}, MVR: %{name: "Rufiyaa", symbol: "Rf", exponent: 2, number: 462}, MWK: %{name: "Kwacha", symbol: "MK", exponent: 2, number: 454}, MXN: %{name: "Mexican Peso", symbol: "$", exponent: 2, number: 484}, MXV: %{name: "Mexican Peso Mexican Unidad de Inversion (UDI)", symbol: "UDI", exponent: 2, number: 979}, MYR: %{name: "<NAME>", symbol: "RM", exponent: 2, number: 458}, MZN: %{name: "Metical", symbol: "MT", exponent: 2, number: 943}, NAD: %{name: "<NAME>", symbol: "$", exponent: 2, number: 516}, 
NGN: %{name: "Naira", symbol: "₦", exponent: 2, number: 566}, NIO: %{name: "<NAME>", symbol: "C$", exponent: 2, number: 558}, NOK: %{name: "Norwegian Krone", symbol: "kr", exponent: 2, number: 578}, NPR: %{name: "Nepalese Rupee", symbol: "₨", exponent: 2, number: 524}, NZD: %{name: "New Zealand Dollar", symbol: "$", exponent: 2, number: 554}, OMR: %{name: "R<NAME>", symbol: "﷼", exponent: 3, number: 512}, PAB: %{name: "Balboa US Dollar", symbol: "B/.", exponent: 2, number: 590}, PEN: %{name: "Nuevo Sol", symbol: "S/.", exponent: 2, number: 604}, PGK: %{name: "Kina", symbol: "K", exponent: 2, number: 598}, PHP: %{name: "Phil<NAME>", symbol: "₱", exponent: 2, number: 608}, PKR: %{name: "Pakistan Rupee", symbol: "₨", exponent: 2, number: 586}, PLN: %{name: "Zloty", symbol: "zł", exponent: 2, number: 985}, PYG: %{name: "Guarani", symbol: "₲", exponent: 0, number: 600}, QAR: %{name: "Qatari Rial", symbol: "﷼", exponent: 2, number: 634}, RON: %{name: "New Leu", symbol: "lei", exponent: 2, number: 946}, RSD: %{name: "<NAME>", symbol: "Дин.", exponent: 2, number: 941}, RUB: %{name: "Russian Ruble", symbol: "₽", exponent: 2, number: 643}, RWF: %{name: "Rwanda Franc", symbol: " ", exponent: 0, number: 646}, SAR: %{name: "Saudi Riyal", symbol: "﷼", exponent: 2, number: 682}, SBD: %{name: "Solomon Islands Dollar", symbol: "$", exponent: 2, number: 090}, SCR: %{name: "Seychelles Rupee", symbol: "₨", exponent: 2, number: 690}, SDG: %{name: "Sudanese Pound", symbol: "SDG", exponent: 2, number: 938}, SEK: %{name: "Swedish Krona", symbol: "kr", exponent: 2, number: 752}, SGD: %{name: "Singapore Dollar", symbol: "$", exponent: 2, number: 702}, SHP: %{name: "S<NAME>", symbol: "£", exponent: 2, number: 654}, SLL: %{name: "Leone", symbol: "Le", exponent: 2, number: 694}, SOS: %{name: "S<NAME>", symbol: "S", exponent: 2, number: 706}, SRD: %{name: "Sur<NAME>", symbol: "$", exponent: 2, number: 968}, SSP: %{name: "South Sudanese Pound", symbol: "SS£", exponent: 2, number: 728}, STD: 
%{name: "Dobra", symbol: "Db", exponent: 2, number: 678}, STN: %{name: "Dobra", symbol: "Db", exponent: 2, number: 930}, SVC: %{name: "El Salvador Colon US Dollar", symbol: "$", exponent: 2, number: 222}, SYP: %{name: "<NAME>", symbol: "£", exponent: 2, number: 760}, SZL: %{name: "Lilangeni", symbol: "E", exponent: 2, number: 748}, THB: %{name: "Baht", symbol: "฿", exponent: 2, number: 764}, TJS: %{name: "Somoni", symbol: " ", exponent: 2, number: 972}, TMT: %{name: "Manat", symbol: "₼", exponent: 2, number: 934}, TND: %{name: "<NAME>", symbol: "د.ت", exponent: 2, number: 788}, TOP: %{name: "Pa'anga", symbol: "T$", exponent: 2, number: 776}, TRY: %{name: "Tur<NAME>", symbol: "TL", exponent: 2, number: 949}, TTD: %{name: "Trinidad and Tobago Dollar", symbol: "TT$", exponent: 2, number: 780}, TWD: %{name: "New Taiwan Dollar", symbol: "NT$", exponent: 2, number: 901}, TZS: %{name: "T<NAME>", symbol: "Tsh", exponent: 2, number: 834}, UAH: %{name: "Hryvnia", symbol: "₴", exponent: 2, number: 980}, UGX: %{name: "Ug<NAME>", symbol: "Ush", exponent: 0, number: 800}, USD: %{name: "US Dollar", symbol: "$", exponent: 2, number: 840}, USN: %{name: "US Dollar next-day funds", symbol: "$", exponent: 2, number: 997}, UYI: %{name: "Peso Uruguayo Uruguay Peso en Unidades Indexadas", symbol: "$U", exponent: 0, number: 940}, UYU: %{name: "Peso Uruguayo Uruguay Peso en Unidades Indexadas", symbol: "$U", exponent: 2, number: 858}, UZS: %{name: "Uzbekistan Sum", symbol: "лв", exponent: 2, number: 860}, VEF: %{name: "<NAME>", symbol: "Bs", exponent: 2, number: 937}, VES: %{name: "<NAME>", symbol: "Bs", exponent: 2, number: 928}, VND: %{name: "Dong", symbol: "₫", exponent: 0, number: 704}, VUV: %{name: "Vatu", symbol: "VT", exponent: 0, number: 548}, WST: %{name: "Tala", symbol: "WS$", exponent: 2, number: 882}, XAF: %{name: "CFA Franc BEAC", symbol: "FCFA", exponent: 0, number: 950}, XAG: %{name: "Silver", symbol: " ", exponent: 2, number: 961}, XAU: %{name: "Gold", symbol: " ", 
exponent: 2, number: 959}, XBA: %{name: "Bond Markets Units European Composite Unit (EURCO)", symbol: " ", exponent: 2, number: 955}, XBB: %{name: "European Monetary Unit (E.M.U.-6)", symbol: " ", exponent: 2, number: 956}, XBC: %{name: "European Unit of Account 9(E.U.A.-9)", symbol: " ", exponent: 2, number: 957}, XBD: %{name: "European Unit of Account 17(E.U.A.-17)", symbol: " ", exponent: 2, number: 958}, XCD: %{name: "East Caribbean Dollar", symbol: "$", exponent: 2, number: 951}, XDR: %{name: "SDR", symbol: " ", exponent: 2, number: 960}, XFU: %{name: "UIC-Franc", symbol: " ", exponent: 2, number: 000}, XOF: %{name: "CFA Franc BCEAO", symbol: " ", exponent: 0, number: 952}, XPD: %{name: "Palladium", symbol: " ", exponent: 2, number: 964}, XPF: %{name: "CFP Franc", symbol: " ", exponent: 0, number: 953}, XPT: %{name: "Platinum", symbol: " ", exponent: 2, number: 962}, XTS: %{name: "Codes specifically reserved for testing purposes", symbol: " ", exponent: 2, number: 963}, XSU: %{name: "Sucre", symbol: " ", exponent: 2, number: 994}, XUA: %{name: "ADB Unit of Account", symbol: " ", exponent: 2, number: 965}, YER: %{name: "<NAME>", symbol: "﷼", exponent: 2, number: 886}, ZAR: %{name: "Rand", symbol: "R", exponent: 2, number: 710}, ZMW: %{name: "<NAME>", symbol: "ZK", exponent: 2, number: 967}, ZWL: %{name: "Z<NAME>", symbol: "$", exponent: 2, number: 932} } @type index_type() :: Money.t() | String.t() | atom | integer defp all_currencies do Map.merge(@currencies, custom_currencies()) end Enum.each(@currencies, fn {cur, detail} -> currency = cur |> to_string() |> String.downcase() @doc """ Convenience method to create a `Money` object for the #{detail.name} (#{cur}) currency. 
## Examples iex> Money.Currency.#{currency}(100) %Money{amount: 100, currency: :#{cur}} """ def unquote(:"#{currency}")(amount) do Money.new(amount, unquote(cur)) end end) @spec all() :: map @doc ~S""" Returns all the currencies ## Examples iex> Money.Currency.all |> Map.fetch!(:GBP) %{name: "Pound Sterling", symbol: "£", exponent: 2, number: 826} """ def all, do: all_currencies() @spec exists?(index_type()) :: boolean @doc ~S""" Returns true if a currency is defined ## Examples iex> Money.Currency.exists?(:USD) true iex> Money.Currency.exists?("USD") true iex> Money.Currency.exists?(:WRONG) false """ def exists?(%Money{currency: currency}), do: exists?(currency) def exists?(currency), do: Map.has_key?(all_currencies(), convert_currency(currency)) @spec get(index_type()) :: map | nil @doc ~S""" Returns a map with the name and symbol of the currency or nil if it doesn’t exist. ## Examples iex> Money.Currency.get(:USD) %{name: "US Dollar", symbol: "$", exponent: 2, number: 840} iex> Money.Currency.get(:WRONG) nil iex> Money.Currency.get(826) %{name: "Pound Sterling", symbol: "£", exponent: 2, number: 826} """ def get(currency) def get(%Money{currency: currency}), do: get(currency) def get(currency), do: all_currencies()[convert_currency(currency)] @spec get!(index_type()) :: map @doc ~S""" Returns a map with the name and symbol of the currency. An ArgumentError is raised if the currency doesn’t exist. ## Examples iex> Money.Currency.get!(:USD) %{name: "US Dollar", symbol: "$", exponent: 2, number: 840} iex> Money.Currency.get!(:WRONG) ** (ArgumentError) currency WRONG doesn’t exist """ def get!(currency), do: get(currency) || currency_doesnt_exist_error(currency) @spec to_atom(index_type()) :: atom @doc ~S""" Returns the atom representation of the currency key An ArgumentError is raised if the currency doesn’t exist. 
## Examples iex> Money.Currency.to_atom("usd") :USD iex> Money.Currency.to_atom(:WRONG) ** (ArgumentError) currency WRONG doesn’t exist """ def to_atom(%Money{currency: currency}), do: to_atom(currency) def to_atom(currency) do currency = convert_currency(currency) get!(currency) currency end @spec name(index_type()) :: String.t() | nil @doc ~S""" Returns the name of the currency or nil if it doesn’t exist. ## Examples iex> Money.Currency.name(:USD) "US Dollar" iex> Money.Currency.name(:WRONG) nil """ def name(%Money{currency: currency}), do: name(currency) def name(currency), do: get(currency)[:name] @spec name!(index_type()) :: String.t() @doc ~S""" Returns the name of the currency. An ArgumentError is raised if the currency doesn’t exist. ## Examples iex> Money.Currency.name!(:USD) "US Dollar" iex> Money.Currency.name!(:WRONG) ** (ArgumentError) currency WRONG doesn’t exist """ def name!(currency), do: name(currency) || currency_doesnt_exist_error(currency) @spec symbol(index_type()) :: String.t() | nil @doc ~S""" Returns the symbol of the currency or nil if it doesn’t exist. ## Examples iex> Money.Currency.symbol(:USD) "$" iex> Money.Currency.symbol(:WRONG) nil """ def symbol(%Money{currency: currency}), do: symbol(currency) def symbol(currency), do: get(currency)[:symbol] @spec symbol!(index_type()) :: String.t() @doc ~S""" Returns the symbol of the currency. An ArgumentError is raised if the currency doesn’t exist. ## Examples iex> Money.Currency.symbol!(:USD) "$" iex> Money.Currency.symbol!(:WRONG) ** (ArgumentError) currency WRONG doesn’t exist """ def symbol!(currency), do: symbol(currency) || currency_doesnt_exist_error(currency) @spec exponent(index_type()) :: integer | nil @doc ~S""" Returns the exponent of the currency or nil if it doesn’t exist. 
## Examples iex> Money.Currency.exponent(:USD) 2 iex> Money.Currency.exponent(:WRONG) nil """ def exponent(%Money{currency: currency}), do: exponent(currency) def exponent(currency), do: get(currency)[:exponent] @spec exponent!(index_type()) :: integer @doc ~S""" Returns the exponent of the currency. An ArgumentError is raised if the currency doesn’t exist. ## Examples iex> Money.Currency.exponent!(:USD) 2 iex> Money.Currency.exponent!(:WRONG) ** (ArgumentError) currency WRONG doesn’t exist """ def exponent!(currency), do: exponent(currency) || currency_doesnt_exist_error(currency) @spec sub_units_count!(index_type()) :: integer @doc ~S""" Returns the sub_units_count of the currency. An ArgumentError is raised if the currency doesn’t exist. ## Examples iex> Money.Currency.sub_units_count!(:USD) 100 iex> Money.Currency.sub_units_count!(:JPY) 1 iex> Money.Currency.sub_units_count!(:WRONG) ** (ArgumentError) currency WRONG doesn’t exist """ def sub_units_count!(currency) do exponent = exponent!(currency) round(:math.pow(10, exponent)) end @spec number(index_type()) :: integer | nil @doc ~S""" Returns the number of the currency or nil if it doesn’t exist. ## Examples iex> Money.Currency.number(:USD) 840 iex> Money.Currency.number(:WRONG) nil """ def number(%Money{currency: currency}), do: number(currency) def number(currency), do: get(currency)[:number] @spec number!(index_type()) :: integer @doc ~S""" Returns the number of the currency. An ArgumentError is raised if the currency doesn’t exist. 
## Examples iex> Money.Currency.number!(:EUR) 978 iex> Money.Currency.exponent!(:WRONG) ** (ArgumentError) currency WRONG doesn’t exist """ def number!(currency), do: number(currency) || currency_doesnt_exist_error(currency) defp convert_currency(currency) when is_binary(currency) do currency |> String.upcase() |> String.to_existing_atom() |> convert_currency rescue _ -> nil end defp convert_currency(currency) when is_integer(currency) do case Enum.find(all_currencies(), fn {_, %{number: number}} -> number == currency end) do {cur, _} -> cur nil -> nil end rescue _ -> nil end defp convert_currency(currency), do: currency defp currency_doesnt_exist_error(currency), do: raise(ArgumentError, "currency #{currency} doesn’t exist") end
lib/money/currency.ex
0.889385
0.624952
currency.ex
starcoder
defmodule Ace.HTTP.Service do
  @moduledoc """
  Run a `Raxx.Server` application for HTTP/1.x and HTTP/2 clients

  **NOTE:** Ace services are served over a secure transport layer TLS(SSL),
  therefore `:cert` + `:key` or `:certfile` + `:keyfile` are required options.

  Starting a service will start and manage a cohort of endpoint process.
  The number of awaiting endpoint processes is set by the `:acceptors` option.

  Each endpoint process manages communicate to a single connected client.
  Using HTTP/1.1 pipelining of HTTP/2 multiplexing one connection may be used for multiple HTTP exchanges.
  An HTTP exchange consisting of one request from the client and one response from the server.

  Each exchange is isolated in a dedicated worker process.
  Raxx specifies early abortion of an exchange can be achieved by causing the worker process to exit.
  """

  use GenServer
  require Logger

  # Baseline options applied to both :gen_tcp.listen/2 and :ssl.listen/2.
  # User-supplied options take precedence because `options ++ @socket_options`
  # is filtered with Keyword.take/2, which keeps the first occurrence of a key.
  @socket_options [
    # Received packets are delivered as a binary("string").
    {:mode, :binary},
    # Handle packets as soon as they are available.
    {:packet, :raw},
    # Set the socket to execute in passive mode, it must be prompted to read data.
    {:active, false},
    # it is possible for the process to complete before the kernel has released the associated network resource, and this port cannot be bound to another process until the kernel has decided that it is done.
    # A detailed explaination is given at http://hea-www.harvard.edu/~fine/Tech/addrinuse.html
    # This setting is a security vulnerability only on multi-user machines.
    # It is NOT a vulnerability from outside the machine.
    {:reuseaddr, true},
    # See the Examples section of gen_tcp docs http://erlang.org/doc/man/gen_tcp.html#examples
    # This timeout is only to help with cleanup.
    # Worker process that send data should have considered a connection dead after 5sec, the default Gen.call timout.
    # If this is ever exposed as an option to users this post contains some possibly useful information.
    # https://erlangcentral.org/wiki/Fast_TCP_sockets
    {:send_timeout, 10_000},
    {:send_timeout_close, true},
    # Offered during the TLS handshake; "h2" first so HTTP/2 wins when both sides support it.
    {:alpn_preferred_protocols, ["h2", "http/1.1"]}
  ]

  @typedoc """
  Process to manage a HTTP service.

  This process should be added to a supervision tree as a supervisor.
  """
  @type service :: pid

  # Injects a `start_link/2` and `child_spec/1` into the using module so it can
  # be dropped straight into a supervision tree. `defaults` is evaluated at
  # compile time and appended to runtime options (runtime options win on
  # duplicate keys, since Keyword lookups take the first occurrence).
  defmacro __using__(defaults) do
    {defaults, []} = Module.eval_quoted(__CALLER__, defaults)

    quote do
      def start_link(initial_state, options \\ []) do
        # DEBT Remove this for 1.0 release
        # Reads the using module's declared behaviours to warn early when the
        # module forgot `use Raxx.Server` / `use Raxx.SimpleServer`.
        behaviours =
          __MODULE__.module_info()
          |> Keyword.get(:attributes, [])
          |> Keyword.get_values(:behaviour)
          |> List.flatten()

        if !Enum.member?(behaviours, Raxx.Server) do
          IO.warn(
            """
            The service `#{inspect(__MODULE__)}` does not implement a Raxx server behaviour

            This module should either `use Raxx.Server` or `use Raxx.SimpleServer.`

            The behaviour Ace.HTTP.Service changed in release 0.18.0, see CHANGELOG for details.
            """,
            Macro.Env.stacktrace(__ENV__)
          )
        end

        application = {__MODULE__, initial_state}
        options = options ++ unquote(defaults)

        unquote(__MODULE__).start_link(application, options)
      end

      defoverridable start_link: 1, start_link: 2

      # Accept `MyService`, `{MyService, config}` and `{MyService, config, opts}`
      # shapes from a supervisor child list by normalising the argument list.
      def child_spec([]) do
        child_spec([%{}])
      end

      def child_spec([config]) do
        child_spec([config, []])
      end

      def child_spec([config, options]) do
        %{
          id: Keyword.get(options, :id, __MODULE__),
          start: {__MODULE__, :start_link, [config, options]},
          type: :supervisor,
          restart: :permanent
        }
      end

      defoverridable child_spec: 1
    end
  end

  @doc """
  Start a HTTP web service.

  ## Options

    * `:cleartext` - Serve over TCP rather than TLS(ssl), will not support HTTP/2.

    * `:certfile` - the certificate.

    * `:keyfile` - the private key used to sign the certificate request.

    * `:cert` - the certificate.

    * `:key` - the private key used to sign the certificate request.

    * `:port` - the port to run the server on. Defaults to port 8443.

    * `:name` - name to register the spawned endpoint under.
      The supported values are the same as GenServers.

    * `:acceptors` - The number of servers simultaneously waiting for a connection.
      Defaults to 100.

    * `:error_response` - The response that Ace will send if a request handler crashes.
      Useful for setting custom headers, e.g. for CORS.

  Internal socket options can also be specified, most notably:

    * `:alpn_preferred_protocols` - which protocols should be negotiated for the
      https traffic. Defaults to `["h2", "http/1.1"]`

  The additional options will be passed to `:gen_tcp.listen/2` and `:ssl.listen/2` as appropriate.
  """
  @spec start_link({module, any}, [{atom, any}]) :: {:ok, service}
  def start_link(app = {module, _config}, options) do
    Raxx.Server.verify_implementation!(module)

    # Settings are validated up front so a bad configuration fails fast
    # instead of surfacing later inside the endpoint processes.
    case Ace.HTTP2.Settings.for_server(options) do
      {:ok, _settings} ->
        service_options = Keyword.take(options, [:name])
        GenServer.start_link(__MODULE__, {app, options}, service_options)

      {:error, reason} ->
        {:error, reason}
    end
  end

  @doc false
  def child_spec([{mod, config}, options]) do
    %{
      id: Keyword.get(options, :id, __MODULE__),
      start: {__MODULE__, :start_link, [{mod, config}, options]},
      type: :supervisor,
      restart: :permanent
    }
  end

  @doc """
  Fetch the port number of a running service.

  **OS assigned ports:**
  If an endpoint is started with port number `0` it will be assigned a port by
  the underlying system. This can be used to start many endpoints simultaneously.
  It can be useful running parallel tests.
  """
  @spec port(service) :: {:ok, :inet.port_number()}
  def port(service) do
    GenServer.call(service, :port)
  end

  @doc """
  Returns a number of connected clients for a given service.
  """
  @spec count_connections(service) :: non_neg_integer()
  def count_connections(service) do
    GenServer.call(service, :count_connections)
  end

  ## SERVER CALLBACKS

  @impl GenServer
  def init({app, options}) do
    # NOTE(review): the message says "listen too"; presumably "listen to" was
    # intended — message text left unchanged here since this is a doc pass.
    port =
      case Keyword.fetch(options, :port) do
        {:ok, port} when is_integer(port) ->
          port

        _ ->
          raise "#{__MODULE__} must be started with a port to listen too."
      end

    acceptors = Keyword.get(options, :acceptors, 100)

    # Open either a plain TCP listener (`:cleartext`) or a TLS listener.
    # User options precede @socket_options, so they win on duplicate keys.
    listen_socket =
      case Keyword.fetch(options, :cleartext) do
        {:ok, true} ->
          tcp_options =
            Keyword.take(options ++ @socket_options, [
              :mode,
              :packet,
              :active,
              :reuseaddr,
              :send_timeout,
              :send_timeout_close
            ])

          {:ok, listen_socket} = :gen_tcp.listen(port, tcp_options)
          listen_socket = {:tcp, listen_socket}
          {:ok, port} = Ace.Socket.port(listen_socket)
          Logger.info("Serving cleartext using HTTP/1 on port #{port}")

          listen_socket

        _ ->
          ssl_options =
            Keyword.take(options ++ @socket_options, [
              :mode,
              :packet,
              :active,
              :reuseaddr,
              :send_timeout,
              :send_timeout_close,
              :alpn_preferred_protocols,
              :cert,
              :key,
              :certfile,
              :keyfile
            ])

          {:ok, listen_socket} = :ssl.listen(port, ssl_options)
          listen_socket = {:ssl, listen_socket}
          # Re-read the port in case 0 was given and the OS assigned one.
          {:ok, port} = Ace.Socket.port(listen_socket)

          Logger.info(
            "Serving securely using #{inspect(Keyword.get(ssl_options, :alpn_preferred_protocols))} on port #{port}"
          )

          listen_socket
      end

    # Three-tier supervision: governors accept connections and hand them to
    # endpoints; endpoints spawn one worker per HTTP exchange. Each tier is
    # given the tier below (plus shared config) via :extra_arguments.
    {:ok, worker_supervisor} =
      DynamicSupervisor.start_link(
        strategy: :one_for_one,
        max_restarts: 5000,
        extra_arguments: [app]
      )

    {:ok, endpoint_supervisor} =
      DynamicSupervisor.start_link(
        strategy: :one_for_one,
        max_restarts: 5000,
        extra_arguments: [worker_supervisor, options]
      )

    # DEBT reduce restarts
    {:ok, governor_supervisor} =
      DynamicSupervisor.start_link(
        strategy: :one_for_one,
        max_restarts: 5000,
        extra_arguments: [endpoint_supervisor, listen_socket]
      )

    # One governor per configured acceptor slot, all blocking in accept.
    for _index <- 1..acceptors do
      {:ok, _} =
        DynamicSupervisor.start_child(
          governor_supervisor,
          Ace.Governor.child_spec()
        )
    end

    {:ok, {listen_socket, worker_supervisor, endpoint_supervisor, governor_supervisor}}
  end

  @impl GenServer
  def handle_call(:port, _from, state = {listen_socket, _, _, _}) do
    {:reply, Ace.Socket.port(listen_socket), state}
  end

  def handle_call(:count_connections, _from, state) do
    {_listen_socket, worker_supervisor, _endpoint_supervisor, _governor_supervisor} = state
    # Active worker count == number of in-flight HTTP exchanges.
    count = DynamicSupervisor.count_children(worker_supervisor).active
    {:reply, count, state}
  end
end
lib/ace/http/service.ex
0.882314
0.573201
service.ex
starcoder
defmodule Yams.Server.Connection.Authenticator do
  @moduledoc """
  Starts TLS on freshly accepted sockets and authenticates the remote device
  by its client certificate.

  Authentication succeeds when the certificate's serial number matches an
  active `Yams.Database.CertAuth` record; the socket is then handed to a
  connection process under the caller-supplied dynamic supervisor.
  """
  use Task

  import Ecto.Query, only: [from: 2]

  alias Yams.Database.CertAuth
  alias Yams.Server.Connection

  @doc """
  Spawns a task that runs `run/2` for `tls_socket` under `supervisor`.

  Used for its side effect only; returns `{:ok, pid}` for the spawned task.
  The task's own result is discarded.
  """
  def authenticate(tls_socket, supervisor),
    do: Task.start(__MODULE__, :run, [tls_socket, supervisor])

  @doc """
  Authenticates the peer certificate on `tls_socket` and, on success, starts a
  connection process under the dynamic `supervisor`.

  Returns `:ok` on success or `{:error, reason}` on failure.
  """
  def run(tls_socket, supervisor) do
    with {:ok, der_cert} <- :ssl.peercert(tls_socket),
         %{serial_number: serial, subject: subject} <- EasySSL.parse_der(der_cert) do
      if authorized?(serial) do
        start_echo_connection(supervisor, tls_socket, subject)
      else
        # NOTE(review): unlike the cert-parse failures below, this path neither
        # notifies the peer nor closes the socket — presumably deliberate (no
        # feedback to unauthenticated clients); confirm with the server design.
        {:error, :auth_failed}
      end
    else
      {:error, _reason} ->
        :ssl.send(tls_socket, "Error: Could not parse client cert")
        :ssl.close(tls_socket)
        {:error, :bad_cert}

      _ ->
        :ssl.send(tls_socket, "Error: Could not authenticate client")
        :ssl.close(tls_socket)
        {:error, :unknown}
    end
  end

  # True when an active CertAuth record exists for this certificate serial.
  defp authorized?(serial) do
    active_for_serial =
      from c in CertAuth,
        where: c.cert_serial == ^serial,
        where: c.active == true

    Yams.Database.Repo.exists?(active_for_serial)
  end

  # Hands the authenticated socket to an Echo connection process; crashes the
  # task (match on {:ok, _}) if the supervisor refuses the child.
  defp start_echo_connection(supervisor, tls_socket, subject) do
    connection_state = %Connection{
      tls_socket: tls_socket,
      client_name: subject[:CN],
      distinguished_name: subject[:aggregated]
    }

    {:ok, _pid} =
      DynamicSupervisor.start_child(
        supervisor,
        {Yams.Server.Connection.Echo, connection_state}
      )

    :ok
  end
end
lib/server/connection/authenticator.ex
0.736021
0.423786
authenticator.ex
starcoder
defmodule TomlElixir.Validator do
  @moduledoc """
  Parse and validate toml returned by parse
  """
  alias TomlElixir.Error

  # Clause order is significant throughout this module: specific tagged tuples
  # must be matched before the catch-all clauses at the end of each group.

  @doc """
  Validate toml and parse it to proper format
  """
  @spec validate(list) :: list
  # Recursively walks the tagged token list, converting charlist keys/values
  # to binaries and normalising dates/times/arrays along the way.
  def validate([]), do: []
  def validate([{:table, to} | tail]) do
    [{:table, Enum.map(to, &to_string(&1))} | validate(tail)]
  end
  def validate([{:array_table, to} | tail]) do
    [{:array_table, Enum.map(to, &to_string(&1))} | validate(tail)]
  end
  def validate([{{:key, key}, {:array, values}} | tail]) do
    [{{:key, to_string(key)}, {:array, array(values)}} | validate(tail)]
  end
  def validate([{{:key, key}, {:datetime, dt, tz}} | tail]) do
    [{{:key, to_string(key)}, {:datetime, datetime(dt, tz)}} | validate(tail)]
  end
  # Datetime without a timezone suffix: normalised with an empty suffix.
  def validate([{{:key, key}, {:datetime, dt}} | tail]) do
    [{{:key, to_string(key)}, {:datetime, datetime(dt, "")}} | validate(tail)]
  end
  def validate([{{:key, key}, {:date, val}} | tail]) do
    [{{:key, to_string(key)}, {:date, date(val)}} | validate(tail)]
  end
  def validate([{{:key, key}, {:time, val}} | tail]) do
    [{{:key, to_string(key)}, {:time, time(val)}} | validate(tail)]
  end
  # The four string-ish variants are interpolated to force binary conversion.
  def validate([{{:key, key}, {:string, value}} | tail]) do
    [{{:key, to_string(key)}, {:string, "#{value}"}} | validate(tail)]
  end
  def validate([{{:key, key}, {:string_ml, value}} | tail]) do
    [{{:key, to_string(key)}, {:string_ml, "#{value}"}} | validate(tail)]
  end
  def validate([{{:key, key}, {:literal, value}} | tail]) do
    [{{:key, to_string(key)}, {:literal, "#{value}"}} | validate(tail)]
  end
  def validate([{{:key, key}, {:literal_ml, value}} | tail]) do
    [{{:key, to_string(key)}, {:literal_ml, "#{value}"}} | validate(tail)]
  end
  # Inline tables are validated recursively.
  def validate([{{:key, key}, {:inline_table, val}} | tail]) when is_list(val) do
    [{{:key, to_string(key)}, {:inline_table, validate(val)}} | validate(tail)]
  end
  # Catch-all for remaining tagged values (e.g. integer/float/boolean):
  # the tag is dropped and the raw value kept. Must stay last.
  def validate([{{:key, key}, {_, value}} | tail]) do
    [{{:key, to_string(key)}, value} | validate(tail)]
  end

  @spec array(list) :: list
  @spec array(list, atom) :: list
  # array/1 inspects the first element to decide the required element type,
  # then delegates to array/2 which enforces that every element matches it
  # (TOML arrays are homogeneous). All multiline/literal string variants are
  # folded into the single :string type for this check.
  defp array([]), do: []
  defp array(values = [{:string_ml, _} | _tail]), do: array(values, :string)
  defp array(values = [{:literal, _} | _tail]), do: array(values, :string)
  defp array(values = [{:literal_ml, _} | _tail]), do: array(values, :string)
  # Generic fallbacks: 3-tuples (e.g. datetime with tz) then 2-tuples.
  defp array(values = [{type, _, _} | _tail]), do: array(values, type)
  defp array(values = [{type, _} | _tail]), do: array(values, type)

  defp array([], _type), do: []
  defp array([{:array, values} | tail], :array) do
    [array(values) | array(tail, :array)]
  end
  defp array([{:datetime, dt, suffix} | tail], :datetime) do
    [datetime(dt, suffix) | array(tail, :datetime)]
  end
  defp array([{:datetime, dt} | tail], :datetime) do
    [datetime(dt, "") | array(tail, :datetime)]
  end
  defp array([{:date, val} | tail], :date) do
    [date(val) | array(tail, :date)]
  end
  defp array([{:time, val} | tail], :time) do
    [time(val) | array(tail, :time)]
  end
  defp array([{:string, value} | tail], :string) do
    [to_string(value) | array(tail, :string)]
  end
  defp array([{:string_ml, value} | tail], :string) do
    [to_string(value) | array(tail, :string)]
  end
  defp array([{:literal, value} | tail], :string) do
    [to_string(value) | array(tail, :string)]
  end
  defp array([{:literal_ml, value} | tail], :string) do
    [to_string(value) | array(tail, :string)]
  end
  defp array([{:integer, value} | tail], :integer) do
    [value | array(tail, :integer)]
  end
  defp array([{:float, value} | tail], :float) do
    [value | array(tail, :float)]
  end
  defp array([{:boolean, value} | tail], :boolean) do
    [value | array(tail, :boolean)]
  end
  # Any element whose tag differs from the detected type is a validation
  # error; thrown (not raised) so the caller can catch it non-locally.
  defp array([{found, _, _} | _tail], type) do
    throw Error.exception("Array value should be #{type}, found #{found}")
  end
  defp array([{found, _} | _tail], type) do
    throw Error.exception("Array value should be #{type}, found #{found}")
  end

  @spec datetime(:calendar.datetime, binary) :: String.t
  # Renders an Erlang datetime tuple as ISO 8601 with the original
  # suffix (timezone/offset text) appended verbatim.
  defp datetime(dt, suffix) do
    dt = NaiveDateTime.from_erl!(dt)
    NaiveDateTime.to_iso8601(dt) <> to_string(suffix)
  end

  # Erlang {y, m, d} tuple -> ISO 8601 date string.
  defp date(val) do
    to_string(Date.from_erl!(val))
  end

  # Time with fractional part: render base time, then "." and the fraction.
  defp time({val, offset}) do
    time(val) <> "." <> to_string(offset)
  end
  defp time(val) do
    to_string(Time.from_erl!(val))
  end
end
lib/toml_elixir/validator.ex
0.770637
0.770896
validator.ex
starcoder
defmodule Statix do
  @moduledoc """
  Writer for [StatsD](https://github.com/etsy/statsd)-compatible servers.

  To get started with Statix, you have to create a module that calls `use
  Statix`, like this:

      defmodule MyApp.Statix do
        use Statix
      end

  This will make `MyApp.Statix` a Statix connection that implements the
  `Statix` behaviour. This connection can be started with the
  `MyApp.Statix.connect/1` function (see the `c:connect/1` callback) and a few
  functions can be called on it to report metrics to the StatsD-compatible
  server read from the configuration. Usually, `connect/1` is called in your
  application's `c:Application.start/2` callback:

      def start(_type, _args) do
        :ok = MyApp.Statix.connect()
        # ...
      end

  ## Configuration

  Statix can be configured either globally or on a per-connection basis.

  The global configuration will affect all Statix connections created with
  `use Statix`; it can be specified by configuring the `:statix` application:

      config :statix,
        prefix: "sample",
        host: "stats.tld",
        port: 8181

  The per-connection configuration can be specified by configuring each
  specific connection module under the `:statix` application:

      config :statix, MyApp.Statix,
        port: 8123

  The following is a list of all the supported options:

    * `:prefix` - (binary) all metrics sent to the StatsD-compatible server
      through the configured Statix connection will be prefixed with the value
      of this option. By default this option is not present.
    * `:host` - (binary) the host where the StatsD-compatible server is
      running. Defaults to `"127.0.0.1"`.
    * `:port` - (integer) the port (on `:host`) where the StatsD-compatible
      server is running. Defaults to `8125`.
    * `:tags` - ([binary]) a list of global tags that will be sent with all
      metrics. By default this option is not present. See the "Tags" section
      for more information.
    * `:pool_size` - (integer) number of ports used to distribute the metric
      sending. Defaults to `1`. See the "Pooling" section for more
      information.

  By default, the configuration is evaluated once, at compile time. If you
  plan on changing the configuration at runtime, you must specify the
  `:runtime_config` option to be `true` when calling `use Statix`:

      defmodule MyApp.Statix do
        use Statix, runtime_config: true
      end

  ## Tags

  Tags are a way of adding dimensions to metrics:

      MyApp.Statix.gauge("memory", 1, tags: ["region:east"])

  In the example above, the `memory` measurement has been tagged with
  `region:east`. Not all StatsD-compatible servers support this feature.

  Tags could also be added globally to be included in every metric sent:

      config :statix, tags: ["env:\#{Mix.env()}"]

  ## Sampling

  All the callbacks from the `Statix` behaviour that accept options support
  sampling via the `:sample_rate` option (see also the `t:options/0` type).

      MyApp.Statix.increment("page_view", 1, sample_rate: 0.5)

  In the example above, the UDP packet will only be sent to the server about
  half of the time, but the resulting value will be adjusted on the server
  according to the given sample rate.

  ## Pooling

  Statix transmits data using [ports](https://hexdocs.pm/elixir/Port.html).
  If a port is busy when you try to send a command to it, the sender may be
  suspended and some blocking may occur. This becomes more of an issue in
  highly concurrent environments.

  In order to get around that, Statix allows you to start multiple ports, and
  randomly picks one at the time of transmit. This option can be configured
  via the `:pool_size` option:

      config :statix, MyApp.Statix, pool_size: 3
  """

  alias __MODULE__.Conn

  @type key :: iodata
  @type options :: [sample_rate: float, tags: [String.t()]]
  @type on_send :: :ok | {:error, term}

  @doc """
  Same as `connect([])`.
  """
  @callback connect() :: :ok

  @doc """
  Opens the connection to the StatsD-compatible server.

  The configuration is read from the environment for the `:statix`
  application (both globally and per connection).

  The given `options` override the configuration read from the application
  environment.
  """
  @callback connect(options :: keyword) :: :ok

  @doc """
  Increments the StatsD counter identified by `key` by the given `value`.

  `value` is supposed to be zero or positive and `c:decrement/3` should be
  used for negative values.

  ## Examples

      iex> MyApp.Statix.increment("hits", 1, [])
      :ok

  """
  @callback increment(key, value :: number, options) :: on_send

  @doc """
  Same as `increment(key, 1, [])`.
  """
  @callback increment(key) :: on_send

  @doc """
  Same as `increment(key, value, [])`.
  """
  @callback increment(key, value :: number) :: on_send

  @doc """
  Decrements the StatsD counter identified by `key` by the given `value`.

  Works same as `c:increment/3` but subtracts `value` instead of adding it.
  For this reason `value` should be zero or negative.

  ## Examples

      iex> MyApp.Statix.decrement("open_connections", 1, [])
      :ok

  """
  @callback decrement(key, value :: number, options) :: on_send

  @doc """
  Same as `decrement(key, 1, [])`.
  """
  @callback decrement(key) :: on_send

  @doc """
  Same as `decrement(key, value, [])`.
  """
  @callback decrement(key, value :: number) :: on_send

  @doc """
  Writes to the StatsD gauge identified by `key`.

  ## Examples

      iex> MyApp.Statix.gauge("cpu_usage", 0.83, [])
      :ok

  """
  @callback gauge(key, value :: String.Chars.t(), options) :: on_send

  @doc """
  Same as `gauge(key, value, [])`.
  """
  @callback gauge(key, value :: String.Chars.t()) :: on_send

  @doc """
  Writes `value` to the histogram identified by `key`.

  Not all StatsD-compatible servers support histograms. An example of a such
  server [statsite](https://github.com/statsite/statsite).

  ## Examples

      iex> MyApp.Statix.histogram("online_users", 123, [])
      :ok

  """
  @callback histogram(key, value :: String.Chars.t(), options) :: on_send

  @doc """
  Same as `histogram(key, value, [])`.
  """
  @callback histogram(key, value :: String.Chars.t()) :: on_send

  @doc """
  Same as `timing(key, value, [])`.
  """
  @callback timing(key, value :: String.Chars.t()) :: on_send

  @doc """
  Writes the given `value` to the StatsD timing identified by `key`.

  `value` is expected in milliseconds.

  ## Examples

      iex> MyApp.Statix.timing("rendering", 12, [])
      :ok

  """
  @callback timing(key, value :: String.Chars.t(), options) :: on_send

  @doc """
  Writes the given `value` to the StatsD set identified by `key`.

  ## Examples

      iex> MyApp.Statix.set("unique_visitors", "user1", [])
      :ok

  """
  @callback set(key, value :: String.Chars.t(), options) :: on_send

  @doc """
  Same as `set(key, value, [])`.
  """
  @callback set(key, value :: String.Chars.t()) :: on_send

  @doc """
  Measures the execution time of the given `function` and writes that to the
  StatsD timing identified by `key`.

  This function returns the value returned by `function`, making it suitable
  for easily wrapping existing code.

  ## Examples

      iex> MyApp.Statix.measure("integer_to_string", [], fn -> Integer.to_string(123) end)
      "123"

  """
  @callback measure(key, options, function :: (() -> result)) :: result when result: var

  @doc """
  Same as `measure(key, [], function)`.
  """
  @callback measure(key, function :: (() -> result)) :: result when result: var

  # Injects the full Statix behaviour implementation into the using module.
  # Two flavours of `current_statix/0` are generated:
  #   * `:runtime_config` true  - the %Statix{} is built at connect/1 time and
  #     stored in the application env under a per-module key;
  #   * default (compile time)  - the %Statix{} is captured once in a module
  #     attribute, and connect/1 raises if runtime options would differ.
  defmacro __using__(opts) do
    current_statix =
      if Keyword.get(opts, :runtime_config, false) do
        quote do
          @statix_key Module.concat(__MODULE__, :__statix__)

          def connect(options \\ []) do
            statix = Statix.new(__MODULE__, options)
            Application.put_env(:statix, @statix_key, statix)

            Statix.open(statix)
            :ok
          end

          @compile {:inline, [current_statix: 0]}
          defp current_statix() do
            Application.fetch_env!(:statix, @statix_key)
          end
        end
      else
        quote do
          @statix Statix.new(__MODULE__, [])

          def connect(options \\ []) do
            # Guard against silently ignoring runtime options that differ
            # from the configuration frozen at compile time.
            if @statix != Statix.new(__MODULE__, options) do
              raise(
                "the current configuration for #{inspect(__MODULE__)} differs from " <>
                  "the one that was given during the compilation.\n" <>
                  "Be sure to use :runtime_config option " <>
                  "if you want to have different configurations"
              )
            end

            Statix.open(@statix)
            :ok
          end

          @compile {:inline, [current_statix: 0]}
          defp current_statix(), do: @statix
        end
      end

    quote location: :keep do
      @behaviour Statix

      unquote(current_statix)

      def increment(key, val \\ 1, options \\ []) when is_number(val) do
        Statix.transmit(current_statix(), :counter, key, val, options)
      end

      # Decrement is a counter write with a "-" prepended as iodata.
      def decrement(key, val \\ 1, options \\ []) when is_number(val) do
        Statix.transmit(current_statix(), :counter, key, [?-, to_string(val)], options)
      end

      def gauge(key, val, options \\ []) do
        Statix.transmit(current_statix(), :gauge, key, val, options)
      end

      def histogram(key, val, options \\ []) do
        Statix.transmit(current_statix(), :histogram, key, val, options)
      end

      def timing(key, val, options \\ []) do
        Statix.transmit(current_statix(), :timing, key, val, options)
      end

      # Times `fun` with :timer.tc (microseconds), reports milliseconds,
      # and returns the function's own result.
      def measure(key, options \\ [], fun) when is_function(fun, 0) do
        {elapsed, result} = :timer.tc(fun)
        timing(key, div(elapsed, 1000), options)
        result
      end

      def set(key, val, options \\ []) do
        Statix.transmit(current_statix(), :set, key, val, options)
      end

      defoverridable(
        increment: 3,
        decrement: 3,
        gauge: 3,
        histogram: 3,
        timing: 3,
        measure: 3,
        set: 3
      )
    end
  end

  # conn: the %Statix.Conn{}; tags: global tags; pool: registered socket names.
  defstruct [:conn, :tags, :pool]

  @doc false
  # Builds the %Statix{} for `module` from app env + `options` overrides.
  # The metric prefix is baked into the connection header up front so it is
  # not re-encoded on every transmit.
  def new(module, options) do
    config = get_config(module, options)
    conn = Conn.new(config.host, config.port)
    header = IO.iodata_to_binary([conn.header | config.prefix])

    %__MODULE__{
      conn: %{conn | header: header},
      pool: build_pool(module, config.pool_size),
      tags: config.tags
    }
  end

  # Pool of size 1 reuses the module name itself; larger pools derive
  # "#{module}-1" .. "#{module}-n" atoms as registered names.
  defp build_pool(module, 1), do: [module]

  defp build_pool(module, size) do
    Enum.map(1..size, &:"#{module}-#{&1}")
  end

  @doc false
  # Opens one socket per pool slot and registers each under its pool name.
  def open(%__MODULE__{conn: conn, pool: pool}) do
    Enum.each(pool, fn name ->
      %{sock: sock} = Conn.open(conn)
      Process.register(sock, name)
    end)
  end

  @doc false
  # Sends one metric. Applies client-side sampling first (skip silently with
  # :ok when the random draw exceeds :sample_rate), merges global tags into
  # the per-call options, and picks a pooled socket for the actual write.
  def transmit(%{conn: conn, pool: pool, tags: tags}, type, key, value, options)
      when (is_binary(key) or is_list(key)) and is_list(options) do
    sample_rate = Keyword.get(options, :sample_rate)

    if is_nil(sample_rate) or sample_rate >= :rand.uniform() do
      options = put_global_tags(options, tags)

      %{conn | sock: pick_name(pool)}
      |> Conn.transmit(type, key, to_string(value), options)
    else
      :ok
    end
  end

  defp pick_name([name]), do: name
  defp pick_name(pool), do: Enum.random(pool)

  # Merges configuration sources. Precedence (first occurrence wins in
  # keyword lookups): explicit overrides > per-module env > global env.
  defp get_config(module, overrides) do
    {module_env, global_env} =
      :statix
      |> Application.get_all_env()
      |> Keyword.pop(module, [])

    env = module_env ++ global_env
    options = overrides ++ env

    # Override tags replace env tags entirely; otherwise all :tags entries
    # from module and global env are concatenated.
    tags =
      Keyword.get_lazy(overrides, :tags, fn ->
        env |> Keyword.get_values(:tags) |> Enum.concat()
      end)

    %{
      prefix: build_prefix(env, overrides),
      host: Keyword.get(options, :host, "127.0.0.1"),
      port: Keyword.get(options, :port, 8125),
      pool_size: Keyword.get(options, :pool_size, 1),
      tags: tags
    }
  end

  # Prefix is rendered as iodata ending in "." so "<prefix>.<key>" falls out
  # of simple concatenation. With no override, every :prefix in env is joined.
  defp build_prefix(env, overrides) do
    case Keyword.fetch(overrides, :prefix) do
      {:ok, prefix} ->
        [prefix, ?.]

      :error ->
        env |> Keyword.get_values(:prefix) |> Enum.map_join(&(&1 && [&1, ?.]))
    end
  end

  defp put_global_tags(options, []), do: options

  defp put_global_tags(options, tags) do
    Keyword.update(options, :tags, tags, &(&1 ++ tags))
  end
end
lib/statix.ex
0.921565
0.55254
statix.ex
starcoder
defmodule SmartCity.Dataset do @moduledoc """ Struct defining a dataset definition and functions for retrieving key elements of the dataset for handling. ```javascript const Dataset = { "id": "", // UUID "business": { // Project Open Data Metadata Schema v1.1 "authorEmail": "", "authorName": "", "benefitRating": 0, // value between 0.0 and 1.0 "categories": [""], "conformsToUri": "", "contactEmail": "", "contactName": "", "dataTitle": "", // user friendly (dataTitle) "describedByMimeType": "", "describedByUrl": "", "description": "", "homepage": "", "issuedDate": "", "keywords": [""], "language": "", "license": "", "modifiedDate": "", "orgTitle": "", // user friendly (orgTitle) "parentDataset": "", "publishFrequency": "", "referenceUrls": [""], "rights": "", "riskRating": 0, // value between 0.0 and 1.0 "spatial": "", "temporal": "" }, "technical": { "allow_duplicates": true "authHeaders": {"header1": "", "header2": ""} "authBody": {"name": "", "clientID": ""} "authBodyEncodeMethod": "", "authUrl": "", "cadence": "", "dataName": "", // ~r/[a-zA-Z_]+$/ "orgId": "", "orgName": "", // ~r/[a-zA-Z_]+$/ "protocol": "", // List of protocols to use. Defaults to nil. Can be [http1, http2] "schema": [{ "name": "", "type": "", "description": "" }], "sourceFormat": "", "sourceHeaders": { "header1": "", "header2": "" }, "sourceQueryParams": { "key1": "", "key2": "" }, "sourceType": "", // remote|stream|ingest|host "sourceUrl": "", "systemName": "", // ${orgName}__${dataName} "topLevelSelector": "" } } ``` """ alias SmartCity.Dataset.Business alias SmartCity.Dataset.Technical @type id :: term() @type t :: %SmartCity.Dataset{ business: SmartCity.Dataset.Business.t(), id: String.t(), technical: SmartCity.Dataset.Technical.t(), version: String.t() } @derive Jason.Encoder defstruct version: "0.6", id: nil, business: nil, technical: nil use Accessible alias SmartCity.BaseStruct @doc """ Returns a new `SmartCity.Dataset` struct. 
`SmartCity.Dataset.Business` and `SmartCity.Dataset.Technical` structs will be created along the way. ## Parameters - msg : map defining values of the struct to be created. Can be initialized by - map with string keys - map with atom keys - JSON """ @spec new(String.t() | map()) :: {:ok, map()} | {:error, term()} def new(msg) do msg |> BaseStruct.new() |> create() end defp create(%{id: id, business: biz, technical: tech}) do struct = struct(%__MODULE__{}, %{ id: id, business: Business.new(biz), technical: Technical.new(tech) }) {:ok, struct} rescue e -> {:error, e} end defp create(msg) do {:error, "Invalid Dataset: #{inspect(msg)}"} end @doc """ Returns true if `SmartCity.Dataset.Technical sourceType field is stream` """ def is_stream?(%__MODULE__{technical: %{sourceType: sourceType}}) do "stream" == sourceType end @doc """ Returns true if `SmartCity.Dataset.Technical sourceType field is remote` """ def is_remote?(%__MODULE__{technical: %{sourceType: sourceType}}) do "remote" == sourceType end @doc """ Returns true if `SmartCity.Dataset.Technical sourceType field is ingest` """ def is_ingest?(%__MODULE__{technical: %{sourceType: sourceType}}) do "ingest" == sourceType end @doc """ Returns true if `SmartCity.Dataset.Technical sourceType field is host` """ def is_host?(%__MODULE__{technical: %{sourceType: sourceType}}) do "host" == sourceType end end defimpl Brook.Deserializer.Protocol, for: SmartCity.Dataset do def deserialize(_struct, data) do SmartCity.Dataset.new(data) end end
lib/smart_city/dataset.ex
0.818011
0.819605
dataset.ex
starcoder
defmodule Aoc.Day01 do @moduledoc false import Aoc.Utils @spec run(1 | 2) :: number def run(1), do: solve1(get_input(1)) def run(2), do: solve2(get_input(1)) @spec solve1(binary) :: number def solve1(input) do String.split(input, ", ", trim: true) |> Enum.reduce({{0, 0}, :north}, fn e, acc -> turn = String.first(e) blocks = String.to_integer(String.slice(e, 1..10)) case {turn, acc} do {"R", {{x, y}, :north}} -> {{x + blocks, y}, :east} {"R", {{x, y}, :east}} -> {{x, y - blocks}, :south} {"R", {{x, y}, :south}} -> {{x - blocks, y}, :west} {"R", {{x, y}, :west}} -> {{x, y + blocks}, :north} {"L", {{x, y}, :north}} -> {{x - blocks, y}, :west} {"L", {{x, y}, :west}} -> {{x, y - blocks}, :south} {"L", {{x, y}, :south}} -> {{x + blocks, y}, :east} {"L", {{x, y}, :east}} -> {{x, y + blocks}, :north} _ -> acc end end) |> (fn {{x, y}, _} -> abs(x) + abs(y) end).() end @spec solve2(binary) :: number def solve2(input) do {path, _} = String.split(input, ", ", trim: true) |> Enum.map_reduce({{0, 0}, :north}, fn e, acc -> # This time we preserve the streches of path from starting location to # the ending one, in order to find where our path intersects itself # at first (eg. the first location visited twice). 
turn = String.first(e) blocks = String.to_integer(String.slice(e, 1..10)) case {turn, acc} do {"R", {{x, y}, :north}} -> strech = for x <- (x + 1)..(x + blocks), do: {x, y} {strech, {List.last(strech), :east}} {"R", {{x, y}, :east}} -> strech = for y <- (y - 1)..(y - blocks), do: {x, y} {strech, {List.last(strech), :south}} {"R", {{x, y}, :south}} -> strech = for x <- (x - 1)..(x - blocks), do: {x, y} {strech, {List.last(strech), :west}} {"R", {{x, y}, :west}} -> strech = for y <- (y + 1)..(y + blocks), do: {x, y} {strech, {List.last(strech), :north}} {"L", {{x, y}, :north}} -> strech = for x <- (x - 1)..(x - blocks), do: {x, y} {strech, {List.last(strech), :west}} {"L", {{x, y}, :west}} -> strech = for y <- (y - 1)..(y - blocks), do: {x, y} {strech, {List.last(strech), :south}} {"L", {{x, y}, :south}} -> strech = for x <- (x + 1)..(x + blocks), do: {x, y} {strech, {List.last(strech), :east}} {"L", {{x, y}, :east}} -> strech = for y <- (y + 1)..(y + blocks), do: {x, y} {strech, {List.last(strech), :north}} _ -> {acc, acc} end end) path |> List.flatten() |> fst_visited_twice([]) |> (fn {x, y} -> abs(x) + abs(y) end).() end defp fst_visited_twice([], _), do: nil defp fst_visited_twice([pos | to_visit], visited) do if Enum.member?(visited, pos) do pos else fst_visited_twice(to_visit, [pos | visited]) end end end
lib/aoc/day01.ex
0.723016
0.820218
day01.ex
starcoder
defmodule Meilisearch.Documents do @moduledoc """ Collection of functions used to manage documents. [MeiliSearch Documentation - Documents](https://docs.meilisearch.com/references/documents.html) """ alias Meilisearch.HTTP @doc """ Get one document by id. ([ref.](https://docs.meilisearch.com/reference/api/documents.html#get-one-document)) ## Example iex> Meilisearch.Documents.get("movies", 1) {:ok, %{ "id" => 25684, "title" => "American Ninja 5", "poster" => "https://image.tmdb.org/t/p/w1280/iuAQVI4mvjI83wnirpD8GVNRVuY.jpg", "overview" => "When a scientists daughter is kidnapped, American Ninja, attempts to find her, but this time he teams up with a youngster he has trained in the ways of the ninja.", "release_date" => "1993-01-01" }} """ @spec get(String.t(), String.t()) :: HTTP.response() def get(index_uid, document_id) do HTTP.get_request("indexes/#{index_uid}/documents/#{document_id}") end @doc """ List documents in given index. ([ref.](https://docs.meilisearch.com/reference/api/documents.html#get-documents)) ## Options * `offset` Number of documents to skip. Defaults to `0` * `limit` Maximum number of documents returned. Defaults to `20` * `attributesToRetrieve` Document attributes to show. ## Examples iex> Meilisearch.Documents.list("movies") {:ok, [ %{ "id" => 1, "release_date" => "1993-01-01", "poster" => "https://image.tmdb.org/t/p/w1280/iuAQVI4mvjI83wnirpD8GVNRVuY.jpg", "title" => "American Ninja 5", "overview" => "When a scientists daughter is kidnapped, American Ninja, attempts to find her, but this time he teams up with a youngster he has trained in the ways of the ninja." }, %{ "id" => 2, "title" => "Dead in a Week (Or Your Money Back)", "release_date" => "2018-09-12", "poster" => "https://image.tmdb.org/t/p/w1280/f4ANVEuEaGy2oP5M0Y2P1dwxUNn.jpg", "overview" => "William has failed to kill himself so many times that he outsources his suicide to aging assassin Leslie. 
But with the contract signed and death assured within a week (or his money back), William suddenly discovers reasons to live... However Leslie is under pressure from his boss to make sure the contract is completed." } ]} iex> Meilisearch.Documents.get("movies", limit: 2, offset: 4) {:ok, [ %{ "id" => 5, "release_date" => "1993-01-01", "poster" => "https://image.tmdb.org/t/p/w1280/iuAQVI4mvjI83wnirpD8GVNRVuY.jpg", "title" => "American Ninja 5", "overview" => "When a scientists daughter is kidnapped, American Ninja, attempts to find her, but this time he teams up with a youngster he has trained in the ways of the ninja." }, %{ "id" => 6, "title" => "Dead in a Week (Or Your Money Back)", "release_date" => "2018-09-12", "poster" => "https://image.tmdb.org/t/p/w1280/f4ANVEuEaGy2oP5M0Y2P1dwxUNn.jpg", "overview" => "William has failed to kill himself so many times that he outsources his suicide to aging assassin Leslie. But with the contract signed and death assured within a week (or his money back), William suddenly discovers reasons to live... However Leslie is under pressure from his boss to make sure the contract is completed." } ]} """ @spec list(String.t(), Keyword.t()) :: HTTP.response() def list(index_uid, opts \\ []) do HTTP.get_request("indexes/#{index_uid}/documents", [], params: opts) end @doc """ Add document(s) to given index. ([ref.](https://docs.meilisearch.com/reference/api/documents.html#add-or-replace-documents)) If an exisiting document (based on primary key) is given it will be replaced. ## Examples iex> Meilisearch.Documents.add_or_replace("movies", %{ "id" => 2, "tagline" => "You'll never go in the water again", "title" => "Jaws" }) {:ok, %{ "uid" => 1, "indexUid" => "movies", "status" => "enqueued", "type" => "documentAddition", "enqueuedAt" => "2021-08-11T09:25:53.000000Z" }} iex> Meilisearch.Documents.add_or_replace( "movies", [ %{ "id" => 6, "tagline" => "Who ya gonna call?", "title" => "Ghostbusters" }, %{ "id" => 5, "tagline" => "Be Afraid. 
Be very afraid.", "title" => "The Fly" } ] ) {:ok, %{ "uid" => 1, "indexUid" => "movies", "status" => "enqueued", "type" => "documentAddition", "enqueuedAt" => "2021-08-11T09:25:53.000000Z" }} """ @spec add_or_replace(String.t(), list(any), Keyword.t()) :: HTTP.response() def add_or_replace(index_uid, docs, opts \\ []) def add_or_replace(index_uid, doc, opts) when not is_list(doc) do add_or_replace(index_uid, [doc], opts) end def add_or_replace(index_uid, docs, opts) do HTTP.post_request("indexes/#{index_uid}/documents", docs, [], params: opts) end @doc """ Add document(s) to given index. ([ref.](https://docs.meilisearch.com/reference/api/documents.html#add-or-update-documents)) If an exisiting document (based on primary key) is given it will be updated with provided values. ## Examples iex> Meilisearch.Documents.add_or_update("movies", %{ "id" => 2, "tagline" => "You'll never go in the water again", "title" => "Jaws" }) {:ok, %{ "uid" => 1, "indexUid" => "movies", "status" => "enqueued", "type" => "documentPartial", "enqueuedAt" => "2021-08-11T09:25:53.000000Z" }} iex> Meilisearch.Documents.add_or_update( "movies", [ %{ "id" => 6, "tagline" => "Who ya gonna call?", "title" => "Ghostbusters" }, %{ "id" => 5, "tagline" => "Be Afraid. Be very afraid.", "title" => "The Fly" } ] ) {:ok, %{ "uid" => 1, "indexUid" => "movies", "status" => "enqueued", "type" => "documentPartial", "enqueuedAt" => "2021-08-11T09:25:53.000000Z" }} """ @spec add_or_update(String.t(), list(any), Keyword.t()) :: {:ok, any} | {:error, String.t()} def add_or_update(index_uid, docs, opts \\ []) def add_or_update(index_uid, doc, opts) when not is_list(doc) do add_or_update(index_uid, [doc], opts) end def add_or_update(index_uid, docs, opts) do HTTP.put_request("indexes/#{index_uid}/documents", docs, [], params: opts) end @doc """ Delete one or more documents based on id in a given index. ([ref. one](https://docs.meilisearch.com/reference/api/documents.html#delete-one-document)) ([ref. 
batch](https://docs.meilisearch.com/reference/api/documents.html#delete-documents)) ## Example iex> Meilisearch.Documents.delete("movies", 1) {:ok, %{ "uid" => 1, "indexUid" => "movies", "status" => "enqueued", "type" => "documentDeletion", "enqueuedAt" => "2021-08-11T09:25:53.000000Z" }} iex> Meilisearch.Documents.delete("movies", [1,2,3,4]) {:ok, %{ "uid" => 1, "indexUid" => "movies", "status" => "enqueued", "type" => "documentDeletion", "enqueuedAt" => "2021-08-11T09:25:53.000000Z" }} """ @spec delete(String.t(), String.t() | list(String.t())) :: {:ok, any} | {:error, String.t()} def delete(index_uid, document_ids) when is_list(document_ids) do HTTP.post_request("indexes/#{index_uid}/documents/delete-batch", document_ids) end def delete(index_uid, document_id) do HTTP.delete_request("indexes/#{index_uid}/documents/#{document_id}") end @doc """ Delete all documents in given index. ([ref.](https://docs.meilisearch.com/reference/api/documents.html#delete-all-documents)) ## Example iex> Meilisearch.Documents.delete_all("meilisearch_test") {:ok, %{ "uid" => 1, "indexUid" => "movies", "status" => "enqueued", "type" => "documentDeletion", "enqueuedAt" => "2021-08-11T09:25:53.000000Z" }} """ @spec delete_all(String.t()) :: {:ok, any} | {:error, binary} def delete_all(index_uid) do HTTP.delete_request("indexes/#{index_uid}/documents") end end
lib/meilisearch/documents.ex
0.675015
0.425546
documents.ex
starcoder
use Bitwise require Logger defmodule FlexId do @moduledoc """ Generator for distributed time-based ids. Ids are composed of 4 value: - time in ms - rolling sequence counter - partition used to segratate ids; this could be hard-coded by node or geo-region or be a hash to shard data by user - Luhn mod 16 checksum to check for incorrectly typed ids The number of bits for each value is flexible and can be tuned to your requirements. """ defstruct sequence_bits: 6, partition_bits: 6, checksum_bits: 4, sequence_mask: 0x3F, partition_mask: 0x3F, checksum_mask: 0x0F, epoch: 946_684_800_000, seq: 0, ms: 0 @doc """ Start an agent that can be used generate IDs with the given parameters. ## Parameters - epoch: the epoch to start the ids at in ms; defaults to `946_684_800_000` (2000-01-01 0:00 UTC) - sequence_bits: how many bits should be used for sub-millisecond precision; defaults to 6 for 64 ids/ms - partition_bits: how many bits should be used to identify partitions in the ids; defaults to 6 for 64 partitions - checksum_bits: how many bits should be used for checksum to detect invalid ids; must be 0 or 4, defaults to 4 ## Usage The default parameters have the following characteristics - a time range of 2231 years, from 2000 to 4229 (afterwhich values become 64-bits and will be negative when using signed integers) - a theoretical maximum of 64 ids/ms - 64 partitions (shards) - a 4-bit checksum to catch errors should ids need to be typed in manually ## Examples iex> {:ok, fid} = FlexId.start_link() id = FlexId.generate(fid, FlexId.make_partition("test")) """ def start_link( epoch \\ 946_684_800_000, sequence_bits \\ 6, partition_bits \\ 6, checksum_bits \\ 4, opts \\ [] ) do Agent.start_link(FlexId, :start_state, [epoch, sequence_bits, partition_bits, checksum_bits], opts) end @doc """ Used by the agent to construct the initial state. This is not meant to be called directly. 
""" def start_state(epoch, sequence_bits, partition_bits, checksum_bits) do %FlexId{ epoch: epoch, sequence_bits: sequence_bits, sequence_mask: make_mask(sequence_bits), partition_bits: partition_bits, partition_mask: make_mask(partition_bits), checksum_bits: checksum_bits, checksum_mask: make_mask(checksum_bits) } end @doc """ Returns the millisecond component of the id, without adding the epoch. """ def extract_raw_millis(agent, value) do Agent.get(agent, fn state -> value >>> (state.sequence_bits + state.partition_bits + state.checksum_bits) end) end @doc """ Returns the millisecond component of the id, adjusted for epoch. """ def extract_millis(agent, value) do Agent.get(agent, fn state -> (value >>> (state.sequence_bits + state.partition_bits + state.checksum_bits)) + state.epoch end) end @doc """ Returns the sequence component of the id. """ def extract_sequence(agent, value) do Agent.get(agent, fn state -> value >>> (state.partition_bits + state.checksum_bits) &&& state.sequence_mask end) end @doc """ Returns the checksum component of an id. This is useful for copying shard values from parent to child. """ def extract_partition(agent, value) do Agent.get(agent, fn state -> value >>> state.checksum_bits &&& state.partition_mask end) end @doc """ Returns the checksum component of an id. """ def extract_checksum(agent, value) do Agent.get(agent, fn state -> value &&& state.checksum_mask end) end @doc """ Logs information about the agent state. 
""" def log(agent) do Agent.cast(agent, fn state -> ms = round( :math.pow(2, 63 - state.sequence_bits - state.partition_bits - state.checksum_bits - 1) ) yr = round(ms / 1000 / 60 / 60 / 24 / 365) fr = DateTime.from_unix!(state.epoch, :millisecond) to = DateTime.from_unix!(state.epoch + ms, :millisecond) seq = round(:math.pow(2, state.sequence_bits)) par = round(:math.pow(2, state.partition_bits)) Logger.info( "Ids have a time range of #{yr} years (#{fr} to #{to}), #{seq} sequences, #{par} partitions, #{ state.checksum_bits } checksum bits" ) state end) end defp make_mask(bits) do Enum.reduce(1..bits, 0, fn _, acc -> acc <<< 1 ||| 1 end) end @doc """ Helper for generating a 16-bit partition value from the last two bytes of the sha1 hash of a text value. Use this to generate a shard value based on the data passed in. ## Examples partition = FlexId.make_partition(username) """ def make_partition(text) do hash = :crypto.hash(:sha, text) <<_::binary-18, partition::unsigned-big-integer-16>> = hash partition end @doc """ Generate an Id value. ## Examples iex> {:ok, fid} = FlexId.start_link value = FlexId.generate(fid, 0xB1) """ def generate(agent, partition) when is_integer(partition) do Agent.get_and_update(agent, FlexId, :get_and_update, [partition]) end @doc """ Used by the agent to get the next id and next agent state. This is not meant to be called directly. 
""" def get_and_update(state, partition) do ms = :os.system_time(:millisecond) - state.epoch seq = if state.ms == ms, do: state.seq + 1, else: 0 masked_seq = seq &&& state.sequence_mask if seq > 0 && masked_seq == 0 do raise("sequence overflow") end value = ms <<< (state.sequence_bits + state.partition_bits + state.checksum_bits) ||| masked_seq <<< (state.partition_bits + state.checksum_bits) ||| (partition &&& state.partition_mask) <<< state.checksum_bits value = if state.checksum_bits == 4, do: checksum(value), else: value {value, state |> Map.put(:seq, seq) |> Map.put(:ms, ms)} end @doc """ Adds a Luhn mod 16 checksum to the input. This is not meant to be called directly. """ def checksum(input) do {_, sum, _} = 1..15 |> Enum.reduce({input >>> 4, 0, 2}, fn _, {input, sum, factor} -> addend = factor * (input &&& 0xF) addend = div(addend, 0xF) + rem(addend, 0xF) {input >>> 4, sum + addend, if(factor == 2, do: 1, else: 2)} end) remainder = rem(sum, 0xF) check = rem(0xF - remainder, 0xF) input ||| check end @doc """ Returns true if the value has a valid checksum. """ def verify_checksum(input) do {_, sum, _} = 0..15 |> Enum.reduce({input, 0, 1}, fn _, {input, sum, factor} -> addend = factor * (input &&& 0xF) addend = div(addend, 0xF) + rem(addend, 0xF) {input >>> 4, sum + addend, if(factor == 2, do: 1, else: 2)} end) 0 == rem(sum, 0xF) end end
lib/flexid.ex
0.921992
0.538801
flexid.ex
starcoder
defmodule Akd.Build.Docker do @moduledoc """ A native Hook module that comes shipped with Akd. This module uses `Akd.Hook`. Provides a set of operations that build an elixir app in a Docker container using a specified `Dockerfile` at a deployment's `build_at` destination. This hook assumes that there is `docker 17.x` installed on the `build_at` destination. This hook also assumes that the given `Dockerfile` has all the operations setup. Ensures to cleanup and remove the Docker images/containers created by this build. `run_ensure` is set to `false` on default. So, by default it doesn't remove the containers. This is to speed up the next build. If you wish to remove these containers, make sure to pass `run_ensure` as `true`. Doesn't have any Rollback operations. # Options: * `run_ensure`: `boolean`. Specifies whether to a run a command or not. * `ignore_failure`: `boolean`. Specifies whether to continue if this hook fails. * `cmd_envs`: `list` of `tuples`. Specifies the environments to provide while building the distillery release. # Defaults: * `run_ensure`: `false` * `ignore_failure`: `false` """ use Akd.Hook @default_opts [run_ensure: false, ignore_failure: false, file: "Dockerfile", path: "."] @doc """ Callback implementation for `get_hooks/2`. This function returns a list of operations that can be used to build a release using distillery on the `build_at` destination of a deployment. 
## Examples iex> deployment = %Akd.Deployment{mix_env: "prod", ...> build_at: Akd.Destination.local("."), ...> publish_to: Akd.Destination.local("."), ...> name: "name", ...> vsn: "0.1.1"} iex> Akd.Build.Docker.get_hooks(deployment, []) [%Akd.Hook{ensure: [%Akd.Operation{cmd: "docker rm $(docker ps -a -q)", cmd_envs: [], destination: %Akd.Destination{host: :local, path: ".", user: :current}}], ignore_failure: false, main: [%Akd.Operation{cmd: "docker build -f Dockerfile -t name:0.1.1 .", cmd_envs: [], destination: %Akd.Destination{host: :local, path: ".", user: :current}}], rollback: [], run_ensure: false}] """ @spec get_hooks(Akd.Deployment.t, Keyword.t) :: list(Akd.Hook.t) def get_hooks(deployment, opts) do [build_hook(deployment, uniq_merge(opts, @default_opts))] end # This function takes a deployment and options and returns an Akd.Hook.t # struct using FormHook DSL defp build_hook(deployment, opts) do destination = Akd.DestinationResolver.resolve(:build, deployment) path = Keyword.get(opts, :path) file = Keyword.get(opts, :file) tag = Keyword.get(opts, :tag, deployment.name <> ":" <> deployment.vsn) form_hook opts do main "docker build -f #{file} -t #{tag} #{path}", destination ensure "docker rm $(docker ps -a -q)", destination end end # This function takes two keyword lists and merges them keeping the keys # unique. If there are multiple values for a key, it takes the value from # the first value of keyword1 corresponding to that key. defp uniq_merge(keyword1, keyword2) do keyword2 |> Keyword.merge(keyword1) |> Keyword.new() end end
lib/akd/base/build/docker.ex
0.858926
0.5752
docker.ex
starcoder
defmodule JaSerializer.Serializer do @moduledoc """ A Behaviour for defining JSON-API.org spec complaint payloads. The following callbacks are available: * `id/2` - Return ID of struct to be serialized. * `type/2` - Return string type of struct to be serialized. * `attributes/2` - A map of attributes to be serialized. * `relationships/2`- A map of `HasMany` and `HasOne` data structures. * `links/2` - A keyword list of any links pertaining to this struct. * `meta/2` - A map of any additional meta information to be serialized. * `preload/3` - A special callback that can be used to preload related data. A Serializer (or view) is typically one of the few places in an API where content and context are both present. To accomodate this each callback gets the data being serialized (typically a struct, often called a model) and the Plug.Conn as arguments. Context data such as the current user, role, etc should typically be made available on the conn. When `use`ing this module all callbacks get a default, overridable implementation. The `JaSerializer.DSL` module also provides some default implementations of these callbacks built up from the DSL. When using the DSL overriding the Behaviour functions can be a great way to customize conditional logic. While not typically used directly, the interface for returning formatted data is also defined. The results still need to be encoded into JSON as appropriate. defmodule FooSerializer do use JaSerializer end # Format one foo FooSerializer.format(one_foo, conn, meta) # Format many foos FooSerializer.format(many_foos, conn, meta) """ @type id :: String.t() | Integer @type data :: map @doc """ The id to be used in the resource object. http://jsonapi.org/format/#document-resource-objects Default implementation attempts to get the :id field from the struct. To override simply define the id function: def id(struct, _conn), do: struct.slug """ @callback id(data, Plug.Conn.t()) :: id @doc """ The type to be used in the resource object. 
http://jsonapi.org/format/#document-resource-objects Default implementation attempts to infer the type from the serializer module's name. For example: MyApp.UserView becomes "user" MyApp.V1.Serializers.Post becomes "post" MyApp.V1.CommentsSerializer becomes "comments" To override simply define the type function: def type(_post,_conn), do: "category" """ @callback type(map, Plug.Conn.t()) :: String.t() @doc """ Returns a map of attributes to be serialized. The default implementation returns all the data's fields except `id`, `type`, and `__struct__`. A typical non-DSL implementation looks like: defmodule UserSerializer do def attributes(user, conn) do Map.take(user, [:email, :name]) end end UserSerializer.attributes(user, conn) # %{email: "...", name: "..."} If using the `JaSerializer.DSL` the default implementation is based on the `JaSerializer.DSL.attributes/1` macro. Eg: defmodule UserSerializer do attributes [:email, :name, :is_admin] end UserSerializer.attributes(user, conn) # %{email: "...", name: "...", is_admin: "..."} Overriding this callback can be a good way to customize attribute behaviour based on the context (conn) with super. defmodule UserSerializer do attributes [:email, :name, :is_admin] def attributes(user, %{assigns: %{current_user: %{is_admin: true}}}) do super(user, conn) end def attributes(user, conn) do super(user, conn) |> Map.take([:email, :name]) end end UserSerializer.attributes(user, conn) # %{email: "...", name: "..."} """ @callback attributes(map, Plug.Conn.t()) :: map @doc """ Adds meta data to the individual resource being serialized. NOTE: To add meta data to the top level object pass the `meta:` option into YourSerializer.format/3. A nil return value results in no meta key being added to the serializer. A map return value will be formated with JaSerializer.Formatter.format/1. The default implementation returns nil. 
""" @callback meta(map, Plug.Conn.t()) :: map | nil @doc """ A callback that should return a map of relationship structs. Example: def relationships(article, _conn) do %{ comments: %HasMany{ serializer: MyApp.CommentView, include: true, data: article.comments, }, author: %HasOne{ serializer: MyApp.AuthorView, include: true, data: article.author, } } end See JaSerializer.Relationship.HasMany for details on fields. When using the DSL this is defined for you based on the has_many and has_one macros. """ @callback relationships(map, Plug.Conn.t()) :: map @doc """ return links about this resource """ @callback links(map, Plug.Conn.t()) :: map @doc """ A special callback that can be used to preload related data. Unlike the other callbacks, this callback is ONLY executed on the top level data being serialized. Also unlike any other callback when serializing a list of data (eg: from an index action) it recieves the entire list, not each individual post. When serializing a single record (eg, show, create, update) a single record is received. The primary use case of the callback is to preload all the relationships you need. 
For example: @default_includes [:category, comments: :author] def preload(record_or_records, _conn, []) do MyApp.Repo.preload(record_or_records, @default_includes) end def preload(record_or_records, _conn, include_opts) do MyApp.Repo.preload(record_or_records, include_opts) end """ @callback preload(map | [map], Plug.Conn.t(), nil | Keyword.t()) :: map | [map] @doc false defmacro __using__(_) do quote do @behaviour JaSerializer.Serializer alias JaSerializer.Relationship.HasMany alias JaSerializer.Relationship.HasOne # Default Behaviour Callback Defintions unquote(define_default_id()) unquote(define_default_type(__CALLER__.module)) unquote(define_default_meta()) unquote(define_default_links()) unquote(define_default_attributes()) unquote(define_default_relationships()) unquote(define_default_preload()) # API to call into serialization unquote(define_api()) end end defp define_default_type(module) do type_from_module = module |> Module.split() |> List.last() |> String.replace("Serializer", "") |> String.replace("View", "") |> JaSerializer.Formatter.Utils.format_type() |> pluralize_type(Application.get_env(:ja_serializer, :pluralize_types)) quote do def type, do: unquote(type_from_module) def type(_data, _conn), do: type() defoverridable type: 2, type: 0 end end defp pluralize_type(type, true), do: Inflex.pluralize(type) defp pluralize_type(type, _bool), do: type defp define_default_id do quote do def id(data, _c), do: Map.get(data, :id) defoverridable id: 2 end end defp define_default_meta do quote do def meta(_struct, _conn), do: nil defoverridable meta: 2 end end defp define_default_links do quote do def links(_struct, _conn), do: %{} defoverridable links: 2 end end defp define_default_attributes do quote do def attributes(data, _conn), do: Map.drop(data, [:id, :type, :__struct__]) defoverridable attributes: 2 end end defp define_default_relationships do quote do def relationships(_struct, _conn), do: %{} defoverridable relationships: 2 end end defp 
define_default_preload do quote do def preload(data, _conn, _include_opts), do: data defoverridable preload: 3 end end defp define_api do quote do def format(data) do format(data, %{}) end def format(data, conn) do format(data, conn, []) end def format(data, conn, opts) do IO.write( :stderr, IO.ANSI.format([ :red, :bright, "warning: #{__MODULE__}.format/3 is deprecated.\n" <> "Please use JaSerializer.format/4 instead, eg:\n" <> "JaSerializer.format(#{__MODULE__}, data, conn, opts)\n" ]) ) JaSerializer.format(__MODULE__, data, conn, opts) end end end end
lib/ja_serializer/serializer.ex
0.872034
0.557815
serializer.ex
starcoder
defmodule ExPesa.Mpesa.B2c do
  @moduledoc """
  Business to Customer (B2C) API enables the Business or organization to pay
  its customers who are the end-users of its products or services.

  Currently, the B2C API allows the org to perform around 3 types of
  transactions: SalaryPayments, BusinessPayments or Promotion payments.
  Salary payments are used by organizations paying their employees via M-Pesa,
  Business Payments are normal business transactions to customers e.g. bank
  transfers to mobile, Promotion payments are payments made by organization
  carrying out promotional services e.g. betting companies paying out winnings
  to clients.
  """

  import ExPesa.Mpesa.MpesaBase
  import ExPesa.Util

  @doc """
  Initiates the Mpesa B2C payment request.

  ## Configuration

  Add below config to dev.exs / prod.exs files `config.exs`

  ```elixir
  config :ex_pesa,
    mpesa: [
      cert: "",
      b2c: [
        initiator_name: "Safaricom1",
        password: "<PASSWORD>",
        timeout_url: "",
        result_url: "",
        security_credential: ""
      ]
    ]
  ```

  To generate security_credential, head over to
  https://developer.safaricom.co.ke/test_credentials, then Initiator Security
  Password for your environment.

  Alternatively, generate security credential using certificate

  `cert` - This is the M-Pesa public key certificate used to encrypt your plain
  password. There are 2 types of certificates.

    - sandbox - https://developer.safaricom.co.ke/sites/default/files/cert/cert_sandbox/cert.cer
    - production - https://developer.safaricom.co.ke/sites/default/files/cert/cert_prod/cert.cer

  `password` - This is a plain unencrypted password.

  Environment
    - production - set password from the organization portal.
    - sandbox - use your own custom password

  ## Parameters

  attrs: - a map containing:

  - `command_id` - Unique command for each transaction type. Valid B2C values
    are: SalaryPayment, BusinessPayment, PromotionPayment.
  - `amount` - The amount being transacted.
  - `phone_number` - Phone number receiving the transaction.
  - `remarks` - Comments that are sent along with the transaction.
  - `occassion` - Optional.
  - `result_url` - The end-point that receives the response of the transaction.
    You can customize the result url. Defaults to the value passed in the
    config if not added to params.

  ## Example

      iex> ExPesa.Mpesa.B2c.request(%{command_id: "BusinessPayment", amount: 10500, phone_number: "254722000000", remarks: "B2C Request", result_url: "https://58cb49b30213.ngrok.io/b2c/result_url"})
      {:ok,
        %{
          "ConversationID" => "AG_20201010_00006bd489ffcaf79e91",
          "OriginatorConversationID" => "27293-71728391-3",
          "ResponseCode" => "0",
          "ResponseDescription" => "Accept the service request successfully."
        }}
  """
  def request(params) do
    case get_security_credential_for(:b2c) do
      nil ->
        {:error, "cannot generate security_credential due to missing configuration fields"}

      security_credential ->
        b2c_request(security_credential, params)
    end
  end

  # Builds the Daraja payload and submits it once all required params are present.
  defp b2c_request(
         security_credential,
         %{
           command_id: command_id,
           amount: amount,
           phone_number: phone_number,
           remarks: remarks
         } = params
       ) do
    config = b2c_config()
    occassion = Map.get(params, :occassion)
    # Per-request result_url overrides the configured default.
    result_url = Map.get(params, :result_url, config[:result_url])

    payload = %{
      "InitiatorName" => config[:initiator_name],
      "SecurityCredential" => security_credential,
      "CommandID" => command_id,
      "Amount" => amount,
      "PartyA" => config[:short_code],
      "PartyB" => phone_number,
      "Remarks" => remarks,
      "QueueTimeOutURL" => config[:timeout_url],
      "ResultURL" => result_url,
      # NOTE(review): "Occassion" is the key this client has always sent;
      # confirm the spelling against the current Daraja spec before renaming.
      "Occassion" => occassion
    }

    make_request("/mpesa/b2c/v1/paymentrequest", payload)
  end

  # Fallback clause when one or more required params are missing.
  defp b2c_request(_security_credential, _) do
    {:error, "Required Parameter missing, 'command_id','amount','phone_number', 'remarks'"}
  end

  # Reads the :b2c section of the :ex_pesa Mpesa config at call time, so
  # runtime config changes are picked up and the lookup is not repeated inline.
  defp b2c_config do
    Application.get_env(:ex_pesa, :mpesa)[:b2c]
  end
end
lib/ex_pesa/Mpesa/b2c.ex
0.788868
0.817756
b2c.ex
starcoder
defmodule AWS.EMR do
  @moduledoc """
  Amazon EMR is a web service that makes it easier to process large amounts of
  data efficiently.

  Amazon EMR uses Hadoop processing combined with several Amazon Web Services
  services to do tasks such as web indexing, data mining, log file analysis,
  machine learning, scientific simulation, and data warehouse management.
  """

  alias AWS.Client
  alias AWS.Request

  # NOTE: This module has the shape of generated AWS SDK code: every public
  # function is a thin wrapper that POSTs a JSON payload for one named API
  # action via Request.request_post/5, using the static service metadata below.
  # Prefer regenerating over hand-editing individual operations.

  # Static transport/signing metadata shared by every operation in this module.
  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: "Amazon EMR",
      api_version: "2009-03-31",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "elasticmapreduce",
      global?: false,
      protocol: "json",
      service_id: "EMR",
      signature_version: "v4",
      signing_name: "elasticmapreduce",
      target_prefix: "ElasticMapReduce"
    }
  end

  @doc """
  Adds an instance fleet to a running cluster.

  The instance fleet configuration is available only in Amazon EMR versions
  4.8.0 and later, excluding 5.0.x.
  """
  def add_instance_fleet(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "AddInstanceFleet", input, options)
  end

  @doc """
  Adds one or more instance groups to a running cluster.
  """
  def add_instance_groups(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "AddInstanceGroups", input, options)
  end

  @doc """
  AddJobFlowSteps adds new steps to a running cluster.

  A maximum of 256 steps are allowed in each job flow.

  If your cluster is long-running (such as a Hive data warehouse) or complex,
  you may require more than 256 steps to process your data. You can bypass the
  256-step limitation in various ways, including using SSH to connect to the
  master node and submitting queries directly to the software running on the
  master node, such as Hive and Hadoop. For more information on how to do this,
  see [Add More than 256 Steps to a Cluster](https://docs.aws.amazon.com/emr/latest/ManagementGuide/AddMoreThan256Steps.html)
  in the *Amazon EMR Management Guide*.

  A step specifies the location of a JAR file stored either on the master node
  of the cluster or in Amazon S3. Each step is performed by the main function of
  the main class of the JAR file. The main class can be specified either in the
  manifest of the JAR or by using the MainFunction parameter of the step.

  Amazon EMR executes each step in the order listed. For a step to be considered
  complete, the main function must exit with a zero exit code and all Hadoop
  jobs started while the step was running must have completed and run
  successfully.

  You can only add steps to a cluster that is in one of the following states:
  STARTING, BOOTSTRAPPING, RUNNING, or WAITING.

  The string values passed into `HadoopJarStep` object cannot exceed a total of
  10240 characters.
  """
  def add_job_flow_steps(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "AddJobFlowSteps", input, options)
  end

  @doc """
  Adds tags to an Amazon EMR resource, such as a cluster or an Amazon EMR
  Studio.

  Tags make it easier to associate resources in various ways, such as grouping
  clusters to track your Amazon EMR resource allocation costs. For more
  information, see [Tag Clusters](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-plan-tags.html).
  """
  def add_tags(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "AddTags", input, options)
  end

  @doc """
  Cancels a pending step or steps in a running cluster.

  Available only in Amazon EMR versions 4.8.0 and later, excluding version
  5.0.0. A maximum of 256 steps are allowed in each CancelSteps request.
  CancelSteps is idempotent but asynchronous; it does not guarantee that a step
  will be canceled, even if the request is successfully submitted.

  When you use Amazon EMR versions 5.28.0 and later, you can cancel steps that
  are in a `PENDING` or `RUNNING` state. In earlier versions of Amazon EMR, you
  can only cancel steps that are in a `PENDING` state.
  """
  def cancel_steps(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CancelSteps", input, options)
  end

  @doc """
  Creates a security configuration, which is stored in the service and can be
  specified when a cluster is created.
  """
  def create_security_configuration(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreateSecurityConfiguration", input, options)
  end

  @doc """
  Creates a new Amazon EMR Studio.
  """
  def create_studio(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreateStudio", input, options)
  end

  @doc """
  Maps a user or group to the Amazon EMR Studio specified by `StudioId`, and
  applies a session policy to refine Studio permissions for that user or group.

  Use `CreateStudioSessionMapping` to assign users to a Studio when you use
  Amazon Web Services SSO authentication. For instructions on how to assign
  users to a Studio when you use IAM authentication, see [Assign a user or group to your EMR Studio](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-studio-manage-users.html#emr-studio-assign-users-groups).
  """
  def create_studio_session_mapping(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreateStudioSessionMapping", input, options)
  end

  @doc """
  Deletes a security configuration.
  """
  def delete_security_configuration(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DeleteSecurityConfiguration", input, options)
  end

  @doc """
  Removes an Amazon EMR Studio from the Studio metadata store.
  """
  def delete_studio(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DeleteStudio", input, options)
  end

  @doc """
  Removes a user or group from an Amazon EMR Studio.
  """
  def delete_studio_session_mapping(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DeleteStudioSessionMapping", input, options)
  end

  @doc """
  Provides cluster-level details including status, hardware and software
  configuration, VPC settings, and so on.
  """
  def describe_cluster(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeCluster", input, options)
  end

  @doc """
  This API is no longer supported and will eventually be removed.

  We recommend you use `ListClusters`, `DescribeCluster`, `ListSteps`,
  `ListInstanceGroups` and `ListBootstrapActions` instead.

  DescribeJobFlows returns a list of job flows that match all of the supplied
  parameters. The parameters can include a list of job flow IDs, job flow
  states, and restrictions on job flow creation date and time.

  Regardless of supplied parameters, only job flows created within the last two
  months are returned.

  If no parameters are supplied, then job flows matching either of the
  following criteria are returned:

    * Job flows created and completed in the last two weeks

    * Job flows created within the last two months that are in one of
      the following states: `RUNNING`, `WAITING`, `SHUTTING_DOWN`, `STARTING`

  Amazon EMR can return a maximum of 512 job flow descriptions.
  """
  def describe_job_flows(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeJobFlows", input, options)
  end

  @doc """
  Provides details of a notebook execution.
  """
  def describe_notebook_execution(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeNotebookExecution", input, options)
  end

  @doc """
  Provides EMR release label details, such as releases available the region
  where the API request is run, and the available applications for a specific
  EMR release label.

  Can also list EMR release versions that support a specified version of Spark.
  """
  def describe_release_label(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeReleaseLabel", input, options)
  end

  @doc """
  Provides the details of a security configuration by returning the
  configuration JSON.
  """
  def describe_security_configuration(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeSecurityConfiguration", input, options)
  end

  @doc """
  Provides more detail about the cluster step.
  """
  def describe_step(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeStep", input, options)
  end

  @doc """
  Returns details for the specified Amazon EMR Studio including ID, Name, VPC,
  Studio access URL, and so on.
  """
  def describe_studio(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeStudio", input, options)
  end

  @doc """
  Returns the auto-termination policy for an Amazon EMR cluster.
  """
  def get_auto_termination_policy(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetAutoTerminationPolicy", input, options)
  end

  @doc """
  Returns the Amazon EMR block public access configuration for your Amazon Web
  Services account in the current Region.

  For more information see [Configure Block Public Access for Amazon EMR](https://docs.aws.amazon.com/emr/latest/ManagementGuide/configure-block-public-access.html)
  in the *Amazon EMR Management Guide*.
  """
  def get_block_public_access_configuration(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetBlockPublicAccessConfiguration", input, options)
  end

  @doc """
  Fetches the attached managed scaling policy for an Amazon EMR cluster.
  """
  def get_managed_scaling_policy(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetManagedScalingPolicy", input, options)
  end

  @doc """
  Fetches mapping details for the specified Amazon EMR Studio and identity
  (user or group).
  """
  def get_studio_session_mapping(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetStudioSessionMapping", input, options)
  end

  @doc """
  Provides information about the bootstrap actions associated with a cluster.
  """
  def list_bootstrap_actions(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListBootstrapActions", input, options)
  end

  @doc """
  Provides the status of all clusters visible to this Amazon Web Services
  account.

  Allows you to filter the list of clusters based on certain criteria; for
  example, filtering by cluster creation date and time or by status. This call
  returns a maximum of 50 clusters in unsorted order per call, but returns a
  marker to track the paging of the cluster list across multiple ListClusters
  calls.
  """
  def list_clusters(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListClusters", input, options)
  end

  @doc """
  Lists all available details about the instance fleets in a cluster.

  The instance fleet configuration is available only in Amazon EMR versions
  4.8.0 and later, excluding 5.0.x versions.
  """
  def list_instance_fleets(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListInstanceFleets", input, options)
  end

  @doc """
  Provides all available details about the instance groups in a cluster.
  """
  def list_instance_groups(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListInstanceGroups", input, options)
  end

  @doc """
  Provides information for all active EC2 instances and EC2 instances
  terminated in the last 30 days, up to a maximum of 2,000.

  EC2 instances in any of the following states are considered active:
  AWAITING_FULFILLMENT, PROVISIONING, BOOTSTRAPPING, RUNNING.
  """
  def list_instances(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListInstances", input, options)
  end

  @doc """
  Provides summaries of all notebook executions.

  You can filter the list based on multiple criteria such as status, time
  range, and editor id. Returns a maximum of 50 notebook executions and a
  marker to track the paging of a longer notebook execution list across
  multiple `ListNotebookExecution` calls.
  """
  def list_notebook_executions(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListNotebookExecutions", input, options)
  end

  @doc """
  Retrieves release labels of EMR services in the region where the API is
  called.
  """
  def list_release_labels(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListReleaseLabels", input, options)
  end

  @doc """
  Lists all the security configurations visible to this account, providing
  their creation dates and times, and their names.

  This call returns a maximum of 50 clusters per call, but returns a marker to
  track the paging of the cluster list across multiple
  ListSecurityConfigurations calls.
  """
  def list_security_configurations(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListSecurityConfigurations", input, options)
  end

  @doc """
  Provides a list of steps for the cluster in reverse order unless you specify
  `stepIds` with the request or filter by `StepStates`.

  You can specify a maximum of 10 `stepIDs`. The CLI automatically paginates
  results to return a list greater than 50 steps. To return more than 50 steps
  using the CLI, specify a `Marker`, which is a pagination token that indicates
  the next set of steps to retrieve.
  """
  def list_steps(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListSteps", input, options)
  end

  @doc """
  Returns a list of all user or group session mappings for the Amazon EMR
  Studio specified by `StudioId`.
  """
  def list_studio_session_mappings(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListStudioSessionMappings", input, options)
  end

  @doc """
  Returns a list of all Amazon EMR Studios associated with the Amazon Web
  Services account.

  The list includes details such as ID, Studio Access URL, and creation time
  for each Studio.
  """
  def list_studios(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListStudios", input, options)
  end

  @doc """
  Modifies the number of steps that can be executed concurrently for the
  cluster specified using ClusterID.
  """
  def modify_cluster(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ModifyCluster", input, options)
  end

  @doc """
  Modifies the target On-Demand and target Spot capacities for the instance
  fleet with the specified InstanceFleetID within the cluster specified using
  ClusterID.

  The call either succeeds or fails atomically.

  The instance fleet configuration is available only in Amazon EMR versions
  4.8.0 and later, excluding 5.0.x versions.
  """
  def modify_instance_fleet(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ModifyInstanceFleet", input, options)
  end

  @doc """
  ModifyInstanceGroups modifies the number of nodes and configuration settings
  of an instance group.

  The input parameters include the new target instance count for the group and
  the instance group ID. The call will either succeed or fail atomically.
  """
  def modify_instance_groups(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ModifyInstanceGroups", input, options)
  end

  @doc """
  Creates or updates an automatic scaling policy for a core instance group or
  task instance group in an Amazon EMR cluster.

  The automatic scaling policy defines how an instance group dynamically adds
  and terminates EC2 instances in response to the value of a CloudWatch metric.
  """
  def put_auto_scaling_policy(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "PutAutoScalingPolicy", input, options)
  end

  @doc """
  Auto-termination is supported in Amazon EMR versions 5.30.0 and 6.1.0 and
  later.

  For more information, see [Using an auto-termination policy](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-auto-termination-policy.html).

  Creates or updates an auto-termination policy for an Amazon EMR cluster. An
  auto-termination policy defines the amount of idle time in seconds after
  which a cluster automatically terminates. For alternative cluster termination
  options, see [Control cluster termination](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-plan-termination.html).
  """
  def put_auto_termination_policy(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "PutAutoTerminationPolicy", input, options)
  end

  @doc """
  Creates or updates an Amazon EMR block public access configuration for your
  Amazon Web Services account in the current Region.

  For more information see [Configure Block Public Access for Amazon EMR](https://docs.aws.amazon.com/emr/latest/ManagementGuide/configure-block-public-access.html)
  in the *Amazon EMR Management Guide*.
  """
  def put_block_public_access_configuration(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "PutBlockPublicAccessConfiguration", input, options)
  end

  @doc """
  Creates or updates a managed scaling policy for an Amazon EMR cluster.

  The managed scaling policy defines the limits for resources, such as EC2
  instances that can be added or terminated from a cluster. The policy only
  applies to the core and task nodes. The master node cannot be scaled after
  initial configuration.
  """
  def put_managed_scaling_policy(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "PutManagedScalingPolicy", input, options)
  end

  @doc """
  Removes an automatic scaling policy from a specified instance group within an
  EMR cluster.
  """
  def remove_auto_scaling_policy(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "RemoveAutoScalingPolicy", input, options)
  end

  @doc """
  Removes an auto-termination policy from an Amazon EMR cluster.
  """
  def remove_auto_termination_policy(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "RemoveAutoTerminationPolicy", input, options)
  end

  @doc """
  Removes a managed scaling policy from a specified EMR cluster.
  """
  def remove_managed_scaling_policy(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "RemoveManagedScalingPolicy", input, options)
  end

  @doc """
  Removes tags from an Amazon EMR resource, such as a cluster or Amazon EMR
  Studio.

  Tags make it easier to associate resources in various ways, such as grouping
  clusters to track your Amazon EMR resource allocation costs. For more
  information, see [Tag Clusters](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-plan-tags.html).

  The following example removes the stack tag with value Prod from a cluster:
  """
  def remove_tags(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "RemoveTags", input, options)
  end

  @doc """
  RunJobFlow creates and starts running a new cluster (job flow).

  The cluster runs the steps specified. After the steps complete, the cluster
  stops and the HDFS partition is lost. To prevent loss of data, configure the
  last step of the job flow to store results in Amazon S3. If the
  `JobFlowInstancesConfig` `KeepJobFlowAliveWhenNoSteps` parameter is set to
  `TRUE`, the cluster transitions to the WAITING state rather than shutting
  down after the steps have completed.

  For additional protection, you can set the `JobFlowInstancesConfig`
  `TerminationProtected` parameter to `TRUE` to lock the cluster and prevent it
  from being terminated by API call, user intervention, or in the event of a
  job flow error.

  A maximum of 256 steps are allowed in each job flow.

  If your cluster is long-running (such as a Hive data warehouse) or complex,
  you may require more than 256 steps to process your data. You can bypass the
  256-step limitation in various ways, including using the SSH shell to connect
  to the master node and submitting queries directly to the software running on
  the master node, such as Hive and Hadoop. For more information on how to do
  this, see [Add More than 256 Steps to a Cluster](https://docs.aws.amazon.com/emr/latest/ManagementGuide/AddMoreThan256Steps.html)
  in the *Amazon EMR Management Guide*.

  For long running clusters, we recommend that you periodically store your
  results.

  The instance fleets configuration is available only in Amazon EMR versions
  4.8.0 and later, excluding 5.0.x versions. The RunJobFlow request can contain
  InstanceFleets parameters or InstanceGroups parameters, but not both.
  """
  def run_job_flow(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "RunJobFlow", input, options)
  end

  @doc """
  SetTerminationProtection locks a cluster (job flow) so the EC2 instances in
  the cluster cannot be terminated by user intervention, an API call, or in the
  event of a job-flow error.

  The cluster still terminates upon successful completion of the job flow.
  Calling `SetTerminationProtection` on a cluster is similar to calling the
  Amazon EC2 `DisableAPITermination` API on all EC2 instances in a cluster.

  `SetTerminationProtection` is used to prevent accidental termination of a
  cluster and to ensure that in the event of an error, the instances persist so
  that you can recover any data stored in their ephemeral instance storage.

  To terminate a cluster that has been locked by setting
  `SetTerminationProtection` to `true`, you must first unlock the job flow by a
  subsequent call to `SetTerminationProtection` in which you set the value to
  `false`.

  For more information, see [Managing Cluster Termination](https://docs.aws.amazon.com/emr/latest/ManagementGuide/UsingEMR_TerminationProtection.html)
  in the *Amazon EMR Management Guide*.
  """
  def set_termination_protection(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "SetTerminationProtection", input, options)
  end

  @doc """
  The SetVisibleToAllUsers parameter is no longer supported.

  Your cluster may be visible to all users in your account. To restrict cluster
  access using an IAM policy, see [Identity and Access Management for EMR](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-plan-access-iam.html).

  Sets the `Cluster$VisibleToAllUsers` value for an EMR cluster. When `true`,
  IAM principals in the Amazon Web Services account can perform EMR cluster
  actions that their IAM policies allow. When `false`, only the IAM principal
  that created the cluster and the Amazon Web Services account root user can
  perform EMR actions on the cluster, regardless of IAM permissions policies
  attached to other IAM principals.

  This action works on running clusters. When you create a cluster, use the
  `RunJobFlowInput$VisibleToAllUsers` parameter.

  For more information, see [Understanding the EMR Cluster VisibleToAllUsers Setting](https://docs.aws.amazon.com/emr/latest/ManagementGuide/security_iam_emr-with-iam.html#security_set_visible_to_all_users)
  in the *Amazon EMRManagement Guide*.
  """
  def set_visible_to_all_users(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "SetVisibleToAllUsers", input, options)
  end

  @doc """
  Starts a notebook execution.
  """
  def start_notebook_execution(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "StartNotebookExecution", input, options)
  end

  @doc """
  Stops a notebook execution.
  """
  def stop_notebook_execution(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "StopNotebookExecution", input, options)
  end

  @doc """
  TerminateJobFlows shuts a list of clusters (job flows) down.

  When a job flow is shut down, any step not yet completed is canceled and the
  EC2 instances on which the cluster is running are stopped. Any log files not
  already saved are uploaded to Amazon S3 if a LogUri was specified when the
  cluster was created.

  The maximum number of clusters allowed is 10. The call to `TerminateJobFlows`
  is asynchronous. Depending on the configuration of the cluster, it may take
  up to 1-5 minutes for the cluster to completely terminate and release
  allocated resources, such as Amazon EC2 instances.
  """
  def terminate_job_flows(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "TerminateJobFlows", input, options)
  end

  @doc """
  Updates an Amazon EMR Studio configuration, including attributes such as
  name, description, and subnets.
  """
  def update_studio(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "UpdateStudio", input, options)
  end

  @doc """
  Updates the session policy attached to the user or group for the specified
  Amazon EMR Studio.
  """
  def update_studio_session_mapping(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "UpdateStudioSessionMapping", input, options)
  end
end
lib/aws/generated/emr.ex
0.864525
0.581481
emr.ex
starcoder
defmodule Cast do
  @moduledoc """
  This module provides macros to generate elixir code from C AST.

  # Usage

  First you need to generate an XML representation of C AST thanks to
  [castxml](https://github.com/CastXML) (version >= 0.2), in GCC mode (other
  modes may be supported in the future).

  `castxml` has many options. Its usage is out of the scope of this document.
  However, generating AST from a simple C project may be as simple as:

  ```sh
  castxml -c -x c -std=gnu11 --castxml-cc-gnu gcc --castxml-gccxml -o ast.xml hello.c
  ```

  The module where you want to generate elixir code from C AST must begin with:

  ```elixir
  use Cast, ast: "/path/to/ast.xml"
  ```

  Then, you can use the following macros to generate constants accessors or
  enumeration modules:
  * [`enum/2`](`Cast.DSL.enum/2`)
  * [`const/{1,2}`](`Cast.DSL.const/2`)

  WARNING: XML parsing is done through DOM parser, memory usage can be really high

  # Example:

  Given following C code:

  ```C
  const int version = 1;

  enum pets {
    elephant = 0,
    mouse
  }
  ```

  Following elixir module:

  ```elixir
  defmodule Hello do
    use Cast, ast: "/path/to/ast.xml"

    const :version, cast: &integer/1

    enum Pets, name: "pets"
  end
  ```

  Will result in:

  ```elixir
  %{elephant: 0, mouse: 1} = Hello.Pets.mapping()
  0 == Hello.Pets.value :elephant
  1 == Hello.Pets.value :mouse
  :elephant == Hello.Pets.key 0
  :mouse == Hello.Pets.key 1
  1 == Hello.version()
  ```
  """

  # Raw AST term as loaded from the castxml XML document.
  @type ast :: term
  # A single filter applied when selecting enum entries by name.
  @type enum_filter :: {:contains, String.t()}
  @type enum_filters :: [enum_filter]

  # Injected into any module that does `use Cast, ast: path`. Records the AST
  # source and sets up the accumulating attributes that the DSL macros
  # (enum/const) append to; Cast.DSL's __before_compile__ then expands them.
  defmacro __using__(opts) do
    # :ast is mandatory — fail at compile time if the caller omitted it.
    ast_src = Keyword.fetch!(opts, :ast)

    quote do
      import Cast.DSL, only: [enum: 2, const: 1, const: 2]
      import Cast.Caster

      # Remember where to load the XML AST from for this module.
      Module.put_attribute(__MODULE__, :ast, unquote(ast_src))
      # Each enum/const declaration accumulates here rather than overwriting.
      Module.register_attribute(__MODULE__, :enums, accumulate: true)
      Module.register_attribute(__MODULE__, :consts, accumulate: true)

      @before_compile Cast.DSL
    end
  end
end
lib/cast.ex
0.88782
0.792022
cast.ex
starcoder
defmodule Day11 do
  @moduledoc """
  Advent of Code 2019, day 11: an Intcode-driven hull-painting robot.

  The robot walks a grid, reading the color under it, feeding that color to
  the Intcode program, and receiving back a color to paint plus a turn
  direction. Relies on an `Intcode` module (imported below) providing
  `initial_state/2` and `execute/1`.
  """

  import Intcode

  # Parses a comma-separated Intcode program file into a list of integers.
  # Integer.parse returns {int, rest}; elem/2 keeps just the integer.
  def from_file(path) do
    File.read!(path)
    |> String.split(",")
    |> Enum.map(&Integer.parse/1)
    |> Enum.map(&(elem(&1, 0)))
  end

  # Runs the painting robot starting on a panel of `initial_color`
  # (0 = black, 1 = white). Starts at {0, 0} facing direction 0.
  # Returns the chronological list of {position, color} paint operations.
  def robot(program, initial_color) do
    robot_execute(execute(initial_state(program, [initial_color])), {{0, 0}, 0}, [])
  end

  # One robot step: consume the program's two outputs (paint color, turn),
  # record the paint, move, and re-run the program with the color of the new
  # panel as input. `next` is {position, direction}; `painted` is a reverse-
  # chronological list of {position, color}.
  def robot_execute(state, next \\ nil, painted \\ []) do
    # Output is accumulated in reverse; after reversing, the first element is
    # the color to paint and the second is the turn instruction.
    [color, direction_mod] = state.output |> Enum.reverse

    {paint_position, _} = current =
      case next do
        nil -> {{0, 0}, 0}
        x -> x
      end

    {new_color_pos, _} = new_next = next_position(current, direction_mod)

    # Most recent paint wins: `painted` is prepended-to, so Enum.find returns
    # the latest color at that position. Unpainted panels default to black (0)
    # via the {{0, 0}, 0} default tuple (only its second element is used).
    {_, current_color} =
      painted |> Enum.find({{0, 0}, 0}, fn {pos, _} -> pos == new_color_pos end)

    if Map.get(state, :done, false) do
      # Program halted: include the final paint and restore chronological order.
      [{paint_position, color} | painted] |> Enum.reverse
    else
      # Resume the program with the new panel's color as its only input.
      robot_execute(execute(%{state | :inputs => [current_color], :output => []}), new_next, [{paint_position, color} | painted])
    end
  end

  # Turns per `direction_modifier` and advances one panel.
  # Directions: 0 = up (y-1), 1 = right (x+1), 2 = down (y+1), 3 = left (x-1).
  def next_position({{x, y}, direction}, direction_modifier) do
    new_direction = direction(direction, direction_modifier)
    cond do
      new_direction == 0 -> {{x, y - 1}, new_direction}
      new_direction == 1 -> {{x + 1, y}, new_direction}
      new_direction == 2 -> {{x, y + 1}, new_direction}
      new_direction == 3 -> {{x - 1, y}, new_direction}
    end
  end

  # Rotates the heading: modifier 0 = turn left (counter-clockwise),
  # anything else = turn right. `+ 4` keeps rem/2 non-negative.
  def direction(direction, modifier) do
    if modifier == 0 do
      rem(direction + 4 - 1, 4)
    else
      rem(direction + 1, 4)
    end
  end

  # Renders a %{position => color} map as an ASCII grid ("#" = white,
  # "." = black) over the bounding box of painted panels. Returns Enum.each's
  # :ok — callers should not use the return value for output.
  def print(painted) do
    y_boundary = painted |> Map.keys |> Enum.group_by(fn {_, y} -> y end) |> Map.keys
    y_range = (y_boundary |> Enum.min) .. (y_boundary |> Enum.max)
    x_boundary = painted |> Map.keys |> Enum.group_by(fn {x, _} -> x end) |> Map.keys
    x_range = (x_boundary |> Enum.min) .. (x_boundary |> Enum.max)

    Enum.each(y_range, fn y ->
      Enum.each(x_range, fn x ->
        color = Map.get(painted, {x, y}, 0)
        cond do
          color == 1 -> IO.write("#")
          color == 0 -> IO.write(".")
        end
      end)
      IO.puts("")
    end)
  end

  # Part 1: number of distinct panels painted starting on black.
  # Part 2: the registration identifier painted starting on white; Map.new
  # keeps the last (final) color per panel before printing.
  # NOTE(review): print/1 returns :ok, so the second IO.puts also prints the
  # literal "ok" after the grid — harmless but probably unintended.
  def solution do
    IO.puts("#{from_file("day11_input.txt") |> robot(0) |> Enum.uniq_by(fn {pos, _} -> pos end) |> length}")
    IO.puts("#{from_file("day11_input.txt") |> robot(1) |> Map.new |> print}")
  end
end
lib/day11.ex
0.52829
0.491456
day11.ex
starcoder
defmodule BroadwayRedis.Producer do @moduledoc """ A GenStage producer that continuously receives messages from a Redis list. This implementation follows the [Reliable Queue](https://redis.io/commands/rpoplpush#pattern-reliable-queue) pattern outlined in the Redis documentation. ## Options * `:redis_instance` - Required. An atom representing the redis instance/connection. * `:list_name` - Required. The name of the redis list containing items you want to process. * `:working_list_name` - Required. The name of the redis 'working' or 'processing' list. * `:max_number_of_items` - Optional. The maximum number of items to be fetched per pipelined request. This value generally should be between `1` and `20`. Default is `10`. ## Additional Options * `:redis_client` - Optional. A module that implements the `BroadwayRedis.RedisClient` behaviour. This module is responsible for fetching and acknowledging the messages. Pay attention that all options passed to the producer will be forwarded to the client. It's up to the client to normalize the options it needs. Default is `RedixClient`. * `:receive_interval` - Optional. The duration (in milliseconds) for which the producer waits before making a request for more items. Default is `5000`. 
""" use GenStage @default_receive_interval 5_000 @impl true def init(opts) do client = opts[:redis_client] || BroadwayRedis.RedixClient receive_interval = opts[:receive_interval] || @default_receive_interval case client.init(opts) do {:error, message} -> raise ArgumentError, "invalid options given to #{inspect(client)}.init/1, " <> message {:ok, opts} -> {:producer, %{ demand: 0, receive_timer: nil, receive_interval: receive_interval, redis_client: {client, opts} }} end end @impl true def handle_demand(incoming_demand, %{demand: demand} = state) do handle_receive_messages(%{state | demand: demand + incoming_demand}) end @impl true def handle_info(:receive_messages, state) do handle_receive_messages(%{state | receive_timer: nil}) end @impl true def handle_info(_, state) do {:noreply, [], state} end def handle_receive_messages(%{receive_timer: nil, demand: demand} = state) when demand > 0 do messages = receive_messages_from_redis(state, demand) new_demand = demand - length(messages) receive_timer = case {messages, new_demand} do {[], _} -> schedule_receive_messages(state.receive_interval) {_, 0} -> nil _ -> schedule_receive_messages(0) end {:noreply, messages, %{state | demand: new_demand, receive_timer: receive_timer}} end def handle_receive_messages(state) do {:noreply, [], state} end defp receive_messages_from_redis(state, total_demand) do %{redis_client: {client, opts}} = state client.receive_messages(total_demand, opts) end defp schedule_receive_messages(interval) do Process.send_after(self(), :receive_messages, interval) end end
lib/broadway_redis/producer.ex
0.888345
0.51879
producer.ex
starcoder
defmodule Tai.VenueAdapters.Deribit.Product do @time_unit :millisecond def build(instrument, venue_id) do symbol = instrument.instrument_name |> to_symbol status = instrument |> to_status type = instrument |> to_type listing = Timex.from_unix(instrument.creation_timestamp, @time_unit) expiry = Timex.from_unix(instrument.expiration_timestamp, @time_unit) min_trade_amount = instrument.min_trade_amount |> Tai.Utils.Decimal.cast!(:normalize) tick_size = instrument.tick_size |> Tai.Utils.Decimal.cast!() contract_size = instrument.contract_size |> Tai.Utils.Decimal.cast!(:normalize) maker_fee = instrument.maker_commission |> Tai.Utils.Decimal.cast!() taker_fee = instrument.taker_commission |> Tai.Utils.Decimal.cast!() %Tai.Venues.Product{ venue_id: venue_id, symbol: symbol, venue_symbol: instrument.instrument_name, base: instrument.base_currency |> downcase_and_atom(), quote: instrument.quote_currency |> downcase_and_atom(), venue_base: instrument.base_currency, venue_quote: instrument.quote_currency, status: status, type: type, listing: listing, expiry: expiry, collateral: false, price_increment: tick_size, size_increment: min_trade_amount, min_price: tick_size, min_size: Decimal.new(1), value: contract_size, value_side: :quote, is_quanto: false, is_inverse: true, maker_fee: maker_fee, taker_fee: taker_fee, option_type: instrument.option_type |> option_type, strike: instrument.strike |> strike_price } end def to_symbol(venue_symbol) do venue_symbol |> String.replace("-", "_") |> downcase_and_atom() end def from_symbol(symbol) do symbol |> Atom.to_string() |> String.upcase() end defp downcase_and_atom(str), do: str |> String.downcase() |> String.to_atom() defp to_status(%ExDeribit.Instrument{is_active: true}), do: :trading defp to_status(%ExDeribit.Instrument{is_active: false}), do: :halt defp to_type(%ExDeribit.Instrument{kind: "option"}), do: :option defp to_type(%ExDeribit.Instrument{kind: "future", instrument_name: instrument_name}) do instrument_name |> 
String.split("-") |> Enum.reverse() |> case do ["PERPETUAL" | _] -> :swap _ -> :future end end defp option_type("put"), do: :put defp option_type("call"), do: :call defp option_type(_), do: nil defp strike_price(price) when is_number(price) do price |> Tai.Utils.Decimal.cast!(:normalize) |> Decimal.to_string(:normal) |> Decimal.new() end defp strike_price(_), do: nil end
apps/tai/lib/tai/venue_adapters/deribit/product.ex
0.648911
0.436682
product.ex
starcoder
defmodule PlugAttack do @moduledoc ~S""" A plug building toolkit for blocking and throttling abusive requests. PlugAttack is a set of macros that can be used to build a plug to protect your web app from bad clients. It allows safelisting, blocklisting and throttling based on arbitrary properties of the request. The throttling state is stored in a configurable storage. By default an implementation backed by `:ets` tables is offered. ## Example defmodule MyApp.PlugAttack do import Plug.Conn use PlugAttack # For more rules examples see PlugAttack.rule/2 macro documentation. rule "allow local", conn do allow conn.remote_ip == {127, 0, 0, 1} end # It's possible to customize what happens when conn is let through def allow_action(conn, _data, _opts), do: conn # Or when it's blocked def block_action(conn, _data, _opts) do conn |> send_resp(:forbidden, "Forbidden\n") |> halt end end """ @typedoc """ The rule return value. """ @type rule :: {:allow, term} | {:block, term} | nil @doc """ Action performed when the request is blocked. """ @callback block_action(conn :: Plug.Conn.t(), term, term) :: Plug.Conn.t() @doc """ Action performed when the request is allowed. 
""" @callback allow_action(conn :: Plug.Conn.t(), term, term) :: Plug.Conn.t() defmacro __using__(opts) do quote do @behaviour Plug @behaviour PlugAttack @plug_attack_opts unquote(opts) def init(opts) do opts end def call(conn, opts) do plug_attack_call(conn, opts) end def block_action(conn, _data, _opts) do conn |> Plug.Conn.send_resp(:forbidden, "Forbidden\n") |> Plug.Conn.halt() end def allow_action(conn, _data, _opts) do conn end defoverridable init: 1, call: 2, block_action: 3, allow_action: 3 import PlugAttack, only: [rule: 2, rule: 3] Module.register_attribute(__MODULE__, :plug_attack, accumulate: true) @before_compile PlugAttack end end @doc false defmacro __before_compile__(%{module: module} = env) do plug_attack = Module.get_attribute(module, :plug_attack) {conn, opts, body} = PlugAttack.compile(env, plug_attack) quote do defp plug_attack_call(unquote(conn), unquote(opts)), do: unquote(body) end end @doc ~S""" Defines a rule. A rule is an expression that returns either `{:allow, data}`, `{:block, data}`, or `nil`. If an allow or block tuple is returned we say the rule *matched*, otherwise the rule didn't match and further rules will be evaluated. If a rule matched the corresponding `allow_action/3` or `block_action/3` function on the defining module will be called passing the `conn`, the `data` value from the allow or block tuple and `opts` as returned by the `init/1` plug callback. If none rule matched, neither `allow_action/3` nor `block_action/3` will be called. Both actions should behave similarly to plugs, returning the modified `conn` argument. The default implementation of `allow_action/3` will return the conn unmodified. The default implementation of `block_action/3` will respond with status 403 Forbidden, the body `"Forbidden\n"` and halt the plug pipeline. Various predefined rules are defined in the `PlugAttack.Rule` module. This module is automatically imported in the rule's body. 
## Examples rule "allow local", conn do allow conn.remote_ip == {127, 0, 0, 1} end rule "block 1.2.3.4", conn do block conn.remote_ip == {1, 2, 3, 4} end rule "throttle per ip", conn do # throttle to 5 requests per second throttle conn.remote_ip, period: 1_000, limit: 5, storage: {PlugAttack.Storage.Ets, MyApp.PlugAttack.Storage} end rule "throttle login requests", conn do if conn.method == "POST" and conn.path_info == ["login"] do throttle conn.params["email"], period: 60_000, limit: 10, storage: {PlugAttack.Storage.Ets, MyApp.PlugAttack.Storage} end end """ defmacro rule(message, var \\ quote(do: _), contents) do contents = case contents do [do: block] -> quote do import PlugAttack.Rule unquote(block) end _ -> quote do import PlugAttack.Rule try(unquote(contents)) end end var = Macro.escape(var) contents = Macro.escape(contents, unquote: true) quote bind_quoted: [message: message, var: var, contents: contents] do name = PlugAttack.register(__MODULE__, message) defp unquote(name)(unquote(var)), do: unquote(contents) end end @doc false def register(module, message) do name = :"rule #{message}" Module.put_attribute(module, :plug_attack, name) name end @doc false def compile(env, rules) do conn = quote(do: conn) opts = quote(do: opts) body = Enum.reduce(rules, conn, &quote_rule(&2, &1, conn, opts, env)) {conn, opts, body} end defp quote_rule(next, name, conn, opts, _env) do quote generated: true do case unquote(name)(unquote(conn)) do {:allow, data} -> allow_action(unquote(conn), data, unquote(opts)) {:block, data} -> block_action(unquote(conn), data, unquote(opts)) nil -> unquote(next) other -> raise "a PlugAttack rule should return `{:allow, data}`, " <> "`{:block, data}`, or `nil`, got: #{inspect(other)}" end end end end
lib/plug_attack.ex
0.798776
0.49762
plug_attack.ex
starcoder
defmodule Bench do @moduledoc """ Runs a benchmark. This benchmark works by having a parent node start two children, and once the parent node receives a message it will pass it to all the nodes it's connected to. Each child may also have two children, or none. Thus, sending a message into the node cluster will start an "atomic bomb" of sorts - with each node passing a message to the other and it gets exponentially worse until the node gets killed by the OS for running out of memory. The goal of the benchmark is for the first node to receive the highest number it can, which is ultimately a benchmark of the pre-emptive multitasking of Elixir. In languages with cooperative multitasking, the first node will typically not reach a high value, most likely due to the massive amount of work the other nodes have to do. """ @doc """ Starts the benchmark. The first argument does not need to be specified. """ def run(node_partitioner \\ NodePartitioner) do me = self() worker = Node.spawn(NodePartitioner.next_worker(node_partitioner), fn -> worker(node_partitioner, me, 1) end) send(worker, 1) do_run(0, worker) end defp do_run(max_num, worker) do receive do num -> send(worker, num + 1) new_max = max(num, max_num) if new_max != max_num do IO.puts "#{new_max}" end do_run(new_max, worker) end end # cut off making child nodes at a nesting > 10 defp worker(node_partitioner, parent, nesting) when nesting > 10 do receive do num -> send(parent, num + 1) worker(node_partitioner, parent, nesting) end end # make a worker node with two children defp worker(node_partitioner, parent, nesting) do me = self() left = Node.spawn(NodePartitioner.next_worker(node_partitioner), fn -> worker(node_partitioner, me, nesting + 1) end) right = Node.spawn(NodePartitioner.next_worker(node_partitioner), fn -> worker(node_partitioner, me, nesting + 1) end) do_worker(left, right, parent) end # loop to receive messages, and send the message back to the parent and children defp do_worker(left, right, parent) do 
receive do num -> send(parent, num + 1) send(left, num + 1) send(right, num + 1) end do_worker(left, right, parent) end end
lib/bench.ex
0.746878
0.691015
bench.ex
starcoder
defmodule Graph.Reducers.Bfs do @moduledoc """ This reducer traverses the graph using Breadth-First Search. """ use Graph.Reducer @doc """ Performs a breadth-first traversal of the graph, applying the provided mapping function to each new vertex encountered. NOTE: The algorithm will follow lower-weighted edges first. Returns a list of values returned from the mapper in the order they were encountered. ## Example iex> g = Graph.new |> Graph.add_vertices([1, 2, 3, 4]) ...> g = Graph.add_edges(g, [{1, 3}, {1, 4}, {3, 2}, {2, 3}]) ...> #{__MODULE__}.map(g, fn v -> v end) [1, 3, 4, 2] """ def map(g, fun) when is_function(fun, 1) do g |> reduce([], fn v, results -> {:next, [fun.(v) | results]} end) |> Enum.reverse() end @doc """ Performs a breadth-first traversal of the graph, applying the provided reducer function to each new vertex encountered and the accumulator. NOTE: The algorithm will follow lower-weighted edges first. The result will be the state of the accumulator after the last reduction. 
## Example iex> g = Graph.new |> Graph.add_vertices([1, 2, 3, 4]) ...> g = Graph.add_edges(g, [{1, 3}, {1, 4}, {3, 2}, {2, 3}]) ...> #{__MODULE__}.reduce(g, [], fn v, acc -> {:next, [v|acc]} end) [2, 4, 3, 1] iex> g = Graph.new |> Graph.add_vertices([1, 2, 3, 4]) ...> g = Graph.add_edges(g, [{1, 3}, {1, 4}, {3, 2}, {2, 3}, {4, 5}]) ...> #{__MODULE__}.reduce(g, [], fn 5, acc -> {:skip, acc}; v, acc -> {:next, [v|acc]} end) [2, 4, 3, 1] iex> g = Graph.new |> Graph.add_vertices([1, 2, 3, 4]) ...> g = Graph.add_edges(g, [{1, 3}, {1, 4}, {3, 2}, {2, 3}, {4, 5}]) ...> #{__MODULE__}.reduce(g, [], fn 4, acc -> {:halt, acc}; v, acc -> {:next, [v|acc]} end) [3, 1] """ def reduce(%Graph{vertices: vs} = g, acc, fun) when is_function(fun, 2) do vs # Start with a cost of zero |> Stream.map(fn {id, _} -> {id, 0} end) # Only populate the initial queue with those vertices which have no inbound edges |> Stream.reject(fn {id, _cost} -> inbound_edges?(g, id) end) |> Enum.reduce(PriorityQueue.new(), fn {id, cost}, q -> PriorityQueue.push(q, id, cost) end) |> traverse(g, MapSet.new(), fun, acc) end defp inbound_edges?(%Graph{in_edges: ie}, v_id) do case Map.get(ie, v_id) do nil -> false edges -> MapSet.size(edges) > 0 end end defp traverse(q, %Graph{out_edges: oe, vertices: vertices} = g, visited, fun, acc) do case PriorityQueue.pop(q) do {{:value, v_id}, q1} -> if MapSet.member?(visited, v_id) do traverse(q1, g, visited, fun, acc) else v = Map.get(vertices, v_id) case fun.(v, acc) do {:next, acc2} -> visited = MapSet.put(visited, v_id) v_out = Map.get(oe, v_id, MapSet.new()) q2 = v_out |> MapSet.to_list() |> Enum.reduce(q1, fn id, q -> weight = Graph.Utils.edge_weight(g, v_id, id) PriorityQueue.push(q, id, weight) end) traverse(q2, g, visited, fun, acc2) {:skip, acc2} -> visited = MapSet.put(visited, v_id) traverse(q1, g, visited, fun, acc2) {:halt, acc2} -> acc2 end end {:empty, _} -> acc end end end
lib/graph/reducers/bfs.ex
0.897061
0.708692
bfs.ex
starcoder
defmodule LogWatcher.Tasks.Session do @moduledoc """ Defines a Session struct for use with schemaless changesets. Sessions are not stored in a SQL database. A session represents a directory on the file system that will contain the executable scripts, data and log files necessary to perform long running tasks. Each session has a required `:name` and `:description`, and a unique ID, the `:session_id` (usually a UUID or ULID). """ use TypedStruct require Logger typedstruct do @typedoc "A daptics session on disk somewhere" plugin(TypedStructEctoChangeset) field(:session_id, String.t(), enforce: true) field(:name, String.t(), enforce: true) field(:description, String.t(), enforce: true) field(:session_log_path, String.t(), enforce: true) field(:gen, integer(), default: -1) end @spec new() :: t() def new() do nil_values = @enforce_keys |> Enum.map(fn key -> {key, nil} end) Kernel.struct(__MODULE__, nil_values) end @spec required_fields([atom()]) :: [atom()] def required_fields(fields \\ []) def required_fields([]), do: @enforce_keys def required_fields(fields) when is_list(fields) do @enforce_keys -- @enforce_keys -- fields end @spec all_fields() :: [atom()] def all_fields(), do: Keyword.keys(@changeset_fields) @spec changeset_types() :: [{atom(), atom()}] def changeset_types(), do: @changeset_fields @spec log_file_name(String.t() | t()) :: String.t() def log_file_name(session_id) when is_binary(session_id) do "#{session_id}-sesslog.jsonl" end def log_file_name(%__MODULE__{session_id: session_id}) do "#{session_id}-sesslog.jsonl" end ## PubSub utilities @spec events_topic(String.t() | t()) :: String.t() def events_topic(session_id) when is_binary(session_id) do "session:#{session_id}" end def events_topic(%__MODULE__{session_id: session_id}) do "session:#{session_id}" end @spec subscribe(String.t()) :: :ok | {:error, term()} def subscribe(topic) do _ = Logger.info("#{inspect(self())} subscribing to #{topic}") Phoenix.PubSub.subscribe(LogWatcher.PubSub, topic) end @spec 
broadcast(String.t(), term()) :: :ok | {:error, term()} def broadcast(topic, message) when is_tuple(message) do _ = Logger.info("#{inspect(self())} broadcasting :#{Kernel.elem(message, 0)} to #{topic}") Phoenix.PubSub.broadcast(LogWatcher.PubSub, topic, message) end @spec write_event(t(), atom(), Keyword.t()) :: {:ok, t()} | {:error, term()} def write_event( %__MODULE__{session_id: session_id, session_log_path: session_log_path} = session, event_type, opts \\ [] ) do event = create_event(session, event_type, opts) with {:ok, content} <- Jason.encode(event) do file_name = log_file_name(session_id) log_file_path = Path.join(session_log_path, file_name) File.write(log_file_path, content <> "\n") {:ok, session} end end @spec create_event(t(), atom(), Keyword.t()) :: map() def create_event(%__MODULE__{} = session, event_type, opts \\ []) do event = Map.from_struct(session) |> Map.merge(%{time: LogWatcher.format_utcnow(), event: event_type}) Enum.into(opts, event) end end
apps/log_watcher/lib/log_watcher/tasks/session.ex
0.815526
0.470128
session.ex
starcoder
defmodule Aeutil.Scientific do @moduledoc """ Converts integer to scientific notation. It's used for storing the target in blocks and recalculating the next target """ use Bitwise @highest_target_int 0xFFFF000000000000000000000000000000000000000000000000000000000000 @spec scientific_to_integer(integer()) :: integer() def scientific_to_integer(scientific) do {exp, significand} = break_scientific(scientific) exp3 = exp - 3 case exp >= 0 do true -> bsl(significand, 8 * exp3) false -> bsr(significand, -8 * exp3) end end @spec integer_to_scientific(integer()) :: integer() def integer_to_scientific(integer) do {exp, significand} = int_to_sci(integer, 3) case exp >= 0 do true -> bsl(exp, 24) + significand false -> bsl(-exp, 24) + 0x800000 + significand end end @spec compare_bin_to_significand(binary(), integer(), integer(), integer()) :: boolean() def compare_bin_to_significand(binary, significand, zeros, number_of_bits) do case binary do <<0::size(zeros), integer::size(number_of_bits), _rest::binary()>> -> integer < significand <<0::size(zeros), _rest::binary()>> -> :error _ -> false end end @spec break_scientific(integer()) :: tuple() def break_scientific(scientific) do significand_mask = bsl(1, 24) - 1 exp = scientific |> bxor(significand_mask) |> bsr(24) significand = band(scientific, significand_mask) case band(0x800000, significand) do 0 -> {exp, significand} _ -> {-exp, significand - 0x800000} end end # The Difficulty is calculated from the Target and is used in Sync. # Difficulty is used to select the winning fork of new blocks: # the difficulty of chain of blocks is the sum of the diffculty of each block. 
@spec target_to_difficulty(non_neg_integer()) :: float() def target_to_difficulty(target) do @highest_target_int / scientific_to_integer(target) end # Internal Functions @spec int_to_sci(integer(), integer()) :: tuple() defp int_to_sci(integer, exp) when integer > 0x7FFFFF do int_to_sci(bsr(integer, 8), exp + 1) end defp int_to_sci(integer, exp) when integer < 0x008000 do int_to_sci(bsl(integer, 8), exp - 1) end defp int_to_sci(integer, exp) do {exp, integer} end end
apps/aeutil/lib/scientific.ex
0.696578
0.738669
scientific.ex
starcoder
defmodule BufEdit do @moduledoc """ Programmable buffer editor, similar in concept to [ed](https://en.wikipedia.org/wiki/Ed_(text_editor)). `BufEdit` reads a file into memory and provides a flexible API for editing it and writing it back to a file. Consider the following mix.exs file: ```elixir defmodule MyApp.MixProject do use Mix.Project def project do [ app: :my_app, version: "0.1.0", elixir: "~> 1.6", start_permanent: Mix.env() == :prod, deps: deps() ] end # Run "mix help compile.app" to learn about applications. def application do [ extra_applications: [:logger] ] end # Run "mix help deps" to learn about dependencies. defp deps do [ # {:dep_from_hexpm, "~> 0.3.0"}, # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}, ] end end ``` We'll use `BufEdit` to load the file and jump to a line matching a regular expression: ```elixir iex> buf = BufEdit.load("mix.exs") iex> buf = BufEdit.search(buf, ~r/defp deps/, :down) %BufEdit{ col: 1, filename: "test/fixtures/mix.exs", lines: ["defmodule MyApp.MixProject do", " use Mix.Project", ""], line_num: 22, status: :ok } ``` (The `lines` key value above has been abbreviated in this example) If the line is found, the returned `%BufEdit{}` has a `:status` key set to `:ok`, and the `:line_num` key is set to the new line number. Now, let's say we want to remove the two comment lines from the dependencies list and add a new dependency: ```elixir iex> buf = BufEdit.search(buf, ~r/^\s*#/, :down) %BufEdit{ col: 1, filename: "test/fixtures/mix.exs", lines: ["defmodule MyApp.MixProject do", " use Mix.Project", ""], line_num: 24, status: :ok } ``` Now, we're at line 24. If you want to see line 24, use `line/1`: ```elixir iex> BufEdit.line(buf) " # {:dep_from_hexpm, \\"~> 0.3.0\\"}," ``` Yep, that's the line we're looking for. 
The next step is to delete the two comments: ```elixir iex> buf = BufEdit.delete_lines(buf, 2) %BufEdit{ col: 1, filename: "test/fixtures/mix.exs", lines: ["defmodule MyApp.MixProject do", " use Mix.Project", ""], line_num: 24, status: :ok } iex> BufEdit.line(buf) " ]" ``` Now that the lines are deleted, we're ready to add the new dependency: ```elixir iex> buf = BufEdit.insert_line(buf, " {:buffer, \\"~> 0.1.0\\"}") iex> BufEdit.dump(buf) |> IO.puts() defmodule MyApp.MixProject do use Mix.Project def project do [ app: :my_app, version: "0.1.0", elixir: "~> 1.6", start_permanent: Mix.env() == :prod, deps: deps() ] end # Run "mix help compile.app" to learn about applications. def application do [ extra_applications: [:logger] ] end # Run "mix help deps" to learn about dependencies. defp deps do [ {:buffer, "~> 0.1.0"} ] end end ``` Our new dependency is added! Now it's time to write the file, then we're done: ```elixir iex> BufEdit.save(buf) ``` """ @type t :: %__MODULE__{ filename: String.t(), lines: [String.t()], line_num: integer(), col: integer(), status: :ok | :not_found } defstruct filename: nil, lines: [], line_num: 1, col: 1, status: :ok @doc "Load the file into the buffer" @spec load(filename :: String.t()) :: t() def load(filename) do contents = File.read!(filename) lines = String.split(contents, ~r/\r\n|\n/) %__MODULE__{ filename: filename, lines: lines } end @doc "Dump the BufEdit to a string" @spec dump(t()) :: String.t() def dump(buf) do Enum.join(buf.lines, "\n") end @doc "Save the BufEdit to a file specified by the `:filename` value." @spec save(t()) :: :ok | no_return() def save(buf) do File.write!(buf.filename, dump(buf)) end @doc "Get the current line." @spec line(t()) :: String.t() def line(buf) do Enum.at(buf.lines, buf.line_num - 1) end @doc "Get a list of lines starting from the current line number." 
@spec lines(buf :: t(), length :: integer()) :: [String.t()] def lines(buf, length) do 0..(length - 1) |> Enum.map(fn i -> Enum.at(buf.lines, buf.line_num - 1 + i) end) end @doc "Search for a line using a regular expression." @spec search(buf :: t(), pattern :: Regex.t(), direction :: :down | :up) :: t() def search(buf, pattern, direction \\ :down) do line_with_index = matching_line_with_index(buf, pattern, direction) case line_with_index do {_line, index} -> buf = move_to(buf, index + 1, buf.col) %{buf | status: :ok} _ -> %{buf | status: :not_found} end end @doc "Insert a line at the current line number." @spec insert_line(buf :: t(), line :: String.t()) :: t() def insert_line(%{status: :ok} = buf, line) do lines = List.insert_at(buf.lines, buf.line_num - 1, line) buf |> set(:lines, lines) |> move_relative(1, buf.col) |> set(:status, :ok) end # Skip operation if :status != :ok def insert_line(buf, _line) do buf end @doc "Insert multiple lines at the current line number." @spec insert_lines(buf :: t(), lines :: [String.t()]) :: t() def insert_lines(buf, lines) do new_buf = lines |> Enum.reduce(buf, fn line, buf -> insert_line(buf, line) end) %{new_buf | status: :ok} end @doc "Move to the given line number and column" @spec move_to(buf :: t(), line_num :: integer(), col :: integer()) :: t() def move_to(buf, line_num, col) do %{buf | line_num: line_num, col: col, status: :ok} end @doc "Move to a line offset from the current line." @spec move_relative(buf :: t(), line_num_offset :: integer(), col_offset :: integer()) :: t() def move_relative(buf, line_num_offset, col_offset) do move_to(buf, buf.line_num + line_num_offset, buf.col + col_offset) end @doc "Move to the last line in the file." 
@spec move_to_end(buf :: t()) :: t() def move_to_end(buf) do count = length(buf.lines) move_to(buf, count, buf.col) end @doc "Delete a number of lines from the current line number" @spec delete_lines(buf :: t(), count :: integer()) :: t() def delete_lines(buf, count) do 1..count |> Enum.reduce(buf, fn _n, buf -> delete_line(buf) end) end @doc "Delete the current line" @spec delete_line(buf :: t()) :: t() def delete_line(%{status: :ok} = buf) do lines = List.delete_at(buf.lines, buf.line_num - 1) %{buf | lines: lines, status: :ok} end # Skip operation if :status != :ok def delete_line(buf) do buf end @doc "Replace a sub string within the current line matching pattern." @spec replace_in_line(buf :: t(), search :: String.t(), replace :: String.t()) :: t() def replace_in_line(buf, search, replace) do replace_line(buf, fn _buf, line -> String.replace(line, search, replace) end) end @doc """ Replace the current line with the output of a function. Example: Commenting out a line of Elixir: ``` iex> BufEdit.replace_line(buf, fn _buf, line -> "# \#{line}" end) ``` """ @spec replace_line(t(), (t(), line :: String.t() -> String.t())) :: t() def replace_line(%{status: :ok} = buf, fun) do line = line(buf) buf = delete_line(buf) new_line = fun.(buf, line) insert_line(buf, new_line) end # Skip operation if :status != :ok def replace_line(buf, _fun) do buf end ## PRIVATE FUNCTIONS defp set(buf, key, value) do struct(buf, %{key => value}) end defp matching_line_with_index(buf, pattern, direction) do lines = case direction do :down -> buf.lines :up -> Enum.reverse(buf.lines) end line_count = length(buf.lines) lines |> Enum.with_index() |> Enum.map(fn {line, index} -> case direction do :down -> {line, index} :up -> {line, line_count - index - 1} end end) |> Enum.filter(fn {_line, index} -> at_line_num = index + 1 case direction do :down -> at_line_num >= buf.line_num :up -> at_line_num <= buf.line_num end end) |> Enum.find(fn {line, _index} -> String.match?(line, pattern) end) end end
lib/buf_edit.ex
0.889409
0.862815
buf_edit.ex
starcoder
defmodule Ada.CLI.Markup do @moduledoc """ This module defines semantic helpers that can be used to format CLI-based reports. It includes (among other things) titles, headings, paragraphs (with support for wrapping text), lists and bars. The recommended usage pattern is to build lists of elements and then pass them to `IO.iodata_to_binary/1` for conversion to a printable binary. """ @bar_item "█" @break "\n" @dash "-" @ellipsis "…" @emdash "—" @left_pad " " @space " " alias IO.ANSI alias Ada.CLI.Format.WordWrap @doc "New line separator" def break, do: @break @doc "Space separator" def space, do: @space @doc "Left padding" def left_pad, do: @left_pad @doc """ Returns the text padded on the left, in red color and adds a break at the end. """ def title(contents) do [@left_pad, ANSI.format([:red, contents], true), @break] end @doc """ Separates left text and right text with a continuous line, pushing them to the left and right border of the page. The left text is wrapped in red. E.g. `Left ———————————————————————————————— Right` """ def double_title(left, right, max_length) do separator_length = max_length - String.length(left) - String.length(right) - 2 [ @left_pad, ANSI.format([:red, left], true), @space, String.duplicate(@emdash, separator_length), @space, right, @break ] end @doc """ Returns the text wrappend on the left, in primary color and adds a break at the end. """ def h1(contents) do [@left_pad, primary(contents), @break] end @doc """ Returns the text wrappend on the left, in secondary color and adds a break at the end. """ def h2(contents) do [@left_pad, secondary(contents), @break] end @doc """ Returns the text wrappend on the left, in tertiary color and adds a break at the end. """ def h3(contents) do [@left_pad, tertiary(contents), @break] end @doc """ Returns the text, wrapped at the specified length. All lines are padded on the left and it adds a break at the end. 
""" def p(contents, max_length) do [WordWrap.paragraph(contents, max_length, @left_pad), @break] end @doc """ Returns the text, wrapped at the specified length. All lines are padded on the left. """ def wrap(contents, max_length) do [WordWrap.paragraph(contents, max_length, @left_pad)] end @doc """ Returns a list item, i.e. a definition with a name (wrapped in secondary colour) and a value. Values are pretty printed according to their type: - maps, keyword lists and lists of tuples get expanded one pair per line, with the pair elements separated by a : - lists get expanded one element per line - other values are printed on one line beside the name. """ def list_item(name, values) when is_list(values) do [ h2(name <> ":"), Enum.map(values, fn {k, v} -> [@left_pad, @left_pad, "- #{k}: #{v}", @break] v -> [@left_pad, @left_pad, "- #{v}", @break] end) ] end def list_item(name, value) do [@left_pad, secondary(name <> ":"), @space, value, @break] end @doc "Wraps contents in primary color" def primary(contents) do ANSI.format([:cyan, contents], true) end @doc "Wraps contents in secondary color" def secondary(contents) do ANSI.format([:yellow, contents], true) end @doc "Wraps contents in tertiary color" def tertiary(contents) do ANSI.format([:green, contents], true) end @doc "Returns an emphasis dash, wrapped in spaces" def emdash do [ @space, @emdash, @space ] end @doc "Returns a dash, followed by a space" def dash do [ @dash, @space ] end @doc "Returns a bar of the specified length." def bar(length) do String.duplicate(@bar_item, length) end @doc "Truncates the text at the specified length, appending an ellipsis" def ellipsis(string, max_length) do if String.length(string) >= max_length do truncated_string = string |> String.slice(0, max_length - 1) |> String.trim_trailing() truncated_string <> @ellipsis else string end end end
lib/ada/cli/format/markup.ex
0.868395
0.500366
markup.ex
starcoder
defmodule AWS.ElasticBeanstalk do @moduledoc """ AWS Elastic Beanstalk AWS Elastic Beanstalk makes it easy for you to create, deploy, and manage scalable, fault-tolerant applications running on the Amazon Web Services cloud. For more information about this product, go to the [AWS Elastic Beanstalk](http://aws.amazon.com/elasticbeanstalk/) details page. The location of the latest AWS Elastic Beanstalk WSDL is [https://elasticbeanstalk.s3.amazonaws.com/doc/2010-12-01/AWSElasticBeanstalk.wsdl](https://elasticbeanstalk.s3.amazonaws.com/doc/2010-12-01/AWSElasticBeanstalk.wsdl). To install the Software Development Kits (SDKs), Integrated Development Environment (IDE) Toolkits, and command line tools that enable you to access the API, go to [Tools for Amazon Web Services](http://aws.amazon.com/tools/). **Endpoints** For a list of region-specific endpoints that AWS Elastic Beanstalk supports, go to [Regions and Endpoints](https://docs.aws.amazon.com/general/latest/gr/rande.html#elasticbeanstalk_region) in the *Amazon Web Services Glossary*. """ @doc """ Cancels in-progress environment configuration update or application version deployment. """ def abort_environment_update(client, input, options \\ []) do request(client, "AbortEnvironmentUpdate", input, options) end @doc """ Applies a scheduled managed action immediately. A managed action can be applied only if its status is `Scheduled`. Get the status and action ID of a managed action with `DescribeEnvironmentManagedActions`. """ def apply_environment_managed_action(client, input, options \\ []) do request(client, "ApplyEnvironmentManagedAction", input, options) end @doc """ Add or change the operations role used by an environment. After this call is made, Elastic Beanstalk uses the associated operations role for permissions to downstream services during subsequent calls acting on this environment. 
For more information, see [Operations roles](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/iam-operationsrole.html) in the *AWS Elastic Beanstalk Developer Guide*. """ def associate_environment_operations_role(client, input, options \\ []) do request(client, "AssociateEnvironmentOperationsRole", input, options) end @doc """ Checks if the specified CNAME is available. """ def check_d_n_s_availability(client, input, options \\ []) do request(client, "CheckDNSAvailability", input, options) end @doc """ Create or update a group of environments that each run a separate component of a single application. Takes a list of version labels that specify application source bundles for each of the environments to create or update. The name of each environment and other required information must be included in the source bundles in an environment manifest named `env.yaml`. See [Compose Environments](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/environment-mgmt-compose.html) for details. """ def compose_environments(client, input, options \\ []) do request(client, "ComposeEnvironments", input, options) end @doc """ Creates an application that has one configuration template named `default` and no application versions. """ def create_application(client, input, options \\ []) do request(client, "CreateApplication", input, options) end @doc """ Creates an application version for the specified application. You can create an application version from a source bundle in Amazon S3, a commit in AWS CodeCommit, or the output of an AWS CodeBuild build as follows: Specify a commit in an AWS CodeCommit repository with `SourceBuildInformation`. Specify a build in an AWS CodeBuild with `SourceBuildInformation` and `BuildConfiguration`. Specify a source bundle in S3 with `SourceBundle` Omit both `SourceBuildInformation` and `SourceBundle` to use the default sample application. 
<note> After you create an application version with a specified Amazon S3 bucket and key location, you can't change that Amazon S3 location. If you change the Amazon S3 location, you receive an exception when you attempt to launch an environment from the application version. </note> """ def create_application_version(client, input, options \\ []) do request(client, "CreateApplicationVersion", input, options) end @doc """ Creates an AWS Elastic Beanstalk configuration template, associated with a specific Elastic Beanstalk application. You define application configuration settings in a configuration template. You can then use the configuration template to deploy different versions of the application with the same configuration settings. Templates aren't associated with any environment. The `EnvironmentName` response element is always `null`. Related Topics <ul> <li> `DescribeConfigurationOptions` </li> <li> `DescribeConfigurationSettings` </li> <li> `ListAvailableSolutionStacks` </li> </ul> """ def create_configuration_template(client, input, options \\ []) do request(client, "CreateConfigurationTemplate", input, options) end @doc """ Launches an AWS Elastic Beanstalk environment for the specified application using the specified configuration. """ def create_environment(client, input, options \\ []) do request(client, "CreateEnvironment", input, options) end @doc """ Create a new version of your custom platform. """ def create_platform_version(client, input, options \\ []) do request(client, "CreatePlatformVersion", input, options) end @doc """ Creates a bucket in Amazon S3 to store application versions, logs, and other files used by Elastic Beanstalk environments. The Elastic Beanstalk console and EB CLI call this API the first time you create an environment in a region. If the storage location already exists, `CreateStorageLocation` still returns the bucket name but does not create a new bucket. 
""" def create_storage_location(client, input, options \\ []) do request(client, "CreateStorageLocation", input, options) end @doc """ Deletes the specified application along with all associated versions and configurations. The application versions will not be deleted from your Amazon S3 bucket. <note> You cannot delete an application that has a running environment. </note> """ def delete_application(client, input, options \\ []) do request(client, "DeleteApplication", input, options) end @doc """ Deletes the specified version from the specified application. <note> You cannot delete an application version that is associated with a running environment. </note> """ def delete_application_version(client, input, options \\ []) do request(client, "DeleteApplicationVersion", input, options) end @doc """ Deletes the specified configuration template. <note> When you launch an environment using a configuration template, the environment gets a copy of the template. You can delete or modify the environment's copy of the template without affecting the running environment. </note> """ def delete_configuration_template(client, input, options \\ []) do request(client, "DeleteConfigurationTemplate", input, options) end @doc """ Deletes the draft configuration associated with the running environment. Updating a running environment with any configuration changes creates a draft configuration set. You can get the draft configuration using `DescribeConfigurationSettings` while the update is in progress or if the update fails. The `DeploymentStatus` for the draft configuration indicates whether the deployment is in process or has failed. The draft configuration remains in existence until it is deleted with this action. """ def delete_environment_configuration(client, input, options \\ []) do request(client, "DeleteEnvironmentConfiguration", input, options) end @doc """ Deletes the specified version of a custom platform. 
""" def delete_platform_version(client, input, options \\ []) do request(client, "DeletePlatformVersion", input, options) end @doc """ Returns attributes related to AWS Elastic Beanstalk that are associated with the calling AWS account. The result currently has one set of attributes—resource quotas. """ def describe_account_attributes(client, input, options \\ []) do request(client, "DescribeAccountAttributes", input, options) end @doc """ Retrieve a list of application versions. """ def describe_application_versions(client, input, options \\ []) do request(client, "DescribeApplicationVersions", input, options) end @doc """ Returns the descriptions of existing applications. """ def describe_applications(client, input, options \\ []) do request(client, "DescribeApplications", input, options) end @doc """ Describes the configuration options that are used in a particular configuration template or environment, or that a specified solution stack defines. The description includes the values the options, their default values, and an indication of the required action on a running environment if an option value is changed. """ def describe_configuration_options(client, input, options \\ []) do request(client, "DescribeConfigurationOptions", input, options) end @doc """ Returns a description of the settings for the specified configuration set, that is, either a configuration template or the configuration set associated with a running environment. When describing the settings for the configuration set associated with a running environment, it is possible to receive two sets of setting descriptions. One is the deployed configuration set, and the other is a draft configuration of an environment that is either in the process of deployment or that failed to deploy. 
Related Topics <ul> <li> `DeleteEnvironmentConfiguration` </li> </ul> """ def describe_configuration_settings(client, input, options \\ []) do request(client, "DescribeConfigurationSettings", input, options) end @doc """ Returns information about the overall health of the specified environment. The **DescribeEnvironmentHealth** operation is only available with AWS Elastic Beanstalk Enhanced Health. """ def describe_environment_health(client, input, options \\ []) do request(client, "DescribeEnvironmentHealth", input, options) end @doc """ Lists an environment's completed and failed managed actions. """ def describe_environment_managed_action_history(client, input, options \\ []) do request(client, "DescribeEnvironmentManagedActionHistory", input, options) end @doc """ Lists an environment's upcoming and in-progress managed actions. """ def describe_environment_managed_actions(client, input, options \\ []) do request(client, "DescribeEnvironmentManagedActions", input, options) end @doc """ Returns AWS resources for this environment. """ def describe_environment_resources(client, input, options \\ []) do request(client, "DescribeEnvironmentResources", input, options) end @doc """ Returns descriptions for existing environments. """ def describe_environments(client, input, options \\ []) do request(client, "DescribeEnvironments", input, options) end @doc """ Returns list of event descriptions matching criteria up to the last 6 weeks. <note> This action returns the most recent 1,000 events from the specified `NextToken`. </note> """ def describe_events(client, input, options \\ []) do request(client, "DescribeEvents", input, options) end @doc """ Retrieves detailed information about the health of instances in your AWS Elastic Beanstalk. This operation requires [enhanced health reporting](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/health-enhanced.html). 
""" def describe_instances_health(client, input, options \\ []) do request(client, "DescribeInstancesHealth", input, options) end @doc """ Describes a platform version. Provides full details. Compare to `ListPlatformVersions`, which provides summary information about a list of platform versions. For definitions of platform version and other platform-related terms, see [AWS Elastic Beanstalk Platforms Glossary](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/platforms-glossary.html). """ def describe_platform_version(client, input, options \\ []) do request(client, "DescribePlatformVersion", input, options) end @doc """ Disassociate the operations role from an environment. After this call is made, Elastic Beanstalk uses the caller's permissions for permissions to downstream services during subsequent calls acting on this environment. For more information, see [Operations roles](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/iam-operationsrole.html) in the *AWS Elastic Beanstalk Developer Guide*. """ def disassociate_environment_operations_role(client, input, options \\ []) do request(client, "DisassociateEnvironmentOperationsRole", input, options) end @doc """ Returns a list of the available solution stack names, with the public version first and then in reverse chronological order. """ def list_available_solution_stacks(client, input, options \\ []) do request(client, "ListAvailableSolutionStacks", input, options) end @doc """ Lists the platform branches available for your account in an AWS Region. Provides summary information about each platform branch. For definitions of platform branch and other platform-related terms, see [AWS Elastic Beanstalk Platforms Glossary](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/platforms-glossary.html). """ def list_platform_branches(client, input, options \\ []) do request(client, "ListPlatformBranches", input, options) end @doc """ Lists the platform versions available for your account in an AWS Region. 
Provides summary information about each platform version. Compare to `DescribePlatformVersion`, which provides full details about a single platform version. For definitions of platform version and other platform-related terms, see [AWS Elastic Beanstalk Platforms Glossary](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/platforms-glossary.html). """ def list_platform_versions(client, input, options \\ []) do request(client, "ListPlatformVersions", input, options) end @doc """ Return the tags applied to an AWS Elastic Beanstalk resource. The response contains a list of tag key-value pairs. Elastic Beanstalk supports tagging of all of its resources. For details about resource tagging, see [Tagging Application Resources](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/applications-tagging-resources.html). """ def list_tags_for_resource(client, input, options \\ []) do request(client, "ListTagsForResource", input, options) end @doc """ Deletes and recreates all of the AWS resources (for example: the Auto Scaling group, load balancer, etc.) for a specified environment and forces a restart. """ def rebuild_environment(client, input, options \\ []) do request(client, "RebuildEnvironment", input, options) end @doc """ Initiates a request to compile the specified type of information of the deployed environment. Setting the `InfoType` to `tail` compiles the last lines from the application server log files of every Amazon EC2 instance in your environment. Setting the `InfoType` to `bundle` compresses the application server log files for every Amazon EC2 instance into a `.zip` file. Legacy and .NET containers do not support bundle logs. Use `RetrieveEnvironmentInfo` to obtain the set of logs. 
Related Topics <ul> <li> `RetrieveEnvironmentInfo` </li> </ul> """ def request_environment_info(client, input, options \\ []) do request(client, "RequestEnvironmentInfo", input, options) end @doc """ Causes the environment to restart the application container server running on each Amazon EC2 instance. """ def restart_app_server(client, input, options \\ []) do request(client, "RestartAppServer", input, options) end @doc """ Retrieves the compiled information from a `RequestEnvironmentInfo` request. Related Topics <ul> <li> `RequestEnvironmentInfo` </li> </ul> """ def retrieve_environment_info(client, input, options \\ []) do request(client, "RetrieveEnvironmentInfo", input, options) end @doc """ Swaps the CNAMEs of two environments. """ def swap_environment_c_n_a_m_es(client, input, options \\ []) do request(client, "SwapEnvironmentCNAMEs", input, options) end @doc """ Terminates the specified environment. """ def terminate_environment(client, input, options \\ []) do request(client, "TerminateEnvironment", input, options) end @doc """ Updates the specified application to have the specified properties. <note> If a property (for example, `description`) is not provided, the value remains unchanged. To clear these properties, specify an empty string. </note> """ def update_application(client, input, options \\ []) do request(client, "UpdateApplication", input, options) end @doc """ Modifies lifecycle settings for an application. """ def update_application_resource_lifecycle(client, input, options \\ []) do request(client, "UpdateApplicationResourceLifecycle", input, options) end @doc """ Updates the specified application version to have the specified properties. <note> If a property (for example, `description`) is not provided, the value remains unchanged. To clear properties, specify an empty string. 
</note> """ def update_application_version(client, input, options \\ []) do request(client, "UpdateApplicationVersion", input, options) end @doc """ Updates the specified configuration template to have the specified properties or configuration option values. <note> If a property (for example, `ApplicationName`) is not provided, its value remains unchanged. To clear such properties, specify an empty string. </note> Related Topics <ul> <li> `DescribeConfigurationOptions` </li> </ul> """ def update_configuration_template(client, input, options \\ []) do request(client, "UpdateConfigurationTemplate", input, options) end @doc """ Updates the environment description, deploys a new application version, updates the configuration settings to an entirely new configuration template, or updates select configuration option values in the running environment. Attempting to update both the release and configuration is not allowed and AWS Elastic Beanstalk returns an `InvalidParameterCombination` error. When updating the configuration settings to a new template or individual settings, a draft configuration is created and `DescribeConfigurationSettings` for this environment returns two setting descriptions with different `DeploymentStatus` values. """ def update_environment(client, input, options \\ []) do request(client, "UpdateEnvironment", input, options) end @doc """ Update the list of tags applied to an AWS Elastic Beanstalk resource. Two lists can be passed: `TagsToAdd` for tags to add or update, and `TagsToRemove`. Elastic Beanstalk supports tagging of all of its resources. For details about resource tagging, see [Tagging Application Resources](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/applications-tagging-resources.html). 
If you create a custom IAM user policy to control permission to this operation, specify one of the following two virtual actions (or both) instead of the API operation name: <dl> <dt>elasticbeanstalk:AddTags</dt> <dd> Controls permission to call `UpdateTagsForResource` and pass a list of tags to add in the `TagsToAdd` parameter. </dd> <dt>elasticbeanstalk:RemoveTags</dt> <dd> Controls permission to call `UpdateTagsForResource` and pass a list of tag keys to remove in the `TagsToRemove` parameter. </dd> </dl> For details about creating a custom user policy, see [Creating a Custom User Policy](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/AWSHowTo.iam.managed-policies.html#AWSHowTo.iam.policies). """ def update_tags_for_resource(client, input, options \\ []) do request(client, "UpdateTagsForResource", input, options) end @doc """ Takes a set of configuration settings and either a configuration template or environment, and determines whether those values are valid. This action returns a list of messages indicating any errors or warnings associated with the selection of option values. 
""" def validate_configuration_settings(client, input, options \\ []) do request(client, "ValidateConfigurationSettings", input, options) end @spec request(AWS.Client.t(), binary(), map(), list()) :: {:ok, Poison.Parser.t() | nil, Poison.Response.t()} | {:error, Poison.Parser.t()} | {:error, HTTPoison.Error.t()} defp request(client, action, input, options) do client = %{client | service: "elasticbeanstalk"} host = build_host("elasticbeanstalk", client) url = build_url(host, client) headers = [ {"Host", host}, {"Content-Type", "application/x-www-form-urlencoded"} ] input = Map.merge(input, %{"Action" => action, "Version" => "2010-12-01"}) payload = AWS.Util.encode_query(input) headers = AWS.Request.sign_v4(client, "POST", url, headers, payload) case HTTPoison.post(url, payload, headers, options) do {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} -> {:ok, nil, response} {:ok, %HTTPoison.Response{status_code: 200, body: body} = response} -> {:ok, AWS.Util.decode_xml(body), response} {:ok, %HTTPoison.Response{body: body}} -> error = AWS.Util.decode_xml(body) {:error, error} {:error, %HTTPoison.Error{reason: reason}} -> {:error, %HTTPoison.Error{reason: reason}} end end defp build_host(_endpoint_prefix, %{region: "local"}) do "localhost" end defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do "#{endpoint_prefix}.#{region}.#{endpoint}" end defp build_url(host, %{:proto => proto, :port => port}) do "#{proto}://#{host}:#{port}/" end end
lib/aws/elastic_beanstalk.ex
0.831383
0.504883
elastic_beanstalk.ex
starcoder
defmodule Piazza.Crypto.License do @moduledoc """ Genserver for managing the license verification lifecycle. The protocol is basically: 1. On start, decode the license and pass to either on_failure or on_verify depending on the result. 2. At a fixed interval, reverify the license, in case it's expired/changed. The verification interval can be configured with ``` config :piazza_core, :license_interval, interval_in_ms ``` """ use GenServer alias Piazza.Crypto.RSA defmodule State, do: defstruct [:license, :on_failure, :on_verify, :public_key, :parsed] @interval Application.get_env(:piazza_core, :license_interval, 60 * 1000) def start_link(license, public_key, on_failure, on_verify \\ fn _, state -> state end) when is_function(on_failure) and is_function(on_verify) do GenServer.start_link(__MODULE__, [license, public_key, on_failure, on_verify], name: __MODULE__) end def fetch(pid \\ __MODULE__), do: GenServer.call(pid, :fetch) def init([license, public_key, on_failure, on_verify]) do :timer.send_interval(@interval, :verify) send self(), :verify {:ok, %State{ license: license, public_key: extract_key(public_key), on_failure: on_failure, on_verify: on_verify}} end def handle_call(:fetch, _, %State{parsed: parsed} = state), do: {:reply, parsed, state} def handle_info( :verify, %State{license: license, on_failure: on_failure, on_verify: on_verify, public_key: pub} = state ) do case RSA.decrypt(license, pub) do {:ok, res} -> {:noreply, update_state(on_verify.(res), state)} error -> on_failure.(error) {:noreply, state} end end defp update_state({parsed, license}, state), do: %{state | parsed: parsed, license: license} defp update_state(parsed, state), do: %{state | parsed: parsed} defp extract_key({atom, name}) do :code.priv_dir(atom) |> Path.join(name) |> File.read!() |> extract_key() end defp extract_key(pem) when is_binary(pem), do: ExPublicKey.loads!(pem) end
lib/crypto/license.ex
0.800848
0.727395
license.ex
starcoder
defmodule Nerves.Grove.OLED.Display do
  @moduledoc """
  Seeed Studio [Grove OLED Display 96×96](http://wiki.seeedstudio.com/wiki/Grove_-_OLED_Display_1.12%22)

  ## Datasheet

  http://garden.seeedstudio.com/images/8/82/SSD1327_datasheet.pdf

  # Example

      alias Nerves.Grove.OLED

      {:ok, pid} = OLED.Display.start_link(address)
      OLED.Display.reset(pid)
      OLED.Display.clear(pid)
      OLED.Display.set_text_position(pid, 0, 0)
      OLED.Display.put_string(pid, "Hello, world")
  """

  import Bitwise

  # Default I2C address of the Grove OLED module.
  @default_address 0x3C

  # The first byte of every I2C transfer tells the SSD1327 controller
  # whether the following byte is a command or display data.
  @command_mode 0x80
  @data_mode 0x40

  # 8x8 monochrome bitmap font for ASCII code points 32-127 (96 glyphs).
  # Each tuple holds the glyph's 8 column bytes, LSB = topmost pixel.
  @default_font {
    {0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00}, {0x00,0x00,0x5F,0x00,0x00,0x00,0x00,0x00},
    {0x00,0x00,0x07,0x00,0x07,0x00,0x00,0x00}, {0x00,0x14,0x7F,0x14,0x7F,0x14,0x00,0x00},
    {0x00,0x24,0x2A,0x7F,0x2A,0x12,0x00,0x00}, {0x00,0x23,0x13,0x08,0x64,0x62,0x00,0x00},
    {0x00,0x36,0x49,0x55,0x22,0x50,0x00,0x00}, {0x00,0x00,0x05,0x03,0x00,0x00,0x00,0x00},
    {0x00,0x1C,0x22,0x41,0x00,0x00,0x00,0x00}, {0x00,0x41,0x22,0x1C,0x00,0x00,0x00,0x00},
    {0x00,0x08,0x2A,0x1C,0x2A,0x08,0x00,0x00}, {0x00,0x08,0x08,0x3E,0x08,0x08,0x00,0x00},
    {0x00,0xA0,0x60,0x00,0x00,0x00,0x00,0x00}, {0x00,0x08,0x08,0x08,0x08,0x08,0x00,0x00},
    {0x00,0x60,0x60,0x00,0x00,0x00,0x00,0x00}, {0x00,0x20,0x10,0x08,0x04,0x02,0x00,0x00},
    {0x00,0x3E,0x51,0x49,0x45,0x3E,0x00,0x00}, {0x00,0x00,0x42,0x7F,0x40,0x00,0x00,0x00},
    {0x00,0x62,0x51,0x49,0x49,0x46,0x00,0x00}, {0x00,0x22,0x41,0x49,0x49,0x36,0x00,0x00},
    {0x00,0x18,0x14,0x12,0x7F,0x10,0x00,0x00}, {0x00,0x27,0x45,0x45,0x45,0x39,0x00,0x00},
    {0x00,0x3C,0x4A,0x49,0x49,0x30,0x00,0x00}, {0x00,0x01,0x71,0x09,0x05,0x03,0x00,0x00},
    {0x00,0x36,0x49,0x49,0x49,0x36,0x00,0x00}, {0x00,0x06,0x49,0x49,0x29,0x1E,0x00,0x00},
    {0x00,0x00,0x36,0x36,0x00,0x00,0x00,0x00}, {0x00,0x00,0xAC,0x6C,0x00,0x00,0x00,0x00},
    {0x00,0x08,0x14,0x22,0x41,0x00,0x00,0x00}, {0x00,0x14,0x14,0x14,0x14,0x14,0x00,0x00},
    {0x00,0x41,0x22,0x14,0x08,0x00,0x00,0x00}, {0x00,0x02,0x01,0x51,0x09,0x06,0x00,0x00},
    {0x00,0x32,0x49,0x79,0x41,0x3E,0x00,0x00}, {0x00,0x7E,0x09,0x09,0x09,0x7E,0x00,0x00},
    {0x00,0x7F,0x49,0x49,0x49,0x36,0x00,0x00}, {0x00,0x3E,0x41,0x41,0x41,0x22,0x00,0x00},
    {0x00,0x7F,0x41,0x41,0x22,0x1C,0x00,0x00}, {0x00,0x7F,0x49,0x49,0x49,0x41,0x00,0x00},
    {0x00,0x7F,0x09,0x09,0x09,0x01,0x00,0x00}, {0x00,0x3E,0x41,0x41,0x51,0x72,0x00,0x00},
    {0x00,0x7F,0x08,0x08,0x08,0x7F,0x00,0x00}, {0x00,0x41,0x7F,0x41,0x00,0x00,0x00,0x00},
    {0x00,0x20,0x40,0x41,0x3F,0x01,0x00,0x00}, {0x00,0x7F,0x08,0x14,0x22,0x41,0x00,0x00},
    {0x00,0x7F,0x40,0x40,0x40,0x40,0x00,0x00}, {0x00,0x7F,0x02,0x0C,0x02,0x7F,0x00,0x00},
    {0x00,0x7F,0x04,0x08,0x10,0x7F,0x00,0x00}, {0x00,0x3E,0x41,0x41,0x41,0x3E,0x00,0x00},
    {0x00,0x7F,0x09,0x09,0x09,0x06,0x00,0x00}, {0x00,0x3E,0x41,0x51,0x21,0x5E,0x00,0x00},
    {0x00,0x7F,0x09,0x19,0x29,0x46,0x00,0x00}, {0x00,0x26,0x49,0x49,0x49,0x32,0x00,0x00},
    {0x00,0x01,0x01,0x7F,0x01,0x01,0x00,0x00}, {0x00,0x3F,0x40,0x40,0x40,0x3F,0x00,0x00},
    {0x00,0x1F,0x20,0x40,0x20,0x1F,0x00,0x00}, {0x00,0x3F,0x40,0x38,0x40,0x3F,0x00,0x00},
    {0x00,0x63,0x14,0x08,0x14,0x63,0x00,0x00}, {0x00,0x03,0x04,0x78,0x04,0x03,0x00,0x00},
    {0x00,0x61,0x51,0x49,0x45,0x43,0x00,0x00}, {0x00,0x7F,0x41,0x41,0x00,0x00,0x00,0x00},
    {0x00,0x02,0x04,0x08,0x10,0x20,0x00,0x00}, {0x00,0x41,0x41,0x7F,0x00,0x00,0x00,0x00},
    {0x00,0x04,0x02,0x01,0x02,0x04,0x00,0x00}, {0x00,0x80,0x80,0x80,0x80,0x80,0x00,0x00},
    {0x00,0x01,0x02,0x04,0x00,0x00,0x00,0x00}, {0x00,0x20,0x54,0x54,0x54,0x78,0x00,0x00},
    {0x00,0x7F,0x48,0x44,0x44,0x38,0x00,0x00}, {0x00,0x38,0x44,0x44,0x28,0x00,0x00,0x00},
    {0x00,0x38,0x44,0x44,0x48,0x7F,0x00,0x00}, {0x00,0x38,0x54,0x54,0x54,0x18,0x00,0x00},
    {0x00,0x08,0x7E,0x09,0x02,0x00,0x00,0x00}, {0x00,0x18,0xA4,0xA4,0xA4,0x7C,0x00,0x00},
    {0x00,0x7F,0x08,0x04,0x04,0x78,0x00,0x00}, {0x00,0x00,0x7D,0x00,0x00,0x00,0x00,0x00},
    {0x00,0x80,0x84,0x7D,0x00,0x00,0x00,0x00}, {0x00,0x7F,0x10,0x28,0x44,0x00,0x00,0x00},
    {0x00,0x41,0x7F,0x40,0x00,0x00,0x00,0x00}, {0x00,0x7C,0x04,0x18,0x04,0x78,0x00,0x00},
    {0x00,0x7C,0x08,0x04,0x7C,0x00,0x00,0x00}, {0x00,0x38,0x44,0x44,0x38,0x00,0x00,0x00},
    {0x00,0xFC,0x24,0x24,0x18,0x00,0x00,0x00}, {0x00,0x18,0x24,0x24,0xFC,0x00,0x00,0x00},
    {0x00,0x00,0x7C,0x08,0x04,0x00,0x00,0x00}, {0x00,0x48,0x54,0x54,0x24,0x00,0x00,0x00},
    {0x00,0x04,0x7F,0x44,0x00,0x00,0x00,0x00}, {0x00,0x3C,0x40,0x40,0x7C,0x00,0x00,0x00},
    {0x00,0x1C,0x20,0x40,0x20,0x1C,0x00,0x00}, {0x00,0x3C,0x40,0x30,0x40,0x3C,0x00,0x00},
    {0x00,0x44,0x28,0x10,0x28,0x44,0x00,0x00}, {0x00,0x1C,0xA0,0xA0,0x7C,0x00,0x00,0x00},
    {0x00,0x44,0x64,0x54,0x4C,0x44,0x00,0x00}, {0x00,0x08,0x36,0x41,0x00,0x00,0x00,0x00},
    {0x00,0x00,0x7F,0x00,0x00,0x00,0x00,0x00}, {0x00,0x41,0x36,0x08,0x00,0x00,0x00,0x00},
    {0x00,0x02,0x01,0x01,0x02,0x01,0x00,0x00}, {0x00,0x02,0x05,0x05,0x02,0x00,0x00,0x00}
  }

  @doc "Opens the I2C bus and connects to the display at `address`."
  @spec start_link(byte) :: {:ok, pid} | {:error, any}
  def start_link(address \\ @default_address) do
    ElixirALE.I2C.start_link("i2c-1", address)
  end

  @doc """
  Re-initializes the controller with the vendor-recommended power-up
  sequence, then turns the panel on and selects the full drawing area.
  """
  @spec reset(pid) :: :ok
  def reset(pid) do
    # 0xFD: unlock the command interface
    send_commands(pid, <<0xFD, 0x12>>)
    off(pid)
    # 96 rows on this panel
    set_multiplex_ratio(pid, 95)
    set_start_line(pid, 0)
    # NOTE(review): offset of 96 copied from the vendor init sequence —
    # confirm against the SSD1327 datasheet before changing.
    set_display_offset(pid, 96)
    set_vertical_mode(pid)
    # 0xAB: enable the internal VDD regulator
    send_commands(pid, <<0xAB, 0x01>>)
    # 100 nit
    set_contrast_level(pid, 0x53)
    # 0xB1: phase length
    send_commands(pid, <<0xB1, 0x51>>)
    # 0xB3: front clock divider / oscillator frequency
    send_commands(pid, <<0xB3, 0x01>>)
    # 0xB9: select the default linear grayscale table
    send_commands(pid, <<0xB9>>)
    # 0xBC: pre-charge voltage
    send_commands(pid, <<0xBC, 0x08>>)
    # 0xBE: VCOMH voltage
    send_commands(pid, <<0xBE, 0x07>>)
    # 0xB6: second pre-charge period
    send_commands(pid, <<0xB6, 0x01>>)
    # 0xD5: function selection B
    send_commands(pid, <<0xD5, 0x62>>)
    set_normal_mode(pid)
    set_activate_scroll(pid, false)
    on(pid)
    # Give the panel time to power up (ms).
    :timer.sleep(100)
    set_row_address(pid, 0, 95)
    set_column_address(pid, 8, 8 + 47)
  end

  @doc "Turns the display panel on."
  @spec on(pid) :: :ok
  def on(pid) do
    send_command(pid, 0xAF)
  end

  @doc "Turns the display panel off (sleep mode)."
  @spec off(pid) :: :ok
  def off(pid) do
    send_command(pid, 0xAE)
  end

  @doc "Blanks the whole screen by writing zeroed pixel data."
  @spec clear(pid) :: :ok
  def clear(pid) do
    # TODO: optimize more once https://github.com/fhunleth/elixir_ale/issues/20 is fixed.
    # Write 16 zero bytes per transfer to amortize the I2C overhead.
    block = :erlang.list_to_binary([@data_mode, String.duplicate("\x00", 16)])

    Enum.each(1..48, fn _ ->
      Enum.each(1..div(96, 16), fn _ ->
        ElixirALE.I2C.write(pid, block)
      end)
    end)
  end

  @doc """
  Positions the text cursor at character cell (`row`, `column`).

  Each character cell is 8 pixels tall and 8 pixels wide (4 data columns,
  since the SSD1327 packs two pixels per byte).
  """
  @spec set_text_position(pid, byte, byte) :: :ok
  def set_text_position(pid, row, column) do
    set_column_address(pid, 0x08 + column * 4, 0x08 + 47)
    set_row_address(pid, 0x00 + row * 8, 0x07 + row * 8)
  end

  @doc "Writes a string at the current text position, one 8x8 glyph per byte."
  @spec put_string(pid, binary) :: :ok
  # Fixed: the empty-string clause used to return nil, contradicting the :ok spec.
  def put_string(_pid, <<>>), do: :ok

  def put_string(pid, <<head, rest::binary>>) do
    put_char(pid, head)
    put_string(pid, rest)
  end

  @doc """
  Draws a single character glyph at the current text position.

  Bytes outside the printable ASCII range 32..127 are rendered as a space.
  """
  @spec put_char(pid, byte) :: :ok
  def put_char(pid, char) when is_integer(char) and char in 32..127 do
    # Hoisted out of the loops: the glyph does not change per pixel.
    glyph = elem(@default_font, char - 32)

    # The SSD1327 packs two horizontally adjacent 4-bit grayscale pixels
    # into each data byte, so emit 4 column pairs x 8 rows per glyph.
    Enum.each([0, 2, 4, 6], fn i ->
      Enum.each(0..7, fn j ->
        left = band(bsr(elem(glyph, i), j), 0x01)
        right = band(bsr(elem(glyph, i + 1), j), 0x01)

        pixels =
          bor(
            if(left != 0, do: 0xF0, else: 0x00),
            if(right != 0, do: 0x0F, else: 0x00)
          )

        send_data(pid, pixels)
      end)
    end)
  end

  # Fixed: previously only 0..31 was handled and bytes > 127 raised a
  # FunctionClauseError; now every non-printable byte becomes a space.
  def put_char(pid, char) when is_integer(char) do
    put_char(pid, ?\s)
  end

  @doc "Sets the column address window (0x15 command)."
  @spec set_column_address(pid, byte, byte) :: :ok
  def set_column_address(pid, start, end_) do
    send_commands(pid, <<0x15, start, end_>>)
  end

  @doc "Sets the row address window (0x75 command)."
  @spec set_row_address(pid, byte, byte) :: :ok
  def set_row_address(pid, start, end_) do
    send_commands(pid, <<0x75, start, end_>>)
  end

  @doc "Sets the display contrast (0x81 command)."
  @spec set_contrast_level(pid, byte) :: :ok
  def set_contrast_level(pid, level) do
    send_commands(pid, <<0x81, level>>)
  end

  @doc "Selects horizontal address increment mode and resets the drawing window."
  @spec set_horizontal_mode(pid) :: :ok
  def set_horizontal_mode(pid) do
    send_commands(pid, <<0xA0, 0x42>>)
    set_row_address(pid, 0, 95)
    set_column_address(pid, 8, 8 + 47)
  end

  @doc "Selects vertical address increment mode."
  @spec set_vertical_mode(pid) :: :ok
  def set_vertical_mode(pid) do
    send_commands(pid, <<0xA0, 0x46>>)
  end

  @doc "Sets the display RAM start line (0xA1 command)."
  @spec set_start_line(pid, 0..127) :: :ok
  def set_start_line(pid, row) do
    send_commands(pid, <<0xA1, row>>)
  end

  @doc "Sets the vertical display offset (0xA2 command)."
  @spec set_display_offset(pid, 0..127) :: :ok
  def set_display_offset(pid, row) do
    send_commands(pid, <<0xA2, row>>)
  end

  @doc "Shows display RAM contents as-is (0xA4 command)."
  @spec set_normal_mode(pid) :: :ok
  def set_normal_mode(pid) do
    send_command(pid, 0xA4)
  end

  @doc "Inverts all pixels (0xA7 command)."
  @spec set_inverse_mode(pid) :: :ok
  def set_inverse_mode(pid) do
    send_command(pid, 0xA7)
  end

  @doc "Sets the multiplex ratio, i.e. the number of active rows minus one."
  @spec set_multiplex_ratio(pid, 16..128) :: :ok
  def set_multiplex_ratio(pid, ratio) do
    send_commands(pid, <<0xA8, ratio>>)
  end

  @doc "Enables (`true`) or disables (`false`) hardware scrolling."
  @spec set_activate_scroll(pid, boolean) :: :ok
  def set_activate_scroll(pid, false), do: send_command(pid, 0x2E)
  def set_activate_scroll(pid, true), do: send_command(pid, 0x2F)

  # Sends every byte of the binary as an individual command.
  # Fixed: the empty-binary clause used to return nil, contradicting the spec.
  @spec send_commands(pid, binary) :: :ok
  defp send_commands(_pid, <<>>), do: :ok

  defp send_commands(pid, <<head, rest::binary>>) do
    send_command(pid, head)
    send_commands(pid, rest)
  end

  # Writes a single command byte, prefixed with the command-mode marker.
  @spec send_command(pid, byte) :: :ok
  defp send_command(pid, command) do
    ElixirALE.I2C.write(pid, <<@command_mode, command>>)
  end

  # Writes a single data byte, prefixed with the data-mode marker.
  @spec send_data(pid, byte) :: :ok
  defp send_data(pid, data) do
    ElixirALE.I2C.write(pid, <<@data_mode, data>>)
  end
end
lib/nerves_grove/oled_display.ex
0.590897
0.461927
oled_display.ex
starcoder
defmodule RDF.XSD.String do
  @moduledoc """
  `RDF.XSD.Datatype` for XSD strings.
  """

  @type valid_value :: String.t()

  use RDF.XSD.Datatype.Primitive,
    name: "string",
    id: RDF.Utils.Bootstrapping.xsd_iri("string")

  alias RDF.XSD

  # Constraining facets applicable to xsd:string.
  def_applicable_facet XSD.Facets.MinLength
  def_applicable_facet XSD.Facets.MaxLength
  def_applicable_facet XSD.Facets.Length
  def_applicable_facet XSD.Facets.Pattern

  # Facet conformance callbacks. All length facets are measured in grapheme
  # count (String.length/1), not bytes.
  @doc false
  def min_length_conform?(min_length, value, _lexical) do
    String.length(value) >= min_length
  end

  @doc false
  def max_length_conform?(max_length, value, _lexical) do
    String.length(value) <= max_length
  end

  @doc false
  def length_conform?(length, value, _lexical) do
    String.length(value) == length
  end

  @doc false
  def pattern_conform?(pattern, value, _lexical) do
    XSD.Facets.Pattern.conform?(pattern, value)
  end

  # Every lexical form is a valid xsd:string; just normalize to a binary.
  @impl XSD.Datatype
  @spec lexical_mapping(String.t(), Keyword.t()) :: valid_value
  def lexical_mapping(lexical, _), do: to_string(lexical)

  # NOTE(review): the `value` return type is presumably defined by
  # `use RDF.XSD.Datatype.Primitive` — confirm in that macro.
  @impl XSD.Datatype
  @spec elixir_mapping(any, Keyword.t()) :: value
  def elixir_mapping(value, _), do: to_string(value)

  # Casting of other literals (and IRIs) into xsd:string.
  @impl RDF.Literal.Datatype
  def do_cast(literal_or_iri)

  # An IRI is cast to its string form.
  def do_cast(%RDF.IRI{value: value}), do: new(value)

  def do_cast(%datatype{} = literal) do
    cond do
      XSD.Decimal.datatype?(literal) ->
        # Integral decimals go through xsd:integer so the canonical form has
        # no fraction; Decimal.to_integer/1 raises for non-integral values,
        # in which case we fall back to the decimal's canonical lexical.
        try do
          literal.value
          |> Decimal.to_integer()
          |> XSD.Integer.new()
          |> cast()
        rescue
          _ -> default_canonical_cast(literal, datatype)
        end

      # we're catching XSD.Floats with this too
      XSD.Double.datatype?(datatype) ->
        cond do
          XSD.Numeric.negative_zero?(literal) ->
            new("-0")

          XSD.Numeric.zero?(literal) ->
            new("0")

          # Values in this range are rendered in plain decimal notation
          # (no exponent) by casting through xsd:decimal — presumably per
          # the XPath casting rules; confirm against the spec.
          literal.value >= 0.000_001 and literal.value < 1_000_000 ->
            literal.value
            |> XSD.Decimal.new()
            |> cast()

          true ->
            default_canonical_cast(literal, datatype)
        end

      # Date/time values carry their timezone into the string form.
      XSD.DateTime.datatype?(literal) ->
        literal
        |> XSD.DateTime.canonical_lexical_with_zone()
        |> new()

      XSD.Time.datatype?(literal) ->
        literal
        |> XSD.Time.canonical_lexical_with_zone()
        |> new()

      # Any other XSD datatype: use its canonical lexical form.
      RDF.Literal.Datatype.Registry.xsd_datatype_struct?(datatype) ->
        default_canonical_cast(literal, datatype)

      true ->
        super(literal)
    end
  end

  # Builds an xsd:string from a literal's canonical lexical representation.
  defp default_canonical_cast(literal, datatype) do
    literal
    |> datatype.canonical_lexical()
    |> new()
  end
end
lib/rdf/xsd/datatypes/string.ex
0.647018
0.506469
string.ex
starcoder
defmodule GGity.Geom.Segment do
  @moduledoc false

  alias GGity.{Draw, Geom, Plot}

  @type t() :: %__MODULE__{}
  @type plot() :: %Plot{}
  @type record() :: map()
  @type mapping() :: map()

  defstruct data: nil,
            mapping: nil,
            stat: :identity,
            position: :identity,
            key_glyph: :line,
            alpha: 1,
            color: "black",
            size: 0,
            custom_attributes: nil

  @doc false
  # Builds a segment geom from an aesthetic mapping and keyword options.
  @spec new(mapping(), keyword()) :: Geom.Segment.t()
  def new(mapping, options) do
    struct(Geom.Segment, [{:mapping, mapping} | options])
  end

  @doc false
  # Renders one SVG line element per record in `data`.
  @spec draw(Geom.Segment.t(), list(map()), plot()) :: iolist()
  def draw(%Geom.Segment{} = geom_segment, data, plot), do: segments(geom_segment, data, plot)

  defp segments(%Geom.Segment{} = geom_segment, data, %Plot{scales: scales} = plot) do
    # Transforms provided by the plot's scales for every mapped aesthetic.
    scale_transforms =
      geom_segment.mapping
      |> Map.keys()
      |> Enum.reduce(%{}, fn aesthetic, mapped ->
        Map.put(mapped, aesthetic, Map.get(scales[aesthetic], :transform))
      end)

    # Fixed (unmapped) aesthetics become constant transforms; any mapped
    # aesthetic overrides its fixed counterpart via Map.merge/2.
    transforms =
      geom_segment
      |> Map.take([:alpha, :color, :size])
      |> Enum.reduce(%{}, fn {aesthetic, fixed_value}, fixed ->
        Map.put(fixed, aesthetic, fn _value -> fixed_value end)
      end)
      |> Map.merge(scale_transforms)

    Enum.map(data, fn row -> segment(row, transforms, geom_segment, plot) end)
  end

  defp segment(row, transforms, geom_segment, plot) do
    transformed_values = [
      transforms.x.(row[geom_segment.mapping.x]),
      transforms.x.(row[geom_segment.mapping.xend]),
      transforms.y.(row[geom_segment.mapping.y]),
      transforms.y.(row[geom_segment.mapping.yend]),
      transforms.alpha.(row[geom_segment.mapping[:alpha]]),
      transforms.color.(row[geom_segment.mapping[:color]]),
      transforms.size.(row[geom_segment.mapping[:size]])
    ]

    # BUG FIX: the label list previously contained eight keys (an extra
    # :fill) for seven values, so Enum.zip/2 silently dropped :stroke_width
    # and the size aesthetic was emitted as the fill color instead of the
    # line width. Labels now pair 1:1 with transformed_values.
    labelled_values =
      Enum.zip(
        [:x, :xend, :y, :yend, :stroke_opacity, :stroke, :stroke_width],
        transformed_values
      )

    options = Keyword.drop(labelled_values, [:x, :xend, :y, :yend])
    x1 = labelled_values[:x] + plot.area_padding
    x2 = labelled_values[:xend] + plot.area_padding
    # SVG's y-axis points down: flip against plot.width and scale by the
    # aspect ratio before padding.
    y1 = (plot.width - labelled_values[:y]) / plot.aspect_ratio + plot.area_padding
    y2 = (plot.width - labelled_values[:yend]) / plot.aspect_ratio + plot.area_padding
    custom_attributes = GGity.Layer.custom_attributes(geom_segment, plot, row)

    Draw.line([{:x1, x1}, {:x2, x2}, {:y1, y1}, {:y2, y2} | options] ++ custom_attributes)
  end
end
lib/ggity/geom/segment.ex
0.891525
0.606935
segment.ex
starcoder
defmodule Centaurus.Types do
  @moduledoc """
  A collection of types used throughout Centaurus.

  The types are split into several different modules based on expected usage.
  """

  @typedoc """
  The socket type.
  """
  @opaque socket :: reference

  @typedoc """
  The stream type.
  """
  @opaque stream :: reference

  @typedoc """
  The IP Address of the peer connection.
  Used when connecting.
  """
  @type peer_addr :: ip_addr

  @typedoc """
  The port of the peer connection.
  Used when connecting.
  """
  @type peer_port :: port_number

  # BUG FIX: was `:inet.ip_addresss` (typo) — an unknown remote type that
  # Dialyzer rejects. The Erlang type is :inet.ip_address/0.
  @type ip_addr :: :inet.ip_address
  @type port_number :: :inet.port_number

  @typedoc """
  Options applied to Quic sockets and streams, as a keyword list.
  """
  # Added: this type was referenced as `Types.quic_options` in the specs of
  # the nested config modules but was never defined.
  @type quic_options :: Keyword.t()

  @typedoc """
  The errors that can occur either from the runtime or setting up a configuration.
  """
  @type error :: any

  @typedoc """
  Internal types
  """
  @opaque internal :: any

  @typedoc """
  The error codes Quic uses to close streams and sockets.
  None: Communication is complete and there was no error.
  """
  @type error_code :: :none | any

  defmodule SocketConfig do
    @moduledoc """
    The struct for Quic sockets.

    The struct has the following components:

    bind_addr: The IP Address of the local connection
    bind_port: The port of the local connection
    server_name: The server name for the certificates
    server_key: The server's private key for the certificates
    certificates: The path of where the certificates are located
    options: The connection's options (see options for details)
    """

    # TODO: Add certificates and server_name to enforced keys.
    @enforce_keys []
    defstruct socket_pid: nil,
              bind_address: "0.0.0.0:0",
              server_name: "",
              private_key: nil,
              options: [],
              certificates: nil

    alias Centaurus.Types

    @type t :: %__MODULE__{
            socket_pid: pid,
            bind_address: String.t(),
            server_name: String.t(),
            private_key: Path.t(),
            options: Types.quic_options(),
            certificates: Path.t()
          }

    @doc """
    Applies the given options to the socket configuration.

    Currently a no-op placeholder that returns the configuration unchanged.
    """
    @spec set_opts(__MODULE__.t(), opts) :: {:ok, __MODULE__.t()} | {:error, Types.error()}
          when opts: Types.quic_options()
    def set_opts(socket_config, _opts) do
      {:ok, socket_config}
    end
  end

  defmodule StreamConfig do
    @moduledoc """
    The struct for streams.

    The struct has the following components:

    stream_id: A pid to identify the stream owner
    socket_id: Ties the stream to the Quic socket
    direction: Either Bi-directional or Uni-directional access
    options: The stream's options (see options for details)
    data: Data to read from the stream.
    """

    alias Centaurus.Types

    @enforce_keys []
    defstruct stream_pid: nil,
              stream_type: :bi,
              options: []

    @type t :: %__MODULE__{
            stream_pid: pid,
            stream_type: :bi | :uni,
            options: Types.quic_options()
          }

    @doc """
    Applies the given options to the stream configuration.

    Currently a no-op placeholder that returns the configuration unchanged.
    """
    @spec set_opts(__MODULE__.t(), opts) :: {:ok, __MODULE__.t()} | {:error, Types.error()}
          when opts: Types.quic_options()
    def set_opts(stream_config, _opts) do
      {:ok, stream_config}
    end
  end

  defmodule Options do
    @moduledoc false

    defstruct timeout: nil

    @typedoc """
    The possible set of configuration options for Quic sockets and streams.
    """
    @type t :: %__MODULE__{
            timeout: non_neg_integer()
          }
  end
end
lib/centaurus/types.ex
0.660391
0.521167
types.ex
starcoder
defmodule Pushito do
  use Application

  @moduledoc ~s"""
  Pushito is an APNs library over the HTTP/2 API.

  This is the Main module and the one you should use in your applications.

  ## Connect to APNs

  In order to create connections we will use the `Pushito.connect/1` function. It only has one
  argument which is the config information.

  The `config` is a `Pushito.Config` struct and its format is:

  ```elixir
  defstruct type: :cert,
            name: nil,
            apple_host: nil,
            cert_file: nil,
            key_file: nil,
            token_key_file: nil,
            token_key_id: nil,
            team_id: nil
  ```

  `Pushito.Config` provides setters in order to set the properties.

  There are two types of connection with APNs, `cert` and `token`. The first one is with
  `Provider Certificate` and the second one is with `Provider Authentication Tokens`.
  Depending on the type you should fill some properties in the `config` structure.

  - For `Provider Certificate` type Apple gives us a `cer` file. We need to convert it to
    2 `pem` files, you can check [here](convert-cer-to-pem.html) how to do it
  - For `Provider Authentication Token` Apple gives us a `.p8` file. It is fine, no conversion
    is needed.

  ### Mandatory Properties for both types

  - `type`: can be `:cert` or `:token`, default value is `:cert`
  - `apple_host`: the Apple host. Currently only `api.development.push.apple.com` or
    `api.push.apple.com` are available.

  ```elixir
  import Pushito.Config

  config = new()
           |> add_type(:cert)
           |> add_host("api.development.push.apple.com")
  ```

  ### Optional Properties for both types

  - `name`: this is the name of the connection, it must be an `:atom`. If this property is set
    we could refer the connection by this name

  ```elixir
  config |> add_name(:my_first_connection)
  ```

  ### Mandatory Properties for `Provider Certificate` type

  - `cert_file`: path to your certification file.
  - `key_file`: path to key file

  ```elixir
  config
  |> add_cert_file("priv/cert2.pem")
  |> add_key_file("priv/key2-noenc.pem")
  ```

  ### Mandatory Properties for `Provider Authentication Tokens` type

  - `token_key_file` is the path to the token key file provided by Apple
  - `token_key_id` is the key id provided by Apple
  - `team_id` is your team developer id

  ```elixir
  config
  |> add_token_key_file("priv/APNsAuthKey_1234567890.p8")
  |> add_token_key_id("1234567890")
  |> add_team_id("THEATEAM")
  ```

  ### Connect

  Once we have the `config` structure we can connect to APNs using `Pushito.connect/1`

  ```elixir
  {:ok, connection_pid} = Pushito.connect(config)
  ```

  `connection_pid` is the connection pid. It is needed if the connection has no name, you must
  refer the connection with the `connection_pid`

  ## Push Notifications

  **Important** The process which calls `Pushito.connect/1` should be the same as the one which
  calls `Pushito.push/2`

  Once we have the connection done we can start pushing messages to APNs. In order to do that
  we will use `Pushito.push/2`. The first argument is the connection_name or the connection pid
  and the second one is the notification itself.

  The notification is a `Pushito.Notification` structure with the format:

  ```elixir
  defstruct device_id: nil,
            apns_expiration: 0,
            apns_priority: 10,
            apns_topic: nil,
            apns_id: nil,
            apns_collapse_id: nil,
            token: nil,
            timeout: 10,
            message: %{}
  ```

  Depending on the connection type there are some mandatory properties to fill.

  ### Mandatory Properties for both types

  - `device_id`: the device id we want to send the message
  - `apns_topic`: the topic identifying your App
  - `message`: the message we want to send

  ```elixir
  import Pushito.Notification

  notification = new()
                 |> add_device_id("bd5c3ad01bbe4d884bf2fe8801ed77e94a71bc2e9de937c84f745f54eb4cb2f4")
                 |> add_topic("com.inaka.myapp")
                 |> add_message(%{:aps => %{:alert => "you have a message!!"}})
  ```

  ### Mandatory Properties for `Provider Authentication Tokens` type

  - `token`: The JWT token needed for push notifications with tokens. In order to get a token
    we have to call `Pushito.generate_token/1` where the argument is the `connection_name` or
    the connection id.

  ```elixir
  token = Pushito.generate_token(:my_token_connection)

  notification |> add_token(token)
  ```

  ### Optional Properties for both types

  - `timeout`: the number of seconds to wait until throwing a timeout. If you get a timeout it
    means you exceeded the time but maybe your notification was sent correctly. Default value
    is 10 seconds

  Those are the remaining apns fields. You can check them
  [here](https://developer.apple.com/library/content/documentation/NetworkingInternet/Conceptual/RemoteNotificationsPG/CommunicatingwithAPNs.html#//apple_ref/doc/uid/TP40008194-CH11-SW1)

  - `apns_expiration`: default value is 0
  - `apns_priority`: default value is 10
  - `apns_id`: can be ignored. APNs will assign one and add it to the response
  - `apns_collapse_id`

  ```elixir
  notification
  |> add_timeout(20)
  |> add_expiration(0)
  |> add_priority(10)
  |> add_id("ID")
  |> add_collapse_id("collapse_id")
  ```

  ### Response

  If you don't get a `timeout` after pushing a message you must receive a response. Every
  request to APNs servers returns a Response. The Responses are `Pushito.Response` structs
  with the format:

  ```elixir
  defstruct status: nil,
            headers: [],
            body: :no_body
  ```

  - status: is the http2 result code. You can check the
    [HTTP/2 Response from APN](https://developer.apple.com/library/content/documentation/NetworkingInternet/Conceptual/RemoteNotificationsPG/CommunicatingwithAPNs.html#//apple_ref/doc/uid/TP40008194-CH11-SW1)
    section
  - headers: is a list with the response headers
  - body: is a list with the body or `:no_body` when no body is returned

  ### Timeout

  If the call to `Pushito.push/2` returns a `{:timeout, stream_id}` it means your process
  exceeded the timeout time waiting for a response from APNs. That could be caused because your
  `timeout` should be greater, because the network went down or maybe other causes.

  If you get a timeout it doesn't mean your notification wasn't delivered correctly for sure.
  If the network went down `chatterbox` (the HTTP/2 client `pushito` relies on) will try to
  connect again and it will send the message when the network goes up. If that is the case the
  caller process will receive a message on its mail box with the format
  `{:apns_response, connection_pid, stream_id, response}` where the `connection_pid` is the
  connection pid (same as `Process.whereis(connection_name)`) and the `stream_id` is the
  notification identifier returned in the timeout tuple.

  ## Reconnections

  If something unexpected happens and the `chatterbox` connection with APNs crashes `pushito`
  will send a message `{:reconnecting, connection_pid}` to the client process, that means
  `pushito` lost the connection and it is trying to reconnect. Once the connection has been
  recovered a `{:connection_up, connection_pid}` message will be sent.

  We implemented an Exponential Backoff strategy.

  ## Closing Connections

  Apple recommends us to keep our connections open and avoid opening and closing very often.
  You can check the
  [Best Practices for Managing Connections](https://developer.apple.com/library/content/documentation/NetworkingInternet/Conceptual/RemoteNotificationsPG/CommunicatingwithAPNs.html).

  But when closing a connection makes sense `pushito` gives us the function `Pushito.close/1`
  where the parameter is the connection's name or pid. After closing a connection the name will
  be available for new connections again.
  """

  @type connection_name :: atom

  @doc """
  Application callback for starting Pushito.
  """
  def start(_type, _args) do
    Pushito.Supervisor.start_link
  end

  ## Client API

  @doc """
  Creates a connection with APNs.

  The calling process becomes the connection owner: only it may later call
  `Pushito.push/2` on this connection, and it is the process that will receive
  `{:apns_response, ...}`, `{:reconnecting, ...}` and `{:connection_up, ...}`
  messages.

  ## Example

  We need a valid `Pushito.Config` structure, you can see above how to create it:

      {:ok, connection_pid} = Pushito.connect config
      {:ok, #PID<0.233.0>}

  """
  @spec connect(Pushito.Config.t) :: {:ok, pid}
  def connect(config) do
    Pushito.Supervisor.start_connection(config, self())
  end

  @doc """
  Closes a connection by connection name or connection pid.

  ## Example

  It will close the given connection name

      Pushito.close :my_connection
      :ok

  """
  @spec close(connection_name | pid) :: :ok
  def close(connection) do
    Pushito.Connection.close connection
  end

  @doc """
  Pushes a notification to APNs over the given connection.

  We need a valid `Pushito.Notification` struct before pushing anything, you can
  see above how to create it. The first parameter can be the connection's name
  (if the connection has a name) or the connection's pid.

  ## Example

      Pushito.push :my_connection, notification
      %Pushito.Response{body: :no_body,
                        headers: [{"apns-id", "34F4B4F4-ADB6-982F-EB23-36632837520C"}],
                        status: 200}

  That was a successful request but it can return errors or timeout, let's force
  a timeout:

      notification = notification |> Pushito.Notification.add_timeout(0)
      :ok
      Pushito.push :my_connection, notification
      {:timeout, 3}

  But it only means the time exceeded the timeout, we can see in the process
  mailbox if it worked:

      flush()
      {:apns_response, #PID<0.250.0>, 3,
       %Pushito.Response{body: :no_body,
                         headers: [{"apns-id", "AA2C383F-2222-DC0B-B4B8-BA2E8A4F46F4"}],
                         status: 200}}
      :ok

  This function can return also an error. This function only can be called by
  the process which started the connection (it called `Pushito.connect/1`
  previously). If it is called by a different process it will return
  `{:error, :not_connection_owner}`.
  """
  @spec push(connection_name | pid, Pushito.Notification.t) ::
    Pushito.Response.t | {:timeout, integer} | {:error, :not_connection_owner}
  def push(connection, notification) do
    Pushito.Connection.push(connection, notification)
  end

  @doc """
  Generates a JWT token in order to push notifications.

  This only makes sense when the connection type is `token`. The lifetime of
  each token will be 1 hour, then it will expire.

  ## Example

  In order to create the token, the mandatory fields for `token` type
  connections must have been added:

      config
      |> add_token_key_file("priv/APNsAuthKey_1234567890.p8")
      |> add_token_key_id("1234567890")
      |> add_team_id("THEATEAM")

  Then we can create the token with the connection name or the connection pid
  as an argument:

      token = Pushito.generate_token :my_connection
      "<KEY>"

  """
  @spec generate_token(connection_name | pid) :: String.t
  def generate_token(connection) do
    import Joken

    config = Pushito.Connection.get_config(connection)
    # The private key comes from the .p8 file Apple provides; the claims and
    # headers follow Apple's provider-token requirements (ES256, kid header).
    key = JOSE.JWK.from_pem_file(config.token_key_file)

    token()
    |> with_claims(%{"iss" => config.team_id, "iat" => :os.system_time(:seconds)})
    |> with_header_arg("alg", "ES256")
    |> with_header_arg("typ", "JWT")
    |> with_header_arg("kid", config.token_key_id)
    |> sign(es256(key))
    |> get_compact
  end
end
lib/pushito.ex
0.854111
0.851459
pushito.ex
starcoder
defmodule Hrx do
  @moduledoc """
  # HRX

  [Human Readable Archive](https://github.com/google/hrx) parser for Elixir.

  HRX files are useful for loading data for a test suite where creating multiple files is not
  desired. The HRX format is easy to read and manually edit for quicker iteration.

  For more information, please see the official Github repository:
  [https://github.com/google/hrx](https://github.com/google/hrx)

  ## Quick Usage

  If we take this example HRX file called `test-suite.hrx`:

  ```hrx
  <===> test1/input.scss
  ul {
    margin-left: 1em;
    li {
      list-style-type: none;
    }
  }

  <===> test1/output.css
  ul {
    margin-left: 1em;
  }

  ul li {
    list-style-type: none;
  }
  ```

  We can load it simply with:

  ```elixir
  {:ok, archive} = Hrx.load("test-suite.hrx")

  {:ok, contents} = Hrx.Archive.read(archive, "test1/input.scss")

  {:error, :enoent} = Hrx.Archive.read(archive, "non-existant-file.txt")
  ```

  ## Installation

  If [available in Hex](https://hex.pm/docs/publish), the package can be installed
  by adding `hrx` to your list of dependencies in `mix.exs`:

  ```elixir
  def deps do
    [
      {:hrx, "~> 0.1.0"}
    ]
  end
  ```
  """

  alias Hrx.Archive
  alias Hrx.Entry
  alias Hrx.Parser

  @doc """
  Load an HRX archive.

  Returns `{:ok, %Hrx.Archive{}}` on success, or `{:error, reason}` with a
  human-readable string when the file cannot be read or parsed.
  """
  @spec load(String.t()) :: {:error, String.t()} | {:ok, Hrx.Archive.t()}
  def load(path) do
    with {:ok, contents} <- File.read(path),
         {:ok, boundary_length} <- get_boundary_length(contents),
         {:ok, archive, "", _, _, _} <- Parser.parse(contents, context: %{boundary: boundary_length}) do
      {:ok, %Archive{entries: to_entries(archive)}}
    else
      # File loading error (atoms come from :file, e.g. :enoent)
      {:error, reason} when is_atom(reason) ->
        {:error, :file.format_error(reason) |> to_string()}

      # Generic error from this module or the parser
      {:error, reason} when is_binary(reason) ->
        {:error, reason}

      # Parsing error with position info
      {:error, reason, _rest, _context, _line, _byte_offset} ->
        {:error, "Error processing archive: #{reason}"}

      _ ->
        {:error, "Unknown error occurred"}
    end
  end

  # Builds the archive's entry map, keyed by each entry's path.
  defp to_entries(archive) when is_list(archive) do
    archive
    |> Enum.map(&Entry.new/1)
    |> Enum.map(&{&1.path, &1})
    |> Map.new()
  end

  defp get_boundary_length(<<"<", rest::binary>>) do
    # Look for the initial boundary so we can count the number of '='s in it.
    # Because HRX files can be nested, we have to check to make sure the
    # boundaries are exactly the same for each file. Nested archives will have
    # a different number to distinguish them.
    case :binary.match(rest, ">") do
      {start, _} -> {:ok, start}
      _ -> {:error, "Invalid archive format. Missing initial boundary."}
    end
  end

  # BUG FIX: previously there was no clause for content that does not start
  # with "<", so loading a malformed file crashed with a FunctionClauseError
  # instead of returning the documented {:error, _} tuple.
  defp get_boundary_length(_contents) do
    {:error, "Invalid archive format. Missing initial boundary."}
  end
end
lib/hrx.ex
0.742795
0.894835
hrx.ex
starcoder
defmodule Breezex.Util do
  @moduledoc """
  Module responsible for transforming arguments for requests.
  """

  @doc """
  Unwraps request responses, raising if there's an error
  """
  @spec bang_output({:ok, any, any}) :: {any, any}
  def bang_output({:ok, body, headers}), do: {body, headers}
  def bang_output({:error, error}), do: raise(to_string(error))

  @doc """
  Returns a string of format "YYYY-MM-DD" for a date

  ## Example

      iex> Breezex.Util.date_to_YYYYMMDD(%Date{day: 3, month: 5, year: 2018})
      "2018-05-03"

  """
  # BUG FIX: the doctest previously called date_to_DDMMYYYY/1 while showing
  # the "YYYY-MM-DD" result, so the doctest could never pass.
  @spec date_to_YYYYMMDD(Date.t() | DateTime.t()) :: String.t()
  def date_to_YYYYMMDD(date) do
    "#{zero_pad(date.year, 4)}-#{zero_pad(date.month, 2)}-#{zero_pad(date.day, 2)}"
  end

  @doc ~S"""
  Returns a string of format "DD-MM-YYYY" for a date

  ## Example

      iex> Breezex.Util.date_to_DDMMYYYY(%Date{day: 3, month: 5, year: 2018})
      "03-05-2018"

  """
  @spec date_to_DDMMYYYY(Date.t() | DateTime.t()) :: String.t()
  def date_to_DDMMYYYY(date) do
    "#{zero_pad(date.day, 2)}-#{zero_pad(date.month, 2)}-#{zero_pad(date.year, 4)}"
  end

  # Left-pads an integer with zeros to the given width, e.g. 5 -> "05".
  @spec zero_pad(integer, pos_integer) :: String.t()
  defp zero_pad(value, width) do
    value
    |> Integer.to_string()
    |> String.pad_leading(width, "0")
  end

  # Below is copied and slightly modified from lob-elixir

  @doc """
  Transforms a map of request params to a URL encoded query string.

  ## Example

      iex> Breezex.Util.build_query_string(%{count: 1, person_id: 5})
      "?count=1&person_id=5"

  """
  @spec build_query_string(map) :: String.t()
  def build_query_string(params) when is_map(params) do
    query_string =
      params
      |> Enum.reduce([], &(&2 ++ transform_argument(&1)))
      |> URI.encode_query()

    case query_string do
      "" -> ""
      _ -> "?" <> query_string
    end
  end

  # merge_variables must be JSON-encoded rather than URL-encoded.
  @spec transform_argument({any, any}) :: list
  defp transform_argument({:merge_variables, v}),
    do: transform_argument({"merge_variables", v})

  defp transform_argument({"merge_variables", v}) do
    [{"merge_variables", Poison.encode!(v), [{"Content-Type", "application/json"}]}]
  end

  # Lists become repeated "key[]" params.
  defp transform_argument({k, v}) when is_list(v) do
    Enum.map(v, fn e -> {"#{to_string(k)}[]", to_string(e)} end)
  end

  # For context on the format of the struct see:
  # https://github.com/benoitc/hackney/issues/292
  defp transform_argument({k, %{local_path: file_path}}) do
    [{:file, file_path, {"form-data", [name: to_string(k), filename: file_path]}, []}]
  end

  # Nested maps become "key[sub_key]" params.
  defp transform_argument({k, v}) when is_map(v) do
    Enum.map(v, fn {sub_k, sub_v} ->
      {"#{to_string(k)}[#{to_string(sub_k)}]", to_string(sub_v)}
    end)
  end

  defp transform_argument({k, v}) do
    [{to_string(k), to_string(v)}]
  end
end
lib/breezex/util.ex
0.863348
0.404272
util.ex
starcoder
defmodule Absinthe.Adapter do @moduledoc """ Absinthe supports an adapter mechanism that allows developers to define their schema using one code convention (eg, `snake_cased` fields and arguments), but accept query documents and return results (including names in errors) in another (eg, `camelCase`). Adapters aren't a part of GraphQL, but a utility that Absinthe adds so that both client and server can use use conventions most natural to them. Absinthe ships with two adapters: * `Absinthe.Adapter.LanguageConventions`, which expects schemas to be defined in `snake_case` (the standard Elixir convention), translating to/from `camelCase` for incoming query documents and outgoing results. (This is the default as of v0.3.) * `Absinthe.Adapter.Passthrough`, which is a no-op adapter and makes no modifications. (Note at the current time this does not support introspection if you're using camelized conventions). To set an adapter, you can set an application configuration value: ``` config :absinthe, adapter: Absinthe.Adapter.TheAdapterName ``` Or, you can provide it as an option to `Absinthe.run/3`: ``` Absinthe.run(query, MyApp.Schema, adapter: Absinthe.Adapter.TheAdapterName) ``` Notably, this means you're able to switch adapters on case-by-case basis. In a Phoenix application, this means you could even support using different adapters for different clients. A custom adapter module must merely implement the `Absinthe.Adapter` protocol, in many cases with `use Absinthe.Adapter` and only overriding the desired functions. ## Writing Your Own Considering the default implementation of the callbacks handle traversing ASTs for you, there's a good chance all you may need to implement in your adapter is `to_internal_name/2` and `to_external_name/2`. Check out `Absinthe.Adapter.LanguageConventions` for a good example. Note that types that are defined external to your application (including the introspection types) may not be compatible if you're using a different adapter. 
""" alias Absinthe.Execution @type t :: atom defmacro __using__(_) do quote do require Logger @behaviour unquote(__MODULE__) alias Absinthe.Execution alias Absinthe.Language def load_variables(variables) do variables |> map_var end defp do_load_variables(item) when is_map(item) do item |> map_var end defp do_load_variables(item) when is_list(item) do item |> list_var end defp do_load_variables(item) do item end defp list_var(items) do items |> list_var([]) end defp list_var([], acc), do: :lists.reverse(acc) defp list_var([item | rest], acc) do list_var(rest, [do_load_variables(item) | acc]) end defp map_var(items) do items |> Map.to_list |> map_var([]) end defp map_var([], acc), do: :maps.from_list(acc) defp map_var([{k, v} | rest], acc) do item = {to_internal_name(k, :key_name), do_load_variables(v)} map_var(rest, [item | acc]) end def load_document(node), do: adapt(node, :load) def dump_document(node), do: adapt(node, :dump) @ignore_nodes [ Language.EnumTypeDefinition, Language.UnionTypeDefinition, Language.ScalarTypeDefinition, Language.StringValue, Language.BooleanValue, Language.IntValue, Language.FloatValue, Language.EnumValue ] # Rename a AST node and traverse children def adapt(%Language.Document{definitions: definitions} = node, adaptation) do %{ node | definitions: definitions |> Enum.map(&adapt(&1, adaptation)), } end def adapt(%Language.VariableDefinition{variable: variable} = node, adaptation) do %{ node | variable: Map.update!(variable, :name, &do_adapt(&1, :variable, adaptation)) } end def adapt(%Language.Directive{} = node, adaptation) do %{node | arguments: Enum.map(node.arguments, &adapt(&1, adaptation)) } end def adapt(%Language.Variable{} = node, adaptation) do %{node | name: do_adapt(node.name, :variable, adaptation)} end def adapt(%Language.OperationDefinition{} = node, adaptation) do %{node | name: node.name |> do_adapt(:operation, adaptation), selection_set: adapt(node.selection_set, adaptation), variable_definitions: 
node.variable_definitions |> Enum.map(&adapt(&1, adaptation)) } end def adapt(%Language.ObjectDefinition{} = node, adaptation) do %{node | fields: Enum.map(node.fields, &adapt(&1, adaptation)) } end def adapt(%Language.InputObjectDefinition{} = node, adaptation) do %{node | fields: Enum.map(node.fields, &adapt(&1, adaptation)) } end def adapt(%Language.Fragment{} = node, adaptation) do %{node | directives: node.directives |> Enum.map(&adapt(&1, adaptation)), selection_set: adapt(node.selection_set, adaptation), } end def adapt(%Language.InlineFragment{} = node, adaptation) do %{node | directives: node.directives |> Enum.map(&adapt(&1, adaptation)), selection_set: adapt(node.selection_set, adaptation)} end def adapt(%Language.FragmentSpread{} = node, adaptation) do %{node | directives: node.directives |> Enum.map(&adapt(&1, adaptation))} end def adapt(%Language.SelectionSet{} = node, adaptation) do %{node | selections: node.selections |> Enum.map(&adapt(&1, adaptation))} end def adapt(%Language.Field{} = node, adaptation) do %{node | name: node.name |> do_adapt(:field, adaptation), selection_set: adapt(node.selection_set, adaptation), arguments: node.arguments |> Enum.map(&adapt(&1, adaptation)), directives: node.directives |> Enum.map(&adapt(&1, adaptation)), } end def adapt(%Language.FieldDefinition{} = node, adaptation) do %{node | name: node.name |> do_adapt(:field, adaptation), arguments: node.arguments |> Enum.map(&adapt(&1, adaptation)), } end def adapt(%Language.InputValueDefinition{} = node, adaptation) do %{node | name: node.name |> do_adapt(:field, adaptation), } end def adapt(%Language.ObjectValue{} = node, adaptation) do %{node | fields: node.fields |> Enum.map(&adapt(&1, adaptation))} end def adapt(%Language.ObjectField{} = node, adaptation) do %{node | name: node.name |> do_adapt(:field, adaptation), value: adapt(node.value, adaptation)} end def adapt(%Language.Argument{} = node, adaptation) do %{node | name: node.name |> do_adapt(:argument, 
adaptation), value: adapt(node.value, adaptation)} end def adapt(%Language.ListValue{} = node, adaptation) do %{node | values: Enum.map(node.values, &adapt(&1, adaptation))} end def adapt(%Language.InterfaceDefinition{} = node, adaptation) do %{node | fields: node.fields |> Enum.map(&adapt(&1, adaptation))} end def adapt(%{__struct__: str} = node, _) when str in @ignore_nodes do node end def adapt(nil, _) do nil end def adapt(other, _) do Logger.warn "Absinthe: #{__MODULE__} could not adapt #{inspect other}" other end defp do_adapt(value, role, :load) do to_internal_name(value, role) end defp do_adapt(value, role, :dump) do to_external_name(value, role) end def dump_results(%{data: data} = results) do %{results | data: do_dump_results(data)} end def dump_results(results) do results end # Rename a result data value and traverse children defp do_dump_results(node) when is_map(node) do for {key, val} <- node, into: %{} do case key do name when is_binary(name) -> {to_external_name(name, :result), do_dump_results(val)} other -> {key, do_dump_results(val)} end end end defp do_dump_results([node|rest]) do [do_dump_results(node)|do_dump_results(rest)] end defp do_dump_results(node) do node end def to_internal_name(external_name, _role), do: external_name def to_external_name(internal_name, _role), do: internal_name def format_error(%{name: name, role: role, value: value}, locations) when is_function(value) do external_name = name |> to_external_name(role) %{ message: value.(external_name) |> to_string, locations: locations } end def format_error(%{value: value, role: role} = error_info, locations) when is_binary(value) do role_name = role |> to_string |> String.capitalize %{error_info | value: &"#{role_name} `#{&1}': #{value}"} |> format_error(locations) end def format_error(%{value: value} = error_info, locations) do %{error_info | value: inspect(value)} |> format_error(locations) end def format_error(%{value: value} = error_info) when not is_function(value) do 
%{error_info | value: inspect(value)} |> format_error([]) end def format_error(%{value: value} = error_info) do format_error(error_info, []) end defoverridable [load_document: 1, dump_document: 1, adapt: 2, dump_results: 1, format_error: 2, format_error: 1, to_internal_name: 2, to_external_name: 2] end end @doc """ Convert the incoming (external) parsed document to the canonical (internal) representation that matches the schema. ## Examples ``` def load_document(%{definitions: definitions} = document) do %{document | definitions: definitions |> Enum.map(&your_custom_transformation/1)} end ``` """ @callback load_document(Absinthe.Language.Document.t) :: Absinthe.Language.Document.t @doc """ Convert an AST node to/from a representation. Called by `load_document/1`. ## Examples ``` def adapt(%{definitions: definitions} = document, :load) do %{document | definitions: definitions |> Enum.map(&your_custom_loader/1)} end ``` """ @callback adapt(Absinthe.Language.t, :load | :dump) :: Absinthe.Language.t @doc """ Convert the canonical (internal) results to the output (external) representation. ## Examples ``` def dump_results(%{data: data} = results) do %{results | data: your_custom_transformation(data)} end ``` """ @callback dump_results(Absinthe.Execution.result_t) :: any @typedoc "The lexical role of a name within the document/schema." @type role_t :: :operation | :field | :argument | :result | :type | :directive @doc """ Convert a name from an external name to an internal name. ## Examples Prefix all names with their role, just for fun! ``` def to_internal_name(external_name, role) do role_name = role |> to_string role_name <> "_" <> external_name end ``` """ @callback to_internal_name(binary, role_t) :: binary @doc """ Convert a name from an internal name to an external name. 
## Examples Remove the role-prefix (the inverse of what we did in `to_internal_name/2` above): ``` def to_external_name(internal_name, role) do internal_name |> String.replace(~r/^\#{role}_/, "") end ``` """ @callback to_external_name(binary, role_t) :: binary @doc """ Format an error using `value` for `name` located at the provided line/column locations. ## Examples Here's what the default implementation does: iex> format_error(%{name: "foo", role: :field, value: &"missing value '\#{&1}'"}, [%{line: 2, column: 4}]) %{message: "missing value `foo'", locations: [%{line: 2, column: 4}]} iex> format_error(%{name: "foo", role: :field, value: "missing value"}, [%{line: 2, column: 4}]) %{message: "Field `foo': missing value", locations: [%{line: 2, column: 4}]} # Without locations iex> format_error(%{name: "foo", role: :field, value: "missing value"}) %{message: "Field `foo': missing value"} """ @callback format_error(Execution.error_info_t, [Execution.error_location_t]) :: Execution.error_t @callback format_error(Execution.error_info_t) :: Execution.error_t end
lib/absinthe/adapter.ex
0.925129
0.846926
adapter.ex
starcoder
defmodule Rollbax.Logger do
  @moduledoc """
  A module that can be used to report crashes and exits to Rollbar.

  In Elixir and Erlang, crashes from GenServers and other processes are reported
  through `:error_logger`. When installed, this module installs an `:error_logger`
  handler that can be used to report such crashes to Rollbar automatically.

  In order to use this functionality, you must configure the `:rollbax` application
  to report crashes with:

      config :rollbax, :enable_crash_reports, true

  All the configuration options for reporting crashes are documented in detail below.
  If you are upgrading from an older version of Rollbax that used this module as a
  logger backend via `config :logger, backends: [:console, Rollbax.Logger]` this
  config should be removed.

  `Rollbax.Logger` implements a mechanism of reporting based on *reporters*, which
  are modules that implement the `Rollbax.Reporter` behaviour. Every message received
  by `Rollbax.Logger` is run through a list of reporters and the behaviour is
  determined by the return value of each reporter's
  `c:Rollbax.Reporter.handle_event/2` callback:

    * when the callback returns a `Rollbax.Exception` struct, the exception is
      reported to Rollbar and no other reporters are called

    * when the callback returns `:next`, the reporter is skipped and Rollbax moves
      on to the next reporter

    * when the callback returns `:ignore`, the reported message is ignored and no
      more reporters are tried.

  The list of reporters can be configured in the `:reporters` key in the `:rollbax`
  application configuration. By default this list only contains
  `Rollbax.Reporter.Standard` (see its documentation for more information). Rollbax
  also comes equipped with a `Rollbax.Reporter.Silencing` reporter that doesn't
  report anything it receives. For examples on how to provide your own reporters,
  look at the source for `Rollbax.Reporter.Standard`.

  ## Configuration

  The following reporting-related options can be used to configure the `:rollbax`
  application:

    * `:enable_crash_reports` (boolean) - when `true`, `Rollbax.Logger` is
      registered as an `:error_logger` handler and the whole reporting flow
      described above is executed.

    * `:reporters` (list) - a list of modules implementing the `Rollbax.Reporter`
      behaviour. Defaults to `[Rollbax.Reporter.Standard]`.

  """

  @behaviour :gen_event

  # Handler state: the configured list of reporter modules.
  defstruct [:reporters]

  @doc false
  @impl true
  def init(_args) do
    reporters = Application.get_env(:rollbax, :reporters, [Rollbax.Reporter.Standard])
    {:ok, %__MODULE__{reporters: reporters}}
  end

  @doc false
  @impl true
  def handle_event(event, state)

  # If the event is on a different node than the current node, we ignore it.
  def handle_event({_level, gl, _event}, state) when node(gl) != node() do
    {:ok, state}
  end

  def handle_event({level, _gl, event}, %__MODULE__{reporters: reporters} = state) do
    :ok = run_reporters(reporters, level, event)
    {:ok, state}
  end

  @doc false
  @impl true
  def handle_call(request, _state) do
    exit({:bad_call, request})
  end

  @doc false
  @impl true
  def handle_info(_message, state) do
    {:ok, state}
  end

  @doc false
  @impl true
  def terminate(_reason, _state) do
    :ok
  end

  @doc false
  @impl true
  def code_change(_old_vsn, state, _extra) do
    {:ok, state}
  end

  # Walks the reporter chain: the first reporter that returns an exception or
  # `:ignore` ends the walk; `:next` moves on to the following reporter.
  defp run_reporters([reporter | rest], level, event) do
    case reporter.handle_event(level, event) do
      %Rollbax.Exception{} = exception ->
        Rollbax.report_exception(exception)

      :next ->
        run_reporters(rest, level, event)

      :ignore ->
        :ok
    end
  end

  # No reporter claimed the event, so it is dropped. (A previous comment claimed
  # the event would be reported as a Rollbar "message"; this implementation
  # intentionally ignores unhandled events instead.)
  defp run_reporters([], _level, _event) do
    :ok
  end
end
lib/rollbax/logger.ex
0.846974
0.627666
logger.ex
starcoder
defmodule Ockam.Worker.Authorization do
  @moduledoc """
  Helper functions to configure worker message authorization.

  Usage:
  ```
  def is_authorized(message, _state) do
    Authorization.from_addresses(message, ["one", "two"])
  end
  ```

  Pipelining helpers:
  ```
  def is_authorized(message, state) do
    Authorization.from_addresses(message, ["one", "two"])
    |> Authorization.to_my_address(message, state)
  end
  ```
  """
  alias Ockam.Message

  @doc """
  Allow any messages to be handled by the worker
  """
  def allow_all(prev \\ :ok), do: chain(prev, fn -> :ok end)

  @doc """
  Deny all messages from being handled by the worker
  """
  def deny_all(prev \\ :ok), do: chain(prev, fn -> {:error, :deny_all} end)

  @doc """
  Only allow messages from a set of addresses to be handled by the worker.
  Message address is taken from return_route in that case
  """
  def from_addresses(prev, message, addresses) when is_list(addresses),
    do: chain(prev, fn -> from_addresses(message, addresses) end)

  def from_addresses(message, addresses) do
    case Message.return_route(message) do
      [] ->
        {:error, {:from_address, :empty_return_route}}

      [source | _rest] ->
        if source in addresses do
          :ok
        else
          {:error, {:from_address, :invalid_source, source}}
        end
    end
  end

  @doc """
  Only allow messages sent to a certain addresses to be handled by the worker
  Message address is taken from the message onward_route
  """
  def to_addresses(prev, message, addresses) when is_list(addresses),
    do: chain(prev, fn -> to_addresses(message, addresses) end)

  def to_addresses(message, addresses) do
    case Message.onward_route(message) do
      [] ->
        {:error, {:to_address, :empty_onward_route}}

      [destination | _rest] ->
        if destination in addresses do
          :ok
        else
          {:error, {:to_address, :invalid_destination, destination}}
        end
    end
  end

  @doc """
  Allow messages sent to the addresses stored in the `all_addresses` of the
  state to be handled by the worker.
  Message address is taken from the message onward_route
  """
  def to_my_address(prev \\ :ok, message, state) do
    my_addresses = Map.get(state, :all_addresses, [])
    to_addresses(prev, message, my_addresses)
  end

  @doc """
  Allow messages which have `channel: :secure_channel` in their local metadata
  to be handled by the worker.
  """
  def is_secure(prev \\ :ok, message) do
    chain(prev, fn ->
      channel = Message.local_metadata_value(message, :channel)

      ## TODO: error explanation
      if channel === :secure_channel do
        :ok
      else
        {:error, {:is_secure, :invalid_channel, channel}}
      end
    end)
  end

  @doc """
  Allow messages which have `identity: identity` in their local metadata
  to be handled by the worker.
  """
  def from_identity(prev \\ :ok, message, identity) do
    chain(prev, fn ->
      sender = Message.local_metadata_value(message, :identity)

      if sender === identity do
        :ok
      else
        {:error, {:from_identity, :invalid_identity, sender}}
      end
    end)
  end

  # Short-circuit combinator: run `fun` only while the previous check passed.
  def chain(:ok, fun), do: fun.()
  def chain({:error, _reason} = error, _fun), do: error
end
implementations/elixir/ockam/ockam/lib/ockam/worker/authorization.ex
0.787727
0.844473
authorization.ex
starcoder
defmodule Sim.Laboratory.InVitro do
  @moduledoc """
  All in one server for testing simulations.
  Data, SimLoop and Access are all here, if the server crashes
  the whole state is lost.
  """
  use GenServer, restart: :transient
  require Logger

  alias Phoenix.PubSub

  # Interval between simulation ticks, in milliseconds.
  @sim_interval 1000

  # Starts the server. Expects `opts[:entry_id]` (PubSub topic / log id) and
  # `opts[:pub_sub]` (PubSub server name); missing keys become nil.
  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, {opts[:entry_id], opts[:pub_sub]})
  end

  # State layout: the simulated object, the user-supplied step function, and the
  # timer reference of the pending tick (`sim_process` is nil while stopped).
  def init({id, pub_sub}) do
    {:ok, %{id: id, pub_sub: pub_sub, object: nil, sim_func: nil, sim_process: nil}}
  end

  # Creates the simulated object via the caller-provided zero-arity function.
  def handle_call({:create, create_func}, _from, state) do
    Logger.debug("create object for #{state.id}")
    object = create_func.()
    {:reply, {:ok, object}, %{state | object: object}}
  end

  # Returns the current object as-is.
  def handle_call(:object, _from, state) do
    {:reply, state.object, state}
  end

  # Applies `update_func` to the object and broadcasts the change.
  def handle_call({:update_object, update_func}, _from, state) do
    object = update(state, update_func)
    {:reply, {:ok, object}, %{state | object: object}}
  end

  # Starts the simulation loop; only matches when no loop is running
  # (sim_process: nil), so a second :start falls through to :no_change below.
  def handle_call({:start, sim_func}, _from, %{sim_process: nil} = state) do
    broadcast(state, {:sim, started: true})
    Logger.debug("sim started for #{state.id}")
    {:reply, :ok, %{state | sim_func: sim_func, sim_process: next_sim_call()}}
  end

  def handle_call({:start, _sim_func}, _from, state) do
    {:reply, :no_change, state}
  end

  # Stopping while already stopped is a no-op.
  def handle_call(:stop, _from, %{sim_process: nil} = state) do
    {:reply, :no_change, state}
  end

  # Cancels the pending tick timer and announces the stop.
  def handle_call(:stop, _from, state) do
    Process.cancel_timer(state.sim_process)
    broadcast(state, {:sim, started: false})
    Logger.debug("sim stopped for #{state.id}")
    {:reply, :ok, %{state | sim_process: nil}}
  end

  # One simulation step: advance the object, then schedule the next tick.
  def handle_info(:tick, state) do
    {:noreply, %{state | object: sim(state), sim_process: next_sim_call()}}
  end

  # Schedules the next :tick message; returns the timer reference stored in
  # `sim_process` so :stop can cancel it.
  def next_sim_call() do
    Process.send_after(self(), :tick, @sim_interval)
  end

  # Applies `update_func` to the object and broadcasts the new value.
  defp update(state, update_func) do
    state.object
    |> update_func.()
    |> broadcast_sim_changes(state)
  end

  # One step of the running simulation, using the stored sim_func.
  defp sim(state) do
    update(state, state.sim_func)
  end

  # Broadcasts the updated object; asserts the broadcast succeeded.
  defp broadcast_sim_changes(object, state) do
    :ok = broadcast(state, {:update, data: object})
    object
  end

  # Publishes `payload` on the PubSub topic named by this server's id.
  defp broadcast(state, payload) do
    PubSub.broadcast(state.pub_sub, state.id, payload)
  end
end
apps/sim/lib/sim/laboratory/in_vitro.ex
0.634656
0.494202
in_vitro.ex
starcoder
defmodule Example_Str do
  @moduledoc """
  A catalogue of small examples, one per `Stream` function, each returning the
  materialized result (via `Enum`) so it can be inspected.
  """

  def start do
    range = 1..10
    stream = Stream.map(range, &(&1 * 5))
    Enum.map(stream, &(&1 + 4))
  end

  def start2 do
    stream =
      1..10
      |> Stream.map(&IO.inspect(&1))
      |> Stream.map(&(&1 * 3))
      |> Stream.map(&IO.inspect(&1))

    Enum.to_list(stream)
  end

  def start3 do
    stream = Stream.chunk_by([1, 2, 2, 3, 4, 4, 4, 5, 6, 7], &(rem(&1, 2) == 1))
    Enum.to_list(stream)
  end

  def start4 do
    Stream.chunk_every([1, 2, 3, 4, 5, 6], 2) |> Enum.to_list()
  end

  def start5 do
    Stream.chunk_every([1, 2, 3, 4, 5], 3, 2, [5]) |> Enum.to_list()
  end

  def start6 do
    chunk_fun = fn element, acc ->
      if rem(element, 2) == 0 do
        {:cont, Enum.reverse([element | acc]), []}
      else
        {:cont, [element | acc]}
      end
    end

    after_fun = fn
      [] -> {:cont, []}
      acc -> {:cont, Enum.reverse(acc, [])}
    end

    stream = Stream.chunk_while(1..10, [], chunk_fun, after_fun)
    Enum.to_list(stream)
  end

  def start7 do
    stream = Stream.concat([1..3, 4..6, 7..9])
    Enum.to_list(stream)
  end

  def start8 do
    stream1 = Stream.cycle([1, 2, 3])
    stream2 = Stream.cycle([4, 5, 6])
    stream = Stream.concat(stream1, stream2)
    # BUG FIX: both cycles are infinite, so the original `Enum.to_list(stream)`
    # never terminated. Take a finite prefix instead (only the first cycle is
    # ever reached, since it never ends).
    Enum.take(stream, 15)
  end

  def start9 do
    stream = Stream.cycle([1, 2, 232])
    Enum.take(stream, 15)
  end

  def start10 do
    Stream.dedup([1, 2, 3, 3, 2, 1]) |> Enum.to_list()
  end

  def start11 do
    Stream.dedup_by([{1, :x}, {2, :y}, {2, :z}, {1, :x}], fn {x, _} -> x end)
    |> Enum.to_list()
  end

  def start12 do
    stream = Stream.drop(1..30, 3)
    Enum.to_list(stream)
  end

  def start13 do
    stream = Stream.drop_every([1, 2, 3, 4, 5], 2)
    Enum.to_list(stream)
  end

  def start14 do
    stream = Stream.drop_while(1..40, &(&1 <= 32))
    Enum.to_list(stream)
  end

  def start15 do
    stream = Stream.duplicate("Elixir", 8)
    Enum.to_list(stream)
  end

  def start16 do
    stream = Stream.each([1, 2, 3, 4], fn x -> send(self(), x) end)
    Enum.to_list(stream)

    receive do
      x when is_integer(x) -> x
    end
  end

  def start17 do
    stream = Stream.flat_map([1, 2, 3, 4], fn x -> [x, x * 5] end)
    # BUG FIX: was `Enum.to_lust/1` (typo), which raised UndefinedFunctionError.
    Enum.to_list(stream)
  end

  def start18 do
    stream = Stream.flat_map([1, 2, 3], fn x -> [[x]] end)
    Enum.to_list(stream)
  end

  def start19 do
    # Dropped the unused `stream =` binding: the pipeline already is the result.
    Stream.intersperse([1, 2, 3, 4], 33) |> Enum.to_list()
  end

  def start20 do
    Stream.interval(20) |> Enum.take(15)
  end

  def start21 do
    Stream.iterate(3, &(&1 * 44)) |> Enum.take(7)
  end

  def start22 do
    stream = Stream.map([1, 2, 3], fn x -> x * 23 end)
    Enum.to_list(stream)
  end

  def start23 do
    stream = Stream.map_every(1..30, 2, fn x -> x * 4 end)
    Enum.to_list(stream)
  end

  def start24 do
    stream = Stream.reject([1, 2, 3], fn x -> rem(x, 2) == 0 end)
    Enum.to_list(stream)
  end

  def start25 do
    :rand.seed(:exsss, {1, 2, 3})
    Stream.repeatedly(&:rand.uniform/0) |> Enum.take(3)
  end

  def start26 do
    Stream.resource(
      fn ->
        {:ok, pid} = StringIO.open("string")
        pid
      end,
      fn pid ->
        case IO.getn(pid, "", 1) do
          :eof -> {:halt, pid}
          char -> {[char], pid}
        end
      end,
      fn pid -> StringIO.close(pid) end
    )
    |> Enum.to_list()
  end

  def start27 do
    stream = Stream.scan(1..10, &(&1 + &2))
    Enum.to_list(stream)
  end

  def start28 do
    stream = Stream.scan(1..10, 0, &(&1 + &2))
    Enum.to_list(stream)
  end

  def start29 do
    stream = Stream.take(1..100, 5)
    Enum.to_list(stream)
  end

  def start30 do
    stream = Stream.take_every(1..40, 3)
    Enum.to_list(stream)
  end

  def start31 do
    stream = Stream.take_while(1..100, &(&1 <= 50))
    Enum.to_list(stream)
  end

  def start32 do
    Stream.timer(10) |> Enum.to_list()
  end

  def start33 do
    enum = 1001..9999
    n = 3

    stream =
      Stream.transform(enum, 0, fn i, acc ->
        if acc < n, do: {[i], acc + 1}, else: {:halt, acc}
      end)

    Enum.to_list(stream)
  end

  def start34 do
    Stream.unfold(5, fn
      0 -> nil
      n -> {n, n - 1}
    end)
    |> Enum.to_list()
  end

  def start35 do
    Stream.uniq([1, 2, 2, 3, 3, 3, 4, 5, 5]) |> Enum.to_list()
  end

  def start36 do
    Stream.uniq_by([a: {:tea, 2}, b: {:tea, 2}, c: {:coffee, 1}], fn {_, y} -> y end)
    |> Enum.to_list()
  end

  def start37 do
    stream = Stream.with_index([1, 2, 3, 4, 5], 3)
    Enum.to_list(stream)
  end

  def start38 do
    concat = Stream.concat(1..3, 4..6)
    cycle = Stream.cycle(["foo", "bar", "baz"])

    Stream.zip([concat, [:a, :b, :c], cycle])
    |> Enum.to_list()
  end

  def start39 do
    concat = Stream.concat(1..4, 5..9)
    cycle = Stream.cycle([:a, :b, :c])

    Stream.zip(concat, cycle) |> Enum.to_list()
  end

  def start40 do
    concat = Stream.concat(1..3, 4..6)

    Stream.zip_with([concat, concat], fn [a, b] -> a + b end)
    |> Enum.to_list()
  end
end
lib/beam/stream/stream.ex
0.50415
0.582788
stream.ex
starcoder
defmodule GroundStationWeb.SimulatorLive do
  @moduledoc """
  LiveView that drives a `FlightSimulator` state: advances it on a fixed timer
  and maps keyboard `"control_input"` events onto simulator controls.
  """
  use Phoenix.LiveView

  # Milliseconds between simulation ticks; @tick_seconds is the same step
  # expressed in seconds, as passed to FlightSimulator.update/2.
  @tick 30
  @tick_seconds @tick / 1000

  def render(assigns) do
    Phoenix.View.render(GroundStationWeb.PageView, "simulator.html", assigns)
  end

  def mount(session, socket) do
    # Start the tick timer only for the connected (websocket) mount, not the
    # initial static render.
    if connected?(socket), do: :timer.send_interval(@tick, self(), :tick)
    {:ok, assign(socket, simulator: session.simulator)}
  end

  # One simulation step per timer tick.
  def handle_info(:tick, socket) do
    socket.assigns.simulator
    |> FlightSimulator.update(@tick_seconds)
    |> update_simulator(socket)
  end

  # Roll: left/right arrows or A/D.
  def handle_event("control_input", %{"code" => code}, socket)
      when code in ["ArrowLeft", "KeyA"] do
    socket.assigns.simulator
    |> FlightSimulator.roll_left()
    |> update_simulator(socket)
  end

  def handle_event("control_input", %{"code" => code}, socket)
      when code in ["ArrowRight", "KeyD"] do
    socket.assigns.simulator
    |> FlightSimulator.roll_right()
    |> update_simulator(socket)
  end

  # Pitch: up/down arrows or W/S (up arrow pitches the nose down).
  def handle_event("control_input", %{"code" => code}, socket)
      when code in ["ArrowUp", "KeyW"] do
    socket.assigns.simulator
    |> FlightSimulator.pitch_down()
    |> update_simulator(socket)
  end

  def handle_event("control_input", %{"code" => code}, socket)
      when code in ["ArrowDown", "KeyS"] do
    socket.assigns.simulator
    |> FlightSimulator.pitch_up()
    |> update_simulator(socket)
  end

  # Speed: -/= keys.
  def handle_event("control_input", %{"code" => "Minus"}, socket) do
    socket.assigns.simulator
    |> FlightSimulator.speed_down()
    |> update_simulator(socket)
  end

  def handle_event("control_input", %{"code" => "Equal"}, socket) do
    socket.assigns.simulator
    |> FlightSimulator.speed_up()
    |> update_simulator(socket)
  end

  # Yaw: ,/. keys.
  def handle_event("control_input", %{"code" => "Comma"}, socket) do
    socket.assigns.simulator
    |> FlightSimulator.yaw_left()
    |> update_simulator(socket)
  end

  def handle_event("control_input", %{"code" => "Period"}, socket) do
    socket.assigns.simulator
    |> FlightSimulator.yaw_right()
    |> update_simulator(socket)
  end

  # Space levels the aircraft.
  def handle_event("control_input", %{"code" => "Space"}, socket) do
    socket.assigns.simulator
    |> FlightSimulator.reset_attitude()
    |> update_simulator(socket)
  end

  # Escape restarts the whole simulation.
  # BUG FIX: `@initial_simulator` was referenced here but never defined, so
  # Escape set the state to `nil` and the next :tick crashed inside
  # FlightSimulator.update/2. Reset to a fresh simulator instead. `struct/1`
  # assumes FlightSimulator defines a struct with sensible defaults —
  # TODO(review): confirm the intended reset state.
  def handle_event("control_input", %{"code" => "Escape"}, socket) do
    update_simulator(struct(FlightSimulator), socket)
  end

  # Any other key is ignored.
  def handle_event("control_input", _key, socket) do
    {:noreply, socket}
  end

  def update_simulator(state, socket) do
    {:noreply, assign(socket, :simulator, state)}
  end
end
lib/groundstation_web/live/simulator_live.ex
0.588061
0.48054
simulator_live.ex
starcoder
defmodule Chess.Game.Board do
  alias Chess.Game.Board
  alias Chess.Piece

  defstruct field: %{}

  # Piece modules on the back rank, indexed by file (x - 1).
  @back_rank {Piece.Rook, Piece.Knight, Piece.Bishop, Piece.Queen,
              Piece.King, Piece.Bishop, Piece.Knight, Piece.Rook}

  @doc """
  Builds the standard starting position. Every square of the 8x8 grid is
  present in `field`: ranks 1/2 hold the white pieces, ranks 7/8 the black
  pieces, and all other squares map to `nil`.
  """
  def new() do
    field =
      for x <- 1..8, y <- 1..8, into: %{} do
        {{x, y}, initial_piece(x, y)}
      end

    %Board{field: field}
  end

  # Starting piece for square {x, y}, or nil for an empty square.
  defp initial_piece(x, 1), do: struct(elem(@back_rank, x - 1), color: :white)
  defp initial_piece(_x, 2), do: struct(Piece.Pawn, color: :white)
  defp initial_piece(_x, 7), do: struct(Piece.Pawn, color: :black)
  defp initial_piece(x, 8), do: struct(elem(@back_rank, x - 1), color: :black)
  defp initial_piece(_x, _y), do: nil
end
lib/chess/game/board.ex
0.543106
0.656104
board.ex
starcoder
defmodule BSV.Script do
  @moduledoc """
  Module for parsing, serialising and building Scripts.

  Script is the scripting language built into Bitcoin. Transaction outputs each
  contain a "locking script" which lock a number of satoshis. Transaction
  inputs contain an "unlocking script" which unlock the satoshis contained in a
  previous output. Both the unlocking script and previous locking script are
  concatenated in the following order:

      unlocking_script <> locking_script

  The entire script is evaluated and if it returns a truthy value, the output
  is unlocked and spent.
  """
  alias BSV.{OpCode, ScriptNum}
  import BSV.Util, only: [decode: 2, decode!: 2, encode: 2]

  defstruct chunks: [], coinbase: nil

  @typedoc "Script struct"
  @type t() :: %__MODULE__{
          chunks: list(chunk()),
          coinbase: nil | binary()
        }

  @typedoc "Script chunk"
  @type chunk() :: atom() | binary()

  @typedoc "Coinbase data"
  # FIX: this type previously declared a `height` key, but get_coinbase_data/1
  # returns the decoded block number under the `:block` key — the spec now
  # matches the runtime shape.
  @type coinbase_data() :: %{
          block: integer(),
          data: binary(),
          nonce: binary()
        }

  @doc """
  Parses the given ASM encoded string into a `t:BSV.Script.t/0`.

  Returns the result in an `:ok` / `:error` tuple pair.

  ## Examples

      iex> Script.from_asm("OP_DUP OP_HASH160 5ae866af9de106847de6111e5f1faa168b2be689 OP_EQUALVERIFY OP_CHECKSIG")
      {:ok, %Script{chunks: [
        :OP_DUP,
        :OP_HASH160,
        <<90, 232, 102, 175, 157, 225, 6, 132, 125, 230, 17, 30, 95, 31, 170, 22, 139, 43, 230, 137>>,
        :OP_EQUALVERIFY,
        :OP_CHECKSIG
      ]}}
  """
  @spec from_asm(binary()) :: {:ok, t()} | {:error, term()}
  def from_asm(data) when is_binary(data) do
    chunks =
      data
      |> String.split(" ")
      |> Enum.map(&parse_asm_chunk/1)

    {:ok, struct(__MODULE__, chunks: chunks)}
  rescue
    # Any malformed token (unknown opcode atom, bad hex) lands here.
    _error ->
      {:error, {:invalid_encoding, :asm}}
  end

  @doc """
  Parses the given ASM encoded string into a `t:BSV.Script.t/0`.

  As `from_asm/1` but returns the result or raises an exception.
  """
  @spec from_asm!(binary()) :: t()
  def from_asm!(data) when is_binary(data) do
    case from_asm(data) do
      {:ok, script} ->
        script

      {:error, error} ->
        raise BSV.DecodeError, error
    end
  end

  @doc """
  Parses the given binary into a `t:BSV.Script.t/0`.

  Returns the result in an `:ok` / `:error` tuple pair.

  ## Options

  The accepted options are:

  * `:encoding` - Optionally decode the binary with either the `:base64` or
    `:hex` encoding scheme.

  ## Examples

      iex> Script.from_binary("76a9145ae866af9de106847de6111e5f1faa168b2be68988ac", encoding: :hex)
      {:ok, %Script{chunks: [
        :OP_DUP,
        :OP_HASH160,
        <<90, 232, 102, 175, 157, 225, 6, 132, 125, 230, 17, 30, 95, 31, 170, 22, 139, 43, 230, 137>>,
        :OP_EQUALVERIFY,
        :OP_CHECKSIG
      ]}}
  """
  @spec from_binary(binary(), keyword()) :: {:ok, t()} | {:error, term()}
  def from_binary(data, opts \\ []) when is_binary(data) do
    encoding = Keyword.get(opts, :encoding)

    with {:ok, data} <- decode(data, encoding),
         {:ok, chunks} <- parse_bytes(data) do
      {:ok, struct(__MODULE__, chunks: chunks)}
    end
  end

  @doc """
  Parses the given binary into a `t:BSV.Script.t/0`.

  As `from_binary/2` but returns the result or raises an exception.
  """
  @spec from_binary!(binary(), keyword()) :: t()
  def from_binary!(data, opts \\ []) when is_binary(data) do
    case from_binary(data, opts) do
      {:ok, script} ->
        script

      {:error, error} ->
        raise BSV.DecodeError, error
    end
  end

  @doc """
  Returns formatted coinbase data from the given Script.
  """
  @spec get_coinbase_data(t()) :: coinbase_data() | binary()
  def get_coinbase_data(%__MODULE__{coinbase: data}) when is_binary(data) do
    case data do
      # Layout: length-prefixed block number, length-prefixed miner data,
      # remaining bytes are the extra nonce.
      <<n::integer, blknum::bytes-size(n), d::integer, data::bytes-size(d), nonce::binary>> ->
        %{
          block: ScriptNum.decode(blknum),
          data: data,
          nonce: nonce
        }

      # Anything not matching the structured layout is returned raw.
      data ->
        data
    end
  end

  @doc """
  Pushes a chunk into the `t:BSV.Script.t/0`.

  The chunk can be any binary value, `t:BSV.OpCode.t/0` or `t:integer/0`.
  Integer values will be encoded as a `t:BSV.ScriptNum.t/0`.

  ## Examples

      iex> %Script{}
      ...> |> Script.push(:OP_FALSE)
      ...> |> Script.push(:OP_RETURN)
      ...> |> Script.push("Hello world!")
      ...> |> Script.push(2021)
      %Script{chunks: [
        :OP_FALSE,
        :OP_RETURN,
        "Hello world!",
        <<229, 7>>
      ]}
  """
  @spec push(t(), atom() | integer() | binary()) :: t()
  def push(%__MODULE__{} = script, data) when is_atom(data) do
    # Validates/normalises the opcode atom before pushing.
    opcode = OpCode.to_atom!(data)
    push_chunk(script, opcode)
  end

  def push(%__MODULE__{} = script, data) when is_binary(data),
    do: push_chunk(script, data)

  # Small integers 0..16 map to the dedicated OP_0..OP_16 opcodes. The dynamic
  # atom creation here is safe: the input range is bounded to 17 values.
  def push(%__MODULE__{} = script, data) when data in 0..16,
    do: push_chunk(script, String.to_atom("OP_#{ data }"))

  def push(%__MODULE__{} = script, data) when is_integer(data),
    do: push_chunk(script, ScriptNum.encode(data))

  @doc """
  Returns the size of the Script in bytes.

  ## Examples

      iex> Script.get_size(@p2pkh_script)
      25
  """
  @spec get_size(t()) :: non_neg_integer()
  def get_size(%__MODULE__{} = script),
    do: to_binary(script) |> byte_size()

  @doc """
  Serialises the given `t:BSV.Script.t/0` into an ASM encoded string.

  ## Examples

      iex> Script.to_asm(@p2pkh_script)
      "OP_DUP OP_HASH160 5ae866af9de106847de6111e5f1faa168b2be689 OP_EQUALVERIFY OP_CHECKSIG"
  """
  @spec to_asm(t()) :: binary()
  def to_asm(%__MODULE__{chunks: chunks}) do
    chunks
    |> Enum.map(&serialize_asm_chunk/1)
    |> Enum.join(" ")
  end

  @doc """
  Serialises the given `t:BSV.Script.t/0` into a binary.

  ## Options

  The accepted options are:

  * `:encoding` - Optionally encode the binary with either the `:base64` or
    `:hex` encoding scheme.

  ## Examples

      iex> Script.to_binary(@p2pkh_script, encoding: :hex)
      "76a9145ae866af9de106847de6111e5f1faa168b2be68988ac"
  """
  @spec to_binary(t(), keyword()) :: binary()
  def to_binary(script, opts \\ [])

  # A coinbase script carries raw bytes instead of parsed chunks.
  def to_binary(%__MODULE__{chunks: [], coinbase: data}, opts) when is_binary(data) do
    encoding = Keyword.get(opts, :encoding)
    encode(data, encoding)
  end

  def to_binary(%__MODULE__{chunks: chunks}, opts) do
    encoding = Keyword.get(opts, :encoding)

    chunks
    |> serialize_chunks()
    |> encode(encoding)
  end

  # Parses the ASM chunk into a Script chunk
  defp parse_asm_chunk(<<"OP_", _::binary>> = chunk),
    do: String.to_existing_atom(chunk)

  defp parse_asm_chunk("-1"), do: :OP_1NEGATE
  defp parse_asm_chunk("0"), do: :OP_0
  defp parse_asm_chunk(chunk), do: decode!(chunk, :hex)

  # Parses the given binary into a list of Script chunks
  defp parse_bytes(data, chunks \\ [])

  defp parse_bytes(<<>>, chunks), do: {:ok, Enum.reverse(chunks)}

  # Opcode bytes 1..75 push that many bytes directly.
  defp parse_bytes(<<size::integer, chunk::bytes-size(size), data::binary>>, chunks)
       when size > 0 and size < 76 do
    parse_bytes(data, [chunk | chunks])
  end

  # 76 = OP_PUSHDATA1: one-byte length prefix.
  defp parse_bytes(<<76, size::integer, chunk::bytes-size(size), data::binary>>, chunks) do
    parse_bytes(data, [chunk | chunks])
  end

  # 77 = OP_PUSHDATA2: two-byte little-endian length prefix.
  defp parse_bytes(<<77, size::little-16, chunk::bytes-size(size), data::binary>>, chunks) do
    parse_bytes(data, [chunk | chunks])
  end

  # 78 = OP_PUSHDATA4: four-byte little-endian length prefix.
  defp parse_bytes(<<78, size::little-32, chunk::bytes-size(size), data::binary>>, chunks) do
    parse_bytes(data, [chunk | chunks])
  end

  # Any other byte is an opcode.
  defp parse_bytes(<<op::integer, data::binary>>, chunks) do
    opcode = OpCode.to_atom!(op)
    parse_bytes(data, [opcode | chunks])
  end

  # Pushes the chunk onto the script
  defp push_chunk(%__MODULE__{} = script, data),
    do: update_in(script.chunks, & Enum.concat(&1, [data]))

  # Serilises the Script chunk as an ASM chunk
  defp serialize_asm_chunk(:OP_1NEGATE), do: "-1"
  defp serialize_asm_chunk(chunk) when chunk in [:OP_0, :OP_FALSE], do: "0"
  defp serialize_asm_chunk(chunk) when is_atom(chunk), do: Atom.to_string(chunk)
  defp serialize_asm_chunk(chunk) when is_binary(chunk), do: encode(chunk, :hex)

  # Serilises the list of Script chunks as a binary
  defp serialize_chunks(chunks, data \\ <<>>)

  defp serialize_chunks([], data), do: data

  defp serialize_chunks([chunk | chunks], data) when is_atom(chunk) do
    opcode = OpCode.to_integer(chunk)
    serialize_chunks(chunks, <<data::binary, opcode::integer>>)
  end

  defp serialize_chunks([chunk | chunks], data) when is_binary(chunk) do
    suffix = case byte_size(chunk) do
      op when op > 0 and op < 76 ->
        <<op::integer, chunk::binary>>
      len when len < 0x100 ->
        <<76::integer, len::integer, chunk::binary>>
      len when len < 0x10000 ->
        <<77::integer, len::little-16, chunk::binary>>
      len when len < 0x100000000 ->
        <<78::integer, len::little-32, chunk::binary>>
      # NOTE(review): for chunks of 4 GiB or more this emits only the length
      # byte and silently drops the payload — such chunks are unreachable in
      # practice, but this branch should probably raise. Left as-is to
      # preserve behaviour.
      op ->
        << op::integer >>
    end
    serialize_chunks(chunks, data <> suffix)
  end
end
lib/bsv/script.ex
0.883469
0.562657
script.ex
starcoder
defmodule XDR.Enum do
  @moduledoc """
  This module manages the `Enumeration` type based on the RFC4506 XDR Standard.
  """

  @behaviour XDR.Declaration

  alias XDR.EnumError

  # An enum is a keyword list of {identifier, integer} declarations plus the
  # currently selected identifier.
  defstruct [:declarations, :identifier]

  @typedoc """
  `XDR.Enum` structure type specification.
  """
  @type t :: %XDR.Enum{declarations: keyword(), identifier: atom()}

  @doc """
  Create a new `XDR.Enum` structure with the `declarations` and `identifier` passed.
  """
  @spec new(declarations :: list(), identifier :: atom()) :: t()
  def new(declarations, identifier),
    do: %XDR.Enum{declarations: declarations, identifier: identifier}

  @doc """
  Encode a `XDR.Enum` structure into a XDR format.
  """
  # Clause order matters: the two guard clauses reject invalid field types
  # before the main clause assumes a well-formed struct.
  @impl true
  def encode_xdr(%{declarations: declarations}) when not is_list(declarations),
    do: {:error, :not_list}

  def encode_xdr(%{identifier: identifier}) when not is_atom(identifier),
    do: {:error, :not_an_atom}

  def encode_xdr(%{declarations: declarations, identifier: identifier}) do
    # The identifier must be one of the declared enum members.
    case Keyword.has_key?(declarations, identifier) do
      true -> encode_valid_data(declarations, identifier)
      false -> {:error, :invalid_key}
    end
  end

  @doc """
  Encode a `XDR.Enum` structure into a XDR format.
  If the `declarations` or `identifier` of the `enum` is not valid, an exception is raised.
  """
  @impl true
  def encode_xdr!(enum) do
    case encode_xdr(enum) do
      {:ok, binary} -> binary
      {:error, reason} -> raise(EnumError, reason)
    end
  end

  @doc """
  Decode the Enumeration in XDR format to a `XDR.Enum` structure.
  """
  # Guard clauses validate argument types; the main clause decodes a 4-byte
  # XDR int and maps it back onto a declared identifier.
  @impl true
  def decode_xdr(bytes, %{}) when not is_binary(bytes), do: {:error, :not_binary}

  def decode_xdr(_bytes, %{declarations: declarations}) when not is_list(declarations),
    do: {:error, :not_list}

  def decode_xdr(bytes, %{declarations: declarations}) do
    {int_xdr, rest} = XDR.Int.decode_xdr!(bytes)

    # Reverse lookup: find the declaration whose integer value matches the
    # decoded datum (nil when the value is not declared).
    declarations
    |> Enum.find(fn {_key, value} -> value === int_xdr.datum end)
    |> decoded_enum(declarations, rest)
  end

  @doc """
  Decode the Enumeration in XDR format to a `XDR.Enum` structure.
  If the binaries are not valid, an exception is raised.
  """
  @impl true
  def decode_xdr!(bytes, enum) do
    case decode_xdr(bytes, enum) do
      {:ok, result} -> result
      {:error, reason} -> raise(EnumError, reason)
    end
  end

  # Wraps the reverse-lookup result: nil means the decoded integer is not a
  # declared enum value.
  @spec decoded_enum(
          identifier :: {atom(), any()} | nil,
          declarations :: keyword(),
          rest :: binary()
        ) :: {:ok, {t(), binary()}} | {:error, :invalid_key}
  defp decoded_enum(nil, _declarations, _rest), do: {:error, :invalid_key}

  defp decoded_enum({identifier, _value}, declarations, rest) do
    decoded_enum = new(declarations, identifier)
    {:ok, {decoded_enum, rest}}
  end

  # Encodes the integer value declared for `identifier` as a 4-byte XDR int.
  @spec encode_valid_data(declarations :: keyword(), identifier :: atom()) :: {:ok, binary()}
  defp encode_valid_data(declarations, identifier) do
    binary =
      declarations[identifier]
      |> XDR.Int.new()
      |> XDR.Int.encode_xdr!()

    {:ok, binary}
  end
end
lib/xdr/enum.ex
0.920808
0.417984
enum.ex
starcoder
defmodule Roll35Core.Util do @moduledoc """ Utility functions for the Roll35Core. """ alias Roll35Core.Types @doc """ Roll a random item from a list. If the list is a list of maps with `weight` and `value` keys, then we make a weighted selection based on that info and return the value. Otherwise, this is the same as `Enum.random/1`. """ @spec random([term]) :: term() def random(items) do firstitem = Enum.at(items, 0) if is_map(firstitem) and Map.has_key?(firstitem, :weight) and Map.has_key?(firstitem, :value) do items |> Stream.flat_map(fn item -> item.value |> Stream.iterate(fn i -> i end) |> Stream.take(item.weight) |> Enum.to_list() end) |> Enum.random() else Enum.random(items) end end @doc """ Convert string keys in a map into atoms. This operates recursively, processing any maps that are values within the passed map. """ @spec atomize_map(%{(atom | String.t()) => any}) :: %{atom => any} def atomize_map(map) when is_map(map) do Enum.reduce(map, %{}, fn {key, value}, acc when is_binary(key) and is_map(value) -> # credo:disable-for-next-line Credo.Check.Warning.UnsafeToAtom Map.put(acc, String.to_atom(key), atomize_map(value)) {key, value}, acc when is_binary(key) -> # credo:disable-for-next-line Credo.Check.Warning.UnsafeToAtom Map.put(acc, String.to_atom(key), value) {key, value}, acc when is_map(value) -> Map.put(acc, key, atomize_map(value)) {key, value}, acc -> Map.put(acc, key, value) end) end @doc """ Process compound list of weighted values. Each list entry must be a map with keys coresponding to the possible values for `t:Roll35Core.Types.rank()/1` with each such key bearing a weight to use for the entry when rolling a random item of the corresponding rank. All the remaining keys are placed as-is into a map which serves as the value to be returned the weighted random selection. 
""" @spec process_compound_itemlist(nonempty_list(%{any => any})) :: Types.itemlist() def process_compound_itemlist(data) when is_list(data) do newdata = Enum.map(data, &atomize_map/1) Types.ranks() |> Enum.map(fn rank -> { rank, newdata |> Enum.map(fn entry -> {_, value} = Map.split(entry, Types.ranks()) %{weight: entry[rank], value: value} end) |> Enum.filter(fn entry -> entry.weight != 0 end) } end) |> Map.new() end @doc """ Process ranked list of weighted values. This takes a map of ranks to maps of subranks to lists of maps of weighted items, and processes them into the format used by our weighted random selection in various data agent modules. """ @spec process_ranked_itemlist(map) :: Types.ranked_itemlist() def process_ranked_itemlist(data) when is_map(data) do newdata = atomize_map(data) ranks = if :minor in Map.keys(newdata) do Types.ranks() else Types.limited_ranks() end ranks |> Enum.map(fn rank -> value = process_subranked_itemlist(newdata[rank]) {rank, value} end) |> Map.new() end @doc """ Process a subranked list of weighted values. This takes a map of subranks to lists of maps of weighted items and processes them into the format used by our weighted random selection in various data agent modules. """ @spec process_subranked_itemlist(map) :: Types.subranked_itemlist() def process_subranked_itemlist(data) when is_map(data) do map = atomize_map(data) subranks = if :least in Map.keys(map) do Types.full_subranks() else Types.subranks() end subranks |> Enum.map(fn subrank -> value = Enum.map(map[subrank], fn entry -> entry |> atomize_map() |> (fn entry -> {_, value} = Map.split(entry, [:weight]) %{weight: entry.weight, value: value} end).() end) {subrank, value} end) |> Map.new() end @doc """ Process a base weapon or base armor list. This takes a list of base item maps and converts all the keys to atoms, as well as modifying the type and tags entries to have atom values. 
""" @spec process_base_armor_weapon_list(list()) :: list() def process_base_armor_weapon_list(data) do Enum.map(data, fn entry -> entry |> atomize_map() # credo:disable-for-next-line Credo.Check.Warning.UnsafeToAtom |> Map.update!(:type, &String.to_atom/1) |> Map.update!(:tags, fn data -> # credo:disable-for-next-line Credo.Check.Warning.UnsafeToAtom Enum.map(data, &String.to_atom/1) end) end) end @doc """ Process an armor or weapon enchantment table. """ @spec process_enchantment_table(map()) :: map() def process_enchantment_table(data) do data |> Enum.map(fn {key, value} -> {key, value |> Enum.map(fn {key, value} -> {key, Enum.map(value, fn entry -> entry = atomize_map(entry) {_, value} = Map.split(entry, [:weight]) value = if Map.has_key?(value, :limit) do Map.update!(value, :limit, fn item -> item |> Enum.map(fn {key, value} -> { key, # credo:disable-for-next-line Credo.Check.Warning.UnsafeToAtom Enum.map(value, &String.to_atom/1) } end) |> Map.new() end) else value end value = if Map.has_key?(value, :remove) do Map.update!(value, :remove, fn item -> # credo:disable-for-next-line Credo.Check.Warning.UnsafeToAtom Enum.map(item, &String.to_atom/1) end) else value end value = if Map.has_key?(value, :add) do Map.update!(value, :add, fn item -> # credo:disable-for-next-line Credo.Check.Warning.UnsafeToAtom Enum.map(item, &String.to_atom/1) end) else value end %{weight: entry.weight, value: value} end)} end) |> Map.new()} end) |> Map.new() end @doc """ Process an armor or weapon agent state to add a list of all tags. """ @spec generate_tags_entry(%{:base => list(), atom() => term()}, [atom()]) :: %{ :base => list(), :tags => list(), atom() => term() } def generate_tags_entry(data, base_tags \\ []) do tags = Enum.reduce(data.base, MapSet.new(base_tags), fn item, acc -> Enum.reduce(item.tags, acc, fn tag, acc -> MapSet.put(acc, tag) end) end) Map.put(data, :tags, MapSet.to_list(tags)) end @doc """ Square an integer. 
""" @spec squared(integer()) :: integer() def squared(x) when is_integer(x) do x * x end end
apps/roll35_core/lib/roll35_core/util.ex
0.849971
0.453141
util.ex
starcoder
defmodule Fiet.RSS2.Engine do defmodule ParsingError do defexception [:reason] def message(%__MODULE__{reason: reason}) do format_message(reason) end defp format_message({:not_atom, root_tag}) do "unexpected root tag #{inspect(root_tag)}, expected \"rss\"" end end @moduledoc """ A module to be used to implement RSS 2.0 parser. ## Examples A RSS 2.0 compliant parser can be implemented with: defmodule StandardParser do use Fiet.RSS2.Engine end Parsers can also be customized by using `:extras` option, with `:channel` being all the outstanding tags in `<channel>` and `:item` being all the outstanding tags in `<item>` in the feed. defmodule NotSoStardardParser do use Fiet.RSS2.Engine, [extras: [ channel: [{"atom:link", "atom:link"}], item: [{"content:encoded", "encoded_content"}] ]] end """ alias Fiet.RSS2 @doc false defmacro __using__(opts) do quote bind_quoted: [opts: Macro.escape(opts)], location: :keep do extras = Keyword.get(opts, :extras, []) extra_item_tags = Keyword.get(extras, :item, []) extra_channel_tags = Keyword.get(extras, :channel, []) def parse(document) do case Fiet.StackParser.parse(document, %{channel: %RSS2.Channel{}}, __MODULE__) do {:ok, %{channel: channel} = feed} -> channel = %{channel | items: Enum.reverse(channel.items)} {:ok, channel} {:ok, {:not_rss2, _root_tag} = reason} -> {:error, %ParsingError{reason: reason}} {:error, reason} -> {:error, %ParsingError{reason: reason}} end end @doc false def handle_event(:start_element, {root_tag, _, _}, [], feed) when root_tag != "rss" do {:stop, {:not_rss2, root_tag}} end def handle_event(:start_element, {"channel", _, _}, stack, feed) do channel = %RSS2.Channel{} %{feed | channel: channel} end def handle_event(:start_element, {"item", _, _}, _stack, %{channel: channel} = feed) do %RSS2.Channel{items: items} = channel channel = %{channel | items: [%RSS2.Item{extras: %{}} | items]} %{feed | channel: channel} end def handle_event(:start_element, _element, _stack, feed) do feed end def handle_event( 
:end_element, element, [{"image", _, _} | [{"channel", _, _} | _]], %{channel: channel} = feed ) do %RSS2.Channel{ image: image } = channel image = image || %RSS2.Image{} image = maybe_enrich_image(element, image) channel = Map.put(channel, :image, image) %{feed | channel: channel} end def handle_event(:end_element, element, [{"channel", _, _} | _], %{channel: channel} = feed) do %{feed | channel: maybe_enrich_channel(element, channel)} end def handle_event(:end_element, element, [{"item", _, _} | _], %{channel: channel} = feed) do %RSS2.Channel{ items: [item | items] } = channel item = maybe_enrich_item(element, item) channel = Map.put(channel, :items, [item | items]) %{feed | channel: channel} end def handle_event(:end_element, element, _stack, feed) do feed end @channel_tags [ {"title", :title}, {"link", :link}, {"description", :description}, {"language", :language}, {"copyright", :copyright}, {"managingEditor", :managing_editor}, {"webMaster", :web_master}, {"pubDate", :pub_date}, {"lastBuildDate", :last_build_date}, {"generator", :generator}, {"docs", :docs}, {"ttl", :ttl}, {"rating", :rating}, {"skipHours", :skip_hours}, {"skipDays", :skip_days} ] Enum.each(@channel_tags, fn {tag_name, key} -> defp maybe_enrich_channel({unquote(tag_name), _, content}, channel) do Map.put(channel, unquote(key), content) end end) defp maybe_enrich_channel({"category", _, _} = element, channel) do %{categories: categories} = channel {domain, value} = extract_category(element) category = %RSS2.Category{domain: domain, value: value} %{channel | categories: [category | categories]} end defp maybe_enrich_channel({"cloud", attributes, _} = element, channel) do domain = get_attribute_value(attributes, "domain") port = get_attribute_value(attributes, "port") path = get_attribute_value(attributes, "path") register_procedure = get_attribute_value(attributes, "register_procedure") protocol = get_attribute_value(attributes, "protocol") cloud = %RSS2.Channel.Cloud{ domain: domain, port: 
port, path: path, register_procedure: register_procedure, protocol: protocol } %{channel | cloud: cloud} end Enum.each(extra_channel_tags, fn {tag_name, key} -> defp maybe_enrich_channel({unquote(tag_name), attributes, content}, channel) do extras = Map.put(channel.extras, unquote(key), {attributes, content}) %{channel | extras: extras} end end) defp maybe_enrich_channel(_element, channel), do: channel @item_tags [ {"title", :title}, {"link", :link}, {"description", :description}, {"author", :author}, {"comments", :comments}, {"guid", :guid}, {"pubDate", :pub_date}, {"source", :source} ] Enum.each(@item_tags, fn {tag_name, key} -> defp maybe_enrich_item({unquote(tag_name), _, content}, item) do Map.put(item, unquote(key), content) end end) Enum.each(extra_item_tags, fn {tag_name, key} -> defp maybe_enrich_item( {unquote(tag_name), attributes, content}, %RSS2.Item{extras: extras} = item ) do extras = Map.put(extras, unquote(key), {attributes, content}) Map.put(item, :extras, extras) end end) defp maybe_enrich_item({"category", _, _} = element, item) do %{categories: categories} = item {domain, value} = extract_category(element) category = %RSS2.Category{domain: domain, value: value} %{item | categories: [category | categories]} end defp maybe_enrich_item({"enclosure", attributes, _} = element, channel) do length = get_attribute_value(attributes, "length") type = get_attribute_value(attributes, "type") url = get_attribute_value(attributes, "url") enclosure = %RSS2.Item.Enclosure{ length: length, type: type, url: url } %{channel | enclosure: enclosure} end defp maybe_enrich_item(_tag, item) do item end @image_tags [ {"title", :title}, {"link", :link}, {"description", :description}, {"url", :url}, {"width", :width}, {"height", :height} ] Enum.each(@image_tags, fn {tag_name, key} -> defp maybe_enrich_image({unquote(tag_name), _, content}, image) do Map.put(image, unquote(key), content) end end) defp maybe_enrich_image(_tag, image) do image end defp 
extract_category({"category", attributes, content}) do domain = get_attribute_value(attributes, "domain") {domain, content} end defp get_attribute_value(attributes, name) do for({key, value} <- attributes, key == name, do: value) |> List.first() end end end @doc """ Parses RSS 2.0 document. This function accepts RSS 2.0 document in raw binary and returns `{:ok, Fiet.RSS2.t()}`, `{:error, any}` otherwise. """ @callback parse(document :: binary) :: {:ok, Fiet.RSS2.t()} | {:error, any} end
lib/fiet/rss2/engine.ex
0.852307
0.495667
engine.ex
starcoder
defmodule D2 do @moduledoc """ --- Day 2: Password Philosophy --- Your flight departs in a few days from the coastal airport; the easiest way down to the coast from here is via toboggan. The shopkeeper at the North Pole Toboggan Rental Shop is having a bad day. "Something's wrong with our computers; we can't log in!" You ask if you can take a look. Their password database seems to be a little corrupted: some of the passwords wouldn't have been allowed by the Official Toboggan Corporate Policy that was in effect when they were chosen. To try to debug the problem, they have created a list (your puzzle input) of passwords (according to the corrupted database) and the corporate policy when that password was set. How many passwords are valid according to their policies? --- Part Two --- While it appears you validated the passwords correctly, they don't seem to be what the Official Toboggan Corporate Authentication System is expecting. The shopkeeper suddenly realizes that he just accidentally explained the password policy rules from his old job at the sled rental place down the street! The Official Toboggan Corporate Policy actually works a little differently. Each policy actually describes two positions in the password, where 1 means the first character, 2 means the second character, and so on. (Be careful; Toboggan Corporate Policies have no concept of "index zero"!) Exactly one of these positions must contain the given letter. Other occurrences of the letter are irrelevant for the purposes of policy enforcement. How many passwords are valid according to the new interpretation of the policies? 
""" @behaviour Day @regex ~r/(\d+)-(\d+) ([a-z]): ([a-z]+)/ @impl true def solve(input) do input = input |> Utils.to_strings() |> Enum.map(&Regex.run(@regex, &1, capture: :all_but_first)) |> Enum.map(fn [min, max, char, pass] -> min = Utils.to_int(min) max = Utils.to_int(max) <<char>> = char pass = to_charlist(pass) [min, max, char, pass] end) part_1 = Enum.count(input, fn [min, max, char, pass] -> count = Enum.count(pass, &(&1 == char)) min <= count and count <= max end) part_2 = Enum.count(input, fn [first, second, char, pass] -> p = Enum.at(pass, first - 1) == char q = Enum.at(pass, second - 1) == char (p or q) and !(p and q) end) {part_1, part_2} end end
lib/days/02.ex
0.578329
0.766774
02.ex
starcoder
defmodule Retex do @moduledoc false @type t() :: %Retex{} alias Retex.{Node, Protocol, Fact, Token} alias Node.{ Type, Test, Select, PNode, BetaMemory } @type action :: %{given: list(Retex.Wme.t()), then: list(Retex.Wme.t())} @type network_node :: Type.t() | Test.t() | Select.t() | PNode.t() | BetaMemory.t() defstruct graph: Graph.new(), wmes: %{}, agenda: [], activations: %{}, wme_activations: %{}, tokens: MapSet.new(), bindings: %{}, pending_activation: [] @spec root_vertex :: Retex.Root.t() def root_vertex(), do: Retex.Root.new() @spec new :: Retex.t() def new() do %{graph: graph} = %Retex{} graph = Graph.add_vertex(graph, Retex.Root.new()) %Retex{graph: graph} end @spec add_wme(Retex.t(), Retex.Wme.t()) :: Retex.t() def add_wme(%Retex{} = network, %Retex.Wme{} = wme) do wme = Map.put(wme, :timestamp, :os.system_time(:seconds)) network = %{network | wmes: Map.put(network.wmes, wme.id, wme)} {network, bindings} = propagate_activations(network, root_vertex(), wme, network.bindings) %{network | bindings: Map.merge(network.bindings, bindings)} end defp propagate_activation(neighbor, rete, wme, bindings, tokens \\ []) do Protocol.Activation.activate(neighbor, rete, wme, bindings, tokens) end @spec add_production(Retex.t(), %{given: list(Retex.Wme.t()), then: action()}) :: t() def add_production(%{graph: graph} = network, %{given: given, then: action} = rule) do {filters, given} = Enum.split_with(given, &is_filter?/1) {graph, alphas} = build_alpha_network(graph, given) {beta_memory, graph} = build_beta_network(graph, alphas) graph = add_p_node(Map.get(rule, :id), graph, beta_memory, action, filters) %{network | graph: graph} end defp is_filter?(%Fact.Filter{}), do: true defp is_filter?(_), do: false @spec build_alpha_network(Graph.t(), list()) :: {Graph.t(), list()} def build_alpha_network(graph, given) do Enum.reduce(given, {graph, []}, &Retex.Protocol.AlphaNetwork.append(&1, &2)) end @spec build_beta_network(Graph.t(), list(network_node())) :: {list(network_node()), 
Graph.t()} def build_beta_network(graph, disjoint_beta_network) do create_beta_nodes(graph, disjoint_beta_network) end @spec create_beta_nodes(Graph.t(), list(network_node())) :: {list(network_node()), Graph.t()} def create_beta_nodes(graph, [first | [second | list]]) do beta_memory = Node.BetaMemory.new() graph |> Graph.add_vertex(beta_memory) |> Graph.add_edge(first, beta_memory) |> Graph.add_edge(second, beta_memory) |> create_beta_nodes([beta_memory | list]) end def create_beta_nodes(graph, [beta_memory]) do {beta_memory, graph} end @spec add_p_node(any, Graph.t(), BetaMemory.t(), action(), list(Fact.Filter.t())) :: Graph.t() def add_p_node(id, graph, beta_memory, action, filters) do pnode = Node.PNode.new(action, filters) prod = if id, do: Map.put(pnode, :id, id), else: pnode graph |> Graph.add_vertex(prod) |> Graph.add_edge(beta_memory, prod) end @spec hash(any) :: String.t() def hash(:uuid4), do: UUIDTools.uuid4() def hash(data) do :sha256 |> :crypto.hash(inspect(data)) |> Base.encode16(case: :lower) end @spec replace_bindings(PNode.t(), map) :: PNode.t() def replace_bindings(%_{action: action_fun} = pnode, bindings) when is_function(action_fun) do %{pnode | action: action_fun, bindings: bindings} end def replace_bindings(%_{action: actions} = pnode, bindings) when is_map(bindings) do new_actions = Enum.map(actions, fn action -> replace_bindings(action, bindings) end) %{pnode | action: new_actions, bindings: bindings} end def replace_bindings(%Retex.Wme{} = action, bindings) when is_map(bindings) do populated = for {key, val} <- Map.from_struct(action), into: %{} do val = Map.get(bindings, val, val) {key, val} end struct(Retex.Wme, populated) end def replace_bindings(%_{action: actions} = pnode, bindings) when is_map(bindings) do new_actions = Enum.map(actions, fn action -> replace_bindings(action, bindings) end) %{pnode | action: new_actions, bindings: bindings} end def replace_bindings(tuple, bindings) when is_map(bindings) and is_tuple(tuple) do 
List.to_tuple( for element <- Tuple.to_list(tuple) do if is_binary(element), do: Map.get(bindings, element, element), else: element end ) end def replace_bindings(%_{action: actions} = pnode, {_, _, bindings}) when is_map(bindings) and is_list(actions) do new_actions = Enum.map(actions, fn action -> replace_bindings(action, bindings) end) %{pnode | action: new_actions, bindings: bindings} end def replace_bindings(anything, _bindings) do anything end @spec add_token(Retex.t(), network_node(), Retex.Wme.t(), map, list(Retex.Token.t())) :: Retex.t() def add_token( %Retex{tokens: rete_tokens} = rete, current_node, _wme, _bindings, [_ | _] = tokens ) do node_tokens = Map.get(rete_tokens, current_node.id, []) |> MapSet.new() all_tokens = MapSet.new(node_tokens) |> MapSet.union(MapSet.new(tokens)) new_tokens = Map.put(rete_tokens, current_node.id, all_tokens) %{rete | tokens: new_tokens} end def add_token(%Retex{tokens: rete_tokens} = rete, current_node, wme, bindings, tokens) do node_tokens = Map.get(rete_tokens, current_node.id, []) |> MapSet.new() token = Token.new() token = %{ token | wmem: wme, node: current_node.id, bindings: bindings } all_tokens = [token] |> MapSet.new() |> MapSet.union(node_tokens) |> MapSet.union(MapSet.new(tokens)) new_tokens = Map.put(rete_tokens, current_node.id, MapSet.new(all_tokens)) %{rete | tokens: new_tokens} end @spec create_activation(Retex.t(), network_node(), Retex.Wme.t()) :: Retex.t() def create_activation( %__MODULE__{activations: activations, wme_activations: wme_activations} = rete, current_node, wme ) do node_activations = Map.get(activations, current_node.id, []) new_activations = [wme.id | node_activations] new_rete = %{rete | activations: Map.put(activations, current_node.id, new_activations)} previous_wme_activations = Map.get(wme_activations, wme.id, []) new_wme_activations = Map.put(wme_activations, wme.id, [current_node.id | previous_wme_activations]) %{new_rete | wme_activations: new_wme_activations} end @spec 
propagate_activations( Retex.t(), network_node(), Retex.Wme.t(), map, list(Retex.Token.t()) ) :: {Retex.t(), map} def propagate_activations( %Retex{graph: graph} = rete, %{} = current_node, %Retex.Wme{} = wme, bindings, new_tokens ) do graph |> Graph.out_neighbors(current_node) |> Enum.reduce({rete, bindings}, fn vertex, {network, bindings} -> propagate_activation(vertex, network, wme, bindings, new_tokens) end) end @spec propagate_activations(Retex.t(), network_node(), Retex.Wme.t(), map) :: {Retex.t(), map} def propagate_activations( %Retex{graph: graph} = rete, %{} = current_node, %Retex.Wme{} = wme, bindings ) do graph |> Graph.out_neighbors(current_node) |> Enum.reduce({rete, bindings}, fn vertex, {network, bindings} -> propagate_activation(vertex, network, wme, bindings) end) end @spec deactivate_descendants(Retex.t(), network_node()) :: Retex.t() def deactivate_descendants(%Retex{activations: activations} = rete, %{} = current_node) do %{graph: graph} = rete children = Graph.out_neighbors(graph, current_node) Enum.reduce(children, rete, fn %type{} = vertex, network -> if type == Retex.Node.PNode do %{ network | agenda: Enum.reject(network.agenda, fn pnode -> pnode.id == vertex.id end) } else new_network = %{network | activations: Map.put(activations, vertex.id, [])} deactivate_descendants(new_network, vertex) end end) end @spec continue_traversal(Retex.t(), map, network_node(), Retex.Wme.t(), list(Retex.Token.t())) :: {Retex.t(), map} def continue_traversal( %Retex{} = new_rete, %{} = new_bindings, %_{} = current_node, %Retex.Wme{} = wme, tokens ) do propagate_activations(new_rete, current_node, wme, new_bindings, tokens) end @spec continue_traversal(Retex.t(), map, network_node(), Retex.Wme.t()) :: {Retex.t(), map} def continue_traversal( %Retex{} = new_rete, %{} = new_bindings, %_{} = current_node, %Retex.Wme{} = wme ) do propagate_activations(new_rete, current_node, wme, new_bindings) end @spec stop_traversal(Retex.t(), map) :: {Retex.t(), map} def 
stop_traversal(%Retex{} = rete, %{} = bindings) do {rete, bindings} end end
lib/retex.ex
0.808105
0.453867
retex.ex
starcoder
defmodule Icon.Schema.Types.Transaction do @moduledoc """ This module defines a transaction. A transaction has the following keys: Key | Type | Description :------------ | :------------------------------------------------- | :---------- `blockHash` | `Icon.Schema.Types.Hash.t()` | Hash of the block that includes the transaction. `blockHeight` | `Icon.Schema.Types.Integer.t()` | Height of the block that includes the transaction. `data` | Depends on `dataType`. | Data of the transaction. `dataType` | Either `message`, `:call`, `:deploy` or `:deposit` | Data type of the transaction. `from` | `Icon.Schema.Types.EOA.t()` | EOA address that sent the transaction. `nid` | `Icon.Schema.Types.Integer.t()` | Network ID (see `Icon.RPC.Identity` for more information). `nonce` | `Icon.Schema.Types.Integer.t()` | An arbitrary number used to prevent transaction hash collision. `signature` | `Icon.Schema.Types.Signature.t()` | Signature of the transaction. `stepLimit` | `Icon.Schema.Types.Loop.t()` | Maximum step allowance that can be used by the transaction. `timestamp` | `Icon.Schema.Types.Timestamp.t()` | Transaction creation time. Timestamp is in microsecond. `to` | `Icon.Schema.Types.Address.t()` | EOA address to receive coins, or SCORE address to execute the transaction. `txHash` | `Icon.Schema.Types.Hash.t()` | Transaction hash. `txIndex` | `Icon.Schema.Types.Integer.t()` | Transaction index in a block. `nil` when it is pending. `value` | `Icon.Schema.Types.Loop.t()` | Amount of ICX coins in loop to transfer. When omitted, assumes 0. (1 ICX = 1¹⁸ loop). `version` | `Icon.Schema.Types.Integer.t()` | Protocol version. 
""" use Icon.Schema defschema(%{ version: :integer, from: :eoa_address, to: :address, value: :loop, stepLimit: :loop, timestamp: :timestamp, nid: :integer, nonce: :integer, txHash: :hash, txIndex: :integer, blockHash: :hash, blockHeight: :integer, signature: :signature, dataType: enum([:message, :call, :deploy, :deposit, :base]), data: any( [ base: %{ result: %{ coveredByFee: :loop, coveredByOverIssuedICX: :loop, issue: :loop } }, message: :binary_data, call: %{ method: {:string, required: true}, params: :any }, deploy: %{ contentType: {:string, required: true}, content: {:binary_data, required: true}, params: :any }, deposit: %{ action: {enum([:add, :withdraw]), required: true}, id: :hash, amount: :loop } ], :dataType ) }) end
lib/icon/schema/types/transaction.ex
0.920254
0.778607
transaction.ex
starcoder
defmodule DataMorph.Struct do @moduledoc ~S""" Contains `from_rows/3` function that defines a struct and return structs created from rows, and `defmodulestruct/2` macro to define a struct. """ @doc ~S""" Defines a struct from given `kind` alias and list of `fields`. When called a second time with additional new fields it redefines struct, setting fields to be the union of the old and new fields. ## Examples iex> DataMorph.Struct.defmodulestruct(Foo.Bar, [:baz, :boom]) {:module, Foo.Bar, _, %Foo.Bar{baz: nil, boom: nil}} ...> %Foo.Bar{baz: "zy", boom: "boom"} %Foo.Bar{baz: "zy", boom: "boom"} ...> DataMorph.Struct.defmodulestruct(Foo.Bar, [:bish, :bash]) {:module, Foo.Bar, _, %Foo.Bar{bash: nil, baz: nil, bish: nil, boom: nil}} ...> %Foo.Bar{bish: "zy", bash: "boom"} %Foo.Bar{bash: "boom", baz: nil, bish: "zy", boom: nil} """ defmacro defmodulestruct(kind, fields) do quote do module = unquote(kind) fields = unquote(fields) fields_changeset = try do existing_fields = (struct(module) |> Map.keys()) -- [:__struct__] existing_fieldset = MapSet.new(existing_fields) new_fieldset = MapSet.new(fields) unless existing_fieldset === new_fieldset do existing_fieldset |> MapSet.union(new_fieldset) |> MapSet.to_list() end rescue UndefinedFunctionError -> fields end if fields_changeset do defmodule(Module.concat([module]), do: defstruct(fields_changeset)) else {:module, module, nil, struct(module)} end end end @doc ~S""" Defines a struct and returns structs created from `rows` list or stream, and a `namespace`, a `name`, and a list of `headers`. Redefines struct when called again with same namespace and name but different headers, sets struct fields to be the union of the old and new headers. ## Examples Defines a struct and returns stream of structs created from stream of `rows`. 
iex> headers = ["name","ISO code"] ...> [ ...> ["New Zealand","nz"], ...> ["United Kingdom","gb"] ...> ] \ ...> |> Stream.map(& &1) \ ...> |> DataMorph.Struct.from_rows(OpenRegister, "country", headers) \ ...> |> Enum.to_list [%OpenRegister.Country{iso_code: "nz", name: "New Zealand"}, %OpenRegister.Country{iso_code: "gb", name: "United Kingdom"}] Defines a struct and returns stream of structs created from list of `rows`. iex> headers = ["name","ISO code"] ...> [ ...> ["New Zealand","nz"], ...> ["United Kingdom","gb"] ...> ] \ ...> |> DataMorph.Struct.from_rows("open-register", Country, headers) \ ...> |> Enum.to_list [%OpenRegister.Country{iso_code: "nz", name: "New Zealand"}, %OpenRegister.Country{iso_code: "gb", name: "United Kingdom"}] """ def from_rows(rows, namespace, name, headers) do kind = DataMorph.Module.camelize_concat(namespace, name) fields = headers |> Enum.map(&normalize/1) defmodulestruct(kind, fields) rows |> Stream.map(&convert_row(&1, kind, fields)) end defp convert_row(row, kind, fields) do tuples = fields |> Enum.zip(row) struct(kind, tuples) end def normalize(string) do string |> String.downcase() |> String.replace(~r"\W", " ") |> String.replace(~r" +", " ") |> String.trim() |> String.replace(" ", "_") |> String.to_atom() end end
lib/data_morph/struct.ex
0.772659
0.631495
struct.ex
starcoder
defmodule Frettchen.Span do @moduledoc """ Spans are data structures that collect information which gets sent to a reporter. They are usually initialized and added to a Trace process which can handle the life cycle management. Tags and logs can be added to Spans to provide additional information. Spans can also be injected (serialized) so that they can be passed between differnent microservices. """ alias Jaeger.Thrift.{Log, Span, Tag} alias Frettchen.Trace @doc """ close/1 closes a span by calculating the duration as the difference between the start time and the current time """ def close(%Span{} = span) do %{span | duration: (Frettchen.Helpers.current_time() - span.start_time)} |> Trace.resolve_span() end @doc """ Extracts span data from a serialized inject """ def extract(string) do [trace_id_low, span_id, parent_span_id, _bits] = String.split(string, ":") trace_id_low = trace_id_low |> String.upcase() |> Integer.parse(16) |> elem(0) span_id = span_id |> String.upcase() |> Integer.parse(16) |> elem(0) parent_span_id = parent_span_id |> String.upcase() |> Integer.parse(16) |> elem(0) %{trace_id_low: trace_id_low, span_id: span_id, parent_span_id: parent_span_id} end @doc """ Converts the data in a span into an injectable string """ def inject(%Span{} = span) do "#{Integer.to_string(span.trace_id_low, 16)}:#{Integer.to_string(span.span_id, 16)}:#{Integer.to_string(span.parent_span_id,16)}:1" end @doc """ log/3 adds a tag struct with a timestamp to the logs list of a span. """ def log(%Span{} = span, key, value) when is_binary(key) do tag = %{Tag.new | key: key} |> tag_merge_value(value) log = %{Log.new | timestamp: Frettchen.Helpers.current_time(), fields: [tag]} %{span | logs: [log | span.logs]} end @doc """ open/2 creates a new span with a given name and assigns the passed span as the parent_id. 
The span is then added to the Trace process """ def open(%Span{} = span, name) do %{new_span(name) | trace_id_low: span.trace_id_low, parent_span_id: span.span_id} |> Trace.add_span() end @doc """ open/2 creates a new span with a given name and assigns the passed trace as the trace_id. The span is then added to the Trace process """ def open(%Trace{} = trace, name) do %{new_span(name) | trace_id_low: trace.id} |> Trace.add_span() end @doc """ open/2 creates a new span with a given name and assigns the passed trace as the trace_id. The span is then added to the Trace process """ def open(%Trace{} = trace, name) do %{new_span(name) | trace_id_low: trace.id} |> Trace.add_span() end @doc """ open/3 creates a new span with a given name and assigns the passed trace as the trace_id as well as a parent_span_id. The span is then added to the Trace process """ def open(%Trace{} = trace, name, parent_span_id) do %{new_span(name) | trace_id_low: trace.id, parent_span_id: parent_span_id} |> Trace.add_span() end @doc """ tag/3 adds a tag struct to the tags list of a span. """ def tag(%Span{} = span, key, value) when is_binary(key) do tag = %{Tag.new | key: key} |> tag_merge_value(value) %{span | tags: [tag | span.tags]} end defp new_span(name) do %{Span.new() | operation_name: Frettchen.Helpers.format_name(name), trace_id_low: 0, trace_id_high: 0, span_id: Frettchen.Helpers.random_id(), parent_span_id: 0, flags: 1, start_time: Frettchen.Helpers.current_time(), logs: [], tags: [] } end defp tag_merge_value(%Tag{} = tag, value) when is_binary(value) do %{tag | v_type: 0, v_str: value} end defp tag_merge_value(%Tag{} = tag, value) when is_float(value) do %{tag | v_type: 1, v_double: value} end defp tag_merge_value(%Tag{} = tag, value) when is_boolean(value) do %{tag | v_type: 2, v_bool: value} end defp tag_merge_value(%Tag{} = tag, value) when is_integer(value) do %{tag | v_type: 3, v_long: value} end end
lib/frettchen/span.ex
0.768863
0.576482
span.ex
starcoder
defmodule Akd.Publish.Distillery do
  @moduledoc """
  A native Hook module that comes shipped with Akd.

  This module uses `Akd.Hook`.

  Provides a set of operations that copies a built distillery release from
  the `build_at` location to `publish_to` destination, and then publishes the
  release (by uncompressing the released tar file). Ensures to remove the
  tar.gz file created by this build.

  Doesn't have any Rollback operations.

  When using SCP to transfer between two remote nodes you must ensure they
  can communicate. This can be done by setting up authorized keys and known
  hosts on the destination server, or using your local credentials (see:
  scp_options). You will still require known hosts. Alternatively, you can
  use your local computer as an intermediate destination (no examples
  provided yet).

  # Options:

  * `run_ensure`: `boolean`. Specifies whether to a run a command or not.
  * `ignore_failure`: `boolean`. Specifies whether to continue if this hook fails.
  * `scp_options`: `string`. options to pass to the SCP command, example: "-o \"ForwardAgent yes\""

  # Defaults:

  * `run_ensure`: `true`
  * `ignore_failure`: `false`
  * `scp_options`: `""`
  """

  use Akd.Hook

  alias Akd.{Deployment, Destination, DestinationResolver}

  @default_opts [run_ensure: true, ignore_failure: false, scp_options: ""]

  @doc """
  Callback implementation for `get_hooks/2`.

  This function returns a list of operations that can be used to publish a
  release using distillery on the `publish_to` destination of a deployment.

  ## Examples

      iex> deployment = %Akd.Deployment{mix_env: "prod",
      ...> build_at: Akd.Destination.local("."),
      ...> publish_to: Akd.Destination.local("."),
      ...> name: "name",
      ...> vsn: "0.1.1"}
      iex> Akd.Publish.Distillery.get_hooks(deployment, [])
      [%Akd.Hook{ensure: [%Akd.Operation{cmd: "rm ./name.tar.gz", cmd_envs: [],
      destination: %Akd.Destination{host: :local, path: ".",
      user: :current}}], ignore_failure: false,
      main: [%Akd.Operation{cmd: "cp ./_build/prod/rel/name/releases/0.1.1/name.tar.gz .\\n",
      cmd_envs: [],
      destination: %Akd.Destination{host: :local, path: ".", user: :current}}],
      rollback: [], run_ensure: true},
      %Akd.Hook{ensure: [], ignore_failure: false,
      main: [%Akd.Operation{cmd: "cd .\\ntar xzf name.tar.gz\\n", cmd_envs: [],
      destination: %Akd.Destination{host: :local, path: ".", user: :current}}],
      rollback: [], run_ensure: true}]

  """
  @spec get_hooks(Akd.Deployment.t, Keyword.t) :: list(Akd.Hook.t)
  def get_hooks(deployment, opts \\ []) do
    opts = uniq_merge(opts, @default_opts)

    [
      copy_release_hook(deployment, opts),
      publish_hook(deployment, opts)
    ]
  end

  # This function takes a deployment and options and returns an Akd.Hook.t
  # struct using FormHook DSL.
  defp copy_release_hook(deployment, opts) do
    build = DestinationResolver.resolve(:build, deployment)
    publish = DestinationResolver.resolve(:publish, deployment)

    # FIX: the fallback default was `false`, which disagreed with
    # @default_opts and would interpolate the literal "false" into the scp
    # command if this hook were ever invoked without the merged defaults.
    scp_options = Keyword.get(opts, :scp_options, "")

    # Transfer between two remote servers by running the SCP command locally;
    # when build and publish share a host, a plain `cp` on that host suffices.
    scp_destination =
      if build.host != publish.host do
        Akd.Destination.local()
      else
        build
      end

    form_hook opts do
      main copy_rel(deployment, scp_options), scp_destination

      ensure "rm #{publish.path}/#{deployment.name}.tar.gz", publish
    end
  end

  # This function takes a deployment and options and returns an Akd.Hook.t
  # struct using FormHook DSL.
  defp publish_hook(deployment, opts) do
    publish = DestinationResolver.resolve(:publish, deployment)

    form_hook opts do
      main publish_rel(deployment), publish
    end
  end

  # This function returns the command to be used to copy the release from
  # build to production.
  # This clause matches when build and publish share the same host (note the
  # repeated binding `s`), so a local `cp` is enough.
  defp copy_rel(
         %Deployment{build_at: %Destination{host: s}, publish_to: %Destination{host: s}} =
           deployment,
         _scp_options
       ) do
    """
    cp #{path_to_release(deployment.build_at.path, deployment)} #{deployment.publish_to.path}
    """
  end

  # This clause handles distinct hosts and assumes that the publish server
  # has ssh credentials to the build server.
  defp copy_rel(%Deployment{build_at: src, publish_to: dest} = deployment, scp_options) do
    """
    scp #{scp_options} #{src |> Destination.to_string() |> path_to_release(deployment)} #{Destination.to_string(dest)}
    """
  end

  # This function returns the command to be used to publish a release, i.e.
  # uncompress the tar.gz file associated with the deployment.
  defp publish_rel(deployment) do
    """
    cd #{deployment.publish_to.path}
    tar xzf #{deployment.name}.tar.gz
    """
  end

  # This function returns the path to the release based on deployment name
  # and mix environment.
  defp path_to_release(base, deployment) do
    "#{base}/_build/#{deployment.mix_env}/rel/#{deployment.name}/releases/#{deployment.vsn}/#{deployment.name}.tar.gz"
  end

  # This function takes two keyword lists and merges them keeping the keys
  # unique. If there are multiple values for a key, it takes the value from
  # the first value of keyword1 corresponding to that key.
  defp uniq_merge(keyword1, keyword2) do
    keyword2
    |> Keyword.merge(keyword1)
    |> Keyword.new()
  end
end
lib/akd/base/publish/distillery.ex
0.870879
0.496155
distillery.ex
starcoder
defmodule AWS.SMS do
  @moduledoc """
  Amazon Server Migration Service automates the process of migrating servers
  to EC2.
  """

  @doc """
  The CreateReplicationJob API is used to create a ReplicationJob to
  replicate a server on AWS. Call this API to first create a ReplicationJob,
  which will then schedule periodic ReplicationRuns to replicate your server
  to AWS. Each ReplicationRun will result in the creation of an AWS AMI.
  """
  def create_replication_job(client, input, options \\ []) do
    request(client, "CreateReplicationJob", input, options)
  end

  @doc """
  The DeleteReplicationJob API is used to delete a ReplicationJob, resulting
  in no further ReplicationRuns. This will delete the contents of the S3
  bucket used to store SMS artifacts, but will not delete any AMIs created
  by the SMS service.
  """
  def delete_replication_job(client, input, options \\ []) do
    request(client, "DeleteReplicationJob", input, options)
  end

  @doc """
  The DeleteServerCatalog API clears all servers from your server catalog.
  This means that these servers will no longer be accessible to the Server
  Migration Service.
  """
  def delete_server_catalog(client, input, options \\ []) do
    request(client, "DeleteServerCatalog", input, options)
  end

  @doc """
  The DisassociateConnector API will disassociate a connector from the
  Server Migration Service, rendering it unavailable to support replication
  jobs.
  """
  def disassociate_connector(client, input, options \\ []) do
    request(client, "DisassociateConnector", input, options)
  end

  @doc """
  The GetConnectors API returns a list of connectors that are registered
  with the Server Migration Service.
  """
  def get_connectors(client, input, options \\ []) do
    request(client, "GetConnectors", input, options)
  end

  @doc """
  The GetReplicationJobs API will return all of your ReplicationJobs and
  their details. This API returns a paginated list, that may be
  consecutively called with nextToken to retrieve all ReplicationJobs.
  """
  def get_replication_jobs(client, input, options \\ []) do
    request(client, "GetReplicationJobs", input, options)
  end

  @doc """
  The GetReplicationRuns API will return all ReplicationRuns for a given
  ReplicationJob. This API returns a paginated list, that may be
  consecutively called with nextToken to retrieve all ReplicationRuns for a
  ReplicationJob.
  """
  def get_replication_runs(client, input, options \\ []) do
    request(client, "GetReplicationRuns", input, options)
  end

  @doc """
  The GetServers API returns a list of all servers in your server catalog.
  For this call to succeed, you must previously have called
  ImportServerCatalog.
  """
  def get_servers(client, input, options \\ []) do
    request(client, "GetServers", input, options)
  end

  @doc """
  The ImportServerCatalog API is used to gather the complete list of
  on-premises servers on your premises. This API call requires connectors
  to be installed and monitoring all servers you would like imported. This
  API call returns immediately, but may take some time to retrieve all of
  the servers.
  """
  def import_server_catalog(client, input, options \\ []) do
    request(client, "ImportServerCatalog", input, options)
  end

  @doc """
  The StartOnDemandReplicationRun API is used to start a ReplicationRun on
  demand (in addition to those that are scheduled based on your frequency).
  This ReplicationRun will start immediately. StartOnDemandReplicationRun
  is subject to limits on how many on demand ReplicationRuns you may call
  per 24-hour period.
  """
  def start_on_demand_replication_run(client, input, options \\ []) do
    request(client, "StartOnDemandReplicationRun", input, options)
  end

  @doc """
  The UpdateReplicationJob API is used to change the settings of your
  existing ReplicationJob created using CreateReplicationJob. Calling this
  API will affect the next scheduled ReplicationRun.
  """
  def update_replication_job(client, input, options \\ []) do
    request(client, "UpdateReplicationJob", input, options)
  end

  # Signs and issues an AWS JSON-1.1 request for the given action.
  #
  # FIX: the previous spec was wrong on both error shapes — the error branch
  # returns `{:error, {exception, message}}` (two strings parsed from the
  # error body), not `{:error, Poison.Parser.t}`, and the success response
  # struct is `HTTPoison.Response.t` (Poison has no Response struct).
  @spec request(map(), binary(), map(), list()) ::
          {:ok, Poison.Parser.t() | nil, HTTPoison.Response.t()}
          | {:error, {binary(), binary()}}
          | {:error, HTTPoison.Error.t()}
  defp request(client, action, input, options) do
    client = %{client | service: "sms"}
    host = get_host("sms", client)
    url = get_url(host, client)

    headers = [
      {"Host", host},
      {"Content-Type", "application/x-amz-json-1.1"},
      {"X-Amz-Target", "AWSServerMigrationService_V2016_10_24.#{action}"}
    ]

    payload = Poison.Encoder.encode(input, [])
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)

    case HTTPoison.post(url, payload, headers, options) do
      {:ok, response = %HTTPoison.Response{status_code: 200, body: ""}} ->
        # Empty 200 body: nothing to decode.
        {:ok, nil, response}

      {:ok, response = %HTTPoison.Response{status_code: 200, body: body}} ->
        {:ok, Poison.Parser.parse!(body), response}

      {:ok, _response = %HTTPoison.Response{body: body}} ->
        # Non-200: AWS encodes the failure as {"__type": ..., "message": ...}.
        error = Poison.Parser.parse!(body)
        exception = error["__type"]
        message = error["message"]
        {:error, {exception, message}}

      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # "local" region targets a localhost endpoint (useful for testing).
  defp get_host(endpoint_prefix, client) do
    if client.region == "local" do
      "localhost"
    else
      "#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
    end
  end

  defp get_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end
end
lib/aws/sms.ex
0.744006
0.460774
sms.ex
starcoder
defmodule Ecto.Query.API do
  @moduledoc """
  This module lists all functions allowed in the query API.

    * Comparison operators: `==`, `!=`, `<=`, `>=`, `<`, `>`
    * Boolean operators: `and`, `or`, `not`
    * Inclusion operator: `in/2`
    * Search functions: `like/2` and `ilike/2`
    * Null check functions: `is_nil/1`
    * Aggregates: `count/1`, `avg/1`, `sum/1`, `min/1`, `max/1`
    * Date/time intervals: `datetime_add/3`, `date_add/3`, `from_now/2`, `ago/2`
    * Inside select: `struct/2`, `map/2` and literals (map, tuples, lists, etc)
    * General: `fragment/1`, `field/2` and `type/2`

  Note the functions in this module exist for documentation
  purposes and one should never need to invoke them directly.
  Furthermore, it is possible to define your own macros and
  use them in Ecto queries (see docs for `fragment/1`).
  """

  # Every function below unconditionally raises via doc!/1, so silence
  # Dialyzer's no_return warnings for the whole module.
  @dialyzer :no_return

  @doc """
  Binary `==` operation.
  """
  def left == right, do: doc! [left, right]

  @doc """
  Binary `!=` operation.
  """
  def left != right, do: doc! [left, right]

  @doc """
  Binary `<=` operation.
  """
  def left <= right, do: doc! [left, right]

  @doc """
  Binary `>=` operation.
  """
  def left >= right, do: doc! [left, right]

  @doc """
  Binary `<` operation.
  """
  def left < right, do: doc! [left, right]

  @doc """
  Binary `>` operation.
  """
  def left > right, do: doc! [left, right]

  @doc """
  Binary `and` operation.
  """
  def left and right, do: doc! [left, right]

  @doc """
  Binary `or` operation.
  """
  def left or right, do: doc! [left, right]

  @doc """
  Unary `not` operation.
  """
  def not(value), do: doc! [value]

  @doc """
  Checks if the left-value is included in the right one.

      from p in Post, where: p.id in [1, 2, 3]

  The right side may either be a list, a literal list
  or even a column in the database with array type:

      from p in Post, where: "elixir" in p.tags
  """
  def left in right, do: doc! [left, right]

  @doc """
  Searches for `search` in `string`.

      from p in Post, where: like(p.body, "Chapter%")

  Translates to the underlying SQL LIKE query, therefore
  its behaviour is dependent on the database. In particular,
  PostgreSQL will do a case-sensitive operation, while the
  majority of other databases will be case-insensitive. For
  performing a case-insensitive `like` in PostgreSQL, see `ilike/2`.

  You should be very careful when allowing user sent data to be used
  as part of LIKE query, since they allow to perform
  [LIKE-injections](https://githubengineering.com/like-injection/).
  """
  def like(string, search), do: doc! [string, search]

  @doc """
  Searches for `search` in `string` in a case insensitive fashion.

      from p in Post, where: ilike(p.body, "Chapter%")

  Translates to the underlying SQL ILIKE query. This operation is
  only available on PostgreSQL.
  """
  def ilike(string, search), do: doc! [string, search]

  @doc """
  Checks if the given value is nil.

      from p in Post, where: is_nil(p.published_at)
  """
  def is_nil(value), do: doc! [value]

  @doc """
  Counts the given entry.

      from p in Post, select: count(p.id)
  """
  def count(value), do: doc! [value]

  @doc """
  Counts the distinct values in given entry.

      from p in Post, select: count(p.id, :distinct)
  """
  def count(value, :distinct), do: doc! [value, :distinct]

  @doc """
  Calculates the average for the given entry.

      from p in Payment, select: avg(p.value)
  """
  def avg(value), do: doc! [value]

  @doc """
  Calculates the sum for the given entry.

      from p in Payment, select: sum(p.value)
  """
  def sum(value), do: doc! [value]

  @doc """
  Calculates the minimum for the given entry.

      from p in Payment, select: min(p.value)
  """
  def min(value), do: doc! [value]

  @doc """
  Calculates the maximum for the given entry.

      from p in Payment, select: max(p.value)
  """
  def max(value), do: doc! [value]

  @doc """
  Adds a given interval to a datetime.

  The first argument is a `datetime`, the second one is the count
  for the interval, which may be either positive or negative and
  the interval value:

      # Get all items published since the last month
      from p in Post, where: p.published_at >
                             datetime_add(^Ecto.DateTime.utc, -1, "month")

  In the example above, we used `datetime_add/3` to subtract one month
  from the current datetime and compared it with the `p.published_at`.
  If you want to perform operations on date, `date_add/3` could be used.

  The following intervals are supported: year, month, week, day, hour,
  minute, second, millisecond and microsecond.
  """
  def datetime_add(datetime, count, interval), do: doc! [datetime, count, interval]

  @doc """
  Adds a given interval to a date.

  See `datetime_add/3` for more information.
  """
  def date_add(date, count, interval), do: doc! [date, count, interval]

  @doc """
  Adds the given interval to the current time in UTC.

  The current time in UTC is retrieved from Elixir and
  not from the database.

  ## Examples

      from a in Account, where: a.expires_at < from_now(3, "month")
  """
  def from_now(count, interval), do: doc! [count, interval]

  @doc """
  Subtracts the given interval from the current time in UTC.

  The current time in UTC is retrieved from Elixir and
  not from the database.

  ## Examples

      from p in Post, where: p.published_at > ago(3, "month")
  """
  def ago(count, interval), do: doc! [count, interval]

  @doc """
  Send fragments directly to the database.

  It is not possible to represent all possible database queries using
  Ecto's query syntax. When such is required, it is possible to use
  fragments to send any expression to the database:

      def unpublished_by_title(title) do
        from p in Post,
          where: is_nil(p.published_at) and
                 fragment("lower(?)", p.title) == ^title
      end

  In the example above, we are using the lower procedure in the
  database to downcase the title column.

  It is very important to keep in mind that Ecto is unable to do any
  type casting described above when fragments are used. You can
  however use the `type/2` function to give Ecto some hints:

      fragment("lower(?)", p.title) == type(^title, :string)

  Or even say the right side is of the same type as `p.title`:

      fragment("lower(?)", p.title) == type(^title, p.title)

  It is possible to make use of PostgreSQL's JSON/JSONB data type
  with fragments, as well:

      fragment("?->>? ILIKE ?", p.map, "key_name", ^some_value)

  ## Keyword fragments

  In order to support databases that do not have string-based
  queries, like MongoDB, fragments also allow keywords to be given:

      from p in Post,
          where: fragment(title: ["$eq": ^some_value])

  ## Defining custom functions using macros and fragment

  You can add a custom Ecto query function using macros.  For example
  to expose SQL's coalesce function you can define this macro:

      defmodule CustomFunctions do
        defmacro coalesce(left, right) do
          quote do
            fragment("coalesce(?, ?)", unquote(left), unquote(right))
          end
        end
      end

  To have coalesce/2 available, just import the module that defines it.

      import CustomFunctions

  The only downside is that it will show up as a fragment when
  inspecting the Elixir query.  Other than that, it should be
  equivalent to a built-in Ecto query function.
  """
  def fragment(fragments), do: doc! [fragments]

  @doc """
  Allows a field to be dynamically accessed.

      def at_least_four(doors_or_tires) do
        from c in Car,
          where: field(c, ^doors_or_tires) >= 4
      end

  In the example above, both `at_least_four(:doors)` and
  `at_least_four(:tires)` would be valid calls as the field is dynamically
  generated.
  """
  def field(source, field), do: doc! [source, field]

  @doc """
  Used in `select` to specify which struct fields should be returned.

  For example, if you don't need all fields to be returned
  as part of a struct, you can filter it to include only certain
  fields by using `struct/2`:

      from p in Post,
        select: struct(p, [:title, :body])

  `struct/2` can also be used to dynamically select fields:

      fields = [:title, :body]
      from p in Post, select: struct(p, ^fields)

  As a convenience, `select` allows developers to take fields
  without an explicit call to `struct/2`:

      from p in Post, select: [:title, :body]

  Or even dynamically:

      fields = [:title, :body]
      from p in Post, select: ^fields

  However, `struct/2` is still useful when you want to limit
  the fields of different structs:

      from(city in City, join: country in assoc(city, :country),
           select: {struct(city, [:country_id, :name]), struct(country, [:id, :population])}

  For preloads, the selected fields may be specified from the parent:

      from(city in City, preload: :country,
           select: struct(city, [:country_id, :name, country: [:id, :population]]))

  **IMPORTANT**: When filtering fields for associations, you
  MUST include the foreign keys used in the relationship,
  otherwise Ecto will be unable to find associated records.
  """
  def struct(source, fields), do: doc! [source, fields]

  @doc """
  Used in `select` to specify which fields should be returned as a map.

  For example, if you don't need all fields to be returned or
  neither need a struct, you can use `map/2` to achieve both:

      from p in Post,
        select: map(p, [:title, :body])

  `map/2` can also be used to dynamically select fields:

      fields = [:title, :body]
      from p in Post, select: map(p, ^fields)

  `map/2` is also useful when you want to limit the fields
  of different structs:

      from(city in City, join: country in assoc(city, :country),
           select: {map(city, [:country_id, :name]), map(country, [:id, :population])}

  For preloads, the selected fields may be specified from the parent:

      from(city in City, preload: :country,
           select: map(city, [:country_id, :name, country: [:id, :population]]))

  **IMPORTANT**: When filtering fields for associations, you
  MUST include the foreign keys used in the relationship,
  otherwise Ecto will be unable to find associated records.
  """
  def map(source, fields), do: doc! [source, fields]

  @doc """
  Casts the given value to the given type.

  Most of the times, Ecto is able to proper cast interpolated
  values due to its type checking mechanism. In some situations
  though, in particular when using fragments with `fragment/1`,
  you may want to tell Ecto you are expecting a particular type:

      fragment("lower(?)", p.title) == type(^title, :string)

  It is also possible to say the type must match the same of a column:

      fragment("lower(?)", p.title) == type(^title, p.title)
  """
  def type(interpolated_value, type), do: doc! [interpolated_value, type]

  # All public functions in this module are documentation stubs: Ecto's
  # query macros recognize them by name at compile time and never execute
  # these bodies. Calling one at runtime is always a programmer error, so
  # every stub funnels here and raises.
  defp doc!(_) do
    raise "the functions in Ecto.Query.API should not be invoked directly, " <>
          "they serve for documentation purposes only"
  end
end
lib/ecto/query/api.ex
0.865082
0.863363
api.ex
starcoder
defmodule Geometry.MultiPolygonM do
  @moduledoc """
  A set of polygons from type `Geometry.PolygonM`

  `MultiPointM` implements the protocols `Enumerable` and `Collectable`.

  ## Examples

      iex> Enum.map(
      ...>   MultiPolygonM.new([
      ...>     PolygonM.new([
      ...>       LineStringM.new([
      ...>         PointM.new(11, 12, 14),
      ...>         PointM.new(11, 22, 24),
      ...>         PointM.new(31, 22, 34),
      ...>         PointM.new(11, 12, 14)
      ...>       ]),
      ...>     ]),
      ...>     PolygonM.new([
      ...>       LineStringM.new([
      ...>         PointM.new(35, 10, 14),
      ...>         PointM.new(45, 45, 24),
      ...>         PointM.new(10, 20, 34),
      ...>         PointM.new(35, 10, 14)
      ...>       ]),
      ...>       LineStringM.new([
      ...>         PointM.new(20, 30, 14),
      ...>         PointM.new(35, 35, 24),
      ...>         PointM.new(30, 20, 34),
      ...>         PointM.new(20, 30, 14)
      ...>       ])
      ...>     ])
      ...>   ]),
      ...>   fn polygon -> length(polygon) == 1 end
      ...> )
      [true, false]

      iex> Enum.into(
      ...>   [
      ...>     PolygonM.new([
      ...>       LineStringM.new([
      ...>         PointM.new(11, 12, 14),
      ...>         PointM.new(11, 22, 24),
      ...>         PointM.new(31, 22, 34),
      ...>         PointM.new(11, 12, 14)
      ...>       ])
      ...>     ])
      ...>   ],
      ...>   MultiPolygonM.new())
      %MultiPolygonM{
        polygons:
          MapSet.new([
            [
              [
                [11, 12, 14],
                [11, 22, 24],
                [31, 22, 34],
                [11, 12, 14]
              ]
            ]
          ])
      }
  """

  alias Geometry.{GeoJson, MultiPolygonM, PolygonM, WKB, WKT}

  # Polygons are stored as their raw ring coordinate lists (not PolygonM
  # structs) inside a MapSet, so duplicates collapse and order is undefined.
  defstruct polygons: MapSet.new()

  @type t :: %MultiPolygonM{polygons: MapSet.t([Geometry.coordinates()])}

  @doc """
  Creates an empty `MultiPolygonM`.

  ## Examples

      iex> MultiPolygonM.new()
      %MultiPolygonM{polygons: MapSet.new()}
  """
  @spec new :: t()
  def new, do: %MultiPolygonM{}

  @doc """
  Creates a `MultiPolygonM` from the given `Geometry.MultiPolygonM`s.

  ## Examples

      iex> MultiPolygonM.new([
      ...>   PolygonM.new([
      ...>     LineStringM.new([
      ...>       PointM.new(6, 2, 4),
      ...>       PointM.new(8, 2, 5),
      ...>       PointM.new(8, 4, 6),
      ...>       PointM.new(6, 2, 4)
      ...>     ]),
      ...>   ]),
      ...>   PolygonM.new([
      ...>     LineStringM.new([
      ...>       PointM.new(1, 1, 4),
      ...>       PointM.new(9, 1, 5),
      ...>       PointM.new(9, 8, 6),
      ...>       PointM.new(1, 1, 4)
      ...>     ]),
      ...>     LineStringM.new([
      ...>       PointM.new(6, 2, 4),
      ...>       PointM.new(7, 2, 5),
      ...>       PointM.new(7, 3, 6),
      ...>       PointM.new(6, 2, 4)
      ...>     ])
      ...>   ])
      ...> ])
      %MultiPolygonM{
        polygons:
          MapSet.new([
            [
              [[1, 1, 4], [9, 1, 5], [9, 8, 6], [1, 1, 4]],
              [[6, 2, 4], [7, 2, 5], [7, 3, 6], [6, 2, 4]]
            ],
            [[[6, 2, 4], [8, 2, 5], [8, 4, 6], [6, 2, 4]]]
          ])
      }

      iex> MultiPolygonM.new([])
      %MultiPolygonM{}
  """
  @spec new([PolygonM.t()]) :: t()
  def new([]), do: %MultiPolygonM{}

  def new(polygons),
    do: %MultiPolygonM{
      # Keep only the ring coordinates of each polygon struct.
      polygons: Enum.into(polygons, MapSet.new(), fn polygon -> polygon.rings end)
    }

  @doc """
  Returns `true` if the given `MultiPolygonM` is empty.

  ## Examples

      iex> MultiPolygonM.empty?(MultiPolygonM.new())
      true

      iex> MultiPolygonM.empty?(
      ...>   MultiPolygonM.new([
      ...>     PolygonM.new([
      ...>       LineStringM.new([
      ...>         PointM.new(1, 1, 4),
      ...>         PointM.new(1, 5, 8),
      ...>         PointM.new(5, 4, 6),
      ...>         PointM.new(1, 1, 4)
      ...>       ])
      ...>     ])
      ...>   ])
      ...> )
      false
  """
  @spec empty?(t()) :: boolean
  def empty?(%MultiPolygonM{} = multi_polygon), do: Enum.empty?(multi_polygon.polygons)

  @doc """
  Creates a `MultiPolygonM` from the given coordinates.

  ## Examples

      iex> MultiPolygonM.from_coordinates([
      ...>   [
      ...>     [[6, 2, 4], [8, 2, 5], [8, 4, 6], [6, 2, 4]]
      ...>   ], [
      ...>     [[1, 1, 4], [9, 1, 5], [9, 8, 6], [1, 1, 4]],
      ...>     [[6, 2, 3], [7, 2, 7], [7, 3, 4], [6, 2, 3]]
      ...>   ]
      ...> ])
      %MultiPolygonM{
        polygons:
          MapSet.new([
            [
              [[6, 2, 4], [8, 2, 5], [8, 4, 6], [6, 2, 4]],
            ],
            [
              [[1, 1, 4], [9, 1, 5], [9, 8, 6], [1, 1, 4]],
              [[6, 2, 3], [7, 2, 7], [7, 3, 4], [6, 2, 3]]
            ]
          ])
      }
  """
  @spec from_coordinates([[Geometry.coordinates()]]) :: t()
  def from_coordinates(coordinates) do
    %MultiPolygonM{
      polygons: MapSet.new(coordinates)
    }
  end

  @doc """
  Returns an `:ok` tuple with the `MultiPolygonM` from the given GeoJSON
  term. Otherwise returns an `:error` tuple.

  ## Examples

      iex> ~s(
      ...>   {
      ...>     "type": "MultiPolygon",
      ...>     "coordinates": [
      ...>       [
      ...>         [[6, 2, 4], [8, 2, 5], [8, 4, 6], [6, 2, 4]]
      ...>       ], [
      ...>         [[1, 1, 4], [9, 1, 5], [9, 8, 6], [1, 1, 4]],
      ...>         [[6, 2, 3], [7, 2, 7], [7, 3, 4], [6, 2, 3]]
      ...>       ]
      ...>     ]
      ...>   }
      ...> )
      ...> |> Jason.decode!()
      ...> |> MultiPolygonM.from_geo_json()
      {:ok,
       %MultiPolygonM{
         polygons:
           MapSet.new([
             [
               [[1, 1, 4], [9, 1, 5], [9, 8, 6], [1, 1, 4]],
               [[6, 2, 3], [7, 2, 7], [7, 3, 4], [6, 2, 3]]
             ],
             [
               [[6, 2, 4], [8, 2, 5], [8, 4, 6], [6, 2, 4]]
             ]
           ])
       }}
  """
  @spec from_geo_json(Geometry.geo_json_term()) :: {:ok, t()} | Geometry.geo_json_error()
  def from_geo_json(json), do: GeoJson.to_multi_polygon(json, MultiPolygonM)

  @doc """
  The same as `from_geo_json/1`, but raises a `Geometry.Error` exception if
  it fails.
  """
  @spec from_geo_json!(Geometry.geo_json_term()) :: t()
  def from_geo_json!(json) do
    case GeoJson.to_multi_polygon(json, MultiPolygonM) do
      {:ok, geometry} -> geometry
      error -> raise Geometry.Error, error
    end
  end

  @doc """
  Returns the GeoJSON term of a `MultiPolygonM`.

  There are no guarantees about the order of polygons in the returned
  `coordinates`.

  ## Examples

  ```elixir
  MultiPolygonM.to_list(
    MultiPolygonM.new([
      PolygonM.new([
        LineStringM.new([
          PointM.new(111, 112, 114),
          PointM.new(111, 122, 124),
          PointM.new(131, 122, 134),
          PointM.new(111, 112, 114)
        ])
      ]),
      PolygonM.new([
        LineStringM.new([
          PointM.new(211, 212, 214),
          PointM.new(211, 222, 224),
          PointM.new(231, 222, 234),
          PointM.new(211, 212, 214)
        ])
      ])
    ])
  )
  # =>
  # %{
  #   "type" => "MultiPolygon",
  #   "coordinates" => [
  #     [
  #       [
  #         [11, 12, 14],
  #         [11, 22, 24],
  #         [31, 22, 34],
  #         [11, 12, 14]
  #       ]
  #     ], [
  #       [
  #         [21, 22, 24],
  #         [21, 22, 24],
  #         [21, 22, 24],
  #         [21, 22, 24]
  #       ]
  #     ]
  #   ]
  # }
  ```
  """
  @spec to_geo_json(t()) :: Geometry.geo_json_term()
  def to_geo_json(%MultiPolygonM{polygons: polygons}) do
    %{
      "type" => "MultiPolygon",
      "coordinates" => MapSet.to_list(polygons)
    }
  end

  @doc """
  Returns an `:ok` tuple with the `MultiPolygonM` from the given WKT string.
  Otherwise returns an `:error` tuple.

  If the geometry contains a SRID the id is added to the tuple.

  ## Examples

      iex> MultiPolygonM.from_wkt("
      ...>   SRID=1234;MULTIPOLYGON M (
      ...>     (
      ...>        (40 40 20, 20 45 10, 45 30 30, 40 40 20)
      ...>     ), (
      ...>        (20 35 10, 10 30 20, 10 10 15, 30 5 15, 45 20 16, 20 35 10),
      ...>        (30 20 15, 20 15 10, 20 25 25, 30 20 15)
      ...>     )
      ...>   )
      ...> ")
      {:ok,
       {
         %MultiPolygonM{
           polygons:
             MapSet.new([
               [
                 [
                   [20, 35, 10],
                   [10, 30, 20],
                   [10, 10, 15],
                   [30, 5, 15],
                   [45, 20, 16],
                   [20, 35, 10]
                 ],
                 [
                   [30, 20, 15],
                   [20, 15, 10],
                   [20, 25, 25],
                   [30, 20, 15]
                 ]
               ],
               [
                 [
                   [40, 40, 20],
                   [20, 45, 10],
                   [45, 30, 30],
                   [40, 40, 20]
                 ]
               ]
             ])
         },
         1234
       }}

      iex> MultiPolygonM.from_wkt("MultiPolygon M EMPTY")
      {:ok, %MultiPolygonM{}}
  """
  @spec from_wkt(Geometry.wkt()) ::
          {:ok, t() | {t(), Geometry.srid()}} | Geometry.wkt_error()
  def from_wkt(wkt), do: WKT.to_geometry(wkt, MultiPolygonM)

  @doc """
  The same as `from_wkt/1`, but raises a `Geometry.Error` exception if it
  fails.
  """
  @spec from_wkt!(Geometry.wkt()) :: t() | {t(), Geometry.srid()}
  def from_wkt!(wkt) do
    case WKT.to_geometry(wkt, MultiPolygonM) do
      {:ok, geometry} -> geometry
      error -> raise Geometry.Error, error
    end
  end

  @doc """
  Returns the WKT representation for a `MultiPolygonM`. With option `:srid`
  an EWKT representation with the SRID is returned.

  There are no guarantees about the order of polygons in the returned
  WKT-string.

  ## Examples

  ```elixir
  MultiPolygonM.to_wkt(
    MultiPolygonM.new([
      PolygonM.new([
        LineStrinZM.new([
          PointM.new(20, 35, 10),
          PointM.new(10, 30, 20),
          PointM.new(10, 10, 15),
          PointM.new(30, 5, 15),
          PointM.new(45, 20, 16),
          PointM.new(20, 35, 10)
        ]),
        LineStringM.new([
          PointM.new(30, 20, 15),
          PointM.new(20, 15, 10),
          PointM.new(20, 25, 25),
          PointM.new(30, 20, 15)
        ])
      ]),
      PolygonM.new([
        LineStringM.new([
          PointM.new(40, 40, 20),
          PointM.new(20, 45, 10),
          PointM.new(45, 30, 30),
          PointM.new(40, 40, 20)
        ])
      ])
    ])
  )
  # Returns a string without any \\n or extra spaces (formatted just for readability):
  # SRID=478;MultiPolygon M (
  #   (
  #     (20 35 10, 10 30 20, 10 10 15, 30 5 15, 45 20 16, 20 35 10),
  #     (30 20 15, 20 15 10, 20 25 25, 30 20 15)
  #   ), (
  #     (40 40 20, 20 45 10, 45 30 30, 40 40 20)
  #   )
  # )
  ```
  """
  @spec to_wkt(t(), opts) :: Geometry.wkt()
        when opts: [srid: Geometry.srid()]
  def to_wkt(%MultiPolygonM{polygons: polygons}, opts \\ []) do
    WKT.to_ewkt(
      <<
        "MultiPolygon M ",
        polygons |> MapSet.to_list() |> to_wkt_polygons()::binary()
      >>,
      opts
    )
  end

  @doc """
  Returns the WKB representation for a `MultiPolygonM`.

  With option `:srid` an EWKB representation with the SRID is returned.

  The option `endian` indicates whether `:xdr` big endian or `:ndr` little
  endian is returned. The default is `:xdr`.

  The `:mode` determines whether a hex-string or binary is returned. The
  default is `:binary`.

  An example of a simpler geometry can be found in the description for the
  `Geometry.PointM.to_wkb/1` function.
  """
  @spec to_wkb(t(), opts) :: Geometry.wkb()
        when opts: [endian: Geometry.endian(), srid: Geometry.srid(), mode: Geometry.mode()]
  def to_wkb(%MultiPolygonM{} = multi_polygon, opts \\ []) do
    endian = Keyword.get(opts, :endian, Geometry.default_endian())
    mode = Keyword.get(opts, :mode, Geometry.default_mode())
    srid = Keyword.get(opts, :srid)

    to_wkb(multi_polygon, srid, endian, mode)
  end

  @doc """
  Returns an `:ok` tuple with the `MultiPolygonM` from the given WKB string.
  Otherwise returns an `:error` tuple.

  If the geometry contains a SRID the id is added to the tuple.

  An example of a simpler geometry can be found in the description for the
  `Geometry.PointM.from_wkb/2` function.
  """
  @spec from_wkb(Geometry.wkb(), Geometry.mode()) ::
          {:ok, t() | {t(), Geometry.srid()}} | Geometry.wkb_error()
  def from_wkb(wkb, mode \\ :binary), do: WKB.to_geometry(wkb, mode, MultiPolygonM)

  @doc """
  The same as `from_wkb/2`, but raises a `Geometry.Error` exception if it
  fails.
  """
  @spec from_wkb!(Geometry.wkb(), Geometry.mode()) :: t() | {t(), Geometry.srid()}
  def from_wkb!(wkb, mode \\ :binary) do
    case WKB.to_geometry(wkb, mode, MultiPolygonM) do
      {:ok, geometry} -> geometry
      error -> raise Geometry.Error, error
    end
  end

  @doc """
  Returns the number of elements in `MultiPolygonM`.

  ## Examples

      iex> MultiPolygonM.size(
      ...>   MultiPolygonM.new([
      ...>     PolygonM.new([
      ...>       LineStringM.new([
      ...>         PointM.new(11, 12, 14),
      ...>         PointM.new(11, 22, 24),
      ...>         PointM.new(31, 22, 34),
      ...>         PointM.new(11, 12, 14)
      ...>       ])
      ...>     ])
      ...>   ])
      ...> )
      1
  """
  @spec size(t()) :: non_neg_integer()
  def size(%MultiPolygonM{polygons: polygons}), do: MapSet.size(polygons)

  @doc """
  Checks if `MultiPolygonM` contains `point`.

  ## Examples

      iex> MultiPolygonM.member?(
      ...>   MultiPolygonM.new([
      ...>     PolygonM.new([
      ...>       LineStringM.new([
      ...>         PointM.new(11, 12, 14),
      ...>         PointM.new(11, 22, 24),
      ...>         PointM.new(31, 22, 34),
      ...>         PointM.new(11, 12, 14)
      ...>       ])
      ...>     ])
      ...>   ]),
      ...>   PolygonM.new([
      ...>     LineStringM.new([
      ...>       PointM.new(11, 12, 14),
      ...>       PointM.new(11, 22, 24),
      ...>       PointM.new(31, 22, 34),
      ...>       PointM.new(11, 12, 14)
      ...>     ])
      ...>   ])
      ...> )
      true

      iex> MultiPolygonM.member?(
      ...>   MultiPolygonM.new([
      ...>     PolygonM.new([
      ...>       LineStringM.new([
      ...>         PointM.new(11, 12, 14),
      ...>         PointM.new(11, 22, 24),
      ...>         PointM.new(31, 22, 34),
      ...>         PointM.new(11, 12, 14)
      ...>       ])
      ...>     ])
      ...>   ]),
      ...>   PolygonM.new([
      ...>     LineStringM.new([
      ...>       PointM.new(11, 12, 14),
      ...>       PointM.new(11, 22, 24),
      ...>       PointM.new(33, 22, 34),
      ...>       PointM.new(11, 12, 14)
      ...>     ])
      ...>   ])
      ...> )
      false
  """
  @spec member?(t(), PolygonM.t()) :: boolean()
  def member?(%MultiPolygonM{polygons: polygons}, %PolygonM{rings: rings}),
    do: MapSet.member?(polygons, rings)

  @doc """
  Converts `MultiPolygonM` to a list.

  ## Examples

      iex> MultiPolygonM.to_list(
      ...>   MultiPolygonM.new([
      ...>     PolygonM.new([
      ...>       LineStringM.new([
      ...>         PointM.new(11, 12, 14),
      ...>         PointM.new(11, 22, 24),
      ...>         PointM.new(31, 22, 34),
      ...>         PointM.new(11, 12, 14)
      ...>       ])
      ...>     ])
      ...>   ])
      ...> )
      [
        [
          [
            [11, 12, 14],
            [11, 22, 24],
            [31, 22, 34],
            [11, 12, 14]
          ]
        ]
      ]
  """
  @spec to_list(t()) :: [PolygonM.t()]
  def to_list(%MultiPolygonM{polygons: polygons}), do: MapSet.to_list(polygons)

  @compile {:inline, to_wkt_polygons: 1}
  defp to_wkt_polygons([]), do: "EMPTY"

  # Renders each polygon's rings and joins them with ", " inside parens.
  defp to_wkt_polygons([polygon | polygons]) do
    <<"(",
      Enum.reduce(polygons, PolygonM.to_wkt_rings(polygon), fn polygon, acc ->
        <<acc::binary(), ", ", PolygonM.to_wkt_rings(polygon)::binary()>>
      end)::binary(), ")">>
  end

  @doc false
  @compile {:inline, to_wkb: 4}
  @spec to_wkb(t(), srid, endian, mode) :: wkb
        when srid: Geometry.srid() | nil,
             endian: Geometry.endian(),
             mode: Geometry.mode(),
             wkb: Geometry.wkb()
  def to_wkb(%MultiPolygonM{polygons: polygons}, srid, endian, mode) do
    # Layout: byte-order marker, geometry-type code, optional SRID,
    # then the length-prefixed list of polygon payloads.
    <<
      WKB.byte_order(endian, mode)::binary(),
      wkb_code(endian, not is_nil(srid), mode)::binary(),
      WKB.srid(srid, endian, mode)::binary(),
      to_wkb_polygons(MapSet.to_list(polygons), endian, mode)::binary()
    >>
  end

  @compile {:inline, to_wkb_polygons: 3}
  defp to_wkb_polygons(polygons, endian, mode) do
    # Starts the accumulator with the encoded element count, then appends
    # each polygon encoded without its own SRID (srid: nil).
    Enum.reduce(polygons, WKB.length(polygons, endian, mode), fn polygon, acc ->
      <<acc::binary(), PolygonM.to_wkb(polygon, nil, endian, mode)::binary()>>
    end)
  end

  # WKB geometry-type code for MultiPolygon M: base type 6 with the
  # 0x40000000 M-flag; 0x20000000 is additionally set when a SRID is
  # embedded (EWKB), giving 0x60000006.
  @compile {:inline, wkb_code: 3}
  defp wkb_code(endian, srid?, :hex) do
    case {endian, srid?} do
      {:xdr, false} -> "40000006"
      {:ndr, false} -> "06000040"
      {:xdr, true} -> "60000006"
      {:ndr, true} -> "06000060"
    end
  end

  defp wkb_code(endian, srid?, :binary) do
    case {endian, srid?} do
      {:xdr, false} -> <<0x40000006::big-integer-size(32)>>
      {:ndr, false} -> <<0x40000006::little-integer-size(32)>>
      {:xdr, true} -> <<0x60000006::big-integer-size(32)>>
      {:ndr, true} -> <<0x60000006::little-integer-size(32)>>
    end
  end

  defimpl Enumerable do
    # credo:disable-for-next-line Credo.Check.Readability.Specs
    def count(multi_polygon) do
      {:ok, MultiPolygonM.size(multi_polygon)}
    end

    # credo:disable-for-next-line Credo.Check.Readability.Specs
    def member?(multi_polygon, val) do
      {:ok, MultiPolygonM.member?(multi_polygon, val)}
    end

    # credo:disable-for-next-line Credo.Check.Readability.Specs
    def slice(multi_polygon) do
      size = MultiPolygonM.size(multi_polygon)

      {:ok, size, &Enumerable.List.slice(MultiPolygonM.to_list(multi_polygon), &1, &2, size)}
    end

    # credo:disable-for-next-line Credo.Check.Readability.Specs
    def reduce(multi_polygon, acc, fun) do
      Enumerable.List.reduce(MultiPolygonM.to_list(multi_polygon), acc, fun)
    end
  end

  defimpl Collectable do
    # Accumulates collected polygons in a plain list, then merges their
    # rings into the MapSet's map in one :done step.
    # NOTE(review): this reaches into MapSet's internal :map field, which
    # is private API and may break across Elixir versions — confirm against
    # the targeted Elixir release.
    # credo:disable-for-next-line Credo.Check.Readability.Specs
    def into(%MultiPolygonM{polygons: polygons}) do
      fun = fn
        list, {:cont, x} ->
          [{x, []} | list]

        list, :done ->
          map =
            Map.merge(
              polygons.map,
              Enum.into(list, %{}, fn {polygon, []} -> {polygon.rings, []} end)
            )

          %MultiPolygonM{polygons: %{polygons | map: map}}

        _list, :halt ->
          :ok
      end

      {[], fun}
    end
  end
end
0.931361
0.604107
multi_polygon_m.ex
starcoder
defmodule Membrane.MP4.Muxer.CMAF.Segment.Helper do
  @moduledoc false
  use Bunch

  # Builds a CMAF segment of at least `duration`: first collects `duration`
  # worth of samples per pad, then keeps collecting until every track's next
  # sample is a keyframe, so the following segment can start cleanly.
  # Returns {:ok, %{pad => samples}, state} or {:error, :not_enough_data}.
  @spec get_segment(map(), non_neg_integer()) :: {:ok, map(), map()} | {:error, :not_enough_data}
  def get_segment(state, duration) do
    with {:ok, segment_part_1, state} <- collect_duration(state, duration),
         {:ok, segment_part_2, state} <- collect_until_keyframes(state) do
      segment =
        Map.new(segment_part_1, fn {pad, samples} ->
          samples_part_2 = Map.get(segment_part_2, pad, [])
          {pad, samples ++ samples_part_2}
        end)

      {:ok, segment, state}
    end
  end

  # Flushes every buffered sample (stored newest-first, hence the reverse)
  # and empties the buffers. Used to emit the final segment.
  @spec take_all_samples(map()) :: {:ok, map(), map()}
  def take_all_samples(state) do
    samples = Map.new(state.samples, fn {key, samples} -> {key, Enum.reverse(samples)} end)

    {:ok, samples, %{state | samples: %{}}}
  end

  # Same as get_segment/2, but additionally clears :awaiting_caps so the
  # pending caps change takes effect with the next (discontinuous) segment.
  @spec get_discontinuity_segment(map(), non_neg_integer()) ::
          {:error, :not_enough_data} | {:ok, map, map}
  def get_discontinuity_segment(state, duration) do
    with {:ok, segment, state} <- get_segment(state, duration) do
      {:ok, segment, %{state | awaiting_caps: nil}}
    end
  end

  # Collects partial CMAF segment so that the length matches given duration
  defp collect_duration(state, target_duration) do
    use Ratio

    # Target end timestamp: the furthest `elapsed_time + target_duration`
    # across all tracks, so every track reaches at least that point.
    end_timestamp =
      Enum.map(state.pad_to_track_data, fn {_key, track_data} ->
        Ratio.to_float(track_data.elapsed_time + target_duration)
      end)
      |> Enum.max()

    partial_segments =
      for {track, samples} <- state.samples do
        collect_from_track_to_timestamp(track, samples, end_timestamp)
      end

    # All-or-nothing: if any track cannot reach the target, nothing is taken.
    if Enum.any?(partial_segments, &(&1 == {:error, :not_enough_data})) do
      {:error, :not_enough_data}
    else
      state =
        Enum.reduce(partial_segments, state, fn {:ok, {track, _segment, leftover}}, state ->
          put_in(state, [:samples, track], leftover)
        end)

      # Buffers are newest-first; reverse restores chronological order.
      segment =
        Map.new(partial_segments, fn {:ok, {track, segment, _leftover}} ->
          {track, Enum.reverse(segment)}
        end)

      {:ok, segment, state}
    end
  end

  defp collect_from_track_to_timestamp(_track, [], _target_duration),
    do: {:error, :not_enough_data}

  # Splits the (newest-first) sample list at `desired_end`: `leftover` keeps
  # samples newer than the target, `samples` is the collected part.
  defp collect_from_track_to_timestamp(track, samples, desired_end) do
    use Ratio, comparison: true
    {leftover, samples} = Enum.split_while(samples, &(&1.dts > desired_end))

    # NOTE(review): if every sample's dts exceeds `desired_end`, `samples`
    # is empty here and hd/1 raises — presumably callers guarantee at least
    # one sample at or before the target; confirm.
    if hd(samples).dts + hd(samples).metadata.duration >= desired_end do
      {:ok, {track, samples, leftover}}
    else
      {:error, :not_enough_data}
    end
  end

  # Collects the samples until there is a keyframe on all tracks.
  # 1. Check if all tracks begin with keyframe. If that is the case, algorithm finishes
  # 2. Select the track with the smallest dts at the beginning. Collect samples until it doesn't have the smallest dts.
  # 3. Go to step 1
  defp collect_until_keyframes(state) do
    # Work on chronologically-ordered buffers, then flip back to the
    # newest-first storage order before returning the state.
    with {:ok, segment, state} <- do_collect_until_keyframes(reverse_samples(state), nil) do
      {:ok, segment, reverse_samples(state)}
    end
  end

  defp do_collect_until_keyframes(state, last_target_pad) do
    use Ratio, comparison: true

    # Pad whose head sample ends earliest; empty pads sort last (:infinity).
    {target_pad, _samples} =
      Enum.min_by(state.samples, fn {_track, samples} ->
        case samples do
          [] -> :infinity
          [sample | _rest] -> Ratio.to_float(sample.dts + sample.metadata.duration)
        end
      end)

    # This holds true if and only if all tracks are balanced to begin with.
    # Therefore, this algorithm will not destroy the balance of tracks, but it is not guaranteed to restore it
    timestamps_balanced? = target_pad != last_target_pad or Map.keys(state.samples) == [target_pad]

    cond do
      Enum.any?(state.samples, fn {_track, samples} -> samples == [] end) ->
        {:error, :not_enough_data}

      timestamps_balanced? and
          Enum.all?(state.samples, fn {_track, samples} -> starts_with_keyframe?(samples) end) ->
        {:ok, %{}, state}

      true ->
        # Consume the earliest sample and recurse; on the way back up,
        # prepend it so the collected segment stays in order.
        {sample, state} = get_and_update_in(state, [:samples, target_pad], &List.pop_at(&1, 0))

        with {:ok, segment, state} <- do_collect_until_keyframes(state, target_pad) do
          segment = Map.update(segment, target_pad, [sample], &[sample | &1])
          {:ok, segment, state}
        end
    end
  end

  defp starts_with_keyframe?([]), do: false

  # Samples without mp4 payload metadata (e.g. audio) default to key_frame? true.
  defp starts_with_keyframe?([target | _rest]),
    do: Map.get(target.metadata, :mp4_payload, %{}) |> Map.get(:key_frame?, true)

  # Reverses every pad's sample list (storage order <-> chronological order).
  defp reverse_samples(state),
    do:
      Map.update!(
        state,
        :samples,
        &Map.new(&1, fn {key, samples} -> {key, Enum.reverse(samples)} end)
      )
end
lib/membrane_mp4/muxer/cmaf/segment_helper.ex
0.875235
0.581006
segment_helper.ex
starcoder
defmodule Gringotts.Gateways.Paymill do
  @moduledoc """
  [PAYMILL][home] gateway implementation.

  For reference see [PAYMILL's API (v2.1) documentation][docs].

  The following features of PAYMILL are implemented:

  | Action    | Method        |
  | ------    | ------        |
  | Authorize | `authorize/3` |
  | Capture   | `capture/3`   |
  | Purchase  | `purchase/3`  |
  | Refund    | `refund/3`    |
  | Void      | `void/2`      |

  ## The `opts` argument

  Most `Gringotts` API calls accept an optional `keyword` list `opts` to supply
  optional arguments for transactions with the PAYMILL gateway.

  **Currently, no optional params are supported.**

  ## Registering your PAYMILL account at `Gringotts`

  After [making an account on PAYMILL][dashboard], head to the dashboard and
  find your account "secrets".

  Here's how the secrets map to the required configuration parameters for
  PAYMILL:

  | Config parameter | PAYMILL secret  |
  | -------          | ----            |
  | `:private_key`   | **Private Key** |
  | `:public_key`    | **Public Key**  |

  Your Application config **must include the `:private_key`, `:public_key`
  fields** and would look something like this:

      config :gringotts, Gringotts.Gateways.Paymill,
        private_key: "your_secret_private_key",
        public_key: "your_secret_public_key"

  ## Scope of this module

  * PAYMILL processes money in the sub-divided unit of currency (ie, in case of
    USD it works in cents).
  * This module does not offer direct API integration for [PCI DSS][pci-dss]
    compliant merchants. Hence, you can use this module even if your
    infrastructure (servers) are not PCI-DSS compliant!
  * To use their product, a merchant (aka user of this library) would have to
    use their [Bridge (js integration)][bridge] (or equivalent) in your
    application frontend to collect Credit/Debit Card data.
  * This would obtain a unique `card_token` at the client-side which can be
    used by this module for various operations like `authorize/3` and
    `purchase/3`.

  [bridge]: https://developers.paymill.com/guides/reference/paymill-bridge.html

  ## Supported countries

  As a PAYMILL merchant you can accept payments from around the globe. For more
  details refer to [Paymill country support][country-support].

  ## Supported currencies

  Your transactions will be processed in your native currency. For more
  information refer to [Paymill currency support][currency-support].

  ## Following the examples

  1. First, set up a sample application and configure it to work with PAYMILL.
      - You could do that from scratch by following our [Getting Started][gs]
        guide.
      - To save you time, we recommend [cloning our example repo][example-repo]
        that gives you a pre-configured sample app ready-to-go.
        + You could use the same config or update it with your "secrets" as
          described [above](#module-registering-your-paymill-account-at-gringotts).
  2. Run an `iex` session with `iex -S mix` and add some variable bindings and
     aliases to it (to save some time):

      ```
      iex> alias Gringotts.{Response, CreditCard, Gateways.Paymill}
      iex> amount = Money.new(4200, :EUR)
      ```

  We'll be using these in the examples below.

  [home]: https://paymill.com
  [docs]: https://developers.paymill.com
  [dashboard]: https://app.paymill.com/user/register
  [gs]: https://github.com/aviabird/gringotts/wiki
  [example-repo]: https://github.com/aviabird/gringotts_example
  [currency-support]: https://www.paymill.com/en/faq/in-which-currency-will-my-transactions-be-processed-and-payout-in
  [country-support]: https://www.paymill.com/en/faq/which-countries-is-paymill-available-in
  [pci-dss]: https://www.paymill.com/en/pci-dss
  """

  use Gringotts.Gateways.Base
  use Gringotts.Adapter, required_config: [:private_key, :public_key]

  alias Gringotts.{Money, Response}

  @base_url "https://api.paymill.com/v2.1/"
  @headers [{"Content-Type", "application/x-www-form-urlencoded"}]

  # Human-readable messages for PAYMILL's numeric response codes; used to
  # populate `Response.message`. 20_000/50_810 are treated as success.
  @response_code %{
    10_001 => "Undefined response",
    10_002 => "Waiting for something",
    11_000 => "Retry request at a later time",
    20_000 => "Operation successful",
    20_100 => "Funds held by acquirer",
    20_101 => "Funds held by acquirer because merchant is new",
    20_200 => "Transaction reversed",
    20_201 => "Reversed due to chargeback",
    20_202 => "Reversed due to money-back guarantee",
    20_203 => "Reversed due to complaint by buyer",
    20_204 => "Payment has been refunded",
    20_300 => "Reversal has been canceled",
    22_000 => "Initiation of transaction successful",
    30_000 => "Transaction still in progress",
    30_100 => "Transaction has been accepted",
    31_000 => "Transaction pending",
    31_100 => "Pending due to address",
    31_101 => "Pending due to uncleared eCheck",
    31_102 => "Pending due to risk review",
    31_103 => "Pending due regulatory review",
    31_104 => "Pending due to unregistered/unconfirmed receiver",
    31_200 => "Pending due to unverified account",
    31_201 => "Pending due to non-captured funds",
    31_202 => "Pending due to international account (accept manually)",
    31_203 => "Pending due to currency conflict (accept manually)",
    31_204 => "Pending due to fraud filters (accept manually)",
    40_000 => "Problem with transaction data",
    40_001 => "Problem with payment data",
    40_002 => "Invalid checksum",
    40_100 => "Problem with credit card data",
    40_101 => "Problem with CVV",
    40_102 => "Card expired or not yet valid",
    40_103 => "Card limit exceeded",
    40_104 => "Card is not valid",
    40_105 => "Expiry date not valid",
    40_106 => "Credit card brand required",
    40_200 => "Problem with bank account data",
    40_201 => "Bank account data combination mismatch",
    40_202 => "User authentication failed",
    40_300 => "Problem with 3-D Secure data",
    40_301 => "Currency/amount mismatch",
    40_400 => "Problem with input data",
    40_401 => "Amount too low or zero",
    40_402 => "Usage field too long",
    40_403 => "Currency not allowed",
    40_410 => "Problem with shopping cart data",
    40_420 => "Problem with address data",
    40_500 => "Permission error with acquirer API",
    40_510 => "Rate limit reached for acquirer API",
    42_000 => "Initiation of transaction failed",
    42_410 => "Initiation of transaction expired",
    50_000 => "Problem with back end",
    50_001 => "Country blacklisted",
    50_002 => "IP address blacklisted",
    50_004 => "Live mode not allowed",
    50_005 => "Insufficient permissions (API key)",
    50_100 => "Technical error with credit card",
    50_101 => "Error limit exceeded",
    50_102 => "Card declined",
    50_103 => "Manipulation or stolen card",
    50_104 => "Card restricted",
    50_105 => "Invalid configuration data",
    50_200 => "Technical error with bank account",
    50_201 => "Account blacklisted",
    50_300 => "Technical error with 3-D Secure",
    50_400 => "Declined because of risk issues",
    50_401 => "Checksum was wrong",
    50_402 => "Bank account number was invalid (formal check)",
    50_403 => "Technical error with risk check",
    50_404 => "Unknown error with risk check",
    50_405 => "Unknown bank code",
    50_406 => "Open chargeback",
    50_407 => "Historical chargeback",
    50_408 => "Institution / public bank account (NCA)",
    50_409 => "KUNO/Fraud",
    50_410 => "Personal Account Protection (PAP)",
    50_420 => "Rejected due to acquirer fraud settings",
    50_430 => "Rejected due to acquirer risk settings",
    50_440 => "Failed due to restrictions with acquirer account",
    50_450 => "Failed due to restrictions with user account",
    50_500 => "General timeout",
    50_501 => "Timeout on side of the acquirer",
    50_502 => "Risk management transaction timeout",
    50_600 => "Duplicate operation",
    50_700 => "Cancelled by user",
    50_710 => "Failed due to funding source",
    50_711 => "Payment method not usable, use other payment method",
    50_712 => "Limit of funding source was exceeded",
    50_713 => "Means of payment not reusable (canceled by user)",
    50_714 => "Means of payment not reusable (expired)",
    50_720 => "Rejected by acquirer",
    50_730 => "Transaction denied by merchant",
    50_800 => "Preauthorisation failed",
    50_810 => "Authorisation has been voided",
    50_820 => "Authorisation period expired"
  }

  @doc """
  Performs a (pre) Authorize operation.

  The authorization validates the `card` details for `token` with the banking
  network, places a hold on the transaction `amount` in the customer’s issuing
  bank and also triggers risk management. Funds are not transferred.

  The authorization token is available in the `Response.id` field.

  ## Example

  The following example shows how one would (pre) authorize a payment of €42 on
  a sample `token`.
  ```
  iex> amount = Money.new(4200, :EUR)
  iex> card_token = "<KEY>"
  iex> {:ok, auth_result} = Gringotts.authorize(Gringotts.Gateways.Paymill, amount, card_token, opts)
  iex> auth_result.id # This is the preauth-id
  ```
  """
  @spec authorize(Money.t(), String.t(), keyword) :: {:ok | :error, Response.t()}
  def authorize(amount, card_token, opts) do
    params = [{:token, card_token} | amount_params(amount)]

    commit(:post, "preauthorizations", params, opts)
  end

  @doc """
  Captures a pre-authorized `amount`.

  `amount` is transferred to the merchant account by PAYMILL when it is smaller
  or equal to the amount used in the pre-authorization referenced by
  `preauth_id`.

  ## Note

  PAYMILL allows partial captures and unlike many other gateways, and releases
  any remaining amount back to the payment source.
  > Thus, the same pre-authorisation ID **cannot** be used to perform multiple
  captures.

  ## Example

  The following example shows how one would (partially) capture a previously
  authorized a payment worth €42 by referencing the obtained authorization
  `id`.
  ```
  iex> amount = Money.new(4200, :EUR)
  iex> preauth_id = auth_result.id
  # preauth_id = "some_authorization_id"
  iex> Gringotts.capture(Gringotts.Gateways.Paymill, preauth_id, amount, opts)
  ```
  """
  @spec capture(String.t(), Money.t(), keyword) :: {:ok | :error, Response.t()}
  def capture(id, amount, opts) do
    params = [{:preauthorization, id} | amount_params(amount)]

    commit(:post, "transactions", params, opts)
  end

  @doc """
  Transfers `amount` from the customer to the merchant.

  PAYMILL attempts to process a purchase on behalf of the customer, by debiting
  `amount` from the customer's account by charging the customer's `card` via
  `token`.

  ## Example

  The following example shows how one would process a payment worth €42 in
  one-shot, without (pre) authorization.
  ```
  iex> amount = Money.new(4200, :EUR)
  iex> token = "<KEY>"
  iex> {:ok, purchase_result} = Gringotts.purchase(Gringotts.Gateways.Paymill, amount, token, opts)
  ```
  """
  @spec purchase(Money.t(), String.t(), keyword) :: {:ok | :error, Response.t()}
  def purchase(amount, card_token, opts) do
    param = [{:token, card_token} | amount_params(amount)]

    commit(:post, "transactions", param, opts)
  end

  @doc """
  Refunds the `amount` to the customer's account with reference to a prior
  transfer.

  PAYMILL processes a full or partial refund worth `amount`, where
  `transaction_id` references a previous `purchase/3` or `capture/3` result.

  Multiple partial refunds are allowed on the same `transaction_id` till all
  the captured/purchased amount has been refunded.

  ## Example

  The following example shows how one would refund a previous purchase (and
  similarly for captures).
  ```
  iex> transaction_id = purchase_result.id
  iex> amount = Money.new(4200, :EUR)
  iex> Gringotts.refund(Gringotts.Gateways.Paymill, amount, transaction_id)
  ```
  """
  @spec refund(Money.t(), String.t(), keyword) :: {:ok | :error, Response.t()}
  def refund(amount, id, opts) do
    {_, int_value, _} = Money.to_integer(amount)

    commit(:post, "refunds/#{id}", [amount: int_value], opts)
  end

  @doc """
  Voids the referenced authorization.

  Attempts a reversal of a previous `authorize/3` referenced by `preauth_id`.

  ## Example

  The following example shows how one would void a previous authorization.
  ```
  iex> preauth_id = auth_result.id
  iex> Gringotts.void(Gringotts.Gateways.Paymill, preauth_id)
  ```
  """
  @spec void(String.t(), keyword) :: {:ok | :error, Response.t()}
  def void(id, opts) do
    commit(:delete, "preauthorizations/#{id}", [], opts)
  end

  # Fires the HTTP request against PAYMILL and normalizes the result into a
  # Gringotts.Response via respond/1.
  defp commit(method, endpoint, params, opts) do
    method
    |> HTTPoison.request(base_url(opts) <> endpoint, {:form, params}, headers(opts))
    |> respond()
  end

  # Candidate locations of interesting fields inside PAYMILL's nested JSON
  # replies; the shape varies per endpoint, so each field is probed at all of
  # its known paths with get_either/2.
  @response_code_paths [
    ~w[transaction response_code],
    ~w[data response_code],
    ~w[data transaction response_code]
  ]
  @token_paths [~w[id], ~w[data id]]
  @reason_paths [~w[error], ~w[exception]]
  @fraud_paths [
    ~w[transaction is_fraud],
    ~w[data transaction is_fraud],
    ~w[data transaction is_markable_as_fraud],
    ~w[data is_markable_as_fraud]
  ]

  # Returns the first non-nil value found at any of the candidate paths.
  defp get_either(collection, paths) do
    paths
    |> Stream.map(&get_in(collection, &1))
    |> Enum.find(fn x -> x != nil end)
  end

  # HTTP 200: parse the body and map PAYMILL's gateway code to :ok/:error.
  defp respond({:ok, %{status_code: 200, body: body}}) do
    case Poison.decode(body) do
      {:ok, parsed_resp} ->
        gateway_code = get_either(parsed_resp, @response_code_paths)
        # 20_000 = "Operation successful"; 50_810 = "Authorisation has been
        # voided", which is the expected outcome of void/2.
        status = if gateway_code in [20_000, 50_810], do: :ok, else: :error

        {status,
         %Response{
           id: get_either(parsed_resp, @token_paths),
           token: parsed_resp["transaction"]["identification"]["uniqueId"],
           status_code: 200,
           gateway_code: gateway_code,
           reason: get_either(parsed_resp, @reason_paths),
           message: @response_code[gateway_code],
           raw: body,
           fraud_review: get_either(parsed_resp, @fraud_paths)
         }}

      # NOTE(review): recent Poison versions return `{:error, _}` rather than
      # a bare `:error` from decode/1 — confirm this clause is reachable with
      # the Poison version in mix.lock.
      :error ->
        {:error, %Response{status_code: 200, raw: body, reason: "could not parse paymill response"}}
    end
  end

  # Any non-200 HTTP status is surfaced as an error with the raw body.
  defp respond({:ok, %{status_code: status_code, body: body}}) do
    {:error,
     %Response{
       status_code: status_code,
       raw: body
     }}
  end

  # Transport-level failure (DNS, timeout, ...): no response was received.
  defp respond({:error, %HTTPoison.Error{} = error}) do
    {
      :error,
      Response.error(
        reason: "network related failure",
        message: "HTTPoison says '#{error.reason}' [ID: #{error.id || "nil"}]"
      )
    }
  end

  # Basic-auth header built from the configured private key.
  defp headers(opts) do
    [
      {"Authorization", "Basic #{Base.encode64(get_in(opts, [:config, :private_key]))}"}
      | @headers
    ]
  end

  # PAYMILL expects amounts in the currency's smallest unit (e.g. cents).
  defp amount_params(money) do
    {currency, int_value, _} = Money.to_integer(money)
    [amount: int_value, currency: currency]
  end

  # :test_url in the config overrides the live endpoint (used by tests).
  defp base_url(opts), do: opts[:config][:test_url] || @base_url
end
lib/gringotts/gateways/paymill.ex
0.877161
0.847021
paymill.ex
starcoder
defmodule Tuple do
  @moduledoc """
  Functions for working with tuples.

  Note that the most common tuple operations live in `Kernel`:
  `elem/2` reads by index, `put_elem/3` replaces by index and
  `tuple_size/1` returns the number of elements.

  Tuples are fixed-size containers whose elements are stored contiguously
  in memory, so reading any position is constant time while any update
  produces a shallow copy in linear time. They are denoted with curly
  braces and may mix element types freely:

      iex> {}
      {}
      iex> {1, :two, "three"}
      {1, :two, "three"}

  Tuples are typically used for multiple return values and for tagged
  results such as `{:ok, contents}` or `{:error, reason}` (as returned by
  `File.read/1`). To manipulate a growing collection of elements, use a
  list instead — `Enum` functions do not work on tuples.

  The insert/remove helpers in this module are rarely needed in practice;
  prefer rebuilding the tuple with pattern matching:

      tuple = {:ok, :example}

      # Avoid
      Tuple.insert_at(tuple, 2, %{})

      # Prefer
      {:ok, atom} = tuple
      {:ok, atom, %{}}

  """

  @doc """
  Builds a tuple of `size` elements, each set to `data`.

  Inlined by the compiler.

  ## Examples

      iex> Tuple.duplicate(:hello, 3)
      {:hello, :hello, :hello}

  """
  @spec duplicate(term, non_neg_integer) :: tuple
  def duplicate(data, size), do: :erlang.make_tuple(size, data)

  @doc """
  Returns a new tuple with `value` placed at the zero-based `index`,
  shifting later elements one position to the right.

  Raises an `ArgumentError` if `index` is negative or greater than the
  tuple's size.

  Inlined by the compiler.

  ## Examples

      iex> tuple = {:bar, :baz}
      iex> Tuple.insert_at(tuple, 0, :foo)
      {:foo, :bar, :baz}
      iex> Tuple.insert_at(tuple, 2, :bong)
      {:bar, :baz, :bong}

  """
  @spec insert_at(tuple, non_neg_integer, term) :: tuple
  # :erlang.insert_element/3 is one-based, hence the + 1.
  def insert_at(tuple, index, value), do: :erlang.insert_element(index + 1, tuple, value)

  @doc """
  Returns a new tuple consisting of the elements of `tuple` followed by
  `value` as the last element.

  Inlined by the compiler.

  ## Examples

      iex> tuple = {:foo, :bar}
      iex> Tuple.append(tuple, :baz)
      {:foo, :bar, :baz}

  """
  @spec append(tuple, term) :: tuple
  def append(tuple, value), do: :erlang.append_element(tuple, value)

  @doc """
  Returns a new tuple with the element at the zero-based `index` removed.

  Raises an `ArgumentError` if `index` is negative or not less than the
  tuple's size.

  Inlined by the compiler.

  ## Examples

      iex> tuple = {:foo, :bar, :baz}
      iex> Tuple.delete_at(tuple, 0)
      {:bar, :baz}

  """
  @spec delete_at(tuple, non_neg_integer) :: tuple
  # :erlang.delete_element/2 is one-based, hence the + 1.
  def delete_at(tuple, index), do: :erlang.delete_element(index + 1, tuple)

  @doc """
  Returns a list holding all elements of `tuple`, in order.

  Inlined by the compiler.

  ## Examples

      iex> tuple = {:foo, :bar, :baz}
      iex> Tuple.to_list(tuple)
      [:foo, :bar, :baz]

  """
  @spec to_list(tuple) :: list
  def to_list(tuple), do: :erlang.tuple_to_list(tuple)
end
lib/elixir/lib/tuple.ex
0.903655
0.827828
tuple.ex
starcoder
defmodule Maru.Params.Types.List do @moduledoc """ Buildin Type: List ## Parser Arguments * `:unique` - whether unique the data in list with `Enum.uniq` * `true` - unique the list * `false` (default) - do NOT unique the list * `:string_strategy` - how to parse a string value * `:codepoints` (default) - decode by `String.codepoints/1` * `:charlist` - decode by `String.to_charlist/1` ## Validator Arguments * `:max_length` - max length of the list * `:min_length` - min length of the list * `:length_range` - length range of the list ## Examples: requires :tags, List[String], max_length: 3 requires :chars, List, string_strategy: :charlist """ use Maru.Params.Type def parser_arguments, do: [:string_strategy, :unique] def validator_arguments, do: [:min_length, :max_length, :length_range] def parse(input, args) when is_list(input) do args |> Map.get(:unique, false) |> case do true -> {:ok, Enum.uniq(input)} false -> {:ok, input} end end def parse(input, args) when is_binary(input) do args |> Map.get(:string_strategy, :codepoints) |> case do :codepoints -> String.codepoints(input) :charlist -> String.to_charlist(input) end |> parse(args) end def parse(input, _) do {:error, :parse, "unknown input format as list: #{inspect(input)}"} end def validate(parsed, min_length: min_length) do if length(parsed) >= min_length do {:ok, parsed} else {:error, :validate, "min length: #{min_length}"} end end def validate(parsed, max_length: max_length) do if length(parsed) <= max_length do {:ok, parsed} else {:error, :validate, "max length: #{max_length}"} end end def validate(parsed, length_range: %Range{first: min_length, last: max_length}) do len = length(parsed) if len >= min_length and len <= max_length do {:ok, parsed} else {:error, :validate, "length range: #{min_length}..#{max_length}"} end end end
lib/maru/params/types/list.ex
0.770076
0.601974
list.ex
starcoder
defmodule Membrane.Audiometer.Peakmeter.Amplitude do
  @moduledoc false

  alias Membrane.RawAudio

  @doc """
  Finds frame within given payload that has the highest amplitude for any of
  its channels.

  On success, it returns `{:ok, {values, rest}}`, where:

  * `values` is a list of amplitudes per channel expressed in decibels, or one
    of `:infinity` or `:clip`,
  * `rest` is a binary containing payload remaining after processing if given
    payload contained incomplete frames.

  On error it returns `{:error, reason}`, where `reason` is one of the
  following:

  * `:empty` - the payload was empty.
  """
  @spec find_amplitudes(binary, RawAudio.t()) ::
          {:ok, {[number | :infinity | :clip], binary}} | {:error, any}
  def find_amplitudes(<<>>, _caps) do
    {:error, :empty}
  end

  def find_amplitudes(payload, caps) do
    # Get silence as a point of reference: sample values are measured as
    # distance from the silence value for this sample format.
    silence_payload = RawAudio.silence(caps)
    silence_value = RawAudio.sample_to_value(silence_payload, caps)

    # Find max sample values within given payload.
    # NOTE(review): if `payload` holds less than one full frame, the `nil`
    # accumulator is returned unchanged and the conversion below will raise —
    # confirm callers always deliver at least one complete frame.
    {:ok, {frame_values, rest}} =
      do_find_frame_with_max_values(
        payload,
        caps,
        RawAudio.frame_size(caps),
        RawAudio.sample_size(caps),
        silence_value,
        nil
      )

    # Convert values into decibels, using the format's full scale as 0 dB.
    max_amplitude_value =
      if RawAudio.sample_type_float?(caps) do
        1.0
      else
        # +1 is needed so max int value for frame does not cause clipping
        RawAudio.sample_max(caps) - silence_value + 1
      end

    {:ok, frame_values_in_dbs} = do_convert_values_to_dbs(frame_values, max_amplitude_value, [])

    # Return amplitudes in dBs and remaining payload
    {:ok, {frame_values_in_dbs, rest}}
  end

  # If we have at least one frame in the payload, get values of its samples.
  defp do_find_frame_with_max_values(payload, caps, frame_size, sample_size, silence_value, acc)
       when byte_size(payload) >= frame_size do
    <<frame::binary-size(frame_size), rest::binary>> = payload

    # Get list of sample values per channel, normalized to 0..n scale
    {:ok, sample_values} = do_sample_to_channel_values(frame, caps, sample_size, silence_value, [])

    # Check if the new frame is louder than the last known loudest
    {:ok, max_sample_values} = do_find_max_sample_values(acc, sample_values)

    do_find_frame_with_max_values(
      rest,
      caps,
      frame_size,
      sample_size,
      silence_value,
      max_sample_values
    )
  end

  # If less than one frame remains in the payload, return the amplitudes
  # and the remaining payload.
  defp do_find_frame_with_max_values(
         payload,
         _caps,
         _frame_size,
         _sample_size,
         _silence_value,
         acc
       ),
       do: {:ok, {acc, payload}}

  # If there's any payload, convert sample data to actual values normalized on 0..n scale
  defp do_sample_to_channel_values(payload, caps, sample_size, silence_value, acc)
       when byte_size(payload) >= sample_size do
    <<sample::binary-size(sample_size), rest::binary>> = payload
    # Absolute distance from silence == amplitude of this sample.
    value = (RawAudio.sample_to_value(sample, caps) - silence_value) |> abs

    do_sample_to_channel_values(rest, caps, sample_size, silence_value, [value | acc])
  end

  # If there's no more payload in the frame, return a list with sample values per channel.
  defp do_sample_to_channel_values(<<>>, _caps, _sample_size, _silence_value, acc),
    do: {:ok, Enum.reverse(acc)}

  # If there's no previous sample known, current one has to be louder.
  defp do_find_max_sample_values(nil, current), do: {:ok, current}

  # Use pattern matching for the most common cases for a performance reasons:
  # Mono, previous frame is louder
  defp do_find_max_sample_values([previous_value] = previous, [current_value])
       when previous_value >= current_value,
       do: {:ok, previous}

  # Mono, current frame is louder
  defp do_find_max_sample_values([previous_value], [current_value] = current)
       when previous_value < current_value,
       do: {:ok, current}

  # Stereo, any of the channels in the previous frame is louder than any of the channels in the current frame
  defp do_find_max_sample_values([previous_value1, previous_value2] = previous, [
         current_value1,
         current_value2
       ])
       when (previous_value1 >= current_value1 and previous_value1 >= current_value2) or
              (previous_value2 >= current_value1 and previous_value2 >= current_value2),
       do: {:ok, previous}

  # Stereo, any of the channels in the current frame is louder than any of the channels in the previous frame
  defp do_find_max_sample_values(
         [previous_value1, previous_value2],
         [current_value1, current_value2] = current
       )
       when (current_value1 >= previous_value1 and current_value1 >= previous_value2) or
              (current_value2 >= previous_value1 and current_value2 >= previous_value2),
       do: {:ok, current}

  # Generic handler for multi-channel audio
  defp do_find_max_sample_values(previous, current) do
    if Enum.max(previous) >= Enum.max(current) do
      {:ok, previous}
    else
      {:ok, current}
    end
  end

  # Converts each amplitude to dBFS: values above full scale map to :clip,
  # zero (log10 undefined) maps to :infinity.
  defp do_convert_values_to_dbs([head | tail], max_amplitude_value, acc) do
    value =
      cond do
        head > max_amplitude_value -> :clip
        head > 0 -> 20 * :math.log10(head / max_amplitude_value)
        true -> :infinity
      end

    do_convert_values_to_dbs(tail, max_amplitude_value, [value | acc])
  end

  defp do_convert_values_to_dbs([], _max_amplitude_value, acc), do: {:ok, Enum.reverse(acc)}
end
lib/membrane_element_audiometer/amplitude.ex
0.88916
0.642783
amplitude.ex
starcoder
defmodule AWS.ElasticInference do
  @moduledoc """
  Elastic Inference public APIs.
  """

  @doc """
  Describes the locations in which a given accelerator type or set of types is
  present in a given region.
  """
  def describe_accelerator_offerings(client, input, options \\ []) do
    path_ = "/describe-accelerator-offerings"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Describes the accelerator types available in a given region, as well as their
  characteristics, such as memory and throughput.
  """
  def describe_accelerator_types(client, options \\ []) do
    path_ = "/describe-accelerator-types"
    headers = []
    query_ = []
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Describes information over a provided set of accelerators belonging to an
  account.
  """
  def describe_accelerators(client, input, options \\ []) do
    path_ = "/describe-accelerators"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Returns all tags of an Elastic Inference Accelerator.
  """
  def list_tags_for_resource(client, resource_arn, options \\ []) do
    path_ = "/tags/#{URI.encode(resource_arn)}"
    headers = []
    query_ = []
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Adds the specified tags to an Elastic Inference Accelerator.
  """
  def tag_resource(client, resource_arn, input, options \\ []) do
    path_ = "/tags/#{URI.encode(resource_arn)}"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Removes the specified tags from an Elastic Inference Accelerator.
  """
  def untag_resource(client, resource_arn, input, options \\ []) do
    path_ = "/tags/#{URI.encode(resource_arn)}"
    headers = []

    # "tagKeys" is moved out of the input body into the query string.
    {query_, input} =
      [
        {"tagKeys", "tagKeys"},
      ]
      |> AWS.Request.build_params(input)

    request(client, :delete, path_, query_, headers, input, options, nil)
  end

  # Builds the URL, signs the request (SigV4) for the "elastic-inference"
  # service and dispatches it. `success_status_code` of nil means the default
  # success codes (200/202/204) are accepted.
  @spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
          {:ok, map() | nil, map()} | {:error, term()}
  defp request(client, method, path, query, headers, input, options, success_status_code) do
    client = %{client | service: "elastic-inference"}
    host = build_host("api.elastic-inference", client)

    url =
      host
      |> build_url(path, client)
      |> add_query(query, client)

    additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
    headers = AWS.Request.add_headers(additional_headers, headers)

    payload = encode!(client, input)
    headers = AWS.Request.sign_v4(client, method, url, headers, payload)
    perform_request(client, method, url, payload, headers, options, success_status_code)
  end

  defp perform_request(client, method, url, payload, headers, options, success_status_code) do
    case AWS.Client.request(client, method, url, payload, headers, options) do
      # The two `when` clauses act as an OR: accept the default success codes
      # when no specific code is expected, or the exact expected code.
      {:ok, %{status_code: status_code, body: body} = response}
      when is_nil(success_status_code) and status_code in [200, 202, 204]
      when status_code == success_status_code ->
        # Empty bodies decode to nil rather than raising on "".
        body = if(body != "", do: decode!(client, body))
        {:ok, body, response}

      {:ok, response} ->
        {:error, {:unexpected_response, response}}

      error = {:error, _reason} ->
        error
    end
  end

  # "local" region short-circuits to a user-provided endpoint or localhost,
  # which is used for testing against local stacks.
  defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
    endpoint
  end

  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end

  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end

  defp build_url(host, path, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}#{path}"
  end

  defp add_query(url, [], _client) do
    url
  end

  defp add_query(url, query, client) do
    querystring = encode!(client, query, :query)
    "#{url}?#{querystring}"
  end

  defp encode!(client, payload, format \\ :json) do
    AWS.Client.encode!(client, payload, format)
  end

  defp decode!(client, payload) do
    AWS.Client.decode!(client, payload, :json)
  end
end
lib/aws/generated/elastic_inference.ex
0.745306
0.414751
elastic_inference.ex
starcoder
defmodule Meeseeks.XPath do
  @moduledoc """
  Compile XPath 1.0 selector syntax into `Meeseeks.Selector`s.

  ## Supported Syntax

  Supports almost all XPath 1.0 syntax with a few notable exceptions.

  ### No top-level filter expressions

  Due to the way Meeseeks selection works, top-level filter expressions
  like `xpath("(//ol|//ul)[2]")`, which would select the second list
  element in the document, are particularly difficult to implement.

  An error will be raised if you try to use the above or any other
  top-level filter expression.

  To do the above try something like:

  ```elixir
  Meeseeks.all(doc, xpath("ol|ul")) |> Enum.at(1)
  ```

  All other filter expressions, like `xpath("//div[2]")`, are valid.

  ### No support for variable references

  Variable references are not currently supported, meaning expressions like
  `xpath("*[position()=$p]")` are invalid and will raise an error.

  To do the above try something like:

  ```elixir
  p = 2
  xpath("*[position()=" <> Integer.to_string(p) <> "]")
  ```

  ### No support for id(), lang(), or translate() functions

  These three functions from the core functions library are not currently
  supported. A runtime error will be raised should they attempt to be used.

  ### Namespace prefixes are not resolved to namespace uris

  If you want to find a namespace, search for the namespace prefix of node,
  not the expanded namespace-uri.

  `xpath("*[namespace-uri()='example']")`, not
  `xpath("*[namespace-uri()='https://example.com/ns']")`

  ### HTML5 doesn't support processing instructions

  Because in HTML5 processing instructions are parsed as comments, trying
  to select a processing instruction will not work as expected if you are
  using `meeseeks_html5ever`'s html parser.

  To select a processing instruction when parsing with `meeseeks_html5ever`'s
  html parser, search for comment nodes.

  If you've parsed a tuple-tree that had a `{:pi, _}` or `{:pi, _, _}` node,
  selecting processing instructions should work as expected, though if the
  tuple-tree is the result of parsing with `:mochiweb_html`, the data might
  be slightly mangled due to `:mochiweb_html`'s rather suspect decision to
  parse the data of all processing instructions except `<?php .. ?>` as
  attributes.

  ## Examples

      iex> import Meeseeks.XPath
      iex> xpath("//li[last()]")
      %Meeseeks.Selector.Element{
       combinator: %Meeseeks.Selector.Combinator.Children{
        selector: %Meeseeks.Selector.Element{
          combinator: nil,
          filters: [
           %Meeseeks.Selector.XPath.Predicate{e: %Meeseeks.Selector.XPath.Expr.Predicate{
             e: %Meeseeks.Selector.XPath.Expr.Function{
              args: [],
              f: :last}}}],
          selectors: [%Meeseeks.Selector.Element.Tag{value: "li"}]}},
       filters: nil,
       selectors: []}

      iex> xpath("//ol|//ul")
      [%Meeseeks.Selector.Element{
        combinator: %Meeseeks.Selector.Combinator.Children{
         selector: %Meeseeks.Selector.Element{
           combinator: nil,
           filters: nil,
           selectors: [%Meeseeks.Selector.Element.Tag{value: "ol"}]}},
        filters: nil,
        selectors: []},
       %Meeseeks.Selector.Element{
        combinator: %Meeseeks.Selector.Combinator.Children{
         selector: %Meeseeks.Selector.Element{
           combinator: nil,
           filters: nil,
           selectors: [%Meeseeks.Selector.Element.Tag{value: "ul"}]}},
        filters: nil,
        selectors: []}]
  """

  alias Meeseeks.Selector.XPath

  @doc """
  Compiles a string representing XPath selector syntax into one or more
  `Meeseeks.Selector`s.

  When a static string literal is provided this work will be done during
  compilation, but if a string with interpolated values or a var is provided
  this work will occur at run time.
  """
  # Static literal: compile the selectors now and escape the result into the
  # caller's AST so no work happens at run time.
  defmacro xpath(string_literal) when is_binary(string_literal) do
    string_literal
    |> XPath.compile_selectors()
    |> Macro.escape()
  end

  # Anything else (interpolation, vars): defer compilation to run time.
  defmacro xpath(other) do
    quote do: XPath.compile_selectors(unquote(other))
  end
end
lib/meeseeks/xpath.ex
0.864768
0.812347
xpath.ex
starcoder
defmodule Talib.SMA do
  @moduledoc ~S"""
  Defines a Simple Moving Average.

  ## History

  Version: 1.0
  Source: https://qkdb.wordpress.com/2013/04/22/simple-moving-average/
  Audited by:

  | Name         | Title             |
  | :----------- | :---------------- |
  |              |                   |
  """

  @typedoc """
  Defines a Simple Moving Average.

  * :period - The period of the SMA
  * :values - List of values resulting from the calculation
  """
  @type t :: %Talib.SMA{period: integer, values: [number]}
  defstruct period: 0,
            values: []

  @doc """
  Gets the SMA of a list.

  Returns `{:ok, sma}`, otherwise `{:error, reason}`.

  ## Examples

      iex> Talib.SMA.from_list([17, 23, 44], 2)
      {:ok, %Talib.SMA{
        period: 2,
        values: [nil, 20.0, 33.5]
      }}

      iex> Talib.SMA.from_list([], 1)
      {:error, :no_data}

      iex> Talib.SMA.from_list([17], 0)
      {:error, :bad_period}
  """
  @spec from_list([number], integer) :: {:ok, Talib.SMA.t} | {:error, atom}
  def from_list(data, period), do: calculate(data, period)

  @doc """
  Gets the SMA of a list.

  Raises `NoDataError` if the given list is an empty list.
  Raises `BadPeriodError` if the given period is 0.

  ## Examples

      iex> Talib.SMA.from_list!([17, 23, 44], 2)
      %Talib.SMA{
        period: 2,
        values: [nil, 20.0, 33.5]
      }

      iex> Talib.SMA.from_list!([], 1)
      ** (NoDataError) no data error

      iex> Talib.SMA.from_list!([17], 0)
      ** (BadPeriodError) bad period error
  """
  @spec from_list!([number], integer) :: Talib.SMA.t | no_return
  def from_list!(data, period) do
    case calculate(data, period) do
      {:ok, result} -> result
      {:error, :no_data} -> raise NoDataError
      {:error, :bad_period} -> raise BadPeriodError
    end
  end

  # Empty input always wins over a bad period (from_list([], 0) => :no_data),
  # matching the original clause order. Periods < 1 are rejected; the previous
  # implementation only rejected exactly 0 and produced garbage for negatives.
  @doc false
  @spec calculate([number], integer) :: {:ok, Talib.SMA.t} | {:error, atom}
  defp calculate([], _period), do: {:error, :no_data}
  defp calculate(_data, period) when period < 1, do: {:error, :bad_period}

  defp calculate(data, period) do
    n = length(data)

    # The output starts with min(period - 1, n) leading nils: one per element
    # that cannot yet be the end of a full window (capped by the data length).
    padding = List.duplicate(nil, min(period - 1, n))

    {:ok, %Talib.SMA{period: period, values: do_calculate(data, n, period, padding)}}
  end

  # Walks the list once, prepending results and reversing at the end — O(n)
  # list handling instead of the previous `results ++ [x]` / repeated
  # `length/1` pattern, which was O(n^2). `remaining` tracks how many
  # elements are left so no length/1 is recomputed per step.
  defp do_calculate([], _remaining, _period, acc), do: Enum.reverse(acc)

  # Too few elements remain to fill a window: no further output.
  defp do_calculate([_ | tl], remaining, period, acc) when remaining < period do
    do_calculate(tl, remaining - 1, period, acc)
  end

  # A nil data point yields a nil output slot.
  defp do_calculate([nil | tl], remaining, period, acc) do
    do_calculate(tl, remaining - 1, period, [nil | acc])
  end

  defp do_calculate([_ | tl] = data, remaining, period, acc) do
    # NOTE: a nil later inside the window still crashes Enum.sum/1, exactly
    # as the original implementation did.
    mean =
      data
      |> Enum.take(period)
      |> Enum.sum()
      |> Kernel./(period)

    do_calculate(tl, remaining - 1, period, [mean | acc])
  end
end
lib/talib/sma.ex
0.91668
0.65185
sma.ex
starcoder
# based on XKCD 287 - https://xkcd.com/287/
defmodule Item do
  @moduledoc """
  Defines a struct for a menu item with a name and price
  """

  @type t :: %__MODULE__{
          name: String.t,
          price: Money.t
        }

  # Default price is Money.new(1); new/2 below enforces a positive amount.
  defstruct name: "", price: Money.new(1)

  # A menu is a non-empty list of items.
  @type menu() :: [t, ...]

  @spec new(String.t, Money.t) :: t
  @doc ~S"""
  Create a new `Item` struct with a name and price

  ## Examples

      iex> Item.new("grilled cheese", Money.parse!("$1.25"))
      %Item{name: "grilled cheese", price: %Money{amount: 125, currency: :USD}}
  """
  # Guard rejects zero/negative prices: there is no matching clause, so a
  # non-positive amount raises FunctionClauseError.
  def new(name, %Money{amount: amount} = price) when amount > 0,
    do: %Item{name: name, price: price}

  @spec parse(String.t) :: {:ok, t}
  @doc ~S"""
  Parse a comma-separated string into name and price

  ## Examples

      iex> Item.parse("cheezborger,$2.50")
      %Item{name: "cheezborger", price: %Money{amount: 250, currency: :USD}}

      iex> Item.parse("caviar,$99.99")
      %Item{name: "caviar", price: %Money{amount: 9999, currency: :USD}}
  """
  # Splits on the first comma only (parts: 2), so names may not contain
  # commas but prices are taken verbatim.
  # NOTE(review): input without a comma makes hd/1 raise; confirm callers
  # always supply "name,price" rows.
  def parse(string) when is_binary(string) do
    [name | price] = String.split(string, ",", parts: 2)
    new(name, Money.parse!(hd(price)))
  end

  @spec parse([String.t]) :: menu()
  @doc ~S"""
  Parse a list of comma-separated strings into parsed menu items

  ## Examples

      iex> Item.parse(["coffee,$2.00","tea,$1.00","me,$9.99"])
      [%Item{name: "coffee", price: %Money{amount: 200, currency: :USD}},
       %Item{name: "tea", price: %Money{amount: 100, currency: :USD}},
       %Item{name: "me", price: %Money{amount: 999, currency: :USD}}]
  """
  def parse(menu_rows) when is_list(menu_rows) do
    Enum.map(menu_rows, fn r -> Item.parse(r) end)
  end

  @spec sort(menu()) :: menu()
  @doc ~S"""
  Sorts a list of menu items in descending price order

  ## Examples

      iex> Item.sort(Item.parse(["last,$5.00","first,$1.00","middle,$3.00","second,$2.00","penultimate,$4.00"]))
      [%Item{name: "last", price: %Money{amount: 500, currency: :USD}},
       %Item{name: "penultimate", price: %Money{amount: 400, currency: :USD}},
       %Item{name: "middle", price: %Money{amount: 300, currency: :USD}},
       %Item{name: "second", price: %Money{amount: 200, currency: :USD}},
       %Item{name: "first", price: %Money{amount: 100, currency: :USD}}]
  """
  # Money.compare/2 returns -1/0/1; "!= -1" keeps a before b when a's price
  # is greater than or equal to b's, i.e. descending order.
  def sort(menu) do
    Enum.sort(menu, &(Money.compare(&1.price, &2.price) != -1))
  end
end
lib/item.ex
0.838828
0.505066
item.ex
starcoder
defmodule MoneroAddress do
  @moduledoc """
  MoneroAddress implements functions decoding and validating Monero base58 encoded addresses.
  Monero uses a different address format than bitcoin. The address contains a 1 byte network
  prefix, 2 32-byte public keys - spend key and view key - and a 4 byte checksum. The address
  is encoded in 8 byte chunks (11 base58 characters), optionally '0'-padded ('1' in base58).
  A standard address is thus always 95 characters long. The checksum is computed by taking the
  first 4 bytes of the keccak-256 hash. Note that Monero does not use official SHA3 but the
  original keccak-256.
  """

  # Generate one do_encode58/1 and one do_decode58/1 clause per alphabet
  # character at compile time (value <-> codepoint lookup tables).
  b58_alphabet = Enum.with_index('123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz')

  for {encoding, value} <- b58_alphabet do
    defp do_encode58(unquote(value)), do: unquote(encoding)
    defp do_decode58(unquote(encoding)), do: unquote(value)
  end

  # Catch-all: any codepoint outside the alphabet is rejected.
  defp do_decode58(c), do: raise(ArgumentError, "illegal character #{c}")

  @b58base 58

  # Index = decoded byte count, value = encoded character count; i.e. a full
  # 8-byte block encodes to 11 base58 characters.
  encodedBlockSizes = Enum.with_index([0, 2, 3, 5, 6, 7, 9, 10, 11])

  for {size, index} <- encodedBlockSizes do
    defp decoded_block_size(unquote(size)), do: unquote(index)
    defp encoded_block_size(unquote(index)), do: unquote(size)
  end

  @fullEncodedBlockSize 11

  # Horner's-method fold of base58 digits into one integer.
  defp decode58([], acc), do: acc

  defp decode58([c | code], acc) do
    decode58(code, acc * @b58base + do_decode58(c))
  end

  # Decodes one base58 block into bytes, left-padding with zero bytes so the
  # block always has its full decoded width.
  defp decode_block(block) do
    decoded_bin = decode58(block, 0) |> :binary.encode_unsigned()
    size = byte_size(decoded_bin)
    needed_size = decoded_block_size(length(block))

    padding =
      if size < needed_size do
        for _ <- 1..(needed_size - size), into: <<>>, do: <<0>>
      else
        <<>>
      end

    padding <> decoded_bin
  end

  # Encodes one {integer, decoded_byte_count} block, left-padding with '1'
  # (base58 zero) up to the block's full encoded width.
  def encode58(data) do
    encoded =
      encode58(data |> elem(0), [])

    desired_length = data |> elem(1) |> encoded_block_size()
    diff = desired_length - String.length(encoded)

    padding =
      if diff > 0 do
        for _ <- 1..diff, into: "", do: "1"
      else
        ""
      end

    padding <> encoded
  end

  defp encode58(0, acc), do: to_string(acc)

  defp encode58(integer, acc) do
    encode58(div(integer, 58), [do_encode58(rem(integer, 58)) | acc])
  end

  @doc """
  Decodes Monero specific base58 encoded binary.

  ## Examples

      iex> MoneroAddress.base58_decode!("55LTR8KniP")
      <<107, 134, 160, 20, 146, 167, 244>>
  """
  def base58_decode!(code) when is_binary(code) do
    # this function differs from btc
    # split payload into blocks of 11 codepoints (8 bytes)
    blocks = to_charlist(code) |> Enum.chunk_every(@fullEncodedBlockSize)
    for block <- blocks, do: decode_block(block), into: <<>>
  end

  def base58_decode!(_code), do: raise(ArgumentError, "expects base58-encoded binary")

  # Splits a binary into {integer, byte_count} tuples: full 8-byte blocks plus
  # one trailing block of 1..7 bytes.
  defp split(data, acc) do
    case data do
      <<a::size(64), rest::binary>> -> split(rest, acc ++ [{a, 8}])
      <<a::size(56)>> -> acc ++ [{a, 7}]
      <<a::size(48)>> -> acc ++ [{a, 6}]
      <<a::size(40)>> -> acc ++ [{a, 5}]
      <<a::size(32)>> -> acc ++ [{a, 4}]
      <<a::size(24)>> -> acc ++ [{a, 3}]
      <<a::size(16)>> -> acc ++ [{a, 2}]
      <<a::size(8)>> -> acc ++ [{a, 1}]
      <<>> -> acc
    end
  end

  def base58_encode!(code) do
    # this function differs from btc
    # split payload into blocks of 11 codepoints (8 bytes)
    blocks = split(code, [])
    for block <- blocks, do: encode58(block), into: ""
  end

  @doc """
  Decodes and validates Monero address.

  Returns tuple containing network prefix (encoded as integer), public spend key (hex encoded)
  and public view key (hex encoded)

  ## Examples

      iex> MoneroAddress.decode_address!("<KEY>", :main)
      {18, "<KEY>", "5db35109fbba7d5f275fef4b9c49e0cc1c84b219ec6ff652fda54f89f7f63c88", :address}
  """
  # Address format is dispatched purely on string length:
  # 95 = standard/subaddress, 106 = integrated v2, 139 = integrated v1.
  def decode_address!(code, network) do
    case String.length(code) do
      95 -> decode_simple_address!(code, network)
      106 -> decode_integrated_v2_address!(code, network)
      139 -> decode_integrated_v1_address!(code, network)
      _ -> raise(ArgumentError, "invalid length")
    end
  end

  defp decode_simple_address!(code, network) do
    <<prefix::binary-size(1), public_spend_key::binary-size(32), public_view_key::binary-size(32),
      checksum::binary-size(4)>> = base58_decode!(code)

    payload = prefix <> public_spend_key <> public_view_key

    unless checksum_valid?(payload, checksum),
      do: raise(ArgumentError, "checksum does not match")

    <<prefix_code>> = prefix

    type =
      cond do
        prefix_code == network_prefix(network, :address) -> :address
        prefix_code == network_prefix(network, :subaddress) -> :subaddress
        true -> raise(ArgumentError, "invalid network prefix #{prefix_code}")
      end

    {prefix_code, public_spend_key |> Base.encode16(case: :lower),
     public_view_key |> Base.encode16(case: :lower), type}
  end

  # v1 integrated addresses carry a 32-byte payment id.
  # NOTE(review): this returns the tag :integrated while the v2 decoder below
  # and encode_address!/4 use :integrated_address — confirm whether this
  # asymmetry is intentional.
  defp decode_integrated_v1_address!(code, network) do
    <<prefix::binary-size(1), public_spend_key::binary-size(32), public_view_key::binary-size(32),
      payment_id::binary-size(32), checksum::binary-size(4)>> = base58_decode!(code)

    payload = prefix <> public_spend_key <> public_view_key <> payment_id

    unless checksum_valid?(payload, checksum),
      do: raise(ArgumentError, "checksum does not match")

    <<prefix_code>> = prefix

    if prefix_code != network_prefix(network, :integrated_address),
      do: raise(ArgumentError, "invalid network prefix #{prefix_code}")

    {prefix_code, public_spend_key |> Base.encode16(case: :lower),
     public_view_key |> Base.encode16(case: :lower),
     {:integrated, payment_id |> Base.encode16(case: :lower)}}
  end

  # v2 integrated addresses carry a short 8-byte payment id.
  defp decode_integrated_v2_address!(code, network) do
    <<prefix::binary-size(1), public_spend_key::binary-size(32), public_view_key::binary-size(32),
      payment_id::binary-size(8), checksum::binary-size(4)>> = base58_decode!(code)

    payload = prefix <> public_spend_key <> public_view_key <> payment_id

    unless checksum_valid?(payload, checksum),
      do: raise(ArgumentError, "checksum does not match")

    <<prefix_code>> = prefix

    if prefix_code != network_prefix(network, :integrated_address),
      do: raise(ArgumentError, "invalid network prefix #{prefix_code}")

    {prefix_code, public_spend_key |> Base.encode16(case: :lower),
     public_view_key |> Base.encode16(case: :lower),
     {:integrated_address, payment_id |> Base.encode16(case: :lower)}}
  end

  defp checksum_valid?(payload, checksum) do
    prefix = compute_checksum(payload)
    prefix == checksum
  end

  defp compute_checksum(payload) do
    # monero uses original keccak-256 not the NIST official sha3
    <<prefix::binary-size(4), _::binary-size(28)>> = ExKeccak.hash_256(payload)
    prefix
  end

  # Network byte prefixes per (network, address kind).
  defp network_prefix(:main, :address), do: 18
  defp network_prefix(:main, :integrated_address), do: 19
  defp network_prefix(:main, :subaddress), do: 42
  defp network_prefix(:stage, :address), do: 24
  defp network_prefix(:stage, :integrated_address), do: 25
  defp network_prefix(:stage, :subaddress), do: 36
  defp network_prefix(:test, :address), do: 53
  defp network_prefix(:test, :integrated_address), do: 54
  defp network_prefix(:test, :subaddress), do: 63

  @doc """
  Encodes public spend key, public view key and optionally payment id into base58 encoded
  address

  ## Examples

      iex> MoneroAddress.encode_address!("cd8235338c6d9a4b467d97d20e6ea309d3af16f845abf74b62b48d616ba00ff6", "708dae560daacda2d39d0c0b5586edc92a0fa918f0a444ad7ae029ff1ae81185", :test, :address)
      "9zxM5uUAZxDDbGPDtBBpga2eLKhuK5WWJDcLQ3brGwHpiDmro8TrXkpUEd5rHyZecCaeYen7rou6KW1ySfCvU69eG2Bmsiz"
  """
  def encode_address!(public_spend_key, public_view_key, network, type)
      when type in [:address, :subaddress] do
    if String.length(public_spend_key) != 64,
      do: raise(ArgumentError, "invalid public spend key length")

    if String.length(public_view_key) != 64,
      do: raise(ArgumentError, "invalid public view key length")

    payload =
      <<network_prefix(network, type)>> <>
        Base.decode16!(public_spend_key, case: :lower) <>
        Base.decode16!(public_view_key, case: :lower)

    checksum = compute_checksum(payload)
    (payload <> checksum) |> base58_encode!()
  end

  # Integrated variant: payment id of 16 hex chars (v2, 8 bytes) or 64 hex
  # chars (v1, 32 bytes) is appended before the checksum.
  def encode_address!(
        public_spend_key,
        public_view_key,
        network,
        {:integrated_address, payment_id}
      ) do
    if String.length(public_spend_key) != 64,
      do: raise(ArgumentError, "invalid public spend key length")

    if String.length(public_view_key) != 64,
      do: raise(ArgumentError, "invalid public view key length")

    if String.length(payment_id) not in [16, 64],
      do: raise(ArgumentError, "invalid payment id length")

    payload =
      <<network_prefix(network, :integrated_address)>> <>
        Base.decode16!(public_spend_key, case: :lower) <>
        Base.decode16!(public_view_key, case: :lower) <>
        Base.decode16!(payment_id, case: :lower)

    checksum = compute_checksum(payload)
    (payload <> checksum) |> base58_encode!()
  end
end
lib/monero_address.ex
0.748076
0.475849
monero_address.ex
starcoder
defmodule RTL.Playlist do
  alias RTL.Playlist.Segment
  alias RTL.Videos

  # Builds the shuffled list of matching Segment structs (i.e. video clips)
  # for a project, keeping only segments that carry every requested tag.
  def build_playlist(project, tags) do
    Enum.each(tags, &RTL.Helpers.assert_keys(&1, allowed: [:name]))

    Videos.list_videos(project: project, coded: true, having_tags: tags, preload: :tags)
    |> Enum.map(&video_to_map/1)
    |> Enum.flat_map(&segments_for_video/1)
    |> Enum.filter(&Segment.is_tagged?/1)
    |> Enum.filter(&Segment.matches_all_tags?(&1, tags))
    |> Segment.merge_adjacent()
    |> Enum.shuffle()
  end

  # Reduces a Video record to a plain map of playlist-relevant data.
  defp video_to_map(video) do
    %{
      video_id: video.id,
      title: video_title(video),
      recording_url: Videos.video_recording_url(video),
      thumbnail_url: Videos.video_thumbnail_url(video),
      tags: taggings_to_maps(video.coding.taggings)
    }
  end

  # Explicit title, else the speaker's name when allowed, else "Anonymous".
  defp video_title(video) do
    video.title ||
      (if video.permission_show_name, do: video.speaker_name) ||
      "Anonymous"
  end

  # Each Tagging record becomes %{name:, starts_at:, ends_at:}.
  # starts_at / ends_at stay in numeric form (and may be nil).
  defp taggings_to_maps(taggings) do
    for tagging <- taggings do
      %{name: tagging.tag.name, starts_at: tagging.starts_at, ends_at: tagging.ends_at}
    end
  end

  # Splits one video map into Segment structs, one per span between two
  # consecutive tag breakpoints. Untagged segments are filtered out later.
  defp segments_for_video(video) do
    video.tags
    |> breakpoints()
    |> empty_segments()
    |> Enum.map(&finalize_segment(&1, video))
  end

  # All distinct tag start/end times, ascending. A missing starts_at counts
  # as 0 and a missing ends_at as the 9999 open-ended sentinel.
  defp breakpoints(tags) do
    tags
    |> Enum.flat_map(fn tag -> [tag.starts_at || 0, tag.ends_at || 9999] end)
    |> Enum.sort()
    |> Enum.uniq()
  end

  # Pairs each breakpoint with its successor to form empty Segments.
  defp empty_segments(breakpoints) do
    breakpoints
    |> Enum.chunk_every(2, 1, :discard)
    |> Enum.map(fn [from, until] -> %Segment{starts_at: from, ends_at: until} end)
  end

  # Fills in video data and overlapping tags, then stamps a content hash id.
  defp finalize_segment(segment, video) do
    filled =
      segment
      |> Map.merge(Map.take(video, [:video_id, :title, :recording_url, :thumbnail_url]))
      |> Map.put(:tags, tags_within(segment, video.tags))

    Map.put(filled, :segment_id, Segment.hash(filled))
  end

  # Tags overlapping this segment, with their time bounds stripped.
  defp tags_within(segment, tags) do
    tags
    |> Enum.filter(&overlaps?(&1, segment))
    |> Enum.map(&Map.drop(&1, [:starts_at, :ends_at]))
  end

  # Half-open overlap test; nil bounds default to 0 / 9999.
  defp overlaps?(tag, segment) do
    (tag.starts_at || 0) < segment.ends_at && (tag.ends_at || 9999) > segment.starts_at
  end
end
lib/rtl/playlist/playlist.ex
0.695028
0.473292
playlist.ex
starcoder
defmodule RDF.Namespace do
  @moduledoc """
  A behaviour for resolvers of atoms to `RDF.IRI`s.

  Currently there's only one type of such namespaces: `RDF.Vocabulary.Namespace`,
  but other types are thinkable and might be implemented in the future, eg.
  namespaces for JSON-LD contexts.
  """

  alias RDF.IRI

  import RDF.Guards

  @doc """
  Resolves a term to a `RDF.IRI`.
  """
  @callback __resolve_term__(atom) :: {:ok, IRI.t()} | {:error, Exception.t()}

  @doc """
  All terms of a `RDF.Namespace`.
  """
  @callback __terms__() :: [atom]

  @doc """
  Resolves a qualified term to a `RDF.IRI`.

  It determines a `RDF.Namespace` from the qualifier of the given term and
  delegates to remaining part of the term to `__resolve_term__/1` of this
  determined namespace.
  """
  @spec resolve_term(IRI.t() | module) :: {:ok, IRI.t()} | {:error, Exception.t()}
  def resolve_term(expr)

  # An IRI needs no resolution.
  def resolve_term(%IRI{} = iri), do: {:ok, iri}

  def resolve_term(qualified_term) when maybe_ns_term(qualified_term) do
    qualified_term
    |> to_string()
    |> do_resolve_term()
  end

  @doc """
  Resolves a qualified term to a `RDF.IRI` or raises an error when that's not possible.

  See `resolve_term/1` for more.
  """
  @spec resolve_term!(IRI.t() | module) :: IRI.t()
  def resolve_term!(expr) do
    case resolve_term(expr) do
      {:ok, iri} -> iri
      {:error, error} -> raise error
    end
  end

  # Proper module names carry the "Elixir." prefix: split off the last
  # segment as the term and resolve it against the remaining namespace.
  defp do_resolve_term("Elixir." <> _ = qualified_term) do
    {term, namespace_parts} =
      qualified_term
      |> Module.split()
      |> List.pop_at(-1)

    do_resolve_term(Module.concat(namespace_parts), String.to_atom(term))
  end

  # Anything without the "Elixir." prefix cannot be a namespace term.
  defp do_resolve_term(qualified_term) do
    {:error,
     %RDF.Namespace.UndefinedTermError{
       message: "#{qualified_term} is not a term on a RDF.Namespace"
     }}
  end

  # The bare RDF qualifier is an alias for the RDF.NS.RDF vocabulary.
  defp do_resolve_term(RDF, term), do: do_resolve_term(RDF.NS.RDF, term)

  defp do_resolve_term(Elixir, term) do
    {:error,
     %RDF.Namespace.UndefinedTermError{
       message: "#{term} is not a RDF.Namespace; top-level modules can't be RDF.Namespaces"
     }}
  end

  defp do_resolve_term(namespace, term) do
    if namespace_module?(namespace) do
      namespace.__resolve_term__(term)
    else
      {:error,
       %RDF.Namespace.UndefinedTermError{message: "#{namespace} is not a RDF.Namespace"}}
    end
  end

  # A namespace must be a compiled module exposing __resolve_term__; the
  # `and` short-circuits so __info__/1 is only called on real modules.
  defp namespace_module?(module) do
    match?({:module, _}, Code.ensure_compiled(module)) and
      Keyword.has_key?(module.__info__(:functions), :__resolve_term__)
  end
end
lib/rdf/namespace.ex
0.801042
0.561365
namespace.ex
starcoder
defmodule OpentelemetryProcessPropagator do
  @moduledoc """
  `OpentelemetryProcessPropagator` provides helpers for dealing with context
  propagation across process boundaries.

  ## Context Propagation

  Erlang and Elixir do not have a mechanism for transparently passing context
  between processes. This requires the user to explicitly pass data between
  processes. In order to continue a trace across processes, the user
  must start a new span and pass it to the spawned process.

  ```
  span_ctx = OpenTelemetry.Tracer.start_span("child")
  ctx = OpenTelemetry.Ctx.get_current()

  task =
    Task.async(fn ->
      OpenTelemetry.Ctx.attach(ctx)
      OpenTelemetry.Tracer.set_current_span(span_ctx)

      # do work here

      OpenTelemetry.Tracer.end_span(span_ctx)
    end)

  _result = Task.await(task)
  ```

  ### Reverse Propagation

  It's not always possible to have full control over traces, such as when
  using `telemetry` events emitted from a library you don't control to create
  a span. In such cases, a mechanism to fetch a context from a calling process
  is necessary. This is effectively context propagation in reverse.

  As an example, Ecto uses the `Task` module to execute preloads which are
  each a separate query. Since a task is a spawned process, creating an otel
  span results in orphan spans. To correctly connect these spans we must find
  the otel context which spawned the process.

  ## Usage

  Example of using `fetch_parent_ctx/2` to find a parent context.

  ```elixir
  OpenTelemetry.with_span :span_started_in_your_app do
    # some span being created in a process spawned by a library
    # you don't control, e.g. Ecto preloads

    Task.async(fn ->
      parent_ctx = OpentelemetryProcessPropagator.fetch_parent_ctx(1, :"$callers")

      OpenTelemetry.Ctx.attach(parent_ctx)

      attrs = %{some_attr: :from_telemetry_event}
      span = OpenTelemetry.Tracer.start_span(:span_created_in_lib, %{attributes: attrs})
      OpenTelemetry.Span.end_span(span)
    end)
  end
  ```

  ```erlang
  ?with_span(span_started_in_your_app, #{}, fun() ->
    %% some span being created in a process spawned by a library
    %% you don't control
    proc_lib:spawn_link(fun() ->
      Tracer = opentelemetry:get_tracer(test_tracer),
      ParentCtx = opentelemetry_process_propagator:fetch_parent_ctx(),
      otel_ctx:attach(ParentCtx),
      Span = otel_tracer:start_span(Tracer, span_created_in_lib, #{}),
      otel_tracer:end_span(Span)
    end)
  end).
  ```
  """

  @doc """
  Attempt to fetch an otel context from a give pid.
  """
  @spec fetch_ctx(pid) :: OpenTelemetry.span_ctx() | :undefined
  defdelegate fetch_ctx(pid), to: :opentelemetry_process_propagator

  @doc """
  Attempt to find an otel context in the spawning process.

  This is equivalent to calling `fetch_parent_ctx(1, :"$ancestors")`
  """
  @spec fetch_parent_ctx() :: OpenTelemetry.span_ctx() | :undefined
  defdelegate fetch_parent_ctx(), to: :opentelemetry_process_propagator

  @doc """
  Attempt to find an otel context in a spawning process within `n` number
  of parent processes
  """
  @spec fetch_parent_ctx(non_neg_integer()) :: OpenTelemetry.span_ctx() | :undefined
  defdelegate fetch_parent_ctx(depth), to: :opentelemetry_process_propagator

  @doc """
  Attempt to find an otel context under a given process dictionary key
  within `n` number of parent processes. The first context found will
  be returned.

  Processes spawned by `proc_lib` are stored under `:"$ancestors"`. The
  Elixir `Task` module uses the `:"$callers"` key.
  """
  @spec fetch_parent_ctx(non_neg_integer(), atom()) :: OpenTelemetry.span_ctx() | :undefined
  defdelegate fetch_parent_ctx(max_depth, key), to: :opentelemetry_process_propagator
end
propagators/opentelemetry_process_propagator/lib/opentelemetry_process_propagator.ex
0.914544
0.855791
opentelemetry_process_propagator.ex
starcoder
defmodule Correcthorse.Password do
  @moduledoc """
  Generates lists of random words with `words/2` that can be used to
  make a password with `to_password/3`.
  """

  @max_minimum_words 50

  # The word source is chosen at compile time so the test env can use a stub.
  @words (case Mix.env() do
            :test -> StubWords
            _ -> Correcthorse.Words.WordsImpl
          end)

  @doc """
  List of (random) words that can be used to make a password with `to_password/3`
  """
  @spec words(pos_integer(), pos_integer()) :: list(String.t())
  def words(minimum_words, minimum_chars) do
    do_words([], 0, restrict_minimum_sizes(minimum_words, minimum_chars))
  end

  # Caps the requested minimums so a huge request can't loop excessively.
  defp restrict_minimum_sizes(minimum_words, minimum_chars) do
    {min(max_minimum_words(), minimum_words), min(max_minimum_chars(), minimum_chars)}
  end

  @spec max_minimum_words :: 50
  def max_minimum_words, do: @max_minimum_words

  @spec max_minimum_chars :: 250
  def max_minimum_chars, do: min_chars_from_min_words(max_minimum_words())

  # Assumes ~5 characters per word on average.
  @spec min_chars_from_min_words(pos_integer()) :: pos_integer()
  def min_chars_from_min_words(min_words), do: min_words * 5

  @doc """
  Turn a list of words into a password.

  * wordlist - the list of words
  * separator - string used to separate the words
  * options - keyword list of options, which may include:
    * `capitalise: :each_word` - capitalise every word
    * `capitalise: :first` - capitalise the first word
    * `append: append` - add `String.Chars.to_string(append)` to the end of the password
  """
  @spec to_password(list(String.t()), String.t(), list({atom, any})) :: String.t()
  def to_password(wordlist, separator, options) do
    wordlist
    |> decorate(options)
    |> Enum.join(separator)
  end

  # Applies each option in order; later options see earlier results.
  defp decorate(wordlist, []), do: wordlist

  defp decorate(wordlist, [{:capitalise, :each_word} | rest]) do
    wordlist
    |> Enum.map(&String.capitalize/1)
    |> decorate(rest)
  end

  defp decorate(wordlist, [{:capitalise, :first} | rest]) do
    wordlist
    |> List.update_at(0, &String.capitalize/1)
    |> decorate(rest)
  end

  defp decorate(wordlist, [{:append, append} | rest]) do
    wordlist
    |> List.update_at(-1, fn word -> "#{word}#{append}" end)
    |> decorate(rest)
  end

  # Unknown options are silently ignored.
  defp decorate(wordlist, [_ | rest]), do: decorate(wordlist, rest)

  # Done once both the word-count and character-count minimums are met.
  defp do_words(wordlist, char_count, {minimum_words, minimum_chars})
       when length(wordlist) >= minimum_words and char_count >= minimum_chars do
    wordlist
  end

  defp do_words(wordlist, char_count, constraints) do
    new_wordlist = [@words.random_word() | wordlist]
    new_char_count = calculate_size(char_count, new_wordlist)
    do_words(new_wordlist, new_char_count, constraints)
  end

  # Running character total: previous total plus the newly-prepended word.
  # Separator characters are not counted.
  defp calculate_size(previous_char_count, [new_word | _]) do
    previous_char_count + String.length(new_word)
  end
end
apps/correcthorse/lib/correcthorse/password.ex
0.792544
0.478651
password.ex
starcoder
defmodule Advent20.Passport do
  @moduledoc """
  Day 4: Passport Processing
  """

  # Set up a stream of passport inputs: blank-line-separated blocks, each
  # flattened into a list of "key:value" strings.
  defp passport_input_stream(input_filename) do
    input_filename
    |> File.stream!()
    |> Stream.map(&String.trim/1)
    |> Stream.chunk_by(&(&1 == ""))
    |> Stream.reject(&(&1 == [""]))
    |> Stream.map(fn raw_passport ->
      Enum.flat_map(raw_passport, fn line -> String.split(line, " ") end)
    end)
  end

  # We're using Ecto for the validation, yeah!
  defmodule Passport do
    use Ecto.Schema
    import Ecto.Changeset

    embedded_schema do
      field :byr, :integer
      field :iyr, :integer
      field :eyr, :integer
      field :hgt, :string
      field :hcl, :string
      field :ecl, :string
      field :pid, :string
      field :cid, :string
    end

    # Part 1 validation: all fields except :cid must be present.
    def changeset(params) do
      %__MODULE__{}
      |> cast(params, [:byr, :iyr, :eyr, :hgt, :hcl, :ecl, :pid, :cid])
      |> validate_required([:byr, :iyr, :eyr, :hgt, :hcl, :ecl, :pid])
    end

    # Part 2 validation: field-specific range/format rules. The custom
    # validators below use fetch_field!/2, so this expects a changeset that
    # already passed changeset/1 (fields present).
    def extended_validation(changeset) do
      changeset
      |> validate_number(:byr, greater_than_or_equal_to: 1920, less_than_or_equal_to: 2002)
      |> validate_number(:iyr, greater_than_or_equal_to: 2010, less_than_or_equal_to: 2020)
      |> validate_number(:eyr, greater_than_or_equal_to: 2020, less_than_or_equal_to: 2030)
      |> validate_height(:hgt)
      |> validate_hair_color(:hcl)
      |> validate_inclusion(:ecl, ~w[amb blu brn gry grn hzl oth])
      |> validate_length(:pid, is: 9)
      |> validate_password_is_only_numbers(:pid)
    end

    # Height must be 150-193cm or 59-76in.
    defp validate_height(changeset, field) do
      input = fetch_field!(changeset, field)

      valid? =
        case Regex.run(~r/(\d+)(cm|in)/, input, capture: :all_but_first) do
          [height, "cm"] -> String.to_integer(height) in 150..193
          [height, "in"] -> String.to_integer(height) in 59..76
          _ -> false
        end

      if(not valid?, do: changeset |> add_error(field, "not valid"), else: changeset)
    end

    # "#" followed by exactly six characters.
    # NOTE(review): this only checks the length after "#", not that the
    # characters are hex digits — confirm against the puzzle rules.
    defp validate_hair_color(changeset, field) do
      valid? =
        case fetch_field!(changeset, field) do
          "#" <> value -> String.length(value) == 6
          _ -> false
        end

      if(not valid?, do: changeset |> add_error(field, "not valid"), else: changeset)
    end

    # Exactly nine digits. (Named "password" but it validates the :pid field.)
    defp validate_password_is_only_numbers(changeset, field) do
      input = fetch_field!(changeset, field)
      valid? = Regex.match?(~r/^\d{9}$/, input)

      if(not valid?, do: changeset |> add_error(field, "not valid"), else: changeset)
    end
  end

  @doc """
  Part 1: Count passwords that have all required fields
  """
  def count_valid_passports(input_filename) do
    passport_input_stream(input_filename)
    |> Stream.map(&create_passport/1)
    |> Stream.map(&Passport.changeset/1)
    |> Enum.count(& &1.valid?)
  end

  # Turns a list of "key:value" strings into a %{"key" => "value"} map.
  defp create_passport(password_input_list) do
    password_input_list
    |> Enum.into(%{}, fn input -> input |> String.split(":") |> List.to_tuple() end)
  end

  @doc """
  Part 2. Count passwords that have all required fields AND are valid
  according to the rules
  """
  # Required-field validation runs first; only structurally valid passports
  # go through the extended field rules.
  def count_valid_passports_strict(input_filename) do
    passport_input_stream(input_filename)
    |> Stream.map(&create_passport/1)
    |> Stream.map(&Passport.changeset/1)
    |> Stream.filter(& &1.valid?)
    |> Stream.map(&Passport.extended_validation/1)
    |> Enum.count(& &1.valid?)
  end
end
lib/advent20/04_passport.ex
0.684053
0.42182
04_passport.ex
starcoder
defmodule FunWithFlags do
  @moduledoc """
  FunWithFlags, the Elixir feature flag library.

  This module is the public interface of the library. Its API is made of
  three main functions to enable, disable and query feature flags.

  In their simplest form flags are toggled on and off globally (a boolean
  gate). More advanced gates can be set and queried for any term that
  implements these protocols:

  * `FunWithFlags.Actor`, for terms that should have flag values of their
    own (e.g. a `%User{}` struct).
  * `FunWithFlags.Group`, for terms that belong to groups for which flags
    can be toggled (e.g. an "admins" group).

  Flags can also be enabled for a percentage of time or of actors.

  See the [Usage](/fun_with_flags/readme.html#usage) notes for a more
  detailed explanation.
  """

  alias FunWithFlags.{Flag, Gate, Config}

  @store FunWithFlags.Config.store_module

  @type options :: Keyword.t

  @doc """
  Checks if a flag is enabled.

  Invoked with just the flag name (an atom) it checks the general status of
  the flag, i.e. its boolean gate.

  ## Options

    * `:for` - a term for which the flag could have a specific value. The
      term should implement the `Actor` or `Group` protocol, or both.

  ## Examples

      FunWithFlags.enabled?(:elder_wand)
      FunWithFlags.enabled?(:elder_wand, for: some_user)
  """
  @spec enabled?(atom, options) :: boolean
  def enabled?(flag_name, options \\ [])

  def enabled?(flag_name, []) when is_atom(flag_name) do
    {:ok, flag} = @store.lookup(flag_name)
    Flag.enabled?(flag)
  end

  def enabled?(flag_name, [for: nil]) do
    enabled?(flag_name)
  end

  def enabled?(flag_name, [for: term]) when is_atom(flag_name) do
    {:ok, flag} = @store.lookup(flag_name)
    Flag.enabled?(flag, for: term)
  end

  @doc """
  Enables a feature flag.

  Called with just the flag name it enables the boolean (global) gate.

  ## Options

    * `:for_actor` - enable the flag for a specific term only; any term that
      implements the `Actor` protocol.
    * `:for_group` - enable the flag for a specific group only; a binary or
      an atom (atoms are converted to and stored as binaries; atom support
      exists for retro-compatibility with versions <= 0.9).
    * `:for_percentage_of` - enable the flag for a percentage of time or
      actors, expressed as `{:time, float}` or `{:actors, float}` with the
      float in the range `0.0 < x < 1.0`.

  ## Examples

      FunWithFlags.enable(:super_shrink_ray)
      FunWithFlags.enable(:warp_drive, for_actor: some_user)
      FunWithFlags.enable(:delorean, for_group: "time_travelers")
      FunWithFlags.enable(:random_glitch, for_percentage_of: {:time, 0.5})
  """
  @spec enable(atom, options) :: {:ok, true}
  def enable(flag_name, options \\ [])

  def enable(flag_name, []) when is_atom(flag_name) do
    {:ok, flag} = @store.put(flag_name, Gate.new(:boolean, true))
    verify(flag)
  end

  def enable(flag_name, [for_actor: nil]) do
    enable(flag_name)
  end

  def enable(flag_name, [for_actor: actor]) when is_atom(flag_name) do
    {:ok, flag} = @store.put(flag_name, Gate.new(:actor, actor, true))
    verify(flag, for: actor)
  end

  def enable(flag_name, [for_group: nil]) do
    enable(flag_name)
  end

  def enable(flag_name, [for_group: group_name]) when is_atom(flag_name) do
    # Group gates cannot be verified without an actor, so return the
    # written value directly.
    {:ok, _flag} = @store.put(flag_name, Gate.new(:group, group_name, true))
    {:ok, true}
  end

  def enable(flag_name, [for_percentage_of: {:time, ratio}]) when is_atom(flag_name) do
    {:ok, _flag} = @store.put(flag_name, Gate.new(:percentage_of_time, ratio))
    {:ok, true}
  end

  def enable(flag_name, [for_percentage_of: {:actors, ratio}]) when is_atom(flag_name) do
    {:ok, _flag} = @store.put(flag_name, Gate.new(:percentage_of_actors, ratio))
    {:ok, true}
  end

  @doc """
  Disables a feature flag.

  Called with just the flag name it disables the boolean (global) gate.

  ## Options

    * `:for_actor` - disable the flag for a specific term only; any term
      that implements the `Actor` protocol.
    * `:for_group` - disable the flag for a specific group only; a binary or
      an atom (atoms are converted to and stored as binaries; atom support
      exists for retro-compatibility with versions <= 0.9).
    * `:for_percentage_of` - disable the flag for a percentage of time or
      actors, expressed as `{:time, float}` or `{:actors, float}` with the
      float in the range `0.0 < x < 1.0`.

  ## Examples

      FunWithFlags.disable(:random_koala_gifs)
      FunWithFlags.disable(:spider_sense, for_actor: villain)
      FunWithFlags.disable(:hogwarts, for_group: "muggles")
      FunWithFlags.disable(:random_glitch, for_percentage_of: {:time, 0.5})
  """
  @spec disable(atom, options) :: {:ok, false}
  def disable(flag_name, options \\ [])

  def disable(flag_name, []) when is_atom(flag_name) do
    {:ok, flag} = @store.put(flag_name, Gate.new(:boolean, false))
    verify(flag)
  end

  def disable(flag_name, [for_actor: nil]) do
    disable(flag_name)
  end

  def disable(flag_name, [for_actor: actor]) when is_atom(flag_name) do
    {:ok, flag} = @store.put(flag_name, Gate.new(:actor, actor, false))
    verify(flag, for: actor)
  end

  def disable(flag_name, [for_group: nil]) do
    disable(flag_name)
  end

  def disable(flag_name, [for_group: group_name]) when is_atom(flag_name) do
    {:ok, _flag} = @store.put(flag_name, Gate.new(:group, group_name, false))
    {:ok, false}
  end

  def disable(flag_name, [for_percentage_of: {type, ratio}])
      when is_atom(flag_name) and is_float(ratio) do
    # Disabling for a ratio is stored as enabling for the complement.
    {:ok, true} = enable(flag_name, for_percentage_of: {type, 1.0 - ratio})
    {:ok, false}
  end

  @doc """
  Clears the data of a feature flag.

  With no options the entire flag is cleared. With an option, only the
  matching gate's rules are removed, so the flag's default state applies
  again for it. Clearing the boolean gate is equivalent to a disabled
  boolean gate, since a missing boolean gate behaves the same.

  ## Options

    * `for_actor: an_actor` - clear the gate for a specific term only; any
      term that implements the `Actor` protocol.
    * `for_group: a_group_name` - clear the gate for a specific group only;
      a binary or an atom (atoms are converted to and stored as binaries).
    * `boolean: true` - clear the boolean gate.
    * `for_percentage: true` - clear any percentage gate.

  ## Examples

      FunWithFlags.clear(:wands, for_actor: hagrid)
      FunWithFlags.clear(:wands)
  """
  @spec clear(atom, options) :: :ok
  def clear(flag_name, options \\ [])

  def clear(flag_name, []) when is_atom(flag_name) do
    {:ok, _flag} = @store.delete(flag_name)
    :ok
  end

  def clear(flag_name, [boolean: true]) do
    delete_gate(flag_name, Gate.new(:boolean, false))
  end

  def clear(flag_name, [for_actor: nil]) do
    clear(flag_name)
  end

  def clear(flag_name, [for_actor: actor]) when is_atom(flag_name) do
    delete_gate(flag_name, Gate.new(:actor, actor, false))
  end

  def clear(flag_name, [for_group: nil]) do
    clear(flag_name)
  end

  def clear(flag_name, [for_group: group_name]) when is_atom(flag_name) do
    delete_gate(flag_name, Gate.new(:group, group_name, false))
  end

  def clear(flag_name, [for_percentage: true]) do
    delete_gate(flag_name, Gate.new(:percentage_of_time, 0.5))
  end

  # Only the gate's identity matters for deletion; the gate's value
  # (false / 0.5 above) is a placeholder.
  defp delete_gate(flag_name, gate) do
    {:ok, _flag} = @store.delete(flag_name, gate)
    :ok
  end

  @doc """
  Returns a list of all flag names currently configured, as atoms.

  Useful for debugging or display purposes, but not meant for runtime use:
  undefined flags, for example, will simply be considered disabled.
  """
  @spec all_flag_names() :: {:ok, [atom]}
  defdelegate all_flag_names(), to: @store

  @doc """
  Returns all the flags currently configured, as data structures.

  Provided for debugging and for building more complex functionality (e.g.
  the web GUI). To query the value of a flag at runtime use `enabled?/2`
  instead.
  """
  @spec all_flags() :: {:ok, [FunWithFlags.Flag.t]}
  defdelegate all_flags(), to: @store

  @doc """
  Returns a `FunWithFlags.Flag` struct for the given name, or `nil` if no
  flag is found. Useful for debugging.
  """
  @spec get_flag(atom) :: FunWithFlags.Flag.t | nil
  def get_flag(name) do
    {:ok, known_names} = all_flag_names()

    case name in known_names do
      true ->
        {:ok, flag} = Config.persistence_adapter().get(name)
        flag

      false ->
        nil
    end
  end

  # Re-read the just-written flag so callers get its effective state.
  defp verify(flag), do: {:ok, Flag.enabled?(flag)}
  defp verify(flag, for: term), do: {:ok, Flag.enabled?(flag, for: term)}
end
lib/fun_with_flags.ex
0.82226
0.583203
fun_with_flags.ex
starcoder
defmodule Json do
  @moduledoc false

  # JSON encoding/decoding on top of Poison with hex conventions:
  # non-negative integers and non-printable binaries are rendered as
  # Base16-encoded strings, and "0x…"/"0X…" strings decode back to binaries.

  @default_conv [big_x: true, all_hex: false]

  # Encode `object` to a JSON string, after hex-preparing it.
  def encode!(object, conv \\ []) do
    object
    |> prepare!(conv)
    |> Poison.encode!()
  end

  # Recursively convert a term into JSON-safe values (maps, lists, strings).
  def prepare!(object, conv \\ []) do
    do_encode(object, conv_validate(conv))
  end

  # Fill in the default conversion options, keeping any caller overrides.
  defp conv_validate(conv), do: Keyword.merge(@default_conv, conv)

  def decode!(binary) do
    {:ok, result} = decode(binary)
    result
  end

  # Decode a JSON string; Poison errors pass through unchanged.
  def decode(binary) do
    with {:ok, object} <- Poison.decode(binary) do
      {:ok, do_decode(object)}
    end
  end

  defp do_encode(map, conv) when is_map(map) do
    Map.new(map, fn {key, value} ->
      {do_encode(key, conv), do_encode(value, conv)}
    end)
  end

  defp do_encode(list, conv) when is_list(list) do
    Enum.map(list, &do_encode(&1, conv))
  end

  # {:raw, num} is an escape hatch: emit the number untouched.
  # This clause must stay before the generic tuple clause below.
  defp do_encode({:raw, num}, _conv) do
    num
  end

  defp do_encode(tuple, conv) when is_tuple(tuple) do
    tuple
    |> Tuple.to_list()
    |> Enum.map(&do_encode(&1, conv))
  end

  defp do_encode(int, conv) when is_integer(int) and int >= 0 do
    Base16.encode(int, Keyword.get(conv, :big_x))
  end

  # The empty binary gets the canonical empty-hex representation.
  defp do_encode("", _conv) do
    "0x"
  end

  defp do_encode(bin, conv) when is_binary(bin) do
    if conv[:all_hex] == false and String.printable?(bin) do
      bin
    else
      Base16.encode(bin, conv[:big_x])
    end
  end

  # Non-byte-aligned bitstrings become a "0"/"1" string, one char per bit.
  defp do_encode(bits, _conv) when is_bitstring(bits) do
    for <<bit::1 <- bits>>, into: "", do: Integer.to_string(bit)
  end

  # Anything else (negative integers, atoms, floats, …) passes through.
  defp do_encode(other, _conv) do
    other
  end

  defp do_decode(map) when is_map(map) do
    Map.new(map, fn {key, value} -> {key, do_decode(value)} end)
  end

  defp do_decode(list) when is_list(list) do
    Enum.map(list, &do_decode/1)
  end

  # Hex-prefixed strings decode back to their binary form.
  defp do_decode(<<"0x", _rest::binary>> = bin), do: Base16.decode(bin)
  defp do_decode(<<"0X", _rest::binary>> = bin), do: Base16.decode(bin)

  defp do_decode(other) do
    other
  end
end
lib/json.ex
0.558568
0.545104
json.ex
starcoder
defmodule PlugProxy do
  @moduledoc """
  A plug for reverse proxy server.

  PlugProxy pipelines the request to the upstream, and the response will be
  sent with [ranch](https://github.com/ninenines/ranch) transport which is
  highly efficient.

  ## Options

  - `:upstream` - Upstream URL string. Additional path and query string will
    be appended.
  - `:url` - URL string or a function which returns an URL.
  - `:transport` - Transport module. Default to `PlugProxy.Transport.Cowboy`.

  Additional options will be passed to hackney. See
  [:hackney.request/5](https://github.com/benoitc/hackney/blob/master/doc/hackney.md#request5)
  for available options.

  ## Examples

      forward "/v2", to: PlugProxy, upstream: "http://example.com/"
      # http://localhost:4000/v2/test => http://example.com/v2/test

      forward "/v2", to: PlugProxy, upstream: "http://example.com?a=1"
      # http://localhost:4000/v2/test?b=2 => http://example.com/v2/test?a=1&b=2

  Or return the URL from a function:

      opts = PlugProxy.init(url: fn conn, _opts -> "http://example.com/a" end)
      PlugProxy.call(conn, opts)
  """

  @behaviour Plug

  alias PlugProxy.BadGatewayError

  @methods ["GET", "POST", "PUT", "PATCH", "DELETE", "HEAD", "OPTIONS"]

  def init(opts) do
    parse_upstream(opts, Keyword.get(opts, :upstream))
  end

  def call(conn, opts) do
    transport = Keyword.get(opts, :transport, PlugProxy.Transport.Cowboy)

    case send_req(conn, opts) do
      {:ok, client} ->
        conn = apply(transport, :write, [conn, client, opts])
        apply(transport, :read, [conn, client, opts])

      {:error, err} ->
        raise BadGatewayError, reason: err
    end
  end

  # Map each supported HTTP method string to the atom hackney expects,
  # generated at compile time.
  for method <- @methods do
    defp method_atom(unquote(method)), do: unquote(method |> String.downcase() |> String.to_atom())
  end

  # Open the upstream request with hackney, streaming the request body.
  defp send_req(conn, opts) do
    url_fun = Keyword.get(opts, :url, &format_url/2)
    url = get_url(url_fun, conn, opts)
    :hackney.request(method_atom(conn.method), url, prepare_headers(conn), :stream, opts)
  end

  # Pre-parse the :upstream option into a %URI{} with a normalized path
  # (no trailing slash) and a non-nil query string, so format_url/2 can
  # concatenate without re-checking for nil.
  defp parse_upstream(opts, upstream) when is_binary(upstream) do
    uri = URI.parse(upstream)
    uri = %{uri | query: uri.query || "", path: String.replace_trailing(uri.path || "", "/", "")}
    Keyword.put(opts, :upstream, uri)
  end

  defp parse_upstream(opts, _), do: opts

  defp get_url(url, conn, opts) when is_function(url), do: url.(conn, opts)
  defp get_url(url, _, _), do: url

  # Build the upstream URL, falling back to the incoming connection for any
  # component the upstream URI does not specify.
  defp format_url(conn, opts) do
    upstream = Keyword.get(opts, :upstream)
    scheme = scheme_str(upstream.scheme || conn.scheme)
    host = upstream.host || conn.host
    port = upstream.port || conn.port
    path = format_path(upstream.path, conn.request_path)
    query = format_query_string(upstream.query, conn.query_string)

    "#{scheme}://#{host}:#{port}#{path}"
    |> append_query_string(query)
  end

  for scheme <- [:http, :https] do
    defp scheme_str(unquote(scheme)), do: unquote(Atom.to_string(scheme))
  end

  defp scheme_str(scheme), do: to_string(scheme)

  defp format_path(target, "/" <> path), do: format_path(target, path)
  defp format_path(target, path), do: "#{target}/#{path}"

  # Merge the upstream's fixed query string with the request's query string.
  # Both empty cases are handled so we never emit a dangling "&": previously
  # a non-empty upstream query with an empty request query produced "a=1&".
  defp format_query_string("", query), do: query
  defp format_query_string(target, ""), do: target
  defp format_query_string(target, query), do: "#{target}&#{query}"

  defp append_query_string(url, ""), do: url
  defp append_query_string(url, query_string), do: "#{url}?#{query_string}"

  # Forward the incoming request headers unchanged.
  defp prepare_headers(conn) do
    conn.req_headers
  end
end
lib/plug_proxy.ex
0.811863
0.527073
plug_proxy.ex
starcoder
defmodule HL7.V2_5_1.Segments.OBR do
  @moduledoc """
  HL7 v2.5.1 OBR (Observation Request) segment definition.

  Each keyword below maps a segment field to the composite data type module
  used to parse it; `nil` means the field is treated as a plain value with no
  composite type. The keywords appear in positional field order — presumably
  OBR-1 through OBR-50 per the HL7 v2.5.1 specification (verify against the
  spec when adding or reordering fields, since position is significant).
  """
  require Logger
  alias HL7.V2_5_1.{DataTypes}

  use HL7.Segment,
    fields: [
      # Segment identifier and ordering
      segment: nil,
      set_id_obr: nil,
      # Order identification
      placer_order_number: DataTypes.Ei,
      filler_order_number: DataTypes.Ei,
      universal_service_identifier: DataTypes.Ce,
      priority_obr: nil,
      # Timing of the request and observation
      requested_date_time: DataTypes.Ts,
      observation_date_time: DataTypes.Ts,
      observation_end_date_time: DataTypes.Ts,
      # Specimen collection details
      collection_volume: DataTypes.Cq,
      collector_identifier: DataTypes.Xcn,
      specimen_action_code: nil,
      danger_code: DataTypes.Ce,
      relevant_clinical_information: nil,
      specimen_received_date_time: DataTypes.Ts,
      specimen_source: DataTypes.Sps,
      # Ordering provider and contact
      ordering_provider: DataTypes.Xcn,
      order_callback_phone_number: DataTypes.Xtn,
      # Free-use placer/filler fields
      placer_field_1: nil,
      placer_field_2: nil,
      filler_field_1: nil,
      filler_field_2: nil,
      # Result status and linkage
      results_rpt_status_chng_date_time: DataTypes.Ts,
      charge_to_practice: DataTypes.Moc,
      diagnostic_serv_sect_id: nil,
      result_status: nil,
      parent_result: DataTypes.Prl,
      quantity_timing: DataTypes.Tq,
      result_copies_to: DataTypes.Xcn,
      parent: DataTypes.Eip,
      # Transport and interpretation
      transportation_mode: nil,
      reason_for_study: DataTypes.Ce,
      principal_result_interpreter: DataTypes.Ndl,
      assistant_result_interpreter: DataTypes.Ndl,
      technician: DataTypes.Ndl,
      transcriptionist: DataTypes.Ndl,
      scheduled_date_time: DataTypes.Ts,
      # NOTE(review): trailing underscores below look like artifacts of name
      # generation from the HL7 field titles; kept as-is since they are part
      # of the public field names.
      number_of_sample_containers_: nil,
      transport_logistics_of_collected_sample: DataTypes.Ce,
      collectors_comment_: DataTypes.Ce,
      transport_arrangement_responsibility: DataTypes.Ce,
      transport_arranged: nil,
      escort_required: nil,
      planned_patient_transport_comment: DataTypes.Ce,
      # Procedure coding
      procedure_code: DataTypes.Ce,
      procedure_code_modifier: DataTypes.Ce,
      placer_supplemental_service_information: DataTypes.Ce,
      filler_supplemental_service_information: DataTypes.Ce,
      medically_necessary_duplicate_procedure_reason: DataTypes.Cwe,
      result_handling: nil,
      parent_universal_service_identifier: DataTypes.Cwe
    ]
end
lib/hl7/2.5.1/segments/obr.ex
0.61057
0.408542
obr.ex
starcoder
defmodule AWS.DevOpsGuru do
  @moduledoc """
  Amazon DevOps Guru is a fully managed service that helps you identify
  anomalous behavior in business critical operational applications.

  You specify the AWS resources that you want DevOps Guru to cover, then the
  Amazon CloudWatch metrics and AWS CloudTrail events related to those
  resources are analyzed. When anomalous behavior is detected, DevOps Guru
  creates an *insight* that includes recommendations, related events, and
  related metrics that can help you improve your operational applications.
  For more information, see [What is Amazon DevOps
  Guru](https://docs.aws.amazon.com/devops-guru/latest/userguide/welcome.html).

  You can specify 1 or 2 Amazon Simple Notification Service topics so you are
  notified every time a new insight is created, and enable DevOps Guru to
  generate an OpsItem in AWS Systems Manager for each insight. To learn about
  the workflow and concepts, see [How DevOps Guru
  works](https://docs.aws.amazon.com/devops-guru/latest/userguide/welcome.html#how-it-works)
  and [Concepts in DevOps
  Guru](https://docs.aws.amazon.com/devops-guru/latest/userguide/concepts.html).
  """

  alias AWS.Client
  alias AWS.Request

  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: nil,
      api_version: "2020-12-01",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "devops-guru",
      global?: false,
      protocol: "rest-json",
      service_id: "DevOps Guru",
      signature_version: "v4",
      signing_name: "devops-guru",
      target_prefix: nil
    }
  end

  # Every operation is a REST call with empty headers, the service metadata
  # above, and a 200 success status; this helper centralizes that shape.
  defp request(client, method, url_path, input, options, query_params \\ []) do
    Request.request_rest(
      client,
      metadata(),
      method,
      url_path,
      query_params,
      [],
      input,
      options,
      200
    )
  end

  # Pagination cursor: AWS expects it as a "NextToken" query parameter.
  defp next_token_params(nil), do: []
  defp next_token_params(token), do: [{"NextToken", token}]

  @doc """
  Adds a notification channel to DevOps Guru.

  A notification channel is used to notify you about important DevOps Guru
  events, such as when an insight is generated. If you use an Amazon SNS
  topic in another account, you must attach a policy to it that grants
  DevOps Guru permission to send it notifications (see [Permissions for
  cross account Amazon SNS
  topics](https://docs.aws.amazon.com/devops-guru/latest/userguide/sns-required-permissions.html));
  for a topic encrypted by an AWS KMS customer-managed key, you must add
  permissions to the CMK (see [Permissions for AWS KMS-encrypted Amazon SNS
  topics](https://docs.aws.amazon.com/devops-guru/latest/userguide/sns-kms-permissions.html)).
  """
  def add_notification_channel(%Client{} = client, input, options \\ []) do
    request(client, :put, "/channels", input, options)
  end

  @doc """
  Returns the number of open reactive insights, the number of open proactive
  insights, and the number of metrics analyzed in your AWS account.

  Use these numbers to gauge the health of operations in your AWS account.
  """
  def describe_account_health(%Client{} = client, options \\ []) do
    request(client, :get, "/accounts/health", nil, options)
  end

  @doc """
  For the time range passed in, returns the number of open reactive and
  proactive insights that were created, and the Mean Time to Recover (MTTR)
  for all closed reactive insights.
  """
  def describe_account_overview(%Client{} = client, input, options \\ []) do
    request(client, :post, "/accounts/overview", input, options)
  end

  @doc """
  Returns details about an anomaly that you specify using its ID.
  """
  def describe_anomaly(%Client{} = client, id, options \\ []) do
    request(client, :get, "/anomalies/#{URI.encode(id)}", nil, options)
  end

  @doc """
  Returns the most recent feedback submitted in the current AWS account and
  Region.
  """
  def describe_feedback(%Client{} = client, input, options \\ []) do
    request(client, :post, "/feedback", input, options)
  end

  @doc """
  Returns details about an insight that you specify using its ID.
  """
  def describe_insight(%Client{} = client, id, options \\ []) do
    request(client, :get, "/insights/#{URI.encode(id)}", nil, options)
  end

  @doc """
  Returns the number of open proactive insights, open reactive insights, and
  the Mean Time to Recover (MTTR) for all closed insights in resource
  collections in your account.

  The one supported resource collection type is AWS CloudFormation stacks;
  DevOps Guru can be configured to analyze only the AWS resources defined in
  the stacks.
  """
  def describe_resource_collection_health(
        %Client{} = client,
        resource_collection_type,
        next_token \\ nil,
        options \\ []
      ) do
    url_path = "/accounts/health/resource-collection/#{URI.encode(resource_collection_type)}"
    request(client, :get, url_path, nil, options, next_token_params(next_token))
  end

  @doc """
  Returns the integration status of services that are integrated with DevOps
  Guru.

  The one service that can be integrated with DevOps Guru is AWS Systems
  Manager, which can be used to create an OpsItem for each generated insight.
  """
  def describe_service_integration(%Client{} = client, options \\ []) do
    request(client, :get, "/service-integrations", nil, options)
  end

  @doc """
  Returns lists of AWS resources that are of the specified resource
  collection type.

  The one supported resource collection type is AWS CloudFormation stacks;
  DevOps Guru can be configured to analyze only the AWS resources defined in
  the stacks.
  """
  def get_resource_collection(
        %Client{} = client,
        resource_collection_type,
        next_token \\ nil,
        options \\ []
      ) do
    url_path = "/resource-collections/#{URI.encode(resource_collection_type)}"
    request(client, :get, url_path, nil, options, next_token_params(next_token))
  end

  @doc """
  Returns a list of the anomalies that belong to an insight that you specify
  using its ID.
  """
  def list_anomalies_for_insight(%Client{} = client, insight_id, input, options \\ []) do
    request(client, :post, "/anomalies/insight/#{URI.encode(insight_id)}", input, options)
  end

  @doc """
  Returns a list of the events emitted by the resources that are evaluated by
  DevOps Guru. You can use filters to specify which events are returned.
  """
  def list_events(%Client{} = client, input, options \\ []) do
    request(client, :post, "/events", input, options)
  end

  @doc """
  Returns a list of insights in your AWS account.

  You can specify which insights are returned by their start time and status
  (`ONGOING`, `CLOSED`, or `ANY`).
  """
  def list_insights(%Client{} = client, input, options \\ []) do
    request(client, :post, "/insights", input, options)
  end

  @doc """
  Returns a list of notification channels configured for DevOps Guru.

  The one supported notification channel is Amazon Simple Notification
  Service (Amazon SNS).
  """
  def list_notification_channels(%Client{} = client, input, options \\ []) do
    request(client, :post, "/channels", input, options)
  end

  @doc """
  Returns a list of a specified insight's recommendations.

  Each recommendation includes a list of related metrics and a list of
  related events.
  """
  def list_recommendations(%Client{} = client, input, options \\ []) do
    request(client, :post, "/recommendations", input, options)
  end

  @doc """
  Collects customer feedback about the specified insight.
  """
  def put_feedback(%Client{} = client, input, options \\ []) do
    request(client, :put, "/feedback", input, options)
  end

  @doc """
  Removes a notification channel from DevOps Guru.
  """
  def remove_notification_channel(%Client{} = client, id, input, options \\ []) do
    request(client, :delete, "/channels/#{URI.encode(id)}", input, options)
  end

  @doc """
  Returns a list of insights in your AWS account matching a search.

  Use the `Filters` parameter to specify one or more statuses (`ONGOING` or
  `CLOSED`) and severities (`LOW`, `MEDIUM`, and `HIGH`), and the `Type`
  parameter to specify `REACTIVE` or `PROACTIVE` in your search.
  """
  def search_insights(%Client{} = client, input, options \\ []) do
    request(client, :post, "/insights/search", input, options)
  end

  @doc """
  Updates the collection of resources that DevOps Guru analyzes.

  The one supported resource collection type is AWS CloudFormation stacks.
  This method also creates the IAM role required for you to use DevOps Guru.
  """
  def update_resource_collection(%Client{} = client, input, options \\ []) do
    request(client, :put, "/resource-collections", input, options)
  end

  @doc """
  Enables or disables integration with a service that can be integrated with
  DevOps Guru.

  The one service that can be integrated is AWS Systems Manager, which can
  be used to create an OpsItem for each generated insight.
  """
  def update_service_integration(%Client{} = client, input, options \\ []) do
    request(client, :put, "/service-integrations", input, options)
  end
end
lib/aws/generated/dev_ops_guru.ex
0.87251
0.441011
dev_ops_guru.ex
starcoder
defmodule Bolt.Sips.Internals.Error do @moduledoc false defexception [:message, :code, :connection_id, :function, :type] @type t :: %__MODULE__{ message: String.t(), code: nil | any(), connection_id: nil | integer(), function: atom(), type: atom() } @doc false # Produce a Bolt.Sips.Internals.Error depending on the context. @spec exception(any(), nil | port(), atom()) :: Bolt.Sips.Internals.Error.t() def exception(%{"message" => message, "code" => code}, pid, function) do %Bolt.Sips.Internals.Error{ message: message, code: code, connection_id: get_id(pid), function: function, type: :cypher_error } end def exception({:error, :closed}, pid, function) do %Bolt.Sips.Internals.Error{ message: "Port #{inspect(pid)} is closed", connection_id: get_id(pid), function: function, type: :connection_error } end def exception(message, pid, function) do %Bolt.Sips.Internals.Error{ message: message_for(function, message), connection_id: get_id(pid), function: function, type: :protocol_error } end @spec message_for(nil | atom(), any()) :: String.t() defp message_for(:handshake, "HTTP") do """ Handshake failed. The port expected a HTTP request. This happens when trying to Neo4J using the REST API Port (default: 7474) instead of the Bolt Port (default: 7687). """ end defp message_for(:handshake, bin) when is_binary(bin) do """ Handshake failed. Expected 01:00:00:00 as a result, received: #{inspect(bin, base: :hex)}. """ end defp message_for(:handshake, other) do """ Handshake failed. Expected 01:00:00:00 as a result, received: #{inspect(other)}. """ end defp message_for(nil, message) do """ Unknown failure: #{inspect(message)} """ end defp message_for(_function, {:error, error}) do case error |> :inet.format_error() |> to_string do "unknown POSIX error" -> to_string(error) other -> other end end defp message_for(_function, {:ignored, []}) do """ The session is in a failed state and ignores further messages. You need to `ACK_FAILURE` or `RESET` in order to send new messages. 
""" end defp message_for(function, message) do """ #{function}: Unknown failure: #{inspect(message)} """ end @spec get_id(any()) :: nil | integer() defp get_id({:sslsocket, {:gen_tcp, port, _tls, _unused_yet}, _pid}) do get_id(port) end defp get_id(port) when is_port(port) do case Port.info(port, :id) do {:id, id} -> id nil -> nil end end defp get_id(_), do: nil end
lib/bolt_sips/internals/error.ex
0.727879
0.426232
error.ex
starcoder
defmodule JiffyEx do @moduledoc """ An Elixir module that wraps the Erlang jiffy library. Provides `encode!/1`, `encode!/2`, `encode/1`, `encode/2`, `decode!/1`, `decode!/2`, `decode/1`, `decode/2` functions for encoding and decoding JSON values. *Note: The jiffy functions by default all raise exceptions, so by definition the `encode!` and `decode!` functions will be faster as they are not wrapped in try/rescue clauses in order to catch any errors that come out of the jiffy library.* """ alias JiffyEx.Opts @doc """ Generates a JSON encoded string of the input term. Same as `encode/2` but raises an exception on errors. """ @spec encode!(term, Keyword.t()) :: String.t() | no_return def encode!(value, opts \\ []) do opts = Opts.parse_encode_opts(opts) :jiffy.encode(value, opts) end @doc """ Parses the JSON input into an Elixir term. Same as `decode/2` but raises an exception on errors. """ @spec decode!(iodata, Keyword.t()) :: term | {:has_trailer, term, iodata} | no_return def decode!(value, opts \\ []) do opts = Opts.parse_decode_opts(opts) :jiffy.decode(value, opts) end @doc """ Generates a JSON encoded string of the input term. ## Options - `:uescape` - (defaults to `false`) Tells jiffy to escape UTF-8 sequences and produce 7-bit clean output. - `:pretty` - (defaults to `false`) Tells jiffy to produce JSON using two-space indentation. - `:force_utf8` - (defaults to `false`) Tells jiffy to force strings to encode as UTF-8 by fixing broken surrogate pairs and/or using the replacement character to remove broken UTF-8 sequences in data. - `:use_nil` - (defaults to `false`) Tells jiffy to encode any instances of `nil` as `null` in the JSON output. - `:escape_forward_slashes` - (defaults to `false`) Tells jiffy to escape the forward slash (`/`) character which can be useful when encoding URLs in some cases. - `:bytes_per_red` - A jiffy option that controls the number of bytes that jiffy will process as an equivalent to a reduction. 
Each 20 reductions jiffy consumes 1% of our allocated time slice for the current process. - `:bytes_per_iter` - A jiffy option for backwards compatible option that is converted into the `bytes_per_red` value. """ @spec encode(term, Keyword.t()) :: {:ok, String.t()} | {:error, Exception.t()} def encode(value, opts \\ []) do {:ok, encode!(value, opts)} rescue exception -> {:error, exception} end @doc """ Parses the JSON input into an Elixir term. ## Options - `:return_maps` - (defaults to `false`) Tells jiffy to return maps instead of jiffy's default tuple of a list of tuples. - `:null_term` - (defaults to `:null`) Tells jiffy to use the specified term instead of `:null` when decoding JSON. If not set, then `:null` is returned. - `:use_nil` - (defaults to `false`) Tells jiffy to return `nil` instead of `:null` when decoding the JSON. Equivalent to setting `null_term: nil`. - `:return_trailer` - (defaults to `false`) Tells jiffy that if any non-whitespace is found after the first JSON term is decoded the return value becomes `{:has_trailer, first_decoded_term, rest_of_the_iodata}. This is useful to decode multiple terms in a single binary. - `:dedupe_keys` - (defaults to `false`) Tells jiffy to ensure that the if there are duplicated keys in a JSON object, the decoded term will only contain one entry with that key. The last value seen will be taken. - `:copy_strings` - (defaults to `false`) When strings are decoded in Erlang, they are created as sub-binaries of the input data, which forces the BEAM VM to keep references to the original input and keeping the original input in memory, sometimes causing leaks. Setting this option will force the decoding process to create new binaries for the decoded strings, so that the original input can be properly garbage collected after decoding the result. - `:bytes_per_red` - A jiffy option that controls the number of bytes that jiffy will process as an equivalent to a reduction. 
Each 20 reductions jiffy consumes 1% of our allocated time slice for the current process. - `:bytes_per_iter` - A jiffy option for backwards compatible option that is converted into the `bytes_per_red` value. """ @spec decode(iodata, Keyword.t()) :: {:ok, term | {:has_trailer, term, iodata}} | {:error, Exception.t()} def decode(value, opts \\ []) do {:ok, decode!(value, opts)} rescue exception -> {:error, exception} end end
lib/jiffy_ex.ex
0.92342
0.679903
jiffy_ex.ex
starcoder
defmodule Nerves.Package.Platform do @moduledoc """ Defines the Nerves package platform behaviour This behaviour is implemented on a module that would be used to construct an artifact for a nerves package. Nerves packages are prioritized to be compiled before any other dependencies, therefore, a package platform is useful for constructing host tools to be used during the elixir compile phase. You can implement both `Nerves.Package.Platform` and `Nerves.Artifact.BuildRunner` in the same module with the using macro. Here is a simple example that touches a file in the `Artifact.build_path` ```elixir defmodule SystemPlatform do use Nerves.Package.Platform def bootstrap(_pkg) do System.put_env("NERVES_BOOTSTRAP_SYSTEM", "1") :ok end def build(pkg, _toolchain, _opts) do build_path = Artifact.build_path(pkg) File.rm_rf!(build_path) File.mkdir_p!(build_path) build_path |> Path.join("file") |> File.touch() {:ok, build_path} end def build_path_link(pkg) do Artifact.build_path(pkg) end def archive(pkg, _toolchain, _opts) do build_path = Artifact.build_path(pkg) name = Artifact.download_name(pkg) <> Artifact.ext(pkg) Nerves.Utils.File.tar(build_path, name) {:ok, Path.join(File.cwd!, name)} end def clean(pkg) do Artifact.build_path(pkg) |> File.rm_rf() end end ``` """ @doc """ Bootstrap is called as the final phase of loading the Nerves environment. It is used typically for setting / unsetting any system environment variables. For example, if we were building a C cross compiler, we would use the bootstrap phase to override CC to point to our compiler. """ @callback bootstrap(Nerves.Package.t()) :: :ok | {:error, error :: term} @doc """ Build path link should return the location inside the `Artifact.build_path` that represents the final artifact. This is used to symlink the global artifact to the local build_path location. 
""" @callback build_path_link(package :: Nerves.Package.t()) :: build_path_link :: String.t() defmacro __using__(_) do quote do @behaviour Nerves.Artifact.BuildRunner @behaviour Nerves.Package.Platform end end end
lib/nerves/package/platform.ex
0.835416
0.772316
platform.ex
starcoder
defmodule AleRFM69.HW do @moduledoc false use Bitwise, only_operators: true # Convert a frequency entered by the user into the coresponding value for the register def freq_to_register(freq) when freq < 100000000, do: freq_to_register freq * 10 def freq_to_register(freq) do <<f1, f2, f3>> = <<round(freq / 61.03515625) :: size(24)>> [f1, f2, f3] end # Writes a list of register values into the registers def write_registers([head | []], pid), do: write_register head, pid def write_registers([head | rest], pid) do write_register head, pid write_registers rest, pid end # Writes multiple values starting from one register address def write_register({addr, val}, pid) when is_list(val) do data = [<<1 :: size(1), addr :: size(7)>> | Enum.map(val, &(<<&1>>)) ] |> Enum.join <<_::size(8), res::bitstring>> = Spi.transfer(pid, data) res end # Write a single value to a register def write_register({addr, val}, pid) do <<_::size(8), res::bitstring>> = Spi.transfer(pid, <<1 :: size(1), addr :: size(7), val>>) res end # Read a single register def read_register(addr, pid) do <<_ :: size(8), res :: bitstring>> = Spi.transfer(pid, << 0 :: size(1), addr :: size(7), 0x00>>) res end # Convert bitstring from register into an uint_8 value def reg_to_uint(<<val::size(8)>>), do: val # Read two registers def read_2register(addr, pid) do <<_::size(8), rest::binary>> = Spi.transfer(pid, <<0 :: size(1), addr :: size(7), 0x00, 0x00>>) rest end # Do a full hardware reset def reset(reset_pin) do with {:ok, rpid} <- Gpio.start_link(reset_pin, :output), :ok <- Gpio.write(rpid, 1), :ok <- Process.sleep(1), :ok <- Gpio.write(rpid, 0), :ok <- Gpio.release(rpid), :ok <- Process.sleep(5) do :ok else _ -> :error end end # Test interrupts - switch into FS mode and configure DIO0 to get an interrupt def test_interrupt(pid, _int_pid) do old_dio = write_register({0x25, 0x00}, pid) |> reg_to_uint # no interrupts old_mode = write_register({0x01, 0x08}, pid) |> reg_to_uint # change mode (add busywaiting here until 
modeready == 1 write_register({0x25, 0xC0}, pid) receive do {:gpio_interrupt, _, :rising} -> write_register({0x01, old_mode}, pid) write_register({0x25, old_dio}, pid) :ok after 1000 -> :error end end # wait a maximum of *timeout* ms (default 10ms) until a function returns *not* nil # timeout (in ms) is always positive, negative numbers are used internally def wait_for(fun, timeout \\ 10) def wait_for(fun, timeout) when timeout > 0, do: wait_for(fun, 0 - :os.system_time(:milli_seconds) - timeout) def wait_for(fun, timeout) do cond do :os.system_time(:milli_seconds) + timeout >= 0 -> :timeout res = fun.() -> res true -> wait_for(fun, timeout) end end defp wait_for_modeready(pid) do case wait_for( fn() -> case read_register(0x27, pid) do <<1::size(1), _::size(7)>>=reg -> reg _ -> false end end, 12) do :timeout -> :timeout _ -> :ok end end def switch_opmode(pid, mode, _diomapping \\ -1) def switch_opmode(pid, :sleep, _diomapping) do # Disable all interrupt sources and switch mode write_register {0x25, 0x00}, pid write_register {0x01, 0x00}, pid wait_for_modeready pid end def switch_opmode(pid, :standby, _diomapping) do # Disable all interrupt sources and switch mode write_register {0x25, 0x00}, pid write_register {0x01, 0x04}, pid wait_for_modeready pid end def switch_opmode(pid, :rx, _diomapping) do # Interrupt on rssi and switch mode write_register {0x25, 0x80}, pid write_register {0x01, 0x10}, pid wait_for_modeready pid end end
lib/ale_rfm69/hw.ex
0.567457
0.409457
hw.ex
starcoder
defmodule ExUnit.AssertionError do @no_value :ex_unit_no_meaningful_value defexception left: @no_value, right: @no_value, message: @no_value, expr: @no_value @doc """ Indicates no meaningful value for a field. """ def no_value do @no_value end end defmodule ExUnit.Assertions do @moduledoc """ This module contains a set of assertion functions that are imported by default into your test cases. In general, a developer will want to use the general `assert` macro in tests. This macro tries to be smart and provide good reporting whenever there is a failure. For example, `assert some_fun() == 10` will fail (assuming `some_fun()` returns 13): Comparison (using ==) failed in: code: some_fun() == 10 lhs: 13 rhs: 10 This module also provides other convenience functions like `assert_in_delta` and `assert_raise` to easily handle other common cases such as checking a floating point number or handling exceptions. """ @doc """ Asserts its argument is `true`. `assert` tries to be smart and provide good reporting whenever there is a failure. In particular, if given a match expression, it will report any failure in terms of that match. Given assert [one] = [two] you'll see: match (=) failed code: [one] = [two] rhs: [2] If the expression is a comparison operator, the message will show the values of the two sides. 
The assertion assert 1+2+3+4 > 15 will fail with the message: Assertion with > failed code: 1+2+3+4 > 15 lhs: 10 rhs: 15 """ defmacro assert({:=, _, [left, right]} = assertion) do code = Macro.escape(assertion) {:case, meta, args} = quote do case right do unquote(left) -> right _ -> raise ExUnit.AssertionError, right: right, expr: unquote(code), message: "match (=) failed" end end quote do right = unquote(right) unquote({:case, [{:export_head, true}|meta], args}) end end defmacro assert(assertion) do case translate_assertion(assertion) do nil -> quote do value = unquote(assertion) unless value do raise ExUnit.AssertionError, expr: unquote(Macro.escape(assertion)), message: "Expected truthy, got #{inspect value}" end value end value -> value end end @doc """ This is a negative assertion, failing if its parameter is truthy. ## Examples refute age < 0 """ defmacro refute({:=, _, [left, right]} = assertion) do code = Macro.escape(assertion) {:case, meta, args} = quote do case right do unquote(left) -> raise ExUnit.AssertionError, right: right, expr: unquote(code), message: "match (=) succeeded, but should have failed" _ -> right end end quote do right = unquote(right) unquote({:case, [{:export_head, true}|meta], args}) end end defmacro refute(assertion) do case translate_assertion({:!, [], [assertion]}) do nil -> quote do value = unquote(assertion) if value do raise ExUnit.AssertionError, expr: unquote(Macro.escape(assertion)), message: "Expected false or nil, got #{inspect value}" end value end value -> {:!, [], [value]} end end ## START HELPERS @operator [:==, :<, :>, :<=, :>=, :===, :=~, :!==, :!=, :in] defp translate_assertion({operator, _, [left, right]} = expr) when operator in @operator do expr = Macro.escape(expr) quote do left = unquote(left) right = unquote(right) assert unquote(operator)(left, right), left: left, right: right, expr: unquote(expr), message: unquote("Assertion with #{operator} failed") end end defp translate_assertion({:!, [], [{operator, _, 
[left, right]} = expr]}) when operator in @operator do expr = Macro.escape(expr) quote do left = unquote(left) right = unquote(right) assert not(unquote(operator)(left, right)), left: left, right: right, expr: unquote(expr), message: unquote("Refute with #{operator} failed") end end defp translate_assertion(_expected) do nil end ## END HELPERS @doc """ Asserts `value` is `true`, displaying the given `message` otherwise. ## Examples assert false, "it will never be true" """ def assert(value, message) when is_binary(message) do assert(value, message: message) end def assert(value, opts) when is_list(opts) do unless value, do: raise(ExUnit.AssertionError, opts) true end @doc """ Asserts `value` is `true`. If it fails, it raises an expectation error using the given `left` and `right` values. You probably don't need to use this—the regular `assert` function handles this for you. ## Examples assert this > that, this, that, "more than" """ def assert(value, left, right, message) when is_binary(message) do assert(value, left: left, right: right, message: message) end @doc """ Asserts a message was or is going to be received. Unlike `assert_received`, it has a default timeout of 100 milliseconds. The `expected` argument is a pattern. ## Examples assert_receive :hello Asserts against a larger timeout: assert_receive :hello, 20_000 You can also match against specific patterns: assert_receive {:hello, _} x = 5 assert_receive {:count, ^x} """ defmacro assert_receive(expected, timeout \\ 100, message \\ nil) do do_assert_receive(expected, timeout, message) end @doc """ Asserts a message was received and is in the current process' mailbox. Timeout is set to 0, so there is no waiting time. The `expected` argument is a pattern. 
## Examples send self, :hello assert_received :hello You can also match against specific patterns: send self, {:hello, "world"} assert_received {:hello, _} """ defmacro assert_received(expected, message \\ nil) do do_assert_receive(expected, 0, message) end defp do_assert_receive(expected, timeout, message) do binary = Macro.to_string(expected) pattern = case expected do {:when, meta, [left, right]} -> {:when, meta, [quote(do: unquote(left) = received), right]} left -> quote(do: unquote(left) = received) end {:receive, meta, args} = quote do receive do unquote(pattern) -> received after timeout -> message = unquote(message) || "No message matching #{unquote(binary)} after #{timeout}ms" flunk(message <> ExUnit.Assertions.__mailbox__(self())) end end quote do timeout = unquote(timeout) unquote({:receive, [{:export_head, true}|meta], args}) end end @max_mailbox_length 10 @doc false def __mailbox__(pid) do {:messages, messages} = Process.info(pid, :messages) length = length(messages) mailbox = Enum.take(messages, @max_mailbox_length) |> Enum.map_join("\n", &inspect/1) mailbox_message(length, mailbox) end defp mailbox_message(0, _mailbox), do: ". The process mailbox is empty." defp mailbox_message(length, mailbox) when length > 10 do ". Process mailbox:\n" <> mailbox <> "\nShowing only #{@max_mailbox_length} of #{length} messages." end defp mailbox_message(_length, mailbox) do ". Process mailbox:\n" <> mailbox end @doc """ Asserts the `exception` is raised during `function` execution with the `expected_message`. Returns the rescued exception, fails otherwise. ## Examples assert_raise ArithmeticError, "bad argument in arithmetic expression", fn -> 1 + "test" end """ def assert_raise(exception, message, function) when is_function(function) do error = assert_raise(exception, function) is_match = cond do is_binary(message) -> Exception.message(error) == message Regex.regex?(message) -> Exception.message(error) =~ message end msg = "Wrong message for #{inspect exception}. 
" <> "Expected #{inspect message}, got #{inspect Exception.message(error)}" assert is_match, message: msg error end @doc """ Asserts the `exception` is raised during `function` execution. Returns the rescued exception, fails otherwise. ## Examples assert_raise ArithmeticError, fn -> 1 + "test" end """ def assert_raise(exception, function) when is_function(function) do try do function.() rescue error -> stacktrace = System.stacktrace name = error.__struct__ cond do name == exception -> error name == ExUnit.AssertionError -> reraise(error, stacktrace) true -> flunk "Expected exception #{inspect exception} but got #{inspect name} (#{Exception.message(error)})" end else _ -> flunk "Expected exception #{inspect exception} but nothing was raised" end end @doc """ Asserts that `value1` and `value2` differ by no more than `delta`. ## Examples assert_in_delta 1.1, 1.5, 0.2 assert_in_delta 10, 15, 4 """ def assert_in_delta(value1, value2, delta, message \\ nil) do diff = abs(value1 - value2) message = message || "Expected the difference between #{inspect value1} and " <> "#{inspect value2} (#{inspect diff}) to be less than #{inspect delta}" assert diff < delta, message end @doc """ Asserts `expression` will throw a value. Returns the thrown value or fails otherwise. ## Examples assert catch_throw(throw 1) == 1 """ defmacro catch_throw(expression) do do_catch(:throw, expression) end @doc """ Asserts `expression` will exit. Returns the exit status/message or fails otherwise. ## Examples assert catch_exit(exit 1) == 1 """ defmacro catch_exit(expression) do do_catch(:exit, expression) end @doc """ Asserts `expression` will cause an error. Returns the error or fails otherwise. 
## Examples assert catch_error(error 1) == 1 """ defmacro catch_error(expression) do do_catch(:error, expression) end defp do_catch(kind, expr) do quote do try do unquote(expr) flunk "Expected to catch #{unquote(kind)}, got nothing" rescue e in [ExUnit.AssertionError] -> reraise(e, System.stacktrace) catch unquote(kind), we_got -> we_got end end end @doc """ Asserts `value` is `nil` or `false` (that is, `value` is not truthy). ## Examples refute true, "This will obviously fail" """ def refute(value, message) do not assert(!value, message) end @doc """ refute_receive message, timeout \\ 100, message \\ nil Asserts `message` was not received (and won't be received) within the `timeout` period. The `not_expected` argument is a match pattern. ## Examples refute_receive :bye Refute received with a explicit timeout: refute_receive :bye, 1000 """ defmacro refute_receive(not_expected, timeout \\ 100, message \\ nil) do do_refute_receive(not_expected, timeout, message) end @doc """ Asserts a message was not received (i.e. it is not in the current process mailbox). The `not_expected` argument must be a match pattern. Timeout is set to 0, so there is no waiting time. ## Examples send self, :hello refute_received :bye """ defmacro refute_received(not_expected, message \\ nil) do do_refute_receive(not_expected, 0, message) end defp do_refute_receive(not_expected, timeout, message) do receive_clause = refute_receive_clause(not_expected, message) quote do receive do unquote(receive_clause) after unquote(timeout) -> false end end end defp refute_receive_clause(not_expected, nil) do binary = Macro.to_string(not_expected) quote do unquote(not_expected) = actual -> flunk "Unexpectedly received message #{inspect actual} (which matched #{unquote binary})" end end defp refute_receive_clause(not_expected, message) do quote do unquote(not_expected) -> flunk unquote(message) end end @doc """ Asserts `value1` and `value2` are not within `delta`. 
If you supply `message`, information about the values will automatically be appended to it. ## Examples refute_in_delta 1.1, 1.2, 0.2 refute_in_delta 10, 11, 2 """ def refute_in_delta(value1, value2, delta, message \\ nil) do diff = abs(value1 - value2) message = if message do message <> " (difference between #{inspect value1} " <> "and #{inspect value2} is less than #{inspect delta})" else "Expected the difference between #{inspect value1} and " <> "#{inspect value2} (#{inspect diff}) to be more than #{inspect delta}" end refute diff < delta, message end @doc """ Fails with a message. ## Examples flunk "This should raise an error" """ @spec flunk :: no_return @spec flunk(String.t) :: no_return def flunk(message \\ "Flunked!") when is_binary(message) do assert false, message: message end end
lib/ex_unit/lib/ex_unit/assertions.ex
0.919527
0.848408
assertions.ex
starcoder
defmodule ExqBatch do alias ExqBatch.Internal alias ExqBatch.Utils @moduledoc """ ExqBatch provides a building block to create complex workflows using Exq jobs. A batch monitors a group of Exq jobs and creates callback job when all the jobs are processed. ## Telemetry The following telemetry events are published. `progress` event is published on each job completion (either dead or done). `done` is published when the batch is done (all the jobs are either dead or done). ``` [ [:exq_batch, :batch, :new], [:exq_batch, :batch, :add], [:exq_batch, :batch, :create], [:exq_batch, :batch, :progress], [:exq_batch, :batch, :done] ] ``` ### Measurements All the events include `duration` in native time unit. Note that duration here measures the time taken for ExqBatch to complete the operation. ### Metadata All the events include `id` attribute (batch id). `add` and `progress` events include `jid` attribute as well. """ defstruct [:id, :redis, :on_complete, :prefix, :ttl] @derive {Inspect, only: [:id]} @type t :: %__MODULE__{id: String.t()} @doc """ Initialize a new batch. on\_complete job will receive an extra arg which includes the list of dead and succeeded jids. Example `%{"dead" => [], "succeeded" => [jid1, jid2]}` ### Options * on\_complete (keyword) *required* - A Keyword list that specifies the details of job that will get enqueued on when all the jobs in a batch get completed * queue (string) *required* - exq job queue * args (array) *required* - exq job args. * class (string) *required* - exq job class. * retry (integer) - no of times the job should be retried. * jid (string) - if not present, A UUID is used. * id (string) - A UUID is used by default. If same id is used for two batch jobs, the previous batch jobs will get cleared. 
""" @spec new(Keyword.t()) :: {:ok, t} | {:error, term()} def new(options) do id = Keyword.get_lazy(options, :id, fn -> UUID.uuid4() end) batch = from_id(id) on_complete = Keyword.fetch!(options, :on_complete) |> to_job() batch = %{batch | on_complete: on_complete} with :ok <- Internal.init(batch) do {:ok, batch} end end @doc """ Add a job to the given batch. There are two ways to add a job to batch 1) Pass the job params and let ExqBatch enqueue the job. ExqBatch will both enqueue the job and add it to the batch using a atomic [MULTI EXEC](https://redis.io/topics/transactions) operation. Refer the on\_complete option in `new/1` for job options. ``` {:ok, batch, jid} = ExqBatch.add(batch, queue: "default", class: Worker, args: [1]) ``` 2) Add a job using jid. Note that, the `add/2` should be called **before** the job is enqueued. Otherwise, there is a potential race condition where the job could finish before `add/2` is called and would cause the batch to hang. Exq allows to specify the jid of the job, so generate a jid first, then add it to the batch and after that enqueue the job. ``` jid = UUID.uuid4() {:ok, batch, ^jid} = ExqBatch.add(batch, jid) {:ok, ^jid} = Exq.enqueue(Exq, "default", SuccessWorker, [1], jid: jid) ``` """ @spec add(t, String.t() | Keyword.t()) :: {:ok, t, binary} | {:error, term()} def add(batch, jid) when is_binary(jid) do with :ok <- Internal.add(batch, jid) do {:ok, batch, jid} end end def add(batch, job) when is_list(job) do job = to_job(job) with :ok <- Internal.add(batch, job) do {:ok, batch, job.jid} end end @doc """ Finalize the batch creation process. 
""" @spec create(t) :: {:ok, t} | {:error, term()} def create(batch) do with :ok <- Internal.create(batch) do {:ok, batch} end end ## Private @doc false def from_id(id) do ttl = Application.get_env(:exq_batch, :ttl_in_seconds, 60 * 60 * 24 * 30) prefix = Application.get_env(:exq_batch, :prefix, "exq_batch") <> ":" redis = Application.get_env( :exq_batch, :redis, Exq.Support.Config.get(:name) |> Exq.Support.Opts.redis_client_name() ) %__MODULE__{ id: id, redis: redis, prefix: prefix, ttl: ttl } end defp to_job(options) do Keyword.put_new_lazy(options, :jid, fn -> UUID.uuid4() end) |> Keyword.put_new_lazy(:retry, fn -> Utils.max_retries() end) |> Keyword.update!(:args, fn args when is_list(args) -> args end) |> Keyword.update!(:class, fn worker when is_atom(worker) -> "Elixir." <> worker = to_string(worker) worker worker when is_binary(worker) -> worker end) |> Keyword.update!(:queue, fn queue -> to_string(queue) end) |> Enum.into(%{}) end end
lib/exq_batch.ex
0.917349
0.849472
exq_batch.ex
starcoder
defmodule Unicode.Script do @moduledoc """ Functions to introspect Unicode scripts for binaries (Strings) and codepoints. """ @behaviour Unicode.Property.Behaviour alias Unicode.Utils @scripts Utils.scripts() |> Utils.remove_annotations() @doc """ Returns the map of Unicode scripts. The script name is the map key and a list of codepoint ranges as tuples as the value. """ def scripts do @scripts end @doc """ Returns a list of known Unicode script names. This function does not return the names of any script aliases. """ @known_scripts Map.keys(@scripts) def known_scripts do @known_scripts end @script_alias Utils.property_value_alias() |> Map.get("sc") |> Utils.invert_map() |> Utils.atomize_values() |> Utils.downcase_keys_and_remove_whitespace() |> Utils.add_canonical_alias() @doc """ Returns a map of aliases for Unicode scripts. An alias is an alternative name for referring to a script. Aliases are resolved by the `fetch/1` and `get/1` functions. """ @impl Unicode.Property.Behaviour def aliases do @script_alias end @doc """ Returns the Unicode ranges for a given script as a list of ranges as 2-tuples. Aliases are resolved by this function. Returns either `{:ok, range_list}` or `:error`. """ @impl Unicode.Property.Behaviour def fetch(script) when is_atom(script) do Map.fetch(scripts(), script) end def fetch(script) do script = Utils.downcase_and_remove_whitespace(script) script = Map.get(aliases(), script, script) Map.fetch(scripts(), script) end @doc """ Returns the Unicode ranges for a given script as a list of ranges as 2-tuples. Aliases are resolved by this function. Returns either `range_list` or `nil`. """ @impl Unicode.Property.Behaviour def get(script) do case fetch(script) do {:ok, script} -> script _ -> nil end end @doc """ Returns the count of the number of characters for a given script. 
## Example iex> Unicode.Script.count("mongolian") 167 """ @impl Unicode.Property.Behaviour def count(script) do with {:ok, script} <- fetch(script) do Enum.reduce(script, 0, fn {from, to}, acc -> acc + to - from + 1 end) end end @doc """ Returns the script name(s) for the given binary or codepoint. In the case of a codepoint, a single script name is returned. For a binary a list of distinct script names represented by the graphemes in the binary is returned. """ def script(string) when is_binary(string) do string |> String.to_charlist() |> Enum.map(&script/1) |> Enum.uniq() end for {script, ranges} <- @scripts do def script(codepoint) when unquote(Utils.ranges_to_guard_clause(ranges)) do unquote(script) end end def script(codepoint) when is_integer(codepoint) and codepoint in 0..0x10FFFF do :unknown end end
lib/unicode/script.ex
0.905961
0.430746
script.ex
starcoder
defmodule ForthVM.Words.IO do
  @moduledoc """
  IO words
  """
  alias ForthVM.Process

  # ---------------------------------------------
  # IO side effects
  # ---------------------------------------------

  @doc """
  set-io-device: ( name -- ) set the current IO device to the value on the top of the data_stack
  """
  def set_io_device(
        tokens,
        [device_name | data_stack],
        return_stack,
        dictionary,
        %{io: %{device: active_device, devices: known_devices}} = meta
      ) do
    # Unknown device names leave the currently active device in place.
    selected_device = Map.get(known_devices, device_name, active_device)
    meta = %{meta | io: Map.put(meta.io, :device, selected_device)}

    Process.next(tokens, data_stack, return_stack, dictionary, meta)
  end

  @doc """
  emit: ( c -- ) pops and prints (without cr) the binary of an ascii value on the top of the data_stack
  """
  def emit(tokens, [c | data_stack], return_stack, dictionary, %{io: %{device: device}} = meta) do
    IO.write(device, <<c>>)
    Process.next(tokens, data_stack, return_stack, dictionary, meta)
  end

  @doc """
  cr: ( -- ) emits a carriage return
  """
  def cr(tokens, data_stack, return_stack, dictionary, %{io: %{device: device}} = meta) do
    IO.write(device, "\n")
    Process.next(tokens, data_stack, return_stack, dictionary, meta)
  end

  @doc """
  .: ( x -- ) pops and prints the literal value on the top of the data_stack
  """
  def dot(tokens, [x | data_stack], return_stack, dictionary, %{io: %{device: device}} = meta) do
    # Interpolation stringifies any literal before writing.
    IO.write(device, "#{x}")
    Process.next(tokens, data_stack, return_stack, dictionary, meta)
  end

  @doc """
  puts: ( x -- ) pops and prints the literal value on the top of the data_stack
  """
  def puts(tokens, [x | data_stack], return_stack, dictionary, %{io: %{device: device}} = meta) do
    IO.write(device, x)
    Process.next(tokens, data_stack, return_stack, dictionary, meta)
  end

  @doc """
  inspect: ( x -- ) pops and prints the inspected value on the top of the data_stack
  """
  def inspect(tokens, [x | data_stack], return_stack, dictionary, %{io: %{device: device}} = meta) do
    IO.inspect(device, x, limit: :infinity)
    Process.next(tokens, data_stack, return_stack, dictionary, meta)
  end

  @doc """
  .s: ( -- ) prints the whole data_stack, without touching it
  """
  def dump_data_stack(
        tokens,
        data_stack,
        return_stack,
        dictionary,
        %{io: %{device: device}} = meta
      ) do
    # Oldest entry first: the stack is stored top-first, so print it reversed.
    for entry <- Enum.reverse(data_stack), do: IO.write(device, "#{entry} ")

    Process.next(tokens, data_stack, return_stack, dictionary, meta)
  end

  @doc """
  "?": ( var -- ) fetch a variable value and prints it
  """
  def fetch_puts(tokens, data_stack, return_stack, dictionary, meta) do
    # Implemented by prepending the fetch ("@") and print (".") words to the token stream.
    Process.next(["@", "." | tokens], data_stack, return_stack, dictionary, meta)
  end
end
lib/forthvm/words/io.ex
0.798501
0.644987
io.ex
starcoder
defmodule RayTracer.Tasks.Chapter9 do
  @moduledoc """
  This module tests camera from Chapter 9.

  Renders a hexagonal room (six colored walls around a floor) containing
  three spheres, and writes the result to a PPM file.
  """
  alias RayTracer.RTuple
  alias RayTracer.Sphere
  alias RayTracer.Plane
  alias RayTracer.Canvas
  alias RayTracer.Material
  alias RayTracer.Color
  alias RayTracer.Light
  alias RayTracer.World
  alias RayTracer.Camera
  alias RayTracer.Matrix

  # Distance parameter of the hexagonal room; each wall sits @r * sqrt(3) / 2
  # (the apothem) away from the origin.
  @r 4

  import RTuple, only: [point: 3, vector: 3]
  import Light, only: [point_light: 2]
  import RayTracer.Transformations

  @doc """
  Generates a file that tests rendering a world.

  `w`/`h` are the canvas width/height in pixels. Always returns `:ok`.
  """
  @spec execute :: :ok
  def execute(w \\ 100, h \\ 50) do
    # RayTracer.Tasks.Chapter9.execute
    world = build_world()
    camera = build_camera(w, h)

    camera
    |> Camera.render(world)
    |> Canvas.export_to_ppm_file

    :ok
  end

  defp build_world do
    # Keep the same object order as the original scene definition:
    # six walls, the floor, then the three spheres.
    objects = walls() ++ [floor(), middle_sphere(), left_sphere(), right_sphere()]

    light = point_light(point(0, 4, -2), Color.new(1, 1, 1))

    World.new(objects, light)
  end

  defp build_camera(w, h) do
    transform = view_transform(point(0, 6, -1.7), point(0, 1, 0), vector(0, 1, 0))
    Camera.new(w, h, :math.pi / 3, transform)
  end

  # The six walls differ only in position, Y rotation and color, so they are
  # produced by a single helper instead of six copy-pasted functions.
  defp walls do
    half = @r / 2
    apothem = @r * :math.sqrt(3) / 2
    red = Color.new(1, 0, 0)
    green = Color.new(0, 1, 0)
    blue = Color.new(0, 0, 1)

    [
      wall(0, apothem, 0, red),
      wall(half, apothem, :math.pi / 3, green),
      wall(half, -apothem, 2 * :math.pi / 3, blue),
      wall(0, -apothem, :math.pi, red),
      wall(-half, -apothem, 4 * :math.pi / 3, green),
      wall(-half, apothem, 5 * :math.pi / 3, blue)
    ]
  end

  # Builds one vertical wall: a plane rotated upright (pi/2 around X),
  # rotated by `angle` around Y, then translated to (x, 0, z).
  defp wall(x, z, angle, color) do
    transform =
      translation(x, 0, z)
      |> Matrix.mult(rotation_y(angle))
      |> Matrix.mult(rotation_x(:math.pi / 2))

    %Plane{floor() | material: %Material{Material.new | color: color, specular: 0}, transform: transform}
  end

  defp floor do
    material = %Material{Material.new | color: Color.new(1, 0.9, 0.9), specular: 0}
    %Plane{Plane.new | material: material}
  end

  defp left_sphere do
    transform =
      translation(-1.5, 0.33, -0.75)
      |> Matrix.mult(scaling(0.33, 0.33, 0.33))

    material = %Material{Material.new | color: Color.new(1, 0.8, 0.1), specular: 0.3, diffuse: 0.7}

    %Sphere{Sphere.new | material: material, transform: transform}
  end

  defp middle_sphere do
    transform = translation(0, 1, 0)

    material = %Material{Material.new | color: Color.new(0.1, 1, 0.5), specular: 0.3, diffuse: 0.7}

    %Sphere{Sphere.new | material: material, transform: transform}
  end

  defp right_sphere do
    transform =
      translation(1.5, 0.5, -0.5)
      |> Matrix.mult(scaling(0.5, 0.5, 0.5))

    material = %Material{Material.new | color: Color.new(0.5, 1, 0.1), specular: 0.3, diffuse: 0.7}

    %Sphere{Sphere.new | material: material, transform: transform}
  end
end
lib/tasks/chapter9.ex
0.915186
0.652954
chapter9.ex
starcoder
defmodule Binary do
  @moduledoc """
  Functions to operate on binaries.

  Wrappers of erlang's `:binary`, functions that try to mimic `String`
  behaviour but on bytes, and some very simple functions that are here just
  to make piping operations on binaries easier.
  """

  @doc """
  Convert list of bytes into binary.
  """
  @spec from_list(list) :: binary
  def from_list(list) when is_list(list) do
    :binary.list_to_bin(list)
  end

  @doc """
  Converts binary to a list of bytes.
  """
  @spec to_list(binary) :: list
  def to_list(bin) when is_binary(bin) do
    :binary.bin_to_list(bin)
  end

  @doc """
  Returns the first byte of the binary as an integer.
  """
  @spec first(binary) :: byte
  def first(bin) when is_binary(bin) do
    :binary.first(bin)
  end

  @doc """
  Returns the last byte of the binary as an integer.
  """
  @spec last(binary) :: byte
  def last(bin) when is_binary(bin) do
    :binary.last(bin)
  end

  @doc """
  Create a binary with the binary content repeated n times.
  """
  @spec copy(binary, non_neg_integer) :: binary
  def copy(bin, n) when is_binary(bin) and is_integer(n) do
    :binary.copy(bin, n)
  end

  @doc """
  Reverse bytes order in the binary.
  """
  @spec reverse(binary) :: binary
  def reverse(binary) when is_binary(binary), do: do_reverse(binary, <<>>)

  # Would be nice to bench this against to_list |> Enum.reverse |> from_list;
  # I only assumed that this version should be faster.
  defp do_reverse(<<>>, acc), do: acc
  defp do_reverse(<<x::binary-size(1), bin::binary>>, acc), do: do_reverse(bin, x <> acc)

  @doc """
  Returns byte at given position. Numbering starts with `0`.

  Position can be negative to make it relative to the end of the binary.
  Returns `nil` if position is outside the binary (following `Enum` and
  `String` behavior).

  ## Examples

      iex> <<1, 2, 3>> |> Binary.at(1)
      2
      iex> <<1, 2, 3>> |> Binary.at(3)
      nil
      iex> <<1, 2, 3>> |> Binary.at(-1)
      3
  """
  @spec at(binary, integer) :: byte | nil
  # FIX: bodiless head previously had a typo'd parameter name ("postion").
  def at(binary, position)

  # Out-of-range positions (either direction) yield nil instead of raising.
  def at(binary, position)
      when is_binary(binary) and is_integer(position) and
             (position >= byte_size(binary) or position < -1 * byte_size(binary)),
      do: nil

  def at(binary, position) when is_integer(position) and position < 0 do
    binary |> at(byte_size(binary) + position)
  end

  def at(binary, position) when is_integer(position) do
    :binary.at(binary, position)
  end

  @doc """
  Split binary into list of binaries based on `pattern`.

  `pattern` can be a binary, or a byte. It mimics erlang's `:binary.split/3`
  behavior rather than `String.split/3`, and only splits once by default.
  `global: true` option can be provided to split on all occurrences.

      iex> <<1, 2, 3, 2, 3>> |> Binary.split(<<3, 2>>)
      [<<1, 2>>, <<3>>]
      iex> <<1, 2, 3, 2, 3>> |> Binary.split(2)
      [<<1>>, <<3, 2, 3>>]
      iex> <<1, 2, 3, 2, 3>> |> Binary.split(2, global: true)
      [<<1>>, <<3>>, <<3>>]
  """
  # TODO maybe add parts: option, but I don't think it's practical for binaries
  @spec split(binary, binary | byte, Keyword.t()) :: list(binary)
  def split(binary, pattern, opts \\ [])

  # A single byte pattern is promoted to a one-byte binary.
  def split(binary, byte, opts)
      when is_binary(binary) and is_integer(byte) and byte >= 0 and byte < 256 do
    split(binary, <<byte>>, opts)
  end

  def split(binary, pattern, opts) do
    global = opts |> Keyword.get(:global, false)
    :binary.split(binary, pattern, (global && [:global]) || [])
  end

  @doc """
  Splits a binary into two at the specified position. Returns a tuple.

  When position is negative it is counted from the end of the binary.

  ## Examples

      iex> <<1, 2, 3>> |> Binary.split_at(1)
      {<<1>>, <<2, 3>>}
      iex> <<1, 2, 3, 4>> |> Binary.split_at(-1)
      {<<1, 2, 3>>, <<4>>}
      iex> <<1, 2, 3>> |> Binary.split_at(10)
      {<<1, 2, 3>>, <<>>}
  """
  @spec split_at(binary, integer) :: {binary, binary}
  def split_at(binary, position)

  # Position beyond either end clamps: everything goes to one side.
  def split_at(binary, position)
      when is_binary(binary) and is_integer(position) and position >= byte_size(binary),
      do: {binary, <<>>}

  def split_at(binary, position)
      when is_binary(binary) and is_integer(position) and position < -1 * byte_size(binary),
      do: {<<>>, binary}

  def split_at(binary, position)
      when is_binary(binary) and is_integer(position) and position < 0 do
    split_at(binary, byte_size(binary) + position)
  end

  def split_at(binary, position) when is_binary(binary) and is_integer(position) do
    {
      Kernel.binary_part(binary, 0, position),
      Kernel.binary_part(binary, position, byte_size(binary) - position)
    }
  end

  @doc """
  Removes all specified trailing bytes from the binary.

  ## Examples

      iex> <<0, 1, 2, 0, 0>> |> Binary.trim_trailing
      <<0, 1, 2>>
      iex> <<1, 2>> |> Binary.trim_trailing(2)
      <<1>>
  """
  @spec trim_trailing(binary, byte) :: binary
  # Maybe also provide an option to pass binary instead of byte.
  def trim_trailing(binary, byte \\ 0) when is_binary(binary) and is_integer(byte) do
    # Trimming the reversed binary's head avoids repeated O(n) tail access.
    do_trim_trailing(binary |> reverse, byte)
  end

  defp do_trim_trailing(<<byte, binary::binary>>, byte), do: do_trim_trailing(binary, byte)
  defp do_trim_trailing(<<binary::binary>>, _byte), do: binary |> reverse

  @doc """
  Pad end of the binary with the provided byte until provided length is achieved.

  ## Examples

      iex> <<3, 7>> |> Binary.pad_trailing(5)
      <<3, 7, 0, 0, 0>>
  """
  @spec pad_trailing(binary, non_neg_integer, byte) :: binary
  def pad_trailing(binary, len, byte \\ 0)

  # Return binary if it's already long enough.
  # FIX: guard is `len >= 0` (was `len > 0`), so `pad_trailing(bin, 0)` no
  # longer raises FunctionClauseError — it returns the binary unchanged,
  # matching String.pad_trailing/3 semantics.
  def pad_trailing(binary, len, byte)
      when is_binary(binary) and is_integer(len) and is_integer(byte) and len >= 0 and
             byte_size(binary) >= len,
      do: binary

  def pad_trailing(binary, len, byte)
      when is_binary(binary) and is_integer(len) and is_integer(byte) and len > 0 do
    binary <> (<<byte>> |> copy(len - byte_size(binary)))
  end

  @doc """
  Removes all specified leading bytes from the binary.
  """
  @spec trim_leading(binary, byte) :: binary
  def trim_leading(binary, byte \\ 0)

  # Repeating `byte` in both the pattern and the argument list requires them
  # to be equal, so only the requested byte is stripped.
  def trim_leading(<<byte, binary::binary>>, byte)
      when is_binary(binary) and is_integer(byte),
      do: trim_leading(binary, byte)

  def trim_leading(binary, byte) when is_binary(binary) and is_integer(byte), do: binary

  @doc """
  Pad with the provided byte at the beginning of the binary until provided
  length is achieved.
  """
  @spec pad_leading(binary, non_neg_integer, byte) :: binary
  def pad_leading(binary, len, byte \\ 0)

  # Return binary if it is already long enough.
  # FIX: `len >= 0` (was `len > 0`) so a zero target length is a no-op
  # instead of a FunctionClauseError.
  def pad_leading(binary, len, byte)
      when is_binary(binary) and is_integer(len) and is_integer(byte) and len >= 0 and
             byte_size(binary) >= len,
      do: binary

  def pad_leading(binary, len, byte)
      when is_binary(binary) and is_integer(len) and is_integer(byte) and len > 0 do
    (<<byte>> |> copy(len - byte_size(binary))) <> binary
  end

  @doc """
  Replace binary pattern inside the binary with the replacement.

  For readability, examples are presented on strings, but do note we are
  operating on bytes, not codepoints.

      iex> "a-b-c" |> Binary.replace("-", "..")
      "a..b..c"

  By default it replaces all occurrences.
  If you only want to replace the first occurrence, set `global: false`

      iex> "a-b-c" |> Binary.replace("-", "..", global: false)
      "a..b-c"
  """
  @spec replace(binary, binary, binary, Keyword.t()) :: binary
  def replace(binary, pattern, replacement, opts \\ [])
      when is_binary(binary) and is_binary(pattern) and is_binary(replacement) do
    # We default to global replacement following Elixir.String as opposed to
    # erlang's :binary.replace.
    erl_opts =
      case opts[:global] do
        false -> []
        _ -> [:global]
      end

    :binary.replace(binary, pattern, replacement, erl_opts)
  end

  @doc """
  Returns the length of the longest common prefix in the provided list of binaries.

  Uses `:binary.longest_common_prefix/1`

      iex> ["moo", "monad", "mojo"] |> Binary.longest_common_prefix
      2
  """
  @spec longest_common_prefix([binary]) :: non_neg_integer
  def longest_common_prefix(binaries) when is_list(binaries) do
    :binary.longest_common_prefix(binaries)
  end

  @doc """
  Returns the length of the longest common suffix in the provided list of binaries.

  Uses `:binary.longest_common_suffix/1`
  """
  @spec longest_common_suffix([binary]) :: non_neg_integer
  def longest_common_suffix(binaries) when is_list(binaries) do
    :binary.longest_common_suffix(binaries)
  end

  @doc """
  Extracts part of the binary starting at given position with given length.

  Based on `Kernel.binary_part/3`, but:
  * it also accepts negative position, interpreting it as position relative
    to the end of the binary.
  * length is allowed to be outside binary size i.e. it is max number of
    fetched bytes

  ## Examples

      iex> x = <<1, 2, 3, 4, 5>>
      <<1, 2, 3, 4, 5>>
      iex> x |> Binary.part(1, 2)
      <<2, 3>>
      iex> x |> Binary.part(-2, 1)
      <<4>>
      iex> x |> Binary.part(-2, -1)
      <<3>>
      iex> x |> Binary.part(-1, 10)
      <<5>>
  """
  def part(binary, position, len)

  # Allow negative position.
  def part(binary, position, len)
      when is_binary(binary) and is_integer(position) and is_integer(len) and position < 0 do
    part(binary, byte_size(binary) + position, len)
  end

  # Length goes outside the binary, which would raise ArgumentError in
  # Kernel.binary_part/3 — clamp it instead.
  def part(binary, position, len)
      when is_binary(binary) and is_integer(position) and is_integer(len) and
             position + len > byte_size(binary) do
    part(binary, position, byte_size(binary) - position)
  end

  # Length is negative and goes outside the binary — clamp to the start.
  def part(binary, position, len)
      when is_binary(binary) and is_integer(position) and is_integer(len) and len < 0 and
             position + len < 0 do
    part(binary, position, -1 * position)
  end

  def part(binary, position, len)
      when is_binary(binary) and is_integer(position) and is_integer(len) do
    Kernel.binary_part(binary, position, len)
  end

  @doc """
  Interpret binary as an unsigned integer representation.

  Second option decides endianness which defaults to `:big`.
  Uses `:binary.decode_unsigned/1`

  ## Examples

      iex> <<1, 2>> |> Binary.to_integer
      258
      iex> <<1, 2>> |> Binary.to_integer(:little)
      513
  """
  @spec to_integer(binary, :big | :little) :: non_neg_integer
  def to_integer(binary, endianness \\ :big) when is_binary(binary) do
    :binary.decode_unsigned(binary, endianness)
  end

  @doc """
  Returns binary representation of the provided integer.

  Second option decides endianness which defaults to `:big`.
  Uses `:binary.encode_unsigned/1`

  ## Examples

      iex> 1234 |> Binary.from_integer
      <<4, 210>>
      iex> 1234 |> Binary.from_integer(:little)
      <<210, 4>>
  """
  @spec from_integer(non_neg_integer, :big | :little) :: binary
  def from_integer(int, endianness \\ :big) when is_integer(int) and int >= 0 do
    :binary.encode_unsigned(int, endianness)
  end

  @doc """
  Returns hex representation of the provided binary.

      iex> <<190,239>> |> Binary.to_hex
      "beef"

  Just a shorthand for: Base.encode16(binary, case: :lower)
  """
  @spec to_hex(binary) :: binary
  def to_hex(binary) when is_binary(binary) do
    binary |> Base.encode16(case: :lower)
  end

  @doc """
  Returns binary from the hex representation.

      iex> "ff01" |> Binary.from_hex
      <<255, 1>>

  Just a shorthand for: Base.decode16!(binary, case: :mixed)
  """
  @spec from_hex(binary) :: binary
  # Before API is frozen, maybe it should return {:ok, and have a bang version.
  # Although if somebody cares about that, she can use Base.decode16 and this
  # is meant to be short.
  def from_hex(binary) when is_binary(binary) do
    binary |> Base.decode16!(case: :mixed)
  end

  @doc """
  Takes the first N bytes from the binary.

  When negative count is given, last N bytes are returned.
  In case when count > byte_size, it will return the full binary.
  """
  @spec take(binary, integer) :: binary
  def take(binary, count)

  def take(binary, count) when is_binary(binary) and is_integer(count) and count < 0 do
    binary |> split_at(count) |> elem(1)
  end

  def take(binary, count) when is_binary(binary) and is_integer(count) and count >= 0 do
    binary |> split_at(count) |> elem(0)
  end

  @doc """
  Drops first N bytes from the binary.

  If provided count is negative, it drops N bytes from the end.
  In case where count > byte_size, it will return `<<>>`
  """
  @spec drop(binary, integer) :: binary
  def drop(binary, count)

  def drop(binary, count) when is_binary(binary) and is_integer(count) and count < 0 do
    binary |> split_at(count) |> elem(0)
  end

  def drop(binary, count) when is_binary(binary) and is_integer(count) and count >= 0 do
    binary |> split_at(count) |> elem(1)
  end

  @doc """
  Append binary or a byte to another binary. Handy for piping.

  With binary argument it's exactly the same as `Kernel.<>/2`
  """
  @spec append(binary, binary | byte) :: binary
  def append(left, right)

  def append(left, right)
      when is_binary(left) and is_integer(right) and right >= 0 and right < 256 do
    left <> <<right>>
  end

  def append(left, right) when is_binary(left) and is_binary(right) do
    left <> right
  end

  @doc """
  Prepend binary or a byte to another binary.
  """
  @spec prepend(binary, binary | byte) :: binary
  def prepend(left, right)

  def prepend(left, right)
      when is_binary(left) and is_integer(right) and right >= 0 and right < 256 do
    <<right>> <> left
  end

  def prepend(left, right) when is_binary(left) and is_binary(right) do
    right <> left
  end
end
lib/binary.ex
0.897102
0.641507
binary.ex
starcoder
defmodule Moonsugar.Maybe do
  @moduledoc """
  The Maybe module contains functions that help create and interact with the
  maybe type. The Maybe type is represented as either `{:just, value}` or
  `:nothing`.
  """

  @doc """
  Helper function to create a just tuple.

  ## Examples
      iex> Maybe.just(3)
      {:just, 3}
  """
  def just(val) do
    {:just, val}
  end

  @doc """
  Helper function to create a nothing value

  ## Examples
      iex> Maybe.nothing()
      :nothing
  """
  def nothing() do
    :nothing
  end

  @doc """
  Extracts a value from a maybe type.
  If the provided argument doesn't have a value, the default is returned.

  ## Examples
      iex> Maybe.get_with_default({:just, 3}, 0)
      3

      iex> Maybe.get_with_default(:nothing, 0)
      0
  """
  def get_with_default(maybe, default) do
    case maybe do
      {:just, val} -> val
      _ -> default
    end
  end

  @doc """
  Maps over a maybe type.

  ## Examples
      iex> Maybe.map({:just, 3}, fn(x) -> x * 2 end)
      {:just, 6}

      iex> Maybe.map(:nothing, fn(x) -> x * 2 end)
      :nothing
  """
  def map(maybe, fun) do
    case maybe do
      {:just, val} -> just(fun.(val))
      _ -> :nothing
    end
  end

  @doc """
  Like map, but chain takes a function that returns a maybe type.
  This prevents nested maybes.

  ## Examples
      iex> Maybe.map({:just, 3}, fn(x) ->
      ...>   cond do
      ...>     x > 0 -> {:just, x * 3}
      ...>     x <= 0 -> :nothing
      ...>   end
      ...> end)
      {:just, {:just, 9}}

      iex> Maybe.chain({:just, 3}, fn(x) ->
      ...>   cond do
      ...>     x > 0 -> {:just, x * 3}
      ...>     x <= 0 -> :nothing
      ...>   end
      ...> end)
      {:just, 9}

      iex> Maybe.chain({:just, 0}, fn(x) ->
      ...>   cond do
      ...>     x > 0 -> {:just, x * 3}
      ...>     x <= 0 -> :nothing
      ...>   end
      ...> end)
      :nothing
  """
  def chain(maybe, fun) do
    # Both the input and the function's result must be :just for the
    # chained value to survive; anything else collapses to :nothing.
    with {:just, val} <- maybe,
         {:just, innerVal} <- fun.(val) do
      {:just, innerVal}
    else
      _ -> :nothing
    end
  end

  @doc """
  Determines if a value is a maybe type.

  ## Examples
      iex> Maybe.is_maybe({:just, 0})
      true

      iex> Maybe.is_maybe(:nothing)
      true

      iex> Maybe.is_maybe({:ok, 3})
      false
  """
  def is_maybe(maybe) do
    case maybe do
      {:just, _} -> true
      :nothing -> true
      _ -> false
    end
  end

  @doc """
  Converts a variable that might be nil to a maybe type.

  ## Examples
      iex> Maybe.from_nilable("khajiit has wares")
      {:just, "khajiit has wares"}

      iex> Maybe.from_nilable(nil)
      :nothing
  """
  def from_nilable(val) do
    cond do
      is_nil(val) -> :nothing
      true -> just(val)
    end
  end

  @doc """
  Converts a variable from a result type to a maybe type.

  ## Examples
      iex> Maybe.from_result({:ok, 3})
      {:just, 3}

      iex> Maybe.from_result({:error, "I took an arrow to the knee"})
      :nothing
  """
  def from_result(result) do
    case result do
      {:ok, val} -> {:just, val}
      _ -> :nothing
    end
  end

  @doc """
  Converts a variable from a validation type to a maybe type.

  ## Examples
      iex> Maybe.from_validation({:success, "Dragon Slayed"})
      {:just, "Dragon Slayed"}

      iex> Maybe.from_validation({:fail, ["You Died"]})
      :nothing
  """
  # FIX: the failure doctest above previously called Maybe.from_result/1,
  # documenting (and doctesting) the wrong function.
  def from_validation(result) do
    case result do
      {:success, val} -> {:just, val}
      _ -> :nothing
    end
  end
end
lib/maybe.ex
0.831622
0.570301
maybe.ex
starcoder
defmodule Farmbot.Regimen.Manager do
  @moduledoc "Manages a Regimen"
  use Farmbot.Logger
  use GenServer
  alias Farmbot.CeleryScript
  alias Farmbot.Asset
  alias Asset.Regimen
  import Farmbot.Regimen.NameProvider

  import Farmbot.System.ConfigStorage,
    only: [
      get_config_value: 3
    ]

  defmodule Error do
    @moduledoc false
    defexception [:epoch, :regimen, :message]
  end

  defmodule Item do
    @moduledoc false
    @type t :: %__MODULE__{
            time_offset: integer,
            sequence_id: integer,
            ref: reference
          }
    defstruct [:time_offset, :sequence, :sequence_id, :name, :ref]

    def parse(%{time_offset: offset, sequence_id: sequence_id}) do
      %Item{
        time_offset: offset,
        sequence_id: sequence_id,
        ref: make_ref()
      }
    end
  end

  # Parses raw regimen items into Item structs, sorted by time offset.
  def filter_items(regimen) do
    regimen.regimen_items
    |> Enum.map(&Item.parse(&1))
    |> Enum.sort(&(&1.time_offset <= &2.time_offset))
  end

  @doc false
  def start_link(regimen, time) do
    regimen.farm_event_id || raise "Starting a regimen requires a farm_event id"
    GenServer.start_link(__MODULE__, [regimen, time], name: via(regimen))
  end

  def init([regimen, time]) do
    # parse and sort the regimen items
    items = filter_items(regimen)
    first_item = List.first(items)
    regimen = %{regimen | regimen_items: items}
    epoch = Farmbot.TimeUtils.build_epoch(time)

    initial_state = %{
      next_execution: nil,
      regimen: regimen,
      epoch: epoch,
      timer: nil
    }

    if first_item do
      state = build_next_state(regimen, first_item, self(), initial_state)
      {:ok, state}
    else
      Logger.warn(2, "[#{regimen.name} #{regimen.farm_event_id}] has no items on regimen.")
      {:ok, initial_state}
    end
  end

  def handle_call({:reindex, regimen, time}, _from, state) do
    Logger.debug(3, "Reindexing regimen by id: #{regimen.id}")
    regimen.farm_event_id || raise "Can't reindex without farm_event_id"

    # parse and sort the regimen items
    items = filter_items(regimen)
    first_item = List.first(items)
    regimen = %{regimen | regimen_items: items}
    epoch = if time, do: Farmbot.TimeUtils.build_epoch(time), else: state.epoch

    initial_state = %{
      regimen: regimen,
      epoch: epoch,
      # Leave these so they get cleaned up
      next_execution: state.next_execution,
      timer: state.timer
    }

    if first_item do
      state = build_next_state(regimen, first_item, self(), initial_state)
      {:reply, :ok, state}
    else
      Logger.warn(2, "[#{regimen.name} #{regimen.farm_event_id}] has no items on regimen.")
      {:reply, :ok, initial_state}
    end
  end

  def handle_info(:execute, state) do
    {item, regimen} = pop_item(state.regimen)

    if item do
      do_item(item, regimen, state)
    else
      complete(regimen, state)
    end
  end

  def handle_info(:skip, state) do
    {item, regimen} = pop_item(state.regimen)

    if item do
      # Skip: drop the item without executing it, then schedule the next one.
      do_item(nil, regimen, state)
    else
      complete(regimen, state)
    end
  end

  defp complete(regimen, state) do
    Logger.success(
      2,
      "[#{regimen.name} #{regimen.farm_event_id}] has executed all current items!"
    )

    items = filter_items(state.regimen)
    regimen = %{state.regimen | regimen_items: items}
    {:noreply, %{state | regimen: regimen}}
  end

  # Executes `item`'s sequence (when given) and schedules the next item, or
  # completes the regimen when no items remain. `item` may be nil to skip
  # execution while still advancing the schedule.
  defp do_item(item, regimen, state) do
    if item do
      sequence = Farmbot.Asset.get_sequence_by_id!(item.sequence_id)
      {:ok, ast} = CeleryScript.AST.decode(sequence)
      ast_with_label = %{ast | args: Map.put(ast.args, :label, sequence.name)}

      Logger.busy(
        2,
        "[#{regimen.name} #{regimen.farm_event_id}] is going to execute: #{sequence.name}"
      )

      CeleryScript.execute(ast_with_label)
    end

    next_item = List.first(regimen.regimen_items)

    if next_item do
      new_state = build_next_state(regimen, next_item, self(), state)
      {:noreply, new_state}
    else
      complete(regimen, state)
    end
  end

  def build_next_state(%Regimen{} = regimen, %Item{} = nx_itm, pid, state) do
    if state.timer do
      Process.cancel_timer(state.timer)
    end

    next_dt = Timex.shift(state.epoch, milliseconds: nx_itm.time_offset)
    timezone = get_config_value(:string, "settings", "timezone")
    now = Timex.now(timezone)
    offset_from_now = Timex.diff(next_dt, now, :milliseconds)

    timer =
      if offset_from_now < 0 and offset_from_now < -60_000 do
        # Items more than a minute in the past are skipped rather than run late.
        Process.send_after(pid, :skip, 1)
      else
        {msg, real_offset} = ensure_not_negative(offset_from_now)
        Process.send_after(pid, msg, real_offset)
      end

    if offset_from_now > 0 do
      timestr = Farmbot.TimeUtils.format_time(next_dt)
      from_now = Timex.from_now(next_dt, Farmbot.Asset.device().timezone)

      msg =
        "[#{regimen.name}] scheduled by FarmEvent (#{regimen.farm_event_id}) " <>
          "will execute next item #{from_now} (#{timestr})"

      Logger.info(3, msg)
    end

    %{state | timer: timer, regimen: regimen, next_execution: next_dt}
  end

  defp ensure_not_negative(offset) when offset < -60_000, do: {:skip, 1}
  defp ensure_not_negative(offset) when offset < 0, do: {:execute, 1000}
  defp ensure_not_negative(offset), do: {:execute, offset}

  @spec pop_item(Regimen.t()) :: {Item.t() | nil, Regimen.t()}
  # when there is more than one item pop the top one
  defp pop_item(%Regimen{regimen_items: [do_this_one | items]} = r) do
    {do_this_one, %Regimen{r | regimen_items: items}}
  end

  # FIX: previously there was no clause for an empty item list, so
  # handle_info(:execute | :skip, ...) crashed with FunctionClauseError
  # instead of falling through to complete/2.
  defp pop_item(%Regimen{regimen_items: []} = r), do: {nil, r}
end
lib/farmbot/regimen/manager.ex
0.620622
0.475544
manager.ex
starcoder
defmodule Phoenix.Socket.Transport do
  @moduledoc """
  API for building transports.

  This module describes what is required to build a Phoenix transport. The
  transport sits between the socket and channels, forwarding client messages
  to channels and vice-versa.

  A transport is responsible for:

    * Implementing the transport behaviour
    * Establishing the socket connection
    * Handling of incoming messages
    * Handling of outgoing messages
    * Managing channels
    * Providing secure defaults

  ## The transport behaviour

  The transport requires two functions:

    * `default_config/0` - returns the default transport configuration
      to be merged when the transport is declared in the socket module

    * `handlers/0` - returns a map of handlers. For example, if the
      transport can be run cowboy, it just need to specify the
      appropriate cowboy handler

  ## Socket connections

  Once a connection is established, the transport is responsible
  for invoking the `Phoenix.Socket.connect/2` callback and acting
  accordingly. Once connected, the transport should request the
  `Phoenix.Socket.id/1` and subscribe to the topic if one exists.
  On subscribed, the transport must be able to handle "disconnect"
  broadcasts on the given id topic.

  The `connect/6` function in this module can be used as a
  convenience or a documentation on such steps.

  ## Incoming messages

  Incoming messages are encoded in whatever way the transport
  chooses. Those messages must be decoded in the transport into a
  `Phoenix.Socket.Message` before being forwarded to a channel.

  Most of those messages are user messages except by:

    * "heartbeat" events in the "phoenix" topic - should just emit
      an OK reply
    * "phx_join" on any topic - should join the topic
    * "phx_leave" on any topic - should leave the topic

  The function `dispatch/3` can help with handling of such messages.

  ## Outgoing messages

  Channels can send two types of messages back to a transport:
  `Phoenix.Socket.Message` and `Phoenix.Socket.Reply`. Those
  messages are encoded in the channel into a format defined by
  the transport. That's why transports are required to pass a
  serializer that abides to the behaviour described in
  `Phoenix.Transports.Serializer`.

  ## Managing channels

  Because channels are spawned from the transport process, transports
  must trap exits and correctly handle the `{:EXIT, _, _}` messages
  arriving from channels, relaying the proper response to the client.

  The function `on_exit_message/2` should aid with that.

  ## Security

  This module also provides functions to enable a secure environment
  on transports that, at some point, have access to a `Plug.Conn`.

  The functionality provided by this module help with doing "origin"
  header checks and ensuring only SSL connections are allowed.

  ## Remote Client

  Channels can reply, synchronously, to any `handle_in/3` event. To match
  pushes with replies, clients must include a unique `ref` with every
  message and the channel server will reply with a matching ref where
  the client can pick up the callback for the matching reply.

  Phoenix includes a JavaScript client for WebSocket and Longpolling
  support using JSON encodings.

  However, a client can be implemented for other protocols and encodings by
  abiding by the `Phoenix.Socket.Message` format.

  See `web/static/js/phoenix.js` for an example transport client
  implementation.
  """
  use Behaviour
  require Logger
  alias Phoenix.Socket
  alias Phoenix.Socket.Message
  alias Phoenix.Socket.Reply

  @doc """
  Provides a keyword list of default configuration for socket transports.
  """
  defcallback default_config() :: Keyword.t

  @doc """
  Provides handlers for different applications.
  """
  defcallback handlers() :: map

  @doc """
  Handles the socket connection.

  It builds a new `Phoenix.Socket` and invokes the handler
  `connect/2` callback and returns the result.

  If the connection was successful, generates `Phoenix.PubSub`
  topic from the `id/1` callback.
  """
  def connect(endpoint, handler, transport_name, transport, serializer, params) do
    socket = %Socket{endpoint: endpoint,
                     transport: transport,
                     transport_pid: self(),
                     transport_name: transport_name,
                     handler: handler,
                     pubsub_server: endpoint.__pubsub_server__,
                     serializer: serializer}

    case handler.connect(params, socket) do
      {:ok, socket} ->
        # A nil id means the socket is anonymous; a binary id is stored so the
        # transport can subscribe to it for "disconnect" broadcasts.
        case handler.id(socket) do
          nil                   -> {:ok, socket}
          id when is_binary(id) -> {:ok, %Socket{socket | id: id}}
          invalid               ->
            Logger.error "#{inspect handler}.id/1 returned invalid identifier #{inspect invalid}. " <>
                         "Expected nil or a string."
            :error
        end

      :error ->
        :error

      invalid ->
        Logger.error "#{inspect handler}.connect/2 returned invalid value #{inspect invalid}. " <>
                     "Expected {:ok, socket} or :error"
        :error
    end
  end

  @doc """
  Dispatches `Phoenix.Socket.Message` to a channel.

  All serialized, remote client messages should be deserialized and
  forwarded through this function by adapters.

  The following returns must be handled by transports:

    * `:noreply` - Nothing to be done by the transport
    * `{:reply, reply}` - The reply to be sent to the client
    * `{:joined, channel_pid, reply}` - The channel was joined
      and the reply must be sent as result
    * `{:error, reason, reply}` - An error happened and the reply
      must be sent as result

  """
  def dispatch(msg, sockets, socket)

  # Heartbeats never reach a channel; reply :ok immediately.
  def dispatch(%{ref: ref, topic: "phoenix", event: "heartbeat"}, _sockets, _socket) do
    {:reply, %Reply{ref: ref, topic: "phoenix", status: :ok, payload: %{}}}
  end

  def dispatch(%Message{} = msg, sockets, socket) do
    sockets
    |> HashDict.get(msg.topic)
    |> do_dispatch(msg, socket)
  end

  # No channel pid for this topic yet: a "phx_join" starts one, anything else
  # is an unmatched topic.
  defp do_dispatch(nil, %{event: "phx_join", topic: topic} = msg, socket) do
    if channel = socket.handler.__channel__(topic, socket.transport_name) do
      socket = %Socket{socket | topic: topic, channel: channel}

      log_info topic, fn ->
        "JOIN #{topic} to #{inspect(channel)}\n" <>
        " Transport: #{inspect socket.transport}\n" <>
        " Parameters: #{inspect msg.payload}"
      end

      case Phoenix.Channel.Server.join(socket, msg.payload) do
        {:ok, response, pid} ->
          log_info topic, fn -> "Replied #{topic} :ok" end
          {:joined, pid, %Reply{ref: msg.ref, topic: topic, status: :ok, payload: response}}

        {:error, reason} ->
          log_info topic, fn -> "Replied #{topic} :error" end
          {:error, reason, %Reply{ref: msg.ref, topic: topic, status: :error, payload: reason}}
      end
    else
      reply_ignore(msg, socket)
    end
  end

  defp do_dispatch(nil, msg, socket) do
    reply_ignore(msg, socket)
  end

  # Topic already has a channel process: relay the message to it.
  defp do_dispatch(socket_pid, msg, _socket) do
    send(socket_pid, msg)
    :noreply
  end

  # Internal "phoenix" topic traffic is not logged.
  defp log_info("phoenix" <> _, _func), do: :noop
  defp log_info(_topic, func), do: Logger.info(func)

  defp reply_ignore(msg, socket) do
    Logger.debug fn -> "Ignoring unmatched topic \"#{msg.topic}\" in #{inspect(socket.handler)}" end
    {:error, :unmatched_topic, %Reply{ref: msg.ref, topic: msg.topic,
                                      status: :error, payload: %{reason: "unmatched topic"}}}
  end

  @doc """
  Returns the message to be relayed when a channel exits.

  Clean exits become "phx_close"; anything else becomes "phx_error".
  """
  def on_exit_message(topic, reason) do
    case reason do
      :normal        -> %Message{topic: topic, event: "phx_close", payload: %{}}
      :shutdown      -> %Message{topic: topic, event: "phx_close", payload: %{}}
      {:shutdown, _} -> %Message{topic: topic, event: "phx_close", payload: %{}}
      _              -> %Message{topic: topic, event: "phx_error", payload: %{}}
    end
  end

  @doc """
  Forces SSL in the socket connection.

  Uses the endpoint configuration to decide so. It is a noop
  if the connection has been halted.
  """
  def force_ssl(%Plug.Conn{halted: true} = conn, _socket, _endpoint) do
    conn
  end

  def force_ssl(conn, socket, endpoint) do
    if force_ssl = force_ssl_config(socket, endpoint) do
      Plug.SSL.call(conn, Plug.SSL.init(force_ssl))
    else
      conn
    end
  end

  # The resolved force_ssl options are cached per {endpoint, socket} pair.
  defp force_ssl_config(socket, endpoint) do
    Phoenix.Config.cache(endpoint, {:force_ssl, socket}, fn _ ->
      {:cache,
       if force_ssl = endpoint.config(:force_ssl) do
         Keyword.put_new(force_ssl, :host, endpoint.config(:url)[:host] || "localhost")
       end}
    end)
  end

  @doc """
  Logs the transport request.

  Available for transports that generate a connection.
  """
  def transport_log(conn, level) do
    if level do
      Plug.Logger.call(conn, Plug.Logger.init(log: level))
    else
      conn
    end
  end

  @doc """
  Checks the origin request header against the list of allowed origins.

  Should be called by transports before connecting when appropriate.
  If the origin header matches the allowed origins, no origin header was
  sent or no origin was configured, it will return the given connection.

  Otherwise a 403 Forbidden response will be sent and the connection halted.
  It is a noop if the connection has been halted.
  """
  def check_origin(conn, endpoint, check_origin, sender \\ &Plug.Conn.send_resp/1)

  def check_origin(%Plug.Conn{halted: true} = conn, _endpoint, _check_origin, _sender),
    do: conn

  def check_origin(conn, endpoint, check_origin, sender) do
    import Plug.Conn
    origin = get_req_header(conn, "origin") |> List.first

    cond do
      is_nil(origin) ->
        conn
      origin_allowed?(check_origin, origin, endpoint) ->
        conn
      true ->
        resp(conn, :forbidden, "")
        |> sender.()
        |> halt()
    end
  end

  # check_origin == false disables the check entirely.
  defp origin_allowed?(false, _, _),
    do: true
  # check_origin == true compares against the endpoint's configured host.
  defp origin_allowed?(true, origin, endpoint),
    do: compare?(URI.parse(origin).host, endpoint.config(:url)[:host])
  defp origin_allowed?(check_origin, origin, _endpoint) when is_list(check_origin),
    do: origin_allowed?(origin, check_origin)

  defp origin_allowed?(origin, allowed_origins) do
    origin = URI.parse(origin)

    Enum.any?(allowed_origins, fn allowed ->
      allowed = URI.parse(allowed)

      compare?(origin.scheme, allowed.scheme) and
      compare?(origin.port, allowed.port) and
      compare?(origin.host, allowed.host)
    end)
  end

  # A nil component in the allowed origin acts as a wildcard.
  defp compare?(_, nil), do: true
  defp compare?(x, y),   do: x == y
end
lib/phoenix/socket/transport.ex
0.883783
0.558026
transport.ex
starcoder
defmodule Axon.CompilerError do
  # Wraps any exception raised while building a layer's prediction
  # expression, tagging it with the Axon graph node that failed.
  defexception [:exception, :graph]

  @impl true
  def message(%{graph: %Axon{op: op}, exception: exception}) do
    op_inspect =
      if is_atom(op) do
        Atom.to_string(op)
      else
        "#{inspect(op)}"
      end

    """
    error while building prediction for #{op_inspect}:

    ** (#{inspect(exception.__struct__)}) #{Exception.message(exception)}
    """
  end
end

defmodule Axon.Compiler do
  @moduledoc false
  require Logger
  import Axon.Shared

  ## Init JIT Compilation

  @doc false
  # Compiles both halves of a model: {init_fn, predict_fn}.
  def __compile__(graph, opts) do
    mode = opts[:mode] || :inference
    {compile_init(graph), compile_predict(graph, mode)}
  end

  @doc false
  def __jit_init__(graph, [] = args, opts) do
    fun = compile_init(graph)
    Nx.Defn.jit_or_apply(fun, args, opts)
  end

  # Builds a zero-arity init function that walks the graph and merges every
  # layer's initialized parameters into a single flat map keyed by layer name.
  defp compile_init(%Axon{} = graph) do
    init_fn = fn ->
      {cache, _} = to_init_fun(graph, {%{}, %{}})

      cache
      |> Enum.reduce(%{}, fn {_, layer}, layers_acc ->
        Map.merge(layer, layers_acc)
      end)
    end

    fn -> Nx.Defn.jit_or_apply(init_fn, []) end
  end

  defp compile_init(graph) do
    raise ArgumentError,
          "attempting to compile initialization function from" <>
            " an unrecognized graph #{inspect(graph)}, if you" <>
            " are attempting to initialize a model with a container" <>
            " output, use `Axon.container`"
  end

  # Post-order traversal over the graph. `cache` maps node id -> initialized
  # params (deduplicating shared subgraphs); `op_counts` tracks how many nodes
  # of each op were seen, which feeds default layer naming via `name_fn`.
  defp to_init_fun(
         %Axon{
           id: id,
           parent: parent,
           op: op,
           name: name_fn,
           params: params,
           opts: opts,
           policy: %{params: dtype},
           hooks: hooks
         },
         cache_and_counts
       ) do
    # Recurse into parents first (container nodes use deep_reduce).
    {cache, op_counts} =
      case {op, parent} do
        {:container, [parent]} ->
          deep_reduce(parent, cache_and_counts, &to_init_fun/2)

        {_, nil} ->
          cache_and_counts

        {_, parents} ->
          Enum.reduce(parents, cache_and_counts, &to_init_fun/2)
      end

    # Recurrent layers may carry an explicit hidden-state subgraph in opts.
    {cache, op_counts} =
      case opts[:hidden_state] do
        state when is_tuple(state) ->
          state
          |> Tuple.to_list()
          |> Enum.reduce({cache, op_counts}, &to_init_fun/2)

        nil ->
          {cache, op_counts}
      end

    case cache do
      # Already initialized this node (shared subgraph) — skip.
      %{^id => _} ->
        {cache, op_counts}

      %{} ->
        if Enum.empty?(params) do
          {cache, op_counts}
        else
          layer_params =
            Enum.reduce(params, %{}, fn {key, param}, layer_params ->
              init_param(key, param, layer_params, dtype)
            end)

          layer_params = apply_hooks(layer_params, :initialize, nil, hooks)

          name = name_fn.(op, op_counts)
          params = %{name => layer_params}

          {
            Map.put(cache, id, params),
            Map.update(op_counts, op, 1, fn x -> x + 1 end)
          }
        end
    end
  end

  # Initializes a single parameter (or a tuple of parameters, as used by
  # composite layers) with its configured Axon.Initializers function.
  defp init_param(key, param, layer_params, dtype) do
    case param do
      %{name: name, shape: shape, initializer: initializer} ->
        fun = apply(Axon.Initializers, initializer, [[type: dtype, shape: shape]])
        Map.put(layer_params, name, fun)

      params when is_tuple(params) ->
        params
        |> Tuple.to_list()
        |> Enum.map(fn %{shape: shape, initializer: initializer} ->
          apply(Axon.Initializers, initializer, [[type: dtype, shape: shape]])
        end)
        |> List.to_tuple()
        |> then(&Map.put(layer_params, key, &1))
    end
  end

  ## Model JIT Compilation

  @doc false
  def __jit_predict__(graph, args, opts) do
    {mode, opts} = Keyword.pop(opts, :mode, :inference)
    fun = compile_predict(graph, mode)
    Nx.Defn.jit_or_apply(fun, args, opts)
  end

  # Builds the 2-arity predict function. In :train mode the result also
  # carries updated layer state (e.g. batch-norm running statistics).
  defp compile_predict(%Axon{} = graph, mode) do
    predict_fn = fn params, inputs ->
      {pred_expr, {state_expr, _, _}} =
        to_predict_fun(graph, {%{}, %{}, %{}}, params, inputs, mode)

      case mode do
        :train ->
          %{prediction: pred_expr, state: state_expr}

        :inference ->
          pred_expr
      end
    end

    &Nx.Defn.jit_or_apply(predict_fn, [&1, &2])
  end

  defp compile_predict(graph, _mode) do
    raise ArgumentError,
          "attempting to compile predict function from" <>
            " an unrecognized graph #{inspect(graph)}, if you" <>
            " are attempting to initialize a model with a container" <>
            " output, use `Axon.container`"
  end

  # Memoized dispatch: reuse a cached expression for shared nodes, otherwise
  # recurse and wrap any failure in Axon.CompilerError for better messages.
  defp to_predict_fun(%{id: id} = graph, {state, cache, op_counts}, params, inputs, mode) do
    case cache do
      %{^id => res} ->
        {res, {state, cache, op_counts}}

      %{} ->
        try do
          recur_predict_fun(graph, {state, cache, op_counts}, params, inputs, mode)
        rescue
          e -> reraise Axon.CompilerError.exception(graph: graph, exception: e), __STACKTRACE__
        end
    end
  end

  # Container nodes: map over the nested structure of parents.
  defp recur_predict_fun(
         %Axon{id: id, op: :container, parent: [parents]},
         cache_and_counts,
         params,
         inputs,
         mode
       ) do
    {exprs, {state, cache,
op_counts}} =
      deep_map_reduce(parents, cache_and_counts, &to_predict_fun(&1, &2, params, inputs, mode))

    op_counts = Map.update(op_counts, :container, 1, fn x -> x + 1 end)

    cache = Map.put(cache, id, exprs)

    {exprs, {state, cache, op_counts}}
  end

  ## Custom Layers

  # Layers whose op is an arbitrary function: parameters are resolved from
  # `params` by name (honoring frozen flags), inputs are cast to the compute
  # policy, and the function is applied with params (and opts, if any) last.
  defp recur_predict_fun(
         %Axon{
           id: id,
           name: name_fn,
           op: op,
           parent: parents,
           params: layer_params,
           opts: opts,
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         params,
         inputs,
         mode
       )
       when is_function(op) and is_list(parents) do
    {exprs, {state, cache, op_counts}} =
      Enum.map_reduce(
        parents,
        cache_and_counts,
        &to_predict_fun(&1, &2, params, inputs, mode)
      )

    # Name is derived from counts *before* this node is counted.
    name = name_fn.(op, op_counts)
    op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)

    inp_params =
      Map.new(layer_params, fn {k, %{name: v, frozen: frz}} ->
        {k, maybe_freeze(params[name][v], frz)}
      end)

    inputs =
      exprs
      |> Enum.map(&safe_as_type(&1, compute))
      |> Enum.map(&apply_hooks(&1, :pre_forward, mode, hooks))

    args =
      case opts do
        [] -> inputs ++ [inp_params]
        [_ | _] -> inputs ++ [inp_params, opts]
      end

    res =
      args
      |> then(&apply(op, &1))
      |> apply_hooks(:forward, mode, hooks)
      |> apply_hooks(:backward, mode, hooks)
      |> safe_as_type(output)

    {res, {state, Map.put(cache, id, res), op_counts}}
  end

  ## Activation Layers

  @activation_layers [:celu, :elu, :exp, :gelu, :hard_sigmoid, :hard_silu, :hard_tanh] ++
                       [:leaky_relu, :linear, :log_sigmoid, :mish, :relu, :relu6] ++
                       [:sigmoid, :silu, :selu, :softmax, :softplus, :softsign, :tanh] ++
                       [:log_softmax]

  # Parameterless activations, dispatched by name into Axon.Activations.
  defp recur_predict_fun(
         %Axon{
           id: id,
           op: op,
           parent: [parent],
           policy: %{compute: compute, output: output},
           opts: opts,
           hooks: hooks
         },
         cache_and_counts,
         params,
         inputs,
         mode
       )
       when op in @activation_layers do
    {res, {state, cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, params, inputs, mode)

    op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)

    input =
      res
      |> safe_as_type(compute)
      |> apply_hooks(:pre_forward, mode, hooks)

    args =
      case opts do
        [] ->
          [input]

        [_ | _] ->
          [input, opts]
      end

    res =
      args
      |> then(&apply(Axon.Activations, op, &1))
      |> safe_as_type(output)
      |> apply_hooks(:forward, mode, hooks)
      |> apply_hooks(:backward, mode, hooks)

    {res, {state, Map.put(cache, id, res), op_counts}}
  end

  ## Linear Layers

  @linear_layers [:dense, :bilinear, :conv, :depthwise_conv, :conv_transpose]

  # Kernel + optional bias layers dispatched into Axon.Layers. When
  # use_bias: false a scalar zero stands in for the bias tensor.
  defp recur_predict_fun(
         %Axon{
           id: id,
           op: op,
           name: name_fn,
           parent: parent,
           params: layer_params,
           policy: %{compute: compute, output: output},
           opts: opts,
           hooks: hooks
         },
         cache_and_counts,
         params,
         inputs,
         mode
       )
       when op in @linear_layers do
    {exprs, {state, cache, op_counts}} =
      Enum.map_reduce(
        parent,
        cache_and_counts,
        &to_predict_fun(&1, &2, params, inputs, mode)
      )

    name = name_fn.(op, op_counts)
    op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)

    {use_bias, opts} = Keyword.pop(opts, :use_bias, true)

    w = layer_param(layer_params, "kernel", params[name], compute)

    b =
      if use_bias do
        layer_param(layer_params, "bias", params[name], compute)
      else
        Nx.tensor(0.0, type: compute)
      end

    inputs =
      exprs
      |> Enum.map(&safe_as_type(&1, compute))
      |> Enum.map(&apply_hooks(&1, :pre_forward, mode, hooks))

    args =
      case opts do
        [] -> inputs ++ [w, b]
        [_ | _] -> inputs ++ [w, b, opts]
      end

    res =
      args
      |> then(&apply(Axon.Layers, op, &1))
      |> safe_as_type(output)
      |> apply_hooks(:forward, mode, hooks)
      |> apply_hooks(:backward, mode, hooks)

    {res, {state, Map.put(cache, id, res), op_counts}}
  end

  ## Sparse Layers

  # NOTE(review): embedding input is not cast to `compute` (indices must stay
  # integer) and hooks run with :inference regardless of mode — presumably
  # intentional; confirm against upstream Axon.
  defp recur_predict_fun(
         %Axon{
           id: id,
           name: name_fn,
           op: :embedding,
           parent: [parent],
           params: layer_params,
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         params,
         inputs,
         mode
       ) do
    {res, {state, cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, params, inputs, mode)

    name = name_fn.(:embedding, op_counts)
    op_counts = Map.update(op_counts, :embedding, 1, fn x -> x + 1 end)

    w = layer_param(layer_params, "kernel", params[name], compute)

    res =
      res
      |> apply_hooks(:pre_forward, :inference, hooks)
      |> Axon.Layers.embedding(w)
      |> safe_as_type(output)
      |> apply_hooks(:forward, :inference, hooks)
      |> apply_hooks(:backward, :inference, hooks)

    {res, {state, Map.put(cache, id, res), op_counts}}
  end

  ## Pooling Layers

  @pooling_layers [:max_pool, :avg_pool, :adaptive_avg_pool] ++
                    [:adaptive_max_pool, :adaptive_lp_pool, :lp_pool] ++
                    [:global_lp_pool, :global_max_pool, :global_avg_pool]

  defp recur_predict_fun(
         %Axon{
           id: id,
           op: op,
           parent: [parent],
           opts: opts,
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         params,
         inputs,
         mode
       )
       when op in @pooling_layers do
    {res, {state, cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, params, inputs, mode)

    op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)

    res =
      res
      |> safe_as_type(compute)
      |> apply_hooks(:pre_forward, :inference, hooks)
      |> then(&apply(Axon.Layers, op, [&1, opts]))
      |> safe_as_type(output)
      |> apply_hooks(:forward, :inference, hooks)
      |> apply_hooks(:backward, :inference, hooks)

    {res, {state, Map.put(cache, id, res), op_counts}}
  end

  ## Dropout Layers

  @dropout_layers [:dropout, :feature_alpha_dropout, :spatial_dropout, :alpha_dropout]

  # Dropout is only applied in :train mode; inference passes input through
  # (cast to the output type) unchanged.
  defp recur_predict_fun(
         %Axon{
           id: id,
           op: op,
           parent: [parent],
           opts: opts,
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         params,
         inputs,
         mode
       )
       when op in @dropout_layers do
    {res, {state, cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, params, inputs, mode)

    op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)

    case mode do
      :train ->
        res =
          res
          |> safe_as_type(compute)
          |> apply_hooks(:pre_forward, :train, hooks)
          |> then(&apply(Axon.Layers, op, [&1, opts]))
          |> safe_as_type(output)
          |> apply_hooks(:forward, :train, hooks)
          |> apply_hooks(:backward, :train, hooks)

        {res, {state, Map.put(cache, id, res), op_counts}}

      :inference ->
        # Skip dropout in inference mode
        res = safe_as_type(res, output)
        {res, {state, Map.put(cache, id, res), op_counts}}
    end
  end

  # Depthwise-then-pointwise separable 2-D convolution with two kernels
  # (k1, k2) and optional per-kernel biases.
  defp recur_predict_fun(
         %Axon{
           id: id,
           name: name_fn,
           op: :separable_conv2d,
           parent: [parent],
           opts: opts,
           params: layer_params,
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         params,
         inputs,
         mode
       ) do
    {res, {state, cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, params, inputs, mode)

    name = name_fn.(:separable_conv2d, op_counts)
    op_counts = Map.update(op_counts, :separable_conv2d, 1, fn x -> x + 1 end)

    {use_bias, opts} = Keyword.pop!(opts, :use_bias)

    k1 = layer_param(layer_params, "k1", params[name], compute)
    k2 = layer_param(layer_params, "k2", params[name], compute)

    {b1, b2} =
      if use_bias do
        {layer_param(layer_params, "b1", params[name], compute),
         layer_param(layer_params, "b2", params[name], compute)}
      else
        {Nx.tensor(0, type: compute), Nx.tensor(0, type: compute)}
      end

    res =
      res
      |> safe_as_type(compute)
      |> apply_hooks(:pre_forward, mode, hooks)
      |> Axon.Layers.separable_conv2d(k1, b1, k2, b2, opts)
      |> safe_as_type(output)
      |> apply_hooks(:forward, mode, hooks)
      |> apply_hooks(:backward, mode, hooks)

    {res, {state, Map.put(cache, id, res), op_counts}}
  end

  # Same as above for 3-D: three kernels (k1..k3) and optional biases.
  defp recur_predict_fun(
         %Axon{
           id: id,
           name: name_fn,
           op: :separable_conv3d,
           parent: [parent],
           opts: opts,
           params: layer_params,
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         params,
         inputs,
         mode
       ) do
    {res, {state, cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, params, inputs, mode)

    name = name_fn.(:separable_conv3d, op_counts)
    op_counts = Map.update(op_counts, :separable_conv3d, 1, fn x -> x + 1 end)

    {use_bias, opts} = Keyword.pop!(opts, :use_bias)

    k1 = layer_param(layer_params, "k1", params[name], compute)
    k2 = layer_param(layer_params, "k2", params[name], compute)
    k3 = layer_param(layer_params, "k3", params[name], compute)

    {b1, b2, b3} =
      if use_bias do
        {layer_param(layer_params, "b1", params[name], compute),
         layer_param(layer_params, "b2", params[name], compute),
         layer_param(layer_params, "b3", params[name], compute)}
      else
        {Nx.tensor(0, type: compute), Nx.tensor(0, type:
compute), Nx.tensor(0, type: compute)}
      end

    res =
      res
      |> safe_as_type(compute)
      |> apply_hooks(:pre_forward, mode, hooks)
      |> Axon.Layers.separable_conv3d(k1, b1, k2, b2, k3, b3, opts)
      |> safe_as_type(output)
      |> apply_hooks(:forward, mode, hooks)
      |> apply_hooks(:backward, mode, hooks)

    {res, {state, Map.put(cache, id, res), op_counts}}
  end

  ## Normalization Layers

  # Normalization layers that maintain running statistics (mean/var).
  @normalization_with_stats [:batch_norm, :instance_norm]

  defp recur_predict_fun(
         %Axon{
           id: id,
           name: name_fn,
           op: op,
           parent: [parent],
           opts: [epsilon: epsilon, channel_index: channel_index, momentum: momentum],
           params: layer_params,
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         params,
         inputs,
         mode
       )
       when op in @normalization_with_stats do
    {res, {state, cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, params, inputs, mode)

    name = name_fn.(op, op_counts)
    op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)

    training? = mode == :train

    norm_opts = [
      epsilon: epsilon,
      channel_index: channel_index,
      momentum: momentum,
      training?: training?
    ]

    g = layer_param(layer_params, "gamma", params[name], compute)
    b = layer_param(layer_params, "beta", params[name], compute)
    mean = layer_param(layer_params, "mean", params[name], compute)
    var = layer_param(layer_params, "var", params[name], compute)

    case mode do
      :train ->
        # In training the layer returns {output, running_mean, running_var};
        # the updated statistics are recorded in the layer state under `name`.
        {out, ra_mean, ra_var} =
          res
          |> safe_as_type(compute)
          |> apply_hooks(:pre_forward, :train, hooks)
          |> then(&apply(Axon.Layers, op, [&1, g, b, mean, var, norm_opts]))
          |> then(fn {y, m, v} -> {safe_as_type(y, output), m, v} end)
          |> apply_hooks(:forward, :train, hooks)
          |> apply_hooks(:backward, :train, hooks)

        res = safe_as_type(out, output)

        state = Map.put(state, name, %{"mean" => ra_mean, "var" => ra_var})

        {res, {state, Map.put(cache, id, res), op_counts}}

      :inference ->
        # Inference uses the stored statistics and returns only the output.
        res =
          res
          |> safe_as_type(compute)
          |> apply_hooks(:pre_forward, :inference, hooks)
          |> then(&apply(Axon.Layers, op, [&1, g, b, mean, var, norm_opts]))
          |> safe_as_type(output)
          |> apply_hooks(:forward, :inference, hooks)
          |> apply_hooks(:backward, :inference, hooks)

        {res, {state, Map.put(cache, id, res), op_counts}}
    end
  end

  # Stateless normalization layers (scale/shift only).
  @normalization_layers [:layer_norm, :group_norm]

  defp recur_predict_fun(
         %Axon{
           id: id,
           name: name_fn,
           op: op,
           parent: [parent],
           opts: opts,
           params: layer_params,
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         params,
         inputs,
         mode
       )
       when op in @normalization_layers do
    {res, {state, cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, params, inputs, mode)

    name = name_fn.(op, op_counts)
    op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)

    g = layer_param(layer_params, "gamma", params[name], compute)
    b = layer_param(layer_params, "beta", params[name], compute)

    res =
      res
      |> safe_as_type(compute)
      |> apply_hooks(:pre_forward, mode, hooks)
      |> then(&apply(Axon.Layers, op, [&1, g, b, opts]))
      |> safe_as_type(output)
      |> apply_hooks(:forward, mode, hooks)
      |> apply_hooks(:backward, mode, hooks)

    {res, {state, Map.put(cache, id, res), op_counts}}
  end

  ## Recurrent Layers

  @recurrent_layers [:gru, :lstm, :conv_lstm]

  # Recurrent layers: resolve hidden state (explicit subgraph or initialized
  # from shape), fetch kernels/bias, then unroll statically or dynamically.
  defp recur_predict_fun(
         %Axon{
           id: id,
           name: name_fn,
           op: op,
           parent: [parent],
           params: layer_params,
           policy: %{compute: compute, output: output},
           opts: opts,
           hooks: hooks
         },
         cache_and_counts,
         params,
         inputs,
         mode
       )
       when op in @recurrent_layers do
    {res, cache_and_counts} =
      to_predict_fun(parent, cache_and_counts, params, inputs, mode)

    # GRU carries one hidden tensor; LSTM/ConvLSTM carry two.
    num_carry = if op == :gru, do: 1, else: 2
    # ConvLSTM takes a single bias; GRU/LSTM take four.
    num_bias = if op == :conv_lstm, do: 1, else: 4

    {hidden_state, opts} = Keyword.pop(opts, :hidden_state)
    {hidden_state_shape, opts} = Keyword.pop(opts, :hidden_state_shape)
    {recurrent_initializer, opts} = Keyword.pop(opts, :recurrent_initializer)
    {activation, opts} = Keyword.pop(opts, :activation)
    {gate, opts} = Keyword.pop(opts, :gate)
    {use_bias, opts} = Keyword.pop(opts, :use_bias)
    # Remaining opts after popping :unroll are conv opts for ConvLSTM cells.
    {unroll, conv_opts} = Keyword.pop(opts, :unroll)

    {hidden_state, {state, cache, op_counts}} =
      to_hidden_state(
        hidden_state,
        res,
        cache_and_counts,
        params,
        inputs,
        num_carry,
        recurrent_initializer,
        hidden_state_shape,
        mode
      )

    name = name_fn.(op, op_counts)
    op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)

    input_kernel = layer_param(layer_params, "input_kernel", params[name], compute)
    hidden_kernel = layer_param(layer_params, "hidden_kernel", params[name], compute)

    bias =
      if use_bias do
        layer_param(layer_params, "bias", params[name], compute)
      else
        List.duplicate(Nx.tensor(0, type: compute), num_bias) |> List.to_tuple()
      end

    input = safe_as_type(res, compute)
    carry = deep_new(hidden_state, &safe_as_type(&1, compute))

    # TODO: Should these be hooked together? Not at all?
{input, carry} = apply_hooks({input, carry}, :pre_forward, mode, hooks)

    cell_fn = get_cell_fn(op, gate, activation, conv_opts)

    # Unroll the recurrence either statically (loop expanded at compile time)
    # or dynamically (while-loop expression).
    {carry, out} =
      case unroll do
        :static ->
          Axon.Recurrent.static_unroll(
            cell_fn,
            input,
            carry,
            input_kernel,
            hidden_kernel,
            bias
          )

        :dynamic ->
          Axon.Recurrent.dynamic_unroll(
            cell_fn,
            input,
            carry,
            input_kernel,
            hidden_kernel,
            bias
          )
      end

    # Result is {final_carry, output_sequence}, both cast to the output type.
    res = {deep_new(carry, &safe_as_type(&1, output)), safe_as_type(out, output)}

    res = apply_hooks(res, :forward, mode, hooks)
    res = apply_hooks(res, :backward, mode, hooks)

    {res, {state, Map.put(cache, id, res), op_counts}}
  end

  ## Element-wise layers

  @element_wise_layers [:add, :subtract, :multiply]

  # Variadic element-wise ops: fold the corresponding Nx function over all
  # parent expressions, left to right.
  defp recur_predict_fun(
         %Axon{
           id: id,
           op: op,
           parent: parents,
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         params,
         inputs,
         mode
       )
       when op in @element_wise_layers do
    {[expr | exprs], {state, cache, op_counts}} =
      Enum.map_reduce(
        parents,
        cache_and_counts,
        &to_predict_fun(&1, &2, params, inputs, mode)
      )

    op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)

    [expr | exprs] =
      [expr | exprs]
      |> List.to_tuple()
      |> apply_hooks(:pre_forward, mode, hooks)
      |> Tuple.to_list()

    res =
      Enum.reduce(exprs, expr, fn next_expr, acc ->
        input = safe_as_type(next_expr, compute)
        acc = safe_as_type(acc, compute)
        safe_as_type(apply(Nx, op, [acc, input]), output)
      end)

    res = apply_hooks(res, :forward, mode, hooks)
    res = apply_hooks(res, :backward, mode, hooks)

    {res, {state, Map.put(cache, id, res), op_counts}}
  end

  ## Shape Layers

  @shape_layers [:resize, :flatten, :reshape, :transpose, :pad]

  # Parameterless shape-manipulation layers dispatched into Axon.Layers.
  defp recur_predict_fun(
         %Axon{
           id: id,
           op: op,
           parent: [parent],
           policy: %{compute: compute, output: output},
           hooks: hooks,
           opts: opts
         },
         cache_and_counts,
         params,
         inputs,
         mode
       )
       when op in @shape_layers do
    {res, {state, cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, params, inputs, mode)

    # FIX: count under the layer's own `op`, not a hard-coded :flatten.
    # Previously every shape layer incremented the :flatten counter, so
    # e.g. multiple :reshape layers would share a default-name index and
    # collide, inconsistent with every other recur_predict_fun clause.
    op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)

    res =
      res
      |> safe_as_type(compute)
      |> apply_hooks(:pre_forward, mode, hooks)
      |> then(&apply(Axon.Layers, op, [&1, opts]))
      |> apply_hooks(:forward, mode, hooks)
      |> apply_hooks(:backward, mode, hooks)
      |> safe_as_type(output)

    {res, {state, Map.put(cache, id, res), op_counts}}
  end

  # Concatenates all parent expressions along the configured axis.
  defp recur_predict_fun(
         %Axon{
           id: id,
           op: :concatenate,
           parent: parents,
           opts: [axis: axis],
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         params,
         inputs,
         mode
       ) do
    {exprs, {state, cache, op_counts}} =
      Enum.map_reduce(
        parents,
        cache_and_counts,
        &to_predict_fun(&1, &2, params, inputs, mode)
      )

    op_counts = Map.update(op_counts, :concatenate, 1, fn x -> x + 1 end)

    inps = Enum.map(exprs, &safe_as_type(&1, compute))

    inps =
      inps
      |> List.to_tuple()
      |> apply_hooks(:pre_forward, mode, hooks)
      |> Tuple.to_list()

    res =
      inps
      |> Nx.concatenate(axis: axis)
      |> safe_as_type(output)
      |> apply_hooks(:forward, mode, hooks)
      |> apply_hooks(:backward, mode, hooks)

    {res, {state, Map.put(cache, id, res), op_counts}}
  end

  # Conditional layer: parents are [condition_input, true_branch, false_branch];
  # cond_fn must produce a scalar u8 (boolean) tensor.
  defp recur_predict_fun(
         %Axon{
           id: id,
           op: :cond,
           parent: parents,
           opts: [cond: cond_fn],
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         params,
         inputs,
         mode
       ) do
    {exprs, {state, cache, op_counts}} =
      Enum.map_reduce(
        parents,
        cache_and_counts,
        &to_predict_fun(&1, &2, params, inputs, mode)
      )

    op_counts = Map.update(op_counts, :cond, 1, fn x -> x + 1 end)

    [cond_input_expr, true_expr, false_expr] = exprs

    cond_expr = cond_fn.(cond_input_expr)
    cond_rank = Nx.rank(cond_expr)
    cond_type = Nx.type(cond_expr)

    unless cond_rank == 0 and cond_type == {:u, 8} do
      raise Axon.CompilerError,
            "cond_fn must return a scalar-boolean tensor" <>
              " got result with rank #{inspect(cond_rank)} and" <>
              " type #{inspect(cond_type)}"
    end

    {cond_expr, on_true, on_false} =
      [cond_expr, true_expr, false_expr]
      |> List.to_tuple()
      |> apply_hooks(:pre_forward, mode, hooks)

    res =
      Axon.Layers.cond(
        Nx.all(cond_expr),
        safe_as_type(on_true, compute),
        safe_as_type(on_false, compute)
      )

    res = safe_as_type(res, output)
    res = apply_hooks(res, :forward, mode, hooks)
    res = apply_hooks(res, :backward, mode, hooks)
    {res, {state, Map.put(cache, id, res), op_counts}}
  end

  ## Special Layers

  # Wraps an arbitrary Nx function as a layer.
  defp recur_predict_fun(
         %Axon{
           id: id,
           op: :nx,
           parent: [parent],
           opts: [fun: nx_fun],
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         params,
         inputs,
         mode
       ) do
    {res, {state, cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, params, inputs, mode)

    op_counts = Map.update(op_counts, :nx, 1, fn x -> x + 1 end)

    res =
      res
      |> safe_as_type(compute)
      |> apply_hooks(:pre_forward, mode, hooks)
      |> nx_fun.()
      |> safe_as_type(output)
      |> apply_hooks(:forward, mode, hooks)
      |> apply_hooks(:backward, mode, hooks)

    {res, {state, Map.put(cache, id, res), op_counts}}
  end

  # Constant layers embed a fixed tensor, cast to the output policy.
  defp recur_predict_fun(
         %Axon{id: id, op: :constant, opts: [value: tensor], policy: %{output: output}},
         {state, cache, op_counts},
         _,
         _,
         _
       ) do
    out = safe_as_type(tensor, output)
    op_counts = Map.update(op_counts, :constant, 1, fn x -> x + 1 end)

    {out, {state, Map.put(cache, id, out), op_counts}}
  end

  # Input layers look the tensor up by name (or accept a bare tensor for
  # single-input models) and validate its shape against the declared shape.
  defp recur_predict_fun(
         %Axon{id: id, op: :input, output_shape: shape, hooks: hooks, name: name_fn},
         {state, cache, op_counts},
         _,
         inputs,
         mode
       ) do
    name = name_fn.(:input, op_counts)
    op_counts = Map.update(op_counts, :input, 1, fn x -> x + 1 end)

    res =
      case inputs do
        %Nx.Tensor{} = inputs ->
          inputs

        %{} = inputs ->
          inputs[name]

        _ ->
          raise ArgumentError,
                "invalid input given to model, expected input" <>
                  " expected input to be a tensor or a map" <>
                  " corresponding to correct input names"
      end

    unless res do
      raise ArgumentError,
            "unable to find input #{name} for model given to predict," <>
              " you must provide an input tensor for every input" <>
              " specified in the graph"
    end

    unless Axon.Shape.compatible?(Nx.shape(res), shape) do
      raise ArgumentError,
            "invalid input shape given to model, expected input" <>
              " with shape #{inspect(shape)}, but got input with" <>
              " shape #{inspect(Nx.shape(res))}"
    end

    res =
      res
      |> apply_hooks(:forward,
mode, hooks)
      |> apply_hooks(:backward, mode, hooks)

    {res, {state, Map.put(cache, id, res), op_counts}}
  end

  ## Helpers

  # Frozen parameters are wrapped in stop_grad so they receive no updates.
  defp maybe_freeze(param, true), do: Nx.Defn.Kernel.stop_grad(param)
  defp maybe_freeze(param, false), do: param

  # Applies every registered hook matching this event and mode, in the order
  # the hooks were attached. :backward hooks are installed via custom_grad so
  # they observe the incoming gradient without altering it.
  defp apply_hooks(res, event, mode, hooks) do
    hooks
    |> Enum.reverse()
    |> Enum.reduce(res, fn {on_event, on_mode, hook_fn}, expr ->
      event? = on_event == event or on_event == :all
      mode? = on_mode == mode or on_mode == :both or mode == nil

      if event? and mode? do
        if on_event == :backward do
          Nx.Defn.Kernel.custom_grad(expr, fn _ans, g ->
            hooked_g = Nx.Defn.Kernel.hook(g, hook_fn)
            [{expr, hooked_g}]
          end)
        else
          Nx.Defn.Kernel.hook(expr, hook_fn)
        end
      else
        expr
      end
    end)
  end

  # Resolves the initial carry for a recurrent layer: either compile the
  # user-provided hidden-state graph(s), or initialize `num_carry` tensors
  # from `hidden_state_shape` with the batch dim taken from the input.
  defp to_hidden_state(
         hidden_state,
         input,
         cache_and_counts,
         params,
         inputs,
         num_carry,
         recurrent_initializer,
         hidden_state_shape,
         mode
       ) do
    case hidden_state do
      {%Axon{} = c, %Axon{} = h} ->
        {c_res, cache_and_counts} = to_predict_fun(c, cache_and_counts, params, inputs, mode)
        {h_res, cache_and_counts} = to_predict_fun(h, cache_and_counts, params, inputs, mode)
        {{c_res, h_res}, cache_and_counts}

      {%Axon{} = c} ->
        {h_res, cache_and_counts} = to_predict_fun(c, cache_and_counts, params, inputs, mode)
        {{h_res}, cache_and_counts}

      %Axon{} = x ->
        {h_res, cache_and_counts} = to_predict_fun(x, cache_and_counts, params, inputs, mode)
        {h_res, cache_and_counts}

      nil ->
        # Substitute the runtime batch size into the declared carry shape.
        shape = put_elem(hidden_state_shape, 0, elem(Nx.shape(input), 0))

        h_res =
          for _ <- 1..num_carry,
              do: apply(Axon.Initializers, recurrent_initializer, [[shape: shape]])

        res = List.to_tuple(h_res)

        # NOTE(review): wrapping in %{prediction: _, state: %{}} here mirrors
        # the :train-mode result shape — confirm downstream consumers expect it.
        res = if mode == :train, do: %{prediction: res, state: %{}}, else: res

        {res, cache_and_counts}
    end
  end

  # Builds the per-step cell function for the given recurrent op, closing
  # over the configured gate/activation functions (or conv opts for ConvLSTM).
  defp get_cell_fn(op, gate, activation, conv_opts) do
    case op do
      :lstm ->
        gate_fn = &apply(Axon.Activations, gate, [&1])
        activation_fn = &apply(Axon.Activations, activation, [&1])
        &Axon.Recurrent.lstm_cell(&1, &2, &3, &4, &5, gate_fn, activation_fn)

      :gru ->
        gate_fn = &apply(Axon.Activations, gate, [&1])
        activation_fn = &apply(Axon.Activations, activation, [&1])

        &Axon.Recurrent.gru_cell(&1, &2, &3, &4, &5, gate_fn, activation_fn)

      :conv_lstm ->
        &Axon.Recurrent.conv_lstm_cell(&1, &2, &3, &4, &5, conv_opts)
    end
  end

  # Fetches a declared layer parameter (or tuple of parameters) from the
  # user-supplied params map, applying freezing and the compute-policy cast.
  defp layer_param(layer_params, key, param_name, compute) do
    case layer_params[key] do
      %{name: p, frozen: frozen} ->
        safe_as_type(maybe_freeze(param_name[p], frozen), compute)

      params when is_tuple(params) ->
        params
        |> Tuple.to_list()
        |> Enum.with_index(fn param, i ->
          %{frozen: frozen} = param
          safe_as_type(maybe_freeze(elem(param_name[key], i), frozen), compute)
        end)
        |> List.to_tuple()
    end
  end

  # Casts a tensor, or every tensor in a container, to the given type.
  defp safe_as_type(container_or_tensor, type) do
    case container_or_tensor do
      %Nx.Tensor{} = tensor ->
        Nx.as_type(tensor, type)

      container ->
        deep_new(container, &Nx.as_type(&1, type))
    end
  end
end
lib/axon/compiler.ex
0.663887
0.433262
compiler.ex
starcoder
defmodule AWS.SSM do
  @moduledoc """
  Amazon EC2 Systems Manager is a collection of capabilities that helps you
  automate management tasks such as collecting system inventory, applying
  operating system (OS) patches, automating the creation of Amazon Machine
  Images (AMIs), and configuring operating systems (OSs) and applications at
  scale. Systems Manager works with managed instances: Amazon EC2 instances and
  servers or virtual machines (VMs) in your on-premises environment that are
  configured for Systems Manager.

  This reference is intended to be used with the EC2 Systems Manager User Guide
  ([Linux](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/systems-manager.html))
  ([Windows](http://docs.aws.amazon.com/AWSEC2/latest/WindowsGuide/systems-manager.html)).

  To get started, verify prerequisites and configure managed instances
  ([Linux](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/systems-manager-prereqs.html))
  ([Windows](http://docs.aws.amazon.com/AWSEC2/latest/WindowsGuide/systems-manager-prereqs.html)).
  """

  @doc """
  Adds or overwrites one or more tags for the specified resource. Tags are
  metadata that you assign to your managed instances. Tags enable you to
  categorize your managed instances in different ways, for example, by purpose,
  owner, or environment. Each tag consists of a key and an optional value, both
  of which you define. For example, you could define a set of tags for your
  account's managed instances that helps you track each instance's owner and
  stack level. For example: Key=Owner and Value=DbAdmin, SysAdmin, or Dev. Or
  Key=Stack and Value=Production, Pre-Production, or Test. Each resource can
  have a maximum of 10 tags.

  We recommend that you devise a set of tag keys that meets your needs for each
  resource type. Using a consistent set of tag keys makes it easier for you to
  manage your resources. You can search and filter the resources based on the
  tags you add.
Tags don't have any semantic meaning to Amazon EC2 and are interpreted strictly
  as a string of characters.

  For more information about tags, see [Tagging Your Amazon EC2
  Resources](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html)
  in the Amazon EC2 User Guide.
  """
  def add_tags_to_resource(client, input, options \\ []),
    do: request(client, "AddTagsToResource", input, options)

  @doc """
  Attempts to cancel the command specified by the Command ID. There is no
  guarantee that the command will be terminated and the underlying process
  stopped.
  """
  def cancel_command(client, input, options \\ []),
    do: request(client, "CancelCommand", input, options)

  @doc """
  Registers your on-premises server or virtual machine with Amazon EC2 so that
  you can manage these resources using Run Command. An on-premises server or
  virtual machine that has been registered with EC2 is called a managed
  instance. For more information about activations, see [Setting Up Managed
  Instances
  (Linux)](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/managed-instances.html)
  or [Setting Up Managed Instances
  (Windows)](http://docs.aws.amazon.com/AWSEC2/latest/WindowsGuide/managed-instances.html)
  in the Amazon EC2 User Guide.
  """
  def create_activation(client, input, options \\ []),
    do: request(client, "CreateActivation", input, options)

  @doc """
  Associates the specified SSM document with the specified instances or
  targets.

  When you associate an SSM document with one or more instances using instance
  IDs or tags, the SSM agent running on the instance processes the document and
  configures the instance as specified.

  If you associate a document with an instance that already has an associated
  document, the system throws the AssociationAlreadyExists exception.
  """
  def create_association(client, input, options \\ []),
    do: request(client, "CreateAssociation", input, options)

  @doc """
  Associates the specified SSM document with the specified instances or
  targets.

  When you associate an SSM document with one or more instances using instance
  IDs or tags, the SSM agent running on the instance processes the document and
  configures the instance as specified.

  If you associate a document with an instance that already has an associated
  document, the system throws the AssociationAlreadyExists exception.
  """
  def create_association_batch(client, input, options \\ []),
    do: request(client, "CreateAssociationBatch", input, options)

  @doc """
  Creates an SSM document.

  After you create an SSM document, you can use CreateAssociation to associate
  it with one or more running instances.
  """
  def create_document(client, input, options \\ []),
    do: request(client, "CreateDocument", input, options)

  @doc """
  Creates a new Maintenance Window.
  """
  def create_maintenance_window(client, input, options \\ []),
    do: request(client, "CreateMaintenanceWindow", input, options)

  @doc """
  Creates a patch baseline.
  """
  def create_patch_baseline(client, input, options \\ []),
    do: request(client, "CreatePatchBaseline", input, options)

  @doc """
  Deletes an activation. You are not required to delete an activation. If you
  delete an activation, you can no longer use it to register additional managed
  instances. Deleting an activation does not de-register managed instances. You
  must manually de-register managed instances.
  """
  def delete_activation(client, input, options \\ []),
    do: request(client, "DeleteActivation", input, options)

  @doc """
  Disassociates the specified SSM document from the specified instance.

  When you disassociate an SSM document from an instance, it does not change
  the configuration of the instance. To change the configuration state of an
  instance after you disassociate a document, you must create a new document
  with the desired configuration and associate it with the instance.
  """
  def delete_association(client, input, options \\ []),
    do: request(client, "DeleteAssociation", input, options)

  @doc """
  Deletes the SSM document and all instance associations to the document.

  Before you delete the SSM document, we recommend that you use
  DeleteAssociation to disassociate all instances that are associated with the
  document.
  """
  def delete_document(client, input, options \\ []),
    do: request(client, "DeleteDocument", input, options)

  @doc """
  Deletes a Maintenance Window.
  """
  def delete_maintenance_window(client, input, options \\ []),
    do: request(client, "DeleteMaintenanceWindow", input, options)

  @doc """
  Delete a parameter from the system.
  """
  def delete_parameter(client, input, options \\ []),
    do: request(client, "DeleteParameter", input, options)

  @doc """
  Deletes a patch baseline.
  """
  def delete_patch_baseline(client, input, options \\ []),
    do: request(client, "DeletePatchBaseline", input, options)

  @doc """
  Removes the server or virtual machine from the list of registered servers.
  You can reregister the instance again at any time. If you don’t plan to use
  Run Command on the server, we suggest uninstalling the SSM agent first.
  """
  def deregister_managed_instance(client, input, options \\ []),
    do: request(client, "DeregisterManagedInstance", input, options)

  @doc """
  Removes a patch group from a patch baseline.
  """
  def deregister_patch_baseline_for_patch_group(client, input, options \\ []),
    do: request(client, "DeregisterPatchBaselineForPatchGroup", input, options)

  @doc """
  Removes a target from a Maintenance Window.
  """
  def deregister_target_from_maintenance_window(client, input, options \\ []),
    do: request(client, "DeregisterTargetFromMaintenanceWindow", input, options)

  @doc """
  Removes a task from a Maintenance Window.
  """
  def deregister_task_from_maintenance_window(client, input, options \\ []),
    do: request(client, "DeregisterTaskFromMaintenanceWindow", input, options)

  @doc """
  Details about the activation, including: the date and time the activation was
  created, the expiration date, the IAM role assigned to the instances in the
  activation, and the number of instances activated by this registration.
  """
  def describe_activations(client, input, options \\ []),
    do: request(client, "DescribeActivations", input, options)

  @doc """
  Describes the associations for the specified SSM document or instance.
  """
  def describe_association(client, input, options \\ []),
    do: request(client, "DescribeAssociation", input, options)

  @doc """
  Provides details about all active and terminated Automation executions.
  """
  def describe_automation_executions(client, input, options \\ []),
    do: request(client, "DescribeAutomationExecutions", input, options)

  @doc """
  Lists all patches that could possibly be included in a patch baseline.
  """
  def describe_available_patches(client, input, options \\ []),
    do: request(client, "DescribeAvailablePatches", input, options)

  @doc """
  Describes the specified SSM document.
  """
  def describe_document(client, input, options \\ []),
    do: request(client, "DescribeDocument", input, options)

  @doc """
  Describes the permissions for an SSM document. If you created the document,
  you are the owner. If a document is shared, it can either be shared privately
  (by specifying a user’s AWS account ID) or publicly (*All*).
  """
  def describe_document_permission(client, input, options \\ []),
    do: request(client, "DescribeDocumentPermission", input, options)

  @doc """
  All associations for the instance(s).
  """
  def describe_effective_instance_associations(client, input, options \\ []),
    do: request(client, "DescribeEffectiveInstanceAssociations", input, options)

  @doc """
  Retrieves the current effective patches (the patch and the approval state)
  for the specified patch baseline.
""" def describe_effective_patches_for_patch_baseline(client, input, options \\ []) do request(client, "DescribeEffectivePatchesForPatchBaseline", input, options) end @doc """ The status of the associations for the instance(s). """ def describe_instance_associations_status(client, input, options \\ []) do request(client, "DescribeInstanceAssociationsStatus", input, options) end @doc """ Describes one or more of your instances. You can use this to get information about instances like the operating system platform, the SSM agent version (Linux), status etc. If you specify one or more instance IDs, it returns information for those instances. If you do not specify instance IDs, it returns information for all your instances. If you specify an instance ID that is not valid or an instance that you do not own, you receive an error. """ def describe_instance_information(client, input, options \\ []) do request(client, "DescribeInstanceInformation", input, options) end @doc """ Retrieves the high-level patch state of one or more instances. """ def describe_instance_patch_states(client, input, options \\ []) do request(client, "DescribeInstancePatchStates", input, options) end @doc """ Retrieves the high-level patch state for the instances in the specified patch group. """ def describe_instance_patch_states_for_patch_group(client, input, options \\ []) do request(client, "DescribeInstancePatchStatesForPatchGroup", input, options) end @doc """ Retrieves information about the patches on the specified instance and their state relative to the patch baseline being used for the instance. """ def describe_instance_patches(client, input, options \\ []) do request(client, "DescribeInstancePatches", input, options) end @doc """ Retrieves the individual task executions (one per target) for a particular task executed as part of a Maintenance Window execution. 
""" def describe_maintenance_window_execution_task_invocations(client, input, options \\ []) do request(client, "DescribeMaintenanceWindowExecutionTaskInvocations", input, options) end @doc """ For a given Maintenance Window execution, lists the tasks that were executed. """ def describe_maintenance_window_execution_tasks(client, input, options \\ []) do request(client, "DescribeMaintenanceWindowExecutionTasks", input, options) end @doc """ Lists the executions of a Maintenance Window (meaning, information about when the Maintenance Window was scheduled to be active and information about tasks registered and run with the Maintenance Window). """ def describe_maintenance_window_executions(client, input, options \\ []) do request(client, "DescribeMaintenanceWindowExecutions", input, options) end @doc """ Lists the targets registered with the Maintenance Window. """ def describe_maintenance_window_targets(client, input, options \\ []) do request(client, "DescribeMaintenanceWindowTargets", input, options) end @doc """ Lists the tasks in a Maintenance Window. """ def describe_maintenance_window_tasks(client, input, options \\ []) do request(client, "DescribeMaintenanceWindowTasks", input, options) end @doc """ Retrieves the Maintenance Windows in an AWS account. """ def describe_maintenance_windows(client, input, options \\ []) do request(client, "DescribeMaintenanceWindows", input, options) end @doc """ Get information about a parameter. """ def describe_parameters(client, input, options \\ []) do request(client, "DescribeParameters", input, options) end @doc """ Lists the patch baselines in your AWS account. """ def describe_patch_baselines(client, input, options \\ []) do request(client, "DescribePatchBaselines", input, options) end @doc """ Returns high-level aggregated patch compliance state for a patch group. 
""" def describe_patch_group_state(client, input, options \\ []) do request(client, "DescribePatchGroupState", input, options) end @doc """ Lists all patch groups that have been registered with patch baselines. """ def describe_patch_groups(client, input, options \\ []) do request(client, "DescribePatchGroups", input, options) end @doc """ Get detailed information about a particular Automation execution. """ def get_automation_execution(client, input, options \\ []) do request(client, "GetAutomationExecution", input, options) end @doc """ Returns detailed information about command execution for an invocation or plugin. """ def get_command_invocation(client, input, options \\ []) do request(client, "GetCommandInvocation", input, options) end @doc """ Retrieves the default patch baseline. """ def get_default_patch_baseline(client, input, options \\ []) do request(client, "GetDefaultPatchBaseline", input, options) end @doc """ Retrieves the current snapshot for the patch baseline the instance uses. This API is primarily used by the AWS-ApplyPatchBaseline Systems Manager document. """ def get_deployable_patch_snapshot_for_instance(client, input, options \\ []) do request(client, "GetDeployablePatchSnapshotForInstance", input, options) end @doc """ Gets the contents of the specified SSM document. """ def get_document(client, input, options \\ []) do request(client, "GetDocument", input, options) end @doc """ Query inventory information. """ def get_inventory(client, input, options \\ []) do request(client, "GetInventory", input, options) end @doc """ Return a list of inventory type names for the account, or return a list of attribute names for a specific Inventory item type. """ def get_inventory_schema(client, input, options \\ []) do request(client, "GetInventorySchema", input, options) end @doc """ Retrieves a Maintenance Window. 
""" def get_maintenance_window(client, input, options \\ []) do request(client, "GetMaintenanceWindow", input, options) end @doc """ Retrieves details about a specific task executed as part of a Maintenance Window execution. """ def get_maintenance_window_execution(client, input, options \\ []) do request(client, "GetMaintenanceWindowExecution", input, options) end @doc """ Retrieves the details about a specific task executed as part of a Maintenance Window execution. """ def get_maintenance_window_execution_task(client, input, options \\ []) do request(client, "GetMaintenanceWindowExecutionTask", input, options) end @doc """ Query a list of all parameters used by the AWS account. """ def get_parameter_history(client, input, options \\ []) do request(client, "GetParameterHistory", input, options) end @doc """ Get a list of parameters used by the AWS account.&gt; """ def get_parameters(client, input, options \\ []) do request(client, "GetParameters", input, options) end @doc """ Retrieves information about a patch baseline. """ def get_patch_baseline(client, input, options \\ []) do request(client, "GetPatchBaseline", input, options) end @doc """ Retrieves the patch baseline that should be used for the specified patch group. """ def get_patch_baseline_for_patch_group(client, input, options \\ []) do request(client, "GetPatchBaselineForPatchGroup", input, options) end @doc """ Lists the associations for the specified SSM document or instance. """ def list_associations(client, input, options \\ []) do request(client, "ListAssociations", input, options) end @doc """ An invocation is copy of a command sent to a specific instance. A command can apply to one or more instances. A command invocation applies to one instance. For example, if a user executes SendCommand against three instances, then a command invocation is created for each requested instance ID. ListCommandInvocations provide status about command execution. 
""" def list_command_invocations(client, input, options \\ []) do request(client, "ListCommandInvocations", input, options) end @doc """ Lists the commands requested by users of the AWS account. """ def list_commands(client, input, options \\ []) do request(client, "ListCommands", input, options) end @doc """ List all versions for a document. """ def list_document_versions(client, input, options \\ []) do request(client, "ListDocumentVersions", input, options) end @doc """ Describes one or more of your SSM documents. """ def list_documents(client, input, options \\ []) do request(client, "ListDocuments", input, options) end @doc """ A list of inventory items returned by the request. """ def list_inventory_entries(client, input, options \\ []) do request(client, "ListInventoryEntries", input, options) end @doc """ Returns a list of the tags assigned to the specified resource. """ def list_tags_for_resource(client, input, options \\ []) do request(client, "ListTagsForResource", input, options) end @doc """ Share a document publicly or privately. If you share a document privately, you must specify the AWS user account IDs for those people who can use the document. If you share a document publicly, you must specify *All* as the account ID. """ def modify_document_permission(client, input, options \\ []) do request(client, "ModifyDocumentPermission", input, options) end @doc """ Bulk update custom inventory items on one more instance. The request adds an inventory item, if it doesn't already exist, or updates an inventory item, if it does exist. """ def put_inventory(client, input, options \\ []) do request(client, "PutInventory", input, options) end @doc """ Add one or more paramaters to the system. """ def put_parameter(client, input, options \\ []) do request(client, "PutParameter", input, options) end @doc """ Defines the default patch baseline. 
""" def register_default_patch_baseline(client, input, options \\ []) do request(client, "RegisterDefaultPatchBaseline", input, options) end @doc """ Registers a patch baseline for a patch group. """ def register_patch_baseline_for_patch_group(client, input, options \\ []) do request(client, "RegisterPatchBaselineForPatchGroup", input, options) end @doc """ Registers a target with a Maintenance Window. """ def register_target_with_maintenance_window(client, input, options \\ []) do request(client, "RegisterTargetWithMaintenanceWindow", input, options) end @doc """ Adds a new task to a Maintenance Window. """ def register_task_with_maintenance_window(client, input, options \\ []) do request(client, "RegisterTaskWithMaintenanceWindow", input, options) end @doc """ Removes all tags from the specified resource. """ def remove_tags_from_resource(client, input, options \\ []) do request(client, "RemoveTagsFromResource", input, options) end @doc """ Executes commands on one or more remote instances. """ def send_command(client, input, options \\ []) do request(client, "SendCommand", input, options) end @doc """ Initiates execution of an Automation document. """ def start_automation_execution(client, input, options \\ []) do request(client, "StartAutomationExecution", input, options) end @doc """ Stop an Automation that is currently executing. """ def stop_automation_execution(client, input, options \\ []) do request(client, "StopAutomationExecution", input, options) end @doc """ Updates an association. You can only update the document version, schedule, parameters, and Amazon S3 output of an association. """ def update_association(client, input, options \\ []) do request(client, "UpdateAssociation", input, options) end @doc """ Updates the status of the SSM document associated with the specified instance. 
""" def update_association_status(client, input, options \\ []) do request(client, "UpdateAssociationStatus", input, options) end @doc """ The document you want to update. """ def update_document(client, input, options \\ []) do request(client, "UpdateDocument", input, options) end @doc """ Set the default version of a document. """ def update_document_default_version(client, input, options \\ []) do request(client, "UpdateDocumentDefaultVersion", input, options) end @doc """ Updates an existing Maintenance Window. Only specified parameters are modified. """ def update_maintenance_window(client, input, options \\ []) do request(client, "UpdateMaintenanceWindow", input, options) end @doc """ Assigns or changes an Amazon Identity and Access Management (IAM) role to the managed instance. """ def update_managed_instance_role(client, input, options \\ []) do request(client, "UpdateManagedInstanceRole", input, options) end @doc """ Modifies an existing patch baseline. Fields not specified in the request are left unchanged. 
"""
  def update_patch_baseline(client, input, options \\ []) do
    request(client, "UpdatePatchBaseline", input, options)
  end

  # NOTE(review): the spec's `{:error, Poison.Parser.t}` arm does not match the
  # actual non-200 return below, which is `{:error, {exception, message}}` —
  # kept as-is since callers may dialyze against it; confirm before changing.
  @spec request(map(), binary(), map(), list()) ::
    {:ok, Poison.Parser.t | nil, Poison.Response.t} |
    {:error, Poison.Parser.t} |
    {:error, HTTPoison.Error.t}
  # Signs and sends one AWS SSM JSON-1.1 API request and decodes the response.
  defp request(client, action, input, options) do
    client = %{client | service: "ssm"}
    host = get_host("ssm", client)
    url = get_url(host, client)

    headers = [
      {"Host", host},
      {"Content-Type", "application/x-amz-json-1.1"},
      {"X-Amz-Target", "AmazonSSM.#{action}"}
    ]

    payload = Poison.Encoder.encode(input, [])
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)

    case HTTPoison.post(url, payload, headers, options) do
      {:ok, response = %HTTPoison.Response{status_code: 200, body: ""}} ->
        # Empty 200 body: nothing to decode.
        {:ok, nil, response}

      {:ok, response = %HTTPoison.Response{status_code: 200, body: body}} ->
        {:ok, Poison.Parser.parse!(body), response}

      {:ok, _response = %HTTPoison.Response{body: body}} ->
        # Non-200: AWS returns a JSON error document; the exception type is
        # under "__type" and the human-readable text under "message".
        error = Poison.Parser.parse!(body)
        exception = error["__type"]
        message = error["message"]
        {:error, {exception, message}}

      {:error, %HTTPoison.Error{} = error} ->
        # Propagate the transport error unchanged. (The previous code rebuilt
        # the struct from `reason` alone, silently dropping its `id` field.)
        {:error, error}
    end
  end

  # Resolves the SSM endpoint host; the "local" pseudo-region maps to
  # localhost for development against a local stack.
  defp get_host(endpoint_prefix, client) do
    if client.region == "local" do
      "localhost"
    else
      "#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
    end
  end

  # Builds the request URL from the resolved host plus the client's
  # protocol and port.
  defp get_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end
end
lib/aws/ssm.ex
0.859428
0.543227
ssm.ex
starcoder
defmodule Exsm.Transitions do
  @moduledoc """
  Helper functions that resolve and apply state-machine transitions for a
  struct, delegating the individual steps (guard checks, callbacks,
  persistence, logging) to `Exsm.Transition`.

  Note: this module holds no runtime state of its own — every function here
  is a plain transformation over the given struct and declarations. (The
  previous docs described it as a GenServer, which it is not.)
  """

  alias Exsm.Transition

  # Raised by validate_state!/2 when a declaration entry is not a binary,
  # a list of binaries, or one of the wildcard markers "*" / "^".
  @invalid_transitions_error "Invalid transitions defined"

  @doc false
  def transition_to(struct, state_machine_module, next_state) do
    initial_state = state_machine_module._exsm_initial_state()
    transitions = state_machine_module._exsm_transitions()
    state_field = state_machine_module._field()

    # Current state of the struct, falling back to the machine's declared
    # initial state when the field has never been set.
    prev_state =
      case Map.get(struct, state_field) do
        nil -> initial_state
        prev_state -> prev_state
      end

    # Check declared transitions and guard functions before actually
    # updating the struct and returning the tuple.
    if Transition.declared_transition?(transitions, prev_state, next_state) do
      case Transition.before_callbacks(struct, prev_state, next_state, state_machine_module) do
        {:ok, struct} ->
          struct =
            struct
            |> Transition.persist_struct(prev_state, next_state, state_machine_module)
            |> Transition.log_transition(prev_state, next_state, state_machine_module)
            |> Transition.after_callbacks(prev_state, next_state, state_machine_module)

          {:ok, struct}

        {:error, reason} ->
          {:error, reason}
      end
    else
      {:error, "Transition from: #{prev_state}, to: #{next_state} isn't declared."}
    end
  end

  @doc false
  def valid_transition?(struct, state_machine_module, next_state) do
    initial_state = state_machine_module._exsm_initial_state()
    transitions = state_machine_module._exsm_transitions()
    state_field = state_machine_module._field()

    # Current state of the struct, falling back to the machine's declared
    # initial state when the field has never been set.
    prev_state =
      case Map.get(struct, state_field) do
        nil -> initial_state
        prev_state -> prev_state
      end

    Transition.declared_transition?(transitions, prev_state, next_state)
  end

  @doc """
  Expands a transition declaration map into an explicit
  `%{from_state => [to_states]}` map.

  Keys and values may each be a state name, a list of state names, or a
  wildcard: `"*"` stands for every declared state, `"^"` for every declared
  state except the one on the other side of the transition. Raises if a
  declaration references an undeclared state, is malformed, or pairs a
  wildcard key with a wildcard value.
  """
  @spec parse_transitions(map(), [String.t()]) :: %{optional(String.t()) => [String.t()]}
  def parse_transitions(transitions, states) do
    validate_transitions!(transitions, states)

    transitions
    |> Map.keys()
    |> Enum.reduce(%{}, &reduce_transitions(transitions, states, &1, &2))
  end

  # Ensures every key and value in the declaration refers to a declared
  # state or is a wildcard; raises otherwise.
  defp validate_transitions!(transitions, states) do
    transitions
    |> Map.keys()
    |> Enum.each(fn key ->
      validate_state!(key, states)
      validate_state!(transitions[key], states)
    end)
  end

  defp validate_state!("*", _states), do: nil
  defp validate_state!("^", _states), do: nil

  defp validate_state!(value, states) when is_binary(value) do
    if value not in states do
      raise "Transition is defined but corresponding state is not declared"
    end
  end

  defp validate_state!(value, states) when is_list(value) do
    Enum.each(value, &validate_state!(&1, states))
  end

  defp validate_state!(_value, _states), do: raise(@invalid_transitions_error)

  # Folds a single declaration entry into the accumulated %{from => [to]}
  # map, expanding wildcard and list forms.
  defp reduce_transitions(transitions, states, key, acc) do
    cond do
      key in ["*", "^"] && transitions[key] in ["*", "^"] ->
        raise "Invalid transition declaration: key and value both cannot be wildcard"

      # "*" as the source: the value applies to every declared state.
      key == "*" ->
        Enum.reduce(states, acc, fn k, a ->
          Map.put(a, k, append_value(states, Map.get(a, k, []), k, transitions[key]))
        end)

      # "^" as the source: the value applies to every declared state except
      # the target itself (`value -- [k]` removes the self-transition).
      key == "^" ->
        value =
          case transitions[key] do
            value when is_binary(value) -> [value]
            value when is_list(value) -> value
          end

        Enum.reduce(states, acc, fn k, a ->
          Map.put(a, k, append_value(states, Map.get(a, k, []), k, value -- [k]))
        end)

      is_binary(key) ->
        Map.put(acc, key, append_value(states, Map.get(acc, key, []), key, transitions[key]))

      is_list(key) ->
        Enum.reduce(key, acc, fn k, a ->
          Map.put(a, k, append_value(states, Map.get(a, k, []), k, transitions[key]))
        end)
    end
  end

  # Appends the expanded target states for `key` onto `prev_value`,
  # preserving insertion order and skipping duplicates.
  defp append_value(states, prev_value, key, value) do
    value =
      cond do
        value == "*" -> states
        value == "^" -> states -- [key]
        is_binary(value) -> [value]
        is_list(value) -> value
      end

    Enum.reduce(value, prev_value, fn v, a ->
      if v in a, do: a, else: a ++ [v]
    end)
  end
end
lib/exsm/transitions.ex
0.76973
0.663307
transitions.ex
starcoder