Dataset columns:

  code           string   (length: 114 to 1.05M)
  path           string   (length: 3 to 312)
  quality_prob   float64  (0.5 to 0.99)
  learning_prob  float64  (0.2 to 1)
  filename       string   (length: 3 to 168)
  kind           string   (1 class)
defmodule ReviewScraper do
  @moduledoc """
  Exposes functions to fetch overly positive reviews.
  """

  alias ReviewScraper.DealerRater.{Client, Review, Scraper}

  @doc """
  Fetches the reviews from DealerRater.com and returns the most positive reviews.

  Accepts the following options:

  - `:pages_to_fetch`: Number of pages to be fetched from DealerRater.com. Defaults to `5`.
  - `:reviews_count`: Number of reviews to be returned. Defaults to `3`.
  """
  @spec get_overly_positive_reviews(String.t(), keyword()) ::
          {:ok, [Review.t()]} | {:error, atom()}
  def get_overly_positive_reviews(dealership_name, opts \\ []) do
    pages_to_fetch = Keyword.get(opts, :pages_to_fetch, 5)
    reviews_count = Keyword.get(opts, :reviews_count, 3)

    case Client.find_dealership_by_name(dealership_name) do
      {:ok, dealership} ->
        reviews =
          1..pages_to_fetch
          |> Task.async_stream(fn page ->
            dealership
            |> Client.get_review_page(page)
            |> Scraper.get_reviews()
          end)
          |> Enum.flat_map(fn {:ok, reviews} -> reviews end)
          |> sort_reviews_by_positiveness()
          |> Enum.take(reviews_count)

        {:ok, reviews}

      {:error, :dealership_not_found} ->
        {:error, :dealership_not_found}
    end
  end

  @doc """
  Filters and sorts the reviews by positiveness using three criteria:

  - Recommended the dealer: keeps only reviews that recommended the dealer.
  - Average rating: sum of all the ratings divided by 5.
  - Occurrences of overly positive words: given a set of overly positive words,
    the occurrences of those words are counted.
  """
  @spec sort_reviews_by_positiveness([Review.t()]) :: [Review.t()]
  def sort_reviews_by_positiveness(reviews) do
    reviews
    |> Enum.filter(& &1.recommend_dealer?)
    |> Enum.sort_by(
      fn review ->
        {calculate_average_rating(review), sum_positive_words_occurences(review)}
      end,
      :desc
    )
  end

  defp sum_positive_words_occurences(%Review{description: review_description}) do
    positive_words = [
      "best",
      "amazing",
      "very",
      "most",
      "ever",
      "wonderful",
      "love",
      "extremely",
      "fantastic",
      "super",
      "excellent",
      "perfect"
    ]

    review_description
    |> String.split()
    |> Enum.frequencies_by(&String.downcase/1)
    |> Map.take(positive_words)
    |> Map.values()
    |> Enum.sum()
  end

  defp calculate_average_rating(%Review{} = review) do
    review
    |> Map.take([
      :dealership_rating,
      :customer_service_rating,
      :friendliness_rating,
      :pricing_rating,
      :overall_experience_rating
    ])
    |> Map.values()
    |> Enum.reject(&is_nil/1)
    |> Enum.sum()
    |> Kernel.div(5)
  end
end
path: lib/review_scraper.ex | quality_prob: 0.809577 | learning_prob: 0.674235 | filename: review_scraper.ex | kind: starcoder
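A quick usage sketch for the module above; the dealership name is illustrative and the call assumes a working `Client`/`Scraper` backend:

```elixir
# Fetch 10 pages and keep the 5 most positive reviews.
case ReviewScraper.get_overly_positive_reviews("Example Motors",
       pages_to_fetch: 10,
       reviews_count: 5
     ) do
  {:ok, reviews} -> Enum.each(reviews, &IO.puts(&1.description))
  {:error, :dealership_not_found} -> IO.puts("No such dealership")
end
```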
defmodule Unplug do
  @moduledoc """
  The purpose of `Unplug` is to provide a wrapper around any arbitrary plug, with the
  ability to conditionally execute that plug at run-time. The runtime conditions that
  are leveraged by `Unplug` must conform to the `Unplug.Predicate` behaviour. Out of
  the box, `Unplug` comes with predicates that can be used to execute plugs based on:

  - Environment variable values
  - Application config values
  - Request header values
  - Request path values

  `Unplug` also preserves the behavior of `Plug` in that in its `init/1` function it
  will evaluate the `init/1` functions of your conditional plugs and store their values,
  so that the `call/2` function does not have to reevaluate the `init/1` functions of
  your conditional plugs every time the plug pipeline is invoked (see the `Plug` docs
  for more information: https://hexdocs.pm/plug/Plug.Builder.html#module-options).

  To use `Unplug`, add an entry in your `endpoint.ex` or `router.ex` files like so:

  ```elixir
  plug Unplug,
    if: {Unplug.Predicates.RequestPathIn, ["/metrics", "healthcheck"]},
    do: {Plug.Telemetry, event_prefix: [:phoenix, :endpoint]}
  ```
  """

  @behaviour Plug

  @impl true
  def init(opts) do
    # Fetch all of the required options and raise if there are any errors
    init_mode = Application.get_env(:unplug, :init_mode, :compile)
    if_condition = Keyword.get(opts, :if) || raise "Unplug requires an :if condition entry"
    do_plug = Keyword.get(opts, :do) || raise "Unplug requires a :do plug entry"
    else_plug = Keyword.get(opts, :else, :skip)

    # Evaluate conditional plug inits if configured to do so
    do_plug_init_opts = eval_plug_init(init_mode, do_plug)
    else_plug_init_opts = eval_plug_init(init_mode, else_plug)

    {init_mode,
     %{
       if_condition: if_condition,
       do_plug: do_plug,
       do_plug_init_opts: do_plug_init_opts,
       else_plug: else_plug,
       else_plug_init_opts: else_plug_init_opts
     }}
  end

  @impl true
  def call(conn, {:compile, unplug_opts}) do
    %{
      if_condition: if_condition,
      do_plug: do_plug,
      do_plug_init_opts: do_plug_init_opts,
      else_plug: else_plug,
      else_plug_init_opts: else_plug_init_opts
    } = unplug_opts

    cond do
      exec_if_condition_call(conn, if_condition) ->
        exec_plug_call(conn, do_plug, do_plug_init_opts)

      else_plug != :skip ->
        exec_plug_call(conn, else_plug, else_plug_init_opts)

      true ->
        conn
    end
  end

  def call(conn, {:runtime, unplug_opts}) do
    %{
      if_condition: if_condition,
      do_plug: do_plug,
      else_plug: else_plug
    } = unplug_opts

    cond do
      exec_if_condition_call(conn, if_condition) ->
        do_plug_init_opts = eval_plug_init(:compile, do_plug)
        exec_plug_call(conn, do_plug, do_plug_init_opts)

      else_plug != :skip ->
        else_plug_init_opts = eval_plug_init(:compile, else_plug)
        exec_plug_call(conn, else_plug, else_plug_init_opts)

      true ->
        conn
    end
  end

  defp eval_plug_init(:compile, :skip), do: :skip
  defp eval_plug_init(:compile, {plug, opts}), do: plug.init(opts)
  defp eval_plug_init(:compile, plug), do: plug.init([])
  defp eval_plug_init(:runtime, :skip), do: :skip
  defp eval_plug_init(:runtime, _plug), do: nil

  defp eval_plug_init(bad_arg, _plug),
    do: raise("Invalid value #{inspect(bad_arg)} for Unplug config :init_mode")

  defp exec_if_condition_call(conn, {predicate_module, predicate_opts}),
    do: predicate_module.call(conn, predicate_opts)

  defp exec_if_condition_call(conn, predicate_module), do: predicate_module.call(conn, [])

  defp exec_plug_call(conn, {plug_module, _init_opts}, plug_opts),
    do: plug_module.call(conn, plug_opts)

  defp exec_plug_call(conn, plug_module, plug_opts), do: plug_module.call(conn, plug_opts)
end
path: lib/unplug.ex | quality_prob: 0.751648 | learning_prob: 0.770724 | filename: unplug.ex | kind: starcoder
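A minimal sketch of a custom predicate, assuming the `Unplug.Predicate` behaviour expects a `call(conn, opts)` callback returning a boolean, as the predicate dispatch in the module above suggests; the module name is hypothetical:

```elixir
defmodule MyApp.Predicates.NotLocalhost do
  # Hypothetical predicate: run the wrapped plug only for non-local requests.
  @behaviour Unplug.Predicate

  @impl true
  def call(conn, _opts), do: conn.host not in ["localhost", "127.0.0.1"]
end

# In a router/endpoint:
#
#   plug Unplug,
#     if: MyApp.Predicates.NotLocalhost,
#     do: {Plug.Telemetry, event_prefix: [:my_app, :endpoint]}
```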
defmodule Cabbage.Feature do
  @moduledoc """
  An extension on ExUnit to be able to execute feature files.

  ## Configuration

  In `config/test.exs`

      config :cabbage,
        # Default is "test/features/"
        features: "my/path/to/features/",
        # Default is []
        global_tags: :integration

  - `features` - Allows you to specify the location of your feature files. They can be anywhere, but typically are located within the test folder.
  - `global_tags` - Allows you to specify an ExUnit tag assigned to all cabbage generated tests

  ## Features

  Given a feature file, create a corresponding feature module which references it. Here's an example:

      defmodule MyApp.SomeFeatureTest do
        use Cabbage.Feature, file: "some_feature.feature"

        defgiven ~r/I am given a given statement/, _matched_data, _current_state do
          assert 1 + 1 == 2
          {:ok, %{new: :state}}
        end

        defwhen ~r/I when execute it/, _matched_data, _current_state do
          # Nothing to do, don't need to return anything if we don't want to
          nil
        end

        defthen ~r/everything is ok/, _matched_data, _current_state do
          assert true
        end
      end

  This translates loosely into:

      defmodule MyApp.SomeFeatureTest do
        use ExUnit.Case

        test "The name of the scenario here" do
          assert 1 + 1 == 2
          nil
          assert true
        end
      end

  ### Extracting Matched Data

  You'll likely have data within your feature statements which you want to extract. The second parameter to each of `defgiven/4`, `defwhen/4` and `defthen/4` is a pattern which specifies what you want to call the matched data, provided as a map. For example, if you want to match on a number:

      # NOTICE THE `number` VARIABLE IS STILL A STRING!!
      defgiven ~r/^there (is|are) (?<number>\d+) widget(s?)$/, %{number: number}, _state do
        assert String.to_integer(number) >= 1
      end

  For every named capture, you'll have a key as an atom in the second parameter. You can then use those variables you create within your block.

  ### Modifying State

  You'll likely have to keep track of some state in between statements. The third parameter to each of `defgiven/4`, `defwhen/4` and `defthen/4` is a pattern which specifies what you want to call your state, in the same way that the `ExUnit.Case.test/3` macro works. You can set up initial state using plain ExUnit `setup/1` and `setup_all/1`. Whatever state is provided via the `test/3` macro will be your initial state.

  To update the state, simply return `{:ok, %{new: :state}}`. Note that a `Map.merge/2` will be performed for you, so you only have to specify the keys you want to update. For this reason, only a map is allowed as state.

  Here's an example modifying state:

      defwhen ~r/^I am an admin$/, _, %{user: user} do
        {:ok, %{user: User.promote_to_admin(user)}}
      end

  All other statements do not need to return (and should be careful not to!) the `{:ok, state}` pattern.

  ### Organizing Features

  You may want to reuse several statements you create, especially ones that deal with global logic like users and logging in. Feature modules can be created without referencing a file. This makes them do nothing except hold translations between steps in a scenario and test code to be included into a test.

  These modules must be compiled prior to running the test suite, so for that reason you must add them to the `elixirc_paths` in your `mix.exs` file, like so:

      defmodule MyApp.Mixfile do
        use Mix.Project

        def project do
          [
            app: :my_app,
            ...
            # Add this to your project function
            elixirc_paths: elixirc_paths(Mix.env),
            ...
          ]
        end

        # Specifies which paths to compile per environment.
        defp elixirc_paths(:test), do: ["lib", "test/support"]
        defp elixirc_paths(_), do: ["lib"]

        ...
      end

  If you're using Phoenix, this should already be set up for you. Simply place a file like the following into `test/support`.

      defmodule MyApp.GlobalFeatures do
        use Cabbage.Feature

        # Write your `defgiven/4`, `defthen/4` and `defwhen/4`s here
      end

  Then inside the test file (the .exs one) add a `import_feature MyApp.GlobalFeatures` line after the `use Cabbage.Feature` line like so:

      defmodule MyApp.SomeFeatureTest do
        use Cabbage.Feature, file: "some_feature.feature"
        import_feature MyApp.GlobalFeatures

        # Omitted the rest
      end

  Keep in mind that if you'd like to be more explicit about what you bring into your test, you can use the macros `import_steps/1` and `import_tags/1`. This will allow you to be more selective about what's getting included into your integration tests. The `import_feature/1` macro simply calls both the `import_steps/1` and `import_tags/1` macros.
  """
  import Cabbage.Feature.Helpers

  alias Cabbage.Feature.{Loader, MissingStepError}

  @feature_options [:file, :template]

  defmacro __using__(options) do
    has_assigned_feature = !match?(nil, options[:file])

    Module.register_attribute(__CALLER__.module, :steps, accumulate: true)
    Module.register_attribute(__CALLER__.module, :tags, accumulate: true)

    quote do
      unquote(prepare_executable_feature(has_assigned_feature, options))
      @before_compile {unquote(__MODULE__), :expose_metadata}
      import unquote(__MODULE__)
      require Logger
      unquote(load_features(has_assigned_feature, options))
    end
  end

  defp prepare_executable_feature(false, _options), do: nil

  defp prepare_executable_feature(true, options) do
    {_options, template_options} = Keyword.split(options, @feature_options)

    quote do
      @before_compile unquote(__MODULE__)
      use unquote(options[:template] || ExUnit.Case), unquote(template_options)
    end
  end

  defp load_features(false, _options), do: nil

  defp load_features(true, options) do
    quote do
      @feature Loader.load_from_file(unquote(options[:file]))
      @scenarios @feature.scenarios
    end
  end

  defmacro expose_metadata(env) do
    steps = Module.get_attribute(env.module, :steps) || []
    tags = Module.get_attribute(env.module, :tags) || []

    quote generated: true do
      def raw_steps() do
        unquote(Macro.escape(steps))
      end

      def raw_tags() do
        unquote(Macro.escape(tags))
      end
    end
  end

  defmacro __before_compile__(env) do
    scenarios = Module.get_attribute(env.module, :scenarios) || []
    steps = Module.get_attribute(env.module, :steps) || []
    tags = Module.get_attribute(env.module, :tags) || []

    scenarios
    |> Enum.map(fn scenario ->
      scenario =
        Map.put(
          scenario,
          :tags,
          Cabbage.global_tags() ++
            List.wrap(Module.get_attribute(env.module, :moduletag)) ++ scenario.tags
        )

      quote bind_quoted: [
              scenario: Macro.escape(scenario),
              tags: Macro.escape(tags),
              steps: Macro.escape(steps)
            ],
            line: scenario.line do
        describe scenario.name do
          setup context do
            for tag <- unquote(scenario.tags) do
              case tag do
                {tag, _value} ->
                  Cabbage.Feature.Helpers.run_tag(
                    unquote(Macro.escape(tags)),
                    tag,
                    __MODULE__,
                    unquote(scenario.name)
                  )

                tag ->
                  Cabbage.Feature.Helpers.run_tag(
                    unquote(Macro.escape(tags)),
                    tag,
                    __MODULE__,
                    unquote(scenario.name)
                  )
              end
            end

            {:ok,
             Map.merge(
               Cabbage.Feature.Helpers.fetch_state(unquote(scenario.name), __MODULE__),
               context || %{}
             )}
          end

          tags = Cabbage.Feature.Helpers.map_tags(scenario.tags) || []

          name =
            ExUnit.Case.register_test(
              __ENV__,
              :scenario,
              scenario.name,
              tags
            )

          def unquote(name)(exunit_state) do
            Cabbage.Feature.Helpers.start_state(unquote(scenario.name), __MODULE__, exunit_state)
            unquote(Enum.map(scenario.steps, &compile_step(&1, steps, scenario.name)))
          end
        end
      end
    end)
  end

  def compile_step(step, steps, scenario_name) when is_list(steps) do
    step_type =
      step.__struct__
      |> Module.split()
      |> List.last()

    step
    |> find_implementation_of_step(steps)
    |> compile(step, step_type, scenario_name)
  end

  defp compile(
         {:{}, _, [regex, vars, state_pattern, block, metadata]},
         step,
         step_type,
         scenario_name
       ) do
    {regex, _} = Code.eval_quoted(regex)

    named_vars =
      extract_named_vars(regex, step.text)
      |> Map.merge(%{table: step.table_data, doc_string: step.doc_string})

    quote generated: true do
      with {_type, unquote(vars)} <- {:variables, unquote(Macro.escape(named_vars))},
           {_type, state = unquote(state_pattern)} <-
             {:state, Cabbage.Feature.Helpers.fetch_state(unquote(scenario_name), __MODULE__)} do
        new_state =
          case unquote(block) do
            {:ok, new_state} -> Map.merge(state, new_state)
            _ -> state
          end

        Cabbage.Feature.Helpers.update_state(unquote(scenario_name), __MODULE__, fn _ ->
          new_state
        end)

        Logger.info([
          "\t\t",
          IO.ANSI.cyan(),
          unquote(step_type),
          " ",
          IO.ANSI.green(),
          unquote(step.text)
        ])
      else
        {type, state} ->
          metadata = unquote(Macro.escape(metadata))

          reraise(
            """
            ** (MatchError) Failure to match #{type} of #{inspect(Cabbage.Feature.Helpers.remove_hidden_state(state))}
            Pattern: #{unquote(Macro.to_string(state_pattern))}
            """,
            Cabbage.Feature.Helpers.stacktrace(__MODULE__, metadata)
          )
      end
    end
  end

  defp compile(_, step, step_type, _scenario_name) do
    extra_vars = %{table: step.table_data, doc_string: step.doc_string}
    raise MissingStepError, step_text: step.text, step_type: step_type, extra_vars: extra_vars
  end

  defp find_implementation_of_step(step, steps) do
    Enum.find(steps, fn {:{}, _, [r, _, _, _, _]} ->
      step.text =~ (r |> Code.eval_quoted() |> elem(0))
    end)
  end

  defp extract_named_vars(regex, step_text) do
    regex
    |> Regex.named_captures(step_text)
    |> Enum.map(fn {k, v} -> {String.to_atom(k), v} end)
    |> Enum.into(%{})
  end

  @doc """
  Brings in all the functionality available from the supplied module.
  Module must `use Cabbage.Feature` (with or without a `:file`). Same as
  calling both `import_steps/1` and `import_tags/1`.
  """
  defmacro import_feature(module) do
    quote do
      import_steps(unquote(module))
      import_tags(unquote(module))
    end
  end

  @doc """
  Brings in all the step definitions from the supplied module. Module must
  `use Cabbage.Feature` (with or without a `:file`).
  """
  defmacro import_steps(module) do
    quote do
      if Code.ensure_compiled(unquote(module)) do
        for step <- unquote(module).raw_steps() do
          Module.put_attribute(__MODULE__, :steps, step)
        end
      end
    end
  end

  @doc """
  Brings in all the tag definitions from the supplied module. Module must
  `use Cabbage.Feature` (with or without a `:file`).
  """
  defmacro import_tags(module) do
    quote do
      if Code.ensure_compiled(unquote(module)) do
        for {name, block} <- unquote(module).raw_tags() do
          Cabbage.Feature.Helpers.add_tag(__MODULE__, name, block)
        end
      end
    end
  end

  defmacro defgiven(regex, vars, state, do: block) do
    add_step(__CALLER__.module, regex, vars, state, block, metadata(__CALLER__, :defgiven))
  end

  defmacro defwhen(regex, vars, state, do: block) do
    add_step(__CALLER__.module, regex, vars, state, block, metadata(__CALLER__, :defwhen))
  end

  defmacro defthen(regex, vars, state, do: block) do
    add_step(__CALLER__.module, regex, vars, state, block, metadata(__CALLER__, :defthen))
  end

  @doc """
  Add an ExUnit `setup/1` callback that only fires for the scenarios that are tagged.
  Can be used inside of `Cabbage.Feature`s that don't relate to a file and then
  imported with `import_feature/1`.

  Example usage:

      defmodule MyTest do
        use Cabbage.Feature

        tag @some_tag do
          IO.puts "Do this before the @some_tag scenario"

          on_exit fn ->
            IO.puts "Do this after the @some_tag scenario"
          end
        end
      end
  """
  defmacro tag(tag, do: block) do
    add_tag(__CALLER__.module, Macro.to_string(tag) |> String.replace(~r/\s*/, ""), block)
  end
end
path: lib/cabbage/feature.ex | quality_prob: 0.867892 | learning_prob: 0.651022 | filename: feature.ex | kind: starcoder
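The `compile/4` clause above merges `:table` and `:doc_string` into the matched variables alongside the named regex captures, so a step definition can pattern match on them directly. A hedged sketch; the step text is illustrative and the table is assumed to arrive as the parsed Gherkin data table for the step:

```elixir
# Data tables arrive alongside named captures in the vars map.
defgiven ~r/^the following users exist$/, %{table: table}, _state do
  # `table` holds this step's parsed Gherkin data table.
  assert length(table) > 0
  {:ok, %{users: table}}
end
```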
defmodule Membrane.ParentSpec do
  @moduledoc """
  Structure representing the topology of a pipeline/bin.

  It can be incorporated into a pipeline or a bin by returning
  `t:Membrane.Pipeline.Action.spec_t/0` or `t:Membrane.Bin.Action.spec_t/0`
  action, respectively. This commonly happens within
  `c:Membrane.Pipeline.handle_init/1` and `c:Membrane.Bin.handle_init/1`,
  but can be done in any other callback as well.

  ## Children

  Children that should be spawned when the pipeline/bin starts can be defined
  with the `:children` field. You have to set it to a map, where keys are valid
  children names (`t:Membrane.Child.name_t/0`) that are unique within this
  pipeline/bin and values are either a child's module or a struct of that module.

  Sample definitions:

      %{
        first_element: %Element.With.Options.Struct{option_a: 42},
        some_element: Element.Without.Options,
        some_bin: Bin.Using.Default.Options
      }

  ## Links

  Links that should be made when the children are spawned can be defined with the
  `:links` field. Links can be defined with the use of `link/1` and `to/2` functions
  that allow specifying the elements linked, and `via_in/2` and `via_out/2` that
  allow specifying pads' names and parameters. If pads are not specified, name
  `:input` is assumed for inputs and `:output` for outputs.

  Sample definition:

      [
        link(:source_a)
        |> to(:converter)
        |> via_in(:input_a, buffer: [preferred_size: 20_000])
        |> to(:mixer),
        link(:source_b)
        |> via_out(:custom_output)
        |> via_in(:input_b, pad: [mute: true])
        |> to(:mixer)
        |> via_in(:input, [warn_size: 264_000, fail_size: 300_000])
        |> to(:sink)
      ]

  Links can also contain children definitions, for example:

      [
        link(:first_element, %Element.With.Options.Struct{option_a: 42})
        |> to(:some_element, Element.Without.Options)
        |> to(:element_specified_in_children)
      ]

  Which is particularly convenient for creating links conditionally:

      maybe_link = &to(&1, :some_element, Some.Element)

      [
        link(:first_element)
        |> then(if condition?, do: maybe_link, else: & &1)
        |> to(:another_element)
      ]

  ### Bins

  For bins' boundaries, there are special links allowed. The user should define links
  between the bin's input and the first child's input (input-input type) and the last
  child's output and the bin's output (output-output type). In this case,
  `link_bin_input/2` and `to_bin_output/3` should be used.

  Sample definition:

      [
        link_bin_input()
        |> to(:filter1)
        |> to(:filter2)
        |> to_bin_output(:custom_output)
      ]

  ### Dynamic pads

  In most cases, dynamic pads can be linked the same way as static ones, although in
  the following situations an exact pad reference must be passed instead of a name:

  - When that reference is needed later, for example, to handle a notification related
    to that particular pad instance

        pad = Pad.ref(:output, make_ref())

        [
          link(:tee) |> via_out(pad) |> to(:sink)
        ]

  - When linking dynamic pads of a bin with its children, for example in
    `c:Membrane.Bin.handle_pad_added/3`

        @impl true
        def handle_pad_added(Pad.ref(:input, _) = pad, _ctx, state) do
          links = [link_bin_input(pad) |> to(:mixer)]
          {{:ok, spec: %ParentSpec{links: links}}, state}
        end

  ## Stream sync

  The `:stream_sync` field can be used for specifying elements that should start
  playing at the same moment. An example can be audio and video player sinks. This
  option accepts either the `:sinks` atom or a list of groups (lists) of elements.
  Passing `:sinks` results in synchronizing all sinks in the pipeline, while passing
  a list of groups of elements synchronizes all elements in each group. It is worth
  mentioning that to keep the stream synchronized all involved elements need to rely
  on the same clock.

  By default, no elements are synchronized.

  Sample definitions:

  ```
  %ParentSpec{stream_sync: [[:element1, :element2], [:element3, :element4]]}
  %ParentSpec{stream_sync: :sinks}
  ```

  ## Clock provider

  A clock provider is an element that exports a clock that should be used as the
  pipeline clock. The pipeline clock is the default clock used by elements' timers.
  For more information see `Membrane.Element.Base.def_clock/1`.

  ## Crash groups

  A crash group is a logical entity that prevents the whole pipeline from crashing
  when one of its children crashes.

  ### Adding children to a crash group

  ```elixir
  children = %{
    :some_element_1 => %SomeElement{
      # ...
    },
    :some_element_2 => %SomeElement{
      # ...
    }
  }

  spec = %ParentSpec{children: children, crash_group: {group_id, :temporary}}
  ```

  The crash group is defined by a two-element tuple; the first element is an ID which
  is of type `Membrane.CrashGroup.name_t()`, and the second is a mode. Currently, we
  support only `:temporary` mode, which means that Membrane will not make any attempts
  to restart a crashed child.

  In the above snippet, we create new children - `:some_element_1` and
  `:some_element_2` - and add them to the crash group with id `group_id`. A crash of
  `:some_element_1` or `:some_element_2` propagates only to the rest of the members
  of the crash group, and the pipeline stays alive.

  Currently, a crash group covers all children within one or more `ParentSpec`s.

  ### Handling crash of a crash group

  When any of the members of the crash group goes down, the callback
  [`handle_crash_group_down/3`](https://hexdocs.pm/membrane_core/Membrane.Pipeline.html#c:handle_crash_group_down/3)
  is called.

  ```elixir
  @impl true
  def handle_crash_group_down(crash_group_id, ctx, state) do
    # do some stuff in reaction to the crash of the group with id crash_group_id
  end
  ```

  ### Limitations

  At this moment, crash groups are only useful for elements with dynamic pads.
  Crash groups work only in pipelines and are not supported in bins.

  ## Log metadata

  The `:log_metadata` field can be used to set the `Membrane.Logger` metadata for
  all children from that `Membrane.ParentSpec`.
  """

  alias Membrane.{Child, Pad}
  alias Membrane.Core.InputBuffer
  alias Membrane.ParentError

  require Membrane.Pad

  defmodule LinkBuilder do
    @moduledoc false
    use Bunch.Access

    defstruct children: [],
              links: [],
              status: nil

    @type t :: %__MODULE__{
            children: [{Child.name_t(), module | struct}],
            links: [map],
            status: status_t
          }

    @type status_t :: :from | :output | :input | :done

    @spec update(t, status_t, Keyword.t()) :: t
    def update(
          %__MODULE__{links: [%{to: to} | _] = links, status: :done} = builder,
          status,
          entries
        ) do
      %__MODULE__{builder | links: [Map.new([from: to] ++ entries) | links], status: status}
    end

    def update(%__MODULE__{links: [link | links]} = builder, status, entries) do
      %__MODULE__{builder | links: [Map.merge(link, Map.new(entries)) | links], status: status}
    end
  end

  @opaque link_builder_t :: LinkBuilder.t()

  @type child_spec_t :: module | struct

  @type children_spec_t ::
          [{Child.name_t(), child_spec_t}]
          | %{Child.name_t() => child_spec_t}

  @typedoc """
  Options passed to the child when linking its pad with a different one.

  The allowed options are:
  * `:buffer` - keyword allowing to configure `Membrane.Core.InputBuffer` between
    elements. Valid only for input pads. See `t:Membrane.Core.InputBuffer.props_t/0`
    for configurable properties.
  * `:options` - any child-specific options that will be available in the
    `Membrane.Pad.Data` struct.
  """
  @type pad_props_t :: [
          {:buffer, InputBuffer.props_t()}
          | {:options, Keyword.t()}
        ]

  @type links_spec_t :: [link_builder_t() | links_spec_t]

  @type crash_group_spec_t :: {any(), :temporary} | nil

  @typedoc """
  Struct used when starting and linking children within a pipeline or a bin.
  """
  @type t :: %__MODULE__{
          children: children_spec_t,
          links: links_spec_t,
          crash_group: crash_group_spec_t() | nil,
          stream_sync: :sinks | [[Child.name_t()]],
          clock_provider: Child.name_t() | nil,
          node: node() | nil,
          log_metadata: Keyword.t()
        }

  @valid_pad_prop_keys [:options, :buffer]

  defstruct children: %{},
            links: [],
            crash_group: nil,
            stream_sync: [],
            clock_provider: nil,
            node: nil,
            log_metadata: []

  @doc """
  Begins a link.

  See the _links_ section of the moduledoc for more information.
  """
  @spec link(Child.name_t()) :: link_builder_t()
  def link(child_name) do
    %LinkBuilder{links: [%{from: child_name}], status: :from}
  end

  @doc """
  Defines a child and begins a link with it.

  See the _links_ section of the moduledoc for more information.
  """
  @spec link(Child.name_t(), child_spec_t()) :: link_builder_t()
  def link(child_name, child_spec) do
    link(child_name) |> Map.update!(:children, &[{child_name, child_spec} | &1])
  end

  @doc """
  Begins a link with a bin's pad.

  See the _links_ section of the moduledoc for more information.
  """
  @spec link_bin_input(Pad.name_t() | Pad.ref_t(), pad_props_t) :: link_builder_t() | no_return
  def link_bin_input(pad \\ :input, props \\ []) do
    link({Membrane.Bin, :itself}) |> via_out(pad, props)
  end

  @doc """
  Specifies output pad name and properties of the preceding child.

  See the _links_ section of the moduledoc for more information.
  """
  @spec via_out(link_builder_t(), Pad.name_t() | Pad.ref_t(), pad_props_t) ::
          link_builder_t() | no_return
  def via_out(builder, pad, props \\ [])

  def via_out(%LinkBuilder{status: :output}, pad, _props) do
    raise ParentError,
          "Invalid link specification: output #{inspect(pad)} placed after another output or bin's input"
  end

  def via_out(%LinkBuilder{status: :input}, pad, _props) do
    raise ParentError,
          "Invalid link specification: output #{inspect(pad)} placed after an input"
  end

  def via_out(%LinkBuilder{} = builder, pad, props) do
    :ok = validate_pad_name(pad)
    :ok = validate_pad_props(props)

    LinkBuilder.update(builder, :output,
      output: pad,
      output_props: props
    )
  end

  @doc """
  Specifies input pad name and properties of the subsequent child.

  See the _links_ section of the moduledoc for more information.
  """
  @spec via_in(link_builder_t(), Pad.name_t() | Pad.ref_t(), pad_props_t) ::
          link_builder_t() | no_return
  def via_in(builder, pad, opts \\ [])

  def via_in(%LinkBuilder{status: :input}, pad, _opts) do
    raise ParentError,
          "Invalid link specification: input #{inspect(pad)} placed after another input"
  end

  def via_in(%LinkBuilder{} = builder, pad, props) do
    :ok = validate_pad_name(pad)
    :ok = validate_pad_props(props)

    LinkBuilder.update(builder, :input,
      input: pad,
      input_props: props
    )
  end

  @doc """
  Continues or ends a link.

  See the _links_ section of the moduledoc for more information.
  """
  @spec to(link_builder_t(), Child.name_t()) :: link_builder_t() | no_return
  def to(%LinkBuilder{links: [%{to: {Membrane.Bin, :itself}} | _]}, child_name) do
    raise ParentError,
          "Invalid link specification: child #{inspect(child_name)} placed after bin's output"
  end

  def to(%LinkBuilder{} = builder, child_name) do
    LinkBuilder.update(builder, :done, to: child_name)
  end

  @doc """
  Defines a child and continues or ends a link with it.

  See the _links_ section of the moduledoc for more information.
  """
  @spec to(link_builder_t(), Child.name_t(), child_spec_t()) :: link_builder_t() | no_return
  def to(%LinkBuilder{} = builder, child_name, child_spec) do
    builder |> to(child_name) |> Map.update!(:children, &[{child_name, child_spec} | &1])
  end

  @doc """
  Ends a link with a bin's output.

  See the _links_ section of the moduledoc for more information.
  """
  @spec to_bin_output(link_builder_t(), Pad.name_t() | Pad.ref_t(), pad_props_t) ::
          link_builder_t() | no_return
  def to_bin_output(builder, pad \\ :output, props \\ [])

  def to_bin_output(%LinkBuilder{status: :input}, pad, _props) do
    raise ParentError,
          "Invalid link specification: bin's output #{inspect(pad)} placed after an input"
  end

  def to_bin_output(builder, pad, props) do
    builder |> via_in(pad, props) |> to({Membrane.Bin, :itself})
  end

  defp validate_pad_name(pad) when Pad.is_pad_name(pad) or Pad.is_pad_ref(pad) do
    :ok
  end

  defp validate_pad_name(pad) do
    raise ParentError, "Invalid link specification: invalid pad name: #{inspect(pad)}"
  end

  defp validate_pad_props(props) do
    unless Keyword.keyword?(props) do
      raise ParentError,
            "Invalid link specification: pad options should be a keyword, got: #{inspect(props)}"
    end

    props
    |> Keyword.keys()
    |> Enum.each(
      &unless &1 in @valid_pad_prop_keys do
        raise ParentError, "Invalid link specification: invalid pad option: #{inspect(&1)}"
      end
    )
  end
end
path: lib/membrane/parent_spec.ex | quality_prob: 0.947588 | learning_prob: 0.879923 | filename: parent_spec.ex | kind: starcoder
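Putting the builder functions together, a minimal sketch of a pipeline `handle_init/1` returning a spec; the element modules and the `use Membrane.Pipeline` wiring are assumptions for illustration:

```elixir
defmodule MyApp.Pipeline do
  use Membrane.Pipeline

  import Membrane.ParentSpec

  @impl true
  def handle_init(_opts) do
    # Hypothetical elements standing in for real source/filter/sink modules.
    children = %{
      source: MyApp.Source,
      converter: MyApp.Converter,
      sink: MyApp.Sink
    }

    links = [
      link(:source)
      |> to(:converter)
      |> via_in(:input, buffer: [preferred_size: 20_000])
      |> to(:sink)
    ]

    {{:ok, spec: %Membrane.ParentSpec{children: children, links: links}}, %{}}
  end
end
```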
defmodule Scenic.Math.Line do
  @moduledoc """
  A collection of functions to work with lines.

  Lines are always two points in a tuple.

      {point_a, point_b}
      {{x0, y0}, {x1, y1}}
  """

  alias Scenic.Math
  # import IEx

  @app Mix.Project.config()[:app]
  # @env Mix.env

  # load the NIF
  @compile {:autoload, false}
  @on_load :load_nifs

  @doc false
  def load_nifs do
    :ok =
      :filename.join(:code.priv_dir(@app), 'line')
      |> :erlang.load_nif(0)
  end

  # --------------------------------------------------------
  @doc """
  Truncate the points that define a line so that they are made up of integers.

  Parameters:
  * line - A line defined by two points. {point_a, point_b}

  Returns:
  A line
  """
  @spec trunc(line :: Math.line()) :: Math.line()
  def trunc(line)

  def trunc({p0, p1}) do
    {
      Math.Vector2.trunc(p0),
      Math.Vector2.trunc(p1)
    }
  end

  # --------------------------------------------------------
  @doc """
  Round the points that define a line so that they are made up of integers.

  Parameters:
  * line - A line defined by two points. {point_a, point_b}

  Returns:
  A line
  """
  @spec round(line :: Math.line()) :: Math.line()
  def round(line)

  def round({p0, p1}) do
    {
      Math.Vector2.round(p0),
      Math.Vector2.round(p1)
    }
  end

  # --------------------------------------------------------
  @doc """
  Find a new line that is parallel to the given line and separated by the
  given distance.

  Parameters:
  * line - A line defined by two points. {point_a, point_b}
  * distance - The perpendicular distance to the new line.

  Returns:
  A line
  """
  @spec parallel(line :: Math.line(), distance :: number) :: Math.line()
  def parallel(line, distance)

  def parallel({{x0, y0}, {x1, y1}}, w) do
    nif_parallel(x0, y0, x1, y1, w)
  end

  defp nif_parallel(_, _, _, _, _) do
    :erlang.nif_error("Did not find nif_parallel")
  end

  # --------------------------------------------------------
  @doc """
  Find the point of intersection between two lines.

  Parameters:
  * line_a - A line defined by two points. {point_a, point_b}
  * line_b - A line defined by two points. {point_a, point_b}

  Returns:
  A point
  """
  @spec intersection(line_a :: Math.line(), line_b :: Math.line()) :: Math.point()
  def intersection(line_a, line_b)

  def intersection({{x0, y0}, {x1, y1}}, {{x2, y2}, {x3, y3}}) do
    nif_intersection(x0, y0, x1, y1, x2, y2, x3, y3)
  end

  defp nif_intersection(_, _, _, _, _, _, _, _) do
    :erlang.nif_error("Did not find nif_intersection")
  end
end
path: lib/scenic/math/line.ex | quality_prob: 0.875574 | learning_prob: 0.520557 | filename: line.ex | kind: starcoder
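A short sketch of the API above; the results assume the NIF loaded successfully, and the intersection of the two diagonals should come back as `{5.0, 5.0}`:

```elixir
line_a = {{0.0, 0.0}, {10.0, 10.0}}
line_b = {{0.0, 10.0}, {10.0, 0.0}}

# Where the two diagonals cross; expected {5.0, 5.0}.
Scenic.Math.Line.intersection(line_a, line_b)

# A parallel line offset 2 units perpendicular to line_a,
# then snapped to integer coordinates.
line_a
|> Scenic.Math.Line.parallel(2.0)
|> Scenic.Math.Line.round()
```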
defmodule Circuits.UART do
  use GenServer

  # Many calls take timeouts for how long to wait for reading and writing
  # serial ports. This is the additional time added to the GenServer message passing
  # timeout so that the interprocess messaging timers don't hit before the
  # timeouts on the actual operations.
  @genserver_timeout_slack 500

  # There's a timeout when interacting with the port as well. If the port
  # doesn't respond by timeout + @port_timeout_slack, then there's something
  # wrong with it.
  @port_timeout_slack 400

  @moduledoc """
  Find and use UARTs, serial ports, and more.
  """

  defmodule State do
    @moduledoc false

    # port: C port process
    # controlling_process: where events get sent
    # name: port name when opened
    # framing: framing behaviour
    # framing_state: framing behaviour's state
    # rx_framing_timeout: how long to wait for incomplete frames
    # queued_messages: queued messages when in passive mode
    # rx_framing_tref: frame completion timer
    # is_active: active or passive mode
    defstruct port: nil,
              controlling_process: nil,
              name: :closed,
              framing: Circuits.UART.Framing.None,
              framing_state: nil,
              rx_framing_timeout: 0,
              queued_messages: [],
              rx_framing_tref: nil,
              is_active: true,
              id: :name
  end

  @type uart_option ::
          {:active, boolean}
          | {:speed, non_neg_integer}
          | {:data_bits, 5..8}
          | {:stop_bits, 1..2}
          | {:parity, :none | :even | :odd | :space | :mark | :ignore}
          | {:flow_control, :none | :hardware | :software}
          | {:framing, module | {module, [term]}}
          | {:rx_framing_timeout, integer}
          | {:id, :name | :pid}

  # Public API

  @doc """
  Return a map of available ports with information about each one. The map
  looks like this:

  ```
  %{
    "ttyS0" => %{vendor_id: 1234, product_id: 1, manufacturer: "Acme Corporation", serial_number: "000001"},
    "ttyUSB0" => %{vendor_id: 1234, product_id: 2}
  }
  ```

  Depending on the port and the operating system, not all fields may be
  returned. Informational fields are:

  * `:vendor_id` - The 16-bit USB vendor ID of the device providing the port.
    Vendor ID to name lists are managed through usb.org
  * `:product_id` - The 16-bit vendor supplied product ID
  * `:manufacturer` - The manufacturer of the port
  * `:description` - A description or product name
  * `:serial_number` - The device's serial number if it has one
  """
  @spec enumerate() :: map
  def enumerate() do
    Circuits.UART.Enumerator.enumerate()
  end

  @doc """
  Find UARTs.

  This is intended as a diagnostic function for finding UARTs that you may have
  opened and forgotten about. Since a UART can only be opened once, this helps
  you find the problematic one so that you can close it. It returns a list of
  `{pid, uart_name}` tuples.

  NOTE: Do not rely on this function in production code. It may change if
  updates to the interface make it more convenient to use.
  """
  @spec find_pids() :: [{pid(), binary | :closed}]
  def find_pids() do
    Process.list()
    |> Enum.filter(&is_circuits_uart_process/1)
    |> Enum.map(&circuits_uart_info/1)
  end

  defp is_circuits_uart_process(pid) do
    {:dictionary, dictionary} = Process.info(pid, :dictionary)
    Keyword.get(dictionary, :"$initial_call") == {Circuits.UART, :init, 1}
  end

  defp circuits_uart_info(pid) do
    {name, _opts} = configuration(pid)
    {pid, name}
  end

  @doc """
  Start up a UART GenServer.
  """
  @spec start_link([term]) :: {:ok, pid} | {:error, term}
  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, [], opts)
  end

  @doc """
  Stop the UART GenServer.
  """
  @spec stop(GenServer.server()) :: :ok
  def stop(pid) do
    GenServer.stop(pid)
  end

  @doc """
  Open a serial port.

  The following options are available:

  * `:active` - (`true` or `false`) specifies whether data is received as
    messages or by calling `read/2`. See discussion below.

  * `:speed` - (number) set the initial baudrate (e.g., 115200)

  * `:data_bits` - (5, 6, 7, 8) set the number of data bits (usually 8)

  * `:stop_bits` - (1, 2) set the number of stop bits (usually 1)

  * `:parity` - (`:none`, `:even`, `:odd`, `:space`, or `:mark`) set the parity.
    Usually this is `:none`. Other values:
    * `:space` means that the parity bit is always 0
    * `:mark` means that the parity bit is always 1
    * `:ignore` means that the parity bit is ignored (Linux/OSX only)

  * `:flow_control` - (`:none`, `:hardware`, or `:software`) set the flow control
    strategy.

  * `:framing` - (`module` or `{module, args}`) set the framing for data. The
    `module` must implement the `Circuits.UART.Framing` behaviour. See
    `Circuits.UART.Framing.None`, `Circuits.UART.Framing.Line`, and
    `Circuits.UART.Framing.FourByte`. The default is `Circuits.UART.Framing.None`.

  * `:rx_framing_timeout` - (milliseconds) this specifies how long incomplete
    frames will wait for the remainder to be received. Timed out partial frames
    are reported as `{:partial, data}`. A timeout of <= 0 means to wait forever.

  * `:id` - (`:name` or `:pid`) specify what to return with the uart active
    messages. With `:name` the messages are returned as
    `{:circuits_uart, serial_port_name, data}`, otherwise they are returned as
    `{:circuits_uart, pid, data}`. The name and pid are the name of the connected
    UART or the pid of the Circuits.UART server as returned by `start_link/1`.
    The default value is `:name`.

  Active mode defaults to true and means that data received on the UART is
  reported in messages. The messages have the following form:

      {:circuits_uart, serial_port_id, data}

  or

      {:circuits_uart, serial_port_id, {:error, reason}}

  When in active mode, flow control can not be used to push back on the sender
  and messages will accumulate in the mailbox should data arrive fast enough.
  If this is an issue, set `:active` to false and call `read/2` manually when
  ready for more data.

  On success, `open/3` returns `:ok`. On error, `{:error, reason}` is returned.
  The following are some reasons:

  * `:enoent` - the specified port couldn't be found
  * `:eagain` - the port is already open
  * `:eacces` - permission was denied when opening the port
  """
  @spec open(GenServer.server(), binary, [uart_option]) :: :ok | {:error, term}
  def open(pid, name, opts \\ []) do
    GenServer.call(pid, {:open, name, opts})
  end

  @doc """
  Close the serial port. The GenServer continues to run so that a port can be
  opened again.
  """
  @spec close(GenServer.server()) :: :ok | {:error, term}
  def close(pid) do
    GenServer.call(pid, :close)
  end

  @doc """
  Change the serial port configuration after `open/3` has been called. See
  `open/3` for the valid options.
  """
  @spec configure(GenServer.server(), [uart_option]) :: :ok | {:error, term}
  def configure(pid, opts) do
    GenServer.call(pid, {:configure, opts})
  end

  @doc """
  Get the configuration of the serial port.
  """
  @spec configuration(GenServer.server()) :: {binary() | :closed, [uart_option]}
  def configuration(pid) do
    GenServer.call(pid, :configuration)
  end

  @doc """
  Send a continuous stream of zero bits for a duration in milliseconds.
  By default, the zero bits are transmitted for at least 0.25 seconds.

  This is a convenience function for calling `set_break/2` to enable the break
  signal, wait, and then turn it off.
  """
  @spec send_break(GenServer.server(), integer) :: :ok | {:error, term}
  def send_break(pid, duration \\ 250) do
    :ok = set_break(pid, true)
    :timer.sleep(duration)
    set_break(pid, false)
  end

  @doc """
  Start or stop sending a break signal.
  """
  @spec set_break(GenServer.server(), boolean) :: :ok | {:error, term}
  def set_break(pid, value) when is_boolean(value) do
    GenServer.call(pid, {:set_break, value})
  end

  @doc """
  Write data to the opened UART. It's possible for the write to return before
  all of the data is actually transmitted. To wait for the data, call `drain/1`.

  This call blocks until all of the data to be written is in the operating
  system's internal buffers. If you're sending a lot of data on a slow link,
  supply a longer timeout to avoid timing out prematurely.

  Returns `:ok` on success or `{:error, reason}` if an error occurs.

  Typical error reasons:

  * `:ebadf` - the UART is closed
  """
  @spec write(GenServer.server(), any(), non_neg_integer()) :: :ok | {:error, term}
  def write(pid, data, timeout \\ 5000) do
    GenServer.call(pid, {:write, data, timeout}, genserver_timeout(timeout))
  end

  @doc """
  Read data from the UART. This call returns data as soon as it's available or
  after timing out.

  Returns `{:ok, binary}`, where `binary` is a binary data object that contains
  the read data, `{:error, reason}` if an error occurs, or `{:ok, <<>>}` after
  timing out.

  Typical error reasons:

  * `:ebadf` - the UART is closed
  * `:einval` - the UART is in active mode
  """
  @spec read(GenServer.server(), non_neg_integer()) :: {:ok, binary} | {:error, term}
  def read(pid, timeout \\ 5000) do
    GenServer.call(pid, {:read, timeout}, genserver_timeout(timeout))
  end

  @doc """
  Waits until all data has been transmitted. See
  [tcdrain(3)](http://linux.die.net/man/3/tcdrain) for low level details on
  Linux or OSX. This is not implemented on Windows.
  """
  @spec drain(GenServer.server()) :: :ok | {:error, term}
  def drain(pid) do
    GenServer.call(pid, :drain)
  end

  @doc """
  Flushes the `:receive` buffer, the `:transmit` buffer, or `:both`.

  See [tcflush(3)](http://linux.die.net/man/3/tcflush) for low level details on
  Linux or OSX. This calls `PurgeComm` on Windows.
  """
  @spec flush(GenServer.server()) :: :ok | {:error, term}
  def flush(pid, direction \\ :both) do
    GenServer.call(pid, {:flush, direction})
  end

  @doc """
  Returns a map of signal names and their current state (true or false).
  Signals include:

  * `:dsr` - Data Set Ready
  * `:dtr` - Data Terminal Ready
  * `:rts` - Request To Send
  * `:st` - Secondary Transmitted Data
  * `:sr` - Secondary Received Data
  * `:cts` - Clear To Send
  * `:cd` - Data Carrier Detect
  * `:rng` - Ring Indicator
  """
  @spec signals(GenServer.server()) :: map | {:error, term}
  def signals(pid) do
    GenServer.call(pid, :signals)
  end

  @doc """
  Set or clear the Data Terminal Ready signal.
  """
  @spec set_dtr(GenServer.server(), boolean) :: :ok | {:error, term}
  def set_dtr(pid, value) when is_boolean(value) do
    GenServer.call(pid, {:set_dtr, value})
  end

  @doc """
  Set or clear the Request To Send signal.
  """
  @spec set_rts(GenServer.server(), boolean) :: :ok | {:error, term}
  def set_rts(pid, value) when is_boolean(value) do
    GenServer.call(pid, {:set_rts, value})
  end

  @doc """
  Change the controlling process that receives events from an active uart.
  """
  @spec controlling_process(GenServer.server(), pid) :: :ok | {:error, term}
  def controlling_process(pid, controlling_process) when is_pid(controlling_process) do
    GenServer.call(pid, {:controlling_process, controlling_process})
  end

  # gen_server callbacks

  def init([]) do
    executable = Application.app_dir(:circuits_uart, ["priv", "circuits_uart"]) |> to_charlist()

    port =
      Port.open({:spawn_executable, executable}, [
        {:args, []},
        {:packet, 2},
        :use_stdio,
        :binary,
        :exit_status
      ])

    state = %State{port: port}
    {:ok, state}
  end

  def handle_call({:open, name, opts}, {from_pid, _}, state) do
    new_framing = Keyword.get(opts, :framing, nil)
    new_rx_framing_timeout = Keyword.get(opts, :rx_framing_timeout, state.rx_framing_timeout)
    is_active = Keyword.get(opts, :active, true)
    id_mode = Keyword.get(opts, :id, :name)

    response = call_port(state, :open, {name, opts})

    new_state =
      change_framing(
        %{
          state
          | name: name,
            controlling_process: from_pid,
            rx_framing_timeout: new_rx_framing_timeout,
            is_active: is_active,
            id: id_mode
        },
        new_framing
      )

    {:reply, response, new_state}
  end

  def handle_call(:configuration, _from, state) do
    opts =
      call_port(state, :configuration, {}) ++
        [
          active: state.is_active,
          id: state.id,
          rx_framing_timeout: state.rx_framing_timeout,
          framing: state.framing
        ]

    {:reply, {state.name, opts}, state}
  end

  def handle_call(:close, _from, state) do
    # Clean up the C side
    response = call_port(state, :close, nil)

    # Clean up the Elixir side
    new_framing_state = apply(state.framing, :flush, [:both, state.framing_state])

    new_state =
      handle_framing_timer(
        %{state | name: :closed, framing_state: new_framing_state, queued_messages: []},
        :ok
      )

    {:reply, response, new_state}
  end

  def handle_call({:read, _timeout}, _from, %{queued_messages: [message | rest]} = state) do
    # Return the queued response.
    new_state = %{state | queued_messages: rest}
    {:reply, {:ok, message}, new_state}
  end

  def handle_call({:read, timeout}, from, state) do
    call_time = System.monotonic_time(:millisecond)

    # Poll the serial port
    case call_port(state, :read, timeout, port_timeout(timeout)) do
      {:ok, <<>>} ->
        # Timeout
        {:reply, {:ok, <<>>}, state}

      {:ok, buffer} ->
        # More data
        {rc, messages, new_framing_state} =
          apply(state.framing, :remove_framing, [buffer, state.framing_state])

        new_state = handle_framing_timer(%{state | framing_state: new_framing_state}, rc)

        if messages == [] do
          # If nothing, poll some more with reduced timeout
          elapsed = System.monotonic_time(:millisecond) - call_time
          retry_timeout = max(timeout - elapsed, 0)
          handle_call({:read, retry_timeout}, from, new_state)
        else
          # Return the first message
          [first_message | rest] = messages
          new_state = %{new_state | queued_messages: rest}
          {:reply, {:ok, first_message}, new_state}
        end

      response ->
        # Error
        {:reply, response, state}
    end
  end

  def handle_call({:write, data, timeout}, _from, state) do
    {:ok, framed_data, new_framing_state} =
      apply(state.framing, :add_framing, [data, state.framing_state])

    response = call_port(state, :write, {framed_data, timeout}, port_timeout(timeout))
    new_state = %{state | framing_state: new_framing_state}
    {:reply, response, new_state}
  end

  def handle_call({:configure, opts}, _from, state) do
    new_framing = Keyword.get(opts, :framing, nil)
    new_rx_framing_timeout = Keyword.get(opts, :rx_framing_timeout, state.rx_framing_timeout)
    is_active = Keyword.get(opts, :active, state.is_active)
    id_mode = Keyword.get(opts, :id, state.id)

    state =
      change_framing(
        %{state | rx_framing_timeout: new_rx_framing_timeout, is_active: is_active, id: id_mode},
        new_framing
      )

    response = call_port(state, :configure, opts)
    {:reply, response, state}
  end

  def handle_call(:drain, _from, state) do
    response = call_port(state, :drain, nil)
    {:reply, response, state}
  end

  def handle_call({:flush, direction}, _from, state) do
    fstate = apply(state.framing, :flush, [direction, state.framing_state])
    new_state = %{state | framing_state: fstate}
    response = call_port(new_state, :flush, direction)
    {:reply, response, new_state}
  end

  def handle_call(:signals, _from, state) do
    response = call_port(state, :signals, nil)
    {:reply, response, state}
  end

  def handle_call({:set_dtr, value}, _from, state) do
    response = call_port(state, :set_dtr, value)
    {:reply, response, state}
  end

  def handle_call({:set_rts, value}, _from, state) do
    response = call_port(state, :set_rts, value)
    {:reply, response, state}
  end

  def handle_call({:set_break, value}, _from, state) do
    response = call_port(state, :set_break, value)
    {:reply, response, state}
  end

  def handle_call({:controlling_process, pid}, _from, state) do
    new_state = %{state | controlling_process: pid}
    {:reply, :ok, new_state}
  end

  def handle_info({_, {:data, <<?n, message::binary>>}}, state) do
    msg = :erlang.binary_to_term(message)
    handle_port(msg, state)
  end

  def handle_info(:rx_framing_timed_out, state) do
    {:ok, messages, new_framing_state} =
      apply(state.framing, :frame_timeout, [state.framing_state])

    new_state =
      notify_timedout_messages(
        %{state | rx_framing_tref: nil, framing_state: new_framing_state},
        messages
      )

    {:noreply, new_state}
  end

  defp notify_timedout_messages(%{is_active: true, controlling_process: dest} = state, messages)
       when dest != nil do
    Enum.each(messages, &report_message(state, &1))
    state
  end

  defp notify_timedout_messages(%{is_active: false} = state, messages) do
    # IO.puts("Queuing... #{inspect(messages)}")
    new_queued_messages = state.queued_messages ++ messages
    %{state | queued_messages: new_queued_messages}
  end

  defp notify_timedout_messages(state, _messages), do: state

  defp change_framing(state, nil), do: state

  defp change_framing(state, framing_mod) when is_atom(framing_mod) do
    change_framing(state, {framing_mod, []})
  end

  defp change_framing(state, {framing_mod, framing_args}) do
    {:ok, framing_state} = apply(framing_mod, :init, [framing_args])
    %{state | framing: framing_mod, framing_state: framing_state}
  end

  defp call_port(state, command, arguments, timeout \\ 4000) do
    msg = {command, arguments}
    send(state.port, {self(), {:command, :erlang.term_to_binary(msg)}})

    # Block until the response comes back since the C side
    # doesn't want to handle any queuing of requests. REVISIT
    receive do
      {_, {:data, <<?r, response::binary>>}} ->
        :erlang.binary_to_term(response)
    after
      timeout ->
        # Not sure how this can be recovered
        exit(:port_timed_out)
    end
  end

  defp handle_port({:notif, data}, state) when is_binary(data) do
    # IO.puts "Received data on port #{state.name}"
    {rc, messages, new_framing_state} =
      apply(state.framing, :remove_framing, [data, state.framing_state])

    new_state = handle_framing_timer(%{state | framing_state: new_framing_state}, rc)

    if state.controlling_process do
      Enum.each(messages, &report_message(new_state, &1))
    end

    {:noreply, new_state}
  end

  defp handle_port({:notif, data}, state) do
    # Report an error from the port
    if state.controlling_process do
      report_message(state, data)
    end

    {:noreply, state}
  end

  defp report_message(state, message) do
    event = {:circuits_uart, message_id(state.id, state.name), message}
    send(state.controlling_process, event)
  end

  defp message_id(:pid, _name), do: self()
  defp message_id(:name, name), do: name

  defp genserver_timeout(timeout) when timeout >= 0 do
    timeout + @genserver_timeout_slack
  end

  defp port_timeout(timeout) when timeout >= 0 do
    timeout + @port_timeout_slack
  end

  # Stop the framing timer if active and a frame completed
  defp handle_framing_timer(%{rx_framing_tref: tref} = state, :ok) when tref != nil do
    _ = :timer.cancel(tref)
    %{state | rx_framing_tref: nil}
  end

  # Start the framing timer if ended on an incomplete frame
  defp handle_framing_timer(%{rx_framing_timeout: timeout} = state, :in_frame) when timeout > 0 do
    _ = if state.rx_framing_tref, do: :timer.cancel(state.rx_framing_tref)
    {:ok, tref} = :timer.send_after(timeout, :rx_framing_timed_out)
    %{state | rx_framing_tref: tref}
  end

  # Don't do anything with the framing timer for all other reasons
  defp handle_framing_timer(state, _rc), do: state
end
path: lib/circuits_uart.ex | quality_prob: 0.806624 | learning_prob: 0.697168 | filename: circuits_uart.ex | kind: starcoder
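A hedged sketch of a typical passive-mode session with the module above; the device name is illustrative:

```elixir
{:ok, uart} = Circuits.UART.start_link()

# Illustrative port at 115200 baud with line framing, passive mode.
:ok =
  Circuits.UART.open(uart, "ttyUSB0",
    speed: 115_200,
    active: false,
    framing: Circuits.UART.Framing.Line
  )

:ok = Circuits.UART.write(uart, "AT")

# Blocks until a full line arrives or 1 second passes ({:ok, <<>>} on timeout).
{:ok, response} = Circuits.UART.read(uart, 1000)

:ok = Circuits.UART.close(uart)
```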
defmodule IntCode do
  @moduledoc """
  Executes the intcode
  """

  def sum(x, y) do
    {x + y, 4}
  end

  def mul(x, y) do
    {x * y, 4}
  end

  def less_than(x, y) do
    {if(x < y, do: 1, else: 0), 4}
  end

  def equals(x, y) do
    {if(x == y, do: 1, else: 0), 4}
  end

  def parse_program(program) do
    program
    |> String.split(~r/[,\n]/)
    |> Enum.reject(&(&1 == ""))
    |> Enum.map(&String.to_integer(&1))
    |> Enum.with_index(0)
    |> Enum.map(fn {k, v} -> {v, k} end)
    |> Map.new()
  end

  def execute(program, opcode, needle, inputs \\ [])

  # Opcode 99: halt
  def execute(program, opcode, needle, inputs) when rem(opcode, 100) == 99 do
    {program, :halt, needle, inputs}
  end

  # Opcode 6: jump-if-false
  def execute(program, opcode, needle, inputs) when rem(opcode, 100) == 6 do
    reversed_opcode = opcode |> Integer.digits() |> Enum.reverse()
    mode_b = Enum.at(reversed_opcode, 3) || 0
    mode_c = Enum.at(reversed_opcode, 2) || 0

    index_x = program[needle + 1]
    index_y = program[needle + 2]
    value_x = program[if(mode_c == 1, do: needle + 1, else: index_x)]
    value_y = program[if(mode_b == 1, do: needle + 2, else: index_y)]

    needle_jump = if value_x == 0, do: value_y, else: needle + 3
    execute(program, program[needle_jump], needle_jump, inputs)
  end

  # Opcode 5: jump-if-true
  def execute(program, opcode, needle, inputs) when rem(opcode, 100) == 5 do
    reversed_opcode = opcode |> Integer.digits() |> Enum.reverse()
    mode_b = Enum.at(reversed_opcode, 3) || 0
    mode_c = Enum.at(reversed_opcode, 2) || 0

    index_x = program[needle + 1]
    index_y = program[needle + 2]
    value_x = program[if(mode_c == 1, do: needle + 1, else: index_x)]
    value_y = program[if(mode_b == 1, do: needle + 2, else: index_y)]

    needle_jump = if value_x != 0, do: value_y, else: needle + 3
    execute(program, program[needle_jump], needle_jump, inputs)
  end

  # Opcode 4: output
  def execute(program, opcode, needle, inputs) when rem(opcode, 100) == 4 do
    reversed_opcode = opcode |> Integer.digits() |> Enum.reverse()
    mode_c = Enum.at(reversed_opcode, 2) || 0

    index_x = program[needle + 1]
    value_x = program[if(mode_c == 1, do: needle + 1, else: index_x)]

    IO.puts(value_x)
    execute(program, program[needle + 2], needle + 2, inputs)
  end

  # Opcode 3: input
  def execute(program, opcode, needle, inputs) when rem(opcode, 100) == 3 do
    [input | new_inputs] = inputs
    index_x = program[needle + 1]
    execute(Map.put(program, index_x, input), program[needle + 2], needle + 2, new_inputs)
  end

  # Opcodes 1, 2, 7, 8: binary operations that store their result
  def execute(program, opcode, needle, inputs) do
    reversed_opcode = opcode |> Integer.digits() |> Enum.reverse()

    {_mode_a, mode_b, mode_c} =
      {Enum.at(reversed_opcode, 4, 0), Enum.at(reversed_opcode, 3, 0),
       Enum.at(reversed_opcode, 2, 0)}

    index_z = program[needle + 3]
    value_x = program[if(mode_c == 1, do: needle + 1, else: program[needle + 1])]
    # Position mode for the second operand dereferences program[needle + 2]
    value_y = program[if(mode_b == 1, do: needle + 2, else: program[needle + 2])]

    {result, needle_jump} =
      case rem(opcode, 100) do
        1 -> sum(value_x, value_y)
        2 -> mul(value_x, value_y)
        7 -> less_than(value_x, value_y)
        8 -> equals(value_x, value_y)
      end

    new_program = Map.put(program, index_z, result)
    execute(new_program, new_program[needle + needle_jump], needle + needle_jump, inputs)
  end
end
path: lib/intcode.ex | quality_prob: 0.570212 | learning_prob: 0.594434 | filename: intcode.ex | kind: starcoder
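A quick check of the interpreter above using the classic `1002,4,3,4,33` program, which multiplies position 4 by an immediate 3 and stores the result back at position 4 (producing the halt opcode 99):

```elixir
program = IntCode.parse_program("1002,4,3,4,33")

# Kick off execution from address 0; the returned program is the final memory.
{final, :halt, _needle, _inputs} = IntCode.execute(program, program[0], 0)

final[4]
# => 99
```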
defmodule Membrane.RTP.H264.StapA do
  @moduledoc """
  Module responsible for parsing Single Time Aggregation Packets type A.

  Documented in [RFC6184](https://tools.ietf.org/html/rfc6184#page-22)

  ```
   0                   1                   2                   3
   0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
  |                          RTP Header                           |
  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
  |STAP-A NAL HDR |         NALU 1 Size           |  NALU 1 HDR   |
  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
  |                         NALU 1 Data                           |
  :                                                               :
  +               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
  |               |         NALU 2 Size           |  NALU 2 HDR   |
  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
  |                         NALU 2 Data                           |
  :                                                               :
  |                               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
  |                               :...OPTIONAL RTP padding        |
  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
  ```
  """

  use Bunch

  alias Membrane.RTP.H264.NAL

  @spec parse(binary()) :: {:ok, [binary()]} | {:error, :packet_malformed}
  def parse(data) do
    do_parse(data, [])
  end

  defp do_parse(<<>>, acc), do: {:ok, Enum.reverse(acc)}

  defp do_parse(<<size::16, nalu::binary-size(size), rest::binary>>, acc),
    do: do_parse(rest, [nalu | acc])

  defp do_parse(_data, _acc), do: {:error, :packet_malformed}

  @spec aggregation_unit_size(binary()) :: pos_integer()
  def aggregation_unit_size(nalu), do: byte_size(nalu) + 2

  @spec serialize([binary], 0..1, 0..3) :: binary
  def serialize(payloads, reserved, nri) do
    payloads
    |> Enum.reverse()
    |> Enum.map(&<<byte_size(&1)::16, &1::binary>>)
    |> IO.iodata_to_binary()
    |> NAL.Header.add_header(reserved, nri, NAL.Header.encode_type(:stap_a))
  end
end
path: lib/rtp_h264/nal_formats/stap_a.ex | quality_prob: 0.732305 | learning_prob: 0.716962 | filename: stap_a.ex | kind: starcoder
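A small sketch of `parse/1` on a handcrafted STAP-A payload (size-prefixed aggregation units, STAP-A header byte already stripped):

```elixir
# Two aggregation units: a 2-byte size prefix, then the NALU bytes.
payload = <<3::16, 1, 2, 3, 2::16, 4, 5>>

{:ok, [<<1, 2, 3>>, <<4, 5>>]} = Membrane.RTP.H264.StapA.parse(payload)

# A truncated payload is rejected.
{:error, :packet_malformed} = Membrane.RTP.H264.StapA.parse(<<10::16, 1, 2>>)
```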
defmodule VintageNet.IP do
  @moduledoc """
  This module contains utilities for handling IP addresses.

  By far the most important part of handling IP addresses is to pay attention
  to whether your addresses are names, IP addresses as strings, or IP addresses
  as tuples. This module doesn't resolve names. While IP addresses in string
  form are convenient to type, nearly all Erlang and Elixir code uses IP
  addresses in tuple form.
  """

  @doc """
  Convert an IP address to a string

  Examples:

      iex> VintageNet.IP.ip_to_string({192, 168, 0, 1})
      "192.168.0.1"

      iex> VintageNet.IP.ip_to_string("192.168.9.1")
      "192.168.9.1"

      iex> VintageNet.IP.ip_to_string({65152, 0, 0, 0, 0, 0, 0, 1})
      "fe80::1"
  """
  @spec ip_to_string(VintageNet.any_ip_address()) :: String.t()
  def ip_to_string(ipa) when is_tuple(ipa) do
    :inet.ntoa(ipa) |> List.to_string()
  end

  def ip_to_string(ipa) when is_binary(ipa), do: ipa

  @doc """
  Convert an IP address w/ prefix to a CIDR-formatted string

  Examples:

      iex> VintageNet.IP.cidr_to_string({192, 168, 0, 1}, 24)
      "192.168.0.1/24"
  """
  @spec cidr_to_string(:inet.ip_address(), VintageNet.prefix_length()) :: String.t()
  def cidr_to_string(ipa, bits) do
    ip_to_string(ipa) <> "/" <> Integer.to_string(bits)
  end

  @doc """
  Convert an IP address to tuple form

  Examples:

      iex> VintageNet.IP.ip_to_tuple("192.168.0.1")
      {:ok, {192, 168, 0, 1}}

      iex> VintageNet.IP.ip_to_tuple({192, 168, 1, 1})
      {:ok, {192, 168, 1, 1}}

      iex> VintageNet.IP.ip_to_tuple("fe80::1")
      {:ok, {65152, 0, 0, 0, 0, 0, 0, 1}}

      iex> VintageNet.IP.ip_to_tuple({65152, 0, 0, 0, 0, 0, 0, 1})
      {:ok, {65152, 0, 0, 0, 0, 0, 0, 1}}

      iex> VintageNet.IP.ip_to_tuple("bologna")
      {:error, "Invalid IP address: bologna"}
  """
  @spec ip_to_tuple(VintageNet.any_ip_address()) ::
          {:ok, :inet.ip_address()} | {:error, String.t()}
  def ip_to_tuple({a, b, c, d} = ipa)
      when a >= 0 and a <= 255 and b >= 0 and b <= 255 and c >= 0 and c <= 255 and d >= 0 and
             d <= 255,
      do: {:ok, ipa}

  def ip_to_tuple({a, b, c, d, e, f, g, h} = ipa)
      when a >= 0 and a <= 65535 and b >= 0 and b <= 65535 and c >= 0 and c <= 65535 and
             d >= 0 and d <= 65535 and e >= 0 and e <= 65535 and f >= 0 and f <= 65535 and
             g >= 0 and g <= 65535 and h >= 0 and h <= 65535,
      do: {:ok, ipa}

  def ip_to_tuple(ipa) when is_binary(ipa) do
    case :inet.parse_address(to_charlist(ipa)) do
      {:ok, addr} -> {:ok, addr}
      {:error, :einval} -> {:error, "Invalid IP address: #{ipa}"}
    end
  end

  def ip_to_tuple(ipa), do: {:error, "Invalid IP address: #{inspect(ipa)}"}

  @doc """
  Raising version of ip_to_tuple/1
  """
  @spec ip_to_tuple!(VintageNet.any_ip_address()) :: :inet.ip_address()
  def ip_to_tuple!(ipa) do
    case ip_to_tuple(ipa) do
      {:ok, addr} -> addr
      {:error, error} -> raise ArgumentError, error
    end
  end

  @doc """
  Convert an IPv4 subnet mask to a prefix length.

  Examples:

      iex> VintageNet.IP.subnet_mask_to_prefix_length({255, 255, 255, 0})
      {:ok, 24}

      iex> VintageNet.IP.subnet_mask_to_prefix_length({192, 168, 1, 1})
      {:error, "{192, 168, 1, 1} is not a valid IPv4 subnet mask"}
  """
  @spec subnet_mask_to_prefix_length(:inet.ip4_address()) ::
          {:ok, VintageNet.ipv4_prefix_length()} | {:error, String.t()}
  def subnet_mask_to_prefix_length(subnet_mask) do
    # Not exactly efficient...
    lookup = for bits <- 0..32, into: %{}, do: {prefix_length_to_subnet_mask(:inet, bits), bits}

    case Map.get(lookup, subnet_mask) do
      nil -> {:error, "#{inspect(subnet_mask)} is not a valid IPv4 subnet mask"}
      bits -> {:ok, bits}
    end
  end

  @doc """
  Convert an IPv4 or IPv6 prefix length to a subnet mask.

  Examples:

      iex> VintageNet.IP.prefix_length_to_subnet_mask(:inet, 24)
      {255, 255, 255, 0}

      iex> VintageNet.IP.prefix_length_to_subnet_mask(:inet, 28)
      {255, 255, 255, 240}

      iex> VintageNet.IP.prefix_length_to_subnet_mask(:inet6, 64)
      {65535, 65535, 65535, 65535, 0, 0, 0, 0}
  """
  @spec prefix_length_to_subnet_mask(:inet | :inet6, VintageNet.prefix_length()) ::
          :inet.ip_address()
  def prefix_length_to_subnet_mask(:inet, len) when len >= 0 and len <= 32 do
    rest = 32 - len
    <<a, b, c, d>> = <<-1::size(len), 0::size(rest)>>
    {a, b, c, d}
  end

  def prefix_length_to_subnet_mask(:inet6, len) when len >= 0 and len <= 128 do
    rest = 128 - len

    <<a::size(16), b::size(16), c::size(16), d::size(16), e::size(16), f::size(16), g::size(16),
      h::size(16)>> = <<-1::size(len), 0::size(rest)>>

    {a, b, c, d, e, f, g, h}
  end

  @doc """
  Utility function to trim an IP address to its subnet

  Examples:

      iex> VintageNet.IP.to_subnet({192, 168, 1, 50}, 24)
      {192, 168, 1, 0}

      iex> VintageNet.IP.to_subnet({192, 168, 255, 50}, 22)
      {192, 168, 252, 0}
  """
  @spec to_subnet(:inet.ip_address(), VintageNet.prefix_length()) :: :inet.ip_address()
  def to_subnet({a, b, c, d}, subnet_bits) when subnet_bits >= 0 and subnet_bits <= 32 do
    not_subnet_bits = 32 - subnet_bits
    <<subnet::size(subnet_bits), _::size(not_subnet_bits)>> = <<a, b, c, d>>
    <<new_a, new_b, new_c, new_d>> = <<subnet::size(subnet_bits), 0::size(not_subnet_bits)>>
    {new_a, new_b, new_c, new_d}
  end
end
lib/vintage_net/ip.ex
0.887774
0.471345
ip.ex
starcoder
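# A small sketch composing the VintageNet.IP helpers above; values are illustrative.
{:ok, ip} = VintageNet.IP.ip_to_tuple("192.168.1.50")
{:ok, bits} = VintageNet.IP.subnet_mask_to_prefix_length({255, 255, 255, 0})

VintageNet.IP.to_subnet(ip, bits)      # => {192, 168, 1, 0}
VintageNet.IP.cidr_to_string(ip, bits) # => "192.168.1.50/24"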
defmodule Mix.Tasks.Snoop do
  use Mix.Task

  @moduledoc """
  A tool for snooping on DHCP transactions that are passing by this particular
  connected device.

  ## Usage

  Run this mix task on a device on the same layer-2 network as the network where
  you'd like to watch DHCP packets go by.  It's probably a good idea to *not* have
  this be the same machine that you're using to serve DHCP.

  ```bash
  mix snoop
  ```

  `Ctrl-c` will exit this mix task.

  You'll probably want to set the following `iptables` settings before running:

  ```bash
  iptables -t nat -I PREROUTING -p udp --dport 67 -j DNAT --to :6767
  iptables -t nat -I PREROUTING -p udp --dport 68 -j DNAT --to :6767
  ```

  With these rules in place, DHCP packets streaming by will be logged to the
  console.
  """

  @shortdoc "snoop on DHCP packets as they go by"

  defmodule DhcpSnooper do
    @moduledoc false
    use ExDhcp
    require Logger

    @impl true
    def init(_) do
      Logger.info("SNOOPING...")
      {:ok, :ok}
    end

    @impl true
    def handle_discover(packet, _, _, :ok) do
      Logger.info(inspect(packet))
      {:norespond, :ok}
    end

    @impl true
    def handle_request(packet, _, _, :ok) do
      Logger.info(inspect(packet))
      {:norespond, :ok}
    end

    @impl true
    def handle_decline(packet, _, _, :ok) do
      Logger.info(inspect(packet))
      {:norespond, :ok}
    end

    @impl true
    def handle_inform(packet, _, _, :ok) do
      Logger.info(inspect(packet))
      {:norespond, :ok}
    end

    @impl true
    def handle_release(packet, _, _, :ok) do
      Logger.info(inspect(packet))
      {:norespond, :ok}
    end

    @impl true
    def handle_packet(packet, _, _, :ok) do
      Logger.info(inspect(packet))
      {:norespond, :ok}
    end

    @impl true
    def handle_info({:udp, _, _, _, binary}, :ok) do
      unrolled_binary =
        binary
        |> :erlang.binary_to_list()
        |> Enum.chunk_every(16)
        |> Enum.map(&Enum.join(&1, ", "))
        |> Enum.join("\n")

      Logger.warn("untrapped udp: \n <<#{unrolled_binary}>> ")
      {:noreply, :ok}
    end

    def handle_info(info, :ok) do
      Logger.warn(inspect(info))
      {:noreply, :ok}
    end
  end

  @doc false
  def run(_) do
    # Listen on the port the iptables rules above redirect to.
    DhcpSnooper.start_link(:ok, port: 6767)

    receive do
    after
      :infinity -> :ok
    end
  end
end
lib/mix.tasks.snoop.ex
0.764188
0.689456
mix.tasks.snoop.ex
starcoder
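# Sketch of the hex-dump formatting used by handle_info/2 above, applied to a
# toy binary so the output shape is visible without live UDP traffic.
<<0, 1, 2, 3>>
|> :erlang.binary_to_list()
|> Enum.chunk_every(16)
|> Enum.map(&Enum.join(&1, ", "))
|> Enum.join("\n")
# => "0, 1, 2, 3"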
defmodule ST7789 do
  @moduledoc """
  ST7789 Elixir driver
  """
  use Bitwise

  @enforce_keys [:spi, :gpio, :opts]
  defstruct [:spi, :gpio, :opts]

  @doc """
  New connection to an ST7789

  - **port**: SPI port number
    Default value: `0`
  - **cs**: SPI chip-select number (0 or 1 for BCM).
    Default value: `0`.
  - **dc**: Command/data register selection
    Default value: `9`.
  - **backlight**: Pin for controlling backlight
    Default value: `nil`.
  - **rst**: Reset pin for ST7789
    Default value: `nil`.
  - **width**: Width of display connected to ST7789
    Default value: `240`.
  - **height**: Height of display connected to ST7789
    Default value: `240`.
  - **offset_top**: Offset to top row
    Default value: `0`.
  - **offset_left**: Offset to left column
    Default value: `0`.
  - **invert**: Invert display
    Default value: `true`.
  - **speed_hz**: SPI speed (in Hz)
    Default value: `4_000_000`.

  **return**: `%ST7789{}`

  ## Example

  ```elixir
  # default
  # assuming device at /dev/spidev0.0
  # DC connects to BCM 9
  # BL not connected
  # RST not connected
  # SPI speed: 4MHz
  disp = ST7789.new()
  ```

  ```elixir
  # specify init arguments
  port = 0                    # spi bus 0
  cs = 0                      # BCM 8 / CE 0
  dc = 9                      # BCM 9
  backlight = 17              # BCM 17
  speed_hz = 80 * 1000 * 1000 # 80MHz
  disp = ST7789.new(port: port, cs: cs, dc: dc, backlight: backlight, speed_hz: speed_hz)
  ```
  """
  @doc functions: :exported
  def new(opts \\ []) do
    port = opts[:port] || 0
    cs = opts[:cs] || 0
    dc = opts[:dc] || 9
    speed_hz = opts[:speed_hz] || 4_000_000
    # `opts[:invert] || true` would force `true` even when the caller passes
    # `invert: false`, so use Keyword.get/3 with a default instead.
    invert = Keyword.get(opts, :invert, true)
    width = opts[:width] || 240
    height = opts[:height] || 240
    offset_top = opts[:offset_top] || 0
    offset_left = opts[:offset_left] || 0
    backlight = opts[:backlight]
    rst = opts[:rst]

    {:ok, spi} = Circuits.SPI.open("spidev#{port}.#{cs}", speed_hz: speed_hz)

    # Set DC as output.
    {:ok, gpio_dc} = Circuits.GPIO.open(dc, :output)

    # Setup backlight as output (if provided).
    gpio_backlight = init_backlight(backlight)

    # Setup reset as output (if provided).
    gpio_rst = init_reset(rst)

    %ST7789{
      spi: spi,
      gpio: [
        dc: gpio_dc,
        backlight: gpio_backlight,
        rst: gpio_rst
      ],
      opts: [
        port: port,
        cs: cs,
        dc: dc,
        speed_hz: speed_hz,
        invert: invert,
        width: width,
        height: height,
        offset_top: offset_top,
        offset_left: offset_left,
        backlight: backlight,
        rst: rst
      ]
    }
    |> ST7789.reset()
    |> init()
  end

  @doc """
  Reset the display, if reset pin is connected.

  - **self**: `%ST7789{}`

  **return**: `self`
  """
  @doc functions: :exported
  def reset(self = %ST7789{gpio: gpio}) do
    gpio_rst = gpio[:rst]

    if gpio_rst != nil do
      Circuits.GPIO.write(gpio_rst, 1)
      :timer.sleep(500)
      Circuits.GPIO.write(gpio_rst, 0)
      :timer.sleep(500)
      Circuits.GPIO.write(gpio_rst, 1)
      :timer.sleep(500)
    end

    self
  end

  @doc """
  Write the provided 16bit RGB565 image to the hardware.

  - **self**: `%ST7789{}`
  - **image_data**: Should be 16bit RGB565 format and the same dimensions
    (width x height x 2 bytes) as the display hardware.

  **return**: `self`
  """
  @doc functions: :exported
  def display_rgb565(self, image_data) when is_binary(image_data) do
    display_rgb565(self, :binary.bin_to_list(image_data))
  end

  def display_rgb565(self, image_data) when is_list(image_data) do
    self
    |> set_window(x0: 0, y0: 0, x1: nil, y1: nil)
    |> send(image_data, true, 4096)
  end

  @doc """
  Write the provided 24bit BGR888/RGB888 image to the hardware.

  - **self**: `%ST7789{}`
  - **image_data**: Should be 24bit BGR888/RGB888 format and the same dimensions
    (width x height x 3 bytes) as the display hardware.
  - **channel_order**: either `:rgb` or `:bgr`

  **return**: `self`
  """
  @doc functions: :exported
  def display(self, image_data, channel_order)
      when is_binary(image_data) and (channel_order == :rgb or channel_order == :bgr) do
    display_rgb565(
      self,
      image_data
      |> CvtColor.cvt(String.to_atom("#{Atom.to_string(channel_order)}888"), :rgb565)
      |> :binary.bin_to_list()
    )
  end

  def display(self, image_data, channel_order)
      when is_list(image_data) and (channel_order == :rgb or channel_order == :bgr) do
    # Flatten the (possibly nested) byte list into a binary and reuse the binary
    # clause above; recursing with a list of binaries would never terminate.
    display(self, IO.iodata_to_binary(image_data), channel_order)
  end

  @doc """
  Set backlight status

  - **self**: `%ST7789{}`
  - **status**: either `:on` or `:off`

  **return**: `self`
  """
  @doc functions: :exported
  def set_backlight(self = %ST7789{gpio: gpio}, :on) do
    backlight = gpio[:backlight]

    if backlight != nil do
      Circuits.GPIO.write(backlight, 1)
    end

    self
  end

  def set_backlight(self = %ST7789{gpio: gpio}, :off) do
    backlight = gpio[:backlight]

    if backlight != nil do
      Circuits.GPIO.write(backlight, 0)
    end

    self
  end

  @doc """
  Get screen size

  - **self**: `%ST7789{}`

  **return**: `%{height: height, width: width}`
  """
  @doc functions: :exported
  def size(%ST7789{opts: opts}) do
    %{height: opts[:height], width: opts[:width]}
  end

  @doc """
  Write a byte to the display as command data.

  - **self**: `%ST7789{}`
  - **cmd**: command data

  **return**: `self`
  """
  @doc functions: :exported
  def command(self, cmd) when is_integer(cmd) do
    send(self, cmd, false)
  end

  @doc """
  Write a byte or array of bytes to the display as display data.

  - **self**: `%ST7789{}`
  - **data**: display data

  **return**: `self`
  """
  @doc functions: :exported
  def data(self, data) do
    send(self, data, true)
  end

  defp chunk_binary(binary, chunk_size) when is_binary(binary) do
    total_bytes = byte_size(binary)
    full_chunks = div(total_bytes, chunk_size)

    chunks =
      if full_chunks > 0 do
        for i <- 0..(full_chunks - 1), reduce: [] do
          acc -> [:binary.part(binary, chunk_size * i, chunk_size) | acc]
        end
      else
        []
      end

    remaining = rem(total_bytes, chunk_size)

    chunks =
      if remaining > 0 do
        [:binary.part(binary, chunk_size * full_chunks, remaining) | chunks]
      else
        chunks
      end

    Enum.reverse(chunks)
  end

  @doc """
  Send bytes to the ST7789

  - **self**: `%ST7789{}`
  - **bytes**: The bytes to be sent to `self`
    - `when is_integer(bytes)`, `send/4` will take the 8 least-significant bits
      `[band(bytes, 0xFF)]` and send them to `self`
    - `when is_list(bytes)`, `bytes` will be cast to a bitstring and then sent to `self`
  - **is_data**:
    - `true`: `bytes` will be sent as data
    - `false`: `bytes` will be sent as commands
  - **chunk_size**: Indicates how many bytes will be sent in a single write call

  **return**: `self`
  """
  @doc functions: :exported
  def send(self, bytes, is_data, chunk_size \\ 4096)

  def send(self = %ST7789{}, bytes, true, chunk_size) do
    send(self, bytes, 1, chunk_size)
  end

  def send(self = %ST7789{}, bytes, false, chunk_size) do
    send(self, bytes, 0, chunk_size)
  end

  def send(self = %ST7789{}, bytes, is_data, chunk_size)
      when (is_data == 0 or is_data == 1) and is_integer(bytes) do
    send(self, [Bitwise.band(bytes, 0xFF)], is_data, chunk_size)
  end

  def send(self = %ST7789{}, bytes, is_data, chunk_size)
      when (is_data == 0 or is_data == 1) and is_list(bytes) do
    send(self, IO.iodata_to_binary(bytes), is_data, chunk_size)
  end

  def send(self = %ST7789{gpio: gpio, spi: spi}, bytes, is_data, chunk_size)
      when (is_data == 0 or is_data == 1) and is_binary(bytes) do
    gpio_dc = gpio[:dc]

    if gpio_dc != nil do
      Circuits.GPIO.write(gpio_dc, is_data)

      for xfdata <- chunk_binary(bytes, chunk_size) do
        {:ok, _ret} = Circuits.SPI.transfer(spi, xfdata)
      end

      self
    else
      {:error, "gpio[:dc] is nil"}
    end
  end

  defp init(self = %ST7789{opts: board}) do
    invert = board[:invert]

    # Initialize the display.
    # Software reset
    command(self, kSWRESET())
    # delay 150 ms
    :timer.sleep(150)

    self
    |> command(kMADCTL())
    |> data(0x70)
    |> command(kFRMCTR2())
    |> data(0x0C)
    |> data(0x0C)
    |> data(0x00)
    |> data(0x33)
    |> data(0x33)
    |> command(kCOLMOD())
    |> data(0x05)
    |> command(kGCTRL())
    |> data(0x14)
    |> command(kVCOMS())
    |> data(0x37)
    # Power control
    |> command(kLCMCTRL())
    |> data(0x2C)
    # Power control
    |> command(kVDVVRHEN())
    |> data(0x01)
    # Power control
    |> command(kVRHS())
    |> data(0x12)
    # Power control
    |> command(kVDVS())
    |> data(0x20)
    |> command(0xD0)
    |> data(0xA4)
    |> data(0xA1)
    |> command(kFRCTRL2())
    |> data(0x0F)
    # Set Gamma
    |> command(kGMCTRP1())
    |> data(0xD0)
    |> data(0x04)
    |> data(0x0D)
    |> data(0x11)
    |> data(0x13)
    |> data(0x2B)
    |> data(0x3F)
    |> data(0x54)
    |> data(0x4C)
    |> data(0x18)
    |> data(0x0D)
    |> data(0x0B)
    |> data(0x1F)
    |> data(0x23)
    # Set Gamma
    |> command(kGMCTRN1())
    |> data(0xD0)
    |> data(0x04)
    |> data(0x0C)
    |> data(0x11)
    |> data(0x13)
    |> data(0x2C)
    |> data(0x3F)
    |> data(0x44)
    |> data(0x51)
    |> data(0x2F)
    |> data(0x1F)
    |> data(0x1F)
    |> data(0x20)
    |> data(0x23)
    |> init_invert(invert)
    |> command(kSLPOUT())
    |> command(kDISPON())

    :timer.sleep(100)
    self
  end

  defp init_backlight(nil), do: nil

  defp init_backlight(backlight) when backlight >= 0 do
    {:ok, gpio} = Circuits.GPIO.open(backlight, :output)
    Circuits.GPIO.write(gpio, 0)
    :timer.sleep(100)
    Circuits.GPIO.write(gpio, 1)
    gpio
  end

  defp init_backlight(_), do: nil

  defp init_reset(nil), do: nil

  defp init_reset(rst) when rst >= 0 do
    {:ok, gpio} = Circuits.GPIO.open(rst, :output)
    gpio
  end

  defp init_reset(_), do: nil

  defp init_invert(self, true) do
    # Invert display
    command(self, kINVON())
  end

  defp init_invert(self, _) do
    # Don't invert display
    command(self, kINVOFF())
  end

  defp set_window(self = %ST7789{opts: board}, opts) do
    width = board[:width]
    height = board[:height]
    offset_top = board[:offset_top]
    offset_left = board[:offset_left]

    # nil coordinates fall back to the full display area.
    x0 = Keyword.get(opts, :x0, 0)
    x1 = Keyword.get(opts, :x1) || width - 1
    y0 = Keyword.get(opts, :y0, 0)
    y1 = Keyword.get(opts, :y1) || height - 1

    y0 = y0 + offset_top
    y1 = y1 + offset_top
    x0 = x0 + offset_left
    x1 = x1 + offset_left

    self
    |> command(kCASET())
    |> data(bsr(x0, 8))
    |> data(band(x0, 0xFF))
    |> data(bsr(x1, 8))
    |> data(band(x1, 0xFF))
    |> command(kRASET())
    |> data(bsr(y0, 8))
    |> data(band(y0, 0xFF))
    |> data(bsr(y1, 8))
    |> data(band(y1, 0xFF))
    |> command(kRAMWR())
  end

  @doc functions: :constants
  def kSWRESET, do: 0x01
  @doc functions: :constants
  def kSLPOUT, do: 0x11
  @doc functions: :constants
  def kINVOFF, do: 0x20
  @doc functions: :constants
  def kINVON, do: 0x21
  @doc functions: :constants
  def kDISPON, do: 0x29
  @doc functions: :constants
  def kCASET, do: 0x2A
  @doc functions: :constants
  def kRASET, do: 0x2B
  @doc functions: :constants
  def kRAMWR, do: 0x2C
  @doc functions: :constants
  def kMADCTL, do: 0x36
  @doc functions: :constants
  def kCOLMOD, do: 0x3A
  @doc functions: :constants
  def kFRMCTR2, do: 0xB2
  @doc functions: :constants
  def kGCTRL, do: 0xB7
  @doc functions: :constants
  def kVCOMS, do: 0xBB
  @doc functions: :constants
  def kLCMCTRL, do: 0xC0
  @doc functions: :constants
  def kVDVVRHEN, do: 0xC2
  @doc functions: :constants
  def kVRHS, do: 0xC3
  @doc functions: :constants
  def kVDVS, do: 0xC4
  @doc functions: :constants
  def kFRCTRL2, do: 0xC6
  @doc functions: :constants
  def kGMCTRP1, do: 0xE0
  @doc functions: :constants
def kGMCTRN1, do: 0xE1 end
lib/st7789_elixir.ex
0.916367
0.867092
st7789_elixir.ex
starcoder
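# Hedged sketch of pushing a solid frame to the ST7789 driver above. Pin
# defaults are those documented in new/1; 0xF800 is assumed to be red in
# big-endian RGB565.
disp = ST7789.new()
%{width: w, height: h} = ST7789.size(disp)

red_frame = :binary.copy(<<0xF8, 0x00>>, w * h)
ST7789.display_rgb565(disp, red_frame)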
defmodule GrovePi.RGBLCD do @moduledoc """ Conveniences for controlling a RGB LCD Display. The display should be connected to the I2C-1 port. Example usage: ``` iex> {:ok, config} = GrovePi.RGBLCD.initialize() {:ok, %GrovePi.RGBLCD.Config{display_control: 12, entry_mode: 6, function: 56}} iex> {:ok, new_config} = GrovePi.RGBLCD.cursor_on(config) {:ok, %GrovePi.RGBLCD.Config{display_control: 14, entry_mode: 6, function: 56}} iex> GrovePi.RGBLCD.set_rgb(0, 255, 0) :ok iex> GrovePi.RGBLCD.set_text("hello world!") :ok ``` """ # References # datasheet: https://www.sparkfun.com/datasheets/LCD/HD44780.pdf # C++ library: https://github.com/Seeed-Studio/Grove_LCD_RGB_Backlight @rgb_address 0x62 @lcd_address 0x3E # pwm2 @reg_red 0x04 # pwm1 @reg_green 0x03 # pwm0 @reg_blue 0x02 @reg_mode1 0x00 @reg_mode2 0x01 @reg_output 0x08 # commands @lcd_clear_display 0x01 @lcd_return_home 0x02 @lcd_entry_mode 0x04 @lcd_display_control 0x08 @lcd_shift 0x10 @lcd_function 0x20 # @lcd_set_cg_ram_addr 0x40 ## config unused @lcd_set_dd_ram_addr 0x80 # flags for set entry mode @lcd_increment_after_entry 0x02 # @lcd_decrement_after_entry 0x00 ## config unused @lcd_display_shift_on 0x01 @lcd_display_shift_off 0x00 # flags for control display @lcd_display_on 0x04 # @lcd_display_off 0x00 ## config unused @lcd_cursor_on 0x02 @lcd_cursor_off 0x00 @lcd_blink_on 0x01 @lcd_blink_off 0x00 # flags for shift cursor or display @lcd_move_display 0x08 @lcd_move_cursor 0x00 @lcd_move_left 0x04 @lcd_move_right 0x00 # flags for set function, must be set during initialization @lcd_8_bit_mode 0x10 # @lcd_4_bit_mode 0x00 ## config unused @lcd_2_line 0x08 # @lcd_1_line 0x00 ## config unused # @lcd_5x10_dots 0x04 ## config unused @lcd_5x8_dots 0x00 alias GrovePi.{Board, RGBLCD} import Bitwise defmodule Config do @moduledoc """ Module with struct to hold GrovePi.RGBLCD configuration """ defstruct entry_mode: :none, display_control: :none, function: :none def update_display_control(config, display_control) do %{config | display_control: display_control} end def update_entry_mode(config, entry_mode) do %{config | entry_mode: entry_mode} end def update_function(config, function) do %{config | function: function} end end @doc """ Autoscroll so display moves with cursor """ def autoscroll(%{entry_mode: entry_mode} = config) do new_entry_mode = entry_mode |> set_config(@lcd_entry_mode) |> set_config(@lcd_display_shift_on) send_lcd_cmd(new_entry_mode) {:ok, RGBLCD.Config.update_entry_mode(config, new_entry_mode)} end @doc """ Display does not move with cursor """ def autoscroll_off(%{entry_mode: entry_mode} = config) do new_entry_mode = entry_mode |> set_config(@lcd_entry_mode) |> set_rev_config(@lcd_display_shift_on) send_lcd_cmd(new_entry_mode) {:ok, RGBLCD.Config.update_entry_mode(config, new_entry_mode)} end @doc """ Clears the LCD Display """ def clear_display() do send_lcd_cmd(@lcd_clear_display) # cmd takes a long time C++ library slept for 2000 Process.sleep(50) end @doc """ Turn off blinking the cursor """ def cursor_blink_off(%{display_control: display_control} = config) do new_display_control = display_control |> set_config(@lcd_display_control) |> set_rev_config(@lcd_blink_on) send_lcd_cmd(new_display_control) {:ok, RGBLCD.Config.update_display_control(config, new_display_control)} end @doc """ Turn on blinking the cursor """ def cursor_blink_on(%{display_control: display_control} = config) do new_display_control = display_control |> set_config(@lcd_display_control) |> set_config(@lcd_blink_on) send_lcd_cmd(new_display_control) {:ok, 
RGBLCD.Config.update_display_control(config, new_display_control)} end @doc """ Moves cursor to the left. Accepts spaces (integer), defaults to 1. """ def cursor_left(spaces \\ 1) do for _num <- 1..spaces do do_cursor_left() Process.sleep(50) end :ok end defp do_cursor_left() do @lcd_shift |> set_config(@lcd_move_cursor) |> set_config(@lcd_move_left) |> send_lcd_cmd end @doc """ Turn off the underline cursor """ def cursor_off(%{display_control: display_control} = config) do new_display_control = display_control |> set_config(@lcd_display_control) |> set_rev_config(@lcd_cursor_on) send_lcd_cmd(new_display_control) {:ok, RGBLCD.Config.update_display_control(config, new_display_control)} end @doc """ Turn on the underline cursor """ def cursor_on(%{display_control: display_control} = config) do new_display_control = display_control |> set_config(@lcd_display_control) |> set_config(@lcd_cursor_on) send_lcd_cmd(new_display_control) {:ok, RGBLCD.Config.update_display_control(config, new_display_control)} end @doc """ Moves cursor to the right. Accepts spaces (integer), defaults to 1. """ def cursor_right(spaces \\ 1) do for _num <- 1..spaces do do_cursor_right() Process.sleep(50) end :ok end defp do_cursor_right() do @lcd_shift |> set_config(@lcd_move_cursor) |> set_config(@lcd_move_right) |> send_lcd_cmd end @doc """ Turns display on quickly """ def display_on(%{display_control: display_control} = config) do new_display_control = display_control |> set_config(@lcd_display_control) |> set_config(@lcd_display_on) send_lcd_cmd(new_display_control) {:ok, RGBLCD.Config.update_display_control(config, new_display_control)} end @doc """ Turns display off quickly """ def display_off(%{display_control: display_control} = config) do new_display_control = display_control |> set_config(@lcd_display_control) |> set_rev_config(@lcd_display_on) send_lcd_cmd(new_display_control) {:ok, RGBLCD.Config.update_display_control(config, new_display_control)} end @doc """ Returns a GrovePi.RGBLCD.Config struct with default configuration. - 2 Line - 8 bit mode - 5x8 dots - display on - cursor off - blink off - increment after entry (right to left) - display shift (autoscroll) off """ def get_default_config() do function_config = @lcd_function |> set_config(@lcd_2_line) |> set_config(@lcd_8_bit_mode) |> set_config(@lcd_5x8_dots) display_control_config = @lcd_display_control |> set_config(@lcd_display_on) |> set_config(@lcd_cursor_off) |> set_config(@lcd_blink_off) entry_mode_config = @lcd_entry_mode |> set_config(@lcd_increment_after_entry) |> set_config(@lcd_display_shift_off) %RGBLCD.Config{} |> RGBLCD.Config.update_function(function_config) |> RGBLCD.Config.update_display_control(display_control_config) |> RGBLCD.Config.update_entry_mode(entry_mode_config) end @doc """ Sets cursor position to zero """ def home() do send_lcd_cmd(@lcd_return_home) # cmd takes a long time C++ library slept for 2000 Process.sleep(50) end @doc """ Initializes the LCD Display. Returns tuple with :ok, and %GrovePi.RGBLCD.Config{} with initial configuration. """ def initialize() do clear_display() config = get_default_config() send_lcd_cmd(config.function) send_lcd_cmd(config.display_control) send_lcd_cmd(config.entry_mode) # backlit init send_rgb(@reg_mode1, 0) # set LEDs controllable by both PWM and GRPPWM registers send_rgb(@reg_output, 0xFF) # set reg_mode2 values # 0010 0000 -> 0x20 (DMBLNK to 1, ie blinky mode) send_rgb(@reg_mode2, 0x20) set_color_white() {:ok, config} end @doc """ Scroll display left. 
Accepts spaces (integer) as an argument, defaults to 1. """ def scroll_left(spaces \\ 1) do for _num <- 1..spaces do do_scroll_left() Process.sleep(50) end :ok end defp do_scroll_left() do @lcd_shift |> set_config(@lcd_move_display) |> set_config(@lcd_move_left) |> send_lcd_cmd end @doc """ Scroll display right. Accepts spaces (integer) as an argument, defaults to 1. """ def scroll_right(spaces \\ 1) do for _num <- 1..spaces do do_scroll_right() Process.sleep(50) end :ok end defp do_scroll_right() do @lcd_shift |> set_config(@lcd_move_display) |> set_config(@lcd_move_right) |> send_lcd_cmd end @doc """ Sets display to white (255 for Red, Green, and Blue) """ def set_color_white() do set_rgb(255, 255, 255) end @doc """ Sets cursor given row and value. Home position is (0, 0). """ def set_cursor(0, col) do @lcd_set_dd_ram_addr |> set_config(col) |> send_lcd_cmd end def set_cursor(_row, col) do row2 = 0x40 @lcd_set_dd_ram_addr |> set_config(col) |> set_config(row2) |> send_lcd_cmd end @doc """ Sets the red, green, and blue values for a RGB LCD Display. Accepts an integer from 0 - 255 for each color. """ def set_rgb(red, green, blue) do send_rgb(@reg_red, red) send_rgb(@reg_green, green) send_rgb(@reg_blue, blue) end @doc """ Updates the text on a RGB LCD Display. Deletes existing text. """ def set_text(text) do clear_display() send_chars(text) end @doc """ Set text flow from left to right """ def text_left_to_right(%{entry_mode: entry_mode} = config) do new_entry_mode = entry_mode |> set_config(@lcd_entry_mode) |> set_config(@lcd_increment_after_entry) send_lcd_cmd(new_entry_mode) {:ok, RGBLCD.Config.update_entry_mode(config, new_entry_mode)} end @doc """ Set text flow from right to left """ def text_right_to_left(%{entry_mode: entry_mode} = config) do new_entry_mode = entry_mode |> set_config(@lcd_entry_mode) |> set_rev_config(@lcd_increment_after_entry) send_lcd_cmd(new_entry_mode) {:ok, RGBLCD.Config.update_entry_mode(config, new_entry_mode)} end @doc """ Write text at cursor. Does not delete existing text. """ def write_text(text) do send_chars(text) end defp send_chars(<<>>), do: :ok defp send_chars(<<?\n, rest::binary>>) do set_cursor(1, 0) send_chars(rest) end defp send_chars(<<text, rest::binary>>) do send_lcd_write(text) send_chars(rest) end @doc false def send_lcd_cmd(cmd) do Board.i2c_write_device(@lcd_address, <<0x80, cmd>>) end @doc false def send_lcd_write(text) do Board.i2c_write_device(@lcd_address, <<0x40, text>>) end @doc false def send_rgb(address, value) do Board.i2c_write_device(@rgb_address, <<address, value>>) end defp set_config(config, addl_config) do config ||| addl_config end defp set_rev_config(config, addl_config) do config &&& ~~~addl_config end end
lib/grovepi/rgblcd.ex
0.766337
0.580352
rgblcd.ex
starcoder
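# Follow-on sketch to the GrovePi.RGBLCD moduledoc example above: enable a
# blinking cursor, write two rows, then scroll. Assumes a display wired to
# I2C-1 as documented.
{:ok, config} = GrovePi.RGBLCD.initialize()
{:ok, _config} = GrovePi.RGBLCD.cursor_blink_on(config)

GrovePi.RGBLCD.set_text("hello\nworld") # "\n" jumps to row 1 via set_cursor/2
GrovePi.RGBLCD.scroll_left(4)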
defmodule SoftBank.Entry do
  @moduledoc """
  Entries are the recording of account debits and credits and can be considered
  as constituting a traditional accounting journal.
  """

  @type t :: %__MODULE__{
          description: String.t(),
          date: DateTime.t()
        }

  use Ecto.Schema
  import Ecto.Changeset

  alias SoftBank.Amount
  alias SoftBank.Config
  alias SoftBank.Entry

  schema "softbank_entries" do
    field(:description, :string)
    field(:date, :utc_datetime_usec)
    has_many(:amounts, SoftBank.Amount, on_delete: :delete_all)

    timestamps()
  end

  @fields ~w(description date)a

  @doc """
  Creates a changeset for `SoftBank.Entry`, validating a required `:description`
  and `:date`, casting any provided "debit" and "credit" `SoftBank.Amount`s, and
  validating that those amounts balance.
  """
  def changeset(model, params \\ %{}, default_currency \\ :USD) do
    model
    |> cast(params, @fields)
    |> validate_required([:description, :date])
    |> cast_assoc(:amounts)
    |> validate_debits_and_credits_balance(default_currency)
  end

  @doc """
  Accepts and returns a changeset, appending an error if "credit" and "debit"
  amounts are not equivalent.
  """
  def validate_debits_and_credits_balance(changeset, default_currency \\ :USD) do
    amounts = Ecto.Changeset.get_field(changeset, :amounts)

    types = Enum.group_by(amounts, fn i -> i.type end)
    credits = Enum.group_by(Map.get(types, "credit", []), fn i -> i.amount.amount end)
    debits = Enum.group_by(Map.get(types, "debit", []), fn i -> i.amount.amount end)

    default_amount = Money.new!(default_currency, 0)

    credit_sum =
      Enum.reduce(credits, default_amount, fn {_, i}, acc ->
        amt = List.first(i)
        {:ok, sum} = Money.add(amt.amount, acc)
        sum
      end)

    debit_sum =
      Enum.reduce(debits, default_amount, fn {_, i}, acc ->
        amt = List.first(i)
        {:ok, sum} = Money.add(amt.amount, acc)
        sum
      end)

    if credit_sum == debit_sum do
      changeset
    else
      add_error(changeset, :amounts, "Credit and Debit amounts must be equal")
    end
  end

  @doc """
  Accepts an `SoftBank.Entry` and `Ecto.Repo` and returns true/false based on
  whether the associated amounts for that entry sum to zero.
  """
  @spec balanced?(Ecto.Repo.t(), SoftBank.Entry.t(), atom()) :: boolean()
  def balanced?(repo \\ Config.repo(), entry = %Entry{}, default_currency \\ :USD) do
    credits =
      Amount
      |> Amount.for_entry(entry)
      |> Amount.select_type("credit")
      |> repo.all

    debits =
      Amount
      |> Amount.for_entry(entry)
      |> Amount.select_type("debit")
      |> repo.all

    default_amount = Money.new(default_currency, 0)

    credit_sum =
      Enum.reduce(credits, default_amount, fn i, acc ->
        {:ok, sum} = Money.add(i.amount, acc)
        sum
      end)

    debit_sum =
      Enum.reduce(debits, default_amount, fn i, acc ->
        {:ok, sum} = Money.add(i.amount, acc)
        sum
      end)

    credit_sum == debit_sum
  end
end
lib/repos/Bank/Entry.ex
0.821689
0.505615
Entry.ex
starcoder
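# Minimal sketch of a balanced entry changeset for the module above; the shape
# of the nested amount params is an assumption inferred from the Amount
# association, not taken from its schema.
params = %{
  description: "transfer",
  date: DateTime.utc_now(),
  amounts: [
    %{type: "debit", amount: Money.new(:USD, 100)},
    %{type: "credit", amount: Money.new(:USD, 100)}
  ]
}

%Ecto.Changeset{valid?: true} = SoftBank.Entry.changeset(%SoftBank.Entry{}, params)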
defmodule ChessApp.Chess.Move do
  defstruct [:to, :from, :promotion, :chesspiece, :special, :side, :capture]

  alias ChessApp.Chess.Board
  alias ChessApp.Chess.Move
  import ChessApp.Chess.Board.Macros

  def from_algebraic_notation(an, board = %Board{}) do
    codes =
      an
      |> String.downcase()
      |> String.codepoints()

    with {:ok, {from, to, promotion}} <- parse_codes(codes) do
      build_move(board, from, to, promotion)
    else
      {:error, :invalid_format} -> {:error, :invalid_format}
    end
  end

  defp parse_codes([from_file, from_rank, "x", to_file, to_rank])
       when is_file(from_file) and is_rank(from_rank) and is_file(to_file) and is_rank(to_rank) do
    parse_codes([from_file, from_rank, to_file, to_rank])
  end

  defp parse_codes([from_file, from_rank, "x", to_file, to_rank, promote])
       when is_file(from_file) and is_rank(from_rank) and is_file(to_file) and is_rank(to_rank) do
    parse_codes([from_file, from_rank, to_file, to_rank, promote])
  end

  defp parse_codes([from_file, from_rank, to_file, to_rank])
       when is_file(from_file) and is_rank(from_rank) and is_file(to_file) and is_rank(to_rank) do
    {:ok, {Board.name_to_square!(from_file, from_rank), Board.name_to_square!(to_file, to_rank), nil}}
  end

  defp parse_codes([from_file, from_rank, to_file, to_rank, promote])
       when is_file(from_file) and is_rank(from_rank) and is_file(to_file) and is_rank(to_rank) do
    {:ok, {_color, promote_piece}} = Board.parse_piece(promote)

    {:ok,
     {Board.name_to_square!(from_file, from_rank), Board.name_to_square!(to_file, to_rank),
      promote_piece}}
  end

  defp parse_codes(_other) do
    {:error, :invalid_format}
  end

  defp build_move(board = %Board{active: active, enpassant: enpassant}, from, to, promotion)
       when is_integer(from) and is_integer(to) do
    case Board.at!(board, from) do
      {^active, piece} ->
        move = %Move{
          to: to,
          from: from,
          promotion: promotion,
          chesspiece: piece,
          side: active,
          special: special_kind({active, piece}, from, to, promotion, enpassant)
        }

        case Board.at!(board, to) do
          {^active, _piece} ->
            {:error, :invalid_move, "Can't capture your own piece"}

          :empty ->
            capture = move.special == :enpassant
            {:ok, %{move | capture: capture}}

          {_, _piece} ->
            {:ok, %{move | capture: true}}
        end

      :empty ->
        {:error, :invalid_move, "No piece to move on that square."}

      {_other_color, _piece} ->
        {:error, :invalid_move, "Can't move a piece that isn't yours!"}
    end
  end

  defp special_kind({_, :pawn}, _from, to, nil, to), do: :enpassant
  defp special_kind({_, :pawn}, _, _, promoted, _) when not is_nil(promoted), do: :promotion
  defp special_kind({:white, :king}, 5, 7, nil, _), do: :castle
  defp special_kind({:white, :king}, 5, 3, nil, _), do: :castle
  defp special_kind({:black, :king}, 61, 63, nil, _), do: :castle
  defp special_kind({:black, :king}, 61, 59, nil, _), do: :castle
  defp special_kind(_, _, _, nil, _), do: :normal
end
lib/chess_app/chess/move.ex
0.602412
0.542379
move.ex
starcoder
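# Illustrative call into the parser above; `board` must be a ChessApp.Chess.Board
# value with white to move, which is assumed here rather than constructed.
{:ok, move} = ChessApp.Chess.Move.from_algebraic_notation("e2e4", board)
# move.chesspiece == :pawn and move.special == :normal for a plain pawn push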
defmodule Game.Lobby do
  @moduledoc """
  Game lobby
  """

  alias Game.{
    Lobby,
    Settings
  }

  @pids 2

  @type t :: %Lobby{
          uuid: String.t(),
          turn: integer(),
          user: User.t(),
          status: :creating | :waiting | :playing | :finished,
          settings: Settings.t(),
          game: Game.t(),
          pids: %{},
          users: list()
        }

  @derive {Jason.Encoder, except: [:pids]}
  defstruct uuid: "",
            status: :creating,
            turn: 0,
            user: nil,
            settings: %Settings{},
            game: %Game{},
            pids: %{},
            users: []

  @spec joinable?(Lobby.t(), User.t()) :: boolean()
  def joinable?(state, user) do
    Map.has_key?(state.pids, user.uuid) || Enum.count(state.pids) < @pids
  end

  @spec stale?(Lobby.t()) :: boolean()
  def stale?(state) do
    Enum.empty?(state.pids)
  end

  @spec startable?(Lobby.t()) :: boolean()
  def startable?(state) do
    state.status != :playing && Enum.count(state.pids) == @pids &&
      Enum.all?(state.users, & &1.ready)
  end

  @spec turn?(Lobby.t(), pid()) :: boolean()
  def turn?(state, pid) do
    %{user: %User{uuid: uuid}} = Game.Turn.get_player(state.game)

    Map.get(state.pids, uuid) == pid
  end

  @spec auto_turn?(Lobby.t()) :: boolean()
  def auto_turn?(state) do
    state.game.settings.phases
    |> Map.get(state.game.phase)
    |> Map.get(:auto, false)
  end

  @spec try_to_start(Lobby.t()) :: Lobby.t()
  def try_to_start(state) do
    if Game.Lobby.startable?(state) do
      Game.Lobby.start(state)
    else
      state
    end
  end

  @spec update_status(Lobby.t()) :: Lobby.t()
  def update_status(%{game: %{winner: 0}} = state), do: state

  def update_status(state) do
    state.pids
    |> Map.values()
    |> Enum.reduce(state, &toggle_ready(&2, &1))
    |> Map.put(:status, :finished)
  end

  @spec toggle_ready(Lobby.t(), pid()) :: Lobby.t()
  def toggle_ready(state, pid) do
    user = get_user(state, pid)

    update_user(state, %{"ready" => !user.ready}, pid)
  end

  @spec update_user(Lobby.t(), map(), pid()) :: Lobby.t()
  def update_user(state, attrs, pid) do
    user = get_user(state, pid)
    updated_user = User.update(user, attrs)
    User.Store.update(updated_user)

    Map.put(
      state,
      :users,
      state.users
      |> Enum.map(&update_user(&1, updated_user))
    )
  end

  @spec update_settings(Lobby.t(), map) :: Lobby.t()
  def update_settings(state, settings) do
    new_settings =
      state.settings
      |> Map.put(:health, Map.get(settings, "health", state.settings.health))
      |> Map.put(:dices, Map.get(settings, "dices", state.settings.dices))
      |> Map.put(:favors, Map.get(settings, "favors", state.settings.favors))
      |> Map.put(:tokens, Map.get(settings, "tokens", state.settings.tokens))

    %{state | settings: new_settings}
  end

  @spec turn(Lobby.t(), pid()) :: integer()
  def turn(state, pid) do
    state.game.players
    |> Enum.find({0, nil}, fn {_index, player} ->
      Map.get(state.pids, player.user.uuid) == pid
    end)
    |> elem(0)
  end

  @spec join(Lobby.t(), User.t(), pid()) :: Lobby.t()
  def join(state, user, pid) do
    %{
      state
      | pids: Map.put(state.pids, user.uuid, pid),
        users: state.users ++ [user]
    }
  end

  @spec leave(Lobby.t(), pid()) :: Lobby.t()
  def leave(state, leaving_pid) do
    {uuid, _pid} =
      Enum.find(state.pids, {"", nil}, fn {_uuid, pid} ->
        pid == leaving_pid
      end)

    %{
      state
      | pids: Map.delete(state.pids, uuid),
        users: Enum.reject(state.users, fn user -> user.uuid == uuid end)
    }
  end

  @spec start(Lobby.t()) :: Lobby.t()
  def start(state) do
    %{state | status: :playing, game: Game.start(state.users, state.settings)}
  end

  @spec notify_pids(Lobby.t()) :: :ok
  def notify_pids(state) do
    state.pids
    |> Enum.map(fn {_uuid, pid} ->
      Process.send(pid, state, [])
    end)

    :ok
  end

  @spec get_user(Lobby.t(), pid()) :: User.t() | nil
  def get_user(state, pid) do
    state.users
    |> Enum.find(fn user -> Map.get(state.pids, user.uuid) == pid end)
  end

  defp update_user(%{uuid: uuid}, %{uuid: new_uuid} = new_user) when uuid == new_uuid do
    new_user
  end

  defp update_user(user, _new_user), do: user
end
src/server/lib/game/lobby.ex
0.621656
0.436862
lobby.ex
starcoder
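# Sketch of the lobby lifecycle with the functions above; `user` and `peer` are
# hypothetical %User{} values and the pids are the joining processes.
lobby = %Game.Lobby{uuid: "abc"}
lobby = Game.Lobby.join(lobby, user, self())

Game.Lobby.joinable?(lobby, peer) # true while fewer than 2 pids are present
lobby |> Game.Lobby.try_to_start() |> Game.Lobby.notify_pids()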
defmodule Bolt.Cogs.Role.Allow do
  @moduledoc false

  @behaviour Nosedrum.Command

  alias Bolt.Converters
  alias Bolt.Helpers
  alias Bolt.Humanizer
  alias Bolt.ModLog
  alias Bolt.Repo
  alias Bolt.Schema.SelfAssignableRoles
  alias Nosedrum.Predicates
  alias Nostrum.Api

  @impl true
  def usage, do: ["role allow <role:role...>"]

  @impl true
  def description,
    do: """
    Allow self-assignment of the given role.
    Self-assignable roles are special roles that can be assigned by members through bot commands.
    Requires the `MANAGE_ROLES` permission.

    **Examples**:
    ```rs
    // allow self-assignment of the 'Movie Nighter' role
    role allow movie nighter
    ```
    """

  @impl true
  def predicates,
    do: [&Predicates.guild_only/1, Predicates.has_permission(:manage_roles)]

  @impl true
  def parse_args(args), do: Enum.join(args, " ")

  @impl true
  def command(msg, "") do
    response = "ℹ️ usage: `role allow <role:role>`"
    {:ok, _msg} = Api.create_message(msg.channel_id, response)
  end

  def command(msg, role_name) do
    response =
      case Converters.to_role(msg.guild_id, role_name, true) do
        {:ok, role} ->
          existing_row = Repo.get(SelfAssignableRoles, msg.guild_id)

          cond do
            existing_row == nil ->
              new_row = %{
                guild_id: msg.guild_id,
                roles: [role.id]
              }

              changeset = SelfAssignableRoles.changeset(%SelfAssignableRoles{}, new_row)
              {:ok, _created_row} = Repo.insert(changeset)

              ModLog.emit(
                msg.guild_id,
                "CONFIG_UPDATE",
                "#{Humanizer.human_user(msg.author)} added" <>
                  " #{Humanizer.human_role(msg.guild_id, role)} to self-assignable roles"
              )

              "👌 role #{Humanizer.human_role(msg.guild_id, role)} is now self-assignable"

            role.id in existing_row.roles ->
              "🚫 role #{Humanizer.human_role(msg.guild_id, role)} is already self-assignable"

            true ->
              updated_row = %{
                roles: existing_row.roles ++ [role.id]
              }

              changeset = SelfAssignableRoles.changeset(existing_row, updated_row)
              {:ok, _updated_row} = Repo.update(changeset)

              ModLog.emit(
                msg.guild_id,
                "CONFIG_UPDATE",
                "#{Humanizer.human_user(msg.author)} added" <>
                  " #{Humanizer.human_role(msg.guild_id, role)} to self-assignable roles"
              )

              "👌 role #{Humanizer.human_role(msg.guild_id, role)} is now self-assignable"
          end

        {:error, reason} ->
          "🚫 cannot convert `#{Helpers.clean_content(role_name)}` to `role`: #{reason}"
      end

    {:ok, _msg} = Api.create_message(msg.channel_id, response)
  end
end
lib/bolt/cogs/role/allow.ex
0.852091
0.522507
allow.ex
starcoder
defmodule Liberator.Trace do import Plug.Conn require Logger @moduledoc """ Decision tracing functions. """ @doc """ Get the log of all decisions made on the given conn. The trace is a list of maps, each map corresponding to one call to a decision function. Each map has the following keys: - `:step`: the name of the function that was executed, or the atoms `:start` or `:stop` - `:result`: the value the function returned - `:timestamp`: the time the function was called - `:duration`: how long the call took, in native time units """ @doc since: "1.1" def get_trace(conn) do Map.get(conn.private, :liberator_trace, []) end @doc false def start(conn, start_time) do first_trace = [ %{ step: :start, timestamp: start_time } ] put_private(conn, :liberator_trace, first_trace) end @doc false def update_trace(conn, next_step, result, called_at, duration) do current_trace = get_trace(conn) updated_trace = current_trace ++ [ %{ step: next_step, result: result, timestamp: called_at, duration: duration } ] put_private(conn, :liberator_trace, updated_trace) end @doc false def stop(conn, end_time) do current_trace = get_trace(conn) updated_trace = current_trace ++ [ %{ step: :stop, timestamp: end_time } ] put_private(conn, :liberator_trace, updated_trace) end @doc """ Get a list of tuples for the `x-liberator-trace` header, based on the given trace. """ @doc since: "1.3" def headers(trace) do trace # remove :start and :stop traces |> Enum.slice(1, Enum.count(trace) - 2) |> Enum.map(fn %{step: key, result: val, duration: duration_native} -> duration_us = System.convert_time_unit(duration_native, :native, :microsecond) {"x-liberator-trace", "#{Atom.to_string(key)}: #{inspect(val)} (took #{duration_us} µs)"} end) end @doc """ Log a message containing the given trace, along with its request path and optional request ID. """ @doc since: "1.3" def log(trace, request_path, request_id \\ nil) do trace = trace # remove :start and :stop traces |> Enum.slice(1, Enum.count(trace) - 2) |> Enum.with_index() |> Enum.map(fn {%{step: key, duration: duration_native} = trace, index} -> val = Map.get(trace, :result, nil) duration_us = System.convert_time_unit(duration_native, :native, :microsecond) " #{index + 1}. #{Atom.to_string(key)}: #{inspect(val)} (took #{duration_us} µs)" end) |> Enum.join("\n") header = if request_id do "Liberator trace for request #{inspect(request_id)} to #{request_path}:\n\n" else "Liberator trace for request to #{request_path}:\n\n" end Logger.debug(header <> trace) end end
lib/liberator/trace.ex
0.73659
0.527986
trace.ex
starcoder
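# Sketch of surfacing a decision trace after a request with the module above;
# assumes `conn` passed through a Liberator resource so :liberator_trace is
# populated in conn.private.
trace = Liberator.Trace.get_trace(conn)

Liberator.Trace.headers(trace) # [{"x-liberator-trace", "..."}, ...]
Liberator.Trace.log(trace, conn.request_path)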
defmodule RDF.XSD.Datatype.Primitive do
  @moduledoc """
  Macros for the definition of primitive XSD datatypes.
  """

  @doc """
  Specifies the applicability of the given XSD `facet` on a primitive datatype.

  For a facet with the name `example_facet` this requires a function

      def example_facet_conform?(example_facet_value, literal_value, lexical) do

      end

  to be defined on the primitive datatype.
  """
  defmacro def_applicable_facet(facet) do
    quote do
      @applicable_facets unquote(facet)
      use unquote(facet)
    end
  end

  defmacro __using__(opts) do
    quote do
      use RDF.XSD.Datatype, unquote(opts)

      import unquote(__MODULE__)

      Module.register_attribute(__MODULE__, :applicable_facets, accumulate: true)

      @impl RDF.XSD.Datatype
      def primitive?, do: true

      @impl RDF.XSD.Datatype
      def base, do: nil

      @impl RDF.XSD.Datatype
      def base_primitive, do: __MODULE__

      @impl RDF.XSD.Datatype
      def derived_from?(_), do: false

      @impl RDF.XSD.Datatype
      def init_valid_lexical(value, lexical, opts)
      def init_valid_lexical(_value, nil, _opts), do: nil
      def init_valid_lexical(_value, lexical, _opts), do: lexical

      @impl RDF.XSD.Datatype
      def init_invalid_lexical(value, _opts), do: to_string(value)

      @doc false
      # Optimization: facets are generally unconstrained on primitives
      def facet_conform?(_, _), do: true

      @impl RDF.XSD.Datatype
      def canonical_mapping(value), do: to_string(value)

      @impl RDF.Literal.Datatype
      def do_cast(value) do
        if datatype?(value) do
          # i.e. derived datatype
          build_valid(value.value, value.uncanonical_lexical, [])
        end
      end

      @impl RDF.Literal.Datatype
      def do_equal_value_same_or_derived_datatypes?(%left_datatype{} = left, %right_datatype{} = right) do
        left_datatype.value(left) == right_datatype.value(right)
      end

      @impl RDF.Literal.Datatype
      def do_equal_value_different_datatypes?(_left, _right), do: nil

      @impl RDF.Literal.Datatype
      def do_compare(%left_datatype{} = left, %right_datatype{} = right) do
        if left_datatype.datatype?(right_datatype) or right_datatype.datatype?(left_datatype) do
          case {left_datatype.value(left), right_datatype.value(right)} do
            {left_value, right_value} when left_value < right_value -> :lt
            {left_value, right_value} when left_value > right_value -> :gt
            _ -> if left_datatype.equal_value?(left, right), do: :eq
          end
        end
      end

      def do_compare(_, _), do: nil

      defoverridable canonical_mapping: 1,
                     do_cast: 1,
                     init_valid_lexical: 3,
                     init_invalid_lexical: 2,
                     do_equal_value_same_or_derived_datatypes?: 2,
                     do_equal_value_different_datatypes?: 2,
                     do_compare: 2

      @before_compile unquote(__MODULE__)
    end
  end

  defmacro __before_compile__(_env) do
    quote do
      @impl RDF.XSD.Datatype
      def applicable_facets, do: @applicable_facets
    end
  end
end
lib/rdf/xsd/datatype/primitive.ex
0.910545
0.637976
primitive.ex
starcoder
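# A heavily hedged sketch of adopting the primitive datatype macro above. The
# `use` options (:name and :id) and the callback names below are assumptions
# about RDF.XSD.Datatype's interface, and the whole module is hypothetical.
defmodule MyXSD.Token do
  use RDF.XSD.Datatype.Primitive,
    name: "token",
    id: "http://example.com/datatypes/token"

  @impl RDF.XSD.Datatype
  def lexical_mapping(lexical, _opts), do: String.trim(lexical)

  @impl RDF.XSD.Datatype
  def elixir_mapping(value, _opts), do: to_string(value)
end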
defprotocol Nestru.Decoder do
  @fallback_to_any true

  @doc """
  Returns the hint of how to decode the struct fields.

  The first argument is an empty struct value adopting the protocol.

  The second argument is the context value given to the `Nestru.from_map/3` function call.

  The third argument is a map to be decoded into the struct.
  The map is useful to generate a hint for fields that have a dynamic struct type.

  If the function returns `{:ok, map}` then the `map`'s key-value pairs specify
  the decoding hint for a field with the key name and the value configuring the following:

    * A module's atom specifies that the appropriate field's value should be decoded
      as a nested struct defined in the module. Each field of the nested struct
      will be decoded recursively.

    * An anonymous function with arity 1 specifies that the appropriate field's value
      should be returned from the function. The function's only argument is the value
      from the map to be decoded, and it is expected to return `{:ok, term}`,
      `{:error, %{message: term, path: list}}`, or `{:error, term}`.

  Any field without a matching key in the hint `map` receives its value as-is.

  The `%{}` empty `map` value defines that all fields of the struct take their
  values from the decoded map unmodified.

  If the function returns `{:ok, nil}` then the decoded struct's value is nil.

  If the function returns an `{:error, message}` tuple, then decoding stops, and
  the error is bypassed to the caller. Any other return value raises an error.

  To generate the implementation of the function for the given struct automatically,
  set the `@derive` attribute to a tuple of #{__MODULE__} and the `map` to be returned.

  ## Examples

      defmodule FruitBox do
        defstruct [:items]

        # Give a function to decode the list field as a hint
        defimpl Nestru.Decoder do
          def from_map_hint(_value, _context, _map) do
            {:ok, %{items: &Nestru.from_list_of_maps(&1, FruitBox.Fruit)}}
          end
        end
      end

      # Generate the implementation by deriving the protocol
      defmodule FruitBox.Fruit do
        @derive {
          Nestru.Decoder,
          %{
            vitamins: &__MODULE__.decode_vitamins/1,
            energy: FruitEnergy
          }
        }

        def decode_vitamins(value), do: Nestru.from_list_of_maps(value, Vitamin)

        defstruct [:vitamins, :energy]
      end
  """
  def from_map_hint(value, context, map)
end

defimpl Nestru.Decoder, for: Any do
  defmacro __deriving__(module, _struct, opts) do
    opts =
      cond do
        opts == [] ->
          %{}

        is_map(opts) ->
          opts

        true ->
          raise "Nestru.Decoder protocol should be derived with a map, see from_map_hint/3 docs for details."
      end

    hint_map = Macro.escape(opts)

    quote do
      defimpl Nestru.Decoder, for: unquote(module) do
        def from_map_hint(_value, _context, _map) do
          {:ok, unquote(hint_map)}
        end
      end
    end
  end

  def from_map_hint(%module{} = _value, _context, _map) do
    raise "Please @derive the Nestru.Decoder protocol before the defstruct/1 call in #{inspect(module)} or defimpl the protocol in the module explicitly to support decoding from a map."
  end
end
lib/nestru/decoder.ex
0.883519
0.870927
decoder.ex
starcoder
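# Sketch of decoding with the FruitBox hint from the moduledoc above; the input
# map shape is illustrative, and Nestru.from_map/2 is assumed to default the
# context argument of from_map/3.
{:ok, %FruitBox{items: [%FruitBox.Fruit{} | _]}} =
  Nestru.from_map(%{"items" => [%{"vitamins" => [], "energy" => %{}}]}, FruitBox)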
defmodule AshPolicyAuthorizer.Checker do @moduledoc """ Determines if a set of authorization requests can be met or not. To read more about boolean satisfiability, see this page: https://en.wikipedia.org/wiki/Boolean_satisfiability_problem. At the end of the day, however, it is not necessary to understand exactly how Ash takes your authorization requirements and determines if a request is allowed. The important thing to understand is that Ash may or may not run any/all of your authorization rules as they may be deemed unnecessary. As such, authorization checks should have no side effects. Ideally, the checks built-in to ash should cover the bulk of your needs. """ alias AshPolicyAuthorizer.Policy alias AshPolicyAuthorizer.Policy.Check def strict_check_facts(%{policies: policies} = authorizer) do Enum.reduce(policies, authorizer.facts, &do_strict_check_facts(&1, authorizer, &2)) end defp do_strict_check_facts(%Policy{} = policy, authorizer, facts) do facts = policy.condition |> List.wrap() |> Enum.reduce(facts, fn {check_module, opts}, facts -> do_strict_check_facts( %Check{check_module: check_module, check_opts: opts}, authorizer, facts ) end) Enum.reduce(policy.policies, facts, &do_strict_check_facts(&1, authorizer, &2)) end defp do_strict_check_facts(%AshPolicyAuthorizer.Policy.Check{} = check, authorizer, facts) do check_module = check.check_module opts = check.check_opts case check_module.strict_check(authorizer.actor, authorizer, opts) do {:ok, boolean} when is_boolean(boolean) -> Map.put(facts, {check_module, opts}, boolean) {:ok, :unknown} -> facts other -> raise "Invalid return value from strict_check call #{check_module}.strict_check(actor, authorizer, #{ inspect(opts) }) - #{inspect(other)}" end end def find_real_scenarios(scenarios, facts) do Enum.filter(scenarios, fn scenario -> scenario_is_reality(scenario, facts) == :reality end) end defp scenario_is_reality(scenario, facts) do scenario |> Map.drop([true, false]) |> Enum.reduce_while(:reality, fn {{check_module, opts} = fact, requirement}, status -> if Keyword.has_key?(opts, :__auto_filter__) and AshPolicyAuthorizer.Check.defines_check?(check_module) do {:cont, status} else case Map.fetch(facts, fact) do {:ok, value} -> if value == requirement do {:cont, status} else {:halt, :not_reality} end :error -> {:cont, :maybe} end end end) end def strict_check_scenarios(authorizer) do case AshPolicyAuthorizer.Policy.solve(authorizer) do {:ok, scenarios} -> {:ok, scenarios} {:error, :unsatisfiable} -> {:error, :unsatisfiable} end end end
lib/ash_policy_authorizer/checker.ex
0.779196
0.40928
checker.ex
starcoder
defmodule Ecto.Query do @moduledoc ~S""" Fake Query module. """ @doc """ Creates a query. """ defmacro from(expr, kw \\ []) do {expr, kw} end @doc """ A select query expression. Selects which fields will be selected from the schema and any transformations that should be performed on the fields. Any expression that is accepted in a query can be a select field. ## Keywords examples from(c in City, select: c) # returns the schema as a struct from(c in City, select: {c.name, c.population}) from(c in City, select: [c.name, c.county]) It is also possible to select a struct and limit the returned fields at the same time: from(City, select: [:name]) The syntax above is equivalent to: from(city in City, select: struct(city, [:name])) ## Expressions examples City |> select([c], c) City |> select([c], {c.name, c.country}) City |> select([c], %{"name" => c.name}) """ defmacro select(query, binding \\ [], expr) do {query, binding, expr} end @doc """ Mergeable select query expression. This macro is similar to `select/3` except it may be specified multiple times as long as every entry is a map. This is useful for merging and composing selects. For example: query = from p in Post, select: %{} query = if include_title? do from p in query, select_merge: %{title: p.title} else query end query = if include_visits? do from p in query, select_merge: %{visits: p.visits} else query end In the example above, the query is built little by little by merging into a final map. If both conditions above are true, the final query would be equivalent to: from p in Post, select: %{title: p.title, visits: p.visits} If `:select_merge` is called and there is no value selected previously, it will default to the source, `p` in the example above. """ defmacro select_merge(query, binding \\ [], expr) do {query, binding, expr} end @doc """ A distinct query expression. When true, only keeps distinct values from the resulting select expression. ## Keywords examples # Returns the list of different categories in the Post schema from(p in Post, distinct: true, select: p.category) # If your database supports DISTINCT ON(), # you can pass expressions to distinct too from(p in Post, distinct: p.category, order_by: [p.date]) # The DISTINCT ON() also supports ordering similar to ORDER BY. from(p in Post, distinct: [desc: p.category], order_by: [p.date]) # Using atoms from(p in Post, distinct: :category, order_by: :date) ## Expressions example Post |> distinct(true) |> order_by([p], [p.category, p.author]) """ defmacro distinct(query, binding \\ [], expr) do {query, binding, expr} end end
test/support/plugins/ecto/query.ex
0.869507
0.653963
query.ex
starcoder
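# Because the macros above only echo their arguments (this is a test double),
# their expansion is easy to see with atoms; real bindings like [c] would be
# spliced in as plain AST here as well.
require Ecto.Query

Ecto.Query.select(:query, :binding, :expr)
# => {:query, :binding, :expr}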
defmodule Bonny.CRD do @moduledoc """ Represents the `spec` portion of a Kubernetes [CustomResourceDefinition](https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/) manifest. > The CustomResourceDefinition API resource allows you to define custom resources. Defining a CRD object creates a new custom resource with a name and schema that you specify. The Kubernetes API serves and handles the storage of your custom resource. """ alias Bonny.CRD @api_version "apiextensions.k8s.io/v1beta1" @kind "CustomResourceDefinition" @type names_t :: %{ kind: String.t(), singular: String.t(), plural: String.t(), short_names: nil | list(String.t()), version: String.t() } @typedoc "CRD Spec" @type t :: %__MODULE__{ scope: :namespaced | :cluster, group: String.t(), names: names_t, version: String.t() } @enforce_keys [:scope, :group, :names] defstruct scope: :namespaced, group: nil, names: nil, version: nil @doc "Plural name of CRD" @spec plural(Bonny.CRD.t()) :: binary def plural(%Bonny.CRD{names: %{plural: plural}}), do: plural @doc """ URL Path to list a CRD's resources *Namespaced CRD URL Path* /apis/bonny.example.om/v1/namespaces/default/widgets *Cluster Resource URL Path & `--all-namespaces` path* /apis/example.com/v1/widgets """ @spec list_path(Bonny.CRD.t()) :: binary def list_path(crd = %CRD{}), do: base_path(crd) @spec watch_path(Bonny.CRD.t(), String.t() | integer) :: binary def watch_path(crd = %CRD{}, resource_version) do "#{base_path(crd)}?resourceVersion=#{resource_version}&watch=true" end @doc """ URL path to read the specified CustomResourceDefinition *Namespaced CRD Resource URL Path* /apis/example.com/v1/namespaces/default/widgets/test-widget *Cluster CRD Resource URL Path & `--all-namespaces` path* /apis/example.com/v1/widgets/test-widget """ @spec read_path(Bonny.CRD.t(), String.t()) :: binary def read_path(crd = %CRD{}, name) do "#{base_path(crd)}/#{name}" end @doc """ Generates the map equivalent of the Kubernetes CRD YAML manifest ```yaml --- apiVersion: apiextensions.k8s.io/v1beta1 kind: CustomResourceDefinition metadata: creationTimestamp: null name: widgets.example.com spec: group: example.com names: kind: Widget plural: widgets scope: Namespaced version: v1 ``` """ @spec to_manifest(Bonny.CRD.t()) :: map def to_manifest(crd = %CRD{scope: scope}) do cased_scope = String.capitalize("#{scope}") %{ apiVersion: @api_version, kind: @kind, metadata: %{ name: "#{crd.names.plural}.#{crd.group}", labels: Bonny.Operator.labels() }, spec: %{crd | scope: cased_scope} } end defp base_path(%CRD{ scope: :namespaced, version: version, group: group, names: %{plural: plural} }) do "/apis/#{group}/#{version}/namespaces/#{Bonny.Config.namespace()}/#{plural}" end defp base_path(%CRD{scope: :cluster, version: version, group: group, names: %{plural: plural}}) do "/apis/#{group}/#{version}/#{plural}" end end
lib/bonny/crd.ex
0.841386
0.692239
crd.ex
starcoder
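# Illustrative cluster-scoped CRD spec for Bonny.CRD above and the paths it
# yields. Namespaced CRDs would also pull the namespace from
# Bonny.Config.namespace/0 at runtime.
crd = %Bonny.CRD{
  group: "example.com",
  version: "v1",
  scope: :cluster,
  names: %{kind: "Widget", singular: "widget", plural: "widgets", short_names: nil, version: "v1"}
}

Bonny.CRD.list_path(crd)            # => "/apis/example.com/v1/widgets"
Bonny.CRD.read_path(crd, "widget1") # => "/apis/example.com/v1/widgets/widget1"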
defmodule Saucexages.IO.SauceFile do
  @moduledoc """
  Functions for handling SAUCE files in the file system.

  Any devices passed are assumed to be file descriptors that are opened using `:read` and `:binary` at the minimum.

  SAUCE does not use UTF-8 files, so do not pass such devices or you risk incorrect behavior.
  """

  require Saucexages.Sauce
  require Logger
  alias Saucexages.{Sauce}
  alias Saucexages.IO.SauceBinary

  @type part :: {non_neg_integer(), non_neg_integer()}

  @doc """
  Returns the byte size of the contents in a file, before any potential SAUCE block.

  Contents is strictly defined as all data before a properly written SAUCE record and optionally a comments block.

  If there is no SAUCE data, the contents size is the actual size of the file.
  """
  @spec contents_size(File.io_device()) :: {:ok, non_neg_integer()} | {:error, term()}
  def contents_size(fd) do
    case :file.position(fd, :eof) do
      {:ok, pos} when pos >= Sauce.sauce_record_byte_size() ->
        calculate_contents_size(fd)

      {:ok, pos} ->
        {:ok, pos}

      {:error, _reason} = err ->
        err
    end
  end

  defp calculate_contents_size(fd) do
    # Here we calculate the contents size by walking backwards, first checking for the SAUCE, followed by any comments as specified by the SAUCE.
    # If there is garbage such as non-matched comments, we consider this content since we don't know what it actually is and cannot assume.
    with {:ok, sauce_offset} <- :file.position(fd, {:eof, -Sauce.sauce_record_byte_size()}),
         {:ok, sauce_record_bin} <- :file.read(fd, Sauce.sauce_record_byte_size()),
         :ok <- SauceBinary.verify_sauce_record(sauce_record_bin),
         {:ok, comment_lines} <- SauceBinary.comment_lines(sauce_record_bin),
         # The cursor reset itself after reading the SAUCE, so we need to account for the 128 bytes again
         comment_block_offset = Sauce.sauce_byte_size(comment_lines),
         comment_block_size = Sauce.comment_block_byte_size(comment_lines),
         {:ok, comments_offset} <- :file.position(fd, {:eof, -comment_block_offset}),
         {:ok, comments_bin} <- :file.read(fd, comment_block_size) do
      if SauceBinary.matches_comment_block?(comments_bin) do
        {:ok, comments_offset}
      else
        {:ok, sauce_offset}
      end
    else
      {:error, :no_sauce} -> :file.position(fd, :eof)
      {:error, _reason} = err -> err
      err -> {:error, {"Error reading contents.", err}}
    end
  end

  @doc """
  Splits a SAUCE file into parts by contents, and optionally SAUCE, and finally comments.

  Each part is a tuple of position and length. Parts will be returned in the following possible forms:

  * `{contents, sauce, comments}` - SAUCE with comments
  * `{contents, sauce}` - SAUCE with no comments
  * `{contents}` - No SAUCE

  Each part has the form `{position, length}`, where position is absolute within the file.
  """
  @spec split_parts(File.io_device()) ::
          {:ok, {part()}}
          | {:ok, {part(), part()}}
          | {:ok, {part(), part(), part()}}
          | {:error, term()}
  def split_parts(fd) do
    # TODO: Decide return format - may want a list of tuples instead, or fixed tuples with :no_sauce and :no_comments
    with {:ok, sauce_offset} <- :file.position(fd, {:eof, -Sauce.sauce_record_byte_size()}),
         {:ok, sauce_record_bin} <- :file.read(fd, Sauce.sauce_record_byte_size()),
         :ok <- SauceBinary.verify_sauce_record(sauce_record_bin),
         {:ok, comment_lines} <- SauceBinary.comment_lines(sauce_record_bin),
         # The cursor reset itself after reading the SAUCE, so we need to account for the 128 bytes again
         comment_block_offset = Sauce.sauce_byte_size(comment_lines),
         comment_block_size = Sauce.comment_block_byte_size(comment_lines),
         {:ok, comments_offset} <- :file.position(fd, {:eof, -comment_block_offset}),
         {:ok, comments_bin} <- :file.read(fd, comment_block_size) do
      if SauceBinary.matches_comment_block?(comments_bin) do
        {:ok,
         {{0, comments_offset},
          {comments_offset + comment_block_size, Sauce.sauce_record_byte_size()},
          {comments_offset, comment_block_size}}}
      else
        {:ok, {{0, sauce_offset}, {sauce_offset, Sauce.sauce_record_byte_size()}}}
      end
    else
      {:error, :no_sauce} ->
        case :file.position(fd, :eof) do
          {:ok, pos} -> {:ok, {{0, pos}}}
          {:error, _reason} = err -> err
        end

      {:error, _reason} = err ->
        err

      err ->
        {:error, {"Error reading contents.", err}}
    end
  end

  @doc """
  Reads a SAUCE file descriptor and returns the byte size of the SAUCE file from the file descriptor.
  """
  @spec read_byte_size(File.io_device()) :: {:ok, non_neg_integer()} | {:error, term()}
  def read_byte_size(fd) do
    # save the cursor position
    {:ok, cur} = :file.position(fd, :cur)

    try do
      :file.position(fd, :eof)
    after
      :file.position(fd, cur)
    end
  end

  @doc """
  Checks if the file descriptor is for a SAUCE.
  """
  @spec sauce?(File.io_device()) :: boolean()
  def sauce?(fd) do
    case extract_sauce_binary(fd) do
      {:ok, _sauce_bin} -> true
      _ -> false
    end
  end

  defp extract_sauce_binary(fd) do
    with {:ok, _offset} <- :file.position(fd, {:eof, -Sauce.sauce_record_byte_size()}),
         {:ok, sauce_record_bin} <- :file.read(fd, Sauce.sauce_record_byte_size()),
         :ok <- SauceBinary.verify_sauce_record(sauce_record_bin) do
      {:ok, sauce_record_bin}
    else
      {:error, _reason} = err -> err
      _ -> {:error, "Error reading SAUCE record."}
    end
  end
end
lib/saucexages/io/sauce_file.ex
0.706899
0.455804
sauce_file.ex
starcoder
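A minimal usage sketch for the `Saucexages.IO.SauceFile` module above. The path is a made-up example; it assumes a SAUCE-tagged file on disk and the `Saucexages` support modules compiled:

# Open read-only in binary mode, per the module's @moduledoc requirements.
{:ok, fd} = File.open("art/piece.ans", [:read, :binary])

# Byte size of everything before the SAUCE record and optional comment block.
{:ok, _size} = Saucexages.IO.SauceFile.contents_size(fd)

# Absolute {position, length} parts: contents, then SAUCE, then comments.
{:ok, _parts} = Saucexages.IO.SauceFile.split_parts(fd)

:ok = File.close(fd)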
import TypeClass

defclass Witchcraft.Comonad do
  @moduledoc """
  The dual of monads, `Comonad` brings an unwrapping function to `Extend`able data.

  Note that the unwrapping function (`extract`) *must return a value*, and is not
  available on many data structures that have an empty element. For example, there
  is no `Comonad` instance for `List` because we cannot pull a value out of `[]`.

  ## Type Class

  An instance of `Witchcraft.Comonad` must also implement `Witchcraft.Extend`,
  and define `Witchcraft.Comonad.extract/1`.

      Functor  [map/2]
         ↓
      Extend   [nest/1]
         ↓
      Comonad  [extract/1]
  """

  alias __MODULE__

  extend Witchcraft.Extend

  use Quark

  @type t :: any()

  defmacro __using__(opts \\ []) do
    quote do
      use Witchcraft.Extend, unquote(opts)
      import unquote(__MODULE__), unquote(opts)
    end
  end

  where do
    @doc """
    Extract a value out of some context / data structure.
    This is the opposite of `Witchcraft.Applicative.of/2`.

    ## Examples

        iex> extract({1, 2})
        2

        extract(%Id{id: 42})
        #=> 42
    """
    @spec extract(Comonad.t()) :: any()
    def extract(nested)
  end

  @doc """
  Alias for `extract/1`

  ## Examples

      iex> unwrap({1, 2})
      2

      unwrap(%Id{id: 42})
      #=> 42
  """
  @spec unwrap(Comonad.t()) :: any()
  def unwrap(nested), do: extract(nested)

  properties do
    def left_identity(data) do
      a = generate(data)

      a
      |> Witchcraft.Extend.extend(&Comonad.extract/1)
      |> equal?(a)
    end

    def right_identity(data) do
      a = generate(data)
      f = fn x -> x |> Comonad.extract() |> inspect() end

      a
      |> Witchcraft.Extend.extend(f)
      |> Comonad.extract()
      |> equal?(f.(a))
    end
  end
end

definst Witchcraft.Comonad, for: Tuple do
  custom_generator(_) do
    import TypeClass.Property.Generator, only: [generate: 1]
    {generate(nil), generate(nil)}
  end

  def extract(tuple) when tuple_size(tuple) >= 1 do
    elem(tuple, tuple_size(tuple) - 1)
  end
end
lib/witchcraft/comonad.ex
0.78345
0.503906
comonad.ex
starcoder
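A short sketch of the single instance defined above: `extract/1` on a tuple returns its last element. This assumes the `witchcraft`, `type_class`, and `quark` dependencies are available:

use Witchcraft.Comonad

extract({1, 2})
#=> 2

# Alias with identical behavior, per unwrap/1 above.
unwrap({:meta, "value"})
#=> "value"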
defmodule Nostrum.Cache.GuildCache do
  @moduledoc """
  Functions for retrieving guild states.
  """

  alias Nostrum.Cache.Guild.GuildServer
  alias Nostrum.Cache.Mapping.ChannelGuild
  alias Nostrum.Struct.Channel
  alias Nostrum.Struct.Guild
  alias Nostrum.Struct.Message
  alias Nostrum.Util

  import Nostrum.Struct.Snowflake, only: [is_snowflake: 1]

  @type clause ::
          {:id, Guild.id()}
          | {:channel_id, Channel.id()}
          | {:message, Message.t()}

  @type clauses :: [clause] | map

  @type selector :: (Guild.t() -> any)

  @type reason :: :id_not_found | :id_not_found_on_guild_lookup

  defguardp is_selector(term) when is_function(term, 1)

  @doc """
  Retrieves all `Nostrum.Struct.Guild` structs from the cache as a stream.
  """
  @spec all() :: Enum.t()
  def all, do: select_all(fn guild -> guild end)

  @doc """
  Selects values using a `selector` from all `Nostrum.Struct.Guild` in the cache.
  """
  @spec select_all(selector) :: Enum.t()
  def select_all(selector)

  def select_all(selector) when is_selector(selector) do
    Supervisor.which_children(GuildSupervisor)
    |> Stream.map(fn {_, pid, _, _} -> pid end)
    |> Task.async_stream(fn pid -> GenServer.call(pid, {:select, selector}) end)
    |> Stream.map(fn {:ok, value} -> value end)
  end

  @doc """
  Retrieves a single `Nostrum.Struct.Guild` from the cache via its `id`.

  Returns `{:error, reason}` if no result was found.

  ## Examples

  ```Elixir
  iex> Nostrum.Cache.GuildCache.get(0)
  {:ok, %Nostrum.Struct.Guild{id: 0}}

  iex> Nostrum.Cache.GuildCache.get(10)
  {:error, :id_not_found_on_guild_lookup}
  ```
  """
  @spec get(Guild.id()) :: {:ok, Guild.t()} | {:error, reason}
  def get(id) do
    select(id, fn guild -> guild end)
  end

  @doc ~S"""
  Same as `get/1`, but raises `Nostrum.Error.CacheError` in case of failure.
  """
  @spec get!(Guild.id()) :: Guild.t() | no_return
  def get!(id), do: get(id) |> Util.bangify_find(id, __MODULE__)

  @doc """
  Retrieves a single `Nostrum.Struct.Guild` where it matches the `clauses`.

  Returns `{:error, reason}` if no result was found.

  ```Elixir
  iex> Nostrum.Cache.GuildCache.get_by(id: 0)
  {:ok, %Nostrum.Struct.Guild{id: 0}}

  iex> Nostrum.Cache.GuildCache.get_by(%{id: 0})
  {:ok, %Nostrum.Struct.Guild{id: 0}}

  iex> Nostrum.Cache.GuildCache.get_by(id: 10)
  {:error, :id_not_found_on_guild_lookup}
  ```
  """
  @spec get_by(clauses) :: {:ok, Guild.t()} | {:error, reason}
  def get_by(clauses) do
    select_by(clauses, fn guild -> guild end)
  end

  @doc ~S"""
  Same as `get_by/1`, but raises `Nostrum.Error.CacheError` in case of failure.
  """
  @spec get_by!(clauses) :: Guild.t() | no_return
  def get_by!(clauses), do: get_by(clauses) |> Util.bangify_find(clauses, __MODULE__)

  @doc """
  Selects values using a `selector` from a `Nostrum.Struct.Guild`.

  Returns `{:error, reason}` if no result was found.

  ## Examples

  ```Elixir
  iex> Nostrum.Cache.GuildCache.select(0, fn guild -> guild.id end)
  {:ok, 0}

  iex> Nostrum.Cache.GuildCache.select(10, fn guild -> guild.id end)
  {:error, :id_not_found_on_guild_lookup}
  ```
  """
  @spec select(Guild.id(), selector) :: {:ok, any} | {:error, reason}
  def select(id, selector) do
    select_by(%{id: id}, selector)
  end

  @doc ~S"""
  Same as `select/2`, but raises `Nostrum.Error.CacheError` in case of failure.
  """
  @spec select!(Guild.id(), selector) :: any | no_return
  def select!(id, selector), do: select(id, selector) |> Util.bangify_find(id, __MODULE__)

  @doc """
  Selects values using a `selector` from a `Nostrum.Struct.Guild` that matches
  the `clauses`.

  Returns `{:error, reason}` if no result was found.

  ```Elixir
  iex> Nostrum.Cache.GuildCache.select_by([id: 0], fn guild -> guild.id end)
  {:ok, 0}

  iex> Nostrum.Cache.GuildCache.select_by(%{id: 0}, fn guild -> guild.id end)
  {:ok, 0}

  iex> Nostrum.Cache.GuildCache.select_by([id: 10], fn guild -> guild.id end)
  {:error, :id_not_found_on_guild_lookup}
  ```
  """
  @spec select_by(clauses, selector) :: {:ok, any} | {:error, reason}
  def select_by(clauses, selector)

  def select_by(clauses, selector) when is_list(clauses) and is_selector(selector),
    do: select_by(Map.new(clauses), selector)

  def select_by(%{id: id}, selector) when is_snowflake(id) and is_selector(selector) do
    case GuildServer.select(id, selector) do
      {:error, _} = error -> error
      guild -> {:ok, guild}
    end
  end

  def select_by(%{channel_id: channel_id}, selector)
      when is_snowflake(channel_id) and is_selector(selector) do
    case ChannelGuild.get_guild(channel_id) do
      {:ok, guild_id} -> select_by(%{id: guild_id}, selector)
      {:error, _} = error -> error
    end
  end

  def select_by(%{message: %Message{channel_id: channel_id}}, selector) do
    select_by(%{channel_id: channel_id}, selector)
  end

  @doc ~S"""
  Same as `select_by/2`, but raises `Nostrum.Error.CacheError` in case of failure.
  """
  @spec select_by!(clauses, selector) :: any | no_return
  def select_by!(clauses, selector),
    do: select_by(clauses, selector) |> Util.bangify_find(clauses, __MODULE__)
end
lib/nostrum/cache/guild_cache.ex
0.811863
0.574902
guild_cache.ex
starcoder
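A hypothetical lookup flow against the cache above; it assumes a running nostrum application with guild `123` and channel `456` cached (both IDs are made up):

alias Nostrum.Cache.GuildCache

# Fetch the whole cached struct by guild id.
{:ok, _guild} = GuildCache.get(123)

# Select a single field instead of copying the full guild out of the cache.
{:ok, _name} = GuildCache.select(123, fn guild -> guild.name end)

# Resolve the guild that owns a given channel.
{:ok, _guild} = GuildCache.get_by(channel_id: 456)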
defmodule Extract.Context do
  @moduledoc """
  The extraction process is a reduce, and this module is its accumulator.

  ## Fields

  * `response` - Response of the last action in the reduce. This field should be
    overwritten, not accumulated.
  * `variables` - A `Map` accumulation of variable names and values. These
    key/value pairs can later be referenced as part of another step in the reduce.
  * `source` - Function acting on a `Stream` of data. This can accumulate more
    functions wrapping an original function acting on the `Stream`.
  * `after_functions` - A list of functions to be executed once the extraction
    pipeline is fully executed. Think of this as the `after` block in
    `try/catch/after`.
  * `error_functions` - A list of functions to be executed in case an error is
    caught. Think of this as the `catch` block in `try/catch/after`.
  """

  @type source_opts :: [
          read: :lines | :bytes,
          byte_count: non_neg_integer,
          chunk_size: non_neg_integer
        ]

  @typedoc "A function that returns a stream of chunks of extract messages"
  @type source :: (source_opts -> Enumerable.t())

  @type t() :: %__MODULE__{
          response: term,
          variables: map,
          source: source,
          after_functions: [(list -> no_return())],
          error_functions: [(() -> no_return)]
        }

  defstruct response: nil,
            variables: %{},
            source: nil,
            after_functions: [],
            error_functions: []

  @spec new() :: %__MODULE__{}
  def new() do
    %__MODULE__{source: fn _ -> [] end}
  end

  @spec get_stream(context :: t, source_opts) :: Enumerable.t()
  def get_stream(%__MODULE__{source: function}, opts \\ []) when is_function(function, 1) do
    function.(opts)
  end

  @spec set_response(context :: t, response :: term) :: t
  def set_response(context, response) do
    Map.put(context, :response, response)
  end

  @spec add_variable(context :: t, name :: String.t(), value :: String.t()) :: t
  def add_variable(context, name, value) do
    new_variables = Map.put(context.variables, name, value)
    Map.put(context, :variables, new_variables)
  end

  @spec set_source(context :: t, source) :: t
  def set_source(context, source) do
    Map.put(context, :source, source)
  end

  @spec register_after_function(context :: t, (list -> no_return)) :: t
  def register_after_function(context, after_function) do
    Map.update!(context, :after_functions, fn functions -> functions ++ [after_function] end)
  end

  @spec run_after_functions(context :: t, list) :: t
  def run_after_functions(context, messages) do
    context.after_functions
    |> Enum.each(fn fun -> fun.(messages) end)

    context
  end

  @spec register_error_function(context :: t, (() -> no_return)) :: t
  def register_error_function(context, error_function) do
    Map.update!(context, :error_functions, fn functions -> functions ++ [error_function] end)
  end

  @spec run_error_functions(context :: t) :: t
  def run_error_functions(context) do
    context.error_functions
    |> Enum.each(fn fun -> fun.() end)

    context
  end

  @spec apply_variables(context :: t, string :: String.t()) :: String.t()
  def apply_variables(context, string) do
    context.variables
    |> Enum.reduce(string, fn {name, value}, buffer ->
      String.replace(buffer, "<" <> name <> ">", value)
    end)
  end

  @spec lines_or_bytes(source_opts) :: :line | non_neg_integer
  def lines_or_bytes(opts) do
    case Keyword.get(opts, :read) do
      nil -> :line
      :lines -> :line
      :bytes -> Keyword.get(opts, :byte_count, 100)
    end
  end

  @spec chunk_size(source_opts) :: non_neg_integer
  def chunk_size(opts) do
    Keyword.get(opts, :chunk_size, 1_000)
  end
end
apps/protocol_extract_step/lib/extract/context.ex
0.778986
0.577704
context.ex
starcoder
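A sketch of threading the accumulator above through a few steps; the variable name and the stubbed source stream are made up for illustration:

alias Extract.Context

context =
  Context.new()
  |> Context.add_variable("date", "2021-01-01")
  |> Context.set_source(fn _opts -> Stream.map(1..3, &[&1]) end)

# Variables substitute into <name> placeholders.
Context.apply_variables(context, "reports/<date>.csv")
#=> "reports/2021-01-01.csv"

Context.get_stream(context) |> Enum.to_list()
#=> [[1], [2], [3]]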
defmodule Annotations.Annotation do
  @moduledoc """
  Module defining annotations for a String buffer.
  Use this to annotate Strings.

  An annotation associates a range delimited by

  * `from` represents the first index [inclusive]
  * `to` represents one past the last index [exclusive]

  with information structured as:

  * `tags` a list of atoms
  * `info` A `Map` which might contain any content

  **Note**: The string is not stored in an `Annotations.Annotation`.
  See `Annotations.AnnotatedString` for a struct and functions working with both
  the `String` and the associated Annotations
  """

  defstruct from: nil, to: nil, tags: [], info: nil

  @type t :: %{
          to: integer,
          from: integer,
          tags: [atom],
          info: Map.t()
        }

  @doc """
  Creates a new Annotation
  """
  def new({from, to}, tags, info) do
    new(from, to, tags, info)
  end

  def new({from, to}, tags) do
    new(from, to, tags, nil)
  end

  def new({from, to}) do
    new(from, to, [], nil)
  end

  def new(from, to, tags) do
    new(from, to, tags, nil)
  end

  def new(from, to, tags, info) do
    tags =
      unless is_list(tags) do
        [tags]
      else
        tags
      end

    %__MODULE__{from: from, to: to, tags: tags, info: info}
  end

  def length(%__MODULE__{} = ann) do
    ann.to - ann.from
  end

  def offset(%__MODULE__{} = ann, diff) when is_integer(diff) do
    if ann.to + diff <= 0 do
      nil
    else
      %__MODULE__{ann | from: max(ann.from + diff, 0), to: ann.to + diff}
    end
  end

  def intersects?(%__MODULE__{} = first, %__MODULE__{from: from, to: to}) do
    intersects?(first, from, to)
  end

  def intersects?(%__MODULE__{from: from, to: to}, cmp_from, cmp_to) do
    cond do
      cmp_to < from -> false
      cmp_from > to -> false
      true -> true
    end
  end

  def overlaps?(%__MODULE__{} = first, %__MODULE__{from: from, to: to}) do
    overlaps?(first, from, to)
  end

  def overlaps?(%__MODULE__{from: from, to: to}, cmp_from, cmp_to) do
    cond do
      cmp_to <= from -> false
      cmp_from >= to -> false
      true -> true
    end
  end

  def crop_start(%__MODULE__{} = ann, new_start) when is_integer(new_start) and new_start >= 0 do
    if new_start >= ann.to do
      nil
    else
      %__MODULE__{
        ann
        | from: Enum.max([ann.from - new_start, 0]),
          to: Enum.max([ann.to - new_start, 0])
      }
    end
  end

  def crop_end(%__MODULE__{} = ann, new_end) when is_integer(new_end) and new_end >= 0 do
    if new_end <= ann.from do
      nil
    else
      if new_end < ann.to do
        %__MODULE__{ann | to: Enum.max([ann.to - new_end, 0])}
      else
        ann
      end
    end
  end

  def crop_overlap(%__MODULE__{from: from, to: to} = ann, cmp_from, cmp_to) do
    if intersects?(ann, cmp_from, cmp_to) do
      %__MODULE__{
        ann
        | from: Enum.min([Enum.max([cmp_from, from]), Enum.min([cmp_to, from])]),
          to: Enum.min([Enum.max([cmp_from, to]), Enum.min([cmp_to, to])])
      }
    else
      nil
    end
  end

  def str(%__MODULE__{} = ann, str) when is_bitstring(str) do
    {chunk, _} = String.split_at(str, ann.to)
    {_, chunk} = String.split_at(chunk, ann.from)
    chunk
  end

  def str({from, to}, str) when is_bitstring(str) do
    {chunk, _} = String.split_at(str, to)
    {_, chunk} = String.split_at(chunk, from)
    chunk
  end

  def split_annotated_buffer(buffer, annotations, split_pos, options \\ [])
      when is_bitstring(buffer) and is_integer(split_pos) do
    options = Keyword.merge([allow_empty_range: false], options)
    allow_empty_range = options[:allow_empty_range]
    buf_len = String.length(buffer)

    if split_pos == 0 or split_pos > buf_len - 1 do
      if split_pos == 0 do
        [{"", []}, {buffer, annotations}]
      else
        [{buffer, annotations}, {"", []}]
      end
    else
      {first, last} = String.split_at(buffer, split_pos)

      [first_ann, last_ann] =
        [{first, 0, split_pos}, {last, split_pos, buf_len}]
        |> Enum.map(fn {_str, f, t} ->
          annotations
          |> Stream.filter(& &1)
          |> Stream.map(fn ann -> crop_overlap(ann, f, t) end)
          |> Stream.filter(& &1)
          |> Stream.map(fn ann -> crop_start(ann, f) end)
          |> Enum.filter(fn
            nil ->
              false

            %__MODULE__{from: from, to: to} when allow_empty_range == false and from == to ->
              false

            _ann ->
              true
          end)
        end)

      [{first, first_ann}, {last, last_ann}]
    end
  end

  def has_tag?(%__MODULE__{tags: tags}, tag) do
    tags
    |> Enum.reduce_while(false, fn
      tag_cmp, _ when tag == tag_cmp -> {:halt, true}
      _, _ -> {:cont, false}
    end)
  end
end
lib/annotations/annotation.ex
0.778944
0.677221
annotation.ex
starcoder
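A quick illustration of the range semantics above (`from` inclusive, `to` exclusive), using a throwaway annotation:

alias Annotations.Annotation

ann = Annotation.new(0, 5, :greeting)

Annotation.str(ann, "Hello world")
#=> "Hello"

Annotation.length(ann)
#=> 5

Annotation.has_tag?(ann, :greeting)
#=> true

# Shifting left past the buffer start clamps `from` at 0.
Annotation.offset(ann, -2)
#=> %Annotations.Annotation{from: 0, to: 3, tags: [:greeting], info: nil}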
defmodule AWS.CertificateManager do
  @moduledoc """
  AWS Certificate Manager

  Welcome to the AWS Certificate Manager (ACM) API documentation.

  You can use ACM to manage SSL/TLS certificates for your AWS-based websites
  and applications. For general information about using ACM, see the [ *AWS
  Certificate Manager User Guide* ](http://docs.aws.amazon.com/acm/latest/userguide/).
  """

  @doc """
  Adds one or more tags to an ACM Certificate.

  Tags are labels that you can use to identify and organize your AWS resources.
  Each tag consists of a `key` and an optional `value`. You specify the
  certificate on input by its Amazon Resource Name (ARN). You specify the tag
  by using a key-value pair.

  You can apply a tag to just one certificate if you want to identify a
  specific characteristic of that certificate, or you can apply the same tag to
  multiple certificates if you want to filter for a common relationship among
  those certificates. Similarly, you can apply the same tag to multiple
  resources if you want to specify a relationship among those resources. For
  example, you can add the same tag to an ACM Certificate and an Elastic Load
  Balancing load balancer to indicate that they are both used by the same
  website. For more information, see [Tagging ACM
  Certificates](http://docs.aws.amazon.com/acm/latest/userguide/tags.html).

  To remove one or more tags, use the `RemoveTagsFromCertificate` action. To
  view all of the tags that have been applied to the certificate, use the
  `ListTagsForCertificate` action.
  """
  def add_tags_to_certificate(client, input, options \\ []) do
    request(client, "AddTagsToCertificate", input, options)
  end

  @doc """
  Deletes a certificate and its associated private key.

  If this action succeeds, the certificate no longer appears in the list that
  can be displayed by calling the `ListCertificates` action or be retrieved by
  calling the `GetCertificate` action. The certificate will not be available
  for use by AWS services integrated with ACM.

  <note> You cannot delete an ACM Certificate that is being used by another AWS
  service. To delete a certificate that is in use, the certificate association
  must first be removed.

  </note>
  """
  def delete_certificate(client, input, options \\ []) do
    request(client, "DeleteCertificate", input, options)
  end

  @doc """
  Returns detailed metadata about the specified ACM Certificate.
  """
  def describe_certificate(client, input, options \\ []) do
    request(client, "DescribeCertificate", input, options)
  end

  @doc """
  Retrieves a certificate specified by an ARN and its certificate chain. The
  chain is an ordered list of certificates that contains the end entity
  certificate, intermediate certificates of subordinate CAs, and the root
  certificate in that order. The certificate and certificate chain are base64
  encoded. If you want to decode the certificate to see the individual fields,
  you can use OpenSSL.
  """
  def get_certificate(client, input, options \\ []) do
    request(client, "GetCertificate", input, options)
  end

  @doc """
  Imports a certificate into AWS Certificate Manager (ACM) to use with services
  that are integrated with ACM.

  Note that [integrated
  services](http://docs.aws.amazon.com/acm/latest/userguide/acm-services.html)
  allow only certificate types and keys they support to be associated with
  their resources. Further, their support differs depending on whether the
  certificate is imported into IAM or into ACM. For more information, see the
  documentation for each service.

  For more information about importing certificates into ACM, see [Importing
  Certificates](http://docs.aws.amazon.com/acm/latest/userguide/import-certificate.html)
  in the *AWS Certificate Manager User Guide*.

  <note> ACM does not provide [managed
  renewal](http://docs.aws.amazon.com/acm/latest/userguide/acm-renewal.html)
  for certificates that you import.

  </note> Note the following guidelines when importing third party
  certificates:

  <ul> <li> You must enter the private key that matches the certificate you are
  importing.

  </li> <li> The private key must be unencrypted. You cannot import a private
  key that is protected by a password or a passphrase.

  </li> <li> If the certificate you are importing is not self-signed, you must
  enter its certificate chain.

  </li> <li> If a certificate chain is included, the issuer must be the subject
  of one of the certificates in the chain.

  </li> <li> The certificate, private key, and certificate chain must be
  PEM-encoded.

  </li> <li> The current time must be between the `Not Before` and `Not After`
  certificate fields.

  </li> <li> The `Issuer` field must not be empty.

  </li> <li> The OCSP authority URL, if present, must not exceed 1000
  characters.

  </li> <li> To import a new certificate, omit the `CertificateArn` argument.
  Include this argument only when you want to replace a previously imported
  certificate.

  </li> <li> When you import a certificate by using the CLI or one of the SDKs,
  you must specify the certificate, the certificate chain, and the private key
  by their file names preceded by `file://`. For example, you can specify a
  certificate saved in the `C:\temp` folder as
  `file://C:\temp\certificate_to_import.pem`. If you are making an HTTP or
  HTTPS Query request, include these arguments as BLOBs.

  </li> </ul> This operation returns the [Amazon Resource Name
  (ARN)](http://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html)
  of the imported certificate.
  """
  def import_certificate(client, input, options \\ []) do
    request(client, "ImportCertificate", input, options)
  end

  @doc """
  Retrieves a list of certificate ARNs and domain names. You can request that
  only certificates that match a specific status be listed. You can also filter
  by specific attributes of the certificate.
  """
  def list_certificates(client, input, options \\ []) do
    request(client, "ListCertificates", input, options)
  end

  @doc """
  Lists the tags that have been applied to the ACM Certificate. Use the
  certificate's Amazon Resource Name (ARN) to specify the certificate. To add a
  tag to an ACM Certificate, use the `AddTagsToCertificate` action. To delete a
  tag, use the `RemoveTagsFromCertificate` action.
  """
  def list_tags_for_certificate(client, input, options \\ []) do
    request(client, "ListTagsForCertificate", input, options)
  end

  @doc """
  Remove one or more tags from an ACM Certificate. A tag consists of a
  key-value pair. If you do not specify the value portion of the tag when
  calling this function, the tag will be removed regardless of value. If you
  specify a value, the tag is removed only if it is associated with the
  specified value.

  To add tags to a certificate, use the `AddTagsToCertificate` action. To view
  all of the tags that have been applied to a specific ACM Certificate, use the
  `ListTagsForCertificate` action.
  """
  def remove_tags_from_certificate(client, input, options \\ []) do
    request(client, "RemoveTagsFromCertificate", input, options)
  end

  @doc """
  Requests an ACM Certificate for use with other AWS services.

  To request an ACM Certificate, you must specify the fully qualified domain
  name (FQDN) for your site in the `DomainName` parameter. You can also specify
  additional FQDNs in the `SubjectAlternativeNames` parameter if users can
  reach your site by using other names. For each domain name you specify, email
  is sent to the domain owner to request approval to issue the certificate.
  Email is sent to three registered contact addresses in the WHOIS database and
  to five common system administration addresses formed from the `DomainName`
  you enter or the optional `ValidationDomain` parameter. For more information,
  see [Validate Domain
  Ownership](http://docs.aws.amazon.com/acm/latest/userguide/gs-acm-validate.html).

  After receiving approval from the domain owner, the ACM Certificate is
  issued. For more information, see the [AWS Certificate Manager User
  Guide](http://docs.aws.amazon.com/acm/latest/userguide/).
  """
  def request_certificate(client, input, options \\ []) do
    request(client, "RequestCertificate", input, options)
  end

  @doc """
  Resends the email that requests domain ownership validation.

  The domain owner or an authorized representative must approve the ACM
  Certificate before it can be issued. The certificate can be approved by
  clicking a link in the mail to navigate to the Amazon certificate approval
  website and then clicking **I Approve**. However, the validation email can be
  blocked by spam filters. Therefore, if you do not receive the original mail,
  you can request that the mail be resent within 72 hours of requesting the ACM
  Certificate. If more than 72 hours have elapsed since your original request
  or since your last attempt to resend validation mail, you must request a new
  certificate. For more information about setting up your contact email
  addresses, see [Configure Email for your
  Domain](http://docs.aws.amazon.com/acm/latest/userguide/setup-email.html).
  """
  def resend_validation_email(client, input, options \\ []) do
    request(client, "ResendValidationEmail", input, options)
  end

  @spec request(map(), binary(), map(), list()) ::
          {:ok, Poison.Parser.t() | nil, Poison.Response.t()}
          | {:error, Poison.Parser.t()}
          | {:error, HTTPoison.Error.t()}
  defp request(client, action, input, options) do
    client = %{client | service: "acm"}
    host = get_host("acm", client)
    url = get_url(host, client)

    headers = [
      {"Host", host},
      {"Content-Type", "application/x-amz-json-1.1"},
      {"X-Amz-Target", "CertificateManager.#{action}"}
    ]

    payload = Poison.Encoder.encode(input, [])
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)

    case HTTPoison.post(url, payload, headers, options) do
      {:ok, response = %HTTPoison.Response{status_code: 200, body: ""}} ->
        {:ok, nil, response}

      {:ok, response = %HTTPoison.Response{status_code: 200, body: body}} ->
        {:ok, Poison.Parser.parse!(body), response}

      {:ok, _response = %HTTPoison.Response{body: body}} ->
        error = Poison.Parser.parse!(body)
        exception = error["__type"]
        message = error["message"]
        {:error, {exception, message}}

      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  defp get_host(endpoint_prefix, client) do
    if client.region == "local" do
      "localhost"
    else
      "#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
    end
  end

  defp get_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end
end
lib/aws/certificate_manager.ex
0.889487
0.579847
certificate_manager.ex
starcoder
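A hedged call sketch for the module above. The real client struct comes from the surrounding aws-elixir library and is not shown here, so this bare map (with placeholder credentials and a `:service` key so that the `%{client | service: ...}` update in `request/4` succeeds) is an assumption:

client = %{
  access_key_id: "AKIA_PLACEHOLDER",   # placeholder, not a real key
  secret_access_key: "placeholder",
  region: "us-east-1",
  endpoint: "amazonaws.com",
  proto: "https",
  port: 443,
  service: nil
}

{:ok, _result, _response} =
  AWS.CertificateManager.list_certificates(client, %{"CertificateStatuses" => ["ISSUED"]})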
defmodule Livebook.Utils.Time do
  @moduledoc false

  # A simplified version of https://gist.github.com/tlemens/88e9b08f62150ba6082f478a4a03ac52

  @doc """
  Formats the given point in time relatively to present.
  """
  @spec time_ago_in_words(NaiveDateTime.t()) :: String.t()
  def time_ago_in_words(naive_date_time) when is_struct(naive_date_time, NaiveDateTime) do
    now = NaiveDateTime.utc_now()

    if NaiveDateTime.compare(naive_date_time, now) == :gt do
      raise ArgumentError, "expected a datetime in the past, got: #{inspect(naive_date_time)}"
    end

    distance_of_time_in_words(naive_date_time, now)
  end

  @doc """
  Formats time distance between `from_ndt` and `to_ndt` as a human-readable string.

  ## Examples

      iex> Livebook.Utils.Time.distance_of_time_in_words(~N[2020-06-20 18:15:00], ~N[2020-06-20 18:15:04])
      "less than 5 seconds"

      iex> Livebook.Utils.Time.distance_of_time_in_words(~N[2020-06-20 18:15:00], ~N[2020-06-20 18:15:09])
      "less than 10 seconds"

      iex> Livebook.Utils.Time.distance_of_time_in_words(~N[2020-06-20 18:15:00], ~N[2020-06-20 18:15:19])
      "less than 20 seconds"

      iex> Livebook.Utils.Time.distance_of_time_in_words(~N[2020-06-20 18:15:00], ~N[2020-06-20 18:15:20])
      "half a minute"

      iex> Livebook.Utils.Time.distance_of_time_in_words(~N[2020-06-20 18:15:00], ~N[2020-06-20 18:15:39])
      "half a minute"

      iex> Livebook.Utils.Time.distance_of_time_in_words(~N[2020-06-20 18:15:00], ~N[2020-06-20 18:15:40])
      "less than a minute"

      iex> Livebook.Utils.Time.distance_of_time_in_words(~N[2020-06-20 18:15:00], ~N[2020-06-20 18:15:59])
      "less than a minute"

      iex> Livebook.Utils.Time.distance_of_time_in_words(~N[2020-06-20 18:15:00], ~N[2020-06-20 18:16:00])
      "1 minute"

      iex> Livebook.Utils.Time.distance_of_time_in_words(~N[2020-06-20 18:15:00], ~N[2020-06-20 18:16:29])
      "1 minute"

      iex> Livebook.Utils.Time.distance_of_time_in_words(~N[2020-06-20 18:15:00], ~N[2020-06-20 18:16:30])
      "2 minutes"

      iex> Livebook.Utils.Time.distance_of_time_in_words(~N[2020-06-20 18:15:00], ~N[2020-06-20 18:58:30])
      "44 minutes"

      iex> Livebook.Utils.Time.distance_of_time_in_words(~N[2020-06-20 18:15:00], ~N[2020-06-20 18:59:30])
      "about 1 hour"

      iex> Livebook.Utils.Time.distance_of_time_in_words(~N[2020-06-20 18:15:00], ~N[2020-06-20 19:59:30])
      "about 2 hours"

      iex> Livebook.Utils.Time.distance_of_time_in_words(~N[2020-06-20 18:15:00], ~N[2020-06-21 18:14:00])
      "about 24 hours"

      iex> Livebook.Utils.Time.distance_of_time_in_words(~N[2020-06-20 18:15:00], ~N[2020-06-21 18:15:00])
      "1 day"

      iex> Livebook.Utils.Time.distance_of_time_in_words(~N[2020-06-20 18:15:00], ~N[2020-06-22 18:15:00])
      "2 days"

      iex> Livebook.Utils.Time.distance_of_time_in_words(~N[2020-06-20 18:15:00], ~N[2020-07-22 18:15:00])
      "about 1 month"

      iex> Livebook.Utils.Time.distance_of_time_in_words(~N[2020-06-20 18:15:00], ~N[2021-08-22 18:15:00])
      "about 14 months"
  """
  @spec distance_of_time_in_words(NaiveDateTime.t(), NaiveDateTime.t()) :: String.t()
  def distance_of_time_in_words(from_ndt, to_ndt)
      when is_struct(from_ndt, NaiveDateTime) and is_struct(to_ndt, NaiveDateTime) do
    duration_seconds = NaiveDateTime.diff(to_ndt, from_ndt)

    {:seconds, duration_seconds}
    |> maybe_convert_to_minutes()
    |> duration_in_words()
  end

  defp maybe_convert_to_minutes({:seconds, seconds}) when seconds > 59 do
    {:minutes, round(seconds / 60)}
  end

  defp maybe_convert_to_minutes(duration), do: duration

  defp duration_in_words({:seconds, seconds}) when seconds in 0..4 do
    "less than 5 seconds"
  end

  defp duration_in_words({:seconds, seconds}) when seconds in 5..9 do
    "less than 10 seconds"
  end

  defp duration_in_words({:seconds, seconds}) when seconds in 10..19 do
    "less than 20 seconds"
  end

  defp duration_in_words({:seconds, seconds}) when seconds in 20..39 do
    "half a minute"
  end

  defp duration_in_words({:seconds, seconds}) when seconds in 40..59 do
    "less than a minute"
  end

  defp duration_in_words({:minutes, minutes}) when minutes == 1 do
    "1 minute"
  end

  defp duration_in_words({:minutes, minutes}) when minutes in 2..44 do
    "#{minutes} minutes"
  end

  defp duration_in_words({:minutes, minutes}) when minutes in 45..89 do
    "about 1 hour"
  end

  # 90 mins up to 24 hours
  defp duration_in_words({:minutes, minutes}) when minutes in 90..1439 do
    "about #{round(minutes / 60)} hours"
  end

  # 24 hours up to 42 hours
  defp duration_in_words({:minutes, minutes}) when minutes in 1440..2519 do
    "1 day"
  end

  # 42 hours up to 30 days
  defp duration_in_words({:minutes, minutes}) when minutes in 2520..43_199 do
    "#{round(minutes / 1440)} days"
  end

  # 30 days up to 45 days
  defp duration_in_words({:minutes, minutes}) when minutes in 43_200..64_799 do
    "about 1 month"
  end

  # 45 days up to 60 days
  defp duration_in_words({:minutes, minutes}) when minutes in 64_800..86_399 do
    "about 2 months"
  end

  defp duration_in_words({:minutes, minutes}) do
    "about #{round(minutes / 43_200)} months"
  end
end
lib/livebook/utils/time.ex
0.894083
0.520679
time.ex
starcoder
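For completeness, a call-site sketch of the relative formatting above:

alias Livebook.Utils.Time, as: TimeUtils

ninety_seconds_ago = NaiveDateTime.add(NaiveDateTime.utc_now(), -90, :second)

TimeUtils.time_ago_in_words(ninety_seconds_ago)
#=> "2 minutes"

# A future timestamp raises, per the guard in time_ago_in_words/1:
# ** (ArgumentError) expected a datetime in the past, got: ...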
defmodule Wabbit.Basic do
  @moduledoc """
  Functions to publish, consume and acknowledge messages.
  """

  require Record
  import Wabbit.Record

  Record.defrecordp :amqp_msg, [props: p_basic(), payload: ""]

  @doc """
  Publishes a message to an Exchange.

  This method publishes a message to a specific exchange. The message will be
  routed to queues as defined by the exchange configuration and distributed to
  any subscribers.

  The `:exchange` option specifies the name of the exchange to publish to; if
  set to the empty string, the message is published to the default exchange.
  The `:routing_key` option specifies the routing key for the message. The
  `payload` argument specifies the message content as a binary.

  In addition to the previous parameters, the following options can be used:

  # Options

    * `:mandatory` - If set, returns an error if the broker can't route the message to a queue (default `false`)
    * `:immediate` - If set, returns an error if the broker can't deliver the message to a consumer immediately (default `false`)
    * `:content_type` - MIME Content type
    * `:content_encoding` - MIME Content encoding
    * `:headers` - Message headers. Can be used with headers Exchanges
    * `:persistent` - If set, uses persistent delivery mode. Messages marked as `persistent` that are delivered to `durable` queues will be logged to disk
    * `:correlation_id` - application correlation identifier
    * `:priority` - message priority, ranging from 0 to 9
    * `:reply_to` - name of the reply queue
    * `:expiration` - how long the message is valid (in milliseconds)
    * `:message_id` - message identifier
    * `:timestamp` - timestamp associated with this message (epoch time)
    * `:type` - message type as a string
    * `:user_id` - creating user ID. RabbitMQ will validate this against the active connection user
    * `:app_id` - publishing application ID

  ## Examples

      iex> Wabbit.Basic.publish channel, "Hello World!", exchange: "my_exchange", routing_key: "my_routing_key", persistent: true
      :ok

  """
  def publish(channel, payload, options \\ []) do
    basic_publish =
      basic_publish(
        exchange: Keyword.get(options, :exchange, ""),
        routing_key: Keyword.get(options, :routing_key, ""),
        mandatory: Keyword.get(options, :mandatory, false),
        immediate: Keyword.get(options, :immediate, false))

    p_basic =
      p_basic(
        content_type: Keyword.get(options, :content_type, :undefined),
        content_encoding: Keyword.get(options, :content_encoding, :undefined),
        headers: Keyword.get(options, :headers, :undefined),
        delivery_mode: if(options[:persistent], do: 2, else: 1),
        priority: Keyword.get(options, :priority, :undefined),
        correlation_id: Keyword.get(options, :correlation_id, :undefined),
        reply_to: Keyword.get(options, :reply_to, :undefined),
        expiration: Keyword.get(options, :expiration, :undefined),
        message_id: Keyword.get(options, :message_id, :undefined),
        timestamp: Keyword.get(options, :timestamp, :undefined),
        type: Keyword.get(options, :type, :undefined),
        user_id: Keyword.get(options, :user_id, :undefined),
        app_id: Keyword.get(options, :app_id, :undefined),
        cluster_id: Keyword.get(options, :cluster_id, :undefined))

    :amqp_channel.cast(channel, basic_publish, amqp_msg(props: p_basic, payload: payload))
  end

  @doc """
  Acknowledges one or more messages.

  If `multiple` is set to `true`, all messages up to the one specified by
  `delivery_tag` are considered acknowledged by the server.
  """
  def ack(channel, delivery_tag, options \\ []) do
    :amqp_channel.call(
      channel,
      basic_ack(
        delivery_tag: delivery_tag,
        multiple: Keyword.get(options, :multiple, false)))
  end

  @doc """
  Rejects (and, optionally, requeues) a message.
  """
  def reject(channel, delivery_tag, options \\ []) do
    :amqp_channel.call(
      channel,
      basic_reject(
        delivery_tag: delivery_tag,
        requeue: Keyword.get(options, :requeue, true)))
  end

  @doc """
  Negative acknowledge of one or more messages.

  If `:multiple` is set to `true`, all messages up to the one specified by
  `delivery_tag` are considered as not acknowledged by the server. If
  `:requeue` is set to `true`, the message will be returned to the queue and
  redelivered to the next available consumer.

  This is a RabbitMQ specific extension to AMQP 0.9.1. It is equivalent to
  reject, but allows rejecting multiple messages using the `multiple` option.
  """
  def nack(channel, delivery_tag, options \\ []) do
    :amqp_channel.call(
      channel,
      basic_nack(
        delivery_tag: delivery_tag,
        multiple: Keyword.get(options, :multiple, false),
        requeue: Keyword.get(options, :requeue, true)))
  end

  @doc """
  Registers a queue consumer process. The `pid` of the process can be set using
  the `subscriber` argument and defaults to the calling process.

  The consumer process will receive the following messages:

    * `{:basic_deliver, payload, meta}` - This is sent for each message consumed, where `payload` contains the message content and `meta` contains all the metadata set when sending with Basic.publish or additional info set by the broker
    * `{:basic_consume_ok, %{consumer_tag: consumer_tag}}` - Sent when the consumer process is registered with Basic.consume. The caller receives the same information as the return of Basic.consume
    * `{:basic_cancel, %{consumer_tag: consumer_tag, no_wait: no_wait}}` - Sent by the broker when the consumer is unexpectedly cancelled (such as after a queue deletion)
    * `{:basic_cancel_ok, %{consumer_tag: consumer_tag}}` - Sent to the consumer process after a call to Basic.cancel

  # Options

    * `:consumer_tag` - Identifier for the consumer, valid within the current channel
    * `:no_local` - If the no-local field is set the server will not send messages to the connection that published them
    * `:no_ack` - If this field is set the server does not expect acknowledgements for messages
    * `:exclusive` - Request exclusive consumer access, meaning only this consumer can access the queue
    * `:no_wait` - If set, the server will not respond to the method
    * `:arguments` - A set of arguments for the consume
  """
  def consume(channel, queue, options \\ []) do
    basic_consume =
      basic_consume(
        queue: queue,
        consumer_tag: Keyword.get(options, :consumer_tag, ""),
        no_local: Keyword.get(options, :no_local, false),
        no_ack: Keyword.get(options, :no_ack, false),
        exclusive: Keyword.get(options, :exclusive, false),
        nowait: Keyword.get(options, :no_wait, false),
        arguments: Keyword.get(options, :arguments, []))

    basic_consume_ok(consumer_tag: consumer_tag) =
      :amqp_channel.subscribe(channel, basic_consume, Keyword.get(options, :subscriber, self()))

    {:ok, consumer_tag}
  end

  @doc """
  Sets the message prefetch count or size (in bytes).

  If `:global` is set to `true` this applies to the entire Connection,
  otherwise it applies only to the specified Channel.

  # Options

    * `:prefetch_size` - Prefetch window size in bytes
    * `:prefetch_count` - Specifies a prefetch window in terms of whole messages
    * `:global` - QoS settings scope
  """
  def qos(channel, options \\ []) do
    basic_qos_ok() =
      :amqp_channel.call(
        channel,
        basic_qos(
          prefetch_size: Keyword.get(options, :prefetch_size, 0),
          prefetch_count: Keyword.get(options, :prefetch_count, 0),
          global: Keyword.get(options, :global, false)))

    :ok
  end
end
lib/wabbit/basic.ex
0.886887
0.597256
basic.ex
starcoder
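A consume-and-ack loop sketch for the module above. Channel setup (via the Wabbit connection modules) is elided, the queue name is made up, and the assumption that the delivery metadata map carries a `:delivery_tag` key follows AMQP convention rather than anything shown in the module itself:

{:ok, _consumer_tag} = Wabbit.Basic.consume(channel, "my_queue")
:ok = Wabbit.Basic.qos(channel, prefetch_count: 10)

receive do
  {:basic_deliver, payload, meta} ->
    IO.puts("got: #{payload}")
    # :delivery_tag is an assumed metadata key; see note above.
    Wabbit.Basic.ack(channel, meta.delivery_tag)
end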
defmodule Bundlex.Project do
  @bundlex_file_name "bundlex.exs"

  @moduledoc """
  Behaviour that should be implemented by each project using Bundlex in the
  `#{@bundlex_file_name}` file.
  """

  use Bunch

  alias Bunch.KVList
  alias Bundlex.Helper.MixHelper
  alias __MODULE__.{Preprocessor, Store}

  @src_dir_name "c_src"
  @deprecated_key_interfaces %{nifs: :nif, cnodes: :cnode, ports: :port}

  @type native_name_t :: atom

  @typedoc """
  Type describing configuration of a native.

  It's a keyword list containing the following keys:
  * `sources` - C files to be compiled (at least one must be provided),
  * `includes` - Paths to look for header files (empty list by default).
  * `lib_dirs` - Paths to look for libraries (empty list by default).
  * `libs` - Names of libraries to link (empty list by default).
  * `pkg_configs` - Names of libraries for which the appropriate flags will be
    obtained using pkg-config (empty list by default).
  * `deps` - Dependencies in the form of `{app, lib_name}`, where `app` is the
    application name of the dependency, and `lib_name` is the name of lib
    specified in bundlex project of this dependency. See _Dependencies_ section
    in readme for details.
  * `src_base` - Native files should reside in `project_root/c_src/<src_base>`
    (application name by default).
  * `compiler_flags` - Custom flags for compiler.
  * `linker_flags` - Custom flags for linker.
  * `language` - Language of native. :c or :cpp may be chosen (:c by default)
  * `interface` - Interface of native. It can be single atom e.g. :nif or list of atoms.
  * `preprocessor` - Modules implementing `Bundlex.Project.Preprocessor` behaviour
  """
  @type native_config_t :: [
          sources: [String.t()],
          includes: [String.t()],
          lib_dirs: [String.t()],
          libs: [String.t()],
          pkg_configs: [String.t()],
          deps: [{Application.app(), native_name_t | [native_name_t]}],
          src_base: String.t(),
          compiler_flags: [String.t()],
          linker_flags: [String.t()],
          language: :c | :cpp,
          interface: [Bundlex.Native.interface_t()] | Bundlex.Native.interface_t() | nil,
          preprocessor: [Preprocessor.t()] | Preprocessor.t()
        ]

  @spec native_config_keys :: [atom]
  def native_config_keys,
    do: [
      :includes,
      :libs,
      :lib_dirs,
      :pkg_configs,
      :sources,
      :deps,
      :compiler_flags,
      :linker_flags,
      :language,
      :interface,
      :preprocessor
    ]

  @typedoc """
  Type describing input project configuration.

  It's a keyword list, where natives and libs can be specified. `:nifs`,
  `:cnodes` and `:ports` keys are deprecated. Instead, use `:natives` with
  proper `:interface`s.

  Libs are native packages that are compiled as static libraries and linked to
  natives that have them specified in `deps` field of their configuration.
  """
  @type config_t :: KVList.t(:natives | :libs, KVList.t(native_name_t, native_config_t))

  @doc """
  Callback returning project configuration.
  """
  @callback project() :: config_t

  defmacro __using__(_args) do
    quote do
      @behaviour unquote(__MODULE__)

      def bundlex_project?, do: true
      def src_path, do: __DIR__ |> Path.join(unquote(@src_dir_name))
    end
  end

  @typedoc """
  Struct representing a bundlex project.

  Contains the following fields:
  - `:config` - project configuration
  - `:src_path` - path to the native sources
  - `:module` - bundlex project module
  - `:app` - application that exports project
  """
  @type t :: %__MODULE__{
          config: config_t,
          src_path: String.t(),
          module: module,
          app: atom
        }

  @enforce_keys [:config, :src_path, :module, :app]
  defstruct @enforce_keys

  @doc """
  Determines if `module` is a bundlex project module.
  """
  @spec project_module?(module) :: boolean
  def project_module?(module) do
    function_exported?(module, :bundlex_project?, 0) and module.bundlex_project?()
  end

  @doc """
  Returns the project struct of given application.

  If the module has not been loaded yet, it is loaded from
  `project_dir/#{@bundlex_file_name}` file.
  """
  @spec get(application :: atom) ::
          {:ok, t}
          | {:error,
             :invalid_project_specification
             | {:no_bundlex_project_in_file, path :: binary()}
             | :unknown_application}
  def get(application \\ MixHelper.get_app!()) do
    project = Store.get_project(application)

    if project do
      {:ok, project}
    else
      with {:ok, module} <- load(application),
           project = %__MODULE__{
             config: convert_input_config(module.project()),
             src_path: module.src_path(),
             module: module,
             app: application
           },
           true <- Keyword.keyword?(project.config) or {:error, :invalid_project_specification} do
        Store.store_project(application, project)
        {:ok, project}
      end
    end
  end

  @spec load(application :: atom) ::
          {:ok, module}
          | {:error, {:no_bundlex_project_in_file, path :: binary()} | :unknown_application}
  defp load(application) do
    with {:ok, dir} <- MixHelper.get_project_dir(application) do
      bundlex_file_path = dir |> Path.join(@bundlex_file_name)
      modules = Code.require_file(bundlex_file_path) |> Keyword.keys()

      modules
      |> Enum.find(&project_module?/1)
      |> Bunch.error_if_nil({:no_bundlex_project_in_file, bundlex_file_path})
    end
  end

  defp convert_input_config(input_config) do
    natives =
      Map.keys(@deprecated_key_interfaces)
      |> Enum.flat_map(fn key ->
        deprecated_keys = Keyword.get(input_config, key, [])

        deprecated_keys
        |> Enum.map(&convert_to_native(&1, @deprecated_key_interfaces[key]))
      end)

    if natives != [],
      do: IO.warn(":nifs, :cnodes and :ports keys are deprecated. Use :natives instead")

    input_config
    |> Keyword.update(:natives, natives, &(&1 ++ natives))
    |> delistify_interfaces(:libs)
    |> delistify_interfaces(:natives)
  end

  defp convert_to_native({name, config}, interface) do
    config = Keyword.put(config, :interface, interface)
    {name, config}
  end

  defp delistify_interfaces(input_config, native_type) do
    natives = Keyword.get(input_config, native_type, [])

    natives =
      natives
      |> Enum.flat_map(fn {name, config} ->
        config
        |> Keyword.get(:interface, nil)
        |> Bunch.listify()
        |> Enum.map(&{name, Keyword.put(config, :interface, &1)})
      end)

    Keyword.put(input_config, native_type, natives)
  end
end
lib/bundlex/project.ex
0.842248
0.41052
project.ex
starcoder
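A minimal bundlex.exs sketch following the config shape documented above; the module, native name, and source file are illustrative:

defmodule MyApp.BundlexProject do
  use Bundlex.Project

  def project do
    [
      natives: [
        # Compiled from project_root/c_src/<app_name>/my_native.c by default.
        my_native: [
          sources: ["my_native.c"],
          interface: :nif
        ]
      ]
    ]
  end
end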
defmodule Spear.Connection.Configuration do
  @default_mint_opts [protocols: [:http2], mode: :active]

  @moduledoc """
  Configuration for `Spear.Connection`s

  ## Options

  * `:name` - the name of the GenServer. See `t:GenServer.name/0` for more
    information. When not provided, the spawned process is not aliased to a
    name and is only addressable through its PID.
  * `:connection_string` - the connection string to parse containing all
    connection information. Other options like `:host` or `:port` will be
    parsed from the connection string. If options parsed from the connection
    string are passed, they will be treated as overrides to the value found in
    the connection string. Consult the EventStoreDB documentation for
    formulating a valid connection string.
  * `:mint_opts` - (default: `#{inspect(@default_mint_opts)}`) a keyword list
    of options to pass to mint. The default values cannot be overridden. This
    can be useful for configuring TLS. See the
    [security guide](guides/security.md) for more information.
  * `:host` - (default: `"localhost"`) the host address of the EventStoreDB
  * `:port` - (default: `2113`) the external gRPC port of the EventStoreDB
  * `:tls?` - (default: `false`) whether or not to use TLS to secure the
    connection to the EventStoreDB
  * `:username` - (default: `"admin"`) the user to connect as
  * `:password` - (default: `"<PASSWORD>"`) the user's password
  * `:keep_alive_interval` - (default: `10_000`ms - 10s) the period to send
    keep-alive pings to the EventStoreDB. Set `-1` to disable keep-alive
    checks. Should be any integer value `>= 10_000`. This option can be used
    in conjunction with `:keep_alive_timeout` to properly disconnect if the
    EventStoreDB is not responding to network traffic.
  * `:keep_alive_timeout` - (default: `10_000`ms - 10s) the time after sending
    a keep-alive ping when the ping will be considered unacknowledged. Used in
    conjunction with `:keep_alive_interval`. Set to `-1` to disable keep-alive
    checks. Should be any integer value `>= 10_000`.
  * `:read_only?` - (default: `false`) controls whether the connection should
    be limited to read-only functionality. The list of read-only APIs can be
    queried with `Spear.Connection.read_apis/0`

  See the `Spear.Connection` module docs for more information about keep-alive.
  """
  @moduledoc since: "0.2.0"

  require Logger

  # ms
  @default_keepalive 10_000

  @typedoc """
  Configuration for a `Spear.Connection`.
  """
  @typedoc since: "0.2.0"
  @type t :: %__MODULE__{
          scheme: :http | :https,
          host: Mint.Types.address(),
          port: :inet.port_number(),
          tls?: boolean(),
          username: String.t() | nil,
          password: String.t() | nil,
          keep_alive_interval: pos_integer() | false,
          keep_alive_timeout: pos_integer() | false,
          mint_opts: Keyword.t(),
          valid?: boolean(),
          errors: Keyword.t(),
          read_only?: boolean()
        }

  defstruct scheme: :http,
            host: "localhost",
            port: 2113,
            tls?: false,
            username: "admin",
            password: "<PASSWORD>",
            keep_alive_interval: 10_000,
            keep_alive_timeout: 10_000,
            mint_opts: [],
            valid?: true,
            errors: [],
            read_only?: false

  @doc false
  def credentials(%__MODULE__{username: username, password: password}) do
    {username, password}
  end

  @doc """
  Parses configuration from a keyword list

  This function is used internally by `Spear.Connection` when connecting.
  """
  @doc since: "0.2.0"
  @spec new(Keyword.t()) :: t()
  def new(opts) when is_list(opts) do
    config =
      opts
      |> Keyword.get(:connection_string)
      |> from_connection_string()
      |> Keyword.merge(opts)
      |> override_mint_opts()
      |> set_scheme()

    struct(__MODULE__, config)
    |> validate()
  end

  defp from_connection_string(connection_string) when is_binary(connection_string) do
    uri = parse_uri(connection_string)
    tls? = tls?(uri)
    {username, password} = parse_credentials(uri)

    [
      host: uri.host,
      port: uri.port,
      tls?: tls?,
      username: username,
      password: password,
      keep_alive_interval: keep_alive_interval(uri),
      keep_alive_timeout: keep_alive_timeout(uri)
    ]
  end

  defp from_connection_string(_), do: []

  defp parse_uri(connection_string) do
    uri = URI.parse(connection_string)

    %URI{uri | query: URI.decode_query(uri.query || "")}
  end

  defp tls?(%URI{query: %{"tls" => "true"}}), do: true
  defp tls?(_), do: false

  defp keep_alive_interval(uri), do: keep_alive_value(uri, "keepAliveInterval")

  defp keep_alive_timeout(uri), do: keep_alive_value(uri, "keepAliveTimeout")

  defp keep_alive_value(uri, key) do
    with {:ok, value_str} <- Map.fetch(uri.query, key),
         {value, ""} <- Integer.parse(value_str),
         value when value >= @default_keepalive <- value do
      value
    else
      -1 ->
        false

      value when value in 0..@default_keepalive ->
        Logger.warn("Specified #{key} of #{value} is less than recommended 10_000ms")

        value

      value when is_integer(value) and value < -1 ->
        # will get picked up by validation
        value

      _ ->
        @default_keepalive
    end
  end

  defp parse_credentials(uri) do
    with userinfo when is_binary(userinfo) <- uri.userinfo,
         [username, password] <- String.split(userinfo, ":") do
      {username, password}
    else
      _ -> {nil, nil}
    end
  end

  defp override_mint_opts(opts) do
    mint_opts =
      opts
      |> Keyword.get(:mint_opts, [])
      |> Keyword.merge(@default_mint_opts)

    Keyword.merge(opts, mint_opts: mint_opts)
  end

  defp set_scheme(opts) do
    Keyword.put(opts, :scheme, if(opts[:tls?], do: :https, else: :http))
  end

  defp validate(%__MODULE__{} = config) do
    errors =
      config
      |> Map.from_struct()
      |> Enum.reduce([], &validate/2)

    %__MODULE__{config | errors: errors, valid?: errors == []}
  end

  defp validate({:keep_alive_interval = key, value}, errors)
       when is_integer(value) and value <= 0 do
    [{key, "keepAliveInterval must be greater than 0"} | errors]
  end

  defp validate({:keep_alive_timeout = key, value}, errors)
       when is_integer(value) and value <= 0 do
    [{key, "keepAliveTimeout must be greater than 0"} | errors]
  end

  defp validate({:port = key, value}, errors)
       when not is_integer(value) or value not in 1..65_535 do
    [{key, "#{inspect(value)} is not a valid port number"} | errors]
  end

  defp validate({_k, _v}, errors), do: errors
end
lib/spear/connection/configuration.ex
0.86306
0.52007
configuration.ex
starcoder
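Feeding a connection string through the parser above (the host, credentials, and interval are placeholders):

config =
  Spear.Connection.Configuration.new(
    connection_string: "esdb://admin:changeit@localhost:2113?tls=true&keepAliveInterval=15000"
  )

config.tls?
#=> true

config.keep_alive_interval
#=> 15000

config.valid?
#=> true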
defmodule Serum.HeaderParser do
  @moduledoc false

  _moduledocp = """
  This module takes care of parsing headers of page (or post) source files.

  Header is where all page or post metadata goes into, and has the following
  format:

  ```
  ---
  key: value
  ...
  ---
  ```

  where `---` in the first and last line delimits the beginning and the end of
  the header area, and between these two lines are one or more key-value pairs
  delimited by a colon, where key is the name of a metadata field and value is
  its actual value.
  """

  alias Serum.HeaderParser.ValueTransformer

  @type options :: [{atom, value_type}]
  @type value_type :: :string | :integer | :datetime | {:list, value_type}
  @type value :: binary | integer | DateTime.t() | [binary] | [integer] | [DateTime.t()]
  @type parse_result :: {:ok, {map(), map(), binary()}} | {:invalid, binary()}

  @typep extract_ok :: {:ok, [binary], binary}
  @typep extract_err :: {:error, binary}

  @doc """
  Reads lines from a binary `data` and extracts the header into a map.

  `options` is a keyword list which specifies the name and type of metadata the
  header parser expects. So the typical `options` should look like this:

      [key1: type1, key2: type2, ...]

  See "Types" section for available value types.

  `required` argument is a list of required keys (in atom). If the header
  parser cannot find required keys in the header area, it returns an error.

  ## Types

  Currently the HeaderParser supports following types:

  * `:string` - A line of string. It can contain spaces.
  * `:integer` - A decimal integer.
  * `:datetime` - Date and time. Must be specified in the format of
    `YYYY-MM-DD hh:mm:ss`. This data will be interpreted as a local time.
  * `{:list, <type>}` - A list of multiple values separated by commas. Every
    value must have the same type, either `:string`, `:integer`, or
    `:datetime`. You cannot make a list of lists.
  """
  @spec parse_header(binary(), options(), [atom()]) :: parse_result()
  def parse_header(data, options, required \\ []) do
    case extract_header(data, [], false) do
      {:ok, header_lines, rest_data} ->
        key_strings = options |> Keyword.keys() |> Enum.map(&to_string/1)
        req_strings = Enum.map(required, &to_string/1)

        kv_lists =
          header_lines
          |> Enum.map(&split_kv/1)
          |> Enum.group_by(&(elem(&1, 0) in key_strings))

        %{
          true: accepted_kv,
          false: extra_kv
        } = Map.merge(%{true: [], false: []}, kv_lists)

        with [] <- find_missing(accepted_kv, req_strings),
             {:ok, parsed} <- transform_values(accepted_kv, options, []) do
          extras =
            Enum.map(extra_kv, fn {k, v} ->
              {k, ValueTransformer.transform_value(k, v, :string)}
            end)

          {:ok, {Map.new(parsed), Map.new(extras), rest_data}}
        else
          error -> handle_error(error)
        end

      error ->
        handle_error(error)
    end
  end

  @spec extract_header(binary, [binary], boolean) :: extract_ok | extract_err
  defp extract_header(data, acc, open?)

  defp extract_header(data, acc, false) do
    case String.split(data, ~r/\r?\n/, parts: 2) do
      ["---", rest] -> extract_header(rest, acc, true)
      [line, rest] when is_binary(line) -> extract_header(rest, acc, false)
      [_] -> {:error, "header not found"}
    end
  end

  defp extract_header(data, acc, true) do
    case String.split(data, ~r/\r?\n/, parts: 2) do
      ["---", rest] -> {:ok, acc, rest}
      [line, rest] when is_binary(line) -> extract_header(rest, [line | acc], true)
      [_] -> {:error, "encountered unexpected end of file"}
    end
  end

  @spec split_kv(binary) :: {binary, binary}
  defp split_kv(line) do
    line
    |> String.split(":", parts: 2)
    |> Enum.map(&String.trim/1)
    |> case do
      [k] -> {k, ""}
      [k, v] -> {k, v}
    end
  end

  @spec find_missing([{binary(), binary()}], [binary()]) :: [binary()]
  defp find_missing(kv_list, req_strings) do
    kv_list
    |> Enum.map(&elem(&1, 0))
    |> do_find_missing(req_strings)
  end

  @spec do_find_missing([binary], [binary], [binary]) :: [binary]
  defp do_find_missing(keys, required, acc \\ [])
  defp do_find_missing(_keys, [], acc), do: acc

  defp do_find_missing(keys, [h | t], acc) do
    if h in keys do
      do_find_missing(keys, t, acc)
    else
      do_find_missing(keys, t, [h | acc])
    end
  end

  @spec transform_values([{binary, binary}], keyword(atom), keyword(value)) ::
          {:error, binary} | {:ok, keyword(value)}
  defp transform_values([], _options, acc) do
    {:ok, acc}
  end

  defp transform_values([{k, v} | rest], options, acc) do
    atom_k = String.to_existing_atom(k)

    case ValueTransformer.transform_value(k, v, options[atom_k]) do
      {:error, _} = error -> error
      value -> transform_values(rest, options, [{atom_k, value} | acc])
    end
  end

  @spec handle_error(term) :: {:invalid, binary()}
  defp handle_error(term)

  defp handle_error([missing]) do
    {:invalid, "`#{missing}` is required, but it's missing"}
  end

  defp handle_error([_ | _] = missing) do
    repr = missing |> Enum.map(&"`#{&1}`") |> Enum.reverse() |> Enum.join(", ")

    {:invalid, "#{repr} are required, but they are missing"}
  end

  defp handle_error({:error, error}) do
    {:invalid, "header parse error: #{error}"}
  end
end
lib/serum/header_parser.ex
0.840029
0.890865
header_parser.ex
starcoder
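A sketch of the header format the parser above consumes. The exact parsed values depend on `Serum.HeaderParser.ValueTransformer`, which is defined elsewhere, so the result shown is the expected shape rather than a verified doctest:

data = """
---
title: Hello
tags: a, b
---
body text
"""

Serum.HeaderParser.parse_header(data, [title: :string, tags: {:list, :string}], [:title])
#=> {:ok, {%{title: "Hello", tags: ["a", "b"]}, %{}, "body text\n"}}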
defmodule Stargate.Producer.Acknowledger do
  @moduledoc """
  By default, `Stargate.produce/2` will block the calling process until
  acknowledgement is received from Pulsar that the message was successfully
  produced. This can optionally switch to an asynchronous acknowledgement by
  passing an MFA tuple to `Stargate.produce/3`.

  This module defines a GenServer process that works in tandem with a producer
  websocket connection. It waits for receipt acknowledgements from produce
  operations and sends them to the calling process, or otherwise performs
  asynchronous acknowledgement operations.
  """
  require Logger
  use GenServer

  import Stargate.Supervisor, only: [via: 2]

  @doc """
  Sends a message to the acknowledger process to perform the ack operation
  saved for that particular message (as identified by the context sent with
  the message).
  """
  @spec ack(GenServer.server(), {:ack, term()} | {:error, term(), term()}) :: :ok
  def ack(acknowledger, response), do: GenServer.cast(acknowledger, response)

  @doc """
  Called by the producer when a message is produced to the Pulsar cluster.

  This function sends a message's context and the desired operation to perform
  for acknowledgement to the Acknowledger process to save in its state and act
  on when directed to acknowledge that message.

  Unless instructed otherwise by calling `Stargate.produce/3`,
  `Stargate.produce/2` assumes the third argument to be the PID of the calling
  process to send receipt confirmation and unblock.
  """
  @spec produce(GenServer.server(), String.t(), pid() | tuple()) :: :ok
  def produce(acknowledger, ctx, ack), do: GenServer.cast(acknowledger, {:produce, ctx, ack})

  @doc """
  Starts a `Stargate.Producer.Acknowledger` process and links it to the calling process.
  """
  @spec start_link(keyword()) :: GenServer.on_start()
  def start_link(init_args) do
    registry = Keyword.fetch!(init_args, :registry)
    persistence = Keyword.get(init_args, :persistence, "persistent")
    tenant = Keyword.fetch!(init_args, :tenant)
    ns = Keyword.fetch!(init_args, :namespace)
    topic = Keyword.fetch!(init_args, :topic)

    GenServer.start_link(__MODULE__, init_args,
      name: via(registry, {:producer_ack, "#{persistence}", "#{tenant}", "#{ns}", "#{topic}"})
    )
  end

  @impl GenServer
  def init(_init_args) do
    {:ok, %{}}
  end

  @impl GenServer
  def handle_cast({:produce, ctx, ack}, state) do
    {:noreply, Map.put(state, ctx, ack)}
  end

  @impl GenServer
  def handle_cast({:ack, ctx}, state) do
    {value, new_state} = Map.pop(state, ctx)

    case value do
      pid when is_pid(pid) -> send(pid, :ack)
      {module, function, args} -> apply(module, function, args)
    end

    {:noreply, new_state}
  end

  @impl GenServer
  def handle_cast({:error, reason, ctx}, state) do
    {value, new_state} = Map.pop(state, ctx)

    case value do
      pid when is_pid(pid) ->
        send(pid, {:error, reason})

      _mfa ->
        Logger.error("Failed to execute produce for reason : #{inspect(reason)}")
    end

    {:noreply, new_state}
  end
end
lib/stargate/producer/acknowledger.ex
0.85166
0.404096
acknowledger.ex
starcoder
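How a producer and this acknowledger interact, sketched with the default PID-style ack. `ack_server` is assumed to be an already-started `Stargate.Producer.Acknowledger` and the context string is made up:

alias Stargate.Producer.Acknowledger

# The producer registers the message context with the caller's pid...
:ok = Acknowledger.produce(ack_server, "ctx-123", self())

# ...and when the websocket relays Pulsar's receipt, the caller is unblocked.
:ok = Acknowledger.ack(ack_server, {:ack, "ctx-123"})

receive do
  :ack -> :confirmed
end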
defmodule ArrowWeb.DisruptionView.DaysOfWeek do
  @moduledoc "Handles the display of disruption `days_of_week`."

  alias Arrow.Disruption.DayOfWeek

  @doc "Describes each day-of-week of a disruption and its time period."
  @spec describe([DayOfWeek.t()]) :: [{String.t(), String.t()}]
  def describe(days_of_week) when is_list(days_of_week) do
    days_of_week
    |> Enum.sort_by(&day_number/1)
    |> Enum.map(&describe_day(&1, :long))
  end

  @doc """
  Summarizes the days-of-week of a disruption and their time periods.

  Each item in the returned list is a description of a single day or set of
  consecutive days. The items are themselves lists, to suggest where e.g. line
  breaks might be placed between the "days" and "times" part of the description
  (if these are separate).
  """
  @spec summarize([DayOfWeek.t()]) :: [[String.t()]]
  def summarize([day_of_week]), do: [summarize_day(day_of_week)]

  def summarize(days_of_week) when is_list(days_of_week) do
    sorted_days = Enum.sort_by(days_of_week, &day_number/1)
    days_are_consecutive = consecutive?(sorted_days)

    cond do
      days_are_consecutive and same_times?(sorted_days) ->
        [describe_days_with_same_times(hd(sorted_days), List.last(sorted_days))]

      days_are_consecutive and contiguous_times?(sorted_days) ->
        [describe_days_with_contiguous_times(hd(sorted_days), List.last(sorted_days))]

      true ->
        Enum.map(sorted_days, &summarize_day/1)
    end
  end

  defp consecutive?(sorted_days) do
    sorted_days
    |> Enum.map(&day_number/1)
    |> Enum.chunk_every(2, 1, :discard)
    |> Enum.all?(fn [a, b] -> a + 1 == b end)
  end

  defp contiguous_times?(sorted_days) do
    sorted_days
    |> Enum.chunk_every(2, 1, :discard)
    |> Enum.all?(fn
      [%{end_time: nil}, %{start_time: nil}] -> true
      _ -> false
    end)
  end

  defp day_number(%{day_name: day_name}), do: day_number(day_name)
  defp day_number("monday"), do: 0
  defp day_number("tuesday"), do: 1
  defp day_number("wednesday"), do: 2
  defp day_number("thursday"), do: 3
  defp day_number("friday"), do: 4
  defp day_number("saturday"), do: 5
  defp day_number("sunday"), do: 6

  defp describe_day(%{day_name: day_name, start_time: start_time, end_time: end_time}, format) do
    {format_day(day_name, format), describe_times(start_time, end_time)}
  end

  defp describe_days_with_contiguous_times(
         %{day_name: first_day, start_time: start_time},
         %{day_name: last_day, end_time: end_time}
       ) do
    from = format_day(first_day, :short) <> " " <> describe_start_time(start_time)
    to = format_day(last_day, :short) <> " " <> describe_end_time(end_time)

    [from <> " – " <> to]
  end

  defp describe_days_with_same_times(
         %{day_name: first_day, start_time: start_time, end_time: end_time},
         %{day_name: last_day}
       ) do
    [
      format_day(first_day, :short) <> " – " <> format_day(last_day, :short),
      describe_times(start_time, end_time)
    ]
  end

  defp describe_times(start_time, end_time) do
    describe_start_time(start_time) <> " – " <> describe_end_time(end_time)
  end

  defp describe_end_time(time), do: format_time(time, "End of service")
  defp describe_start_time(time), do: format_time(time, "Start of service")

  defp format_day(day_name, :long), do: String.capitalize(day_name)
  defp format_day(day_name, :short), do: day_name |> String.slice(0..2) |> String.capitalize()

  defp format_time(%Time{} = time, _fallback), do: Calendar.strftime(time, "%-I:%M%p")
  defp format_time(nil, fallback), do: fallback

  defp same_times?([%{start_time: first_start, end_time: first_end} | _] = days_of_week) do
    Enum.all?(days_of_week, fn
      %{start_time: ^first_start, end_time: ^first_end} -> true
      _ -> false
    end)
  end

  defp summarize_day(day_of_week), do: day_of_week |> describe_day(:short) |> Tuple.to_list()
end
lib/arrow_web/views/disruption_view/days_of_week.ex
0.890109
0.623205
days_of_week.ex
starcoder
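A quick usage sketch for the module above. The plain maps below are hypothetical stand-ins for `Arrow.Disruption.DayOfWeek` structs, assuming only the `day_name`, `start_time` and `end_time` fields the functions pattern-match on:

days = [
  %{day_name: "tuesday", start_time: ~T[09:00:00], end_time: ~T[17:00:00]},
  %{day_name: "monday", start_time: ~T[09:00:00], end_time: ~T[17:00:00]}
]

ArrowWeb.DisruptionView.DaysOfWeek.describe(days)
#=> [{"Monday", "9:00AM – 5:00PM"}, {"Tuesday", "9:00AM – 5:00PM"}]

ArrowWeb.DisruptionView.DaysOfWeek.summarize(days)
#=> [["Mon – Tue", "9:00AM – 5:00PM"]]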
defmodule List do @moduledoc """ Implements functions that only make sense for lists and cannot be part of the Enum protocol. In general, favor using the Enum API instead of List. A decision was taken to delegate most functions to Erlang's standard lib but following Elixir's convention of receiving the target (in this case, a list) as the first argument. """ @doc """ Given a list of lists, concatenates the sublists into a single list. ## Examples List.concat [[1,[2],3], [4], [5,6]] #=> [1,[2],3,4,5,6] """ def concat(list) when is_list(list) do Erlang.lists.append(list) end @doc """ Concatenates the list on the right with the list on the left. This function produces the same result as the `++` operator. The only difference is a minor optimization: when the first list contains only one element, we simply add it as a head to the second list. ## Examples List.concat [1,2,3], [4,5,6] #=> [1,2,3,4,5,6] """ def concat(list, elements) when is_list(list) and is_list(elements) do list ++ elements end @doc """ Deletes the given item from the list. Returns a list without the item. If the item occurs more than once in the list, just the first occurrence is removed. ## Examples List.delete([1,2,3], 1) #=> [2,3] """ def delete(list, item) do Erlang.lists.delete(item, list) end @doc """ Flattens the given `list` of nested lists. An optional tail can be given that will be added at the end of the flattened list. ## Examples List.flatten [1,[[2],3]] #=> [1,2,3] List.flatten [1,[[2],3]], [4,5] #=> [1,2,3,4,5] """ def flatten(list) do Erlang.lists.flatten(list) end def flatten(list, tail) do Erlang.lists.flatten(list, tail) end @doc """ Folds (reduces) the given list to the left with a function. Requires an accumulator. ## Examples List.foldl [5,5], 10, fn x, acc -> x + acc end #=> 20 List.foldl [1,2,3,4], 0, fn x, acc -> x - acc end #=> 2 """ def foldl(list, acc, function) when is_list(list) and is_function(function) do Erlang.lists.foldl(function, acc, list) end @doc """ Folds (reduces) the given list to the right with a function. Requires an accumulator. ## Examples List.foldr [1,2,3,4], 0, fn x, acc -> x - acc end #=> -2 """ def foldr(list, acc, function) when is_list(list) and is_function(function) do Erlang.lists.foldr(function, acc, list) end @doc """ Reverses the given list. This function simply delegates to `lists:reverse` which is implemented in C for performance. ## Examples List.reverse [1,2,3] #=> [3,2,1] """ def reverse(list) do :lists.reverse(list) end @doc """ Returns the last element in `list` or nil if the `list` is empty. ## Examples List.last [] #=> nil List.last [1] #=> 1 List.last [1, 2, 3] #=> 3 """ def last([]), do: nil def last(list) do :lists.last(list) end @doc """ Checks if the given `term` is included in the list. This function simply delegates to `lists:member` which is implemented in C for performance. ## Examples List.member? [1,2,3], 1 #=> true List.member? [1,2,3], 0 #=> false """ def member?(list, term) do Erlang.lists.member(term, list) end @doc """ Receives a list of tuples and returns the first tuple where the item at position `position` matches with the given `item`. ## Examples List.keyfind([a: 1, b: 2], :a, 1) #=> { :a, 1 } List.keyfind([a: 1, b: 2], 2, 2) #=> { :b, 2 } List.keyfind([a: 1, b: 2], :c, 1) #=> nil """ def keyfind(list, item, position, default // nil) do Erlang.lists.keyfind(item, position, list) || default end @doc """ Receives a list of tuples and returns true if there is a tuple where the item at position `position` matches with the given `item`.
## Examples List.keymember?([a: 1, b: 2], :a, 1) #=> true List.keymember?([a: 1, b: 2], 2, 2) #=> true List.keymember?([a: 1, b: 2], :c, 1) #=> false """ def keymember?(list, item, position) do Erlang.lists.keymember(item, position, list) end @doc """ Receives a list of tuples and deletes the first tuple where the item at position `position` matches with the given `item`. Returns the new list. ## Examples List.keydelete([a: 1, b: 2], :a, 1) #=> [{ :b, 2 }] List.keydelete([a: 1, b: 2], 2, 2) #=> [{ :a, 1 }] List.keydelete([a: 1, b: 2], :c, 1) #=> [{ :a, 1 }, { :b, 2 }] """ def keydelete(list, item, position) do Erlang.lists.keydelete(item, position, list) end @doc """ Returns a list of integers in the given range (both ends included when possible). An optional step can be provided as well (defaults to 1). If first > last and no step is provided, the numbers will be in descending order. ## Examples List.range 1, 3 #=> [1,2,3] List.range 1, 8, 2 #=> [1,3,5,7] List.range 1, 0 #=> [] List.range 3, 1 #=> [3,2,1] List.range 5, 1, -2 #=> [5, 3, 1] """ def range(first, last, step // nil) def range(first, last, step) when is_integer(first) and is_integer(last) and first <= last do case step do nil -> Erlang.lists.seq(first, last, 1) x when x < 0 -> [] _ -> Erlang.lists.seq(first, last, step) end end def range(first, last, step) when is_integer(first) and is_integer(last) and first > last do case step do nil -> Erlang.lists.seq(first, last, -1) x when x > 0 -> [] _ -> Erlang.lists.seq(first, last, step) end end @doc """ Sorts the list by comparing each term. For an alternative sorting algorithm, check `Enum.qsort`. ## Examples List.sort [3, 4, 2, 1, 7] #=> [1, 2, 3, 4, 7] """ def sort(list) do :lists.sort list end @doc """ Sorts the list according to an ordering function. fun(a, b) should return true if `a` compares less than or equal to `b`, `false` otherwise. ## Examples List.sort [3, 4, 2, 1, 7], fn a, b -> b <= a end #=> [7, 4, 3, 2, 1] """ def sort(list, fun) do :lists.sort fun, list end @doc """ Returns a list without duplicated items. ## Examples List.uniq [1,2,3,2,1] #=> [1,2,3] """ def uniq(list) when is_list(list) do do_uniq(list, []) end @doc """ Duplicates the given element n times in a list. ## Examples List.duplicate "hello", 3 #=> ["hello","hello","hello"] List.duplicate [1,2], 2 #=> [[1,2],[1,2]] """ def duplicate(elem, n) do Erlang.lists.duplicate(n, elem) end @doc """ Wraps the argument in a list. If the argument is already a list, returns the list. If the argument is nil, returns an empty list. ## Examples List.wrap [1,2,3] #=> [1,2,3] """ def wrap(list) when is_list(list) do list end def wrap(nil) do [] end def wrap(other) do [other] end @doc """ Zips corresponding elements from two lists (or tuples) into one list of tuples. The number of elements in the resulting list is equal to the length of the shortest list among the given ones. ## Examples List.zip [1, 2, 3], [4, 5, 6] #=> [{1, 4}, {2, 5}, {3, 6}] List.zip [1, 2], [4, 5, 6] #=> [{1, 4}, {2, 5}] """ def zip(item1, item2) do do_zip(to_list(item1), to_list(item2), []) end @doc """ Zips corresponding elements from each list in `list_of_lists`. ## Examples List.zip [[1, 2], [3, 4], [5, 6]] #=> [{1, 3, 5}, {2, 4, 6}] List.zip [[1, 2], [3], [5, 6]] #=> [{1, 3, 5}] """ def zip(list_of_lists) when is_list(list_of_lists) do do_zip(list_of_lists, []) end @doc """ Unzips the given list of lists or tuples into separate lists and returns a list of lists.
## Examples List.unzip [{1, 2}, {3, 4}] #=> [[1, 3], [2, 4]] List.unzip [{1, :a, "apple"}, {2, :b, "banana"}, {3, :c}] #=> [[1, 2, 3], [:a, :b, :c]] """ def unzip(list) when is_list(list) do :lists.map tuple_to_list(&1), zip(list) end ## Private # uniq defp do_uniq([h|t], acc) do case Erlang.lists.member(h, acc) do true -> do_uniq(t, acc) false -> [h|do_uniq(t, [h|acc])] end end defp do_uniq([], _acc) do [] end # zip defp do_zip([h1|t1], [h2|t2], acc) do do_zip t1, t2, [{h1, h2}|acc] end defp do_zip(_, _, acc) do reverse acc end defp do_zip(list, acc) do converter = fn x, acc -> do_zip_each(to_list(x), acc) end {mlist, heads} = :lists.mapfoldl converter, [], list case heads do nil -> :lists.reverse acc _ -> do_zip mlist, [list_to_tuple(:lists.reverse(heads))|acc] end end defp do_zip_each(_, nil) do { nil, nil } end defp do_zip_each([h|t], acc) do { t, [h|acc] } end defp do_zip_each([], _) do { nil, nil } end defp to_list(tuple) when is_tuple(tuple), do: tuple_to_list(tuple) defp to_list(list) when is_list(list), do: list end
lib/elixir/lib/list.ex
0.842798
0.691972
list.ex
starcoder
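A few illustrative calls against the module above, taken from its own docstrings. The archaic `Erlang.lists` calls and `//` default-argument syntax place this in a very early Elixir, so these examples assume an interpreter of the same vintage:

List.range 1, 8, 2                 #=> [1, 3, 5, 7]
List.keyfind [a: 1, b: 2], :b, 1   #=> { :b, 2 }
List.zip [[1, 2], [3, 4], [5, 6]]  #=> [{1, 3, 5}, {2, 4, 6}]
List.uniq [1, 2, 3, 2, 1]          #=> [1, 2, 3]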
defmodule Multiaddr.Codec do @moduledoc false import Multiaddr.Utils.Varint import Multiaddr.Utils alias Multiaddr.Protocol, as: Prot defp list_protocols(bytes, protocols) when bytes == <<>> do {:ok, protocols} end defp list_protocols(bytes, protocols) when is_binary(bytes) do case read_raw_protocol(bytes) do {:ok, {next_index, protocol, _raw_value}} -> list_protocols(split_binary(bytes, next_index..-1), protocols ++ [protocol]) error -> error end end def list_protocols(bytes) when is_binary(bytes) do list_protocols(bytes, []) end def validate_bytes(bytes) when bytes == <<>> do {:ok, bytes} end def validate_bytes(bytes) when is_binary(bytes) do case read_raw_protocol(bytes) do {:ok, {next_index, _protocol, _raw_value}} -> validate_bytes(split_binary(bytes, next_index..-1)) error -> error end end def string_to_bytes(string) when is_binary(string) do string = String.trim_trailing(string, "/") split_string = String.split(string, "/") case Enum.fetch(split_string, 0) do {:ok, ""} -> string_to_bytes(Enum.slice(split_string, 1..-1), <<>>) {:ok, first_string} -> {:error, {:invalid_string, "Address first character must be '/' not '#{first_string}'"}} end end defp string_to_bytes(string_split, bytes) when string_split == [] and bytes == "" do {:error, {:invalid_string, "Invalid empty address string"}} end defp string_to_bytes(string_split, bytes) when string_split == [] and is_binary(bytes) do {:ok, bytes} end defp string_to_bytes(string_split, bytes) when is_list(string_split) and is_binary(bytes) do with {:ok, protocol_name} <- Enum.fetch(string_split, 0), {:ok, protocol} <- Map.fetch(Prot.protocols_by_name(), protocol_name), string_split <- Enum.slice(string_split, 1..-1), {:ok, {next_index, protocol_bytes}} <- get_protocol_value(protocol, string_split) do bytes = bytes <> protocol.vcode <> protocol_bytes string_to_bytes(Enum.slice(string_split, next_index..-1), bytes) else :error -> {:error, {:invalid_protocol_name, Enum.fetch(string_split, 0)}} error -> error end end def bytes_to_string(bytes) when is_binary(bytes) do bytes_to_string(bytes, "") end defp bytes_to_string(bytes, string) when bytes == <<>> and string == "" do {:error, {:invalid_bytes, "Invalid empty address bytes"}} end defp bytes_to_string(bytes, string) when bytes == <<>> and is_binary(string) do {:ok, string} end defp bytes_to_string(bytes, string) when is_binary(bytes) and is_binary(string) do with {:ok, {next_index, protocol, value}} <- read_protocol(bytes) do string = if protocol.size == 0 do string <> "/" <> protocol.name else string <> "/" <> protocol.name <> "/" <> value end bytes_to_string(split_binary(bytes, next_index..-1), string) end end def find_sub_multiaddr(maddr_bytes, sub_maddr_bytes) when is_binary(maddr_bytes) and is_binary(sub_maddr_bytes) do find_sub_multiaddr(maddr_bytes, sub_maddr_bytes, 0) end # This empty-bytes clause must come before the general clause below, whose guard also matches "" and would otherwise make it unreachable. defp find_sub_multiaddr(maddr_bytes, sub_maddr_bytes, _index) when maddr_bytes == "" do case validate_bytes(sub_maddr_bytes) do {:ok, _bytes} -> {:error, {:not_found, "Not found encapsulated multiaddr"}} _error ->
{:error, {:invalid_bytes, "Invalid encapsulated Multiaddr bytes"}} end end defp find_sub_multiaddr(maddr_bytes, sub_maddr_bytes, index) when is_binary(maddr_bytes) and is_binary(sub_maddr_bytes) and is_integer(index) do if maddr_bytes == sub_maddr_bytes do {:ok, index} else case read_raw_protocol(maddr_bytes) do {:ok, {next_index, _protocol, _raw_value}} -> maddr_bytes = split_binary(maddr_bytes, next_index..-1) find_sub_multiaddr(maddr_bytes, sub_maddr_bytes, next_index) error = {:error, {:not_found, _value}} -> error {:error, reason} -> raise Multiaddr.Error, reason: reason end end end def find_protocol(bytes, %Prot{} = protocol) when is_binary(bytes) do find_protocol(bytes, protocol.code, 0) end defp find_protocol(bytes, code, _index) when bytes == "" do case Map.fetch(Prot.protocols_by_code(), code) do {:ok, protocol} -> {:error, {:not_found, "Not found protocol #{protocol.name}"}} :error -> {:error, {:invalid_protocol_code, "Invalid protocol code #{code}"}} end end defp find_protocol(bytes, code, index) when is_binary(bytes) and is_integer(code) and is_integer(index) do with {:ok, {next_index, protocol, value}} <- read_protocol(bytes) do if protocol.code == code do {:ok, index, value} else find_protocol(split_binary(bytes, next_index..-1), code, index + next_index) end else # If this fails, the Multiaddr bytes are corrupted error = {:error, {:not_found, _reason}} -> error {:error, reason} -> raise Multiaddr.Error, reason: reason error -> raise Multiaddr.Error, reason: {:unknown, "Unknown error: #{inspect(error)}"} end end defp read_protocol(bytes) when is_binary(bytes) do with {:ok, {next_index, protocol, raw_value}} <- read_raw_protocol(bytes) do value = if protocol.size > 0 do protocol.transcoder.bytes_to_string.(raw_value) else {:ok, raw_value} end case value do {:ok, protocol_value} -> {:ok, {next_index, protocol, protocol_value}} # It shouldn't fail because read_raw_protocol validates bytes. If it does, need to check transcoders error -> error end end end defp read_raw_protocol(bytes) when bytes == <<>> do {:error, {:invalid_bytes, "Tried to read empty bytes"}} end defp read_raw_protocol(bytes) when is_binary(bytes) do {:ok, {value_index, code}} = read_varint(bytes) bytes = split_binary(bytes, value_index..-1) with {:ok, protocol} <- Map.fetch(Prot.protocols_by_code(), code), {:ok, {next_index, size}} <- size_for_protocol(protocol, bytes), true <- byte_size(bytes) >= size, bytes = split_binary(bytes, next_index..-1), {:ok, protocol_bytes} = read_raw_protocol_value(protocol, bytes, size) do {:ok, {value_index + next_index + size, protocol, protocol_bytes}} else :error -> {:error, {:invalid_protocol_code, code}} false -> {:error, {:invalid_bytes, "Invalid protocol value: too short value for protocol #{Map.fetch!(Prot.protocols_by_code(), code).name}"}} error -> error end end defp read_raw_protocol_value(_protocol, bytes, size) when is_binary(bytes) and size == 0 do {:ok, ""} end defp read_raw_protocol_value(protocol, bytes, size) when is_binary(bytes) and is_integer(size) and size > 0 and byte_size(bytes) >= size do protocol_bytes = split_binary(bytes, 0..(size - 1)) case protocol.transcoder.validate_bytes.(protocol_bytes) do {:ok, _bytes} -> {:ok, protocol_bytes} error -> error end end defp get_protocol_value(%Prot{size: size} = _protocol, string_split) when size == 0 and is_list(string_split) do {:ok, {0, ""}} end defp get_protocol_value(%Prot{size: size, path: is_path?} = protocol, string_split) when size == :prefixed_var_size do {next_index, value_string} = if is_path? do {length(string_split), Enum.join(string_split, "/")} else {1, Enum.at(string_split, 0, "")} end with {:ok, protocol_bytes} <- protocol.transcoder.string_to_bytes.(value_string) do protocol_bytes = Varint.LEB128.encode(byte_size(protocol_bytes)) <> protocol_bytes {:ok, {next_index, protocol_bytes}} end end defp get_protocol_value(%Prot{size: size} = protocol, string_split) when size > 0 do with {:ok, protocol_bytes} <- protocol.transcoder.string_to_bytes.(Enum.at(string_split, 0, "")) do {:ok, {1, protocol_bytes}} end end end
lib/codec.ex
0.57821
0.441432
codec.ex
starcoder
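A rough round-trip sketch for the codec above. It assumes the protocol table in `Multiaddr.Protocol` defines `ip4` and `tcp` with the usual transcoders, which this module only references indirectly:

{:ok, bytes} = Multiaddr.Codec.string_to_bytes("/ip4/127.0.0.1/tcp/4001")
{:ok, "/ip4/127.0.0.1/tcp/4001"} = Multiaddr.Codec.bytes_to_string(bytes)
{:ok, protocols} = Multiaddr.Codec.list_protocols(bytes)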
defmodule Expression.DateHelpers do @moduledoc false import NimbleParsec def date_separator do choice([ string("-"), string("/") ]) end def us_date do integer(2) |> ignore(date_separator()) |> integer(2) |> ignore(date_separator()) |> integer(4) end def us_time do integer(2) |> ignore(string(":")) |> integer(2) |> optional(ignore(string(":"))) |> optional(integer(2)) end def us_datetime do us_date() |> ignore(string(" ")) |> concat(us_time()) end def iso_date do integer(4) |> ignore(date_separator()) |> integer(2) |> ignore(date_separator()) |> integer(2) end def iso_time do integer(2) |> ignore(string(":")) |> integer(2) |> ignore(string(":")) |> integer(2) |> ignore(optional(string("."))) |> optional(integer(min: 1)) |> optional( choice([ ignore(string("+")) |> integer(min: 1), string("Z") |> replace(0) ]) ) end def iso_datetime do iso_date() |> ignore(string("T")) |> concat(iso_time()) end def date do choice([ tag(us_date(), :us_format), tag(iso_date(), :iso_format) ]) |> reduce(:to_date) end def datetime do choice([ tag(us_datetime(), :us_format), tag(iso_datetime(), :iso_format) ]) |> reduce(:to_date) end def to_date(opts) do values = case opts do [iso_format: parsed_value] -> values = [:year, :month, :day, :hour, :minute, :second, :microsecond, :utc_offset] |> Enum.zip(parsed_value) {microseconds, values} = Keyword.pop(values, :microsecond, 0) microsecond_entry = {microseconds, microseconds |> to_string() |> String.length()} Keyword.put(values, :microsecond, microsecond_entry) [us_format: parsed_value] -> [:day, :month, :year, :hour, :minute, :second] |> Enum.zip(parsed_value) end fields = [ calendar: Calendar.ISO, hour: 0, minute: 0, second: 0, time_zone: "Etc/UTC", zone_abbr: "UTC", utc_offset: 0, std_offset: 0 ] |> Keyword.merge(values) struct(DateTime, fields) end end
lib/date_helpers.ex
0.705684
0.42668
date_helpers.ex
starcoder
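The functions above return NimbleParsec combinators rather than finished parsers, so a caller is expected to wrap them with `defparsec`. A minimal sketch (the wrapper module is hypothetical):

defmodule DateParser do
  import NimbleParsec

  # Build a parser entry point from the datetime combinator above.
  defparsec :parse_datetime, Expression.DateHelpers.datetime()
end

{:ok, [%DateTime{} = dt], _rest, _ctx, _line, _col} =
  DateParser.parse_datetime("2021-03-04T10:15:30Z")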
defmodule Astarte.Flow.Flows.DETSStorage do use GenServer alias Astarte.Flow.Config @behaviour Astarte.Flow.Flows.Storage @table_name :flows def start_link(args) do GenServer.start_link(__MODULE__, args, name: __MODULE__) end @impl true @doc "Return a list of all saved Flows in the form `{realm, %Flow{}}`" @spec get_all_flows :: [{realm :: String.t(), Astarte.Flow.Flows.Flow.t()}] def get_all_flows do match_pattern = {{:"$1", :_}, :"$2"} :dets.match(@table_name, match_pattern) |> Enum.map(fn [realm, flow] -> {realm, flow} end) end @impl true @doc "Insert a flow into the dets table" @spec insert_flow(realm :: String.t(), flow :: Astarte.Flow.Flows.Flow.t()) :: :ok | {:error, reason :: term()} def insert_flow(realm, flow) do # This must go through the process since only the owner can write to the table GenServer.call(__MODULE__, {:insert_flow, realm, flow}) end @impl true @doc "Delete a flow from the dets table" @spec delete_flow(realm :: String.t(), name :: String.t()) :: :ok | {:error, reason :: term()} def delete_flow(realm, name) do # This must go through the process since only the owner can write to the table GenServer.call(__MODULE__, {:delete_flow, realm, name}) end @impl true def init(_args) do file = Config.persistency_dir!() |> Path.expand() |> Path.join("flows") |> to_charlist() case :dets.open_file(@table_name, type: :set, file: file) do {:ok, table} -> {:ok, table} {:error, reason} -> {:stop, reason} end end @impl true def handle_call({:insert_flow, realm, flow}, _from, table) do entry = {{realm, flow.name}, flow} result = case :dets.insert_new(table, entry) do true -> :ok false -> {:error, :already_existing_flow} {:error, reason} -> {:error, reason} end {:reply, result, table} end def handle_call({:delete_flow, realm, name}, _from, table) do result = :dets.delete(table, {realm, name}) {:reply, result, table} end end
lib/astarte_flow/flows/dets_storage.ex
0.778902
0.44348
dets_storage.ex
starcoder
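Since only the owner process may write to a DETS table, the module above has to be running (typically under the application's supervisor) before the write functions are used. A sketch, where `flow` stands in for an `Astarte.Flow.Flows.Flow` struct with at least a `:name` field:

# In the supervision tree:
children = [Astarte.Flow.Flows.DETSStorage]

# From any process afterwards:
:ok = Astarte.Flow.Flows.DETSStorage.insert_flow("myrealm", flow)
flows = Astarte.Flow.Flows.DETSStorage.get_all_flows()
:ok = Astarte.Flow.Flows.DETSStorage.delete_flow("myrealm", flow.name)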
defmodule EdgeDB.Protocol.Codecs.Builtin.Array do use EdgeDB.Protocol.Codec import EdgeDB.Protocol.Types.{ ArrayElement, Dimension } alias EdgeDB.Protocol.{ Datatypes, Error, Types } @reserved0 0 @reserved1 0 @empty_list_iodata [ Datatypes.Int32.encode(0), Datatypes.Int32.encode(@reserved0), Datatypes.Int32.encode(@reserved1) ] defcodec(type: list()) @spec new(Datatypes.UUID.t(), list(integer()), Codec.t()) :: Codec.t() def new(type_id, dimensions, codec) do encoder = create_encoder(&encode_array(&1, dimensions, codec)) decoder = create_decoder(&decode_array(&1, dimensions, codec)) %Codec{ type_id: type_id, encoder: encoder, decoder: decoder, module: __MODULE__ } end @spec encode_array(t(), list(integer()), Codec.t()) :: iodata() def encode_array([], _dimensions, _codec) do @empty_list_iodata end def encode_array(instance, dimensions, codec) when is_list(instance) do if Keyword.keyword?(instance) do raise Error.invalid_argument_error( "unable to encode keyword list #{inspect(instance)} as array" ) end ndims = length(dimensions) calculated_dimensions = get_dimensions_for_list(ndims, instance) elements = encode_data_into_array_elements(instance, codec) [ Datatypes.Int32.encode(ndims), Datatypes.Int32.encode(@reserved0), Datatypes.Int32.encode(@reserved1), Types.Dimension.encode(calculated_dimensions, raw: true), Types.ArrayElement.encode(elements, raw: true) ] end @spec decode_array(bitstring(), list(integer()), Codec.t()) :: t() def decode_array(<<0::int32, _reserved0::int32, _reserved1::int32>>, _dimensions, _codec) do [] end def decode_array( <<ndims::int32, _reserved0::int32, _reserved1::int32, rest::binary>>, expected_dimensions, codec ) do {parsed_dimensions, rest} = Types.Dimension.decode(ndims, rest) if length(parsed_dimensions) != length(expected_dimensions) do raise Error.invalid_argument_error( "unable to decode binary data as array: parsed dimensions count don't match expected dimensions count" ) end elements_count = count_elements_in_array(parsed_dimensions) {raw_elements, <<>>} = Types.ArrayElement.decode(elements_count, rest) decode_array_elements_into_list(raw_elements, parsed_dimensions, codec) end defp encode_data_into_array_elements(list, codec) do Enum.map(list, fn element -> encoded_data = Codec.encode(codec, element) array_element(data: encoded_data) end) end defp decode_array_elements_into_list(elements, dimensions, codec) do elements |> Enum.into([], fn array_element(data: data) -> Codec.decode(codec, data) end) |> transform_in_dimensions(dimensions) end defp get_dimensions_for_list(1, list) do get_dimensions_for_list(0, [], [dimension(upper: length(list))]) end defp get_dimensions_for_list(ndims, list) do get_dimensions_for_list(ndims, list, []) end defp get_dimensions_for_list(0, [], dimensions) do dimensions end defp get_dimensions_for_list(ndims, [list | rest], dimensions) when is_list(list) do get_dimensions_for_list(ndims - 1, rest, [dimension(upper: length(list)) | dimensions]) end defp count_elements_in_array(dimensions) do Enum.reduce(dimensions, 0, fn dimension(upper: upper, lower: lower), acc -> acc + upper - lower + 1 end) end defp transform_in_dimensions(list, [dimension()]) do list end defp transform_in_dimensions(list, dimensions) do {list, []} = Enum.reduce(dimensions, {[], list}, fn dimension(upper: upper), {md_list, elements} -> {new_dim_list, rest} = Enum.split(elements, upper) {[new_dim_list | md_list], rest} end) Enum.reverse(list) end end
lib/edgedb/protocol/codecs/builtin/array.ex
0.663015
0.408749
array.ex
starcoder
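A hedged sketch of how the codec above composes: `new/3` wraps an element codec, and the resulting codec round-trips a flat list. In practice the type id, dimensions and inner codec come from the server's type descriptors, so `type_id` and `int64_codec` below are assumptions:

alias EdgeDB.Protocol.Codec

# Hypothetical: an array codec over a previously built scalar codec.
array_codec = EdgeDB.Protocol.Codecs.Builtin.Array.new(type_id, [3], int64_codec)

iodata = Codec.encode(array_codec, [1, 2, 3])
[1, 2, 3] = Codec.decode(array_codec, IO.iodata_to_binary(iodata))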
defmodule Gateway.Router.Portal.Commands.Handler.Wallet do @moduledoc ~S""" Processes the HTTP based requests and sends them to the correct handler. The handler or business logic is broken out of the HTTP request handling so API versions can change later on while still keeping backwards compatibility support if possible """ require Logger # ---------------------------------------------------------------------------- # Public Auth APIs # ---------------------------------------------------------------------------- @doc """ Basic setup. Nothing to do here """ @spec init :: :ok def init() do :ok end @doc """ Looks up your balance from the system. """ @spec balances(String.t()) :: {:ok, [map]} | {:error, String.t()} def balances(username) do getBalanceForUsername(username) end @doc """ Make a payment to a payID. NOTE: this implementation needs to be cleaned up a bit for the post based payments """ @spec payment(String.t(), String.t(), String.t(), String.t(), String.t(), String.t()) :: {:error, String.t()} | {:ok, String.t()} def payment(amt, type, username, payId, "direct", _type) do info = Blockchain.Ripple.PayID.format(username) |> Blockchain.Ripple.PayID.lookupAddress(type) case info do {:ok, from} -> case type do "XRP" -> # Lookup the address info case Storage.Wallet.XRP.query(from) do nil -> {:error, "wallet not found"} wallet -> # Now Let's lookup the receiver wallet address case Blockchain.Ripple.PayID.lookupAddress(payId, type) do {:ok, to} -> mnemonic = Storage.Wallet.XRP.mnemonic(wallet) # now let's pay this out amt = String.to_integer(amt) * 1_000_000 Blockchain.Ripple.XRP.pay("#{amt}", mnemonic, to) _ -> {:error, "could not find PayID #{payId}"} end end "ETH" -> case Storage.Wallet.Eth.query(from) do nil -> {:error, "wallet not found"} wallet -> # Now Let's lookup the receiver wallet address case Blockchain.Ripple.PayID.lookupAddress(payId, type) do {:ok, to} -> # now let's pay this out amt = String.to_integer(amt) Blockchain.Eth.pay("#{amt}", wallet.address, to, wallet.privatekey) _ -> {:error, "could not find PayID #{payId}"} end end "BTC" -> {:error, :not_implemented} end _ -> {:error, "could not get PayID wallet"} end end def payment(amt, type, username, payId, postId, "post") do # Let's look up the posting case Storage.Work.Posting.queryByPostId(postId) do nil -> {:error, "Invalid Posting ID"} post -> # Let's grab the game info from the posting gameId = post.game_id case Storage.Game.query(gameId) do nil -> {:error, "Invalid Game ID"} game -> {:ok, userAddress} = Blockchain.Ripple.PayID.format(username) |> Blockchain.Ripple.PayID.lookupAddress(type) {:ok, gameAddress} = Blockchain.Ripple.PayID.lookupAddress(game.pay_id, type) case type do "XRP" -> # Lets break out the "cuts" amt = String.to_integer(amt) * 1_000_000 feePercent = String.to_integer(game.fee) / 100.0 payPercent = 1.0 - feePercent fee = (feePercent * amt) |> round() |> Integer.to_string() pay = (payPercent * amt) |> round() |> Integer.to_string() # Lookup the address info case Storage.Wallet.XRP.query(userAddress) do nil -> {:error, "wallet not found"} userWallet -> # Now Let's lookup the receiver wallet address case Blockchain.Ripple.PayID.lookupAddress(payId, type) do {:ok, to} -> mnemonic = Storage.Wallet.XRP.mnemonic(userWallet) # Let's pay off the game in a different thread _ = Blockchain.Ripple.XRP.pay(fee, mnemonic, gameAddress) # But let's wait for this one to be done _ = Blockchain.Ripple.XRP.pay(pay, mnemonic, to) {:ok, "ok"} _ -> {:error, "could not find PayID #{payId}"} end end "ETH" -> {:error, :not_implemented} # case
Storage.Wallet.Eth.query(from) do # nil -> # {:error, "wallet not found"} # wallet -> # # Now Let's lookup the receiver wallet address # case Blockchain.Ripple.PayID.lookupAddress(payId, type) do # {:ok, to} -> # # now let's pay this out # amt = String.to_integer(amt) # Blockchain.Eth.pay("#{amt}", wallet.address, to, wallet.privatekey) # _ -> # {:error, "could not find PayID #{payId}"} # end # end "BTC" -> {:error, :not_implemented} end end end end def payment(amt, type, username, payId, postId, "guild") do # Let's look up the posting case Storage.Work.Posting.queryByPostId(postId) do nil -> {:error, "Invalid Posting ID"} post -> # Let's grab the game info from the posting gameId = post.game_id case Storage.Game.query(gameId) do nil -> {:error, "Invalid Game ID"} game -> {:ok, userAddress} = Blockchain.Ripple.PayID.format(username) |> Blockchain.Ripple.PayID.lookupAddress(type) {:ok, gameAddress} = Blockchain.Ripple.PayID.lookupAddress(game.pay_id, type) case type do "XRP" -> # Lets break out the "cuts" amt = String.to_integer(amt) * 1_000_000 feePercent = String.to_integer(game.fee) / 100.0 payPercent = 1.0 - feePercent fee = (feePercent * amt) |> round() |> Integer.to_string() pay = (payPercent * amt) |> round() |> Integer.to_string() # Lookup the address info case Storage.Wallet.XRP.query(userAddress) do nil -> {:error, "wallet not found"} userWallet -> # Now Let's lookup the receiver wallet address case Blockchain.Ripple.PayID.lookupAddress(payId, type) do {:ok, to} -> mnemonic = Storage.Wallet.XRP.mnemonic(userWallet) # Let's pay off the game in a different thread _ = Blockchain.Ripple.XRP.pay(fee, mnemonic, gameAddress) # But let's wait for this one to be done _ = Blockchain.Ripple.XRP.pay(pay, mnemonic, to) {:ok, "ok"} _ -> {:error, "could not find PayID #{payId}"} end end "ETH" -> {:error, :not_implemented} # case Storage.Wallet.Eth.query(from) do # nil -> # {:error, "wallet not found"} # wallet -> # # Now Let's lookup the receiver wallet address # case Blockchain.Ripple.PayID.lookupAddress(payId, type) do # {:ok, to} -> # # now let's pay this out # amt = String.to_integer(amt) # Blockchain.Eth.pay("#{amt}", wallet.address, to, wallet.privatekey) # _ -> # {:error, "could not find PayID #{payId}"} # end # end "BTC" -> {:error, :not_implemented} end end end end @doc """ Pay off the posting confirmation amount """ @spec payPostingConfirm(String.t(), String.t(), String.t()) :: {:error, any} | {:ok, String.t()} def payPostingConfirm(postId, username, payId) do case Storage.Work.Posting.queryByPostId(postId) do nil -> {:error, "post not found"} postT -> amt = Storage.Work.Posting.confirmPayAmt(postT) |> String.to_integer() payPostingAmount(username, postT, payId, "#{amt * 1_000_000}") end end @doc """ Pay off the posting complete amount """ @spec payPostingComplete(String.t(), String.t(), String.t()) :: {:error, any} | {:ok, String.t()} def payPostingComplete(postId, username, payId) do case Storage.Work.Posting.queryByPostId(postId) do nil -> {:error, "post not found"} postT -> amt = Storage.Work.Posting.completePayAmt(postT) |> String.to_integer() payPostingAmount(username, postT, payId, "#{amt * 1_000_000}") end end @doc """ Pay off the posting bonus amount """ @spec payPostingBonus(String.t(), String.t(), String.t()) :: {:error, any} | {:ok, String.t()} def payPostingBonus(postId, username, payId) do case Storage.Work.Posting.queryByPostId(postId) do nil -> {:error, "post not found"} postT -> amt = Storage.Work.Posting.bonusPayAmt(postT) |> String.to_integer()
payPostingAmount(username, postT, payId, "#{amt * 1_000_000}") end end # ---------------------------------------------------------------------------- # Private Helpers # ---------------------------------------------------------------------------- # Let's pay off a posting defp payPostingAmount(username, postT, payId, amt) do # Verify that the user owns the post if username == Storage.Work.Posting.userId(postT) do info = Blockchain.Ripple.PayID.format(username) |> Blockchain.Ripple.PayID.lookup() case info do {:ok, rsp} -> from = Map.get(rsp, "addressDetails") |> Map.get("address") # Lookup the address info case Storage.Wallet.XRP.query(from) do nil -> {:error, "wallet not found"} wallet -> # Now Let's lookup the receiver wallet address case Blockchain.Ripple.PayID.lookup(payId) do {:ok, toRsp} -> to = Map.get(toRsp, "addressDetails") |> Map.get("address") mnemonic = Storage.Wallet.XRP.mnemonic(wallet) # now let's pay this out Blockchain.Ripple.XRP.pay(amt, mnemonic, to) _ -> {:error, "could not find PayID #{payId}"} end end _ -> {:error, "could not get PayID wallet"} end else {:error, "incorrect user"} end end # Let's get the balances for these users defp getBalanceForUsername(username) do Blockchain.Ripple.PayID.format(username) |> Blockchain.walletBalances() end end
src/apps/gateway/lib/gateway/router/portal/commands/handler/wallet.ex
0.667581
0.428233
wallet.ex
starcoder
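A hedged invocation sketch for the handler above. Usernames and PayIDs are placeholders; for a direct payment the final argument is ignored, and amounts are whole units converted internally (XRP to drops):

alias Gateway.Router.Portal.Commands.Handler.Wallet

{:ok, _tx} = Wallet.payment("10", "XRP", "alice", "bob$payid.example.com", "direct", "")
{:ok, _balances} = Wallet.balances("alice")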
defmodule CssParser do import CssParser.File alias CssParser.Cache @moduledoc """ Provides css parsing in Elixir. CssParser was inspired by css.js (a lightweight, battle tested, fast, css parser in JavaScript). More information can be found at https://github.com/jotform/css.js. ### Adding CssParser To add CssParser to an application, add it to `deps` in the mix.exs file: ```elixir defp deps do [ {:css_parser, ">= 0.1.0"} ] end ``` ### Usage You can use CssParser either on the command line or in a module. #### On command line iex> CssParser.parse("h4, h3 {color: blue; font-size: 20px;}") [ %{ rules: "color: blue; font-size: 20px;", selectors: "h4, h3", type: "elements" } ] You can also parse css from a file as follows: iex> CssParser.parse("/path/to/file.css") #### In a module CssParser can be `alias`ed or `import`ed in a module: ```elixir defmodule MyMod do import CssParser def my_css_parser(css_string) do # use the imported `parse` function parse(css_string) end end ``` ### Recommendation Ensure your css is valid to get valid results. Garbage in (maybe) garbage out. Kindly suggest improvements. """ @doc """ Parses a css string to produce selectors, rules/descriptors and types. It first tries to remove css comments that might be in the css string. ### Examples iex> CssParser.parse("h4, h3 {color: blue; font-size: 20px;}") [ %{ rules: "color: blue; font-size: 20px;", selectors: "h4, h3", type: "elements" } ] You can also parse css from a file as follows: iex> CssParser.parse("/path/to/css/file.css") In case the file path is invalid you'll get a relevant message such as `No such file or directory`. """ @font_regex ~r/((?=@font-face)(.*?)(\s*\}))/s @media_regex ~r/((?=@media)(.*?)(\s*\}){2})/s @comment_regx ~r/(\/\*.*?\*\/)/ #~r/(\/*\*[\s\S]*?\*?\/*)|(\/\*.*?\*\/)/ @keyframe_regex ~r/(\s*(?=\@keyframes|@-webkit-keyframes)(.*?)(\s*\}){2}\s*)+/s @element_regex ~r/(?=@media|@keyframe|@-webkit-keyframes|@font-face)(.*?)(\s*\}){2}\s*/s @spec parse(binary()) :: [term()] | binary() def parse(string) when string in ["", nil], do: [] def parse(string) do if is_file?(string) do parse_from_file(string) else parse_css(string) end end defp parse_from_file(string) do case File.read(string) do {:ok, content} -> parse_css(content) {:error, reason} -> format(reason) end end defp parse_css(string) do hash_key = Cache.hash(string) case Cache.get(hash_key) do {:ok, parsed_data} -> parsed_data {:error, _} -> string |> drop_comments() |> tokenize() |> Cache.save(hash_key, returning: true) end end # tries to drop existing comments defp drop_comments(css_string) do String.split(css_string, "\n", trim: true) |> Enum.reduce([], fn line, acc -> str = if Regex.match?(@comment_regx, line) do String.replace(line, @comment_regx, "") else line end [ str | acc ] end) |> Enum.reverse() |> Enum.join() end # tokenizes css string into the various css selectors e.g.
@media, @font-face, @keyframes and elements defp tokenize(css) do media = Regex.scan(@media_regex, css) |> Enum.map(fn media -> %{"selector" => selector, "children" => children} = Regex.named_captures(~r/(?<selector>(@media)(.*?)(\)))(?<children>.*)/s, hd(media)) %{selectors: selector, children: parse_elements(children, :children), type: "media"} end) keyframes = Regex.scan(@keyframe_regex, css) |> Enum.map(fn keyframe -> [name | block] = String.split(hd(keyframe), ~r/(?={)/s, trim: true) %{selectors: name, rules: block, type: "keyframe"} end) font_faces = Regex.scan(@font_regex, css) |> Enum.map(fn font_face -> [name, descriptors] = String.split(hd(font_face), ~r/({)/s, trim: true) %{selectors: name, descriptors: descriptors, type: "font_face"} end) parse_elements(css, :root) ++ media ++ keyframes ++ font_faces end defp parse_elements(css, type) do # strip media-queries, keyframes and font-faces case type do :root -> String.split(css, @element_regex, trim: true) # |> IO.inspect() |> Enum.flat_map(fn rule -> Enum.map(String.split(rule, ~r/\s*\}\s*/, trim: true), fn rule -> do_parse_element(rule) end) end) :children -> Enum.map(String.split(css, ~r/\s*\}\s*/, trim: true), fn rule -> do_parse_element(rule) end) end # remove empty items |> Enum.reject(& &1 == %{}) end defp do_parse_element(el) do case String.split(el, ~r/\s*\{\s*/, trim: true) do [r | []] when r in ["", " ", " ", " ", " ", nil] -> %{} [selectors, rules] -> %{type: "elements", selectors: selectors, rules: rules} [universal_rules] -> %{type: "universal", selectors: "*", rules: universal_rules} end end @doc """ Converts a parsed css to binary #### After running: iex> parsed = CssParser.parse("h4, h3 {color: blue; font-size: 20px;}") #### You can then get a (formatted) string as follows: iex> CssParser.to_binary(parsed) "h4, h3 {\n\t\t\tcolor: blue; font-size: 20px;\r\t\t\s}" #### The function is especially useful if you need to modify the parsed css structure and then get back a binary. """ @spec to_binary([map()]) :: binary() def to_binary(parsed_css) do Enum.reduce(parsed_css, [], fn %{type: type, selectors: s} = parsed, acc -> case type do "elements" -> str = IO.iodata_to_binary([s, " {\n\t", parsed.rules, "\r}\n\n"]) [str | acc] "keyframe" -> [ IO.iodata_to_binary([s, parsed.rules, "\n\n"]) | acc ] "media" -> children = insert_media_children(parsed.children) str = IO.iodata_to_binary([s, " {\t", children, " \r}\n\n"]) [str | acc] "font_face" -> str = IO.iodata_to_binary([s, " {\t", parsed.descriptors, "\n\n"]) [str | acc] "universal" -> str = IO.iodata_to_binary([s, " {\t", parsed.rules, "\r}\n\n"]) [str | acc] end end) |> Enum.reverse() |> IO.iodata_to_binary() end defp insert_media_children(rules) do Enum.map(rules, fn %{rules: r, selectors: s} -> IO.iodata_to_binary(["\r\t", s, " {\n\t\t", r, "\r\t}"]) end) end end
lib/css_parser.ex
0.802517
0.643609
css_parser.ex
starcoder
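A parse/modify/serialize round-trip with the module above; the color change is an arbitrary example of editing the parsed structure before calling `to_binary/1`:

parsed = CssParser.parse("h4, h3 {color: blue; font-size: 20px;}")

updated =
  Enum.map(parsed, fn
    %{type: "elements"} = rule -> %{rule | rules: "color: red; font-size: 20px;"}
    other -> other
  end)

CssParser.to_binary(updated)
#=> "h4, h3 {\n\tcolor: red; font-size: 20px;\r}\n\n"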
defmodule Jsonrpc.Request do @moduledoc """ `Jsonrpc.Request` represents a JSONRPC 2.0 request, as documented in the [JSON-RPC 2.0 specification](https://www.jsonrpc.org/specification#request_object) """ import Injector inject System @type t :: %__MODULE__{ jsonrpc: String.t(), method: String.t(), params: any() | [any()], id: String.t() | integer() | nil } @derive Jason.Encoder @enforce_keys [:jsonrpc, :method] defstruct [:jsonrpc, :method, :params, :id] @doc """ `new` creates a new `Jsonrpc.Request`. It takes a list of options. The options are: * method: the method of the RPC request as a string. This field is required. * params: the parameters of the request. * id: the id of the request. When not given, the time the request was created in unix time in milliseconds is used as the ID. `new` can be piped with another `new` call to create a list of requests that can be sent as a batch RPC request. See `new/2` ### Example ``` Jsonrpc.Request.new(method: "callOne", params: "example", id: "1") ``` """ @spec new(keyword) :: Jsonrpc.Request.t() def new(opts) when is_list(opts) do method = opts |> Keyword.get(:method, :not_given) |> case do :not_given -> raise "Method is not given" method when is_binary(method) -> method _ -> raise "Method is invalid, should be a string" end %__MODULE__{ jsonrpc: "2.0", method: method } |> add_params(opts |> Keyword.get(:params, :not_given)) |> add_id(opts |> Keyword.get(:id, :not_given)) end defp add_params(req, :not_given) do req |> Map.put(:params, []) end defp add_params(req, params) do req |> Map.put(:params, params) end defp add_id(req, :not_given) do req |> Map.put(:id, System.os_time(:millisecond)) end defp add_id(req, id) when is_binary(id) or is_integer(id) or is_nil(id) do req |> Map.put(:id, id) end defp add_id(_req, _id), do: raise("ID is invalid: should be a string, an integer or nil") @doc """ `new/2` takes a request or a list of requests and adds a new request to the list. The options are the same as `new/1`. This function allows chaining multiple `new` calls together to create a list of RPC requests that can be sent as a batch request. ### Example ``` Jsonrpc.Request.new(method: "callOne") |> Jsonrpc.Request.new(method: "callTwo") |> Jsonrpc.call(name: :example, url: "https://finchjsonrpc.redmaner.com") ``` """ @spec new(t() | [t()], keyword) :: [t()] def new(req = %__MODULE__{}, opts) when is_list(opts) do [new(opts) | [req]] end def new(req_list, opts) when is_list(opts) do [new(opts) | req_list] end @doc """ `order` can be used to order a list of requests that are created by `new/1` and `new/2`. This will overwrite ids that were given when creating the requests! It will guarantee the right order when `new/1` and `new/2` are used. ### Example ``` Jsonrpc.Request.new(method: "callOne") |> Jsonrpc.Request.new(method: "callTwo") |> Jsonrpc.Request.order(1) |> Jsonrpc.call(name: :example, url: "https://finchjsonrpc.redmaner.com") ``` """ @spec order([t()], integer()) :: [t()] def order(req_list, starting_number \\ 1) def order(req_list, starting_number) when is_list(req_list) do req_list |> Enum.reverse() |> Enum.reduce({starting_number, []}, fn req, {req_id, requests} -> req = req |> Map.put(:id, req_id) {req_id + 1, requests ++ [req]} end) |> unwrap_requests() end def order(req, _starting_number), do: req defp unwrap_requests({_req_id, requests}), do: requests end
lib/jsonrpc/request.ex
0.883895
0.713007
request.ex
starcoder
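A short sketch of building an ordered batch with the module above (the transport call is omitted; see the `Jsonrpc.call/2` examples in the docs):

requests =
  Jsonrpc.Request.new(method: "callOne", params: [1, 2])
  |> Jsonrpc.Request.new(method: "callTwo")
  |> Jsonrpc.Request.order(1)

# requests now holds two %Jsonrpc.Request{} structs with ids 1 and 2,
# in creation order, ready to be sent as a batch.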
defmodule VelocyPack.Decoder do @moduledoc false # The implementation of this decoder is heavily inspired by that of Jason (https://github.com/michalmuskala/jason) use Bitwise alias VelocyPack.{Codegen, Error} import Codegen, only: [bytecase: 2] @spec parse(binary(), keyword()) :: {:ok, any()} | {:error, any()} def parse(data, _opts \\ []) when is_binary(data) do try do case value(data) do {value, <<>>} -> {:ok, value} {value, tail} -> {:ok, {value, tail}} end rescue e in MatchError -> {:error, Error.exception(e)} e in CaseClauseError -> {:error, Error.exception(e)} catch error -> {:error, error} end end @spec value(binary()) :: {any(), binary()} defp value(data) do bytecase data do _ in 0x01, rest -> {[], rest} type in 0x02..0x05, rest -> parse_array_without_index_table(type, rest) type in 0x06..0x09, rest -> parse_array_with_index_table(type, rest) _ in 0x0A, rest -> {%{}, rest} type in 0x0B..0x0E, rest -> parse_object(type, rest) # TODO: 0x0f..0x12 - objects with unsorted index table _ in 0x13, rest -> parse_compact_array(rest) _ in 0x14, rest -> parse_compact_object(rest) # 0x15..0x16 - reserved _ in 0x17, rest -> {:illegal, rest} _ in 0x18, rest -> {nil, rest} _ in 0x19, rest -> {false, rest} _ in 0x1A, rest -> {true, rest} _ in 0x1B, rest -> parse_double(rest) _ in 0x1C, rest -> parse_date_time(rest) # 0x1d - external -> not supported _ in 0x1E, rest -> {:min_key, rest} _ in 0x1F, rest -> {:max_key, rest} type in 0x20..0x27, rest -> parse_int(type, rest) type in 0x28..0x2F, rest -> parse_uint(type, rest) type in 0x30..0x39, rest -> parse_small_int(type, rest) type in 0x3A..0x3F, rest -> parse_neg_small_int(type, rest) _ in 0x40, rest -> {"", rest} type in 0x41..0xBE, rest -> parse_short_string(type, rest) _ in 0xBF, rest -> parse_string(rest) type in 0xC0..0xC7, rest -> parse_binary(type, rest) # 0xc8..0xcf - BCD -> not supported # 0xd0..0xd7 - negative BCD -> not supported # 0xd8..0xef - reserved # 0xf0..0xff - custom types -> not supported type, _rest -> error({:unsupported_type, type}) <<>> -> error(:unexpected_end) end end defp error(err), do: throw(err) @compile {:inline, parse_double: 1} @spec parse_double(binary()) :: {any(), binary()} defp parse_double(<<value::float-little-size(64), rest::binary>>), do: {value, rest} @compile {:inline, parse_date_time: 1} @spec parse_date_time(binary()) :: {any(), binary()} defp parse_date_time(<<value::integer-unsigned-little-size(64), rest::binary>>), do: {DateTime.from_unix!(value, :millisecond), rest} @compile {:inline, parse_int: 2} @spec parse_int(integer(), binary()) :: {any(), binary()} defp parse_int(type, data) do size = type - 0x1F <<value::integer-signed-little-unit(8)-size(size), rest::binary>> = data {value, rest} end @compile {:inline, parse_uint: 2} @spec parse_uint(integer(), binary()) :: {any(), binary()} defp parse_uint(type, data) do size = type - 0x27 <<value::integer-unsigned-little-unit(8)-size(size), rest::binary>> = data {value, rest} end @compile {:inline, parse_small_int: 2} @spec parse_small_int(integer(), binary()) :: {any(), binary()} defp parse_small_int(type, rest), do: {type - 0x30, rest} @compile {:inline, parse_neg_small_int: 2} @spec parse_neg_small_int(integer(), binary()) :: {any(), binary()} defp parse_neg_small_int(type, rest), do: {type - 0x40, rest} @compile {:inline, parse_short_string: 2} @spec parse_short_string(integer(), binary()) :: {any(), binary()} defp parse_short_string(type, data) do length = type - 0x40 parse_short_string_content(length, data) end @spec 
parse_short_string_content(integer(), binary()) :: {any(), binary()} defp parse_short_string_content(length, data) do <<value::binary-size(length), rest::binary>> = data {value, rest} end @spec parse_string(binary()) :: {any(), binary()} defp parse_string( <<length::integer-unsigned-little-size(64), value::binary-size(length), rest::binary>> ) do {value, rest} end @compile {:inline, parse_binary: 2} @spec parse_binary(integer(), binary()) :: {any(), binary()} defp parse_binary(type, data) do size = type - 0xBF parse_binary_content(size, data) end @spec parse_binary_content(integer(), binary()) :: {any(), binary()} defp parse_binary_content(size, data) do <<length::integer-unsigned-little-unit(8)-size(size), value::binary-size(length), rest::binary>> = data {value, rest} end @spec parse_array_without_index_table(integer(), binary()) :: {list(), binary()} defp parse_array_without_index_table(type, data) do size_bytes = 1 <<< (type - 0x02) <<total_size::integer-unsigned-little-unit(8)-size(size_bytes), rest::binary>> = data data_size = byte_size(rest) rest = skip_zeros(rest) zeros = data_size - byte_size(rest) data_size = total_size - size_bytes - 1 - zeros <<data::binary-size(data_size), rest::binary>> = rest list = parse_array_elements(data) # TODO - optionally validate length of list {list, rest} end @spec parse_array_with_index_table(integer(), binary()) :: {list(), binary()} defp parse_array_with_index_table( 0x09, <<total_size::integer-unsigned-little-size(64), rest::binary>> ) do data_size = total_size - 1 - 8 - 8 <<data::binary-size(data_size), length::integer-unsigned-little-size(64), rest::binary>> = rest index_size = length * 8 data_size = data_size - index_size <<data::binary-size(data_size), _index::binary-size(index_size)>> = data list = parse_array_elements(data) # TODO - optionally validate length of list {list, rest} end defp parse_array_with_index_table(type, data) do size_bytes = 1 <<< (type - 0x06) <<total_size::integer-unsigned-little-unit(8)-size(size_bytes), length::integer-unsigned-little-unit(8)-size(size_bytes), rest::binary>> = data index_size = size_bytes * length data_size = byte_size(rest) rest = skip_zeros(rest) zeros = data_size - byte_size(rest) data_size = total_size - 1 - 2 * size_bytes - zeros - index_size <<data::binary-size(data_size), _index::binary-size(index_size), rest::binary>> = rest list = parse_array_elements(data) # TODO - optionally validate length of list {list, rest} end @spec parse_compact_array(binary()) :: {list(), binary()} defp parse_compact_array(data) do {data, _length, rest} = parse_compact_header(data) list = parse_array_elements(data) # TODO - optionally validate length of list {list, rest} end # Yes, we totally do this in a non-tail-recursive way. # Performance tests for large arrays (~10000 entries) showed # that this is ~10% faster than a tail-recursive version.
# TODO - rerun performance tests @spec parse_array_elements(binary()) :: list() defp parse_array_elements(<<>>), do: [] defp parse_array_elements(data) do {elem, rest} = value(data) [elem | parse_array_elements(rest)] end @spec parse_object(integer(), binary()) :: {map(), binary()} defp parse_object(type, data) do size_bytes = 1 <<< (type - 0x0B) <<total_size::integer-unsigned-little-unit(8)-size(size_bytes), length::integer-unsigned-little-unit(8)-size(size_bytes), rest::binary>> = data data_size = total_size - 1 - 2 * size_bytes <<data::binary-size(data_size), rest::binary>> = rest index_table_size = length * size_bytes {obj, <<_index_table::binary-size(index_table_size)>>} = parse_object_members(length, %{}, skip_zeros(data)) {obj, rest} end @spec parse_compact_object(binary()) :: {map(), binary()} defp parse_compact_object(data) do {data, length, rest} = parse_compact_header(data) {obj, <<>>} = parse_object_members(length, %{}, data) {obj, rest} end @spec parse_object_members(integer(), map(), binary()) :: {map(), binary()} defp parse_object_members(0, obj, data), do: {obj, data} defp parse_object_members(length, obj, data) do {key, rest} = value(data) {value, rest} = value(rest) obj = Map.put(obj, key, value) parse_object_members(length - 1, obj, rest) end @spec parse_compact_header(binary()) :: {binary(), integer(), binary()} defp parse_compact_header(data) do {size, rest} = parse_length(data, 0, 0, false) data_size = size - (byte_size(data) - byte_size(rest)) - 1 <<data::binary-size(data_size), rest::binary>> = rest {length, data} = parse_length(data, 0, 0, true) {data, length, rest} end @spec skip_zeros(binary()) :: binary() defp skip_zeros(<<0, rest::binary>>), do: skip_zeros(rest) defp skip_zeros(data), do: data @spec parse_length(binary(), integer(), integer(), boolean()) :: {integer(), binary()} defp parse_length(data, len, p, reverse) do {v, rest} = if reverse do size = byte_size(data) - 1 <<rest::binary-size(size), v>> = data {v, rest} else <<v, rest::binary>> = data {v, rest} end len = len + ((v &&& 0x7F) <<< p) p = p + 7 if (v &&& 0x80) != 0 do parse_length(rest, len, p, reverse) else {len, rest} end end end
lib/velocy_pack/decoder.ex
0.524151
0.519643
decoder.ex
starcoder
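A few single-byte decodes that follow directly from the type table in the module above:

{:ok, true}  = VelocyPack.Decoder.parse(<<0x1A>>)
{:ok, false} = VelocyPack.Decoder.parse(<<0x19>>)
{:ok, nil}   = VelocyPack.Decoder.parse(<<0x18>>)
{:ok, 5}     = VelocyPack.Decoder.parse(<<0x35>>)  # small int: 0x35 - 0x30
{:ok, []}    = VelocyPack.Decoder.parse(<<0x01>>)  # empty array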
defmodule Grizzly.CommandClass.Time.OffsetSet do @moduledoc """ Command module for working with TIME OFFSET_SET command. command options: * `:sign_tzo` - This field is used to indicate the sign (plus or minus) to apply to the Hour TZO and Minute TZO field * `:hour_tzo` - This field is used to indicate the number of hours that the originating time zone deviates from UTC * `:minute_tzo` - This field is used to indicate the number of minutes that the originating time zone deviates from UTC * `:sign_offset_dst` - This field is used to indicate the sign (plus or minus) for the Minute Offset DST field to apply to the current time while in the Daylight Saving Time * `:minute_offset_dst` - This field MUST indicate the number of minutes by which the current time is to be adjusted when Daylight Saving Time starts * `:month_start_dst` - This field MUST indicate the month of the year when Daylight Saving Time starts * `:day_start_dst` - This field MUST indicate the day of the month when Daylight Saving Time starts * `:hour_start_dst` - This field MUST indicate the hour of the day when Daylight Saving Time starts * `:month_end_dst` - This field MUST indicate the month of the year when Daylight Saving Time ends * `:day_end_dst` - This field MUST indicate the day of the month when Daylight Saving Time ends * `:hour_end_dst` - This field MUST indicate the hour of the day when Daylight Saving Time ends * `:seq_number` - The sequence number for the Z/IP Packet * `:retries` - The number of times to retry to send the command (default 2) """ @behaviour Grizzly.Command alias Grizzly.Packet alias Grizzly.Command.{EncodeError, Encoding} alias Grizzly.CommandClass.Time @type t :: %__MODULE__{ seq_number: Grizzly.seq_number(), retries: non_neg_integer(), value: Time.offset() } @type opt :: {:seq_number, Grizzly.seq_number()} | {:retries, non_neg_integer()} | {:value, Time.offset()} defstruct seq_number: nil, retries: 2, value: nil @spec init([opt]) :: {:ok, t} def init(opts) do {:ok, struct(__MODULE__, opts)} end @spec encode(t) :: {:ok, binary} | {:error, EncodeError.t()} def encode( %__MODULE__{ value: %{ sign_tzo: sign_tzo, # deviation from UTC hour_tzo: hour_tzo, minute_tzo: minute_tzo, sign_offset_dst: sign_offset_dst, minute_offset_dst: minute_offset_dst, # start of DST month_start_dst: month_start_dst, day_start_dst: day_start_dst, # end of DST hour_start_dst: hour_start_dst, month_end_dst: month_end_dst, day_end_dst: day_end_dst, hour_end_dst: hour_end_dst }, seq_number: seq_number } = command ) do with {:ok, _encoded} <- Encoding.encode_and_validate_args( command, %{ sign_tzo: :bit, hour_tzo: {:range, 0, 14}, minute_tzo: {:range, 0, 59}, sign_offset_dst: :bit, minute_offset_dst: {:range, 0, 59}, month_start_dst: {:range, 1, 12}, day_start_dst: {:range, 1, 31}, hour_start_dst: {:range, 0, 59}, month_end_dst: {:range, 1, 12}, day_end_dst: {:range, 1, 31}, hour_end_dst: {:range, 0, 59} }, [:value] ) do binary = Packet.header(seq_number) <> << 0x8A, 0x05, sign_tzo::size(1), hour_tzo::size(7), minute_tzo, sign_offset_dst::size(1), minute_offset_dst::size(7), month_start_dst, day_start_dst, hour_start_dst, month_end_dst, day_end_dst, hour_end_dst >> {:ok, binary} end end @spec handle_response(t, Packet.t()) :: {:continue, t} | {:done, {:error, :nack_response}} | {:done, Time.offset()} | {:retry, t} def handle_response( %__MODULE__{seq_number: seq_number} = _command, %Packet{ seq_number: seq_number, types: [:ack_response] } ) do {:done, :ok} end def handle_response( %__MODULE__{seq_number: seq_number, retries: 0}, %Packet{ seq_number: seq_number, types: [:nack_response] } ) do {:done, {:error, :nack_response}} end
def handle_response( %__MODULE__{seq_number: seq_number, retries: n} = command, %Packet{ seq_number: seq_number, types: [:nack_response] } ) do {:retry, %{command | retries: n - 1}} end def handle_response( _, %Packet{ body: %{ command_class: :time, command: :time_offset_report, value: value } } ) do {:done, {:ok, value}} end def handle_response(command, _), do: {:continue, command} end
lib/grizzly/command_class/time/offset_set.ex
0.871966
0.591015
offset_set.ex
starcoder
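Building and encoding the command above for an illustrative UTC-5 zone (the sign conventions and the 30-minute DST shift are arbitrary example values; note the codec validates `minute_offset_dst` in 0..59):

offset = %{
  sign_tzo: 1, hour_tzo: 5, minute_tzo: 0,
  sign_offset_dst: 0, minute_offset_dst: 30,
  month_start_dst: 3, day_start_dst: 13, hour_start_dst: 2,
  month_end_dst: 11, day_end_dst: 6, hour_end_dst: 2
}

{:ok, command} = Grizzly.CommandClass.Time.OffsetSet.init(seq_number: 0x01, value: offset)
{:ok, _binary} = Grizzly.CommandClass.Time.OffsetSet.encode(command)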
defmodule Phoenix.LiveView.Static do # Holds the logic for static rendering. @moduledoc false alias Phoenix.LiveView.{Socket, Utils, Diff} # Token version. Should be changed whenever new data is stored. @token_vsn 2 # Max session age in seconds. Equivalent to 2 weeks. @max_session_age 1_209_600 @doc """ Acts as a view via put_view to maintain the controller render + instrumentation stack. """ def render("template.html", %{content: content}) do content end def render(_other, _assigns), do: nil @doc """ Verifies the session token. Returns the decoded map of session data or an error. ## Examples iex> verify_session(AppWeb.Endpoint, encoded_token, static_token) {:ok, %{} = decoded_session} iex> verify_session(AppWeb.Endpoint, "bad token", "bac static") {:error, :invalid} iex> verify_session(AppWeb.Endpoint, "expired", "expired static") {:error, :expired} """ def verify_session(endpoint, session_token, static_token) do with {:ok, session} <- verify_token(endpoint, session_token), {:ok, static} <- verify_static_token(endpoint, static_token) do {:ok, Map.merge(session, static)} end end defp verify_static_token(_endpoint, nil), do: {:ok, %{assigned_new: []}} defp verify_static_token(endpoint, token), do: verify_token(endpoint, token) defp verify_token(endpoint, token) do case Phoenix.Token.verify(endpoint, Utils.salt!(endpoint), token, max_age: @max_session_age) do {:ok, {@token_vsn, term}} -> {:ok, term} {:ok, _} -> {:error, :outdated} {:error, _} = error -> error end end @doc """ Renders a live view without spawning a LiveView server. * `conn` - the Plug.Conn struct from the HTTP request * `view` - the LiveView module ## Options * `:router` - the router the live view was built at * `:session` - the required map of session data * `:container` - the optional tuple for the HTML tag and DOM attributes to be used for the LiveView container. For example: `{:li, style: "color: blue;"}` """ def render(%Plug.Conn{} = conn, view, opts) do session = Keyword.get(opts, :session, %{}) config = load_live!(view, :view) {tag, extended_attrs} = container(config, opts) router = Keyword.get(opts, :router) endpoint = Phoenix.Controller.endpoint_module(conn) request_url = Plug.Conn.request_url(conn) socket = Utils.configure_socket( %Socket{endpoint: endpoint, view: view}, %{assigned_new: {conn.assigns, []}, connect_params: %{}} ) case call_mount_and_handle_params!(socket, router, view, session, conn.params, request_url) do {:ok, socket} -> data_attrs = [ phx_view: config.name, phx_session: sign_root_session(socket, router, view, session) ] data_attrs = if(router, do: [phx_main: true], else: []) ++ data_attrs attrs = [ {:id, socket.id}, {:data, data_attrs} | extended_attrs ] {:ok, to_rendered_content_tag(socket, tag, view, attrs)} {:stop, socket} -> {:stop, socket} end end @doc """ Renders only the static container of the LiveView. Accepts the same options as `static_render/3`. This is called by external live links.
""" def container_render(%Plug.Conn{} = conn, view, opts) do session = Keyword.get(opts, :session, %{}) config = load_live!(view, :view) {tag, extended_attrs} = container(config, opts) router = Keyword.get(opts, :router) endpoint = Phoenix.Controller.endpoint_module(conn) socket = Utils.configure_socket( %Socket{endpoint: endpoint, view: view}, %{assigned_new: {conn.assigns, []}, connect_params: %{}} ) session_token = sign_root_session(socket, router, view, session) attrs = [ {:id, socket.id}, {:data, phx_view: config.name, phx_session: session_token} | extended_attrs ] tag |> Phoenix.HTML.Tag.content_tag(attrs, do: nil) |> Phoenix.HTML.safe_to_string() end @doc """ Renders a nested live view without spawning a server. * `parent` - the parent `%Phoenix.LiveView.Socket{}` * `view` - the child LiveView module Accepts the same options as `static_render/3`. """ def nested_render(%Socket{endpoint: endpoint, connected?: connected?} = parent, view, opts) do session = Keyword.get(opts, :session, %{}) config = load_live!(view, :view) container = container(config, opts) child_id = opts[:id] || raise ArgumentError, "an :id is required when rendering child LiveView. " <> "The :id must uniquely identify the child." socket = Utils.configure_socket( %Socket{ id: to_string(child_id), endpoint: endpoint, root_pid: parent.root_pid, parent_pid: self() }, %{assigned_new: {parent.assigns, []}} ) if connected? do connected_nested_render(parent, config, socket, view, session, container) else disconnected_nested_render(parent, config, socket, view, session, container) end end defp disconnected_nested_render(parent, config, socket, view, session, container) do {tag, extended_attrs} = container socket = Utils.maybe_call_mount!(socket, view, [session, socket]) if exports_handle_params?(view) do raise ArgumentError, "handle_params/3 is not allowed on child LiveViews, only at the root" end attrs = [ {:id, socket.id}, {:data, phx_view: config.name, phx_session: "", phx_static: sign_static_token(socket), phx_parent_id: parent.id} | extended_attrs ] to_rendered_content_tag(socket, tag, view, attrs) end defp connected_nested_render(parent, config, socket, view, session, container) do {tag, extended_attrs} = container session_token = sign_nested_session(parent, socket, view, session) attrs = [ {:id, socket.id}, {:data, phx_parent_id: parent.id, phx_view: config.name, phx_session: session_token, phx_static: ""} | extended_attrs ] Phoenix.HTML.Tag.content_tag(tag, "", attrs) end defp to_rendered_content_tag(socket, tag, view, attrs) do rendered = Utils.to_rendered(socket, view) {_, diff, _} = Diff.render(socket, rendered, Diff.new_components()) Phoenix.HTML.Tag.content_tag(tag, {:safe, Diff.to_iodata(diff)}, attrs) end defp load_live!(view_or_component, kind) do case view_or_component.__live__() do %{kind: ^kind} = config -> config %{kind: other} -> raise "expected #{inspect(view_or_component)} to be a #{kind}, but it is a #{other}" end end defp call_mount_and_handle_params!(socket, router, view, session, params, uri) do socket |> Utils.maybe_call_mount!(view, [session, socket]) |> mount_handle_params(router, view, params, uri) |> case do {:noreply, %Socket{redirected: nil} = new_socket} -> {:ok, new_socket} {:noreply, %Socket{} = new_socket} -> {:stop, new_socket} {:stop, %Socket{redirected: nil}} -> Utils.raise_bad_stop_and_no_redirect!() {:stop, %Socket{redirected: {:live, _}}} -> Utils.raise_bad_stop_and_live_redirect!() {:stop, %Socket{} = new_socket} -> {:stop, new_socket} end end defp mount_handle_params(socket, 
router, view, params, uri) do cond do not exports_handle_params?(view) -> {:noreply, socket} router == nil -> Utils.live_link_info!(router, view, uri) true -> view.handle_params(params, uri, socket) end end defp exports_handle_params?(view), do: function_exported?(view, :handle_params, 3) defp sign_root_session(%Socket{id: id, endpoint: endpoint}, router, view, session) do # IMPORTANT: If you change the third argument, @token_vsn has to be bumped. sign_token(endpoint, %{ id: id, view: view, router: router, parent_pid: nil, root_pid: nil, session: session }) end defp sign_nested_session(%Socket{} = parent, %Socket{} = child, view, session) do # IMPORTANT: If you change the third argument, @token_vsn has to be bumped. sign_token(parent.endpoint, %{ id: child.id, view: view, parent_pid: self(), root_pid: parent.root_pid, session: session }) end # THe static token is computed only on disconnected render and it keeps # the information that is only available during disconnected renders, # such as assign_new. defp sign_static_token(%Socket{id: id, endpoint: endpoint} = socket) do # IMPORTANT: If you change the third argument, @token_vsn has to be bumped. sign_token(endpoint, %{ id: id, assigned_new: assigned_new_keys(socket) }) end defp sign_token(endpoint, data) do Phoenix.Token.sign(endpoint, Utils.salt!(endpoint), {@token_vsn, data}) end defp container(%{container: {tag, attrs}}, opts) do case opts[:container] do {tag, extra} -> {tag, Keyword.merge(attrs, extra)} nil -> {tag, attrs} end end defp assigned_new_keys(socket) do {_, keys} = socket.private.assigned_new keys end end
lib/phoenix_live_view/static.ex
0.909884
0.474388
static.ex
starcoder
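The sign/verify pair above is plain `Phoenix.Token` usage with a version tag wrapped around the payload. A minimal sketch of the same pattern in isolation (the module name, endpoint, and salt are assumptions for the example, not part of LiveView):

```
defmodule MyApp.VersionedToken do
  # Bump @vsn whenever the shape of the signed data changes,
  # mirroring @token_vsn in Phoenix.LiveView.Static above.
  @vsn 2
  @max_age 1_209_600

  def sign(endpoint, salt, data) do
    Phoenix.Token.sign(endpoint, salt, {@vsn, data})
  end

  def verify(endpoint, salt, token) do
    case Phoenix.Token.verify(endpoint, salt, token, max_age: @max_age) do
      {:ok, {@vsn, data}} -> {:ok, data}
      {:ok, _other_vsn} -> {:error, :outdated}
      {:error, _} = error -> error
    end
  end
end
```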
defmodule Tesla.Middleware.Logger.Formatter do @moduledoc false # Heavily based on Elixir's Logger.Formatter # https://github.com/elixir-lang/elixir/blob/v1.6.4/lib/logger/lib/logger/formatter.ex @default_format "$method $url -> $status ($time ms)" @keys ~w(method url status time) @type format :: [atom | binary] @spec compile(binary | nil) :: format def compile(nil), do: compile(@default_format) def compile(binary) do ~r/(?<h>)\$[a-z]+(?<t>)/ |> Regex.split(binary, on: [:h, :t], trim: true) |> Enum.map(&compile_key/1) end defp compile_key("$" <> key) when key in @keys, do: String.to_atom(key) defp compile_key("$" <> key), do: raise(ArgumentError, "$#{key} is an invalid format pattern.") defp compile_key(part), do: part @spec format(Tesla.Env.t(), Tesla.Env.result(), integer, format) :: IO.chardata() def format(request, response, time, format) do Enum.map(format, &output(&1, request, response, time)) end defp output(:method, env, _, _), do: env.method |> to_string() |> String.upcase() defp output(:url, env, _, _), do: env.url defp output(:status, _, {:ok, env}, _), do: to_string(env.status) defp output(:status, _, {:error, reason}, _), do: "error: " <> inspect(reason) defp output(:time, _, _, time), do: :io_lib.format("~.3f", [time / 1000]) defp output(binary, _, _, _), do: binary end defmodule Tesla.Middleware.Logger do @behaviour Tesla.Middleware @moduledoc """ Log requests using Elixir's Logger. With the default settings it logs request method, url, response status and time taken in milliseconds. ### Example usage ``` defmodule MyClient do use Tesla plug Tesla.Middleware.Logger end ``` ### Options - `:log_level` - custom function for calculating log level (see below) - `:filter_headers` - sanitizes sensitive headers before logging in debug mode (see below) ## Custom log format The default log format is `"$method $url -> $status ($time ms)"` which shows in logs like: ``` 2018-03-25 18:32:40.397 [info] GET https://bitebot.io -> 200 (88.074 ms) ``` Because log format is processed during compile time it needs to be set in config: ``` config :tesla, Tesla.Middleware.Logger, format: "$method $url ====> $status / time=$time" ``` ## Custom log levels By default, the following log levels will be used: - `:error` - for errors, 5xx and 4xx responses - `:warn` - for 3xx responses - `:info` - for 2xx responses You can customize this setting by providing your own `log_level/1` function: ``` defmodule MyClient do use Tesla plug Tesla.Middleware.Logger, log_level: &my_log_level/1 def my_log_level(env) do case env.status do 404 -> :info _ -> :default end end end ``` ### Logger Debug output When the Elixir Logger log level is set to `:debug` Tesla Logger will show full request & response. If you want to disable detailed request/response logging but keep the `:debug` log level (i.e. in development) you can set `debug: false` in your config: ``` # config/dev.local.exs config :tesla, Tesla.Middleware.Logger, debug: false ``` #### Filter headers To sanitize sensitive headers such as `authorization` in debug logs, add them to the `:filter_headers` option. `:filter_headers` expects a list of header names as strings. 
``` # config/dev.local.exs config :tesla, Tesla.Middleware.Logger, filter_headers: ["authorization"] ``` """ alias Tesla.Middleware.Logger.Formatter @config Application.get_env(:tesla, __MODULE__, []) @format Formatter.compile(@config[:format]) @type log_level :: :info | :warn | :error require Logger def call(env, next, opts) do {time, response} = :timer.tc(Tesla, :run, [env, next]) level = log_level(response, opts) Logger.log(level, fn -> Formatter.format(env, response, time, @format) end) if Keyword.get(@config, :debug, true) do Logger.debug(fn -> debug(env, response, opts) end) end response end defp log_level({:error, _}, _), do: :error defp log_level({:ok, env}, opts) do case Keyword.get(opts, :log_level) do nil -> default_log_level(env) fun when is_function(fun) -> case fun.(env) do :default -> default_log_level(env) level -> level end atom when is_atom(atom) -> atom end end @spec default_log_level(Tesla.Env.t()) :: log_level def default_log_level(env) do cond do env.status >= 400 -> :error env.status >= 300 -> :warn true -> :info end end @debug_no_query "(no query)" @debug_no_headers "(no headers)" @debug_no_body "(no body)" @debug_stream "[Elixir.Stream]" defp debug(request, {:ok, response}, opts) do [ "\n>>> REQUEST >>>\n", debug_query(request.query), ?\n, debug_headers(request.headers, opts), ?\n, debug_body(request.body), ?\n, "\n<<< RESPONSE <<<\n", debug_headers(response.headers, opts), ?\n, debug_body(response.body) ] end defp debug(request, {:error, error}, opts) do [ "\n>>> REQUEST >>>\n", debug_query(request.query), ?\n, debug_headers(request.headers, opts), ?\n, debug_body(request.body), ?\n, "\n<<< RESPONSE ERROR <<<\n", inspect(error) ] end defp debug_query([]), do: @debug_no_query defp debug_query(query) do query |> Enum.flat_map(&Tesla.encode_pair/1) |> Enum.map(fn {k, v} -> ["Query: ", to_string(k), ": ", to_string(v), ?\n] end) end defp debug_headers([], _opts), do: @debug_no_headers defp debug_headers(headers, opts) do filtered = Keyword.get(opts, :filter_headers, []) Enum.map(headers, fn {k, v} -> v = if k in filtered, do: "[FILTERED]", else: v [k, ": ", v, ?\n] end) end defp debug_body(nil), do: @debug_no_body defp debug_body([]), do: @debug_no_body defp debug_body(%Stream{}), do: @debug_stream defp debug_body(stream) when is_function(stream), do: @debug_stream defp debug_body(%Tesla.Multipart{} = mp) do [ "[Tesla.Multipart]\n", "boundary: ", mp.boundary, ?\n, "content_type_params: ", inspect(mp.content_type_params), ?\n | Enum.map(mp.parts, &[inspect(&1), ?\n]) ] end defp debug_body(data) when is_binary(data) or is_list(data), do: data defp debug_body(term), do: inspect(term) end
lib/tesla/middleware/logger.ex
0.900455
0.598283
logger.ex
starcoder
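For illustration, the default format string compiles to the following token list, which is what `Formatter.format/4` walks at log time:

```
iex> Tesla.Middleware.Logger.Formatter.compile("$method $url -> $status ($time ms)")
[:method, " ", :url, " -> ", :status, " (", :time, " ms)"]
```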
defmodule Phoenix.HTML.Form do
  @moduledoc ~S"""
  Helpers related to producing HTML forms.

  The functions in this module can be used in three
  distinct scenarios:

    * with model data - when information to populate
      the form comes from a model

    * with connection data - when a form is created based
      on the information in the connection (aka `Plug.Conn`)

    * without form data - when the functions are used directly,
      outside of a form

  We will explore all three scenarios below.

  ## With model data

  The entry point for defining forms in Phoenix is with
  the `form_for/4` function. For this example, we will
  use `Ecto.Changeset`, which integrates nicely with Phoenix
  forms via the `phoenix_ecto` package.

  Imagine you have the following action in your controller:

      def new(conn, _params) do
        changeset = User.changeset(%User{})
        render conn, "new.html", changeset: changeset
      end

  where `User.changeset/2` is defined as follows:

      def changeset(user, params \\ nil) do
        cast(user, params)
      end

  Now a `@changeset` assign is available in views which we
  can pass to the form:

      <%= form_for @changeset, user_path(@conn, :create), fn f -> %>
        <label>
          Name: <%= text_input f, :name %>
        </label>

        <label>
          Age: <%= select f, :age, 18..100 %>
        </label>

        <%= submit "Submit" %>
      <% end %>

  `form_for/4` receives the `Ecto.Changeset` and converts it
  to a form, which is passed to the function as the argument
  `f`. All the remaining functions in this module receive
  the form and automatically generate the input fields, often
  by extracting information from the given changeset. For example,
  if the user had a default value for age set, it will
  automatically show up as selected in the form.

  ## With connection data

  `form_for/4` expects as first argument any data structure that
  implements the `Phoenix.HTML.FormData` protocol. By default, Phoenix
  implements this protocol for `Plug.Conn`, allowing us to create forms
  based only on connection information.

  This is useful when you are creating forms that are not backed by any
  kind of model data, like a search form.

      <%= form_for @conn, search_path(@conn, :new), [name: :search], fn f -> %>
        <%= text_input f, :for %>
        <%= submit "Search" %>
      <% end %>

  ## Without form data

  Sometimes we may want to generate a `text_input/3` or any other tag
  outside of a form. The functions in this module also support such usage
  by simply passing an atom as first argument instead of the form.

      <%= text_input :user, :name, value: "This is a prepopulated value" %>
  """

  alias Phoenix.HTML.Form
  import Phoenix.HTML
  import Phoenix.HTML.Tag

  @doc """
  Defines the Phoenix.HTML.Form struct. Its fields are:

    * `:source` - the data structure given to `form_for/4` that
      implements the form data protocol

    * `:name` - the name to be used when generating input fields

    * `:model` - the model used to lookup field data

    * `:params` - the parameters associated to this form in case
      they were sent as part of a previous request

    * `:hidden` - a keyword list of fields that are required for
      submitting the form behind the scenes as hidden inputs

    * `:options` - a copy of the options given when creating the
      form via `form_for/4` without any form data specific key
  """
  defstruct source: nil, name: nil, model: %{}, hidden: [], params: %{}, options: []

  @type t :: %Form{source: term, name: String.t, model: map,
                   params: map, hidden: Keyword.t, options: Keyword.t}

  @doc """
  Generates a form tag with a form builder.

  See the module documentation for examples of using this function.

  ## Options

    * `:name` - the name to be used in the form.
May be inflected if a model is available * `:method` - the HTTP method. If the method is not "get" nor "post", an input tag with name `_method` is generated along-side the form tag. Defaults to "post". * `:multipart` - when true, sets enctype to "multipart/form-data". Required when uploading files * `:csrf_token` - for "post" requests, the form tag will automatically include an input tag with name `_csrf_token`. When set to false, this is disabled * `:enforce_utf8` - when false, does not enforce utf8 See `Phoenix.HTML.Tag.form_tag/2` for more information on the options above. """ @spec form_for(Phoenix.HTML.FormData.t, String.t, Keyword.t, (t -> Phoenix.HTML.unsafe)) :: Phoenix.HTML.safe def form_for(form_data, action, options \\ [], fun) when is_function(fun, 1) do form = Phoenix.HTML.FormData.to_form(form_data, options) safe_concat [form_tag(action, form.options), fun.(form), safe("</form>")] end ## Form helpers @doc """ Generates a text input. The form should either be a `Phoenix.HTML.Form` emitted by `form_for` or an atom. All given options are forwarded to the underlying input, default values are provided for id, name and value if possible. ## Examples # Assuming form contains a User model iex> text_input(form, :name) <input id="user_name" name="user[name]" type="text" value=""> iex> text_input(:user, :name) <input id="user_name" name="user[name]" type="text" value=""> """ def text_input(form, field, opts \\ []) do generic_input(:text, form, field, opts) end @doc """ Generates a hidden input. See `text_input/3` for example and docs. """ def hidden_input(form, field, opts \\ []) do generic_input(:hidden, form, field, opts) end @doc """ Generates an email input. See `text_input/3` for example and docs. """ def email_input(form, field, opts \\ []) do generic_input(:email, form, field, opts) end @doc """ Generates a number input. See `text_input/3` for example and docs. """ def number_input(form, field, opts \\ []) do generic_input(:number, form, field, opts) end defp generic_input(type, form, field, opts) when is_atom(field) and is_list(opts) do opts = opts |> Keyword.put_new(:type, type) |> Keyword.put_new(:id, id_from(form, field)) |> Keyword.put_new(:name, name_from(form, field)) |> Keyword.put_new(:value, value_from(form, field)) tag(:input, opts) end @doc """ Generates a file input. It requires the given form to be configured with `multipart: true` when invokig `form_for/4`, otherwise it fails with `ArgumentError`. See `text_input/3` for example and docs. """ def file_input(form, field, opts \\ []) do if match?(%Form{}, form) and !form.options[:multipart] do raise ArgumentError, "file_input/3 requires the enclosing form_for/4 " <> "to be configured with multipart: true" end opts = opts |> Keyword.put_new(:type, :file) |> Keyword.put_new(:id, id_from(form, field)) |> Keyword.put_new(:name, name_from(form, field)) tag(:input, opts) end @doc """ Generates a submit input to send the form. All options are forwarded to the underlying input tag. ## Examples iex> submit "Submit" <input type="submit" value="Submit"> """ def submit(value, opts \\ []) do opts = opts |> Keyword.put_new(:type, "submit") |> Keyword.put_new(:value, value) tag(:input, opts) end @doc """ Generates a radio button. Invoke this function for each possible value you to be sent to the server. 
## Examples # Assuming form contains a User model iex> radio_button(form, :role, "admin") <input id="user_role_admin" name="user[role]" type="radio" value="admin"> ## Options All options are simply forwarded to the underlying HTML tag. """ def radio_button(form, field, value, opts \\ []) do value = html_escape(value) opts = opts |> Keyword.put_new(:type, "radio") |> Keyword.put_new(:id, id_from(form, field) <> "_" <> elem(value, 1)) |> Keyword.put_new(:name, name_from(form, field)) if value == html_escape(value_from(form, field)) do opts = Keyword.put_new(opts, :checked, true) end tag(:input, [value: value] ++ opts) end @doc """ Generates a checkbox. This function is useful for sending boolean values to the server. ## Examples # Assuming form contains a User model iex> checkbox(form, :famous) <input name="user[famous]" type="hidden" value="false"> <input checked="checked" id="user_famous" name="user[famous]"> type="checkbox" value="true") ## Options * `:checked_value` - the value to be sent when the checkbox is checked. Defaults to "true" * `:unchecked_value` - the value to be sent then the checkbox is unchecked, Defaults to "false" * `:value` - the value used to check if a checkbox is checked or unchecked. The default value is extracted from the model if a model is available All other options are forwarded to the underlying HTML tag. ## Hidden fields Because an unchecked checkbox is not sent to the server, Phoenix automatically generates a hidden field with the unchecked_value *before* the checkbox field to ensure the `unchecked_value` is sent when the checkbox is not marked. """ def checkbox(form, field, opts \\ []) do opts = opts |> Keyword.put_new(:type, "checkbox") |> Keyword.put_new(:id, id_from(form, field)) |> Keyword.put_new(:name, name_from(form, field)) {value, opts} = Keyword.pop(opts, :value, value_from(form, field)) {checked_value, opts} = Keyword.pop(opts, :checked_value, true) {unchecked_value, opts} = Keyword.pop(opts, :unchecked_value, false) # We html escape all values to be sure we are comparing # apples to apples. After all we may have true in the model # but "true" in the params and both need to match. value = html_escape(value) checked_value = html_escape(checked_value) unchecked_value = html_escape(unchecked_value) if value == checked_value do opts = Keyword.put_new(opts, :checked, true) end safe_concat tag(:input, name: Keyword.get(opts, :name), type: "hidden", value: unchecked_value), tag(:input, [value: checked_value] ++ opts) end @doc """ Generates a select tag with the given `values`. Values are expected to be an Enumerable containing two-item tuples (like maps and keyword lists) or any Enumerable where the element will be used both as key and value for the generated select. ## Examples # Assuming form contains a User model iex> select(form, :age, 0..120) <select id="user_age" name="user[age]"> <option value="0">0</option> ... <option value="120">120</option> </select> iex> select(form, :role, [admin: "Admin", user: "User"]) <select id="user_role" name="user[role]"> <option value="admin">Admin</option> <option value="user">User</option> </select> iex> select(form, :role, [admin: "Admin", user: "User"], prompt: "Choose your role") <select id="user_role" name="user[role]"> <option value="">Choose your role</option> <option value="admin">Admin</option> <option value="user">User</option> </select> ## Options * `:prompt` - an option to include at the top of the options with the given prompt text * `:value` - the value used to select a given option. 
The default value is extracted from the model if a model is available * `:default` - the default value to use when none was given in `:value` and none was available in the model All other options are forwarded to the underlying HTML tag. """ def select(form, field, values, opts \\ []) do {default, opts} = Keyword.pop(opts, :default) {value, opts} = Keyword.pop(opts, :value, value_from(form, field) || default) {options, opts} = case Keyword.pop(opts, :prompt) do {nil, opts} -> {safe(""), opts} {prompt, opts} -> {content_tag(:option, prompt, value: ""), opts} end opts = opts |> Keyword.put_new(:id, id_from(form, field)) |> Keyword.put_new(:name, name_from(form, field)) options = options_for_select(values, options, html_escape(value)) content_tag(:select, options, opts) end defp options_for_select(values, options, value) do Enum.reduce values, options, fn {option_value, option_key}, acc -> option_key = html_escape(option_key) option_value = html_escape(option_value) option(option_key, option_value, value, acc) option, acc -> option = html_escape(option) option(option, option, value, acc) end end defp option(option_key, option_value, value, acc) do opts = [value: option_value, selected: value == option_value] safe_concat acc, content_tag(:option, option_key, opts) end ## Datetime @doc ~S""" Generates select tags for datetime. ## Examples # Assuming form contains a User model iex> datetime_select form, :born_at <select id="user_born_at_year" name="user[born_at][year]">...</select> / <select id="user_born_at_month" name="user[born_at][month]">...</select> / <select id="user_born_at_day" name="user[born_at][day]">...</select> — <select id="user_born_at_hour" name="user[born_at][hour]">...</select> : <select id="user_born_at_min" name="user[born_at][min]">...</select> If you want to include the seconds field (hidden by default), pass `sec: []`: # Assuming form contains a User model iex> datetime_select form, :born_at, sec: [] If you want to configure the years range: # Assuming form contains a User model iex> datetime_select form, :born_at, year: [options: 1900..2100] You are also able to configure `:month`, `:day`, `:hour`, `:min` and `:sec`. All options given to those keys will be forwarded to the underlying select. See `select/4` for more information. ## Options * `:value` - the value used to select a given option. The default value is extracted from the model if a model is available * `:default` - the default value to use when none was given in `:value` and none was available in the model * `:year`, `:month`, `:day`, `:hour`, `:min`, `:sec` - options passed to the underlying select. See `select/4` for more information. The available values can be given in `:options`. * `:builder` - specify how the select can be build. It must be a function that receives a builder that should be invoked with the select name and a set of options. See builder below for more information. ## Builder The generated datetime_select can be customized at will by providing a builder option. Here is an example from EEx: <%= datetime_select form, :born_at, builder: fn b -> %> Date: <%= b.(:day, []) %> / <%= b.(:month, []) %> / <%= b.(:hour, []) %> Time: <%= b.(:hour, []) %> : <%= b.(:min, []) %> <% end %> Although we have passed empty lists as options (they are required), you could pass any option there and it would be given to the underlying select input. 
In practice, we recommend you to create your own helper with your default builder: def my_datetime_select(form, field, opts \\ []) do builder = fn b -> ~e"\"" Date: <%= b.(:day, []) %> / <%= b.(:month, []) %> / <%= b.(:hour, []) %> Time: <%= b.(:hour, []) %> : <%= b.(:min, []) %> "\"" end datetime_select(form, field, [builder: builder] ++ opts) end Then you are able to use your own datetime_select throughout your whole application. ## Supported date values The following values are supported as date: * a map containing the `year`, `month` and `day` keys (either as strings or atoms) * a tuple with three elements: `{year, month, day}` * `nil` ## Supported time values The following values are supported as time: * a map containing the `hour` and `min` keys and an optional `sec` key (either as strings or atoms) * a tuple with three elements: `{hour, min, sec}` * a tuple with four elements: `{hour, min, sec, usec}` * `nil` """ def datetime_select(form, field, opts \\ []) do value = Keyword.get(opts, :value, value_from(form, field) || Keyword.get(opts, :default)) builder = Keyword.get(opts, :builder) || fn b -> date = date_builder(b, opts) time = time_builder(b, opts) safe_concat [date, safe(" &mdash; "), time] end builder.(datetime_builder(form, field, date_value(value), time_value(value), opts)) end @doc """ Generates select tags for date. Check `datetime_select/3` for more information on options and supported values. """ def date_select(form, field, opts \\ []) do value = Keyword.get(opts, :value, value_from(form, field) || Keyword.get(opts, :default)) builder = Keyword.get(opts, :builder) || &date_builder(&1, opts) builder.(datetime_builder(form, field, date_value(value), nil, opts)) end defp date_builder(b, _opts) do safe_concat [b.(:year, []), safe(" / "), b.(:month, []), safe(" / "), b.(:day, [])] end defp date_value(%{"year" => year, "month" => month, "day" => day}), do: %{year: year, month: month, day: day} defp date_value(%{year: year, month: month, day: day}), do: %{year: year, month: month, day: day} defp date_value({{year, month, day}, _}), do: %{year: year, month: month, day: day} defp date_value({year, month, day}), do: %{year: year, month: month, day: day} defp date_value(nil), do: %{year: nil, month: nil, day: nil} defp date_value(other), do: raise(ArgumentError, "unrecognized date #{inspect other}") @doc """ Generates select tags for time. Check `datetime_select/3` for more information on options and supported values. 
""" def time_select(form, field, opts \\ []) do value = Keyword.get(opts, :value, value_from(form, field) || Keyword.get(opts, :default)) builder = Keyword.get(opts, :builder) || &time_builder(&1, opts) builder.(datetime_builder(form, field, nil, time_value(value), opts)) end defp time_builder(b, opts) do time = safe_concat [b.(:hour, []), safe(" : "), b.(:min, [])] if Keyword.get(opts, :sec) do safe_concat [time, safe(" : "), b.(:sec, [])] else time end end defp time_value(%{"hour" => hour, "min" => min} = map), do: %{hour: hour, min: min, sec: Map.get(map, "sec", 0)} defp time_value(%{hour: hour, min: min} = map), do: %{hour: hour, min: min, sec: Map.get(map, :sec, 0)} defp time_value({_, {hour, min, sec, _msec}}), do: %{hour: hour, min: min, sec: sec} defp time_value({hour, min, sec, _mseg}), do: %{hour: hour, min: min, sec: sec} defp time_value({_, {hour, min, sec}}), do: %{hour: hour, min: min, sec: sec} defp time_value({hour, min, sec}), do: %{hour: hour, min: min, sec: sec} defp time_value(nil), do: %{hour: nil, min: nil, sec: nil} defp time_value(other), do: raise(ArgumentError, "unrecognized time #{inspect other}") @months %{ "1" => "January", "2" => "February", "3" => "March", "4" => "April", "5" => "May", "6" => "June", "7" => "July", "8" => "August", "9" => "September", "10" => "October", "11" => "November", "12" => "December" } map = &Enum.into(&1, %{}, fn i -> i = Integer.to_string(i) {i, String.rjust(i, 2, ?0)} end) @days map.(1..31) @hours map.(0..23) @minsec map.(0..59) defp datetime_builder(form, field, date, time, parent) do id = Keyword.get(parent, :id, id_from(form, field)) name = Keyword.get(parent, :name, name_from(form, field)) fn :year, opts when date != nil -> {year, _, _} = :erlang.date() {value, opts} = datetime_options(:year, year-5..year+5, id, name, parent, date, opts) select(:datetime, :year, value, opts) :month, opts when date != nil -> {value, opts} = datetime_options(:month, @months, id, name, parent, date, opts) select(:datetime, :month, value, opts) :day, opts when date != nil -> {value, opts} = datetime_options(:day, @days, id, name, parent, date, opts) select(:datetime, :day, value, opts) :hour, opts when time != nil -> {value, opts} = datetime_options(:hour, @hours, id, name, parent, time, opts) select(:datetime, :hour, value, opts) :min, opts when time != nil -> {value, opts} = datetime_options(:min, @minsec, id, name, parent, time, opts) select(:datetime, :min, value, opts) :sec, opts when time != nil -> {value, opts} = datetime_options(:sec, @minsec, id, name, parent, time, opts) select(:datetime, :sec, value, opts) end end defp datetime_options(type, values, id, name, parent, datetime, opts) do opts = Keyword.merge Keyword.get(parent, type, []), opts suff = Atom.to_string(type) {value, opts} = Keyword.pop(opts, :options, values) {value, opts |> Keyword.put_new(:id, id <> "_" <> suff) |> Keyword.put_new(:name, name <> "[" <> suff <> "]") |> Keyword.put_new(:value, Map.get(datetime, type))} end ## Helpers defp value_from(%{model: model, params: params}, field), do: Map.get(params, Atom.to_string(field)) || Map.get(model, field) defp value_from(name, _field) when is_atom(name), do: nil defp id_from(%{name: name}, field), do: "#{name}_#{field}" defp id_from(name, field) when is_atom(name), do: "#{name}_#{field}" defp name_from(%{name: name}, field), do: "#{name}[#{field}]" defp name_from(name, field) when is_atom(name), do: "#{name}[#{field}]" end
lib/phoenix/html/form.ex
0.86712
0.691302
form.ex
starcoder
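The `id_from/2` and `name_from/2` conventions in this module mean a form (or atom) named `user` with field `:email` always yields the id `user_email` and the name `user[email]`; following the doctests above:

```
iex> text_input(:user, :email)
<input id="user_email" name="user[email]" type="text" value="">
```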
defmodule ForthVM do @moduledoc """ A toy Forth-like virtual machine. I have written it to experiment implementing a stack-based preemtive multitasking interpreter (and to play) with Elixir. """ @doc """ Starts a new VM supervisor, initializin `num_cores` cores. ## Examples ForthVM.start(num_cores: 2) {:ok, #PID<0.375.0>} """ def start(num_cores: num_cores) do children = [ {ForthVM.Supervisor, num_cores: num_cores} ] opts = [strategy: :one_for_one, name: ForthVM] Supervisor.start_link(children, opts) end @doc """ Returns a map with cores' id as keys and cores' pid as values. ## Examples ForthVM.Supervisor.cores() %{"core_1" => #PID<0.407.0>, "core_2" => #PID<0.408.0>} """ defdelegate cores(), to: ForthVM.Supervisor @doc """ Returns the PID for the Core with the given `core_id` string. ## Examples ForthVM.core_pid("core_2") #PID<0.408.0> """ defdelegate core_pid(core_id), to: ForthVM.Supervisor @doc """ Executes Forth code in `source` string using `process_id` Process managed by the `core_id` Core. Optionally, a custom Forth `dictionary` can be passed. Returns the updated Core state. ## Examples ForthVM.execute("core_2", "p_1", "40 2 +") %ForthVM.Core{ id: 2, io: :stdio, processes: [ %ForthVM.Process{ context: {[], '*', [], %{ "dup" => {:word, &ForthVM.Words.Stack.dup/5, %{doc: "duplicate element from top of stack", stack: "( x -- x x )"}}, ... }, %{ core_id: 2, debug: false, io: %{ device: :stdio, devices: %{"core_io" => :stdio, "stdio" => :stdio} }, messages: [], process_id: "p_1", reductions: 997, sleep: 0 }}, core_id: nil, exit_value: 42, id: "p_1", status: :exit } ] } """ defdelegate execute(core_id, process_id, source, dictionary \\ nil), to: ForthVM.Supervisor @doc """ Loads Forth code from the `source` string into `process_id` Process managed by `core_id` Core, replacing all code currrently stored into the process. The loaded code is executed right away. If the `process_id` does not exist, a message will be logged, but no error raised. Returns the updated Core state. """ defdelegate load(core_id, process_id, source), to: ForthVM.Supervisor @doc """ Spawns a new process with given `process_id` that will be managed by the `core_id` Core, If `process_id` is `nil`, an new id will be automatically generated using `System.unique_integer()`. Returns the newly spawned Process' state. ## Examples ForthVM.spawn("core_2", "p_new") %ForthVM.Process{ context: {[], [], [], %{ "<<" => {:word, &ForthVM.Words.Logic.b_shift_left/5, %{doc: "bitwise shift left", stack: "( x y -- v )"}}, ... }, %{ core_id: 2, debug: false, io: %{device: :stdio, devices: %{"core_io" => :stdio, "stdio" => :stdio}}, messages: [], process_id: "p_new", reductions: 0, sleep: 0 }}, core_id: nil, exit_value: nil, id: "p_new", status: nil } """ defdelegate spawn(core_id, process_id, dictionary \\ nil), to: ForthVM.Supervisor @doc """ Sends a message to `process_id` Process managed by `core_id` Core: `word_name` is the name of the dictionary's word that will handle the message, `message_data` is a list containing the data to be placed on top of the data stack. The message will place `{word_name, message_data}` into the Process' messages FIFO queue. Messages are handled when the Process has no more tokens to process: - `word_name` is placed into the list of tokens to execute - `message_data` list is joined with the data stack - the message is removed from the `messages` queue This is a cast call, so nothing is returned. 
## Examples ForthVM.send_message("core_2", "p_new", ".", ["hello world"]) :ok hello world """ defdelegate send_message(core_id, process_id, word_name, message_data), to: ForthVM.Supervisor end
lib/forthvm.ex
0.805823
0.472623
forthvm.ex
starcoder
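Stitching the examples from the moduledocs together, a short session might look like this (the core and process ids are the ones used in the docs):

```
{:ok, _sup} = ForthVM.start(num_cores: 2)
ForthVM.spawn("core_2", "p_new")
ForthVM.send_message("core_2", "p_new", ".", ["hello world"])
# => :ok, and "hello world" is printed once the process drains its message queue
```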
defmodule OverDB.Protocol.V4.Frames.Requests.Query do
  @moduledoc """
  Documentation for Query.

  Performs a CQL query. The body of the message must be:
    <query><query_parameters>
  where <query> is a [long string] representing the query and
  <query_parameters> must be
    <consistency><flags>[<n>[name_1]<value_1>...[name_n]<value_n>][<result_page_size>][<paging_state>][<serial_consistency>][<timestamp>]
  where:
    - <consistency> is the [consistency] level for the operation.
    - <flags> is a [byte] whose bits define the options for this query and
      in particular influence what the remainder of the message contains.
      A flag is set if the bit corresponding to its `mask` is set. Supported
      flags are, given their mask:
        0x01: Values. If set, a [short] <n> followed by <n> [value]
              values are provided. Those values are used for bound variables in
              the query. Optionally, if the 0x40 flag is present, each value
              will be preceded by a [string] name, representing the name of
              the marker the value must be bound to.
        0x02: Skip_metadata. If set, the Result Set returned as a response
              to the query (if any) will have the NO_METADATA flag (see
              Section 4.2.5.2).
        0x04: Page_size. If set, <result_page_size> is an [int]
              controlling the desired page size of the result (in CQL3 rows).
              See the section on paging (Section 8) for more details.
        0x08: With_paging_state. If set, <paging_state> should be present.
              <paging_state> is a [bytes] value that should have been returned
              in a result set (Section 4.2.5.2). The query will be
              executed but starting from a given paging state. This is also to
              continue paging on a different node than the one where it
              started (See Section 8 for more details).
        0x10: With serial consistency. If set, <serial_consistency> should be
              present. <serial_consistency> is the [consistency] level for the
              serial phase of conditional updates. That consistency can only be
              either SERIAL or LOCAL_SERIAL and if not present, it defaults to
              SERIAL. This option will be ignored for anything else other than a
              conditional update/insert.
        0x20: With default timestamp. If set, <timestamp> should be present.
              <timestamp> is a [long] representing the default timestamp for the
              query in microseconds (negative values are forbidden). This will
              replace the server side assigned timestamp as default timestamp.
              Note that a timestamp in the query itself will still override
              this timestamp. This is entirely optional.
        0x40: With names for values. This only makes sense if the 0x01 flag is
              set and is ignored otherwise. If present, the values from the
              0x01 flag will be preceded by a name (see above). Note that this
              is only useful for QUERY requests where named bind markers are
              used; for EXECUTE statements, since the names for the expected
              values were returned during preparation, a client can always
              provide values in the right order without any names and using
              this flag, while supported, is almost surely inefficient.

  Note that the consistency is ignored by some queries (USE, CREATE, ALTER,
  TRUNCATE, ...).

  The server will respond to a QUERY message with a RESULT message, the content
  of which depends on the query.
""" alias OverDB.Protocol.V4.Frames.{Frame, Requests.Encoder} @type a :: :query @type t :: %__MODULE__{statement: String.t, values: list | map, metadata: list} defstruct [:statement, :values, :metadata] def opcode() do 0x07 end @spec create(binary, list) :: list def create(statement, values \\ [], metadata \\ []) do %__MODULE__{statement: statement, values: values, metadata: metadata} end @spec new(t, map) :: list def new(%__MODULE__{statement: statement, values: values, metadata: metadata}, opts \\ %{}) do flags = Map.get(opts, :flags, %{ignore: true}) Frame.create(:query, Encoder.query_new_body(statement, values, metadata, opts), flags) |> Frame.encode() end @spec push_alone(t, map) :: list def push_alone(%__MODULE__{statement: statement, values: values, metadata: metadata}, opts \\ %{}) do flags = Map.get(opts, :flags, %{ignore: true}) Frame.create(:query, Encoder.query_new_body(statement, values, metadata, opts), flags) |> Frame.push() end # this to encode the query at the client_end before pushing it to the batcher # NOTE: the opt is unused now, but it will have use cases in future (like forcing_timestamp for individual query inside batch, or asking for high consitency level) @spec push(t, map) :: list def push(%__MODULE__{statement: _statement, values: _values, metadata: _metadata} = query, opts \\ %{}) do Encoder.query_in_batch(query, opts) end end
lib/protocol/v4/frames/requests/query.ex
0.821725
0.601681
query.ex
starcoder
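The flag masks documented above combine bitwise; a hypothetical sketch of deriving the `<flags>` byte from options (this helper is not part of the module, the real encoding happens in `Encoder.query_new_body/4`):

```
defmodule FlagsSketch do
  import Bitwise

  @values 0x01
  @skip_metadata 0x02
  @page_size 0x04
  @with_paging_state 0x08

  # Build the <flags> byte for a subset of the options described above.
  def query_flags(opts) do
    0
    |> set_if(@values, opts[:values] not in [nil, []])
    |> set_if(@skip_metadata, opts[:skip_metadata] == true)
    |> set_if(@page_size, is_integer(opts[:page_size]))
    |> set_if(@with_paging_state, opts[:paging_state] != nil)
  end

  defp set_if(flags, mask, true), do: flags ||| mask
  defp set_if(flags, _mask, _), do: flags
end
```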
defmodule Conduit.Encoding do
  @moduledoc """
  Encodes and decodes a message body based on the content encoding given.

  Custom content encodings can be specified in your configuration.

      config :conduit, Conduit.Encoding, [{"custom", MyApp.CustomEncoding}]

  Note that any new content encodings specified in this way will require
  a recompile of Conduit.

      $ mix deps.clean conduit --build
      $ mix deps.get

  Any custom content encodings should implement the `Conduit.Encoding`
  behaviour. See `Conduit.Encoding.GZip` for an example.
  """
  @callback encode(Conduit.Message.t(), Keyword.t()) :: Conduit.Message.t()
  @callback decode(Conduit.Message.t(), Keyword.t()) :: Conduit.Message.t()

  @default_content_encodings [
    {"gzip", Conduit.Encoding.GZip},
    {"identity", Conduit.Encoding.Identity}
  ]

  @doc false
  defmacro __using__(_opts) do
    quote do
      @behaviour Conduit.Encoding
      import Conduit.Message
    end
  end

  @doc """
  Encodes the message body with the specified content encoding.

  ## Examples

      iex> import Conduit.Message
      iex> message =
      iex>   %Conduit.Message{}
      iex>   |> put_body("{}")
      iex>   |> Conduit.Encoding.encode("gzip", [])
      iex> :zlib.gunzip(message.body)
      "{}"

  """
  @spec encode(Conduit.Message.t(), String.t(), Keyword.t()) :: Conduit.Message.t()
  def encode(message, encoding, opts) do
    content_encoding(encoding).encode(message, opts)
  end

  @doc """
  Decodes the message body with the specified content encoding.

  ## Examples

      iex> import Conduit.Message
      iex> body = <<31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 171, 174, 5, 0, 67, 191, 166, 163, 2, 0, 0, 0>>
      iex> message =
      iex>   %Conduit.Message{}
      iex>   |> put_body(body)
      iex>   |> Conduit.Encoding.decode("gzip", [])
      iex> message.body
      "{}"

  """
  @spec decode(Conduit.Message.t(), String.t(), Keyword.t()) :: Conduit.Message.t()
  def decode(message, encoding, opts) do
    content_encoding(encoding).decode(message, opts)
  end

  @spec content_encoding(String.t()) :: module
  config_content_encodings = Application.get_env(:conduit, Conduit.Encoding, [])
  encodings = config_content_encodings ++ @default_content_encodings

  for {encoding, content_encoding} <- encodings do
    defp content_encoding(unquote(encoding)), do: unquote(content_encoding)
  end

  defp content_encoding(content_encoding) do
    raise Conduit.UnknownEncodingError, "Unknown encoding #{inspect(content_encoding)}"
  end
end
lib/conduit/encoding.ex
0.886862
0.430327
encoding.ex
starcoder
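A sketch of a custom codec as described in the moduledoc. The module name is an assumption for the example, and `put_body/2` is the same `Conduit.Message` helper the doctests above use; register it as `{"base64", MyApp.Base64Encoding}` in the config:

```
defmodule MyApp.Base64Encoding do
  use Conduit.Encoding

  def encode(message, _opts) do
    put_body(message, Base.encode64(message.body))
  end

  def decode(message, _opts) do
    put_body(message, Base.decode64!(message.body))
  end
end
```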
defmodule Phoenix.HTML do @moduledoc """ Helpers for working with HTML strings and templates. When used, it imports the given modules: * `Phoenix.HTML`- functions to handle HTML safety; * `Phoenix.HTML.Tag` - functions for generating HTML tags; * `Phoenix.HTML.Form` - functions for working with forms; * `Phoenix.HTML.Link` - functions for generating links and urls; * `Phoenix.HTML.Format` - functions for formatting text; ## HTML Safe One of the main responsibilities of this module is to provide convenience functions for escaping and marking HTML code as safe. By default, data output in templates is not considered safe: <%= "<hello>" %> will be shown as: &lt;hello&gt; User data or data coming from the database is almost never considered safe. However, in some cases, you may want to tag it as safe and show its "raw" contents: <%= raw "<hello>" %> Keep in mind most helpers will automatically escape your data and return safe content: <%= content_tag :p, "<hello>" %> will properly output: <p>&lt;hello&gt;</p> """ @doc false defmacro __using__(_) do quote do import Phoenix.HTML import Phoenix.HTML.Form import Phoenix.HTML.Link import Phoenix.HTML.Tag import Phoenix.HTML.Format end end @typedoc "Guaranteed to be safe" @type safe :: {:safe, iodata} @typedoc "May be safe or unsafe (i.e. it needs to be converted)" @type unsafe :: Phoenix.HTML.Safe.t @doc """ Provides `~e` sigil with HTML safe EEx syntax inside source files. Raises on attempts to interpolate with `\#{}`, so `~E` should be preferred. iex> ~e"\"" ...> Hello <%= "world" %> ...> "\"" {:safe, [[["" | "Hello "] | "world"] | "\\n"]} """ defmacro sigil_e(expr, opts) do handle_sigil(expr, opts, __CALLER__.line) end @doc """ Provides `~E` sigil with HTML safe EEx syntax inside source files. Does not raise on attempts to interpolate with `\#{}`, but rather shows those characters literally, so it should be preferred over `~e`. iex> ~E"\"" ...> Hello <%= "world" %> ...> "\"" {:safe, [[["" | "Hello "] | "world"] | "\\n"]} """ defmacro sigil_E(expr, opts) do handle_sigil(expr, opts, __CALLER__.line) end defp handle_sigil({:<<>>, _, [expr]}, [], line) do EEx.compile_string(expr, engine: Phoenix.HTML.Engine, line: line + 1) end defp handle_sigil(_, _, _) do raise ArgumentError, "interpolation not allowed in ~e sigil. " <> "Remove the interpolation, use <%= %> to insert values, " <> "or use ~E to show the interpolation literally" end @doc """ Marks the given content as raw. This means any HTML code inside the given string won't be escaped. iex> raw("<hello>") {:safe, "<hello>"} iex> raw({:safe, "<hello>"}) {:safe, "<hello>"} iex> raw(nil) {:safe, ""} """ @spec raw(iodata | safe) :: safe def raw({:safe, value}), do: {:safe, value} def raw(nil), do: {:safe, ""} def raw(value) when is_binary(value) or is_list(value), do: {:safe, value} @doc """ Escapes the HTML entities in the given term, returning iodata. iex> html_escape("<hello>") {:safe, "&lt;hello&gt;"} iex> html_escape('<hello>') {:safe, ["&lt;", 104, 101, 108, 108, 111, "&gt;"]} iex> html_escape(1) {:safe, "1"} iex> html_escape({:safe, "<hello>"}) {:safe, "<hello>"} """ @spec html_escape(unsafe) :: safe def html_escape({:safe, _} = safe), do: safe def html_escape(nil), do: {:safe, ""} def html_escape(bin) when is_binary(bin), do: {:safe, Plug.HTML.html_escape(bin)} def html_escape(list) when is_list(list), do: {:safe, Phoenix.HTML.Safe.List.to_iodata(list)} def html_escape(other), do: {:safe, Phoenix.HTML.Safe.to_iodata(other)} @doc """ Converts a safe result into a string. 
Fails if the result is not safe. In such cases, you can
  invoke `html_escape/1` or `raw/1` accordingly before.
  """
  @spec safe_to_string(safe) :: String.t
  def safe_to_string({:safe, iodata}) do
    IO.iodata_to_binary(iodata)
  end

  @doc """
  Escapes quotes (double and single), double backslashes and other
  JavaScript-sensitive characters.

  This function is useful in JavaScript responses when there is a need
  to escape html rendered from other templates, like in the following:

      $("#container").append("<%= escape_javascript(render("post.html", post: @post)) %>");
  """
  @spec escape_javascript(binary | safe) :: String.t
  def escape_javascript({:safe, data}) do
    {:safe, data |> IO.iodata_to_binary |> escape_javascript}
  end

  def escape_javascript(data) when is_binary(data) do
    escape_javascript(data, "")
  end

  defp escape_javascript(<<0x2028::utf8, t::binary>>, acc), do: escape_javascript(t, <<acc::binary, "&#x2028;">>)
  defp escape_javascript(<<0x2029::utf8, t::binary>>, acc), do: escape_javascript(t, <<acc::binary, "&#x2029;">>)
  defp escape_javascript(<<"</", t::binary>>, acc), do: escape_javascript(t, <<acc::binary, ?<, ?\\, ?/>>)
  defp escape_javascript(<<"\r\n", t::binary>>, acc), do: escape_javascript(t, <<acc::binary, ?\\, ?n>>)
  defp escape_javascript(<<h, t::binary>>, acc) when h in [?", ?', ?\\], do: escape_javascript(t, <<acc::binary, ?\\, h>>)
  defp escape_javascript(<<h, t::binary>>, acc) when h in [?\r, ?\n], do: escape_javascript(t, <<acc::binary, ?\\, ?n>>)
  defp escape_javascript(<<h, t::binary>>, acc), do: escape_javascript(t, <<acc::binary, h>>)
  defp escape_javascript(<<>>, acc), do: acc
end
deps/phoenix_html/lib/phoenix_html.ex
0.758376
0.525612
phoenix_html.ex
starcoder
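A couple of concrete `escape_javascript/1` inputs and outputs, derived from the clauses above:

```
iex> Phoenix.HTML.escape_javascript("don't </script>")
"don\\'t <\\/script>"

iex> Phoenix.HTML.escape_javascript("line1\r\nline2")
"line1\\nline2"
```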
defmodule Akd.Build.Release do @moduledoc """ A native Hook module that comes shipped with Akd. This module uses `Akd.Hook`. Provides a set of operations that build a distillery release for a given app at a deployment's `build_at` destination. This hook assumes that a distillery rel config file, `rel/config.exs` is already present or initialized by either a previously executed hook or manually. Ensures to cleanup and empty the releases created by this build. Doesn't have any Rollback operations. # Options: * `run_ensure`: `boolean`. Specifies whether to a run the ensure commands of a hook or not. This is useful when specifying whether or not to clean up after and operation. Defaults to `true`. * `ignore_failure`: `boolean`. Specifies whether to continue if this hook fails. This is useful when a hook's failure is to be ignored, and a deployed can be continued without halting the pipeline. * `cmd_envs`: `list` of `tuples`. Specifies the environments to provide while building the distillery release. # Defaults: * `run_ensure`: `true` * `ignore_failure`: `false` * `cmd_envs`: `[false]` """ use Akd.Hook @default_opts [ run_ensure: true, ignore_failure: false, cmd_envs: [] ] @doc """ Callback implementation for `get_hooks/2`. This function returns a list of operations that can be used to build a release using distillery on the `build_at` destination of a deployment. ## Examples iex> deployment = %Akd.Deployment{mix_env: "prod", ...> build_at: Akd.Destination.local("."), ...> publish_to: Akd.Destination.local("."), ...> name: "name", ...> vsn: "0.1.1"} iex> Akd.Build.Release.get_hooks(deployment, []) [%Akd.Hook{ensure: [], ignore_failure: false, main: [%Akd.Operation{cmd: "rm -rf ./_build/prod/rel", cmd_envs: [], destination: %Akd.Destination{host: :local, path: ".", user: :current}}, %Akd.Operation{cmd: "mix deps.get \\n mix compile \\n mix release", cmd_envs: [{"MIX_ENV", "prod"}], destination: %Akd.Destination{host: :local, path: ".", user: :current}}], rollback: [], run_ensure: true}] iex> deployment = %Akd.Deployment{mix_env: "prod", ...> build_at: Akd.Destination.local("."), ...> publish_to: Akd.Destination.local("."), ...> name: "name", ...> vsn: "0.1.1"} iex> Akd.Build.Release.get_hooks(deployment, [release_name: "name"]) [%Akd.Hook{ensure: [], ignore_failure: false, main: [%Akd.Operation{cmd: "rm -rf ./_build/prod/rel", cmd_envs: [], destination: %Akd.Destination{host: :local, path: ".", user: :current}}, %Akd.Operation{cmd: "mix deps.get \\n mix compile \\n mix release name", cmd_envs: [{"MIX_ENV", "prod"}], destination: %Akd.Destination{host: :local, path: ".", user: :current}}], rollback: [], run_ensure: true}] iex> deployment = %Akd.Deployment{mix_env: "prod", ...> build_at: Akd.Destination.local("."), ...> publish_to: Akd.Destination.local("."), ...> name: "name", ...> vsn: "0.1.1", ...> data: %{release_name: "name"}} iex> Akd.Build.Release.get_hooks(deployment, []) [%Akd.Hook{ensure: [], ignore_failure: false, main: [%Akd.Operation{cmd: "rm -rf ./_build/prod/rel", cmd_envs: [], destination: %Akd.Destination{host: :local, path: ".", user: :current}}, %Akd.Operation{cmd: "mix deps.get \\n mix compile \\n mix release name", cmd_envs: [{"MIX_ENV", "prod"}], destination: %Akd.Destination{host: :local, path: ".", user: :current}}], rollback: [], run_ensure: true}] """ @spec get_hooks(Akd.Deployment.t(), Keyword.t()) :: list(Akd.Hook.t()) def get_hooks(deployment, opts) do [build_hook(deployment, uniq_merge(opts, @default_opts))] end # This function takes a deployment and options and 
returns an Akd.Hook.t # struct using FormHook DSL defp build_hook(deployment, opts) do destination = Akd.DestinationResolver.resolve(:build, deployment) mix_env = deployment.mix_env release_name = Keyword.get(opts, :release_name) || Map.get(deployment.data, :release_name) cmd_envs = Keyword.get(opts, :cmd_envs, []) cmd_envs = [{"MIX_ENV", mix_env} | cmd_envs] form_hook opts do main("rm -rf ./_build/#{mix_env}/rel", destination) if release_name do main( "mix deps.get \n mix compile \n mix release #{release_name}", destination, cmd_envs: cmd_envs ) else main( "mix deps.get \n mix compile \n mix release", destination, cmd_envs: cmd_envs ) end end end # This function takes two keyword lists and merges them keeping the keys # unique. If there are multiple values for a key, it takes the value from # the first value of keyword1 corresponding to that key. defp uniq_merge(keyword1, keyword2) do keyword2 |> Keyword.merge(keyword1) |> Keyword.new() end end
lib/akd/base/build/release.ex
0.869576
0.537891
release.ex
starcoder
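The release name used by `build_hook/2` resolves option-first, then falls back to deployment data; a small sketch of that fallback:

```
opts = []
deployment_data = %{release_name: "name"}

Keyword.get(opts, :release_name) || Map.get(deployment_data, :release_name)
# => "name" (an explicit release_name: "other" option would win instead)
```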
defmodule Bolt.Sips.Internals.PackStream.Utils do alias Bolt.Sips.Internals.PackStream.Encoder alias Bolt.Sips.Types.Duration alias Bolt.Sips.Internals.PackStreamError defmacro __using__(_options) do quote do import unquote(__MODULE__) # catch all clause for encoding implementation defp do_call_encode(data_type, data, original_version) do raise PackStreamError, data_type: data_type, data: data, bolt_version: original_version, message: "Encoding function not implemented for" end @spec encode_list_data(list(), integer()) :: [any()] defp encode_list_data(data, bolt_version) do Enum.map( data, &Encoder.encode(&1, bolt_version) ) end @spec encode_kv(map(), integer()) :: binary() defp encode_kv(map, bolt_version) do Enum.reduce(map, <<>>, fn data, acc -> [acc, do_reduce_kv(data, bolt_version)] end) end @spec do_reduce_kv({atom(), any()}, integer()) :: [binary()] defp do_reduce_kv({key, value}, bolt_version) do [ Encoder.encode( key, bolt_version ), Encoder.encode(value, bolt_version) ] end @spec day_time(Time.t()) :: integer() defp day_time(time) do Time.diff(time, ~T[00:00:00.000], :nanosecond) end @spec decompose_datetime(Calendar.naive_datetime()) :: [integer()] defp decompose_datetime(%NaiveDateTime{} = datetime) do datetime_micros = NaiveDateTime.diff(datetime, ~N[1970-01-01 00:00:00.000], :microsecond) seconds = div(datetime_micros, 1_000_000) nanoseconds = rem(datetime_micros, 1_000_000) * 1_000 [seconds, nanoseconds] end @spec compact_duration(Duration.t()) :: [integer()] defp compact_duration(%Duration{} = duration) do months = 12 * duration.years + duration.months days = 7 * duration.weeks + duration.days seconds = 3600 * duration.hours + 60 * duration.minutes + duration.seconds [months, days, seconds, duration.nanoseconds] end end end end
lib/bolt_sips/internals/pack_stream/utils.ex
0.780202
0.44065
utils.ex
starcoder
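The `[seconds, nanoseconds]` split performed by `decompose_datetime/1`, worked by hand:

```
iex> micros = NaiveDateTime.diff(~N[1970-01-01 00:00:01.000002], ~N[1970-01-01 00:00:00.000], :microsecond)
1000002
iex> [div(micros, 1_000_000), rem(micros, 1_000_000) * 1_000]
[1, 2000]
```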
defmodule PaperTrail do alias PaperTrail.Version alias PaperTrail.Serializer defdelegate get_version(record), to: PaperTrail.VersionQueries defdelegate get_version(model_or_record, id_or_options), to: PaperTrail.VersionQueries defdelegate get_version(model, id, options), to: PaperTrail.VersionQueries defdelegate get_versions(record), to: PaperTrail.VersionQueries defdelegate get_versions(model_or_record, id_or_options), to: PaperTrail.VersionQueries defdelegate get_versions(model, id, options), to: PaperTrail.VersionQueries defdelegate get_current_model(version), to: PaperTrail.VersionQueries defdelegate make_version_struct(version, model, options), to: Serializer defdelegate serialize(data), to: Serializer defdelegate get_sequence_id(table_name), to: Serializer defdelegate add_prefix(schema, prefix), to: Serializer defdelegate get_item_type(data), to: Serializer defdelegate get_model_id(model), to: Serializer @doc """ Inserts a record to the database with a related version insertion in one transaction """ @spec insert(changeset :: Ecto.Changeset.t(model), options :: Keyword.t()) :: {:ok, %{model: model, version: Version.t()}} | {:error, Ecto.Changeset.t(model) | term} when model: struct def insert( changeset, options \\ [ origin: nil, meta: nil, originator: nil, prefix: nil, model_key: :model, version_key: :version, ecto_options: [] ] ) do PaperTrail.Multi.new() |> PaperTrail.Multi.insert(changeset, options) |> PaperTrail.Multi.commit() end @doc """ Same as insert/2 but returns only the model struct or raises if the changeset is invalid. """ @spec insert!(changeset :: Ecto.Changeset.t(model), options :: Keyword.t()) :: model when model: struct def insert!( changeset, options \\ [ origin: nil, meta: nil, originator: nil, prefix: nil, model_key: :model, version_key: :version, ecto_options: [] ] ) do changeset |> insert(options) |> model_or_error(:insert) end @doc """ Upserts a record to the database with a related version insertion in one transaction. """ @spec insert_or_update(changeset :: Ecto.Changeset.t(model), options :: Keyword.t()) :: {:ok, %{model: model, version: Version.t()}} | {:error, Ecto.Changeset.t(model) | term} when model: struct def insert_or_update( changeset, options \\ [origin: nil, meta: nil, originator: nil, prefix: nil] ) do PaperTrail.Multi.new() |> PaperTrail.Multi.insert_or_update(changeset, options) |> PaperTrail.Multi.commit() end @doc """ Same as insert_or_update/2 but returns only the model struct or raises if the changeset is invalid. """ @spec insert_or_update!(changeset :: Ecto.Changeset.t(model), options :: Keyword.t()) :: model when model: struct def insert_or_update!( changeset, options \\ [origin: nil, meta: nil, originator: nil, prefix: nil] ) do changeset |> insert_or_update(options) |> model_or_error(:insert_or_update) end @doc """ Updates a record from the database with a related version insertion in one transaction """ @spec update(changeset :: Ecto.Changeset.t(model), options :: Keyword.t()) :: {:ok, %{model: model, version: Version.t()}} | {:error, Ecto.Changeset.t(model) | term} when model: struct def update(changeset, options \\ [origin: nil, meta: nil, originator: nil, prefix: nil]) do PaperTrail.Multi.new() |> PaperTrail.Multi.update(changeset, options) |> PaperTrail.Multi.commit() end @doc """ Same as update/2 but returns only the model struct or raises if the changeset is invalid. 
""" @spec update!(changeset :: Ecto.Changeset.t(model), options :: Keyword.t()) :: model when model: struct def update!(changeset, options \\ [origin: nil, meta: nil, originator: nil, prefix: nil]) do changeset |> update(options) |> model_or_error(:update) end @doc """ Deletes a record from the database with a related version insertion in one transaction """ @spec delete(model_or_changeset :: model | Ecto.Changeset.t(model), options :: Keyword.t()) :: {:ok, %{model: model, version: Version.t()}} | {:error, Ecto.Changeset.t(model) | term} when model: struct def delete( model_or_changeset, options \\ [origin: nil, meta: nil, originator: nil, prefix: nil] ) do PaperTrail.Multi.new() |> PaperTrail.Multi.delete(model_or_changeset, options) |> PaperTrail.Multi.commit() end @doc """ Same as delete/2 but returns only the model struct or raises if the changeset is invalid. """ @spec delete!(model_or_changeset :: model | Ecto.Changeset.t(model), options :: Keyword.t()) :: model when model: struct def delete!( model_or_changeset, options \\ [origin: nil, meta: nil, originator: nil, prefix: nil] ) do model_or_changeset |> delete(options) |> model_or_error(:delete) end @spec model_or_error( result :: {:ok, %{required(:model) => model, optional(any()) => any()}}, action :: :insert | :insert_or_update | :update | :delete ) :: model when model: struct() defp model_or_error({:ok, %{model: model}}, _action) do model end @spec model_or_error( result :: {:error, reason :: term}, action :: :insert | :insert_or_update | :update | :delete ) :: no_return defp model_or_error({:error, %Ecto.Changeset{} = changeset}, action) do raise Ecto.InvalidChangesetError, action: action, changeset: changeset end defp model_or_error({:error, reason}, _action) do raise reason end end
lib/paper_trail.ex
0.840292
0.461381
paper_trail.ex
starcoder
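A sketch of the happy path, following the specs above (`Post` and `current_user` are assumptions for the example):

```
changeset = Post.changeset(%Post{}, %{title: "Hello"})

case PaperTrail.insert(changeset, origin: "admin-ui", originator: current_user) do
  {:ok, %{model: post, version: version}} ->
    # one transaction inserted both the record and its version row
    {post, version.id}

  {:error, %Ecto.Changeset{} = invalid} ->
    invalid
end
```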
defmodule LibraryFees do
  @spec datetime_from_string(binary) ::
          NaiveDateTime.t() | :invalid_date | :invalid_format | :invalid_time | :incompatible_calendars
  def datetime_from_string(string) do
    {_status, datetime} = NaiveDateTime.from_iso8601(string)
    datetime
  end

  @spec before_noon?(NaiveDateTime.t()) :: boolean
  def before_noon?(datetime) do
    datetime
    |> NaiveDateTime.to_time()
    |> Time.compare(~T[12:00:00])
    |> Kernel.==(:lt)
  end

  @spec return_date(NaiveDateTime.t()) :: Date.t()
  def return_date(checkout_datetime) do
    # Loans checked out before noon run 28 days, otherwise 29.
    days = if before_noon?(checkout_datetime), do: 28, else: 29
    seconds_in_day = 24 * 60 * 60

    checkout_datetime
    |> NaiveDateTime.add(days * seconds_in_day, :second)
    |> NaiveDateTime.to_date()
  end

  @spec days_late(Date.t(), NaiveDateTime.t()) :: non_neg_integer
  def days_late(planned_return_date, actual_return_datetime) do
    actual_return_datetime
    |> Date.diff(planned_return_date)
    |> max(0)
  end

  @spec monday?(NaiveDateTime.t()) :: boolean
  def monday?(datetime) do
    Date.day_of_week(datetime) == 1
  end

  @spec calculate_late_fee(String.t(), String.t(), number) :: integer
  def calculate_late_fee(checkout, return, rate) do
    checkout_date = datetime_from_string(checkout)
    return_date = datetime_from_string(return)
    # Returns made on a Monday are charged at half rate.
    discount = if monday?(return_date), do: 0.5, else: 1

    floor(days_late(return_date(checkout_date), return_date) * rate * discount)
  end
end
library-fees/lib/library_fees.ex
0.743168
0.404802
library_fees.ex
starcoder
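A worked end-to-end example: an 11:00 checkout is before noon, so the loan runs 28 days and is due 2019-04-08; the actual return on 2019-04-29 (a Monday) is 21 days late, giving floor(21 * 100 * 0.5):

```
iex> LibraryFees.calculate_late_fee("2019-03-11T11:00:00", "2019-04-29T10:00:00", 100)
1050
```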
defmodule MediaSample.Search.Definition do @indices %{ "ja" => [ settings: [ index: [ analysis: [ filter: [ pos_filter: [type: "kuromoji_part_of_speech", stoptags: ["助詞-格助詞-一般", "助詞-終助詞"]], greek_lowercase_filter: [type: "lowercase", language: "greek"] ], analyzer: [ kuromoji_analyzer: [ type: "custom", tokenizer: "kuromoji_tokenizer", filter: ["kuromoji_baseform", "pos_filter", "greek_lowercase_filter", "cjk_width"] ] ] ] ] ], mappings: [ entry: [ _source: [enabled: true], _all: [enabled: true, analyzer: "kuromoji_analyzer"], properties: [ id: [type: "integer", index: "not_analyzed"], title: [type: "string", index: "analyzed", analyzer: "kuromoji_analyzer"], description: [type: "string", index: "analyzed", analyzer: "kuromoji_analyzer"] ] ], section: [ _source: [enabled: true], _all: [enabled: true, analyzer: "kuromoji_analyzer"], _parent: [type: "entry"], properties: [ id: [type: "integer", index: "not_analyzed"], content: [type: "string", index: "analyzed", analyzer: "kuromoji_analyzer"] ] ] ] ], "en" => [ settings: [ index: [ analysis: [ filter: [ english_stop: [type: "stop", stopwords: "_english_"], english_stemmer: [type: "stemmer", language: "english"], english_possessive_stemmer: [type: "stemmer", language: "possessive_english"] ], analyzer: [ english: [ tokenizer: "standard", filter: ["english_possessive_stemmer", "lowercase", "english_stop", "english_stemmer"] ] ] ] ] ], mappings: [ entry: [ _source: [enabled: true], _all: [enabled: true, analyzer: "english"], properties: [ id: [type: "integer", index: "not_analyzed"], title: [type: "string", index: "analyzed", analyzer: "english"], description: [type: "string", index: "analyzed", analyzer: "english"] ] ], section: [ _source: [enabled: true], _all: [enabled: true, analyzer: "english"], _parent: [type: "entry"], properties: [ id: [type: "integer", index: "not_analyzed"], content: [type: "string", index: "analyzed", analyzer: "english"] ] ] ] ] } def indices(locale) do @indices[locale] end end
lib/media_sample/search/definition.ex
0.530966
0.477189
definition.ex
starcoder
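Since `indices/1` returns plain keyword lists, individual analyzer settings from the definition above can be read with the `Access` syntax; for example:

iex> MediaSample.Search.Definition.indices("en")[:mappings][:entry][:properties][:title]
[type: "string", index: "analyzed", analyzer: "english"]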
defmodule Dict do @moduledoc ~S""" WARNING: this module is deprecated. If you need a general dictionary, use the `Map` module. If you need to manipulate keyword lists, use `Keyword`. To convert maps into keywords and vice-versa, use the `new` function in the respective modules. """ @type key :: any @type value :: any @type t :: list | map # TODO: Deprecate every function on 1.4 defmacro __using__(_) do # Use this import to guarantee proper code expansion import Kernel, except: [size: 1] %{file: file, line: line} = __CALLER__ :elixir_errors.warn(line, file, "the Dict module is deprecated") quote do def get(dict, key, default \\ nil) do case fetch(dict, key) do {:ok, value} -> value :error -> default end end def get_lazy(dict, key, fun) when is_function(fun, 0) do case fetch(dict, key) do {:ok, value} -> value :error -> fun.() end end def get_and_update(dict, key, fun) do current_value = get(dict, key) {get, new_value} = fun.(current_value) {get, put(dict, key, new_value)} end def fetch!(dict, key) do case fetch(dict, key) do {:ok, value} -> value :error -> raise KeyError, key: key, term: dict end end def has_key?(dict, key) do match? {:ok, _}, fetch(dict, key) end def put_new(dict, key, value) do case has_key?(dict, key) do true -> dict false -> put(dict, key, value) end end def put_new_lazy(dict, key, fun) when is_function(fun, 0) do case has_key?(dict, key) do true -> dict false -> put(dict, key, fun.()) end end def drop(dict, keys) do Enum.reduce(keys, dict, &delete(&2, &1)) end def take(dict, keys) do Enum.reduce(keys, new, fn key, acc -> case fetch(dict, key) do {:ok, value} -> put(acc, key, value) :error -> acc end end) end def to_list(dict) do reduce(dict, {:cont, []}, fn kv, acc -> {:cont, [kv|acc]} end) |> elem(1) |> :lists.reverse end def keys(dict) do reduce(dict, {:cont, []}, fn {k, _}, acc -> {:cont, [k|acc]} end) |> elem(1) |> :lists.reverse end def values(dict) do reduce(dict, {:cont, []}, fn {_, v}, acc -> {:cont, [v|acc]} end) |> elem(1) |> :lists.reverse end def equal?(dict1, dict2) do # Use this import to avoid conflicts in the user code import Kernel, except: [size: 1] case size(dict1) == size(dict2) do false -> false true -> reduce(dict1, {:cont, true}, fn({k, v}, _acc) -> case fetch(dict2, k) do {:ok, ^v} -> {:cont, true} _ -> {:halt, false} end end) |> elem(1) end end def merge(dict1, dict2, fun \\ fn(_k, _v1, v2) -> v2 end) do # Use this import to avoid conflicts in the user code import Kernel, except: [size: 1] if size(dict1) < size(dict2) do reduce(dict1, {:cont, dict2}, fn {k, v1}, acc -> {:cont, update(acc, k, v1, &fun.(k, v1, &1))} end) else reduce(dict2, {:cont, dict1}, fn {k, v2}, acc -> {:cont, update(acc, k, v2, &fun.(k, &1, v2))} end) end |> elem(1) end def update(dict, key, initial, fun) do case fetch(dict, key) do {:ok, value} -> put(dict, key, fun.(value)) :error -> put(dict, key, initial) end end def update!(dict, key, fun) do case fetch(dict, key) do {:ok, value} -> put(dict, key, fun.(value)) :error -> raise KeyError, key: key, term: dict end end def pop(dict, key, default \\ nil) do case fetch(dict, key) do {:ok, value} -> {value, delete(dict, key)} :error -> {default, dict} end end def pop_lazy(dict, key, fun) when is_function(fun, 0) do case fetch(dict, key) do {:ok, value} -> {value, delete(dict, key)} :error -> {fun.(), dict} end end def split(dict, keys) do Enum.reduce(keys, {new, dict}, fn key, {inc, exc} = acc -> case fetch(exc, key) do {:ok, value} -> {put(inc, key, value), delete(exc, key)} :error -> acc end end) end defoverridable merge: 2, 
merge: 3, equal?: 2, to_list: 1, keys: 1, values: 1, take: 2, drop: 2, get: 2, get: 3, fetch!: 2, has_key?: 2, put_new: 3, pop: 2, pop: 3, split: 2, update: 4, update!: 3, get_and_update: 3, get_lazy: 3, pop_lazy: 3, put_new_lazy: 3 end end defmacrop target(dict) do quote do case unquote(dict) do %{__struct__: x} when is_atom(x) -> x %{} -> Map x when is_list(x) -> Keyword x -> unsupported_dict(x) end end end @spec keys(t) :: [key] def keys(dict) do target(dict).keys(dict) end @spec values(t) :: [value] def values(dict) do target(dict).values(dict) end @spec size(t) :: non_neg_integer def size(dict) do target(dict).size(dict) end @spec has_key?(t, key) :: boolean def has_key?(dict, key) do target(dict).has_key?(dict, key) end @spec get(t, key, value) :: value def get(dict, key, default \\ nil) do target(dict).get(dict, key, default) end @spec get_lazy(t, key, (() -> value)) :: value def get_lazy(dict, key, fun) do target(dict).get_lazy(dict, key, fun) end @spec get_and_update(t, key, (value -> {value, value})) :: {value, t} def get_and_update(dict, key, fun) do target(dict).get_and_update(dict, key, fun) end @spec fetch(t, key) :: value def fetch(dict, key) do target(dict).fetch(dict, key) end @spec fetch!(t, key) :: value | no_return def fetch!(dict, key) do target(dict).fetch!(dict, key) end @spec put(t, key, value) :: t def put(dict, key, val) do target(dict).put(dict, key, val) end @spec put_new(t, key, value) :: t def put_new(dict, key, val) do target(dict).put_new(dict, key, val) end @spec put_new_lazy(t, key, (() -> value)) :: t def put_new_lazy(dict, key, fun) do target(dict).put_new_lazy(dict, key, fun) end @spec delete(t, key) :: t def delete(dict, key) do target(dict).delete(dict, key) end @spec merge(t, t) :: t def merge(dict1, dict2) do target1 = target(dict1) target2 = target(dict2) if target1 == target2 do target1.merge(dict1, dict2) else do_merge(target1, dict1, dict2, fn(_k, _v1, v2) -> v2 end) end end @spec merge(t, t, (key, value, value -> value)) :: t def merge(dict1, dict2, fun) do target1 = target(dict1) target2 = target(dict2) if target1 == target2 do target1.merge(dict1, dict2, fun) else do_merge(target1, dict1, dict2, fun) end end defp do_merge(target1, dict1, dict2, fun) do Enumerable.reduce(dict2, {:cont, dict1}, fn({k, v}, acc) -> {:cont, target1.update(acc, k, v, fn(other) -> fun.(k, other, v) end)} end) |> elem(1) end @spec pop(t, key, value) :: {value, t} def pop(dict, key, default \\ nil) do target(dict).pop(dict, key, default) end @spec pop_lazy(t, key, (() -> value)) :: {value, t} def pop_lazy(dict, key, fun) do target(dict).pop_lazy(dict, key, fun) end @spec update!(t, key, (value -> value)) :: t def update!(dict, key, fun) do target(dict).update!(dict, key, fun) end @spec update(t, key, value, (value -> value)) :: t def update(dict, key, initial, fun) do target(dict).update(dict, key, initial, fun) end @spec split(t, [key]) :: {t, t} def split(dict, keys) do target(dict).split(dict, keys) end @spec drop(t, [key]) :: t def drop(dict, keys) do target(dict).drop(dict, keys) end @spec take(t, [key]) :: t def take(dict, keys) do target(dict).take(dict, keys) end @spec empty(t) :: t def empty(dict) do target(dict).empty(dict) end @spec equal?(t, t) :: boolean def equal?(dict1, dict2) do target1 = target(dict1) target2 = target(dict2) cond do target1 == target2 -> target1.equal?(dict1, dict2) target1.size(dict1) == target2.size(dict2) -> Enumerable.reduce(dict2, {:cont, true}, fn({k, v}, _acc) -> case target1.fetch(dict1, k) do {:ok, ^v} -> {:cont, true} _ -> 
{:halt, false} end end) |> elem(1) true -> false end end @spec to_list(t) :: list def to_list(dict) do target(dict).to_list(dict) end @spec unsupported_dict(t) :: no_return defp unsupported_dict(dict) do raise ArgumentError, "unsupported dict: #{inspect dict}" end end
lib/elixir/lib/dict.ex
0.597138
0.435781
dict.ex
starcoder
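The `target/1` macro above dispatches on the dict's shape, so the same `Dict` call works for maps and keyword lists alike; per the deprecation notice, the `Map` call is the recommended replacement:

iex> Dict.get(%{a: 1}, :a)
1
iex> Dict.get([a: 1], :a)
1
iex> Map.get(%{a: 1}, :a)
1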
defmodule Conqueuer.Pool do
  @moduledoc """
  Use this mixin to define a poolboy pool and supervisor.

  Given you want a pool named `:resolvers` and will define a worker named
  `MyApp.ResolverWorker`:

      defmodule MyApp.ResolversPoolSupervisor do
        use Conqueuer.Pool, name: :resolvers,
                            worker: MyApp.ResolverWorker,
                            worker_args: [arg1: 1],
                            size: 10,
                            max_overflow: 20
      end

  The `worker_args` argument is used to set up the initial state of your
  workers, as it is passed through to the worker's `start_link` function and
  eventually to the `init` function. The `Worker` module implements a default
  `init` function that sets the `worker_args` as the worker's initial state.
  You may override the `init` function and use the options to set up a more
  custom initial state.

  The `size` and `max_overflow` arguments are optional; if not provided, the
  defaults are `size: 1` and `max_overflow: 0`. For more information on these
  options please see the poolboy project's
  [documentation](https://github.com/devinus/poolboy) or this
  [article](http://hashnuke.com/2013/10/03/managing-processes-with-poolboy-in-elixir.html).

  Now that the `:resolvers` pool and supervisor are defined, you will need to
  add them to your supervision tree.

      defmodule Test do
        use Application

        def start(_type, _args) do
          import Supervisor.Spec, warn: false

          children = [
            supervisor(
              MyApp.ResolversPoolSupervisor,
              [[], [name: :ResolversPoolSupervisor]]
            ),
            ...
          ]

          Supervisor.start_link(children, opts)
        end
      end

  The name of the supervisor process is very important, as its collaborators
  infer its name through convention.
  """

  defmacro __using__(options) do
    quote do
      use Supervisor

      def start_link(args \\ [], opts \\ []) do
        Supervisor.start_link __MODULE__, args, opts
      end

      def init([]) do
        pool_options = [
          name: {:local, name},
          worker_module: worker,
          size: size,
          max_overflow: max_overflow
        ]

        children = [
          :poolboy.child_spec(name, pool_options, worker_args)
        ]

        supervise(children, strategy: :one_for_one)
      end

      defp worker_args do
        unquote(options[:worker_args] || [])
      end

      defp name do
        unquote(options[:name])
      end

      defp max_overflow do
        unquote(options[:max_overflow] || 0)
      end

      defp size do
        unquote(options[:size] || 1)
      end

      defp worker do
        unquote(options[:worker])
      end
    end
  end
end
lib/conqueuer/pool.ex
0.694613
0.610526
pool.ex
starcoder
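Once the supervisor from the moduledoc is in the supervision tree, workers can be borrowed with poolboy's standard checkout API. A minimal sketch, assuming the `:resolvers` pool from the example and that `MyApp.ResolverWorker` handles a hypothetical `{:resolve, host}` call:

# Check out a worker, use it, and return it to the pool automatically.
:poolboy.transaction(:resolvers, fn worker_pid ->
  GenServer.call(worker_pid, {:resolve, "example.com"})
end)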
defmodule Tarearbol.DynamicManager do
  @moduledoc ~S"""
  The scaffold implementation to dynamically manage many similar tasks running
  as processes.

  It creates a main supervisor, managing the `GenServer` holding the state and
  the `DynamicSupervisor` handling children. It uses a `:rest_for_one`
  strategy, so that if the process holding the state crashes, the children
  will be restarted.

  Typically one calls `use Tarearbol.DynamicManager` and implements at least
  the `children_specs/0` callback, and receives back a supervised tree with a
  state and many processes controlled by `DynamicSupervisor`.

  To see how it works you might try

      defmodule DynamicManager do
        use Tarearbol.DynamicManager

        def children_specs do
          for i <- 1..10, into: %{}, do: {"foo_#{i}", []}
        end
      end

      {:ok, pid} = DynamicManager.start_link()

  The above would spawn `10` children with IDs `"foo_1".."foo_10"`.

  ## Workers Management

  `DynamicManager` allows dynamic worker management. It exports the following
  functions

      @spec get(id :: id()) :: Enum.t()
      @spec put(id :: id(), opts :: Enum.t()) :: pid()
      @spec del(id :: id()) :: :ok
      @spec restart(id :: id()) :: :ok

  The semantics of `put/2` arguments is the same as a single `child_spec`;
  `del/1` and `get/1` receive the unique ID of the child and shut it down or
  return its payload, respectively.

  ## Workers Callbacks

  Workers are allowed to implement several callbacks to be used to pass
  messages to them.

  - **`perform/2`** is called periodically by the library internals; the
    interval is set upon worker initialization via `children_specs/0` (static)
    or `put/2` (dynamic); an interval of `0` suppresses periodic invocations
  - **`call/3`** to handle a synchronous message sent to the worker
  - **`cast/2`** to handle an asynchronous message sent to the worker
  - **`terminate/2`** to handle worker process termination

  All the above should return a value of the
  `t:Tarearbol.DynamicManager.response/0` type.

  Also, the implementing module might use a custom initialization function,
  e. g. to dynamically build the payload. It should be passed to
  `use DynamicManager` as a parameter `init: handler` and might be a tuple
  `{module(), function(), arity()}` or a captured function `&MyMod.my_init/1`.
  Arities 0, 1 and 2 are allowed, as described by the
  `t:Tarearbol.DynamicManager.init_handler/0` type. The worker process will
  call this function from the `c:GenServer.handle_continue/2` callback.
  """
  @moduledoc since: "0.9.0"

  use Boundary, exports: [Child]

  require Logger

  @typedoc "Identifier of the child process"
  @type id :: any()

  @typedoc "Payload associated with the worker"
  @type payload :: any()

  @typedoc "Expected response from the `DynamicManager` implementation"
  @type response ::
          :halt
          | {:replace, payload()}
          | {:replace, id(), payload()}
          | {{:timeout, integer()}, payload()}
          | {:ok, any()}
          | any()

  @typedoc "Post-instantiation init handler type, that might be passed to `use DynamicManager` via `init:`"
  @type init_handler ::
          nil
          | (() -> Tarearbol.DynamicManager.payload())
          | (Tarearbol.DynamicManager.payload() -> Tarearbol.DynamicManager.payload())
          | (Tarearbol.DynamicManager.id(), Tarearbol.DynamicManager.payload() ->
               Tarearbol.DynamicManager.payload())

  @doc """
  This function is called to retrieve the map of children, with the child name
  as the key and the worker spec as the value.
The value must be an enumerable with keys among: - `:payload` passed as second argument to `perform/2`, default `nil` - `:timeout` time between iterations of `perform/2`, default `1` second - `:lull` threshold to notify latency in performing, default `1.1` (the threshold is `:lull` times the `:timeout`) This function should not care about anything save for producing side effects. It will be backed by `DynamicSupervisor`. The value it returns will be put into the state under `children` key. """ @doc since: "0.9.0" @callback children_specs :: %{required(id()) => Enum.t()} @doc """ The main function, doing all the internal job, supervised. It will be called with the child `id` as first argument and the `payload` option to child spec as second argument (defaulting to `nil`, can also be ignored if not needed). ### Return values `perform/2` might return - `:halt` if it wants to be killed - `{:ok, result}` to store the last result and reschedule with default timeout - `{:replace, payload}` to replace the payload (state) of the current worker with the new one - `{:replace, id, payload}` to replace the current worker with the new one - `{{:timeout, timeout}, result}` to store the last result and reschedule in given timeout interval - or **_deprecated_** anything else will be treated as a result """ @doc since: "0.9.0" @callback perform(id :: id(), payload :: payload()) :: response() @doc """ The method to implement to support explicit `GenServer.call/3` on the wrapping worker. """ @doc since: "1.2.0" @callback call(message :: any(), from :: GenServer.from(), {id :: id(), payload :: payload()}) :: response() @doc """ The method to implement to support explicit `GenServer.cast/2` on the wrapping worker. """ @doc since: "1.2.1" @callback cast(message :: any(), {id :: id(), payload :: payload()}) :: response() @doc """ The method that will be called before the worker is terminated. """ @doc since: "1.2.0" @callback terminate(reason :: term(), {id :: id(), payload :: payload()}) :: any() @doc """ Declares an instance-wide callback to report state; if the startup process takes a while, it’d be run in `handle_continue/2` and this function will be called after it finishes so that the application might start using it. If the application is not interested in receiving state updates, e. g. when all it needs from runners is a side effect, there is a default implementation that does nothing. """ @doc since: "0.9.0" @callback handle_state_change(state :: :down | :up | :starting | :unknown) :: :ok | :restart @doc """ Declares a callback to report slow process (when the scheduler cannot process in a reasonable time). """ @doc since: "0.9.5" @callback handle_timeout(state :: map()) :: any() defmodule Child do @moduledoc false @type t :: %{ __struct__: __MODULE__, pid: pid(), value: Tarearbol.DynamicManager.payload(), busy?: nil | DateTime.t(), opts: keyword() } @enforce_keys [:pid, :value] defstruct [:pid, :value, :opts, :busy?] 
end @defaults %{ timeout: 1_000, lull: 1.1, payload: nil } @doc false defmacro __using__(opts) do {defaults, opts} = Keyword.pop(opts, :defaults, []) defaults = @defaults |> Map.merge(Map.new(defaults)) |> Macro.escape() {init_handler, opts} = Keyword.pop(opts, :init) {distributed, opts} = Keyword.pop(opts, :distributed, false) {pickup, opts} = Keyword.pop(opts, :pickup, :hashring) quote generated: true, location: :keep do @on_definition Tarearbol.DynamicManager @namespace Keyword.get(unquote(opts), :namespace, __MODULE__) @pickup unquote(pickup) @doc false @spec __defaults__ :: %{ timeout: non_neg_integer(), lull: float(), payload: term() } def __defaults__, do: unquote(defaults) @doc false @spec __namespace__ :: module() def __namespace__, do: @namespace @init_handler (case unquote(init_handler) do nil -> nil fun when is_function(fun, 0) -> fun fun when is_function(fun, 1) -> fun fun when is_function(fun, 2) -> fun {mod, fun, arity} when is_atom(mod) and is_atom(fun) and arity in [0, 1, 2] -> Function.capture(mod, fun, arity) {mod, fun} when is_atom(mod) and is_atom(fun) -> Function.capture(mod, fun, 1) end) @spec __child_mod__(module :: module() | list()) :: module() defp __child_mod__(module) when is_atom(module), do: __child_mod__(Module.split(module)) defp __child_mod__(module) when is_list(module), do: Module.concat(@namespace, List.last(module)) @doc false @spec __init_handler__ :: Tarearbol.DynamicManager.init_handler() def __init_handler__, do: @init_handler @doc false @spec __internal_worker_module__ :: module() def __internal_worker_module__, do: __child_mod__(Tarearbol.InternalWorker) @doc false @spec __dynamic_supervisor_module__ :: module() def __dynamic_supervisor_module__, do: __child_mod__(Tarearbol.DynamicSupervisor) state_module_ast = quote generated: true, location: :keep do @moduledoc false use GenServer alias Tarearbol.DynamicManager @type t :: %{ __struct__: __MODULE__, state: :down | :up | :starting | :unknown, children: %{optional(DynamicManager.id()) => DynamicManager.Child.t()}, manager: module(), ring: HashRing.t() } defstruct [:manager, :ring, state: :down, children: %{}] @spec start_link([{:manager, atom()}]) :: GenServer.on_start() def start_link(manager: manager), do: GenServer.start_link(__MODULE__, [manager: manager], name: __MODULE__) @spec state :: t() def state, do: GenServer.call(__MODULE__, :state) @spec update_state(state :: :down | :up | :starting | :unknown) :: :ok def update_state(state), do: GenServer.cast(__MODULE__, {:update_state, state}) @spec put(id :: DynamicManager.id(), props :: map() | keyword()) :: :ok def put(id, props), do: GenServer.cast(__MODULE__, {:put, id, props}) @spec update!( id :: DynamicManager.id(), (DynamicManager.Child.t() -> DynamicManager.Child.t()) ) :: :ok def update!(id, fun), do: GenServer.cast(__MODULE__, {:update!, id, fun}) @spec del(id :: DynamicManager.id()) :: :ok def del(id), do: GenServer.cast(__MODULE__, {:del, id}) @spec get(id :: DynamicManager.id()) :: DynamicManager.Child.t() def get(id, default \\ nil), do: GenServer.call(__MODULE__, {:get, id, default}) @impl GenServer def init(opts) do opts = opts |> Keyword.put(:state, :starting) |> Keyword.put_new(:ring, HashRing.new()) state = struct!(__MODULE__, opts) state.manager.handle_state_change(:starting) {:ok, state} end @impl GenServer def handle_call(:state, _from, %__MODULE__{} = state), do: {:reply, state, state} @impl GenServer def handle_call( {:get, id, default}, _from, %__MODULE__{children: children} = state ), do: {:reply, Map.get(children, id, 
default), state} @impl GenServer def handle_cast( {:put, id, %DynamicManager.Child{} = props}, %__MODULE__{ring: ring, children: children} = state ), do: {:noreply, %{ state | ring: ring && HashRing.add_node(ring, id), children: Map.put(children, id, props) }} @impl GenServer def handle_cast({:put, id, props}, %__MODULE__{} = state), do: handle_cast({:put, id, struct(DynamicManager.Child, props)}, state) @impl GenServer def handle_cast({:update!, id, fun}, %__MODULE__{children: children} = state), do: {:noreply, %{state | children: Map.update!(children, id, fun)}} @impl GenServer def handle_cast({:del, id}, %__MODULE__{ring: ring, children: children} = state), do: {:noreply, %{ state | ring: ring && HashRing.remove_node(ring, id), children: Map.delete(children, id) }} @impl GenServer def handle_cast({:update_state, new_state}, %__MODULE__{} = state), do: {:noreply, %{state | state: new_state}} end @state_module Module.concat(@namespace, State) Module.create(@state_module, state_module_ast, __ENV__) @doc false @spec __state_module__ :: module() def __state_module__, do: @state_module @registry_module Module.concat(@namespace, Registry) @doc false @spec __registry_module__ :: module() def __registry_module__, do: @registry_module @doc false @spec state :: struct() def state, do: @state_module.state() @doc false @spec __free_worker__(kind :: :random | :stream | :hashring, tuple()) :: {:id, Tarearbol.DynamicManager.id()} | list() def __free_worker__(kind \\ @pickup, tuple) def __free_worker__(:stream, _tuple), do: state().children |> Stream.filter(&is_nil(elem(&1, 1).busy?)) |> Enum.take(1) def __free_worker__(:random, _tuple) do state().children |> Enum.filter(&is_nil(elem(&1, 1).busy?)) |> case do [] -> nil [one] -> one many -> Enum.random(many) end |> List.wrap() end def __free_worker__(:hashring, tuple), do: {:id, HashRing.key_to_node(state().ring, tuple)} require Logger @behaviour Tarearbol.DynamicManager @impl Tarearbol.DynamicManager def perform(id, _payload) do Logger.warn( "perform for id[#{id}] was executed with state\n\n" <> inspect(__state_module__().state()) <> "\n\nyou want to override `perform/2` in your #{inspect(__MODULE__)}\n" <> "to perform some actual work instead of printing this message" ) if Enum.random(1..3) == 1, do: :halt, else: {:ok, 42} end @impl Tarearbol.DynamicManager def call(_message, _from, {id, _payload}) do Logger.warn( "call for id[#{id}] was executed with state\n\n" <> inspect(__state_module__().state()) <> "\n\nyou want to override `call/3` in your #{inspect(__MODULE__)}\n" <> "to perform some actual work instead of printing this message" ) :ok end @impl Tarearbol.DynamicManager def cast(_message, {id, _payload}) do Logger.warn( "cast for id[#{id}] was executed with state\n\n" <> inspect(__state_module__().state()) <> "\n\nyou want to override `cast/2` in your #{inspect(__MODULE__)}\n" <> "to perform some actual work instead of printing this message" ) :ok end @impl Tarearbol.DynamicManager def terminate(reason, {id, payload}) do Logger.info( "Exiting DynamicWorker[" <> inspect(id) <> "] with reason " <> inspect(reason) <> ". 
Payload: " <> inspect(payload) ) end defoverridable perform: 2, call: 3, cast: 2, terminate: 2 @impl Tarearbol.DynamicManager def handle_state_change(state), do: Logger.info("[#{inspect(__MODULE__)}] state has changed to #{state}") defoverridable handle_state_change: 1 @impl Tarearbol.DynamicManager def handle_timeout(state), do: Logger.warn("A worker is too slow [#{inspect(state)}]") defoverridable handle_timeout: 1 use Supervisor @doc """ Starts the `DynamicSupervisor` and its helpers to manage dynamic children """ def start_link(opts \\ []), do: Supervisor.start_link(__MODULE__, opts, name: __MODULE__) @impl Supervisor def init(opts) do children = [ {Registry, [keys: :unique, name: @registry_module]}, {@state_module, [manager: __MODULE__]}, {Tarearbol.DynamicSupervisor, Keyword.put(opts, :manager, __MODULE__)}, {Tarearbol.InternalWorker, [manager: __MODULE__]} ] Logger.info( "Starting #{inspect(__MODULE__)} with following children:\n" <> "    State → #{inspect(@state_module)}\n" <> "    DynamicSupervisor → #{inspect(__dynamic_supervisor_module__())}\n" <> "    InternalWorker → #{inspect(__internal_worker_module__())}" ) Supervisor.init(children, strategy: :rest_for_one) end @doc """ Performs a `GenServer.call/3` to the worker specified by `id`. `c:Tarearbol.DynamicManager.call/3` callback should be implemented for this to work. """ @doc since: "1.2.0" @spec synch_call(id :: nil | Tarearbol.DynamicManager.id(), message :: any()) :: {:ok, any()} | :error def synch_call(id, message), do: do_ynch_call(:call, id, message) @doc """ Performs a `GenServer.cast/2` to the worker specified by `id`. `c:Tarearbol.DynamicManager.cast/2` callback should be implemented for this to work. """ @doc since: "1.2.1" @spec asynch_call(id :: nil | Tarearbol.DynamicManager.id(), message :: any()) :: :ok | :error def asynch_call(id, message), do: do_ynch_call(:cast, id, message) @spec do_ynch_call(:call | :cast, nil | any(), term()) :: :error | :ok | {:ok, term()} defp do_ynch_call(type, nil, message) do @pickup |> __free_worker__(message |> Tuple.to_list() |> Enum.take(2) |> List.to_tuple()) |> case do {:id, worker_id} -> do_ynch_call(type, worker_id, message) [] -> :error [{_id, %Child{pid: pid}} | _] -> GenServer |> apply(type, [pid, message]) |> do_wrap_result(type) end end defp do_ynch_call(type, id, message) do case Registry.lookup(@registry_module, id) do [{pid, nil}] -> GenServer |> apply(type, [pid, message]) |> do_wrap_result(type) [] -> :error end end @spec do_wrap_result(result, :call | :cast) :: {:ok, result} | :ok when result: any() defp do_wrap_result(result, :call), do: {:ok, result} defp do_wrap_result(result, :cast), do: result @put if unquote(distributed), do: :multiput, else: :put @doc """ Dynamically adds a supervised worker implementing `Tarearbol.DynamicManager` behaviour to the list of supervised children. If `distributed: true` parameter was given to `use Tarearbol.DynamicManager`, puts the worker into all the nodes managed by `Cloister`. `:cloister` dependency must be added to a project to use this feature. """ def put(id, opts), do: apply(Tarearbol.InternalWorker, @put, [__internal_worker_module__(), id, opts]) @doc """ Dynamically adds a supervised worker implementing `Tarearbol.DynamicManager` behaviour to the list of supervised children on all the nodes managed by `Cloister`. Use `distributed: true` parameter in call to `use Tarearbol.DynamicManager` and regular `put/2` instead. 
""" @doc deprecated: """ Use `distributed: true` parameter in call to `use Tarearbol.DynamicManager` and regular `put/2` instead. """ defdelegate multiput(id, opts), to: __MODULE__, as: :put @del if unquote(distributed), do: :multidel, else: :del @doc """ Dynamically removes a supervised worker implementing `Tarearbol.DynamicManager` behaviour from the list of supervised children If `distributed: true` parameter was given to `use Tarearbol.DynamicManager`, deletes the worker from all the nodes managed by `Cloister`. `:cloister` dependency must be added to a project to use this feature. """ def del(id), do: apply(Tarearbol.InternalWorker, @del, [__internal_worker_module__(), id]) @doc """ Dynamically removes a supervised worker implementing `Tarearbol.DynamicManager` behaviour from the list of supervised children on all the nodes managed by `Cloister`. Use `distributed: true` parameter in call to `use Tarearbol.DynamicManager` and regular `del/1` instead. """ @doc deprecated: """ Use `distributed: true` parameter in call to `use Tarearbol.DynamicManager` and regular `del/1` instead. """ defdelegate multidel(id), to: __MODULE__, as: :del @doc """ Retrieves the information (`payload`, `timeout`, `lull` etc.) assotiated with the supervised worker """ def get(id), do: Tarearbol.InternalWorker.get(__internal_worker_module__(), id) @doc """ Restarts the `DynamicManager` to the clean state """ def restart, do: Tarearbol.InternalWorker.restart(__internal_worker_module__()) end end @doc false def __on_definition__(%Macro.Env{module: mod}, kind, name, args, _guards, body) do generated = body |> Macro.prewalk(nil, fn {_, meta, _} = t, nil -> {t, Keyword.get(meta, :generated)} t, acc -> {t, acc} end) |> elem(1) report_override(generated, mod, kind, name, length(args)) end @reserved ~w| start_link init state get del put restart asynch_call synch_call multidel multiput __init_handler__ __namespace__ __dynamic_supervisor_module__ __internal_worker_module__ __registry_module__ __state_module__ |a defp report_override(nil, mod, kind, name, arity) when name in @reserved, do: Logger.warn(""" You are trying to override the reserved function in `#{kind} #{inspect(Function.capture(mod, name, arity))}`. Please consider choosing another name. """) defp report_override(_, _, _, _, _), do: :ok end
lib/tarearbol/dynamic_management/dynamic_manager.ex
0.895984
0.5816
dynamic_manager.ex
starcoder
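A minimal sketch of a manager overriding `perform/2`; the module name and spec values are hypothetical, and the `{:replace, payload}` return shape follows the callback docs above:

defmodule MyApp.Pinger do
  use Tarearbol.DynamicManager

  def children_specs do
    # One worker, starting with a zero counter, performing every 5 seconds.
    %{"ping_1" => [payload: 0, timeout: 5_000]}
  end

  def perform(_id, count) do
    # Reschedule with the incremented counter as the new payload.
    {:replace, count + 1}
  end
end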
defmodule Day11.Grid do
  @empty "L"
  @occupied "#"
  @space "."
  @seats [@empty, @occupied]

  def seat?(row, col, grid), do: Map.get(grid, {row, col}) in @seats

  def move(-1, _col, _delta, _area), do: {-1, -1}
  def move(_row, -1, _delta, _area), do: {-1, -1}
  def move(row, _col, _delta, {numrows, _, _}) when row == numrows, do: {-1, -1}
  def move(_row, col, _delta, {_, numcols, _}) when col == numcols, do: {-1, -1}

  def move(row, col, {dx, dy} = delta, {_, _, grid} = area) do
    newrow = row + dx
    newcol = col + dy
    if seat?(newrow, newcol, grid), do: {newrow, newcol}, else: move(newrow, newcol, delta, area)
  end

  def dirs(row, col, _area, 1 = _part) do
    [
      {row, col - 1},
      {row, col + 1},
      {row - 1, col},
      {row + 1, col},
      {row - 1, col - 1},
      {row - 1, col + 1},
      {row + 1, col - 1},
      {row + 1, col + 1}
    ]
  end

  def dirs(row, col, area, 2 = _part) do
    [
      move(row, col, {0, -1}, area),
      move(row, col, {0, 1}, area),
      move(row, col, {-1, 0}, area),
      move(row, col, {1, 0}, area),
      move(row, col, {-1, -1}, area),
      move(row, col, {-1, 1}, area),
      move(row, col, {1, -1}, area),
      move(row, col, {1, 1}, area)
    ]
  end

  def occupiedaround(row, col, {numrows, numcols, grid} = area, part) do
    dirs(row, col, area, part)
    |> Enum.count(fn {r, c} ->
      r in 0..(numrows - 1) and c in 0..(numcols - 1) and Map.get(grid, {r, c}) == @occupied
    end)
  end

  def change({r, c, @space}, _area, _part), do: {{r, c}, @space}

  def change({r, c, @empty}, area, part) do
    countoccupied = occupiedaround(r, c, area, part)
    {{r, c}, if(countoccupied == 0, do: @occupied, else: @empty)}
  end

  def change({r, c, @occupied}, area, part) do
    countoccupied = occupiedaround(r, c, area, part)
    # Part 1 tolerates up to 3 occupied neighbors, part 2 up to 4.
    max_occupied = if part == 1, do: 3, else: 4
    {{r, c}, if(countoccupied > max_occupied, do: @empty, else: @occupied)}
  end

  def step({numrows, numcols, grid} = area, part) do
    newgrid =
      for col <- 0..(numcols - 1), row <- 0..(numrows - 1) do
        change({row, col, Map.get(grid, {row, col})}, area, part)
      end
      |> Enum.reduce(%{}, fn {k, v}, acc -> Map.put(acc, k, v) end)

    {numrows, numcols, newgrid}
  end

  def countoccupied({numrows, numcols, grid}) do
    for(
      col <- 0..(numcols - 1),
      row <- 0..(numrows - 1),
      do: Map.get(grid, {row, col}) == @occupied
    )
    |> Enum.count(& &1)
  end

  def printarea({numrows, numcols, grid} = area) do
    # Walk the grid row by row so each printed line is an actual grid row.
    for(row <- 0..(numrows - 1), col <- 0..(numcols - 1), do: Map.get(grid, {row, col}))
    |> Enum.chunk_every(numcols)
    |> Enum.map(&Enum.join(&1, ""))
    |> Enum.join("\n")
    |> IO.puts()

    area
  end
end

defmodule Day11 do
  alias Day11.Grid

  def readinput() do
    rows =
      File.read!("11.test.txt")
      |> String.split("\n", trim: true)

    numrows = length(rows)
    numcols = String.length(Enum.at(rows, 0))

    grid =
      rows
      |> Enum.with_index()
      |> Enum.flat_map(fn {rowstr, rownum} ->
        String.graphemes(rowstr)
        |> Enum.with_index()
        |> Enum.flat_map(fn {letter, colnum} -> %{{rownum, colnum} => letter} end)
      end)
      |> Enum.into(%{})

    {numrows, numcols, grid}
  end

  def untilstable(area, occupied, part) do
    newarea = Grid.step(area, part)
    newoccupied = Grid.countoccupied(newarea)
    if newoccupied == occupied, do: newoccupied, else: untilstable(newarea, newoccupied, part)
  end

  def part1(area \\ readinput()) do
    untilstable(area, Grid.countoccupied(area), 1)
  end

  def part2(area \\ readinput()) do
    untilstable(area, Grid.countoccupied(area), 2)
  end
end
2020/day11/lib/day11.ex
0.561215
0.625838
day11.ex
starcoder
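`Day11.Grid` works on a `{numrows, numcols, grid_map}` triple, so the counting helpers can be exercised directly on a hand-built grid:

iex> grid = %{{0, 0} => "#", {0, 1} => "L", {1, 0} => ".", {1, 1} => "#"}
iex> Day11.Grid.countoccupied({2, 2, grid})
2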
defmodule ExWire.Message.Pong do @moduledoc """ A wrapper for ExWire's `Pong` message. """ alias ExWire.Struct.Endpoint @message_id 0x02 defstruct [ to: nil, hash: nil, timestamp: nil, ] @type t :: %__MODULE__{ to: Endpoint.t, hash: binary(), timestamp: integer() } @spec message_id() :: ExWire.Message.message_id def message_id, do: @message_id @doc """ Decodes a given message binary, which is assumed to be an RLP encoded list of elements. ## Examples iex> ExWire.Message.Pong.decode([[<<1,2,3,4>>, <<>>, <<0, 5>>], <<2>>, 3] |> ExRLP.encode) %ExWire.Message.Pong{ to: %ExWire.Struct.Endpoint{ip: {1, 2, 3, 4}, tcp_port: 5, udp_port: nil}, hash: <<2>>, timestamp: 3, } iex> ExWire.Message.Pong.decode([<<1>>] |> ExRLP.encode) ** (MatchError) no match of right hand side value: [<<1>>] """ @spec decode(binary()) :: t def decode(data) do [to, hash, timestamp] = ExRLP.decode(data) %__MODULE__{ to: Endpoint.decode(to), hash: hash, timestamp: :binary.decode_unsigned(timestamp), } end @doc """ Given a Pong message, encodes it so it can be sent on the wire in RLPx. ## Examples iex> ExWire.Message.Pong.encode(%ExWire.Message.Pong{ ...> to: %ExWire.Struct.Endpoint{ip: {1, 2, 3, 4}, tcp_port: 5, udp_port: nil}, ...> hash: <<2>>, ...> timestamp: 3} ...> ) |> ExRLP.decode() [[<<1, 2, 3, 4>>, "", <<0, 5>>], <<2>>, <<3>>] """ @spec encode(t) :: binary() def encode(%__MODULE__{to: to, hash: hash, timestamp: timestamp}) do ExRLP.encode([ Endpoint.encode(to), hash, timestamp, ]) end @doc """ Pong messages should be routed to given endpoint. ## Examples iex> ExWire.Message.Pong.to(%ExWire.Message.Pong{ ...> to: %ExWire.Struct.Endpoint{ip: {1, 2, 3, 4}, tcp_port: 5, udp_port: nil}, ...> hash: <<2>>, ...> timestamp: 3} ...> ) %ExWire.Struct.Endpoint{ip: {1, 2, 3, 4}, tcp_port: 5, udp_port: nil} """ @spec to(t) :: Endpoint.t | nil def to(message) do message.to end end
apps/ex_wire/lib/ex_wire/message/pong.ex
0.88113
0.418162
pong.ex
starcoder
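Because `encode/1` and `decode/1` are inverses (given that `Endpoint` round-trips, as the doctests above suggest), a pong survives an encode/decode round trip:

iex> pong = %ExWire.Message.Pong{to: %ExWire.Struct.Endpoint{ip: {1, 2, 3, 4}, tcp_port: 5, udp_port: nil}, hash: <<2>>, timestamp: 3}
iex> pong |> ExWire.Message.Pong.encode() |> ExWire.Message.Pong.decode()
%ExWire.Message.Pong{to: %ExWire.Struct.Endpoint{ip: {1, 2, 3, 4}, tcp_port: 5, udp_port: nil}, hash: <<2>>, timestamp: 3}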
defmodule Cassandrax.Query.Builder do @moduledoc """ Builds query clauses and adds them to a `Cassandrax.Query` """ @doc """ Converts the given `data` into a query clause and adds it to the given `Cassandrax.Query`. """ def build(type, queryable, {:^, _, [var]}) do quote do fragment = Cassandrax.Query.Builder.build_fragment(unquote(type), unquote(var)) query = Cassandrax.Queryable.to_query(unquote(queryable)) Cassandrax.Query.Builder.add_fragment(unquote(type), fragment, query) end end def build(type, queryable, [{_, value}] = expression) when is_tuple(value) do quote do query = Cassandrax.Queryable.to_query(unquote(queryable)) fragment = Cassandrax.Query.Builder.build_fragment(unquote(type), unquote(expression)) Cassandrax.Query.Builder.add_fragment(unquote(type), fragment, query) end end def build(type, queryable, value) do fragment = build_fragment(type, value) quote do query = Cassandrax.Queryable.to_query(unquote(queryable)) Cassandrax.Query.Builder.add_fragment(unquote(type), unquote(fragment), query) end end # TODO fix DSL so contains and contains_key work without having to define a custom where function @allowed_operators [ :==, :!=, :>, :<, :>=, :<=, :in # :contains, # :contains_key ] def build_fragment(:where, {operator, _, [field, value]}) when operator in @allowed_operators do [field, operator, value] end def build_fragment(:where, [{field, value}]) when is_list(value) do [field, :in, value] end def build_fragment(:where, [{field, value}]) do [field, :==, value] end def build_fragment(:where, conditions) when is_list(conditions) do Enum.map(conditions, &build_fragment(:where, [&1])) end def build_fragment(_type, value), do: value def add_fragment(:where, [], query), do: query def add_fragment(:where, [field, operator, _] = filter, query) when is_atom(field) and is_atom(operator), do: %{query | wheres: [filter | query.wheres]} def add_fragment(:where, [filter | filters], query), do: add_fragment(:where, filters, add_fragment(:where, filter, query)) def add_fragment(type, value, query), do: %{query | type => value} end
lib/cassandrax/query/builder.ex
0.675978
0.53279
builder.ex
starcoder
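The fragment builders above are pure functions, so their translation rules can be checked directly: a keyword filter becomes an equality check, and a list value becomes an `IN` clause:

iex> Cassandrax.Query.Builder.build_fragment(:where, [name: "alice"])
[:name, :==, "alice"]
iex> Cassandrax.Query.Builder.build_fragment(:where, [id: [1, 2, 3]])
[:id, :in, [1, 2, 3]]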
defmodule Rummage.Ecto.Hook do
  @moduledoc """
  This module defines a behaviour that `Rummage.Ecto.Hook`s have to follow.

  This module also defines a `__using__` macro which mandates certain
  behaviours for a `Hook` module to follow. Native hooks that come with
  `Rummage.Ecto` follow this behaviour.

  Custom Search, Sort and Paginate hooks should follow this behaviour as well,
  in order for them to work well with `Rummage.Ecto`.

  ## Usage

  - This is the preferred way of creating a Custom Hook. Using the
    `Rummage.Ecto.Hook.__using__/1` macro, it can be ensured that `run/2` and
    `format_params/3` functions have been implemented.

  ```elixir
  defmodule MyCustomHook do
    use Rummage.Ecto.Hook

    def run(queryable, _params), do: queryable

    def format_params(_queryable, params, _opts), do: params
  end
  ```

  - A Custom Hook can also be created by using the `Rummage.Ecto.Hook`
    `@behaviour`

  ```elixir
  defmodule MyCustomHook do
    @behaviour Rummage.Ecto.Hook

    def run(queryable, _params), do: queryable

    def format_params(_queryable, params, _opts), do: params
  end
  ```
  """

  @doc """
  Callback invoked by `Rummage.Ecto` which applies a set of translations to an
  Ecto query, based on operations defined in the hook.
  """
  @callback run(Ecto.Query.t(), map()) :: Ecto.Query.t()

  @doc """
  Callback invoked by `Rummage.Ecto` which applies a set of translations to
  params passed to the hook. This is responsible for making sure that the
  params passed to the hook's `run/2` function are sanitized.
  """
  @callback format_params(Ecto.Query.t(), map(), keyword()) :: map()

  @doc """
  This macro allows us to write rummage hooks in an easier way.

  It adds a `@behaviour` module attribute and defines default callback
  implementations, which raise, for the hook `use`-ing this module. It also
  makes `run/2` and `format_params/3` overridable and expects them to be
  defined in the hook.

  ## Usage:

  ```elixir
  defmodule MyHook do
    use Rummage.Ecto.Hook

    def run(queryable, params), do: "do something"

    def format_params(q, params, opts), do: "do something"
  end
  ```

  For a better example, check out `Rummage.Ecto.Hook.Paginate` or any other
  hooks defined in `Rummage.Ecto`.
  """
  defmacro __using__(_opts) do
    quote do
      import unquote(__MODULE__)

      @behaviour unquote(__MODULE__)

      @spec run(Ecto.Query.t(), map()) :: Ecto.Query.t()
      def run(_queryable, _params) do
        raise "run/2 not implemented for hook: #{__MODULE__}"
      end

      @spec format_params(Ecto.Query.t(), map(), keyword()) :: map()
      def format_params(_queryable, _params, _opts) do
        raise "format_params/3 not implemented for hook: #{__MODULE__}"
      end

      defoverridable [run: 2, format_params: 3]
    end
  end

  def resolve_field(field, queryable) do
    module = get_module(queryable)
    name = :"__rummage_field_#{field}"

    case function_exported?(module, name, 0) do
      true -> apply(module, name, [])
      _ -> field
    end
  end

  def get_module(module) when is_atom(module), do: module
  def get_module({_, module}) when is_atom(module), do: module
  def get_module(%Ecto.Query{from: _from} = query), do: get_module(query.from)
  def get_module(%Ecto.SubQuery{query: query}), do: get_module(query)
end
lib/rummage_ecto/hook.ex
0.802091
0.891434
hook.ex
starcoder
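`resolve_field/2` above looks for a zero-arity `__rummage_field_<name>` function on the queryable's module and falls back to the field atom itself. A sketch with a hypothetical schema module:

defmodule MyApp.Product do
  # Hypothetical override: rummage will resolve :price to :unit_price.
  def __rummage_field_price, do: :unit_price
end

Rummage.Ecto.Hook.resolve_field(:price, MyApp.Product)
#=> :unit_price
Rummage.Ecto.Hook.resolve_field(:name, MyApp.Product)
#=> :name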
defmodule CSMT.Utils do @moduledoc """ Utility functions required by `CSMT.Merkle`, `CSMT.Log`, `CSMT.Map`. """ @type tree :: CSMT.Types.tree() @type tree_node :: CSMT.Types.tree_node() @type backend :: CSMT.Types.backend() @type hash_algorithm :: CSMT.Types.hash_algorithm() @type hash :: CSMT.Types.hash() @doc """ Generates a unique TreeId for every tree. This TreeId is used by `Storage` and `CSMT.TreeStorage` for persisting `t:tree/0` and `t:tree_node/0` """ def generate_tree_id() do UUID.uuid4(:hex) # :rand.uniform(100000) end @doc """ Helper function to convert a Tuple List into a map. This is required as `CSMT.Trees` stores a map as a `List` of `Tuple`. """ @spec tuple_list_to_map(tuple) :: map def tuple_list_to_map(tpl) do Enum.into(tpl, %{}) end @doc """ Creates a new tree of type `t:tree/0`. Called by `CSMT.Merkle.new/4` when a new `CSMT.Map` or `CSMT.Log` has to be created. """ @spec make_tree(binary, hash_algorithm, backend) :: tree def make_tree(tree_name, hash_function, backend) do CSMT.Trees.Tree.new( treeId: generate_tree_id(), treeName: tree_name, hashFunction: hash_function, root: nil, backend: backend ) end @doc """ Creates a new node in the tree of type `t:tree_node/0`. """ def make_node(tree, hash, children, size, metadata) do CSMT.Nodes.Node.new( treeId: tree.treeId, hash: hash, children: children, size: size, metadata: metadata ) end @doc """ Creates an inner node in the tree of type `t:tree_node/0`. Creates a new node with its children as `left` and `right`. """ def make_node(tree, left, right) do # Salting of non leaf nodes is not required CSMT.Utils.make_node( tree, CSMT.Utils.make_hash(tree, left.hash <> right.hash), [left.hash, right.hash], left.size + right.size, nil ) end def make_map_node( tree, left = %CSMT.Nodes.Node{treeId: _, hash: _, children: _, metadata: _, key: _, size: _}, right ) do CSMT.Nodes.Node.new( treeId: tree.treeId, hash: CSMT.Utils.make_hash(tree, left.hash <> right.hash), children: [left.hash, right.hash], size: left.size + right.size, key: max(left.key, right.key) ) end def make_map_node(tree, key, value) do # salt the node with the key to prevent storage collisions # eg. if two keys have the same values, their hashes would be the same and as the nodes are being # indexed by their keys, storage would collide. # This scheme would prevent against preimage attacks as well CSMT.Nodes.Node.new( treeId: tree.treeId, hash: CSMT.Utils.make_hash(tree, CSMT.Utils.salt_node(key, value)), children: [], size: 1, metadata: value, key: key ) end def salt_node(k, v) do k <> v end @doc """ Hashes the binary data supplied based on the hash algorithm `t:hash_algorithm/0` specified in `t:tree`. """ def make_hash(tree, data) do :crypto.hash(tree.hashFunction, data) end @doc false def closest_pow_2(n) do p = :math.log2(n) case :math.ceil(p) - p do 0.0 -> trunc(:math.pow(2, p - 1)) _ -> trunc(:math.pow(2, trunc(p))) end end @doc """ Encodes `t:tree_node/0` into a `binary` using `exprotobuf`. """ def encode_node(node) do CSMT.Nodes.Node.encode(node) end @doc """ Decodes a `binary` into a `t:tree_node/0` using `exprotobuf`. """ def decode_node(node) do CSMT.Nodes.Node.decode(node) end @doc """ Encodes `t:tree/0` into a `binary` using `exprotobuf`. """ def encode_tree(tree) do CSMT.Trees.Tree.encode(tree) end @doc """ Decodes a `binary` into a `t:tree/0` using `exprotobuf`. """ def decode_tree(tree) do CSMT.Trees.Tree.decode(tree) end @doc """ Utility function for persisting a tree node. Calls `Storage.set_node/3`. 
""" def set_node(tree, key, value) do Storage.set_node(tree.backend, key, encode_node(value)) end @doc """ Utility function for retrieving a tree node. Calls `Storage.get_node/2`. """ def get_node(tree, key) do decode_node(Storage.get_node(tree.backend, key)) end def delete_node(tree, key) do Storage.delete_node(tree.backend, key) end @doc """ Utility function to retrieve the backend module from `backend`. """ def get_backend_module(backend) do backend = CSMT.Utils.tuple_list_to_map(backend) String.to_existing_atom("Elixir." <> backend["module"]) end end
lib/utils.ex
0.880912
0.574544
utils.ex
starcoder
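The tree and node constructors return protobuf-backed structs, so a full example needs a `Storage` backend; the sketch below only exercises the pure helpers, with a hypothetical in-memory backend config in the `[{"module", ...}]` shape that `get_backend_module/1` expects:

# Hypothetical backend; get_backend_module/1 would resolve it to Storage.InMemory.
backend = [{"module", "Storage.InMemory"}]

tree = CSMT.Utils.make_tree("accounts", :sha256, backend)
leaf = CSMT.Utils.make_map_node(tree, "alice", "100")
# leaf.hash == :crypto.hash(:sha256, "alice" <> "100"), per salt_node/2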
defmodule Timex.Parse.DateTime.Parser do @moduledoc """ This is the base plugin behavior for all Timex date/time string parsers. """ import Combine.Parsers.Base, only: [eof: 0, map: 2, pipe: 2] alias Timex.{Timezone, TimezoneInfo, AmbiguousDateTime, AmbiguousTimezoneInfo} alias Timex.Parse.ParseError alias Timex.Parse.DateTime.Tokenizers.{Directive, Default, Strftime} @doc """ Parses a date/time string using the default parser. ## Examples iex> use Timex ...> {:ok, dt} = #{__MODULE__}.parse("2014-07-29T00:20:41.196Z", "{ISO:Extended:Z}") ...> dt.year 2014 iex> dt.month 7 iex> dt.day 29 iex> dt.time_zone "Etc/UTC" """ @spec parse(binary, binary) :: {:ok, DateTime.t | NaiveDateTime.t} | {:error, term} def parse(date_string, format_string) when is_binary(date_string) and is_binary(format_string), do: parse(date_string, format_string, Default) def parse(_, _), do: {:error, :badarg} @doc """ Parses a date/time string using the provided tokenizer. Tokenizers must implement the `Timex.Parse.DateTime.Tokenizer` behaviour. ## Examples iex> use Timex ...> {:ok, dt} = #{__MODULE__}.parse("2014-07-29T00:30:41.196-02:00", "{ISO:Extended}", Timex.Parse.DateTime.Tokenizers.Default) ...> dt.year 2014 iex> dt.month 7 iex> dt.day 29 iex> dt.time_zone "Etc/GMT+2" """ @spec parse(binary, binary, atom) :: {:ok, DateTime.t | NaiveDateTime.t} | {:error, term} def parse(date_string, format_string, tokenizer) when is_binary(date_string) and is_binary(format_string) do try do {:ok, parse!(date_string, format_string, tokenizer)} catch _type, %ParseError{:message => msg} -> {:error, msg} _type, %{:message => msg} -> {:error, msg} _type, reason -> {:error, reason} end end def parse(_, _, _), do: {:error, :badarg} @doc """ Same as `parse/2` and `parse/3`, but raises on error. """ @spec parse!(String.t, String.t, atom | nil) :: DateTime.t | NaiveDateTime.t | no_return def parse!(date_string, format_string, tokenizer \\ Default) def parse!(date_string, format_string, :strftime), do: parse!(date_string, format_string, Strftime) def parse!(date_string, format_string, tokenizer) when is_binary(date_string) and is_binary(format_string) and is_atom(tokenizer) do case tokenizer.tokenize(format_string) do {:error, err} when is_binary(err) -> raise ParseError, message: err {:error, err} -> raise ParseError, message: "#{inspect err}" {:ok, []} -> raise ParseError, message: "There were no parsing directives in the provided format string." {:ok, directives} -> case date_string do "" -> raise ParseError, message: "Input datetime string cannot be empty!" _ -> datetime = case do_parse(date_string, directives, tokenizer) do {:ok, %DateTime{time_zone: nil} = dt} -> Timex.to_naive_datetime(dt) {:ok, dt} -> dt {:error, reason} when is_binary(reason) -> raise ParseError, message: reason {:error, reason} -> raise ParseError, message: "#{inspect reason}" end if :calendar.valid_date(datetime.year, datetime.month, datetime.day) do datetime else raise ParseError, message: "#{inspect datetime} is an invalid date!" 
end end end end # Special case iso8601/rfc3339 for performance defp do_parse(str, [%Directive{:type => type}], _tokenizer) when type in [:iso_8601_extended, :iso_8601_extended_z, :rfc_3339, :rfc_3339z] do case Combine.parse(str, Timex.Parse.DateTime.Parsers.ISO8601Extended.parse) do {:error, _} = err -> err [parts] when is_list(parts) -> case Enum.into(parts, %{}) do %{year4: y, month: m, day: d, hour24: h, zname: tzname} = mapped -> mm = Map.get(mapped, :min, 0) ss = Map.get(mapped, :sec, 0) us = Map.get(mapped, :sec_fractional, {0,0}) tz = Timezone.get(tzname, {{y,m,d},{h,mm,ss}}) {:ok, %DateTime{ year: y, month: m, day: d, hour: h, minute: mm, second: ss, microsecond: us, time_zone: tz.full_name, zone_abbr: tz.abbreviation, utc_offset: tz.offset_utc, std_offset: tz.offset_std}} %{year4: y, month: m, day: d, hour24: h} = mapped -> mm = Map.get(mapped, :min, 0) ss = Map.get(mapped, :sec, 0) us = Map.get(mapped, :sec_fractional, {0,0}) {:ok, %NaiveDateTime{ year: y, month: m, day: d, hour: h, minute: mm, second: ss, microsecond: us}} end end end defp do_parse(str, directives, tokenizer) do parsers = directives |> Stream.map(fn %Directive{weight: weight, parser: parser} -> map(parser, &({&1, weight})) end) |> Stream.filter(fn nil -> false; _ -> true end) |> Enum.reverse case Combine.parse(str, pipe([eof()|parsers] |> Enum.reverse, &(&1))) do [results] when is_list(results) -> results |> extract_parse_results |> Stream.with_index |> Enum.sort_by(fn # If :force_utc exists, make sure it is applied last {{{:force_utc, true}, _}, _} -> 9999 # Timezones must always be applied after other date/time tokens -> {{{tz, _}, _}, _} when tz in [:zname, :zoffs, :zoffs_colon, :zoffs_sec] -> 9998 # If no weight is set, use the index as its weight {{{_token, _value}, 0}, i} -> i # Use the directive weight {{{_token, _value}, weight}, _} -> weight end) |> Stream.flat_map(fn {{token, _}, _} -> [token] end) |> Enum.filter(&Kernel.is_tuple/1) |> apply_directives(tokenizer) {:error, _} = err -> err end end defp extract_parse_results(parse_results), do: extract_parse_results(parse_results, []) defp extract_parse_results([], acc), do: Enum.reverse(acc) defp extract_parse_results([{tokens, weight}|rest], acc) when is_list(tokens) do extracted = extract_parse_results(tokens) |> Enum.map(fn {{token, value}, _weight} -> {{token, value}, weight} end) |> Enum.reverse extract_parse_results(rest, extracted ++ acc) end defp extract_parse_results([{{token, value}, weight}|rest], acc) when is_atom(token) do extract_parse_results(rest, [{{token, value}, weight}|acc]) end defp extract_parse_results([{token, value}|rest], acc) when is_atom(token) do extract_parse_results(rest, [{{token, value}, 0}|acc]) end defp extract_parse_results([[{token, value}]|rest], acc) when is_atom(token) do extract_parse_results(rest, [{{token, value}, 0}|acc]) end defp extract_parse_results([h|rest], acc) when is_list(h) do extracted = Enum.reverse(extract_parse_results(h)) extract_parse_results(rest, extracted ++ acc) end defp extract_parse_results([_|rest], acc) do extract_parse_results(rest, acc) end # Constructs a DateTime from the parsed tokens defp apply_directives([], _), do: {:ok, Timex.DateTime.Helpers.empty()} defp apply_directives(tokens, tokenizer), do: apply_directives(tokens, Timex.DateTime.Helpers.empty(), tokenizer) defp apply_directives([], date, _), do: {:ok, date} defp apply_directives([{token, value}|tokens], date, tokenizer) do case update_date(date, token, value, tokenizer) do {:error, _} = error -> error updated -> 
apply_directives(tokens, updated, tokenizer) end end # Given a date, a token, and the value for that token, update the # date according to the rules for that token and the provided value defp update_date(%AmbiguousDateTime{} = adt, token, value, tokenizer) when is_atom(token) do bd = update_date(adt.before, token, value, tokenizer) ad = update_date(adt.after, token, value, tokenizer) %{adt | :before => bd, :after => ad} end defp update_date(%{year: year, hour: hh} = date, token, value, tokenizer) when is_atom(token) do case token do # Formats clock when clock in [:kitchen, :strftime_iso_kitchen] -> date = cond do date == Timex.DateTime.Helpers.empty() -> {{y,m,d},_} = :calendar.universal_time() %{date | :year => y, :month => m, :day => d} true -> date end case apply_directives(value, date, tokenizer) do {:error, _} = err -> err {:ok, date} when clock == :kitchen -> %{date | :second => 0, :microsecond => {0,0}} {:ok, date} -> %{date | :microsecond => {0,0}} end # Years :century -> century = Timex.century(%{date | :year => year}) year_shifted = year + ((value - century) * 100) %{date | :year => year_shifted} y when y in [:year2, :iso_year2] -> {{y,_,_},_} = :calendar.universal_time() current_century = Timex.century(y) year_shifted = value + ((current_century - 1) * 100) %{date | :year => year_shifted} y when y in [:year4, :iso_year4] -> # Special case for UNIX format dates, where the year is parsed after the timezone, # so we must lookup the timezone again to ensure it's properly set case date do %{time_zone: tzname} when is_nil(tzname) == false -> seconds_from_zeroyear = Timex.to_gregorian_seconds(date) date = to_datetime(date) case Timezone.resolve(tzname, seconds_from_zeroyear) do %TimezoneInfo{} = tz -> %{date | :year => value, :time_zone => tz.full_name, :zone_abbr => tz.abbreviation, :utc_offset => tz.offset_utc, :std_offset => tz.offset_std} %AmbiguousTimezoneInfo{before: b, after: a} -> bd = %{date | :year => value, :time_zone => b.full_name, :zone_abbr => b.abbreviation, :utc_offset => b.offset_utc, :std_offset => b.offset_std} ad = %{date | :year => value, :time_zone => a.full_name, :zone_abbr => a.abbreviation, :utc_offset => a.offset_utc, :std_offset => a.offset_std} %AmbiguousDateTime{:before => bd, :after => ad} end _ -> %{date | :year => value} end # Months :month -> %{date | :month => value} month when month in [:mshort, :mfull] -> %{date | :month => Timex.month_to_num(value)} # Days :day -> %{date | :day => value} :oday when is_integer(value) and value >= 0 -> Timex.from_iso_day(value, date) :wday_mon -> current_day = Timex.weekday(date) cond do current_day == value -> date current_day > value -> Timex.shift(date, days: current_day - value) current_day < value -> Timex.shift(date, days: value - current_day) end :wday_sun -> current_day = Timex.weekday(date) - 1 cond do current_day == value -> date current_day > value -> Timex.shift(date, days: current_day - value) current_day < value -> Timex.shift(date, days: value - current_day) end day when day in [:wdshort, :wdfull] -> %{date | :day => Timex.day_to_num(value)} # Weeks :iso_weeknum -> {year, _, weekday} = Timex.iso_triplet(date) %Date{year: y, month: m, day: d} = Timex.from_iso_triplet({year, value, weekday}) %{date | :year => y, :month => m, :day => d} week_num when week_num in [:week_mon, :week_sun] -> reset = %{date | :month => 1, :day => 1} reset |> Timex.shift(weeks: value) :weekday -> date |> Timex.shift(days: value - 1) # Hours hour when hour in [:hour24, :hour12] -> %{date | :hour => value} :min -> %{date | 
:minute => value} :sec -> %{date | :second => value} :sec_fractional -> case value do "" -> date n when is_number(n) -> %{date | :microsecond => Timex.DateTime.Helpers.construct_microseconds(n)} {_n, _precision} = us -> %{date | :microsecond => us} end :us -> %{date | :microsecond => Timex.DateTime.Helpers.construct_microseconds(value)} :ms -> %{date | :microsecond => Timex.DateTime.Helpers.construct_microseconds(value*1_000)} :sec_epoch -> DateTime.from_unix!(value) am_pm when am_pm in [:am, :AM] -> cond do hh == 24 -> %{date | :hour => 0} hh == 12 and (String.downcase(value) == "am") -> %{date | :hour => 0} hh in (1..11) and String.downcase(value) == "pm" -> %{date | :hour => hh + 12} true -> date end # Timezones :zoffs -> date = to_datetime(date) case value do <<sign::utf8, h1::utf8, h2::utf8>> -> hour = <<h1::utf8,h2::utf8>> hours = String.to_integer(hour) minutes = 0 {gmt_sign, total_offset} = case sign do ?- -> {?+, -1 * ((hours*60*60) + (minutes*60))} ?+ -> {?-, ((hours*60*60) + (minutes*60))} end case hours do 0 -> %{date | :time_zone => "Etc/GMT+0", :zone_abbr => "GMT", :utc_offset => total_offset, :std_offset => 0} h when h < 10 -> %{date | :time_zone => <<"Etc/GMT", gmt_sign::utf8, h2::utf8>>, :zone_abbr => <<sign::utf8, ?0, h2::utf8>>, :utc_offset => total_offset, :std_offset => 0} _ -> %{date | :time_zone => <<"Etc/GMT", gmt_sign::utf8, hour::binary>>, :zone_abbr => <<sign::utf8, hour::binary>>, :utc_offset => total_offset, :std_offset => 0} end <<sign::utf8, h1::utf8, h2::utf8, m1::utf8, m2::utf8>> -> hour = <<h1::utf8,h2::utf8>> hours = String.to_integer(hour) minute = <<m1::utf8,m2::utf8>> minutes = String.to_integer(minute) {gmt_sign, total_offset} = case sign do ?- -> {?+, -1 * ((hours*60*60) + (minutes*60))} ?+ -> {?-, ((hours*60*60) + (minutes*60))} end case {hours, minutes} do {0, 0} -> %{date | :time_zone => "Etc/GMT+0", :zone_abbr => "GMT", :utc_offset => total_offset, :std_offset => 0} {h, 0} when h < 10 -> %{date | :time_zone => <<"Etc/GMT", gmt_sign::utf8, h2::utf8>>, :zone_abbr => <<sign::utf8, ?0, h2::utf8>>, :utc_offset => total_offset, :std_offset => 0} {_, 0} -> %{date | :time_zone => <<"Etc/GMT", gmt_sign::utf8, hour::binary>>, :zone_abbr => <<sign::utf8, hour::binary>>, :utc_offset => total_offset, :std_offset => 0} _ -> %{date | :time_zone => <<"Etc/GMT", gmt_sign::utf8, hour::binary, ?:, minute::binary>>, :zone_abbr => <<sign::utf8, hour::binary, ?:, minute::binary>>, :utc_offset => total_offset, :std_offset => 0} end _ -> {:error, "invalid offset: #{inspect value}"} end :zname -> seconds_from_zeroyear = Timex.to_gregorian_seconds(date) case Timezone.name_of(value) do {:error, _} = err -> err tzname -> date = to_datetime(date) case Timezone.resolve(tzname, seconds_from_zeroyear) do %TimezoneInfo{} = tz -> %{date | :time_zone => tz.full_name, :zone_abbr => tz.abbreviation, :utc_offset => tz.offset_utc, :std_offset => tz.offset_std} %AmbiguousTimezoneInfo{before: b, after: a} -> bd = %{date | :time_zone => b.full_name, :zone_abbr => b.abbreviation, :utc_offset => b.offset_utc, :std_offset => b.offset_std} ad = %{date | :time_zone => a.full_name, :zone_abbr => a.abbreviation, :utc_offset => a.offset_utc, :std_offset => a.offset_std} %AmbiguousDateTime{:before => bd, :after => ad} end end :zoffs_colon -> date = to_datetime(date) case value do <<sign::utf8, h1::utf8, h2::utf8, ?:, m1::utf8, m2::utf8>> -> hour = <<h1::utf8,h2::utf8>> hours = String.to_integer(hour) minute = <<m1::utf8,m2::utf8>> minutes = String.to_integer(minute) {gmt_sign, total_offset} = 
case sign do ?- -> {?+, -1 * ((hours*60*60) + (minutes*60))} ?+ -> {?-, ((hours*60*60) + (minutes*60))} end case {hours, minutes} do {0, 0} -> %{date | :time_zone => "Etc/GMT+0", :zone_abbr => "GMT", :utc_offset => total_offset, :std_offset => 0} {h, 0} when h < 10 -> %{date | :time_zone => <<"Etc/GMT", gmt_sign::utf8, h2::utf8>>, :zone_abbr => <<sign::utf8, ?0, h2::utf8>>, :utc_offset => total_offset, :std_offset => 0} {_, 0} -> %{date | :time_zone => <<"Etc/GMT", gmt_sign::utf8, hour::binary>>, :zone_abbr => <<sign::utf8, hour::binary>>, :utc_offset => total_offset, :std_offset => 0} _ -> %{date | :time_zone => <<"Etc/GMT", gmt_sign::utf8, hour::binary, ?:, minute::binary>>, :zone_abbr => <<sign::utf8, hour::binary, ?:, minute::binary>>, :utc_offset => total_offset, :std_offset => 0} end _ -> {:error, "invalid offset: #{inspect value}"} end :zoffs_sec -> date = to_datetime(date) case value do <<sign::utf8, h1::utf8, h2::utf8, ?:, m1::utf8, m2::utf8, ?:, s1::utf8, s2::utf8>> -> hour = <<h1::utf8,h2::utf8>> hours = String.to_integer(hour) minute = <<m1::utf8,m2::utf8>> minutes = String.to_integer(minute) second = <<s1::utf8,s2::utf8>> seconds = String.to_integer(second) {gmt_sign, total_offset} = case sign do ?- -> {?+, -1 * ((hours*60*60) + (minutes*60))} ?+ -> {?-, ((hours*60*60) + (minutes*60))} end case {hours, minutes, seconds} do {0, 0, 0} -> %{date | :time_zone => "Etc/GMT+0", :zone_abbr => "GMT", :utc_offset => total_offset, :std_offset => 0} {h, 0, 0} when h < 10 -> %{date | :time_zone => <<"Etc/GMT", gmt_sign::utf8, h2::utf8>>, :zone_abbr => <<sign::utf8, ?0, h2::utf8>>, :utc_offset => total_offset, :std_offset => 0} {_, 0, 0} -> %{date | :time_zone => <<"Etc/GMT", gmt_sign::utf8, hour::binary>>, :zone_abbr => <<sign::utf8, hour::binary>>, :utc_offset => total_offset, :std_offset => 0} _ -> %{date | :time_zone => <<"Etc/GMT", gmt_sign::utf8, hour::binary, ?:, minute::binary, ?:, second::binary>>, :zone_abbr => <<sign::utf8, hour::binary, ?:, minute::binary, ?:, second::binary>>, :utc_offset => total_offset, :std_offset => 0} end _ -> {:error, "invalid offset: #{inspect value}"} end :force_utc -> date = to_datetime(date) case date.time_zone do nil -> %{date | :time_zone => "Etc/UTC", :zone_abbr => "UTC", :utc_offset => 0, :std_offset => 0} _ -> Timezone.convert(date, "UTC") end :literal -> date :week_of_year -> shift_to_week_of_year(:mon, date, value) :week_of_year_sun -> shift_to_week_of_year(:sun, date, value) _ -> case tokenizer.apply(date, token, value) do {:ok, date} -> date {:error, _} = err -> err _ -> {:error, "Unrecognized token: #{token}"} end end end defp shift_to_week_of_year(:mon, %{year: y} = datetime, value) when is_integer(value) do shift = case :calendar.day_of_the_week({y, 1, 1}) do n when n < 5 -> # Week 1, seek backwards to beginning of week [days: -(7-(7-(n-1)))] n -> # Part of last year's week, seek forwards to beginning of week [days: (7-(7-(n-1))) - 1] end datetime = Timex.to_naive_datetime(datetime) do_shift_to_week_of_year(Timex.shift(%{datetime | month: 1, day: 1}, shift), value) end defp shift_to_week_of_year(:sun, %{year: y} = datetime, value) when is_integer(value) do n = :calendar.day_of_the_week({y, 1, 1}) shift = [days: -1 - (7-(7-(n-1)))] datetime = Timex.to_naive_datetime(datetime) do_shift_to_week_of_year(Timex.shift(%{datetime | month: 1, day: 1}, shift), value) end defp do_shift_to_week_of_year(%{year: y} = datetime, weeks) do # On leap years which start on Thursday, week numbers # are incremented by 1 from March thru the rest of the year 
shifted = Timex.shift(datetime, days: 7 * (weeks-1)) if :calendar.is_leap_year(y) do case :calendar.day_of_the_week({y,1,1}) do 4 -> case shifted do %{month: m} when m < 3 -> shifted _ -> Timex.shift(shifted, days: 7) end _n -> shifted end else shifted end end defp to_datetime(%DateTime{} = dt), do: dt defp to_datetime(%NaiveDateTime{year: y, month: m, day: d, hour: h, minute: mm, second: ss, microsecond: us}) do %DateTime{year: y, month: m, day: d, hour: h, minute: mm, second: ss, microsecond: us, time_zone: "Etc/UTC", zone_abbr: "UTC", utc_offset: 0, std_offset: 0} end end
lib/parse/datetime/parser.ex
0.880116
0.511839
parser.ex
starcoder
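The `:zoffs`, `:zoffs_colon`, and `:zoffs_sec` branches in the parser above all share the same sign arithmetic: the printed sign drives the UTC offset in seconds, while the POSIX-style `Etc/GMT±N` zone name takes the opposite sign. A minimal sketch of that arithmetic, using a hypothetical `parse_offset/1` helper that is not part of Timex:

```elixir
# Hypothetical helper (not Timex API) illustrating the :zoffs sign arithmetic.
defmodule OffsetSketch do
  # "+0130" -> {"Etc/GMT-1", 5400}: POSIX Etc/GMT names carry the inverted
  # sign, while the UTC offset in seconds keeps the printed sign.
  # (The parser above additionally special-cases zero minutes and
  # single-digit hours when building the zone name.)
  def parse_offset(<<sign::utf8, h1::utf8, h2::utf8, m1::utf8, m2::utf8>>) do
    hours = String.to_integer(<<h1::utf8, h2::utf8>>)
    minutes = String.to_integer(<<m1::utf8, m2::utf8>>)
    seconds = hours * 3600 + minutes * 60

    case sign do
      ?+ -> {"Etc/GMT-#{hours}", seconds}
      ?- -> {"Etc/GMT+#{hours}", -seconds}
    end
  end
end

OffsetSketch.parse_offset("+0130")  #=> {"Etc/GMT-1", 5400}
```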
defmodule MathHelper do
  @moduledoc """
  Simple functions to help with common math functions.
  """

  @decimal_context %Decimal.Context{precision: 1_000}

  @spec sub(integer(), integer()) :: integer()
  def sub(num1, num2) do
    op = fn x1, x2 -> Decimal.sub(x1, x2) end
    decimal_operation(num1, num2, op)
  end

  @spec add(integer(), integer()) :: integer()
  def add(num1, num2) do
    op = fn x1, x2 -> Decimal.add(x1, x2) end
    decimal_operation(num1, num2, op)
  end

  @spec mult(integer(), integer()) :: integer()
  def mult(num1, num2) do
    op = fn x1, x2 -> Decimal.mult(x1, x2) end
    decimal_operation(num1, num2, op)
  end

  @spec div(integer(), integer()) :: integer()
  def div(num1, num2) do
    op = fn x1, x2 -> Decimal.div(x1, x2) end
    decimal_operation(num1, num2, op)
  end

  @spec decimal_operation(integer(), integer(), fun()) :: integer()
  defp decimal_operation(num1, num2, op) do
    Decimal.with_context(@decimal_context, fn ->
      num1
      |> op.(num2)
      |> Decimal.round(0, :down)
      |> Decimal.to_integer()
    end)
  end

  @doc """
  Simple floor function that makes sure we return an integer type.

  ## Examples

      iex> MathHelper.floor(3.5)
      3

      iex> MathHelper.floor(-3.5)
      -4

      iex> MathHelper.floor(5)
      5
  """
  @spec floor(number()) :: integer()
  def floor(x), do: round(:math.floor(x))

  @doc """
  Rounds toward zero, dropping the fractional part.

  ## Examples

      iex> MathHelper.round_int(3.5)
      3

      iex> MathHelper.round_int(-3.5)
      -3

      iex> MathHelper.round_int(-0.5)
      0
  """
  @spec round_int(number()) :: integer()
  def round_int(n) when n < 0, do: round(:math.ceil(n))
  def round_int(n), do: round(:math.floor(n))

  @doc """
  Simple helper to calculate a log in any given base, e.g. `log_15(30)`,
  which would be expressed as `MathHelper.log(30, 15)`.

  ## Examples

      iex> MathHelper.log(225, 15)
      2.0

      iex> MathHelper.log(240, 15)
      2.0238320992392618

      iex> MathHelper.log(1024, 10)
      3.0102999566398116

      iex> MathHelper.log(999999, 9999)
      1.500016178459417
  """
  @spec log(number(), number()) :: float()
  def log(x, b), do: :math.log(x) / :math.log(b)

  @doc """
  Returns the byte size of an integer

  ## Examples

      iex> MathHelper.integer_byte_size(0)
      0

      iex> MathHelper.integer_byte_size(1)
      1

      iex> MathHelper.integer_byte_size(0xfffffffff)
      5
  """
  @spec integer_byte_size(number()) :: non_neg_integer()
  def integer_byte_size(n) when n == 0, do: 0
  def integer_byte_size(n), do: byte_size(:binary.encode_unsigned(n))

  @doc """
  Bits to words

  ## Examples

      iex> MathHelper.bits_to_words(0)
      0

      iex> MathHelper.bits_to_words(9)
      1

      iex> MathHelper.bits_to_words(256)
      8
  """
  @spec bits_to_words(number()) :: integer()
  def bits_to_words(n), do: round(:math.ceil(n / EVM.word_size()))
end
apps/evm/lib/math_helper.ex
0.866514
0.667561
math_helper.ex
starcoder
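A brief usage sketch of the module above; since `decimal_operation/3` finishes with `Decimal.round(0, :down)`, every operation truncates toward zero, which is easiest to see with a negative division:

```elixir
MathHelper.add(2, 3)    #=> 5
MathHelper.div(7, 2)    #=> 3   (7/2 = 3.5, truncated toward zero)
MathHelper.div(-7, 2)   #=> -3  (:down rounds toward zero; a floor would give -4)
MathHelper.mult(6, -7)  #=> -42
```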
defmodule Sanbase.Clickhouse.HistoricalBalance.Behaviour do
  @moduledoc ~s"""
  Behaviour defining the callback functions for a module implementing
  historical balances for a given blockchain.

  In order to add a new blockchain the following steps must be done:
  - Implement the behaviour
  - Add dispatch logic in the `HistoricalBalance` dispatch module.
  """

  @typedoc ~s"""
  An interval represented as a string. It has the format of a number followed by
  one of: ns, ms, s, m, h, d or w - each representing some time unit
  """
  @type interval :: String.t()
  @type address :: String.t()
  @type address_or_addresses :: address | list(address)
  @type decimals :: non_neg_integer()
  @type datetime :: DateTime.t()
  @type currency :: String.t()
  @type contract :: String.t()
  @type target :: contract | currency

  @type slug_balance_map :: %{
          slug: String.t(),
          balance: float()
        }

  @type historical_balance :: %{
          datetime: datetime(),
          balance: float()
        }

  @type historical_balance_result :: {:ok, list(historical_balance)} | {:error, String.t()}

  @type balance_change ::
          {address, {balance_before :: number, balance_after :: number, balance_change :: number}}

  @type balance_change_result :: {:ok, list(balance_change)} | {:error, String.t()}

  @type historical_balance_change :: %{
          datetime: datetime(),
          balance_change: number()
        }

  @type historical_balance_change_result ::
          {:ok, list(historical_balance_change)} | {:error, String.t()}

  @doc ~s"""
  Return a list of all assets that the address holds or has held in the past and
  the latest balance.
  """
  @callback assets_held_by_address(address) ::
              {:ok, list(slug_balance_map)} | {:error, String.t()}

  @doc ~s"""
  For a given address or list of addresses returns the combined balance for each
  bucket of size `interval` in the from-to time period
  """
  @callback historical_balance(
              address_or_addresses,
              target,
              decimals,
              from :: datetime,
              to :: datetime,
              interval
            ) :: historical_balance_result()

  @doc ~s"""
  For a given address or list of addresses returns the balance change for the
  from-to period. The returned list indicates the address, before balance, after
  balance and the balance change
  """
  @callback balance_change(
              address_or_addresses,
              target,
              decimals,
              from :: datetime,
              to :: datetime
            ) :: balance_change_result()

  @doc ~s"""
  For a given address or list of addresses returns the balance change for each
  bucket of size `interval` in the from-to time period.
  """
  @callback historical_balance_change(
              address_or_addresses,
              target,
              decimals,
              from :: datetime,
              to :: datetime,
              interval
            ) :: historical_balance_change_result()

  @callback last_balance_before(
              address,
              target,
              decimals,
              before :: datetime
            ) :: {:ok, float()} | {:error, String.t()}

  @optional_callbacks historical_balance_change: 6
end
lib/sanbase/clickhouse/historical_balance/behaviour.ex
0.913223
0.510435
behaviour.ex
starcoder
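A minimal sketch of what adopting the behaviour above might look like for a hypothetical chain; `MyChain` and its stub bodies are invented for illustration, only the callback names, arities, and return shapes come from the behaviour:

```elixir
defmodule MyChain.HistoricalBalance do
  @behaviour Sanbase.Clickhouse.HistoricalBalance.Behaviour

  @impl true
  def assets_held_by_address(_address) do
    # A real implementation would query the datastore here.
    {:ok, [%{slug: "my-token", balance: 0.0}]}
  end

  @impl true
  def historical_balance(_addr, _target, _decimals, _from, _to, _interval) do
    {:ok, []}
  end

  @impl true
  def balance_change(_addr, _target, _decimals, _from, _to), do: {:ok, []}

  @impl true
  def last_balance_before(_addr, _target, _decimals, _before), do: {:ok, 0.0}

  # historical_balance_change/6 may be omitted, per @optional_callbacks.
end
```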
defmodule Dynamo.Connection.Behaviour do @moduledoc """ Common behaviour used between `Dynamo.Connection` connection implementations. When used, it defines a private record via `Record.defrecordp` named `connection` with the following fields and their default values: * assigns - an empty list * params - `nil` * req_headers - `nil` * req_body - `nil` * resp_body - an empty binary * resp_charset - `"utf-8"` * resp_content_type - `nil` * resp_cookies - `[]` * resp_headers - an empty binary dict * state - `:unset` * status - `nil` * script_name_segments - an empty list Besides the fields above, it also defines the following fields, but it expects those fields to be set when the connection is initialized with the following contents: * main - the entry point module for the connection * before_send - a call to `Dynamo.Connection.default_before_send` * method - the current request method * original_method - the current request method * path_info_segments - the current path segments A developer can pass extra fields via `use`: use Dynamo.Connection.Behaviour, [:my_field, :other_field] """ @doc false defmacro __using__(opts) do fields = [ assigns: [], before_send: [], fetchable: [], main: nil, method: nil, original_method: nil, params: nil, path_info_segments: nil, private: [], req_body: nil, req_cookies: nil, req_headers: nil, resp_body: "", resp_charset: "utf-8", resp_cookies: [], resp_content_type: nil, resp_headers: Binary.Dict.new([{"cache-control", "max-age=0, private, must-revalidate"}]), route_params: [], state: :unset, status: nil, script_name_segments: [] ] ++ opts quote location: :keep do require Record @behaviour Dynamo.Connection Record.defrecordp :connection, __MODULE__, unquote(fields) ## Assigns @doc false def assigns(connection(assigns: assigns)) do assigns end @doc false def assign(key, value, connection(assigns: assigns) = conn) do connection(conn, assigns: Keyword.put(assigns, key, value)) end @doc false def put_assign(key, value, conn) do assign(key, value, conn) end @doc false def private(connection(private: private)) do private end @doc false def put_private(key, value, connection(private: private) = conn) do connection(conn, private: Keyword.put(private, key, value)) end @doc false def main(connection(main: main)) do main end ## Fetch @doc false def fetchable(atom, fun, connection(fetchable: fetchable) = conn) when is_atom(atom) and is_function(fun, 1) do connection(conn, fetchable: [{ atom, fun }|fetchable]) end ## Request @doc false def params(connection(params: nil)) do raise Dynamo.Connection.UnfetchedError, aspect: :params end @doc false def params(connection(params: params)) do params end @doc false def route_params(connection(route_params: route_params)) do route_params end @doc false def route_params(new, connection(params: nil, route_params: route_params) = conn) do connection(conn, route_params: route_params ++ new) end @doc false def route_params(new, connection(params: params, route_params: route_params) = conn) do connection(conn, route_params: route_params ++ new, params: Binary.Dict.merge(params, new)) end @doc false def method(connection(method: method)) do method end @doc false def method(method, conn) when is_binary(method) do connection(conn, method: method) end @doc false def req_headers(connection(req_headers: nil)) do raise Dynamo.Connection.UnfetchedError, aspect: :req_headers end @doc false def req_headers(connection(req_headers: req_headers)) do req_headers end @doc false def req_body(connection(req_body: nil)) do raise 
Dynamo.Connection.UnfetchedError, aspect: :req_body end @doc false def req_body(connection(req_body: req_body)) do req_body end ## Cookies @doc false def req_cookies(connection(req_cookies: nil)) do raise Dynamo.Connection.UnfetchedError, aspect: :cookies end @doc false def req_cookies(connection(req_cookies: req_cookies)) do req_cookies end @doc false def resp_cookies(connection(resp_cookies: resp_cookies)) do resp_cookies end @doc false def put_resp_cookie(key, value, opts, connection(resp_cookies: resp_cookies) = conn) when is_binary(key) and (is_binary(value) or nil?(value)) and is_list(opts) do resp_cookies = List.keydelete(resp_cookies, key, 0) connection(conn, resp_cookies: [{ key, value, opts }|resp_cookies]) end ## Paths @doc false def path_info_segments(connection(path_info_segments: segments)) do segments end @doc false def path_info(connection(path_info_segments: segments)) do to_path segments end @doc false def script_name_segments(connection(script_name_segments: segments)) do segments end @doc false def script_name(connection(script_name_segments: segments)) do to_path segments end @doc false def forward_to(segments, _target, connection(path_info_segments: path, script_name_segments: script) = conn) do { prefix, ^segments } = Enum.split path, length(path) - length(segments) connection(conn, path_info_segments: segments, script_name_segments: script ++ prefix ) end defp to_path(segments) do "/" <> Enum.join(segments, "/") end ## Response @doc false def status(connection(status: status)) do status end @doc false def status(status, connection(state: state) = conn) when is_integer(status) and state in [:unset, :set, :sendfile, :chunked] do connection(conn, status: status, state: :set) end @doc false def resp_body(connection(resp_body: resp_body)) do resp_body end @doc false def resp_body(body, connection(status: status, state: state) = conn) when state in [:unset, :set] do connection(conn, status: status || 200, resp_body: body, state: :set) end @doc false def resp_content_type(connection(resp_content_type: resp_content_type)) do resp_content_type end @doc false def resp_content_type(resp_content_type, conn) when is_binary(resp_content_type) do connection(conn, resp_content_type: resp_content_type) end @doc false def resp_charset(connection(resp_charset: resp_charset)) do resp_charset end @doc false def resp_charset(resp_charset, conn) when is_binary(resp_charset) do connection(conn, resp_charset: resp_charset) end @doc false def resp(status, body, connection(state: state) = conn) when is_integer(status) and state in [:unset, :set] do connection(conn, status: status, resp_body: body, state: :set ) end @doc false def send(connection(status: status, resp_body: body) = conn) do send(status, body, conn) end @doc false def state(connection(state: state)) do state end @doc false def resp_headers(connection(resp_headers: resp_headers)) do resp_headers end @doc false def put_resp_header(key, value, connection(resp_headers: resp_headers) = conn) do connection(conn, resp_headers: Binary.Dict.put(resp_headers, key, to_string(value))) end @doc false def delete_resp_header(key, connection(resp_headers: resp_headers) = conn) do connection(conn, resp_headers: Binary.Dict.delete(resp_headers, key)) end # Callbacks @doc false def before_send(fun, connection(before_send: before_send) = conn) when is_function(fun) do connection(conn, before_send: [fun|before_send]) end defp run_before_send(connection(before_send: before_send) = conn) do Enum.reduce Enum.reverse(before_send), conn, fn(fun, c) -> 
fun.(c) end end end end end
lib/dynamo/connection/behaviour.ex
0.815122
0.440951
behaviour.ex
starcoder
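One detail worth illustrating from the connection behaviour above: `before_send/2` prepends each callback to the list, and `run_before_send/1` reverses the list before reducing, so callbacks fire in the order they were registered. A standalone sketch of that mechanic (the `:conn` atom stands in for a real connection record):

```elixir
callbacks = []
callbacks = [fn conn -> IO.puts("first");  conn end | callbacks]
callbacks = [fn conn -> IO.puts("second"); conn end | callbacks]

# Mirrors run_before_send/1: reverse, then thread the connection through.
Enum.reduce(Enum.reverse(callbacks), :conn, fn fun, conn -> fun.(conn) end)
# prints "first" then "second" -- registration order is preserved
```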
defmodule FunLand.Chainable do
  @moduledoc """
  Defines a 'chain' operation to apply a function that takes a simple value and outputs a new Chainable
  to a value inside a Chainable.

  Something that is Chainable also needs to be Appliable.

  ## Fruit Salad Example

  There is one problem we haven't covered yet: What if you have an operation that itself returns a bowl?

  Say we have a 'blend' operation, that takes an apple, and returns a bowl with apple juice.

  If we would just `map` 'blend' over a bowl of apples, we would end up putting all the bowls of apple juice inside our original bowl.
  This is clearly not what we want. Instead, we want to combine the results together back into a single bowl.

  The implementation that tells how to do this, is called `chain`. For bowls, it would be 'put/pour contents of resulting bowl back into original bowl and forget about the other bowl.'

  ## In Other Environments
  - In Haskell, `chain` is known by the name `bind`, or `>>=`.
  """

  @type chainable(_) :: FunLand.adt
  @callback chain(chainable(a), (a -> chainable(b))) :: chainable(b) when a: any, b: any

  defmacro __using__(_opts) do
    quote do
      @behaviour FunLand.Chainable
    end
  end

  defdelegate map(a, fun), to: FunLand.Mappable
  defdelegate apply_with(a, b), to: FunLand.Appliable
  defdelegate new(module, val), to: FunLand.Applicative

  @doc """
  Chains a function that returns a Chainable at the end of some calculation that returns a Chainable.

  So to `chain` means: taking the result of an operation out of its container, passing it in to the next function, and finally returning the resulting container.
  """
  def chain(chainable, function_that_returns_new_chainable)

  # Stdlib structs
  for {stdlib_module, module} <- FunLand.Builtin.__stdlib_struct_modules__ do
    def chain(a = %unquote(stdlib_module){}, b) do
      apply(unquote(module), :chain, [a, b])
    end
  end

  # Custom structs
  def chain(a = %chainable{}, b) when is_function(b, 1) do
    chainable.chain(a, b)
  end

  # Builtin datatypes
  use FunLand.Helper.GuardMacros
  for {guard, module} <- FunLand.Builtin.__builtin__ do
    def chain(chainable_a, chainable_b) when is_function(chainable_b, 1) and unquote(guard)(chainable_a) do
      apply(unquote(module), :chain, [chainable_a, chainable_b])
    end
  end
end
lib/fun_land/chainable.ex
0.78838
0.89096
chainable.ex
starcoder
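A minimal sketch of a chainable container for the contract described above, assuming the usual Maybe shape; `Maybe` here is hypothetical and only illustrates how `chain/2` flattens instead of nesting:

```elixir
defmodule Maybe do
  use FunLand.Chainable

  defstruct value: nil, present?: false

  # chain/2 unwraps the value and lets `fun` decide the next container,
  # flattening the result instead of producing a Maybe inside a Maybe.
  def chain(%Maybe{present?: false} = nothing, _fun), do: nothing
  def chain(%Maybe{present?: true, value: v}, fun), do: fun.(v)
end

just_two = %Maybe{value: 2, present?: true}
Maybe.chain(just_two, fn x -> %Maybe{value: x * 10, present?: true} end)
#=> %Maybe{value: 20, present?: true}
```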
defmodule P1 do
  alias P1.Parser, as: Parser

  @moduledoc """
  P1 is a communication standard for Dutch Smartmeters

  Whenever a serial connection is made with the P1 port, the Smartmeter sends out a telegram every 10 seconds.

  This library is able to parse this telegram and produces Elixir types and structs to reason about and further process this data.

  ## Example telegram

  ```
  /ISk5MT382-1000
  1-3:0.2.8(50)
  0-0:1.0.0(101209113020W)
  0-0:96.1.1(4B384547303034303436333935353037)
  1-0:1.8.1(123456.789*kWh)
  1-0:1.8.2(123456.789*kWh)
  1-0:2.8.1(123456.789*kWh)
  1-0:2.8.2(123456.789*kWh)
  0-0:96.14.0(0002)
  1-0:1.7.0(01.193*kW)
  1-0:2.7.0(00.000*kW)
  0-0:96.7.21(00004)
  0-0:96.7.9(00002)
  1-0:99.97.0(2)(0-0:96.7.19)(101208152415W)(0000000240*s)(101208151004W)(0000000301*s)
  1-0:32.32.0(00002)
  1-0:52.32.0(00001)
  1-0:72.32.0(00000)
  1-0:32.36.0(00000)
  1-0:52.36.0(00003)
  1-0:72.36.0(00000)
  0-0:96.13.0(303132333435363738393A3B3C3D3E3F303132333435363738393A3B3C3D3E3F303132333435363738393A3B3C
  3D3E3F303132333435363738393A3B3C3D3E3F303132333435363738393A3B3C3D3E3F)
  1-0:32.7.0(220.1*V)
  1-0:52.7.0(220.2*V)
  1-0:72.7.0(220.3*V)
  1-0:31.7.0(001*A)
  1-0:51.7.0(002*A)
  1-0:71.7.0(003*A)
  1-0:21.7.0(01.111*kW)
  1-0:41.7.0(02.222*kW)
  1-0:61.7.0(03.333*kW)
  1-0:22.7.0(04.444*kW)
  1-0:42.7.0(05.555*kW)
  1-0:62.7.0(06.666*kW)
  0-1:24.1.0(003)
  0-1:96.1.0(3232323241424344313233343536373839)
  0-1:24.2.1(101209112500W)(12785.123*m3)
  !EF2F
  ```
  """

  defmodule Channel do
    @moduledoc """
    Contains the medium and channel of the data

    The medium can be `:abstract`, `:electricity`, `:heat`, `:gas` or `:water`; the channel is
    always 0 for the meter itself and higher numbers for modbus connected devices.
    To transform the medium from an integer to the atom, one can use the construct method on the struct

    ```
    iex> P1.Channel.construct(1,0)
    %P1.Channel{channel: 0, medium: :electricity}
    ```
    """
    defstruct medium: nil, channel: 0

    def construct(0, channel), do: %Channel{medium: :abstract, channel: channel}
    def construct(1, channel), do: %Channel{medium: :electricity, channel: channel}
    def construct(6, channel), do: %Channel{medium: :heat, channel: channel}
    def construct(7, channel), do: %Channel{medium: :gas, channel: channel}
    def construct(8, channel), do: %Channel{medium: :water, channel: channel}
    def construct(_, channel), do: %Channel{medium: :unknown, channel: channel}
  end

  defmodule Value do
    @moduledoc """
    A Value with an Unit

    ```
    iex> P1.parse!("1-0:32.7.0(220.1*V)")
    [
      %P1.Channel{channel: 0, medium: :electricity},
      %P1.Tags{tags: [{:voltage, :active}, {:phase, :l1}]},
      [%P1.Value{unit: "V", value: 220.1}]
    ]
    ```
    """
    defstruct value: 0, unit: ""
  end

  defmodule Tags do
    @moduledoc """
    Contains a list of tags, describing the measurement, based on the C.D.E part of the OBIS reference

    ```
    iex> P1.parse("0-0:1.0.0(181126113020W)")
    {:ok, [
      %P1.Channel{channel: 0, medium: :abstract},
      %P1.Tags{tags: [general: :timestamp]},
      ["2018-11-26T11:30:20+01:00"]
    ]}
    ```
    """
    defstruct tags: []
  end

  defmodule ObisCode do
    @moduledoc """
    Struct that represents a data (OBIS) line in the telegram

    OBIS codes have the following structure `A-B:C.D.E` and one or more values in parentheses (v1) where

    |Code|Description|
    |---|---|
    |A | specifies the medium 0=abstract, 1=electricity, 6=heat, 7=gas, 8=water|
    |B | specifies the channel, 0 is the meter itself, higher numbers are modbus connected devices|
    |C | specifies the physical value (current, voltage, energy, level, temperature, ...)|
    |D | specifies the quantity computation result of a specific algorithm|
    |E | specifies the measurement type defined by groups A to D into individual measurements (e.g. switching ranges)|

    The values are enclosed in parentheses and contain, for instance, timestamps, integers,
    hexadecimal-encoded text, or measurements with units (where a `*` separates value and unit)

    ```
    iex> P1.parse!("1-0:2.7.0(01.869*kW)") |> P1.ObisCode.construct
    %P1.ObisCode{
      channel: %P1.Channel{channel: 0, medium: :electricity},
      tags: %P1.Tags{tags: [power: :active, phase: :all, direction: :produce]},
      values: [%P1.Value{unit: "kW", value: 1.869}]
    }
    ```
    """
    defstruct channel: %Channel{}, tags: %Tags{}, values: []

    def construct([channel, tags, values]), do: %ObisCode{channel: channel, tags: tags, values: values}
  end

  defmodule Header do
    @moduledoc """
    Contains the header of the telegram

    ```
    iex(1)> P1.parse("/ISk5MT382-1000")
    {:ok, [%P1.Header{manufacturer: "ISk", model: "MT382-1000"}]}
    ```
    """
    defstruct manufacturer: "", model: ""
  end

  @doc false
  @spec parse_telegram(String.t()) :: {:ok, list} | {:error, String.t()}
  defdelegate parse_telegram(telegram), to: Parser, as: :parse_telegram

  @doc false
  @spec parse_telegram!(String.t()) :: list
  defdelegate parse_telegram!(telegram), to: Parser, as: :parse_telegram!

  @doc """
  Parses a line of text according to the P1 protocol

  ## Example

      iex> P1.parse("1-0:1.7.0(01.193*kW)")
      {:ok, [
        %P1.Channel{channel: 0, medium: :electricity},
        %P1.Tags{tags: [{:power, :active}, {:phase, :all}, {:direction, :consume}]},
        [%P1.Value{value: 1.193, unit: "kW"}]
      ]}
  """
  @spec parse(String.t()) :: {:ok, list} | {:error, String.t()}
  defdelegate parse(line), to: Parser, as: :parse

  @doc """
  Parses a line of text according to the P1 protocol

  ## Example

      iex> P1.parse!("1-0:1.8.1(123456.789*kWh)")
      [
        %P1.Channel{channel: 0, medium: :electricity},
        %P1.Tags{tags: [{:energy, :total}, {:direction, :consume}, {:tariff, :low}]},
        [%P1.Value{value: 123_456.789, unit: "kWh"}]
      ]
  """
  @spec parse!(String.t()) :: list
  defdelegate parse!(line), to: Parser, as: :parse!
end
lib/p1.ex
0.907753
0.888469
p1.ex
starcoder
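A small consumer sketch for the API above, pattern-matching the three-element result documented for `P1.parse/1` (the example line and its expected structs come straight from the module's own doctest):

```elixir
{:ok, [channel, _tags, values]} = P1.parse("1-0:1.7.0(01.193*kW)")

%P1.Channel{medium: :electricity, channel: 0} = channel
[%P1.Value{value: power_kw, unit: "kW"}] = values
power_kw  #=> 1.193
```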
defmodule Astar do
  alias Astar.HeapMap
  require HeapMap.Pattern

  @moduledoc """
  A* graph pathfinding.
  """

  @type vertex :: any
  @type nbs_f :: ((vertex) -> [vertex])
  @type distance_f :: ((vertex, vertex) -> non_neg_integer)
  @type env :: {nbs_f, distance_f, distance_f}

  @doc """
  Find path between two vertices in a directed weighted graph.

  * `env`   - a graph "environment" - the tuple `{nbs, dist, h}` where each element is a function:
    * `nbs`  - returns collection of neighbor vertices for a given vertex
    * `dist` - returns edge cost between two neighbor vertices
    * `h`    - returns estimated cost between two arbitrary vertices
  * `start` - starting vertex
  * `goal`  - vertex we want to reach, or a function of arity 1 to check if current vertex is a goal
  """
  @spec astar(env, vertex, vertex | (vertex -> bool)) :: [vertex]
  def astar({_nbs, _dist, h} = env, start, goal) do
    openmap = HeapMap.new
              |> HeapMap.add(h.(start, goal), start, 0)
    loop(env, goal, openmap, MapSet.new, Map.new)
  end

  defp has_reached_goal?(x, goal) when is_function(goal), do: goal.(x)
  defp has_reached_goal?(x, goal), do: x == goal

  @spec loop(env, vertex, HeapMap.t, MapSet.t, Map.t) :: [vertex]
  defp loop(_, _, HeapMap.Pattern.empty, _, _), do: []

  defp loop({nbs, dist, h} = env, goal, openmap, closedset, parents) do
    {_fx, x, openmap} = HeapMap.pop(openmap)

    if has_reached_goal?(x, goal) do
      cons_path(parents, x)
    else
      closedset = MapSet.put(closedset, x)

      {openmap, parents} = Enum.reduce nbs.(x), {openmap, parents}, fn(y, {openmap, parents} = continue) ->
        if MapSet.member?(closedset, y) do
          continue
        else
          est_g = HeapMap.get_by_key(openmap, x) + dist.(x, y)

          {ty, gy} = HeapMap.mapping(openmap, y)
          if gy do
            if est_g < gy do
              openmap = openmap |> HeapMap.delete(ty, y)
              update(h, x, y, goal, est_g, openmap, parents)
            else
              continue
            end
          else
            update(h, x, y, goal, est_g, openmap, parents)
          end
        end
      end

      loop(env, goal, openmap, closedset, parents)
    end
  end

  defp update(h, x, y, goal, new_gy, openmap, parents) do
    nparents = Map.put(parents, y, x)
    fy = h.(y, goal) + new_gy
    nopenmap = openmap |> HeapMap.add(fy, y, new_gy)
    {nopenmap, nparents}
  end

  @spec cons_path(map, vertex) :: [vertex]
  defp cons_path(parents, vertex), do: cons_path(parents, vertex, [])

  defp cons_path(parents, vertex, acc) do
    parent = Map.get(parents, vertex)
    if parent do
      cons_path(parents, parent, [vertex | acc])
    else
      acc
    end
  end
end
lib/astar.ex
0.795539
0.647666
astar.ex
starcoder
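A usage sketch for the `env` tuple described above, using a toy 2-D grid where vertices are `{x, y}` tuples; the neighbor, distance, and heuristic functions here are illustrative, not part of the library:

```elixir
# Toy environment: 4-neighbor grid, unit edge cost, Manhattan-distance heuristic.
nbs  = fn {x, y} -> [{x + 1, y}, {x - 1, y}, {x, y + 1}, {x, y - 1}] end
dist = fn _a, _b -> 1 end
h    = fn {x1, y1}, {x2, y2} -> abs(x1 - x2) + abs(y1 - y2) end

Astar.astar({nbs, dist, h}, {0, 0}, {2, 0})
#=> a path such as [{1, 0}, {2, 0}]
#   (cons_path/2 walks the parents map back from the goal, so the start
#    vertex itself is not included; goal could also be a predicate fun)
```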
defmodule AWS.ElasticLoadBalancing do @moduledoc """ Elastic Load Balancing A load balancer can distribute incoming traffic across your EC2 instances. This enables you to increase the availability of your application. The load balancer also monitors the health of its registered instances and ensures that it routes traffic only to healthy instances. You configure your load balancer to accept incoming traffic by specifying one or more listeners, which are configured with a protocol and port number for connections from clients to the load balancer and a protocol and port number for connections from the load balancer to the instances. Elastic Load Balancing supports three types of load balancers: Application Load Balancers, Network Load Balancers, and Classic Load Balancers. You can select a load balancer based on your application needs. For more information, see the [Elastic Load Balancing User Guide](http://docs.aws.amazon.com/elasticloadbalancing/latest/userguide/). This reference covers the 2012-06-01 API, which supports Classic Load Balancers. The 2015-12-01 API supports Application Load Balancers and Network Load Balancers. To get started, create a load balancer with one or more listeners using `CreateLoadBalancer`. Register your instances with the load balancer using `RegisterInstancesWithLoadBalancer`. All Elastic Load Balancing operations are *idempotent*, which means that they complete at most one time. If you repeat an operation, it succeeds with a 200 OK response code. """ @doc """ Adds the specified tags to the specified load balancer. Each load balancer can have a maximum of 10 tags. Each tag consists of a key and an optional value. If a tag with the same key is already associated with the load balancer, `AddTags` updates its value. For more information, see [Tag Your Classic Load Balancer](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/add-remove-tags.html) in the *Classic Load Balancers Guide*. """ def add_tags(client, input, options \\ []) do request(client, "AddTags", input, options) end @doc """ Associates one or more security groups with your load balancer in a virtual private cloud (VPC). The specified security groups override the previously associated security groups. For more information, see [Security Groups for Load Balancers in a VPC](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-security-groups.html#elb-vpc-security-groups) in the *Classic Load Balancers Guide*. """ def apply_security_groups_to_load_balancer(client, input, options \\ []) do request(client, "ApplySecurityGroupsToLoadBalancer", input, options) end @doc """ Adds one or more subnets to the set of configured subnets for the specified load balancer. The load balancer evenly distributes requests across all registered subnets. For more information, see [Add or Remove Subnets for Your Load Balancer in a VPC](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-manage-subnets.html) in the *Classic Load Balancers Guide*. """ def attach_load_balancer_to_subnets(client, input, options \\ []) do request(client, "AttachLoadBalancerToSubnets", input, options) end @doc """ Specifies the health check settings to use when evaluating the health state of your EC2 instances. For more information, see [Configure Health Checks for Your Load Balancer](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-healthchecks.html) in the *Classic Load Balancers Guide*. 
""" def configure_health_check(client, input, options \\ []) do request(client, "ConfigureHealthCheck", input, options) end @doc """ Generates a stickiness policy with sticky session lifetimes that follow that of an application-generated cookie. This policy can be associated only with HTTP/HTTPS listeners. This policy is similar to the policy created by `CreateLBCookieStickinessPolicy`, except that the lifetime of the special Elastic Load Balancing cookie, `AWSELB`, follows the lifetime of the application-generated cookie specified in the policy configuration. The load balancer only inserts a new stickiness cookie when the application response includes a new application cookie. If the application cookie is explicitly removed or expires, the session stops being sticky until a new application cookie is issued. For more information, see [Application-Controlled Session Stickiness](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-sticky-sessions.html#enable-sticky-sessions-application) in the *Classic Load Balancers Guide*. """ def create_app_cookie_stickiness_policy(client, input, options \\ []) do request(client, "CreateAppCookieStickinessPolicy", input, options) end @doc """ Generates a stickiness policy with sticky session lifetimes controlled by the lifetime of the browser (user-agent) or a specified expiration period. This policy can be associated only with HTTP/HTTPS listeners. When a load balancer implements this policy, the load balancer uses a special cookie to track the instance for each request. When the load balancer receives a request, it first checks to see if this cookie is present in the request. If so, the load balancer sends the request to the application server specified in the cookie. If not, the load balancer sends the request to a server that is chosen based on the existing load-balancing algorithm. A cookie is inserted into the response for binding subsequent requests from the same user to that server. The validity of the cookie is based on the cookie expiration time, which is specified in the policy configuration. For more information, see [Duration-Based Session Stickiness](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-sticky-sessions.html#enable-sticky-sessions-duration) in the *Classic Load Balancers Guide*. """ def create_l_b_cookie_stickiness_policy(client, input, options \\ []) do request(client, "CreateLBCookieStickinessPolicy", input, options) end @doc """ Creates a Classic Load Balancer. You can add listeners, security groups, subnets, and tags when you create your load balancer, or you can add them later using `CreateLoadBalancerListeners`, `ApplySecurityGroupsToLoadBalancer`, `AttachLoadBalancerToSubnets`, and `AddTags`. To describe your current load balancers, see `DescribeLoadBalancers`. When you are finished with a load balancer, you can delete it using `DeleteLoadBalancer`. You can create up to 20 load balancers per region per account. You can request an increase for the number of load balancers for your account. For more information, see [Limits for Your Classic Load Balancer](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-limits.html) in the *Classic Load Balancers Guide*. """ def create_load_balancer(client, input, options \\ []) do request(client, "CreateLoadBalancer", input, options) end @doc """ Creates one or more listeners for the specified load balancer. 
If a listener with the specified port does not already exist, it is created; otherwise, the properties of the new listener must match the properties of the existing listener.

  For more information, see [Listeners for Your Classic Load Balancer](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-listener-config.html) in the *Classic Load Balancers Guide*.
  """
  def create_load_balancer_listeners(client, input, options \\ []) do
    request(client, "CreateLoadBalancerListeners", input, options)
  end

  @doc """
  Creates a policy with the specified attributes for the specified load balancer.

  Policies are settings that are saved for your load balancer and that can be applied to the listener or the application server, depending on the policy type.
  """
  def create_load_balancer_policy(client, input, options \\ []) do
    request(client, "CreateLoadBalancerPolicy", input, options)
  end

  @doc """
  Deletes the specified load balancer.

  If you are attempting to recreate a load balancer, you must reconfigure all settings. The DNS name associated with a deleted load balancer is no longer usable. The name and associated DNS record of the deleted load balancer no longer exist and traffic sent to any of its IP addresses is no longer delivered to your instances.

  If the load balancer does not exist or has already been deleted, the call to `DeleteLoadBalancer` still succeeds.
  """
  def delete_load_balancer(client, input, options \\ []) do
    request(client, "DeleteLoadBalancer", input, options)
  end

  @doc """
  Deletes the specified listeners from the specified load balancer.
  """
  def delete_load_balancer_listeners(client, input, options \\ []) do
    request(client, "DeleteLoadBalancerListeners", input, options)
  end

  @doc """
  Deletes the specified policy from the specified load balancer. This policy must not be enabled for any listeners.
  """
  def delete_load_balancer_policy(client, input, options \\ []) do
    request(client, "DeleteLoadBalancerPolicy", input, options)
  end

  @doc """
  Deregisters the specified instances from the specified load balancer. After the instance is deregistered, it no longer receives traffic from the load balancer.

  You can use `DescribeLoadBalancers` to verify that the instance is deregistered from the load balancer.

  For more information, see [Register or De-Register EC2 Instances](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-deregister-register-instances.html) in the *Classic Load Balancers Guide*.
  """
  def deregister_instances_from_load_balancer(client, input, options \\ []) do
    request(client, "DeregisterInstancesFromLoadBalancer", input, options)
  end

  @doc """
  Describes the current Elastic Load Balancing resource limits for your AWS account.

  For more information, see [Limits for Your Classic Load Balancer](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-limits.html) in the *Classic Load Balancers Guide*.
  """
  def describe_account_limits(client, input, options \\ []) do
    request(client, "DescribeAccountLimits", input, options)
  end

  @doc """
  Describes the state of the specified instances with respect to the specified load balancer. If no instances are specified, the call describes the state of all instances that are currently registered with the load balancer. If instances are specified, their state is returned even if they are no longer registered with the load balancer. The state of terminated instances is not returned.
  """
  def describe_instance_health(client, input, options \\ []) do
    request(client, "DescribeInstanceHealth", input, options)
  end

  @doc """
  Describes the attributes for the specified load balancer.
  """
  def describe_load_balancer_attributes(client, input, options \\ []) do
    request(client, "DescribeLoadBalancerAttributes", input, options)
  end

  @doc """
  Describes the specified policies.

  If you specify a load balancer name, the action returns the descriptions of all policies created for the load balancer. If you specify a policy name associated with your load balancer, the action returns the description of that policy. If you don't specify a load balancer name, the action returns descriptions of the specified sample policies, or descriptions of all sample policies. The names of the sample policies have the `ELBSample-` prefix.
  """
  def describe_load_balancer_policies(client, input, options \\ []) do
    request(client, "DescribeLoadBalancerPolicies", input, options)
  end

  @doc """
  Describes the specified load balancer policy types or all load balancer policy types.

  The description of each type indicates how it can be used. For example, some policies can be used only with layer 7 listeners, some policies can be used only with layer 4 listeners, and some policies can be used only with your EC2 instances.

  You can use `CreateLoadBalancerPolicy` to create a policy configuration for any of these policy types. Then, depending on the policy type, use either `SetLoadBalancerPoliciesOfListener` or `SetLoadBalancerPoliciesForBackendServer` to set the policy.
  """
  def describe_load_balancer_policy_types(client, input, options \\ []) do
    request(client, "DescribeLoadBalancerPolicyTypes", input, options)
  end

  @doc """
  Describes the specified load balancers. If no load balancers are specified, the call describes all of your load balancers.
  """
  def describe_load_balancers(client, input, options \\ []) do
    request(client, "DescribeLoadBalancers", input, options)
  end

  @doc """
  Describes the tags associated with the specified load balancers.
  """
  def describe_tags(client, input, options \\ []) do
    request(client, "DescribeTags", input, options)
  end

  @doc """
  Removes the specified subnets from the set of configured subnets for the load balancer.

  After a subnet is removed, all EC2 instances registered with the load balancer in the removed subnet go into the `OutOfService` state. Then, the load balancer balances the traffic among the remaining routable subnets.
  """
  def detach_load_balancer_from_subnets(client, input, options \\ []) do
    request(client, "DetachLoadBalancerFromSubnets", input, options)
  end

  @doc """
  Removes the specified Availability Zones from the set of Availability Zones for the specified load balancer in EC2-Classic or a default VPC.

  For load balancers in a non-default VPC, use `DetachLoadBalancerFromSubnets`.

  There must be at least one Availability Zone registered with a load balancer at all times. After an Availability Zone is removed, all instances registered with the load balancer that are in the removed Availability Zone go into the `OutOfService` state. Then, the load balancer attempts to equally balance the traffic among its remaining Availability Zones.

  For more information, see [Add or Remove Availability Zones](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-disable-az.html) in the *Classic Load Balancers Guide*.
""" def disable_availability_zones_for_load_balancer(client, input, options \\ []) do request(client, "DisableAvailabilityZonesForLoadBalancer", input, options) end @doc """ Adds the specified Availability Zones to the set of Availability Zones for the specified load balancer in EC2-Classic or a default VPC. For load balancers in a non-default VPC, use `AttachLoadBalancerToSubnets`. The load balancer evenly distributes requests across all its registered Availability Zones that contain instances. For more information, see [Add or Remove Availability Zones](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-disable-az.html) in the *Classic Load Balancers Guide*. """ def enable_availability_zones_for_load_balancer(client, input, options \\ []) do request(client, "EnableAvailabilityZonesForLoadBalancer", input, options) end @doc """ Modifies the attributes of the specified load balancer. You can modify the load balancer attributes, such as `AccessLogs`, `ConnectionDraining`, and `CrossZoneLoadBalancing` by either enabling or disabling them. Or, you can modify the load balancer attribute `ConnectionSettings` by specifying an idle connection timeout value for your load balancer. For more information, see the following in the *Classic Load Balancers Guide*: <ul> <li> [Cross-Zone Load Balancing](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-disable-crosszone-lb.html) </li> <li> [Connection Draining](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/config-conn-drain.html) </li> <li> [Access Logs](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/access-log-collection.html) </li> <li> [Idle Connection Timeout](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/config-idle-timeout.html) </li> </ul> """ def modify_load_balancer_attributes(client, input, options \\ []) do request(client, "ModifyLoadBalancerAttributes", input, options) end @doc """ Adds the specified instances to the specified load balancer. The instance must be a running instance in the same network as the load balancer (EC2-Classic or the same VPC). If you have EC2-Classic instances and a load balancer in a VPC with ClassicLink enabled, you can link the EC2-Classic instances to that VPC and then register the linked EC2-Classic instances with the load balancer in the VPC. Note that `RegisterInstanceWithLoadBalancer` completes when the request has been registered. Instance registration takes a little time to complete. To check the state of the registered instances, use `DescribeLoadBalancers` or `DescribeInstanceHealth`. After the instance is registered, it starts receiving traffic and requests from the load balancer. Any instance that is not in one of the Availability Zones registered for the load balancer is moved to the `OutOfService` state. If an Availability Zone is added to the load balancer later, any instances registered with the load balancer move to the `InService` state. To deregister instances from a load balancer, use `DeregisterInstancesFromLoadBalancer`. For more information, see [Register or De-Register EC2 Instances](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-deregister-register-instances.html) in the *Classic Load Balancers Guide*. """ def register_instances_with_load_balancer(client, input, options \\ []) do request(client, "RegisterInstancesWithLoadBalancer", input, options) end @doc """ Removes one or more tags from the specified load balancer. 
""" def remove_tags(client, input, options \\ []) do request(client, "RemoveTags", input, options) end @doc """ Sets the certificate that terminates the specified listener's SSL connections. The specified certificate replaces any prior certificate that was used on the same load balancer and port. For more information about updating your SSL certificate, see [Replace the SSL Certificate for Your Load Balancer](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-update-ssl-cert.html) in the *Classic Load Balancers Guide*. """ def set_load_balancer_listener_s_s_l_certificate(client, input, options \\ []) do request(client, "SetLoadBalancerListenerSSLCertificate", input, options) end @doc """ Replaces the set of policies associated with the specified port on which the EC2 instance is listening with a new set of policies. At this time, only the back-end server authentication policy type can be applied to the instance ports; this policy type is composed of multiple public key policies. Each time you use `SetLoadBalancerPoliciesForBackendServer` to enable the policies, use the `PolicyNames` parameter to list the policies that you want to enable. You can use `DescribeLoadBalancers` or `DescribeLoadBalancerPolicies` to verify that the policy is associated with the EC2 instance. For more information about enabling back-end instance authentication, see [Configure Back-end Instance Authentication](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-create-https-ssl-load-balancer.html#configure_backendauth_clt) in the *Classic Load Balancers Guide*. For more information about Proxy Protocol, see [Configure Proxy Protocol Support](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-proxy-protocol.html) in the *Classic Load Balancers Guide*. """ def set_load_balancer_policies_for_backend_server(client, input, options \\ []) do request(client, "SetLoadBalancerPoliciesForBackendServer", input, options) end @doc """ Replaces the current set of policies for the specified load balancer port with the specified set of policies. To enable back-end server authentication, use `SetLoadBalancerPoliciesForBackendServer`. For more information about setting policies, see [Update the SSL Negotiation Configuration](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/ssl-config-update.html), [Duration-Based Session Stickiness](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-sticky-sessions.html#enable-sticky-sessions-duration), and [Application-Controlled Session Stickiness](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-sticky-sessions.html#enable-sticky-sessions-application) in the *Classic Load Balancers Guide*. 
""" def set_load_balancer_policies_of_listener(client, input, options \\ []) do request(client, "SetLoadBalancerPoliciesOfListener", input, options) end @spec request(AWS.Client.t(), binary(), map(), list()) :: {:ok, Poison.Parser.t() | nil, Poison.Response.t()} | {:error, Poison.Parser.t()} | {:error, HTTPoison.Error.t()} defp request(client, action, input, options) do client = %{client | service: "elasticloadbalancing"} host = build_host("elasticloadbalancing", client) url = build_url(host, client) headers = [ {"Host", host}, {"Content-Type", "application/x-www-form-urlencoded"} ] input = Map.merge(input, %{"Action" => action, "Version" => "2012-06-01"}) payload = AWS.Util.encode_query(input) headers = AWS.Request.sign_v4(client, "POST", url, headers, payload) case HTTPoison.post(url, payload, headers, options) do {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} -> {:ok, nil, response} {:ok, %HTTPoison.Response{status_code: 200, body: body} = response} -> {:ok, AWS.Util.decode_xml(body), response} {:ok, %HTTPoison.Response{body: body}} -> error = AWS.Util.decode_xml(body) {:error, error} {:error, %HTTPoison.Error{reason: reason}} -> {:error, %HTTPoison.Error{reason: reason}} end end defp build_host(_endpoint_prefix, %{region: "local"}) do "localhost" end defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do "#{endpoint_prefix}.#{region}.#{endpoint}" end defp build_url(host, %{:proto => proto, :port => port}) do "#{proto}://#{host}:#{port}/" end end
lib/aws/elastic_load_balancing.ex
0.923364
0.582788
elastic_load_balancing.ex
starcoder
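A hedged sketch of calling the module above. The real client struct comes from the surrounding aws-elixir library; the map below only shows the fields this module visibly reads (`region`, `endpoint`, `proto`, `port`, plus `service`, which `request/4` rebinds) and credential fields consumed by `AWS.Request.sign_v4/5` — treat the credential key names as assumptions:

```elixir
client = %{
  access_key_id: System.get_env("AWS_ACCESS_KEY_ID"),      # assumed field name
  secret_access_key: System.get_env("AWS_SECRET_ACCESS_KEY"),
  region: "us-east-1",
  endpoint: "amazonaws.com",
  proto: "https",
  port: 443,
  service: nil  # placeholder; request/4 uses %{client | service: ...}
}

case AWS.ElasticLoadBalancing.describe_load_balancers(client, %{}) do
  {:ok, result, _raw_response} -> result
  {:error, reason} -> reason
end
```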
defmodule Mix.Tasks.Netler.New do
  @moduledoc """
  Creates a new embedded .NET project and an Elixir module for communicating with the .NET project

  ## Usage
  ```bash
  > mix netler.new
  ```
  """
  use Mix.Task
  alias Netler.Compiler.Dotnet

  @impl true
  def run(_args) do
    dotnet_project =
      Mix.Shell.IO.prompt("Please give your .NET project a name:")
      |> String.trim()
      |> Macro.underscore()

    case dotnet_project do
      "" ->
        log_error("Aborting: No project name given.")
        :error

      dotnet_project ->
        project_path = Dotnet.project_path(dotnet_project)
        app = Mix.Project.config() |> Keyword.get(:app)
        application_name = app |> Atom.to_string()
        lib_path = Path.expand("lib/#{application_name}")

        create_source_files_from_templates(
          application_name,
          lib_path,
          project_path,
          dotnet_project
        )

        log_info(
          "Done! Remember to add :#{dotnet_project} to the dotnet_projects list in your application's mix.exs"
        )

        :ok
    end
  end

  defp create_source_files_from_templates(
         application_name,
         lib_path,
         project_path,
         dotnet_project
       ) do
    File.mkdir_p!(project_path)
    csproj_file = "#{project_path}/#{Macro.camelize(dotnet_project)}.csproj"
    program_file = "#{project_path}/Program.cs"

    File.write!(csproj_file, csproj_template())
    log_info("Created #{csproj_file}")

    File.write!(program_file, program_template(dotnet_project))
    log_info("Created #{program_file}")

    File.mkdir_p!(lib_path)
    ex_file = "#{lib_path}/#{dotnet_project}.ex"
    File.write!(ex_file, elixir_module_template(application_name, dotnet_project))
    log_info("Created #{ex_file}")
  end

  defp elixir_module_template(application_name, dotnet_project) do
    """
    defmodule #{Macro.camelize(application_name)}.#{Macro.camelize(dotnet_project)} do
      use Netler, dotnet_project: :#{dotnet_project}

      def add(a, b), do: invoke("Add", [a, b])
    end
    """
  end

  defp csproj_template do
    """
    <Project Sdk="Microsoft.NET.Sdk">
      <PropertyGroup>
        <OutputType>Exe</OutputType>
        <TargetFramework>netcoreapp3.1</TargetFramework>
      </PropertyGroup>
      <ItemGroup>
        <PackageReference Include="Netler.NET" Version="1.*" />
      </ItemGroup>
    </Project>
    """
  end

  defp program_template(dotnet_project) do
    """
    using System;
    using System.Collections.Generic;
    using System.Threading.Tasks;
    using Netler;

    namespace #{Macro.camelize(dotnet_project)}
    {
        class Program
        {
            static async Task Main(string[] args)
            {
                var port = Convert.ToInt32(args[0]);
                var clientPid = Convert.ToInt32(args[1]);

                var server = Server.Create((config) =>
                {
                    config.UsePort(port);
                    config.UseClientPid(clientPid);
                    config.UseRoutes((routes) =>
                    {
                        routes.Add("Add", Add);
                        // More routes can be added here ...
                    });
                });

                await server.Start();
            }

            static object Add(params object[] parameters)
            {
                var a = Convert.ToInt32(parameters[0]);
                var b = Convert.ToInt32(parameters[1]);
                return a + b;
            }
        }
    }
    """
  end

  defp log_info(message) do
    Mix.Shell.IO.info([:blue, message])
  end

  defp log_error(message) do
    Mix.Shell.IO.info([:red, message])
  end
end
lib/mix/tasks/netler.new.ex
0.688887
0.50592
netler.new.ex
starcoder
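A sketch of what using the generated files above might look like, assuming an application named `my_app` and a project named `calculator`. The exact placement of the `dotnet_projects` key inside `mix.exs` is an assumption; the task's closing log message only says to add the project to that list:

```elixir
# In mix.exs (assumed shape, per the generator's closing hint):
def project do
  [
    app: :my_app,
    dotnet_projects: [:calculator],  # hypothetical placement of the key
    # ...
  ]
end

# The generated Elixir module then delegates to the .NET "Add" route:
MyApp.Calculator.add(1, 2)
# returns whatever the C# Add handler produced, presumably 3
# (the exact return shape depends on Netler's invoke/2)
```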
defmodule Baseball.Base64.Decoder do
  @moduledoc """
  Functions for Base64 Decoding

  The base64 encoded input should have a length that is divisible by 4, since it is emitted
  in 4-character chunks. 1 base64-encoded character represents 6 bits, and 1 base64-decoded
  character represents 8 bits. So 4 base64-encoded characters become 3 base64-decoded
  characters (assuming the decoded content is text; it does not need to be).

  We decode by reading chunks of 4 characters and decoding them based on these criteria:

  1) A chunk with 4 characters and no padding characters (=) is converted into 3 8-bit output values.
  2) A chunk with 1 padding character is converted into 2 8-bit output values.
  3) A chunk with 2 padding characters is converted into 1 8-bit output value.
  """
  use Bitwise
  import Baseball.Helpers

  # Base64-decoding table
  @decoding_table "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
                  |> String.codepoints()
                  |> Enum.with_index()
                  |> Enum.into(%{})

  @doc """
  Decode a Base64 encoded binary
  """
  def decode_binary(input, opts \\ []) when is_binary(input) do
    allow_padding? = Keyword.get(opts, :allow_padding?, true)

    if valid_length?(input) do
      input
      |> stream_binary_in_4_character_chunks(allow_padding?)
      |> Stream.map(&get_decoded_values/1)
      |> Stream.map(fn
        {:ok, block} -> {:ok, decode_block(block)}
        {:error, message} -> {:error, message}
      end)
      |> collect_while_ok()
    else
      {:error, :invalid_input_length}
    end
  end

  # Decode a block with 2 padding characters into a single 8-bit value
  # The 2 padding characters have been removed, so we only have 2 6-bit inputs
  defp decode_block([ia, ib_padded]) do
    # Remove padding from ib
    ib = Bitwise.>>>(ib_padded, 4)

    # Convert the 6-bit and 2-bit values to 1 8-bit value
    <<ia::6, ib::2>>
  end

  # Decode a block with 1 padding character into 2 8-bit values
  # The 1 padding character has been removed, so we only have 3 6-bit inputs
  defp decode_block([ia, ib, ic_padded]) do
    # Remove padding from ic
    ic = Bitwise.>>>(ic_padded, 2)

    # Convert the 6-bit and 4-bit values to 2 8-bit values
    <<ia::6, ib::6, ic::4>>
  end

  # Decode a full block into 3 8-bit values
  defp decode_block([ia, ib, ic, id]) do
    # Convert the 4 6-bit input values to 3 8-bit values
    <<ia::6, ib::6, ic::6, id::6>>
  end

  # Stream a binary in 4-character chunks, handling the end blocks by removing padding
  defp stream_binary_in_4_character_chunks(input, true) do
    Stream.unfold(input, fn
      <<>> -> nil
      <<a, b, c, d, rest::binary>> when rest != <<>> -> {<<a, b, c, d>>, rest}
      <<a, b, "=", "=">> -> {<<a, b>>, <<>>}
      <<a, b, c, "=">> -> {<<a, b, c>>, <<>>}
      <<a, b, c, d>> -> {<<a, b, c, d>>, <<>>}
    end)
  end

  defp stream_binary_in_4_character_chunks(input, false) do
    Stream.unfold(input, fn
      <<>> -> nil
      <<a, b, c, d, rest::binary>> -> {<<a, b, c, d>>, rest}
    end)
  end

  # Get the decoded values from the decoding map or return an error
  defp get_decoded_values(values) do
    values
    |> String.split("", trim: true)
    |> map_while_ok(fn val ->
      case Map.fetch(@decoding_table, val) do
        {:ok, val} -> {:ok, val}
        :error -> {:error, :invalid_character_in_input}
      end
    end)
  end

  defp valid_length?(input) when is_binary(input), do: rem(String.length(input), 4) == 0
end
lib/baseball/base64/decoder.ex
0.801315
0.536677
decoder.ex
starcoder
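A worked example of the chunk rules described in the moduledoc above, run through the public API. The byte-level results follow directly from the three `decode_block/1` clauses; the success-tuple shape assumes the `collect_while_ok/1` helper joins the per-chunk values:

```elixir
# Assuming collect_while_ok/1 concatenates the ok-values per chunk:
Baseball.Base64.Decoder.decode_binary("TWFu")  #=> {:ok, "Man"}  4 chars -> 3 bytes
Baseball.Base64.Decoder.decode_binary("TWE=")  #=> {:ok, "Ma"}   one '='  -> 2 bytes
Baseball.Base64.Decoder.decode_binary("TQ==")  #=> {:ok, "M"}    two '='  -> 1 byte

# Inputs whose length is not a multiple of 4 are rejected up front:
Baseball.Base64.Decoder.decode_binary("TWF")   #=> {:error, :invalid_input_length}
```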
defmodule AWS.AppStream do @moduledoc """ Amazon AppStream 2.0 You can use Amazon AppStream 2.0 to stream desktop applications to any device running a web browser, without rewriting them. """ @doc """ Associates the specified fleet with the specified stack. """ def associate_fleet(client, input, options \\ []) do request(client, "AssociateFleet", input, options) end @doc """ Copies the image within the same region or to a new region within the same AWS account. Note that any tags you added to the image will not be copied. """ def copy_image(client, input, options \\ []) do request(client, "CopyImage", input, options) end @doc """ Creates a directory configuration. """ def create_directory_config(client, input, options \\ []) do request(client, "CreateDirectoryConfig", input, options) end @doc """ Creates a fleet. """ def create_fleet(client, input, options \\ []) do request(client, "CreateFleet", input, options) end @doc """ Creates an image builder. The initial state of the builder is `PENDING`. When it is ready, the state is `RUNNING`. """ def create_image_builder(client, input, options \\ []) do request(client, "CreateImageBuilder", input, options) end @doc """ Creates a URL to start an image builder streaming session. """ def create_image_builder_streaming_u_r_l(client, input, options \\ []) do request(client, "CreateImageBuilderStreamingURL", input, options) end @doc """ Creates a stack. """ def create_stack(client, input, options \\ []) do request(client, "CreateStack", input, options) end @doc """ Creates a URL to start a streaming session for the specified user. """ def create_streaming_u_r_l(client, input, options \\ []) do request(client, "CreateStreamingURL", input, options) end @doc """ Deletes the specified directory configuration. """ def delete_directory_config(client, input, options \\ []) do request(client, "DeleteDirectoryConfig", input, options) end @doc """ Deletes the specified fleet. """ def delete_fleet(client, input, options \\ []) do request(client, "DeleteFleet", input, options) end @doc """ Deletes the specified image. You cannot delete an image that is currently in use. After you delete an image, you cannot provision new capacity using the image. """ def delete_image(client, input, options \\ []) do request(client, "DeleteImage", input, options) end @doc """ Deletes the specified image builder and releases the capacity. """ def delete_image_builder(client, input, options \\ []) do request(client, "DeleteImageBuilder", input, options) end @doc """ Deletes the specified stack. After this operation completes, the environment can no longer be activated and any reservations made for the stack are released. """ def delete_stack(client, input, options \\ []) do request(client, "DeleteStack", input, options) end @doc """ Describes the specified directory configurations. Note that although the response syntax in this topic includes the account password, this password is not returned in the actual response. """ def describe_directory_configs(client, input, options \\ []) do request(client, "DescribeDirectoryConfigs", input, options) end @doc """ Describes the specified fleets or all fleets in the account. """ def describe_fleets(client, input, options \\ []) do request(client, "DescribeFleets", input, options) end @doc """ Describes the specified image builders or all image builders in the account. 
""" def describe_image_builders(client, input, options \\ []) do request(client, "DescribeImageBuilders", input, options) end @doc """ Describes the specified images or all images in the account. """ def describe_images(client, input, options \\ []) do request(client, "DescribeImages", input, options) end @doc """ Describes the streaming sessions for the specified stack and fleet. If a user ID is provided, only the streaming sessions for only that user are returned. If an authentication type is not provided, the default is to authenticate users using a streaming URL. """ def describe_sessions(client, input, options \\ []) do request(client, "DescribeSessions", input, options) end @doc """ Describes the specified stacks or all stacks in the account. """ def describe_stacks(client, input, options \\ []) do request(client, "DescribeStacks", input, options) end @doc """ Disassociates the specified fleet from the specified stack. """ def disassociate_fleet(client, input, options \\ []) do request(client, "DisassociateFleet", input, options) end @doc """ Stops the specified streaming session. """ def expire_session(client, input, options \\ []) do request(client, "ExpireSession", input, options) end @doc """ Lists the fleets associated with the specified stack. """ def list_associated_fleets(client, input, options \\ []) do request(client, "ListAssociatedFleets", input, options) end @doc """ Lists the stacks associated with the specified fleet. """ def list_associated_stacks(client, input, options \\ []) do request(client, "ListAssociatedStacks", input, options) end @doc """ Lists the tags for the specified AppStream 2.0 resource. You can tag AppStream 2.0 image builders, images, fleets, and stacks. For more information about tags, see [Tagging Your Resources](http://docs.aws.amazon.com/appstream2/latest/developerguide/tagging-basic.html) in the *Amazon AppStream 2.0 Developer Guide*. """ def list_tags_for_resource(client, input, options \\ []) do request(client, "ListTagsForResource", input, options) end @doc """ Starts the specified fleet. """ def start_fleet(client, input, options \\ []) do request(client, "StartFleet", input, options) end @doc """ Starts the specified image builder. """ def start_image_builder(client, input, options \\ []) do request(client, "StartImageBuilder", input, options) end @doc """ Stops the specified fleet. """ def stop_fleet(client, input, options \\ []) do request(client, "StopFleet", input, options) end @doc """ Stops the specified image builder. """ def stop_image_builder(client, input, options \\ []) do request(client, "StopImageBuilder", input, options) end @doc """ Adds or overwrites one or more tags for the specified AppStream 2.0 resource. You can tag AppStream 2.0 image builders, images, fleets, and stacks. Each tag consists of a key and an optional value. If a resource already has a tag with the same key, this operation updates its value. To list the current tags for your resources, use `ListTagsForResource`. To disassociate tags from your resources, use `UntagResource`. For more information about tags, see [Tagging Your Resources](http://docs.aws.amazon.com/appstream2/latest/developerguide/tagging-basic.html) in the *Amazon AppStream 2.0 Developer Guide*. """ def tag_resource(client, input, options \\ []) do request(client, "TagResource", input, options) end @doc """ Disassociates the specified tags from the specified AppStream 2.0 resource. To list the current tags for your resources, use `ListTagsForResource`. 
For more information about tags, see [Tagging Your Resources](http://docs.aws.amazon.com/appstream2/latest/developerguide/tagging-basic.html) in the *Amazon AppStream 2.0 Developer Guide*. """ def untag_resource(client, input, options \\ []) do request(client, "UntagResource", input, options) end @doc """ Updates the specified directory configuration. """ def update_directory_config(client, input, options \\ []) do request(client, "UpdateDirectoryConfig", input, options) end @doc """ Updates the specified fleet. If the fleet is in the `STOPPED` state, you can update any attribute except the fleet name. If the fleet is in the `RUNNING` state, you can update the `DisplayName` and `ComputeCapacity` attributes. If the fleet is in the `STARTING` or `STOPPING` state, you can't update it. """ def update_fleet(client, input, options \\ []) do request(client, "UpdateFleet", input, options) end @doc """ Updates the specified stack. """ def update_stack(client, input, options \\ []) do request(client, "UpdateStack", input, options) end @spec request(map(), binary(), map(), list()) :: {:ok, Poison.Parser.t | nil, Poison.Response.t} | {:error, Poison.Parser.t} | {:error, HTTPoison.Error.t} defp request(client, action, input, options) do client = %{client | service: "appstream2"} host = get_host("appstream2", client) url = get_url(host, client) headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}, {"X-Amz-Target", "PhotonAdminProxyService.#{action}"}] payload = Poison.Encoder.encode(input, []) headers = AWS.Request.sign_v4(client, "POST", url, headers, payload) case HTTPoison.post(url, payload, headers, options) do {:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} -> {:ok, nil, response} {:ok, response=%HTTPoison.Response{status_code: 200, body: body}} -> {:ok, Poison.Parser.parse!(body), response} {:ok, _response=%HTTPoison.Response{body: body}} -> error = Poison.Parser.parse!(body) exception = error["__type"] message = error["message"] {:error, {exception, message}} {:error, %HTTPoison.Error{reason: reason}} -> {:error, %HTTPoison.Error{reason: reason}} end end defp get_host(endpoint_prefix, client) do if client.region == "local" do "localhost" else "#{endpoint_prefix}.#{client.region}.#{client.endpoint}" end end defp get_url(host, %{:proto => proto, :port => port}) do "#{proto}://#{host}:#{port}/" end end
lib/aws/appstream.ex
0.860076
0.40698
appstream.ex
starcoder
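# A minimal usage sketch for the AWS.AppStream module above. The %AWS.Client{}
# shape is an assumption inferred from request/4 (which reads :region,
# :endpoint, :proto and :port) and from the AWS.Request.sign_v4/5 call, which
# needs credentials; check the AWS.Client struct in your aws-elixir version.
client = %AWS.Client{
  access_key_id: System.get_env("AWS_ACCESS_KEY_ID"),
  secret_access_key: System.get_env("AWS_SECRET_ACCESS_KEY"),
  region: "us-east-1",
  endpoint: "amazonaws.com",
  proto: "https",
  port: 443
}

{:ok, result, _http_response} = AWS.AppStream.describe_fleets(client, %{})
Enum.each(result["Fleets"] || [], &IO.puts(&1["Name"]))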
defmodule ExJsonSchema.Schema do
  defmodule UnsupportedSchemaVersionError do
    defexception message: "unsupported schema version, only draft 4 is supported"
  end

  defmodule InvalidSchemaError do
    defexception message: "invalid schema"
  end

  alias ExJsonSchema.Schema.Draft4
  alias ExJsonSchema.Schema.Root

  @type resolved :: %{String.t => ExJsonSchema.json_value | (Root.t -> {Root.t, resolved})}

  @current_draft_schema_url "http://json-schema.org/schema"
  @draft4_schema_url "http://json-schema.org/draft-04/schema"

  @spec resolve(Root.t) :: Root.t | no_return
  def resolve(root = %Root{}), do: resolve_root(root)

  @spec resolve(ExJsonSchema.json) :: Root.t | no_return
  def resolve(schema = %{}), do: resolve_root(%Root{schema: schema})

  defp resolve_root(root) do
    assert_supported_schema_version(Map.get(root.schema, "$schema", @current_draft_schema_url <> "#"))
    assert_valid_schema(root.schema)
    {root, schema} = resolve_with_root(root, root.schema)
    %{root | schema: schema}
  end

  defp assert_supported_schema_version(version) do
    unless supported_schema_version?(version), do: raise UnsupportedSchemaVersionError
  end

  defp assert_valid_schema(schema) do
    unless meta?(schema) do
      case ExJsonSchema.Validator.validate(resolve(Draft4.schema), schema) do
        {:error, errors} ->
          raise InvalidSchemaError,
            message: "schema did not pass validation against its meta-schema: #{inspect(errors)}"
        _ ->
          :ok
      end
    end
  end

  defp supported_schema_version?(version) do
    case version do
      @current_draft_schema_url <> _ -> true
      @draft4_schema_url <> _ -> true
      _ -> false
    end
  end

  defp resolve_with_root(root, schema, scope \\ "")

  defp resolve_with_root(root, schema = %{"id" => id}, scope) when is_binary(id) do
    do_resolve(root, schema, scope <> id)
  end

  defp resolve_with_root(root, schema = %{}, scope) do
    do_resolve(root, schema, scope)
  end

  defp resolve_with_root(root, non_schema, _scope) do
    {root, non_schema}
  end

  defp do_resolve(root, schema, scope) do
    {root, schema} =
      Enum.reduce(schema, {root, %{}}, fn property, {root, schema} ->
        {root, {k, v}} = resolve_property(root, property, scope)
        {root, Map.put(schema, k, v)}
      end)

    {root, schema |> sanitize_properties |> sanitize_items}
  end

  defp resolve_property(root, {key, value}, scope) when is_map(value) do
    {root, resolved} = resolve_with_root(root, value, scope)
    {root, {key, resolved}}
  end

  defp resolve_property(root, {key, values}, scope) when is_list(values) do
    {root, values} =
      Enum.reduce(values, {root, []}, fn value, {root, values} ->
        {root, resolved} = resolve_with_root(root, value, scope)
        {root, [resolved | values]}
      end)

    {root, {key, Enum.reverse(values)}}
  end

  defp resolve_property(root, {"$ref", ref}, scope) do
    ref = String.replace(scope <> ref, "##", "#")
    {root, ref} = resolve_ref(root, ref)
    {root, {"$ref", ref}}
  end

  defp resolve_property(root, tuple, _), do: {root, tuple}

  defp resolve_ref(root, "#") do
    {root, &root_schema_resolver/1}
  end

  defp resolve_ref(root, ref) do
    [url | fragments] = String.split(ref, "#")

    relative_resolver =
      case fragments do
        [fragment = "/" <> _] -> relative_ref_resolver(fragment)
        _ -> &root_schema_resolver/1
      end

    # Rebinding inside `if` does not leak out of its scope, so thread the
    # updated root and resolver through explicitly.
    {root, resolver} =
      if url != "" do
        root = resolve_and_cache_remote_schema(root, url)
        {root, url_with_relative_ref_resolver(url, relative_resolver)}
      else
        {root, relative_resolver}
      end

    assert_reference_valid(resolver, root, ref)
    {root, resolver}
  end

  defp relative_ref_resolver(ref) do
    ["" | keys] = unescaped_ref_segments(ref)

    keys =
      Enum.map(keys, fn key ->
        case key =~ ~r/^\d+$/ do
          true ->
            index = String.to_integer(key)
            # `get_in/2` accessor functions must apply `next` so the rest of
            # the path keeps working.
            fn :get, data, next -> next.(Enum.at(data, index)) end
          false ->
            key
        end
      end)

    fn root -> {root, get_in(root.schema, keys)} end
  end

  defp url_with_relative_ref_resolver(url, relative_ref_resolver) do
    fn root ->
      remote_schema = root.refs[url]
      relative_ref_resolver.(%{root | schema: remote_schema})
    end
  end

  defp root_schema_resolver(root) do
    {root, root.schema}
  end

  defp resolve_and_cache_remote_schema(root, url) do
    if root.refs[url] do
      root
    else
      fetch_and_resolve_remote_schema(root, url)
    end
  end

  defp fetch_and_resolve_remote_schema(root, url)
       when url == @current_draft_schema_url or url == @draft4_schema_url do
    resolve_remote_schema(root, url, Draft4.schema)
  end

  defp fetch_and_resolve_remote_schema(root, url) do
    resolve_remote_schema(root, url, remote_schema_resolver().(url))
  end

  defp resolve_remote_schema(root, url, remote_schema) do
    root = root_with_ref(root, url, remote_schema)
    resolved_root = resolve_root(%{root | schema: remote_schema})
    root = %{root | refs: resolved_root.refs}
    root_with_ref(root, url, resolved_root.schema)
  end

  defp root_with_ref(root, url, ref) do
    %{root | refs: Map.put(root.refs, url, ref)}
  end

  defp remote_schema_resolver do
    Application.get_env(:ex_json_schema, :remote_schema_resolver)
  end

  defp assert_reference_valid(resolver, root, ref) do
    case resolver.(root) do
      {_, nil} -> raise InvalidSchemaError, message: "reference #{ref} could not be resolved"
      _ -> :ok
    end
  end

  defp sanitize_properties(schema) do
    if Enum.any?(~w(patternProperties additionalProperties), &Map.has_key?(schema, &1)) and
         not Map.has_key?(schema, "properties") do
      Map.put(schema, "properties", %{})
    else
      schema
    end
  end

  defp sanitize_items(schema) do
    if Map.has_key?(schema, "items") and not Map.has_key?(schema, "additionalItems") do
      Map.put(schema, "additionalItems", true)
    else
      schema
    end
  end

  defp unescaped_ref_segments(ref) do
    ref
    |> String.split("/")
    |> Enum.map(fn segment ->
      segment
      |> String.replace("~0", "~")
      |> String.replace("~1", "/")
      |> URI.decode()
    end)
  end

  defp meta?(schema) do
    String.starts_with?(Map.get(schema, "id", ""), @draft4_schema_url)
  end
end
lib/ex_json_schema/schema.ex
0.731251
0.450239
schema.ex
starcoder
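# A minimal sketch of ExJsonSchema.Schema.resolve/1 above, assuming the schema
# has already been decoded from JSON into Elixir maps. resolve/1 validates the
# schema against the draft 4 meta-schema and replaces "$ref" strings with
# resolver functions.
schema = %{
  "type" => "object",
  "properties" => %{"name" => %{"$ref" => "#/definitions/short_string"}},
  "definitions" => %{"short_string" => %{"type" => "string", "maxLength" => 10}}
}

root = ExJsonSchema.Schema.resolve(schema)
# `root` is an %ExJsonSchema.Schema.Root{} ready to be passed to
# ExJsonSchema.Validator.validate/2.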
defmodule OMG.Watcher.Web.Serializer.Response do
  @moduledoc """
  Serializes the response into the expected result/data format.
  """

  @type response_result_t :: :success | :error

  @doc """
  Appends the result of the operation to the response data, forming the standard API response structure.
  """
  @spec serialize(any(), response_result_t()) :: %{result: response_result_t(), data: map()}
  def serialize(data, result)
  def serialize(data, :success), do: data |> clean_artifacts() |> to_response(:success)
  def serialize(data, :error), do: data |> to_response(:error)

  defp to_response(data, result), do: %{result: result, data: data}

  @doc """
  Decodes the specified keys in a map from hex to binary.
  """
  @spec decode16(map(), list()) :: map()
  def decode16(data, keys) do
    keys
    |> Enum.filter(&Map.has_key?(data, &1))
    |> Enum.into(
      %{},
      fn key ->
        value = data[key]

        case is_binary(value) && Base.decode16(value, case: :mixed) do
          {:ok, newvalue} -> {key, newvalue}
          _ -> {key, value}
        end
      end
    )
    |> (&Map.merge(data, &1)).()
  end

  @doc """
  Removes or encodes fields in the response that cannot be serialized to an API response.
  By default, it:
   * encodes all binary values to hex
   * removes unloaded Ecto association values
   * removes metadata fields
  """
  @spec clean_artifacts(any()) :: any()
  def clean_artifacts(response)

  def clean_artifacts(list) when is_list(list) do
    list |> Enum.map(&clean_artifacts/1)
  end

  def clean_artifacts(map_or_struct) when is_map(map_or_struct) do
    map_or_struct
    |> to_map()
    |> Enum.filter(fn {_k, v} -> Ecto.assoc_loaded?(v) end)
    |> Enum.map(fn {k, v} -> {k, clean_artifacts(v)} end)
    |> Map.new()
  end

  def clean_artifacts(bin) when is_binary(bin), do: Base.encode16(bin)
  def clean_artifacts(value), do: value

  defp to_map(struct) do
    if(Map.has_key?(struct, :__struct__), do: struct |> Map.from_struct(), else: struct)
    |> Map.delete(:__meta__)
  end
end
apps/omg_watcher/lib/web/serializers/response.ex
0.843557
0.5083
response.ex
starcoder
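# Behaviour sketch for the serializer above: binaries are hex-encoded on the
# way out, and decode16/2 reverses that for selected keys on the way in.
alias OMG.Watcher.Web.Serializer.Response

Response.serialize(%{txhash: <<0xAB, 0xCD>>}, :success)
# => %{result: :success, data: %{txhash: "ABCD"}}

Response.decode16(%{"hash" => "ABCD", "other" => 1}, ["hash"])
# => %{"hash" => <<0xAB, 0xCD>>, "other" => 1}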
defmodule FileType do
  @moduledoc """
  Detect the MIME type of a file based on its content.
  """

  import FileType.Utils.Hex

  @required_bytes 262

  @enforce_keys [:ext, :mime]
  defstruct [:ext, :mime]

  @type ext :: binary()
  @type mime :: binary()
  @type t :: {ext(), mime()}
  @type error :: File.posix() | :unrecognized
  @type result :: {:ok, t()} | {:error, error()}

  @doc """
  This is the same as `from_io/1`, except that it will open and close a file
  for you.

  ## Examples

      iex> FileType.from_path("profile.png")
      {:ok, {"png", "image/png"}}

      iex> FileType.from_path("contract.docx")
      {:ok, {"docx", "application/vnd.openxmlformats-officedocument.wordprocessingml.document"}}

      iex> FileType.from_path("example.txt")
      {:error, :unrecognized}

      iex> FileType.from_path("does-not-exist.png")
      {:error, :enoent}

  """
  @spec from_path(Path.t()) :: result()
  def from_path(path) when is_binary(path) do
    case File.open(path, [:read, :binary], &from_io/1) do
      {:ok, result} -> result
      {:error, reason} -> {:error, reason}
    end
  end

  @doc """
  Determines a MIME type from an IO device.

  ## Examples

      iex> {:ok, io} = File.open("profile.png", [:read, :binary])
      {:ok, #PID<0.109.0>}

      iex> FileType.from_io(io)
      {:ok, {"png", "image/png"}}

  """
  @spec from_io(IO.device()) :: result()
  def from_io(io) do
    with {:ok, data} <- read(io, @required_bytes),
         {:ok, type} <- detect(io, data) do
      {:ok, type}
    else
      :eof -> {:error, :unrecognized}
      {:error, reason} -> {:error, reason}
    end
  end

  @doc """
  Format an error returned by this library.

  ## Examples

      iex> FileType.format_error(:unrecognized)
      "does not match any known format"

      iex> FileType.format_error(:enoent)
      "no such file or directory"

  """
  @spec format_error(error()) :: binary()
  def format_error(:unrecognized) do
    "does not match any known format"
  end

  def format_error(other) do
    other |> :file.format_error() |> to_string()
  end

  defp read(io, position \\ 0, size) do
    with {:ok, data} <- :file.pread(io, position, size) do
      case FileType.ID3.position(data) do
        0 -> {:ok, data}
        position -> read(io, position, size)
      end
    end
  end

  defp detect(io, ~h"504b0304" <> _) do
    FileType.Zip.detect(io)
  end

  defp detect(io, ~h"d0cf11e0a1b11ae1" <> _) do
    FileType.CFB.detect(io)
  end

  defp detect(_io, data) do
    case FileType.Magic.run(data) do
      nil -> {:error, :unrecognized}
      type -> {:ok, type}
    end
  end
end
lib/file_type.ex
0.787605
0.422981
file_type.ex
starcoder
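# A small sketch wiring the FileType API above into a validation flow;
# "upload.bin" is a placeholder path.
case FileType.from_path("upload.bin") do
  {:ok, {ext, mime}} -> IO.puts("detected #{mime} (.#{ext})")
  {:error, reason} -> IO.puts("rejected: " <> FileType.format_error(reason))
end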
defmodule Isbndbex do
  use GenServer

  alias Isbndbex.Api, as: Api

  @doc """
  Starts a process with the given API `key`.
  """
  def start(key), do: GenServer.start_link(__MODULE__, key)

  @doc """
  Sends a message to the process `pid` to get the book with the given `id`.
  The `id` can be the book's isbn10, isbn13 or the internal isbndb book id.
  """
  def get_book(pid, id), do: GenServer.call(pid, {:get_book, id})

  @doc """
  Sends a message to the process `pid` to get the details of the author with the given `id`.
  The `id` corresponds to the internal isbndb author id.
  """
  def get_author(pid, id), do: GenServer.call(pid, {:get_author, id})

  @doc """
  Sends a message to the process `pid` to get the details of the publisher with the given `id`.
  The `id` corresponds to the internal isbndb publisher id.
  """
  def get_publisher(pid, id), do: GenServer.call(pid, {:get_publisher, id})

  @doc """
  Sends a message to the process `pid` to get the details of the subject with the given `id`.
  The `id` corresponds to the internal isbndb subject id.
  """
  def get_subject(pid, id), do: GenServer.call(pid, {:get_subject, id})

  @doc """
  Sends a message to the process `pid` to get the details of the category with the given `id`.
  The `id` corresponds to the internal isbndb category id.
  """
  def get_category(pid, id), do: GenServer.call(pid, {:get_category, id})

  @doc """
  Sends a message to the process `pid` to get a list of the stores selling the book with the given `id`.
  The `id` can be the book's isbn10, isbn13 or the internal isbndb id.
  """
  def get_prices(pid, id), do: GenServer.call(pid, {:get_prices, id})

  @doc """
  Sends a message to the process `pid` to search the `index` for books matching `query`.
  The expected value of the `query` parameter changes depending on which `index` is consulted.
  """
  def get_books(pid, query, index), do: GenServer.call(pid, {:get_books, query, index})

  @doc """
  Sends a message to the process `pid` to search for authors who have names similar to `query`.
  """
  def get_authors(pid, query), do: GenServer.call(pid, {:get_authors, query})

  @doc """
  Sends a message to the process `pid` to search for publishers that have names similar to `query`.
  """
  def get_publishers(pid, query), do: GenServer.call(pid, {:get_publishers, query})

  @doc """
  Sends a message to the process `pid` to search for subjects that have names similar to `query`.
  """
  def get_subjects(pid, query), do: GenServer.call(pid, {:get_subjects, query})

  @doc """
  Sends a message to the process `pid` to search for categories that have names similar to `query`.
""" def get_categories(pid, query), do: GenServer.call(pid, {:get_categories, query}) def handle_call({:get_book, id}, _, key) , do: {:reply, Api.get_book(key, id) , key} def handle_call({:get_author, id}, _, key) , do: {:reply, Api.get_author(key, id) , key} def handle_call({:get_publisher, id}, _, key), do: {:reply, Api.get_publisher(key, id), key} def handle_call({:get_subject, id}, _, key) , do: {:reply, Api.get_subject(key, id) , key} def handle_call({:get_category, id}, _, key) , do: {:reply, Api.get_category(key, id) , key} def handle_call({:get_prices, id}, _, key) , do: {:reply, Api.get_prices(key, id) , key} def handle_call({:get_books, query, index}, _, key), do: {:reply, Api.get_books(key , query, index), key} def handle_call({:get_authors , query}, _, key) , do: {:reply, Api.get_authors(key , query), key} def handle_call({:get_publishers, query}, _, key) , do: {:reply, Api.get_publishers(key, query), key} def handle_call({:get_subjects , query}, _, key) , do: {:reply, Api.get_subjects(key , query), key} def handle_call({:get_categories, query}, _, key) , do: {:reply, Api.get_categories(key, query), key} end
lib/isbndbex.ex
0.86575
0.682157
isbndbex.ex
starcoder
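# Usage sketch for the Isbndbex client above. The API key is a placeholder,
# and the :title index name is an assumption about the isbndb.com API; see
# Isbndbex.Api for the indexes it actually supports.
{:ok, pid} = Isbndbex.start("YOUR_ISBNDB_KEY")
book = Isbndbex.get_book(pid, "9780306406157")
matches = Isbndbex.get_books(pid, "functional programming", :title)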
defmodule ExPcap.Binaries do
  @moduledoc """
  This module provides utility functions for dealing with binaries.
  """

  @doc """
  Converts a list of bytes to a binary.

  Ideally, this would be replaced by a standard Elixir function, but I have
  not been able to find such a function in the standard library.

  ## Examples

      iex> ExPcap.Binaries.to_binary([1, 2, 3, 4])
      <<1, 2, 3, 4>>
  """
  @spec to_binary(list) :: binary
  def to_binary(list) do
    to_binary(list, <<>>)
  end

  @doc """
  Moves the contents of the list to the end of the binary.

  This will recur until it reaches the degenerate case and returns the
  accumulator (binary).

  ## Examples

      iex> ExPcap.Binaries.to_binary([3, 4], <<1, 2>>)
      #<<1, 2, 3>>
      #and then
      <<1, 2, 3, 4>>
  """
  @spec to_binary(list, binary) :: binary
  def to_binary([], acc) do
    acc
  end

  def to_binary(list, acc) do
    [h | t] = list
    to_binary(t, acc <> <<h>>)
  end

  @doc """
  Converts a binary to a list of bytes.

  ## Examples

      iex> ExPcap.Binaries.to_list(<<1, 2, 3, 4>>)
      [1, 2, 3, 4]
  """
  @spec to_list(binary) :: list
  def to_list(b) do
    to_list(b, [])
  end

  @doc """
  Moves the bytes from the binary to the list. The order of the bytes will be
  reversed until the degenerate case is reached.

  This will recur until it reaches the degenerate case and returns the
  accumulator (list).

  ## Examples

      iex> ExPcap.Binaries.to_list(<<3, 4>>, [2, 1])
      #[3, 2, 1]
      #and then
      #[4, 3, 2, 1]
      #and then
      [1, 2, 3, 4]
  """
  @spec to_list(binary, list) :: list
  def to_list(<<>>, acc) do
    Enum.reverse acc
  end

  def to_list(b, acc) do
    <<a :: size(8), rest :: binary>> = b
    to_list(rest, [a | acc])
  end

  @doc """
  Reverses the contents of the first binary and prepends them to the second
  binary.

  This will recur until it reaches the degenerate case and returns the
  accumulator.

  ## Examples

      iex> ExPcap.Binaries.reverse_binary(<<3, 4>>, <<2, 1>>)
      #<<3, 2, 1>>
      #and then
      <<4, 3, 2, 1>>
  """
  @spec reverse_binary(binary, binary) :: binary
  def reverse_binary(<<>>, acc) do
    acc
  end

  def reverse_binary(<<h :: bytes-size(1), t :: binary>>, acc) do
    reverse_binary(t, h <> acc)
  end

  @doc """
  Reverses the bytes in the binary.

  ## Examples

      iex> ExPcap.Binaries.reverse_binary(<<1, 2, 3, 4>>)
      <<4, 3, 2, 1>>
  """
  @spec reverse_binary(binary) :: binary
  def reverse_binary(b) do
    reverse_binary(b, <<>>)
  end

  @doc """
  Converts the first 4 bits of the binary to an unsigned integer.

  ## Examples

      iex> ExPcap.Binaries.to_uint4(<<0xf :: size(4)>>)
      15
  """
  @spec to_uint4(binary) :: non_neg_integer
  def to_uint4(b) do
    <<n :: unsigned-integer-size(4)>> = b
    n
  end

  @doc """
  Converts the first 16 bits of the binary to an unsigned integer.

  ## Examples

      iex> ExPcap.Binaries.to_uint16(<<255, 255>>)
      65535
  """
  @spec to_uint16(binary) :: non_neg_integer
  def to_uint16(b) do
    <<n :: unsigned-integer-size(16)>> = b
    n
  end

  @doc """
  Converts the first 32 bits of the binary to an unsigned integer.

  ## Examples

      iex> ExPcap.Binaries.to_uint32(<<255, 255, 255, 255>>)
      4294967295
  """
  @spec to_uint32(binary) :: non_neg_integer
  def to_uint32(b) do
    <<n :: unsigned-integer-size(32)>> = b
    n
  end

  @doc """
  Converts the first 32 bits of the binary to a signed integer.

  ## Examples

      iex> ExPcap.Binaries.to_int32(<<255, 255, 255, 255>>)
      -1
  """
  @spec to_int32(binary) :: integer
  def to_int32(b) do
    <<n :: signed-integer-size(32)>> = b
    n
  end

  @doc """
  Converts a binary to a string that shows the bytes in the binary.
  The typical display of a binary truncates the bytes; the intent here is to
  show the entire contents of the binary.

  ## Examples

      iex> ExPcap.Binaries.to_string(<<1, 2, 3, 4>>)
      "<<1, 2, 3, 4>>"
  """
  @spec to_string(binary) :: String.t
  def to_string(b) do
    # Use limit: :infinity so the whole binary is shown; the default inspect
    # limit would truncate long binaries.
    Inspect.Algebra.to_doc(b, %Inspect.Opts{limit: :infinity})
    |> Inspect.Algebra.format(80)
    |> IO.iodata_to_binary()
  end

  @doc """
  Converts a binary to a 'raw' representation of the bytes.

  ## Examples

      iex> ExPcap.Binaries.to_raw(<<1, 2, 3, 4>>)
      #<<1, 2, 3, 4>>
      "... redacted ..."
  """
  @spec to_raw(binary) :: String.t
  def to_raw(_b) do
    # to_string(b)
    "... redacted ..."
  end

  @doc """
  Converts a binary to a hex representation.

  This differs from 'Base.encode16' in that it adds the leading 0x prior to
  the hex value.

  Note that the return type could be narrowed here to only include 0-9 and
  A-F, but there is no need to do that right now.

  ## Examples

      iex> ExPcap.Binaries.to_hex(<<255, 0>>)
      "0xFF00"
  """
  @spec to_hex(binary) :: String.t
  def to_hex(b) do
    "0x" <> Base.encode16(b)
  end
end
lib/expcap/binaries.ex
0.718496
0.644882
binaries.ex
starcoder
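# Round-trip sketch for the conversion helpers above.
b = ExPcap.Binaries.to_binary([0xDE, 0xAD])  # <<222, 173>>
ExPcap.Binaries.to_list(b)                   # [222, 173]
ExPcap.Binaries.reverse_binary(b)            # <<173, 222>>
ExPcap.Binaries.to_uint16(b)                 # 57005
ExPcap.Binaries.to_hex(b)                    # "0xDEAD"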
defmodule NarouEx.Narou.API.Queries do
  @moduledoc """
  Data representation of API query strings.
  """

  @type user_id :: pos_integer()
  @type user_ids :: list(user_id) | []

  @default_response_format :json

  defstruct(
    gzip: 5,
    out: @default_response_format,
    of: nil,
    lim: 20,
    st: 1,
    opt: :weekly,
    order: :new,
    userid: nil
  )

  @typep of_element() ::
           :t | :n | :u | :w | :s | :bg | :g | :k | :gf | :gl | :nt | :e | :ga | :l | :ti | :i |
           :ir | :ib | :ig | :iz | :it | :p | :gp | :dp | :wp | :mp | :qp | :yp | :f | :im | :r |
           :a | :ah | :sa | :ka | :nu | :ua

  @typep order_element ::
           :new | :favnovelcnt | :reviewcnt | :hyoka | :hyokaasc | :dailypoint | :weeklypoint |
           :monthlypoint | :quarterpoint | :yearlypoint | :impressioncnt | :hyokacnt |
           :hyokacntasc | :weekly | :lengthdesc | :lengthasc | :ncodedesc | :old

  @typep opt_element :: :weekly

  @typep output_format :: :yaml | :json | :php

  @type t() :: %__MODULE__{
          gzip: 1..5 | nil,
          out: output_format(),
          of: String.t() | nil,
          lim: 1..500,
          st: 1..2000,
          opt: opt_element() | nil,
          order: order_element(),
          userid: String.t() | nil
        }

  @doc """
  ## Examples
  ```
  iex> %NarouEx.Narou.API.Queries{}
  ...> |> NarouEx.Narou.API.Queries.encode_of([:t, :n])
  %NarouEx.Narou.API.Queries{
    gzip: 5,
    lim: 20,
    of: "t-n",
    opt: :weekly,
    order: :new,
    out: :json,
    st: 1,
    userid: nil
  }
  ```
  """
  @spec encode_of(__MODULE__.t(), list(of_element())) :: __MODULE__.t()
  def encode_of(queries, of_elements)
      when is_struct(queries, __MODULE__) and is_list(of_elements) do
    of_string =
      of_elements
      |> Enum.map(&Atom.to_string/1)
      |> Enum.join("-")

    %{queries | of: of_string}
  end

  @doc """
  ## Examples
  ```
  iex> queries = %NarouEx.Narou.API.Queries{}
  iex> queries |> NarouEx.Narou.API.Queries.encode_userid([1])
  %NarouEx.Narou.API.Queries{
    gzip: 5,
    lim: 20,
    of: nil,
    opt: :weekly,
    order: :new,
    out: :json,
    st: 1,
    userid: "1"
  }
  iex> queries |> NarouEx.Narou.API.Queries.encode_userid([1, 1000])
  %NarouEx.Narou.API.Queries{
    gzip: 5,
    lim: 20,
    of: nil,
    opt: :weekly,
    order: :new,
    out: :json,
    st: 1,
    userid: "1-1000"
  }
  iex> queries |> NarouEx.Narou.API.Queries.encode_userid(1)
  %NarouEx.Narou.API.Queries{
    gzip: 5,
    lim: 20,
    of: nil,
    opt: :weekly,
    order: :new,
    out: :json,
    st: 1,
    userid: "1"
  }
  ```
  """
  @spec encode_userid(__MODULE__.t(), user_id() | user_ids()) :: __MODULE__.t()
  def encode_userid(queries, user_ids) when is_list(user_ids) do
    userid_string = user_ids |> Enum.join("-")
    %{queries | userid: userid_string}
  end

  def encode_userid(queries, user_id) when is_integer(user_id) do
    %{queries | userid: Integer.to_string(user_id)}
  end
end
lib/narou/api/queries.ex
0.728748
0.747662
queries.ex
starcoder
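# Sketch: building a Queries struct with the encoders above and rendering it
# as a URL query string. The URI.encode_query/1 rendering step is an
# assumption; the module itself only builds the struct.
query =
  %NarouEx.Narou.API.Queries{}
  |> NarouEx.Narou.API.Queries.encode_of([:t, :n, :w])
  |> NarouEx.Narou.API.Queries.encode_userid([1, 1000])

query
|> Map.from_struct()
|> Enum.reject(fn {_k, v} -> is_nil(v) end)
|> URI.encode_query()
# => "gzip=5&lim=20&of=t-n-w&opt=weekly&order=new&out=json&st=1&userid=1-1000"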
defmodule Optimal.Type do
  @moduledoc """
  Exposes functions for validating types, and determining if a value matches a type.
  """

  @scalar_types [
    :any,
    :atom,
    :binary,
    :bitstring,
    :boolean,
    :float,
    :function,
    :int,
    :integer,
    :keyword,
    :list,
    :map,
    nil,
    :number,
    :pid,
    :port,
    :reference,
    :regex,
    :string,
    :struct,
    :tuple
  ]

  @spec validate_types(
          types :: Keyword.t(),
          field_name :: atom,
          opts :: Keyword.t(),
          schema :: Optimal.schema()
        ) :: [
          {atom, String.t()}
        ]
  def validate_types(types, field_name, _opts, _schema) do
    Enum.reduce(types, [], fn field_and_type, errors ->
      type =
        case field_and_type do
          {_field, type} -> type
          _ -> :any
        end

      if valid_type?(type) do
        errors
      else
        # Prepend so earlier errors are not dropped.
        [{field_name, "No such Optimal type: #{inspect(type)}"} | errors]
      end
    end)
  end

  @spec matches_type?(type :: term, value :: term) :: boolean
  def matches_type?(types, value) when is_list(types), do: Enum.any?(types, &matches_type?(&1, value))
  def matches_type?(:any, _), do: true

  def matches_type?({:keyword, value_type}, value) do
    matches_type?(:keyword, value) and Enum.all?(value, fn {_k, v} -> matches_type?(value_type, v) end)
  end

  def matches_type?({:list, type}, value) do
    matches_type?(:list, value) and Enum.all?(value, &matches_type?(type, &1))
  end

  def matches_type?({:tuple, size, types}, value) when is_list(types) do
    matches_type?(:tuple, value) and tuple_size(value) == size and
      value |> Tuple.to_list() |> Enum.all?(&matches_type?(types, &1))
  end

  def matches_type?({:tuple, size, type}, value) do
    types = List.duplicate(type, size)
    matches_type?({:tuple, size, types}, value)
  end

  # Check `matches_type?(:tuple, value)` first so `tuple_size/1` is never
  # called on a non-tuple value (`and` short-circuits).
  def matches_type?({:tuple, size}, value) when is_integer(size),
    do: matches_type?(:tuple, value) and tuple_size(value) == size

  def matches_type?({:tuple, types}, value) when is_list(types),
    do: matches_type?(:tuple, value) and matches_type?({:tuple, tuple_size(value), types}, value)

  def matches_type?({:tuple, types}, value) when is_tuple(types) do
    if matches_type?(:tuple, value) and tuple_size(types) == tuple_size(value) do
      value = Tuple.to_list(value)
      types = Tuple.to_list(types)

      [types, value]
      |> List.zip()
      |> Enum.reduce(true, fn {type, value}, acc ->
        acc and matches_type?(type, value)
      end)
    else
      false
    end
  end

  def matches_type?({:tuple, type}, value) do
    matches_type?(:tuple, value) and value |> Tuple.to_list() |> Enum.all?(&matches_type?(type, &1))
  end

  def matches_type?({:function, arity}, value) when is_function(value, arity), do: true
  def matches_type?({:function, _}, _), do: false
  def matches_type?({:struct, struct}, %struct{}), do: true
  def matches_type?({:struct, _}, _), do: false
  def matches_type?(%struct{}, %struct{}), do: true
  def matches_type?(%_{}, _), do: false
  def matches_type?({:enum, list}, value), do: value in list

  # Below this line is only scalar types. Do not move things below/above this line.
def matches_type?(type, _) when not (type in @scalar_types), do: raise("Unreachable: no type #{inspect(type)}") def matches_type?(:int, value) when is_integer(value), do: true def matches_type?(:integer, value) when is_integer(value), do: true def matches_type?(:bitstring, value) when is_bitstring(value), do: true def matches_type?(:string, value) when is_binary(value), do: true def matches_type?(:binary, value) when is_binary(value), do: true def matches_type?(:float, value) when is_float(value), do: true def matches_type?(:keyword, value), do: Keyword.keyword?(value) def matches_type?(:list, value) when is_list(value), do: true def matches_type?(:boolean, value) when is_boolean(value), do: true def matches_type?(:atom, value) when is_atom(value), do: true def matches_type?(nil, nil), do: true def matches_type?(:function, value) when is_function(value), do: true def matches_type?(:map, value) when is_map(value), do: true def matches_type?(:number, value) when is_number(value), do: true def matches_type?(:pid, value) when is_pid(value), do: true def matches_type?(:port, value) when is_port(value), do: true def matches_type?(:reference, value) when is_reference(value), do: true def matches_type?(:tuple, value) when is_tuple(value), do: true def matches_type?(:struct, %_{}), do: true def matches_type?(:regex, %Regex{}), do: true def matches_type?(_, _), do: false @spec valid_type?(term) :: boolean def valid_type?(:any), do: true def valid_type?(types) when is_list(types), do: Enum.all?(types, &valid_type?/1) def valid_type?({:function, i}) when is_integer(i) and i >= 0, do: true def valid_type?({:keyword, type}), do: valid_type?(type) def valid_type?({:list, type}), do: valid_type?(type) def valid_type?({:struct, module}) when is_atom(module), do: true def valid_type?({:enum, values}) when is_list(values), do: true def valid_type?({:tuple, size, types}) when is_integer(size) and is_list(types) and length(types) == size, do: Enum.all?(types, &valid_type?/1) def valid_type?({:tuple, _size, type}), do: valid_type?(type) def valid_type?({:tuple, size}) when is_integer(size) and size >= 0, do: true def valid_type?({:tuple, types}) when is_list(types), do: Enum.all?(types, &valid_type?/1) def valid_type?({:tuple, types}) when is_tuple(types), do: types |> Tuple.to_list() |> Enum.all?(&valid_type?/1) def valid_type?({:tuple, type}), do: valid_type?(type) def valid_type?(%_{}), do: true def valid_type?(type) when type in @scalar_types, do: true def valid_type?(_), do: false # There may be missing cases here. @spec merge(left :: term, right :: term) :: term def merge(same, same), do: same def merge({:enum, left}, {:enum, right}), do: {:enum, left ++ right} def merge(left, right), do: [left, right] end
lib/optimal/type.ex
0.84124
0.613179
type.ex
starcoder
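# Behaviour sketch for Optimal.Type.matches_type?/2 above.
Optimal.Type.matches_type?({:list, :int}, [1, 2, 3])           # => true
Optimal.Type.matches_type?({:tuple, {:atom, :int}}, {:ok, 1})  # => true
Optimal.Type.matches_type?({:keyword, :string}, [a: "x"])      # => true
Optimal.Type.matches_type?({:enum, [:a, :b]}, :c)              # => false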
defmodule Expert do
  @engine :beer_expert

  def start do
    {:ok, _} = :seresye.start(@engine)
    Facts.add_to(@engine)
    Rules.add_to(@engine)
  end

  def stop do
    :ok = :seresye.stop(@engine)
  end

  def tell(beer_name, facts) when is_list(facts) do
    for fact <- facts, do: tell(beer_name, fact)
  end

  def tell(beer_name, fact) when is_tuple(fact) do
    # Load prior knowledge about given beer
    existing_facts = :seresye.query_kb(@engine, {:beer, beer_name, :'_'})
    :io.format("existing_facts: ~p\n", [existing_facts])

    # Remove existing facts matching given key
    remove_existing(beer_name, fact)

    # Find out what Expert has already figured out to be a match when the last fact was added
    existing_matches = :seresye.query_kb(@engine, {:beer_match, beer_name, :'_'})
    :io.format("existing_matches: ~p\n", [existing_matches])

    # Remove them from the engine as we are going to generate new matches
    remove_existing_matches(beer_name)

    # Tell Expert about the new fact, which may generate new matches
    :seresye.assert(@engine, {:beer, beer_name, fact})

    # If this was the first fact, then all matches are valid.
    # Otherwise we need to find the intersection of the results like so:
    if Enum.count(existing_facts) > 0 do
      # Find the new matches, from the last fact execution
      new_matches = :seresye.query_kb(@engine, {:beer_match, beer_name, :'_'})
      :io.format("new_matches: ~p\n", [new_matches])

      # Remove them as we can now refine them based on existing matches from other facts
      remove_existing_matches(beer_name)

      # We intersect existing matches against the new matches
      e = MapSet.new(existing_matches)
      n = MapSet.new(new_matches)
      r = MapSet.intersection(e, n)

      # Store them in the engine!
      for new <- r, do: :seresye.assert(@engine, new)
    end
  end

  def ask(beer_name) do
    :seresye.query_kb(@engine, {:beer_match, beer_name, :'_'})
  end

  defp remove_existing_matches(beer_name) do
    matches = :seresye.query_kb(@engine, {:beer_match, beer_name, :'_'})
    for old_match <- matches do
      :io.format("retract existing match: ~p\n", [old_match])
      :seresye.retract(@engine, old_match)
    end
  end

  defp remove_existing(beer_name, {key, _value}) do
    existing_facts = :seresye.query_kb(@engine, {:beer, beer_name, :'_'})
    # The pattern in the generator both skips non-matching facts and pins
    # `key`, so only facts with the same key are retracted.
    for {:beer, _, {^key, _}} = old_fact <- existing_facts do
      :io.format("retracting fact: ~p\n", [old_fact])
      :seresye.retract(@engine, old_fact)
    end
  end
end

defmodule Rules do
  require Logger

  def add_to(engine) do
    :seresye.add_rules(engine, [
      {:'Elixir.Rules', :abv_rule},
      {:'Elixir.Rules', :ibu_rule},
      {:'Elixir.Rules', :category_rule},
      {:'Elixir.Rules', :sub_category_rule}])
  end

  def abv_rule(
        engine,
        {:beer, beerName, {:abv, abv}},
        {:beer_style, styleNumber, styleName, {:abv, abvLower, abvUpper}})
      when abvLower <= abv and abv <= abvUpper do
    Logger.debug("abv_categorise => Expert thinks #{beerName} could be a #{styleName} as abv #{abv} is between #{abvLower} & #{abvUpper}")
    :seresye_engine.assert(engine, {:beer_match, beerName, {:beer_style, styleNumber, styleName}})
  end

  def ibu_rule(
        engine,
        {:beer, beerName, {:ibu, ibu}},
        {:beer_style, styleNumber, styleName, {:ibu, ibuLower, ibuUpper}})
      when ibuLower <= ibu and ibu <= ibuUpper do
    Logger.debug("ibu_categorise => Expert thinks #{beerName} could be a #{styleName} as ibu #{ibu} is between #{ibuLower} & #{ibuUpper}")
    :seresye_engine.assert(engine, {:beer_match, beerName, {:beer_style, styleNumber, styleName}})
  end

  # Note: the fact base in Facts spells the style key :catgeory, so this rule
  # matches that spelling deliberately.
  def category_rule(
        engine,
        {:beer, beerName, {:category, category}},
        {:beer_style, styleNumber, styleName, {:catgeory, category}}) do
    Logger.debug("beer_category => Expert thinks #{beerName} could be a #{styleName} as 
category #{category} is a match") :seresye_engine.assert(engine, {:beer_match, beerName, {:beer_style, styleNumber, styleName}}) end def sub_category_rule( engine, {:beer, beerName, {:sub_category, sub_category}}, {:beer_style, styleNumber, styleName, {:sub_category, sub_category}}) do Logger.debug("beer_category => Expert thinks #{beerName} could be a #{styleName} as subcategory #{sub_category} is a match") :seresye_engine.assert(engine, {:beer_match, beerName, {:beer_style, styleNumber, styleName}}) end end defmodule Facts do @facts [ {:beer_style, 1, '<NAME>', {:catgeory, "Ale"}}, {:beer_style, 1, '<NAME>', {:sub_category, "Wheat Beer"}}, {:beer_style, 1, '<NAME>', {:original_gravity, 1.026, 1.036}}, {:beer_style, 1, '<NAME>', {:final_gravity, 1.006, 1.009}}, {:beer_style, 1, '<NAME>', {:abv, 2.5, 3.6}}, {:beer_style, 1, '<NAME>', {:ibu, 3, 12}}, {:beer_style, 1, '<NAME>', {:srm, 2, 4}}, {:beer_style, 1, '<NAME>', {:wiki, 'http://en.wikipedia.org/wiki/Berliner_Weisse'}}, {:beer_style, 4, 'Belgian White', {:catgeory, "Ale"}}, {:beer_style, 4, 'Belgian White', {:sub_category, "Wheat Beer"}}, {:beer_style, 4, 'Belgian White', {:original_gravity, 1.042, 1.055}}, {:beer_style, 4, 'Belgian White', {:final_gravity, 1.008, 1.012}}, {:beer_style, 4, 'Belgian White', {:abv, 4.5, 5.5}}, {:beer_style, 4, 'Belgian White', {:ibu, 15, 28}}, {:beer_style, 4, 'Belgian White', {:srm, 2, 4}}, {:beer_style, 4, 'Belgian White', {:wiki, 'http://en.wikipedia.org/wiki/Wheat_beer#Witbier'}}, {:beer_style, 7, 'American Wheat', {:catgeory, "Ale"}}, {:beer_style, 7, 'American Wheat', {:sub_category, "Wheat Beer"}}, {:beer_style, 7, 'American Wheat', {:original_gravity, 1.035, 1.055}}, {:beer_style, 7, 'American Wheat', {:final_gravity, 1.008, 1.018}}, {:beer_style, 7, 'American Wheat', {:abv, 3.5, 5.0}}, {:beer_style, 7, 'American Wheat', {:ibu, 5, 20}}, {:beer_style, 7, 'American Wheat', {:srm, 2, 8}}, {:beer_style, 7, 'American Wheat', {:wiki, 'http://www.brewingtechniques.com/library/backissues/issue1.1/bergen.html'}}, {:beer_style, 14, 'Weizenbier', {:catgeory, "Ale"}}, {:beer_style, 14, 'Weizenbier', {:sub_category, "Wheat Beer"}}, {:beer_style, 14, 'Weizenbier', {:original_gravity, 1.040, 1.056}}, {:beer_style, 14, 'Weizenbier', {:final_gravity, 1.008, 1.016}}, {:beer_style, 14, 'Weizenbier', {:abv, 4.3, 5.6}}, {:beer_style, 14, 'Weizenbier', {:ibu, 8, 15}}, {:beer_style, 14, 'Weizenbier', {:srm, 3, 9}}, {:beer_style, 14, 'Weizenbier', {:wiki, 'http://en.wikipedia.org/wiki/Weizenbier'}}, {:beer_style, 27, 'Dunkelweizen', {:catgeory, "Ale"}}, {:beer_style, 27, 'Dunkelweizen', {:sub_category, "Wheat Beer"}}, {:beer_style, 27, 'Dunkelweizen', {:original_gravity, 1.048, 1.056}}, {:beer_style, 27, 'Dunkelweizen', {:final_gravity, 1.008, 1.016}}, {:beer_style, 27, 'Dunkelweizen', {:abv, 4.5, 6.0}}, {:beer_style, 27, 'Dunkelweizen', {:ibu, 10, 15}}, {:beer_style, 27, 'Dunkelweizen', {:srm, 17, 23}}, {:beer_style, 27, 'Dunkelweizen', {:wiki, 'http://en.wikipedia.org/wiki/Dunkelweizen'}}, {:beer_style, 41, 'Weizenbock', {:catgeory, "Ale"}}, {:beer_style, 41, 'Weizenbock', {:sub_category, "Wheat Beer"}}, {:beer_style, 41, 'Weizenbock', {:original_gravity, 1.066, 1.080}}, {:beer_style, 41, 'Weizenbock', {:final_gravity, 1.016, 1.028}}, {:beer_style, 41, 'Weizenbock', {:abv, 6.5, 9.6}}, {:beer_style, 41, 'Weizenbock', {:ibu, 12, 25}}, {:beer_style, 41, 'Weizenbock', {:srm, 10, 30}}, {:beer_style, 41, 'Weizenbock', {:wiki, 'http://en.wikipedia.org/wiki/Weizenbock'}}, {:beer_style, 2, 'Lambic', {:catgeory, "Ale"}}, 
{:beer_style, 2, 'Lambic', {:sub_category, "Lambic & Sour"}}, {:beer_style, 2, 'Lambic', {:original_gravity, 1.044, 1.056}}, {:beer_style, 2, 'Lambic', {:final_gravity, 1.006, 1.012}}, {:beer_style, 2, 'Lambic', {:abv, 4.7, 6.4}}, {:beer_style, 2, 'Lambic', {:ibu, 5, 15}}, {:beer_style, 2, 'Lambic', {:srm, 4, 15}}, {:beer_style, 2, 'Lambic', {:wiki, 'http://en.wikipedia.org/wiki/Lambic'}}, {:beer_style, 5, 'Gueuze', {:catgeory, "Ale"}}, {:beer_style, 5, 'Gueuze', {:sub_category, "Lambic & Sour"}}, {:beer_style, 5, 'Gueuze', {:original_gravity, 1.044, 1.056}}, {:beer_style, 5, 'Gueuze', {:final_gravity, 1.006, 1.012}}, {:beer_style, 5, 'Gueuze', {:abv, 4.7, 6.4}}, {:beer_style, 5, 'Gueuze', {:ibu, 5, 15}}, {:beer_style, 5, 'Gueuze', {:srm, 4, 15}}, {:beer_style, 5, 'Gueuze', {:wiki, 'http://en.wikipedia.org/wiki/Gueuze'}}, {:beer_style, 8, 'Faro', {:catgeory, "Ale"}}, {:beer_style, 8, 'Faro', {:sub_category, "Lambic & Sour"}}, {:beer_style, 8, 'Faro', {:original_gravity, 1.040, 1.056}}, {:beer_style, 8, 'Faro', {:final_gravity, 1.006, 1.012}}, {:beer_style, 8, 'Faro', {:abv, 4.5, 5.5}}, {:beer_style, 8, 'Faro', {:ibu, 5, 15}}, {:beer_style, 8, 'Faro', {:srm, 4, 15}}, {:beer_style, 8, 'Faro', {:wiki, 'http://en.wikipedia.org/wiki/Lambic#Faro'}}, # Beer Fruit Beer has SRM N/A! Defaulting Range 0-100! {:beer_style, 15, 'Fruit Beer', {:catgeory, "Ale"}}, {:beer_style, 15, 'Fruit Beer', {:sub_category, "Lambic & Sour"}}, {:beer_style, 15, 'Fruit Beer', {:original_gravity, 1.040, 1.072}}, {:beer_style, 15, 'Fruit Beer', {:final_gravity, 1.008, 1.016}}, {:beer_style, 15, 'Fruit Beer', {:abv, 4.7, 7.0}}, {:beer_style, 15, 'Fruit Beer', {:ibu, 15, 21}}, {:beer_style, 15, 'Fruit Beer', {:srm, 0, 100}}, {:beer_style, 15, 'Fruit Beer', {:wiki, 'http://en.wikipedia.org/wiki/Lambic#Fruit'}}, {:beer_style, 28, 'Flanders Red', {:catgeory, "Ale"}}, {:beer_style, 28, 'Flanders Red', {:sub_category, "Lambic & Sour"}}, {:beer_style, 28, 'Flanders Red', {:original_gravity, 1.042, 1.060}}, {:beer_style, 28, 'Flanders Red', {:final_gravity, 1.008, 1.016}}, {:beer_style, 28, 'Flanders Red', {:abv, 4.0, 5.8}}, {:beer_style, 28, 'Flanders Red', {:ibu, 14, 25}}, {:beer_style, 28, 'Flanders Red', {:srm, 10, 16}}, {:beer_style, 28, 'Flanders Red', {:wiki, 'http://en.wikipedia.org/wiki/Flanders_red_ale'}}, {:beer_style, 42, 'Oud Bruin', {:catgeory, "Ale"}}, {:beer_style, 42, 'Oud Bruin', {:sub_category, "Lambic & Sour"}}, {:beer_style, 42, 'Oud Bruin', {:original_gravity, 1.042, 1.060}}, {:beer_style, 42, 'Oud Bruin', {:final_gravity, 1.008, 1.016}}, {:beer_style, 42, 'Oud Bruin', {:abv, 4.0, 6.5}}, {:beer_style, 42, 'Oud Bruin', {:ibu, 14, 30}}, {:beer_style, 42, 'Oud Bruin', {:srm, 12, 20}}, {:beer_style, 42, 'Oud Bruin', {:wiki, 'http://en.wikipedia.org/wiki/Oud_bruin'}}, {:beer_style, 3, 'Belgian Gold Ale', {:catgeory, "Ale"}}, {:beer_style, 3, 'Belgian Gold Ale', {:sub_category, "Belgian Ale"}}, {:beer_style, 3, 'Belgian Gold Ale', {:original_gravity, 1.065, 1.085}}, {:beer_style, 3, 'Belgian Gold Ale', {:final_gravity, 1.014, 1.020}}, {:beer_style, 3, 'Belgian Gold Ale', {:abv, 7.0, 9.0}}, {:beer_style, 3, 'Belgian Gold Ale', {:ibu, 25, 35}}, {:beer_style, 3, 'Belgian Gold Ale', {:srm, 4, 6}}, {:beer_style, 3, 'Belgian Gold Ale', {:wiki, 'http://en.wikipedia.org/wiki/Belgian_ale#Blonde_or_golden_ale'}}, {:beer_style, 6, 'Tripel', {:catgeory, "Ale"}}, {:beer_style, 6, 'Tripel', {:sub_category, "Belgian Ale"}}, {:beer_style, 6, 'Tripel', {:original_gravity, 1.070, 1.100}}, {:beer_style, 6, 'Tripel', {:final_gravity, 
1.016, 1.024}}, {:beer_style, 6, 'Tripel', {:abv, 7.0, 10.0}}, {:beer_style, 6, 'Tripel', {:ibu, 20, 30}}, {:beer_style, 6, 'Tripel', {:srm, 4, 7}}, {:beer_style, 6, 'Tripel', {:wiki, 'http://en.wikipedia.org/wiki/Trippel'}}, {:beer_style, 9, 'Saison', {:catgeory, "Ale"}}, {:beer_style, 9, 'Saison', {:sub_category, "Belgian Ale"}}, {:beer_style, 9, 'Saison', {:original_gravity, 1.052, 1.080}}, {:beer_style, 9, 'Saison', {:final_gravity, 1.010, 1.015}}, {:beer_style, 9, 'Saison', {:abv, 4.5, 8.1}}, {:beer_style, 9, 'Saison', {:ibu, 25, 40}}, {:beer_style, 9, 'Saison', {:srm, 4, 10}}, {:beer_style, 9, 'Saison', {:wiki, 'http://en.wikipedia.org/wiki/Saison'}}, {:beer_style, 16, 'Belgian Pale Ale', {:catgeory, "Ale"}}, {:beer_style, 16, 'Belgian Pale Ale', {:sub_category, "Belgian Ale"}}, {:beer_style, 16, 'Belgian Pale Ale', {:original_gravity, 1.040, 1.055}}, {:beer_style, 16, 'Belgian Pale Ale', {:final_gravity, 1.008, 1.013}}, {:beer_style, 16, 'Belgian Pale Ale', {:abv, 3.9, 5.6}}, {:beer_style, 16, 'Belgian Pale Ale', {:ibu, 20, 35}}, {:beer_style, 16, 'Belgian Pale Ale', {:srm, 4, 14}}, {:beer_style, 16, 'Belgian Pale Ale', {:wiki, 'http://www.homebrewtalk.com/wiki/index.php/Belgian_Pale_Ale'}}, {:beer_style, 29, 'Belgian Dark Ale', {:catgeory, "Ale"}}, {:beer_style, 29, 'Belgian Dark Ale', {:sub_category, "Belgian Ale"}}, {:beer_style, 29, 'Belgian Dark Ale', {:original_gravity, 1.065, 1.098}}, {:beer_style, 29, 'Belgian Dark Ale', {:final_gravity, 1.014, 1.024}}, {:beer_style, 29, 'Belgian Dark Ale', {:abv, 7.0, 12.0}}, {:beer_style, 29, 'Belgian Dark Ale', {:ibu, 25, 40}}, {:beer_style, 29, 'Belgian Dark Ale', {:srm, 7, 20}}, {:beer_style, 29, 'Belgian Dark Ale', {:wiki, 'http://en.wikipedia.org/wiki/Belgian_Strong_Dark_Ale'}}, {:beer_style, 43, 'Dubbel', {:catgeory, "Ale"}}, {:beer_style, 43, 'Dubbel', {:sub_category, "Belgian Ale"}}, {:beer_style, 43, 'Dubbel', {:original_gravity, 1.065, 1.085}}, {:beer_style, 43, 'Dubbel', {:final_gravity, 1.012, 1.018}}, {:beer_style, 43, 'Dubbel', {:abv, 3.2, 8.0}}, {:beer_style, 43, 'Dubbel', {:ibu, 20, 25}}, {:beer_style, 43, 'Dubbel', {:srm, 10, 20}}, {:beer_style, 43, 'Dubbel', {:wiki, 'http://en.wikipedia.org/wiki/Dubbel'}}, {:beer_style, 10, 'Pale Ale', {:catgeory, "Ale"}}, {:beer_style, 10, 'Pale Ale', {:sub_category, "Pale Ale"}}, {:beer_style, 10, 'Pale Ale', {:original_gravity, 1.043, 1.056}}, {:beer_style, 10, 'Pale Ale', {:final_gravity, 1.008, 1.016}}, {:beer_style, 10, 'Pale Ale', {:abv, 4.5, 5.5}}, {:beer_style, 10, 'Pale Ale', {:ibu, 20, 40}}, {:beer_style, 10, 'Pale Ale', {:srm, 4, 11}}, {:beer_style, 10, 'Pale Ale', {:wiki, 'http://en.wikipedia.org/wiki/Pale_ale'}}, {:beer_style, 17, 'American Pale Ale', {:catgeory, "Ale"}}, {:beer_style, 17, 'American Pale Ale', {:sub_category, "Pale Ale"}}, {:beer_style, 17, 'American Pale Ale', {:original_gravity, 1.045, 1.056}}, {:beer_style, 17, 'American Pale Ale', {:final_gravity, 1.010, 1.015}}, {:beer_style, 17, 'American Pale Ale', {:abv, 4.5, 5.7}}, {:beer_style, 17, 'American Pale Ale', {:ibu, 20, 40}}, {:beer_style, 17, 'American Pale Ale', {:srm, 4, 11}}, {:beer_style, 17, 'American Pale Ale', {:wiki, 'http://en.wikipedia.org/wiki/American_Pale_Ale'}}, {:beer_style, 30, 'India Pale Ale', {:catgeory, "Ale"}}, {:beer_style, 30, 'India Pale Ale', {:sub_category, "Pale Ale"}}, {:beer_style, 30, 'India Pale Ale', {:original_gravity, 1.050, 1.075}}, {:beer_style, 30, 'India Pale Ale', {:final_gravity, 1.012, 1.018}}, {:beer_style, 30, 'India Pale Ale', {:abv, 5.1, 7.6}}, {:beer_style, 
30, 'India Pale Ale', {:ibu, 40, 60}}, {:beer_style, 30, 'India Pale Ale', {:srm, 8, 14}}, {:beer_style, 30, 'India Pale Ale', {:wiki, 'http://en.wikipedia.org/wiki/India_Pale_Ale'}}, {:beer_style, 44, 'American Amber Ale', {:catgeory, "Ale"}}, {:beer_style, 44, 'American Amber Ale', {:sub_category, "Pale Ale"}}, {:beer_style, 44, 'American Amber Ale', {:original_gravity, 1.043, 1.056}}, {:beer_style, 44, 'American Amber Ale', {:final_gravity, 1.008, 1.016}}, {:beer_style, 44, 'American Amber Ale', {:abv, 4.5, 5.7}}, {:beer_style, 44, 'American Amber Ale', {:ibu, 20, 40}}, {:beer_style, 44, 'American Amber Ale', {:srm, 11, 18}}, {:beer_style, 44, 'American Amber Ale', {:wiki, 'http://en.wikipedia.org/wiki/Amber_lager'}}, {:beer_style, 18, 'Ordinary Bitter', {:catgeory, "Ale"}}, {:beer_style, 18, 'Ordinary Bitter', {:sub_category, "English Bitter"}}, {:beer_style, 18, 'Ordinary Bitter', {:original_gravity, 1.030, 1.038}}, {:beer_style, 18, 'Ordinary Bitter', {:final_gravity, 1.006, 1.012}}, {:beer_style, 18, 'Ordinary Bitter', {:abv, 3.0, 3.8}}, {:beer_style, 18, 'Ordinary Bitter', {:ibu, 20, 35}}, {:beer_style, 18, 'Ordinary Bitter', {:srm, 6, 12}}, {:beer_style, 18, 'Ordinary Bitter', {:wiki, 'http://en.wikipedia.org/wiki/Ordinary_bitter'}}, {:beer_style, 31, 'Special Bitter', {:catgeory, "Ale"}}, {:beer_style, 31, 'Special Bitter', {:sub_category, "English Bitter"}}, {:beer_style, 31, 'Special Bitter', {:original_gravity, 1.039, 1.045}}, {:beer_style, 31, 'Special Bitter', {:final_gravity, 1.006, 1.014}}, {:beer_style, 31, 'Special Bitter', {:abv, 3.7, 4.8}}, {:beer_style, 31, 'Special Bitter', {:ibu, 25, 40}}, {:beer_style, 31, 'Special Bitter', {:srm, 12, 14}}, {:beer_style, 31, 'Special Bitter', {:wiki, 'http://en.wikipedia.org/wiki/Bitter_(beer)'}}, {:beer_style, 45, 'Extra Special Bitter', {:catgeory, "Ale"}}, {:beer_style, 45, 'Extra Special Bitter', {:sub_category, "English Bitter"}}, {:beer_style, 45, 'Extra Special Bitter', {:original_gravity, 1.046, 1.065}}, {:beer_style, 45, 'Extra Special Bitter', {:final_gravity, 1.010, 1.018}}, {:beer_style, 45, 'Extra Special Bitter', {:abv, 3.7, 4.8}}, {:beer_style, 45, 'Extra Special Bitter', {:ibu, 30, 45}}, {:beer_style, 45, 'Extra Special Bitter', {:srm, 12, 14}}, {:beer_style, 45, 'Extra Special Bitter', {:wiki, 'http://en.wikipedia.org/wiki/Bitter_(beer)'}}, {:beer_style, 19, 'Scottish Light 60/-', {:catgeory, "Ale"}}, {:beer_style, 19, 'Scottish Light 60/-', {:sub_category, "Scottish Ale"}}, {:beer_style, 19, 'Scottish Light 60/-', {:original_gravity, 1.030, 1.035}}, {:beer_style, 19, 'Scottish Light 60/-', {:final_gravity, 1.006, 1.012}}, {:beer_style, 19, 'Scottish Light 60/-', {:abv, 2.8, 4.0}}, {:beer_style, 19, 'Scottish Light 60/-', {:ibu, 9, 20}}, {:beer_style, 19, 'Scottish Light 60/-', {:srm, 8, 17}}, {:beer_style, 19, 'Scottish Light 60/-', {:wiki, 'http://en.wikipedia.org/wiki/Scottish_beer'}}, {:beer_style, 32, 'Scottish Heavy 70/-', {:catgeory, "Ale"}}, {:beer_style, 32, 'Scottish Heavy 70/-', {:sub_category, "Scottish Ale"}}, {:beer_style, 32, 'Scottish Heavy 70/-', {:original_gravity, 1.035, 1.040}}, {:beer_style, 32, 'Scottish Heavy 70/-', {:final_gravity, 1.010, 1.014}}, {:beer_style, 32, 'Scottish Heavy 70/-', {:abv, 3.5, 4.1}}, {:beer_style, 32, 'Scottish Heavy 70/-', {:ibu, 12, 25}}, {:beer_style, 32, 'Scottish Heavy 70/-', {:srm, 10, 19}}, {:beer_style, 32, 'Scottish Heavy 70/-', {:wiki, 'http://en.wikipedia.org/wiki/Scottish_beer'}}, {:beer_style, 46, 'Scottish Export 80/-', {:catgeory, "Ale"}}, {:beer_style, 
46, 'Scottish Export 80/-', {:sub_category, "Scottish Ale"}}, {:beer_style, 46, 'Scottish Export 80/-', {:original_gravity, 1.040, 1.050}}, {:beer_style, 46, 'Scottish Export 80/-', {:final_gravity, 1.010, 1.018}}, {:beer_style, 46, 'Scottish Export 80/-', {:abv, 4.0, 4.9}}, {:beer_style, 46, 'Scottish Export 80/-', {:ibu, 15, 36}}, {:beer_style, 46, 'Scottish Export 80/-', {:srm, 10, 19}}, {:beer_style, 46, 'Scottish Export 80/-', {:wiki, 'http://en.wikipedia.org/wiki/Scottish_beer'}}, {:beer_style, 20, 'English Mild', {:catgeory, "Ale"}}, {:beer_style, 20, 'English Mild', {:sub_category, "Brown Ale"}}, {:beer_style, 20, 'English Mild', {:original_gravity, 1.030, 1.038}}, {:beer_style, 20, 'English Mild', {:final_gravity, 1.004, 1.012}}, {:beer_style, 20, 'English Mild', {:abv, 2.5, 4.1}}, {:beer_style, 20, 'English Mild', {:ibu, 10, 24}}, {:beer_style, 20, 'English Mild', {:srm, 10, 25}}, {:beer_style, 20, 'English Mild', {:wiki, 'http://en.wikipedia.org/wiki/Mild_ale'}}, {:beer_style, 33, 'American Brown', {:catgeory, "Ale"}}, {:beer_style, 33, 'American Brown', {:sub_category, "Brown Ale"}}, {:beer_style, 33, 'American Brown', {:original_gravity, 1.040, 1.055}}, {:beer_style, 33, 'American Brown', {:final_gravity, 1.010, 1.018}}, {:beer_style, 33, 'American Brown', {:abv, 4.2, 6.0}}, {:beer_style, 33, 'American Brown', {:ibu, 25, 60}}, {:beer_style, 33, 'American Brown', {:srm, 15, 22}}, {:beer_style, 33, 'American Brown', {:wiki, 'http://beeradvocate.com/beer/style/73'}}, {:beer_style, 47, 'English Brown', {:catgeory, "Ale"}}, {:beer_style, 47, 'English Brown', {:sub_category, "Brown Ale"}}, {:beer_style, 47, 'English Brown', {:original_gravity, 1.040, 1.050}}, {:beer_style, 47, 'English Brown', {:final_gravity, 1.008, 1.014}}, {:beer_style, 47, 'English Brown', {:abv, 3.5, 6.0}}, {:beer_style, 47, 'English Brown', {:ibu, 15, 25}}, {:beer_style, 47, 'English Brown', {:srm, 15, 30}}, {:beer_style, 47, 'English Brown', {:wiki, 'http://en.wikipedia.org/wiki/English_brown_ale'}}, {:beer_style, 34, 'Brown Porter', {:catgeory, "Ale"}}, {:beer_style, 34, 'Brown Porter', {:sub_category, "Porter"}}, {:beer_style, 34, 'Brown Porter', {:original_gravity, 1.040, 1.050}}, {:beer_style, 34, 'Brown Porter', {:final_gravity, 1.008, 1.014}}, {:beer_style, 34, 'Brown Porter', {:abv, 3.8, 5.2}}, {:beer_style, 34, 'Brown Porter', {:ibu, 20, 30}}, {:beer_style, 34, 'Brown Porter', {:srm, 20, 30}}, {:beer_style, 34, 'Brown Porter', {:wiki, 'http://en.wikipedia.org/wiki/Porter_(beer)'}}, {:beer_style, 48, 'Robust Porter', {:catgeory, "Ale"}}, {:beer_style, 48, 'Robust Porter', {:sub_category, "Porter"}}, {:beer_style, 48, 'Robust Porter', {:original_gravity, 1.050, 1.065}}, {:beer_style, 48, 'Robust Porter', {:final_gravity, 1.012, 1.016}}, {:beer_style, 48, 'Robust Porter', {:abv, 4.8, 6.0}}, {:beer_style, 48, 'Robust Porter', {:ibu, 25, 45}}, {:beer_style, 48, 'Robust Porter', {:srm, 30, 40}}, {:beer_style, 48, 'Robust Porter', {:wiki, 'http://en.wikipedia.org/wiki/Porter_(beer)'}}, # Beer <NAME> has SRM 40+! Defaulting Range 40-100! {:beer_style, 21, '<NAME>', {:catgeory, "Ale"}}, {:beer_style, 21, '<NAME>', {:sub_category, "Stout"}}, {:beer_style, 21, '<NAME>', {:original_gravity, 1.035, 1.050}}, {:beer_style, 21, '<NAME>', {:final_gravity, 1.008, 1.014}}, {:beer_style, 21, '<NAME>', {:abv, 3.2, 5.5}}, {:beer_style, 21, '<NAME>', {:ibu, 30, 50}}, {:beer_style, 21, '<NAME>', {:srm, 40, 100}}, {:beer_style, 21, '<NAME>', {:wiki, 'http://en.wikipedia.org/wiki/Dry_stout'}}, # Beer Sweet Stout has SRM 40+! 
Defaulting Range 40-100! {:beer_style, 35, 'Sweet Stout', {:catgeory, "Ale"}}, {:beer_style, 35, 'Sweet Stout', {:sub_category, "Stout"}}, {:beer_style, 35, 'Sweet Stout', {:original_gravity, 1.035, 1.066}}, {:beer_style, 35, 'Sweet Stout', {:final_gravity, 1.010, 1.022}}, {:beer_style, 35, 'Sweet Stout', {:abv, 3.2, 6.4}}, {:beer_style, 35, 'Sweet Stout', {:ibu, 20, 40}}, {:beer_style, 35, 'Sweet Stout', {:srm, 40, 100}}, {:beer_style, 35, 'Sweet Stout', {:wiki, 'http://en.wikipedia.org/wiki/Sweet_stout'}}, # Beer Oatmeal Stout has SRM 40+! Defaulting Range 40-100! {:beer_style, 49, 'Oatmeal Stout', {:catgeory, "Ale"}}, {:beer_style, 49, 'Oatmeal Stout', {:sub_category, "Stout"}}, {:beer_style, 49, 'Oatmeal Stout', {:original_gravity, 1.035, 1.060}}, {:beer_style, 49, 'Oatmeal Stout', {:final_gravity, 1.008, 1.021}}, {:beer_style, 49, 'Oatmeal Stout', {:abv, 3.3, 6.1}}, {:beer_style, 49, 'Oatmeal Stout', {:ibu, 20, 50}}, {:beer_style, 49, 'Oatmeal Stout', {:srm, 40, 100}}, {:beer_style, 49, 'Oatmeal Stout', {:wiki, 'http://en.wikipedia.org/wiki/Oatmeal_stout'}}, # Beer Foreign Extra Stout has SRM 40+! Defaulting Range 40-100! {:beer_style, 22, 'Foreign Extra Stout', {:catgeory, "Ale"}}, {:beer_style, 22, 'Foreign Extra Stout', {:sub_category, "Stout"}}, {:beer_style, 22, 'Foreign Extra Stout', {:original_gravity, 1.050, 1.075}}, {:beer_style, 22, 'Foreign Extra Stout', {:final_gravity, 1.010, 1.017}}, {:beer_style, 22, 'Foreign Extra Stout', {:abv, 5.0, 7.5}}, {:beer_style, 22, 'Foreign Extra Stout', {:ibu, 35, 70}}, {:beer_style, 22, 'Foreign Extra Stout', {:srm, 40, 100}}, {:beer_style, 22, 'Foreign Extra Stout', {:wiki, 'http://www.brewwiki.com/index.php/Foreign_Extra_Stout'}}, # Beer Imperial Stout has SRM 40+! Defaulting Range 40-100! {:beer_style, 36, 'Imperial Stout', {:catgeory, "Ale"}}, {:beer_style, 36, 'Imperial Stout', {:sub_category, "Stout"}}, {:beer_style, 36, 'Imperial Stout', {:original_gravity, 1.075, 1.090}}, {:beer_style, 36, 'Imperial Stout', {:final_gravity, 1.020, 1.030}}, {:beer_style, 36, 'Imperial Stout', {:abv, 7.8, 9.0}}, {:beer_style, 36, 'Imperial Stout', {:ibu, 50, 80}}, {:beer_style, 36, 'Imperial Stout', {:srm, 40, 100}}, {:beer_style, 36, 'Imperial Stout', {:wiki, 'http://en.wikipedia.org/wiki/Imperial_stout'}}, # Beer Russian Imperial has SRM 40+! Defaulting Range 40-100! 
    {:beer_style, 50, 'Russian Imperial', {:catgeory, "Ale"}},
    {:beer_style, 50, 'Russian Imperial', {:sub_category, "Stout"}},
    {:beer_style, 50, 'Russian Imperial', {:original_gravity, 1.075, 1.100}},
    {:beer_style, 50, 'Russian Imperial', {:final_gravity, 1.018, 1.030}},
    {:beer_style, 50, 'Russian Imperial', {:abv, 8.0, 12.0}},
    {:beer_style, 50, 'Russian Imperial', {:ibu, 50, 90}},
    {:beer_style, 50, 'Russian Imperial', {:srm, 40, 100}},
    {:beer_style, 50, 'Russian Imperial', {:wiki, 'http://en.wikipedia.org/wiki/Russian_Imperial_Stout'}},

    {:beer_style, 23, '<NAME>', {:catgeory, "Lager"}},
    {:beer_style, 23, '<NAME>', {:sub_category, "Pilsner"}},
    {:beer_style, 23, '<NAME>', {:original_gravity, 1.044, 1.050}},
    {:beer_style, 23, '<NAME>', {:final_gravity, 1.006, 1.012}},
    {:beer_style, 23, '<NAME>', {:abv, 4.6, 5.4}},
    {:beer_style, 23, '<NAME>', {:ibu, 25, 45}},
    {:beer_style, 23, '<NAME>', {:srm, 2, 4}},
    {:beer_style, 23, '<NAME>', {:wiki, 'http://beeradvocate.com/articles/216'}},

    {:beer_style, 37, 'Bohemian Pilsner', {:catgeory, "Lager"}},
    {:beer_style, 37, 'Bohemian Pilsner', {:sub_category, "Pilsner"}},
    {:beer_style, 37, 'Bohemian Pilsner', {:original_gravity, 1.044, 1.056}},
    {:beer_style, 37, 'Bohemian Pilsner', {:final_gravity, 1.014, 1.020}},
    {:beer_style, 37, 'Bohemian Pilsner', {:abv, 4.1, 5.1}},
    {:beer_style, 37, 'Bohemian Pilsner', {:ibu, 35, 45}},
    {:beer_style, 37, 'Bohemian Pilsner', {:srm, 3, 5}},
    {:beer_style, 37, 'Bohemian Pilsner', {:wiki, 'http://en.wikipedia.org/wiki/Pilsner'}},

    {:beer_style, 51, 'American Pilsner', {:catgeory, "Lager"}},
    {:beer_style, 51, 'American Pilsner', {:sub_category, "Pilsner"}},
    {:beer_style, 51, 'American Pilsner', {:original_gravity, 1.045, 1.060}},
    {:beer_style, 51, 'American Pilsner', {:final_gravity, 1.012, 1.018}},
    {:beer_style, 51, 'American Pilsner', {:abv, 5.0, 6.0}},
    {:beer_style, 51, 'American Pilsner', {:ibu, 20, 40}},
    {:beer_style, 51, 'American Pilsner', {:srm, 3, 6}},
    {:beer_style, 51, 'American Pilsner', {:wiki, 'http://www.bjcp.org/2008styles/style02.php#1c'}},

    {:beer_style, 24, 'American Standard', {:catgeory, "Lager"}},
    {:beer_style, 24, 'American Standard', {:sub_category, "American Lager"}},
    {:beer_style, 24, 'American Standard', {:original_gravity, 1.040, 1.046}},
    {:beer_style, 24, 'American Standard', {:final_gravity, 1.006, 1.010}},
    {:beer_style, 24, 'American Standard', {:abv, 4.1, 4.8}},
    {:beer_style, 24, 'American Standard', {:ibu, 5, 17}},
    {:beer_style, 24, 'American Standard', {:srm, 2, 6}},
    {:beer_style, 24, 'American Standard', {:wiki, 'http://www.pintley.com/browse/style/Standard-American-Lager/4?p=1&s=n&d=a'}},

    {:beer_style, 38, 'American Premium', {:catgeory, "Lager"}},
    {:beer_style, 38, 'American Premium', {:sub_category, "American Lager"}},
    {:beer_style, 38, 'American Premium', {:original_gravity, 1.046, 1.050}},
    {:beer_style, 38, 'American Premium', {:final_gravity, 1.010, 1.014}},
    {:beer_style, 38, 'American Premium', {:abv, 4.6, 5.1}},
    {:beer_style, 38, 'American Premium', {:ibu, 13, 23}},
    {:beer_style, 38, 'American Premium', {:srm, 2, 8}},
    {:beer_style, 38, 'American Premium', {:wiki, 'http://greatbrewers.com/style/american-style-premium-lager'}},

    {:beer_style, 52, 'American Dark', {:catgeory, "Lager"}},
    {:beer_style, 52, 'American Dark', {:sub_category, "American Lager"}},
    {:beer_style, 52, 'American Dark', {:original_gravity, 1.040, 1.050}},
    {:beer_style, 52, 'American Dark', {:final_gravity, 1.008, 1.012}},
    {:beer_style, 52, 'American Dark', {:abv, 4.1, 5.6}},
    {:beer_style, 52, 'American Dark', {:ibu, 14, 20}},
    {:beer_style, 52, 'American Dark', {:srm, 10, 20}},
    {:beer_style, 52, 'American Dark', {:wiki, 'http://www.brewwiki.com/index.php/Dark_American_Lager'}},

    {:beer_style, 12, '<NAME>', {:catgeory, "Lager"}},
    {:beer_style, 12, '<NAME>', {:sub_category, "European Lager"}},
    {:beer_style, 12, '<NAME>', {:original_gravity, 1.044, 1.050}},
    {:beer_style, 12, '<NAME>', {:final_gravity, 1.008, 1.012}},
    {:beer_style, 12, '<NAME>', {:abv, 4.5, 5.6}},
    {:beer_style, 12, '<NAME>', {:ibu, 18, 25}},
    {:beer_style, 12, '<NAME>', {:srm, 3, 5}},
    {:beer_style, 12, '<NAME>', {:wiki, 'http://beeradvocate.com/beer/style/21'}},

    {:beer_style, 25, 'Dortmunder', {:catgeory, "Lager"}},
    {:beer_style, 25, 'Dortmunder', {:sub_category, "European Lager"}},
    {:beer_style, 25, 'Dortmunder', {:original_gravity, 1.048, 1.056}},
    {:beer_style, 25, 'Dortmunder', {:final_gravity, 1.010, 1.014}},
    {:beer_style, 25, 'Dortmunder', {:abv, 5.1, 6.1}},
    {:beer_style, 25, 'Dortmunder', {:ibu, 23, 29}},
    {:beer_style, 25, 'Dortmunder', {:srm, 4, 6}},
    {:beer_style, 25, 'Dortmunder', {:wiki, 'http://en.wikipedia.org/wiki/Dortmunder_(beer)'}},

    {:beer_style, 39, '<NAME>', {:catgeory, "Lager"}},
    {:beer_style, 39, '<NAME>', {:sub_category, "European Lager"}},
    {:beer_style, 39, '<NAME>', {:original_gravity, 1.052, 1.056}},
    {:beer_style, 39, '<NAME>', {:final_gravity, 1.010, 1.014}},
    {:beer_style, 39, '<NAME>', {:abv, 4.8, 5.4}},
    {:beer_style, 39, '<NAME>', {:ibu, 16, 25}},
    {:beer_style, 39, '<NAME>', {:srm, 17, 23}},
    {:beer_style, 39, 'Munich Dunkel', {:wiki, 'http://en.wikipedia.org/wiki/Munich_dunkel_lager'}},

    # Beer Schwarzbier converted 40+ to 100
    {:beer_style, 53, 'Schwarzbier', {:catgeory, "Lager"}},
    {:beer_style, 53, 'Schwarzbier', {:sub_category, "European Lager"}},
    {:beer_style, 53, 'Schwarzbier', {:original_gravity, 1.044, 1.052}},
    {:beer_style, 53, 'Schwarzbier', {:final_gravity, 1.012, 1.016}},
    {:beer_style, 53, 'Schwarzbier', {:abv, 3.8, 5.0}},
    {:beer_style, 53, 'Schwarzbier', {:ibu, 22, 30}},
    {:beer_style, 53, 'Schwarzbier', {:srm, 25, 100}},
    {:beer_style, 53, 'Schwarzbier', {:wiki, 'http://en.wikipedia.org/wiki/Schwarzbier'}},

    {:beer_style, 13, '<NAME>', {:catgeory, "Lager"}},
    {:beer_style, 13, '<NAME>', {:sub_category, "Bock"}},
    {:beer_style, 13, '<NAME>', {:original_gravity, 1.066, 1.074}},
    {:beer_style, 13, '<NAME>', {:final_gravity, 1.011, 1.020}},
    {:beer_style, 13, '<NAME>', {:abv, 6.0, 7.5}},
    {:beer_style, 13, '<NAME>', {:ibu, 20, 35}},
    {:beer_style, 13, '<NAME>', {:srm, 4, 10}},
    {:beer_style, 13, '<NAME>', {:wiki, 'http://en.wikipedia.org/wiki/Bock#Maibock_or_helles_bock'}},

    {:beer_style, 26, 'Doppelbock', {:catgeory, "Lager"}},
    {:beer_style, 26, 'Doppelbock', {:sub_category, "Bock"}},
    {:beer_style, 26, 'Doppelbock', {:original_gravity, 1.074, 1.080}},
    {:beer_style, 26, 'Doppelbock', {:final_gravity, 1.020, 1.028}},
    {:beer_style, 26, 'Doppelbock', {:abv, 6.6, 7.9}},
    {:beer_style, 26, 'Doppelbock', {:ibu, 20, 30}},
    {:beer_style, 26, 'Doppelbock', {:srm, 12, 30}},
    {:beer_style, 26, 'Doppelbock', {:wiki, 'http://en.wikipedia.org/wiki/Bock#Doppelbock'}},

    {:beer_style, 40, 'Traditional Bock', {:catgeory, "Lager"}},
    {:beer_style, 40, 'Traditional Bock', {:sub_category, "Bock"}},
    {:beer_style, 40, 'Traditional Bock', {:original_gravity, 1.066, 1.074}},
    {:beer_style, 40, 'Traditional Bock', {:final_gravity, 1.018, 1.024}},
    {:beer_style, 40, 'Traditional Bock', {:abv, 6.4, 7.6}},
    {:beer_style, 40, 'Traditional Bock', {:ibu, 20, 30}},
    {:beer_style, 40, 'Traditional Bock', {:srm, 15, 30}},
    {:beer_style, 40, 'Traditional Bock', {:wiki, 'http://en.wikipedia.org/wiki/Bock#Traditional_bock'}},

    # Beer Eisbock converted 40+ to 100
    {:beer_style, 54, 'Eisbock', {:catgeory, "Lager"}},
    {:beer_style, 54, 'Eisbock', {:sub_category, "Bock"}},
    {:beer_style, 54, 'Eisbock', {:original_gravity, 1.090, 1.116}},
    {:beer_style, 54, 'Eisbock', {:final_gravity, 1.023, 1.035}},
    {:beer_style, 54, 'Eisbock', {:abv, 8.7, 14.4}},
    {:beer_style, 54, 'Eisbock', {:ibu, 25, 50}},
    {:beer_style, 54, 'Eisbock', {:srm, 18, 100}},
    {:beer_style, 54, 'Eisbock', {:wiki, 'http://en.wikipedia.org/wiki/Bock#Eisbock'}},

    {:beer_style, 61, 'Altbier', {:catgeory, "Other"}},
    {:beer_style, 61, 'Altbier', {:sub_category, "Other"}},
    {:beer_style, 61, 'Altbier', {:original_gravity, 1.044, 1.048}},
    {:beer_style, 61, 'Altbier', {:final_gravity, 1.008, 1.014}},
    {:beer_style, 61, 'Altbier', {:abv, 4.6, 5.1}},
    {:beer_style, 61, 'Altbier', {:ibu, 25, 48}},
    {:beer_style, 61, 'Altbier', {:srm, 11, 19}},
    {:beer_style, 61, 'Altbier', {:wiki, 'http://en.wikipedia.org/wiki/Altbier'}},

    {:beer_style, 56, 'Biére de Garde', {:catgeory, "Other"}},
    {:beer_style, 56, 'Biére de Garde', {:sub_category, "Other"}},
    {:beer_style, 56, 'Biére de Garde', {:original_gravity, 1.060, 1.080}},
    {:beer_style, 56, 'Biére de Garde', {:final_gravity, 1.012, 1.016}},
    {:beer_style, 56, 'Biére de Garde', {:abv, 4.5, 8.0}},
    {:beer_style, 56, 'Biére de Garde', {:ibu, 20, 30}},
    {:beer_style, 56, 'Biére de Garde', {:srm, 5, 12}},
    {:beer_style, 56, 'Biére de Garde', {:wiki, 'http://en.wikipedia.org/wiki/Bi%C3%A8re_de_Garde'}},

    {:beer_style, 57, 'Oktoberfest', {:catgeory, "Other"}},
    {:beer_style, 57, 'Oktoberfest', {:sub_category, "Other"}},
    {:beer_style, 57, 'Oktoberfest', {:original_gravity, 1.050, 1.056}},
    {:beer_style, 57, 'Oktoberfest', {:final_gravity, 1.012, 1.016}},
    {:beer_style, 57, 'Oktoberfest', {:abv, 5.1, 6.5}},
    {:beer_style, 57, 'Oktoberfest', {:ibu, 18, 30}},
    {:beer_style, 57, 'Oktoberfest', {:srm, 7, 12}},
    {:beer_style, 57, 'Oktoberfest', {:wiki, 'http://en.wikipedia.org/wiki/Oktoberfest_Beer'}},

    {:beer_style, 62, 'Vienna', {:catgeory, "Other"}},
    {:beer_style, 62, 'Vienna', {:sub_category, "Other"}},
    {:beer_style, 62, 'Vienna', {:original_gravity, 1.048, 1.056}},
    {:beer_style, 62, 'Vienna', {:final_gravity, 1.010, 1.014}},
    {:beer_style, 62, 'Vienna', {:abv, 4.6, 5.5}},
    {:beer_style, 62, 'Vienna', {:ibu, 20, 28}},
    {:beer_style, 62, 'Vienna', {:srm, 8, 14}},
    {:beer_style, 62, 'Vienna', {:wiki, 'http://en.wikipedia.org/wiki/Vienna_lager'}},

    {:beer_style, 58, 'Cream Ale', {:catgeory, "Other"}},
    {:beer_style, 58, 'Cream Ale', {:sub_category, "Other"}},
    {:beer_style, 58, 'Cream Ale', {:original_gravity, 1.044, 1.055}},
    {:beer_style, 58, 'Cream Ale', {:final_gravity, 1.007, 1.010}},
    {:beer_style, 58, 'Cream Ale', {:abv, 4.5, 6.0}},
    {:beer_style, 58, 'Cream Ale', {:ibu, 10, 35}},
    {:beer_style, 58, 'Cream Ale', {:srm, 8, 14}},
    {:beer_style, 58, 'Cream Ale', {:wiki, 'http://en.wikipedia.org/wiki/Cream_ale'}},

    {:beer_style, 63, 'Steam Beer', {:catgeory, "Other"}},
    {:beer_style, 63, 'Steam Beer', {:sub_category, "Other"}},
    {:beer_style, 63, 'Steam Beer', {:original_gravity, 1.040, 1.055}},
    {:beer_style, 63, 'Steam Beer', {:final_gravity, 1.012, 1.018}},
    {:beer_style, 63, 'Steam Beer', {:abv, 3.6, 5.0}},
    {:beer_style, 63, 'Steam Beer', {:ibu, 35, 45}},
    {:beer_style, 63, 'Steam Beer', {:srm, 8, 17}},
    {:beer_style, 63, 'Steam Beer', {:wiki, 'http://en.wikipedia.org/wiki/Steam_beer'}},

    {:beer_style, 59, 'Smoked Beer', {:catgeory, "Other"}},
    {:beer_style, 59, 'Smoked Beer', {:sub_category, "Other"}},
    {:beer_style, 59, 'Smoked Beer', {:original_gravity, 1.050, 1.055}},
    {:beer_style, 59, 'Smoked Beer', {:final_gravity, 1.012, 1.016}},
    {:beer_style, 59, 'Smoked Beer', {:abv, 5.0, 5.5}},
    {:beer_style, 59, 'Smoked Beer', {:ibu, 20, 30}},
    {:beer_style, 59, 'Smoked Beer', {:srm, 12, 17}},
    {:beer_style, 59, 'Smoked Beer', {:wiki, 'http://en.wikipedia.org/wiki/Smoked_beer'}},

    {:beer_style, 64, 'Barleywine', {:catgeory, "Other"}},
    {:beer_style, 64, 'Barleywine', {:sub_category, "Other"}},
    {:beer_style, 64, 'Barleywine', {:original_gravity, 1.085, 1.120}},
    {:beer_style, 64, 'Barleywine', {:final_gravity, 1.024, 1.032}},
    {:beer_style, 64, 'Barleywine', {:abv, 8.4, 12.2}},
    {:beer_style, 64, 'Barleywine', {:ibu, 50, 100}},
    {:beer_style, 64, 'Barleywine', {:srm, 14, 22}},
    {:beer_style, 64, 'Barleywine', {:wiki, 'http://en.wikipedia.org/wiki/Barley_wine'}},

    {:beer_style, 60, 'English Old Ale', {:catgeory, "Other"}},
    {:beer_style, 60, 'English Old Ale', {:sub_category, "Other"}},
    {:beer_style, 60, 'English Old Ale', {:original_gravity, 1.060, 1.090}},
    {:beer_style, 60, 'English Old Ale', {:final_gravity, 1.015, 1.022}},
    {:beer_style, 60, 'English Old Ale', {:abv, 6.1, 8.5}},
    {:beer_style, 60, 'English Old Ale', {:ibu, 30, 40}},
    {:beer_style, 60, 'English Old Ale', {:srm, 12, 16}},
    {:beer_style, 60, 'English Old Ale', {:wiki, 'http://en.wikipedia.org/wiki/Old_ale'}},

    # Beer Strong Scotch Ale converted 40+ to 100
    {:beer_style, 65, 'Strong Scotch Ale', {:catgeory, "Other"}},
    {:beer_style, 65, 'Strong Scotch Ale', {:sub_category, "Other"}},
    {:beer_style, 65, 'Strong Scotch Ale', {:original_gravity, 1.072, 1.085}},
    {:beer_style, 65, 'Strong Scotch Ale', {:final_gravity, 1.016, 1.028}},
    {:beer_style, 65, 'Strong Scotch Ale', {:abv, 6.0, 9.0}},
    {:beer_style, 65, 'Strong Scotch Ale', {:ibu, 20, 40}},
    {:beer_style, 65, 'Strong Scotch Ale', {:srm, 10, 100}},
    {:beer_style, 65, 'Strong Scotch Ale', {:wiki, 'http://en.wikipedia.org/wiki/Scotch_Ale'}}
  ]

  def add_to(engine) do
    :seresye.assert(engine, @facts)
  end
end
lib/expert.ex
0.668015
0.534309
expert.ex
starcoder
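This record is cut off at the top, so the module name is not visible here; the sketch below uses `BeerFacts` as a stand-in, and assumes the `:seresye` Erlang API (`start/1` to create a named engine, `query_kb/2` to pattern-match facts), since only `add_to/1` is shown.

```elixir
# Hypothetical usage; BeerFacts and the :beer_kb engine name are illustrative.
:seresye.start(:beer_kb)
BeerFacts.add_to(:beer_kb)

# Ask the knowledge base for every fact about style id 50; :_ is a wildcard.
matches = :seresye.query_kb(:beer_kb, {:beer_style, 50, :_, :_})
```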
defmodule Observables.Operator.ZipVar do
  @moduledoc false
  use Observables.GenObservable
  require Logger

  alias Observables.Obs

  def init([pids_inds, obstp]) do
    Logger.debug("ZipVar: #{inspect(self())}")
    # Define the index for the next observable.
    index = length(pids_inds)
    # Unzip the indices for the initial observables.
    {_pids, inds} = Enum.unzip(pids_inds)
    # Create empty lists to store future values for the initial observables.
    queues = inds |> Enum.map(fn _ -> [] end)
    # Create a map that maps observable pids to their indices.
    indmap = pids_inds |> Map.new()
    # Create a map that maps indices of observables to their value queues.
    qmap = Enum.zip(inds, queues) |> Map.new()
    {:ok, {qmap, indmap, index, obstp}}
  end

  # Handle a new observable to listen to.
  def handle_event({:newobs, obs}, {qmap, indmap, index, obstp}) do
    # Tag the new observable with a :newval tag and its newly given index so that we can process it properly.
    {t_f, t_pid} = obs |> Obs.map(fn val -> {:newval, index, val} end)
    # Make the tagged observable send to us.
    t_f.(self())
    # Add an empty queue as the entry for the new observable to the value map, keyed by its newly given index.
    new_qmap = qmap |> Map.put(index, [])
    # Add the given index as the entry for the new observable to the index map, keyed by the pid of the tagged observable.
    new_indmap = indmap |> Map.put(t_pid, index)
    # Increase the index counter.
    new_index = index + 1
    {:novalue, {new_qmap, new_indmap, new_index, obstp}}
  end

  # Handle a new value being sent to us from one of the observables we listen to.
  def handle_event({:newval, index, value}, {qmap, indmap, cindex, obstp}) do
    # Update the queue of the observable by adding its newly received value to the back.
    new_queue = Map.get(qmap, index) ++ [value]
    new_qmap = %{qmap | index => new_queue}

    # Get the first value of every queue (represented as a list), or :empty if that queue is empty.
    firsts =
      new_qmap
      |> Map.values()
      |> Enum.map(fn vs -> if Enum.empty?(vs), do: :empty, else: List.first(vs) end)

    # Check if we have received a value from every dependency. If so, produce a new
    # value from the first elements and pop these from their respective queues.
    if Enum.any?(firsts, fn fst -> fst == :empty end) do
      {:novalue, {new_qmap, indmap, cindex, obstp}}
    else
      # Keep the popped queues in the returned state so consumed heads are not zipped again.
      popped_qmap =
        new_qmap
        |> Enum.map(fn {index, queue} -> {index, Enum.drop(queue, 1)} end)
        |> Map.new()

      {:value, List.to_tuple(firsts), {popped_qmap, indmap, cindex, obstp}}
    end
  end

  def handle_done(obstp, {qmap, indmap, cindex, obstp}) do
    Logger.debug(
      "#{inspect(self())}: zipvar has a dead observable stream, going on with possibility of termination."
    )

    {:ok, :continue, {qmap, indmap, cindex, nil}}
  end

  def handle_done(pid, {qmap, indmap, cindex, nil}) do
    Logger.debug(
      "#{inspect(self())}: zipvar has one dead dependency and already a dead observable stream, going on with possibility of termination."
    )

    index = Map.get(indmap, pid)
    new_indmap = Map.delete(indmap, pid)
    new_qmap = Map.delete(qmap, index)
    {:ok, :continue, {new_qmap, new_indmap, cindex, nil}}
  end

  def handle_done(pid, {qmap, indmap, cindex, obstp}) do
    Logger.debug(
      "#{inspect(self())}: zipvar has one dead dependency, but an active observable stream, going on without possibility of termination at this point."
    )

    index = Map.get(indmap, pid)
    new_indmap = Map.delete(indmap, pid)
    new_qmap = Map.delete(qmap, index)
    {:ok, :continue, :notermination, {new_qmap, new_indmap, cindex, obstp}}
  end
end
lib/observables/zip_var.ex
0.709623
0.621498
zip_var.ex
starcoder
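The zip semantics above are easiest to see on plain data. A minimal sketch of the queue bookkeeping `handle_event/2` performs, with two dependencies at indices 0 and 1 (pure data, no processes involved):

```elixir
# Dependency 0 has a queued value, dependency 1 just delivered one.
qmap = %{0 => [:a], 1 => [:b]}

# Every queue now has a head, so a tuple can be emitted...
firsts = qmap |> Map.values() |> Enum.map(&List.first/1)
# firsts == [:a, :b], emitted as {:a, :b}

# ...and the consumed heads are popped before the next round.
popped = Map.new(qmap, fn {i, q} -> {i, Enum.drop(q, 1)} end)
# popped == %{0 => [], 1 => []}
```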
defmodule YtPotion.Video do
  import YtPotion.Base

  @moduledoc """
  Provides methods to interact with the YouTube Videos API
  """

  @doc """
  Returns the YouTube API response

  ## Examples

  ```elixir
  iex> YtPotion.Video.list(%{id: "gben9fsNYTM,LTke1j_fkLc", part: "statistics"})
  {:ok, %HTTPoison.Response{body: "{\n \"kind\": \"youtube#videoListResponse\",\n \"etag\": \"\\\"gMxXHe-zinKdE9lTnzKu8vjcmDI/VkN1rb33o20QpEbqSoDYD_uBVhk\\\"\",\n \"pageInfo\": {\n \"totalResults\": 2,\n \"resultsPerPage\": 2\n },\n \"items\": [\n {\n \"kind\": \"youtube#video\",\n \"etag\": \"\\\"gMxXHe-zinKdE9lTnzKu8vjcmDI/4iGPLnHQ0TP8qMdCSoYGmZDc0kY\\\"\",\n \"id\": \"gben9fsNYTM\",\n \"statistics\": {\n \"viewCount\": \"142517\",\n \"likeCount\": \"1384\",\n \"dislikeCount\": \"47\",\n \"favoriteCount\": \"0\",\n \"commentCount\": \"76\"\n }\n },\n {\n \"kind\": \"youtube#video\",\n \"etag\": \"\\\"gMxXHe-zinKdE9lTnzKu8vjcmDI/MZfxDD4YW6qu4n4TazWHAvc8mqU\\\"\",\n \"id\": \"LTke1j_fkLc\",\n \"statistics\": {\n \"viewCount\": \"72923\",\n \"likeCount\": \"731\",\n \"dislikeCount\": \"8\",\n \"favoriteCount\": \"0\",\n \"commentCount\": \"112\"\n }\n }\n ]\n}\n", headers: [{"Expires", "Wed, 01 Feb 2017 01:53:21 GMT"}, {"Date", "Wed, 01 Feb 2017 01:53:21 GMT"}, {"Cache-Control", "private, max-age=0, must-revalidate, no-transform"}, {"ETag", "\"gMxXHe-zinKdE9lTnzKu8vjcmDI/VkN1rb33o20QpEbqSoDYD_uBVhk\""}, {"Vary", "Origin"}, {"Vary", "X-Origin"}, {"Content-Type", "application/json; charset=UTF-8"}, {"X-Content-Type-Options", "nosniff"}, {"X-Frame-Options", "SAMEORIGIN"}, {"X-XSS-Protection", "1; mode=block"}, {"Content-Length", "764"}, {"Server", "GSE"}, {"Alt-Svc", "quic=\":443\"; ma=2592000; v=\"35,34\""}], status_code: 200}}
  ```
  """
  @spec list(map, map) :: map # HTTPoison.Response{}
  yt_macro_call list(filter_part, options \\ %{}) do
    get_request("videos", Map.merge(filter_part, options))
  end
end
lib/yt_potion/yt_potion_video.ex
0.784897
0.416886
yt_potion_video.ex
starcoder
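A sketch of pulling the statistics out of the response shown in the doc above. It assumes an API key is configured for YtPotion and that a JSON library such as Jason is available; neither ships with this module, so both are assumptions.

```elixir
# Hypothetical caller; Jason is one of several JSON decoders that would work here.
{:ok, %HTTPoison.Response{status_code: 200, body: body}} =
  YtPotion.Video.list(%{id: "gben9fsNYTM", part: "statistics"})

%{"items" => [%{"statistics" => stats} | _]} = Jason.decode!(body)
String.to_integer(stats["viewCount"])
#=> 142517, per the example response above
```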
defmodule Bitset do
  @moduledoc """
  Documentation for Bitset.

  A fixed-size sequence of N bits.
  """
  import Kernel, except: [to_string: 1]

  @set_bit 1
  @unset_bit 0

  @type t :: %__MODULE__{}

  defstruct size: 0, data: <<>>

  @spec new(non_neg_integer()) :: Bitset.t()
  def new(size) when is_integer(size) do
    %Bitset{size: size, data: <<@unset_bit::size(size)>>}
  end

  @spec new(bitstring(), integer()) :: Bitset.t()
  def new(<<data::bits>>, size) when is_integer(size) do
    %Bitset{size: size, data: data}
  end

  @spec size(Bitset.t()) :: integer()
  def size(bitset = %Bitset{}) do
    bitset.size
  end

  @spec size(bitstring()) :: integer()
  def size(<<data::bits>>) do
    bit_size(data)
  end

  @spec test?(Bitset.t(), integer()) :: boolean()
  def test?(bitset = %Bitset{}, pos) do
    # at/2 returns {prefix, bit, rest}; the bit itself is element 1.
    at(bitset.data, pos) |> elem(1) == @set_bit
  end

  @spec all?(bitstring() | Bitset.t()) :: boolean()
  def all?(bitset = %Bitset{}), do: all?(bitset.data)
  def all?(<<1::1>>), do: true
  def all?(<<0::1>>), do: false
  def all?(<<1::1, rest::bits>>), do: all?(rest)
  def all?(<<0::1, _rest::bits>>), do: false

  @spec any?(bitstring() | Bitset.t()) :: boolean()
  def any?(bitset = %Bitset{}), do: any?(bitset.data)
  def any?(<<>>), do: false
  def any?(<<1::1, _rest::bits>>), do: true
  def any?(<<0::1, rest::bits>>), do: any?(rest)

  @spec none?(bitstring() | Bitset.t()) :: boolean()
  def none?(bitset = %Bitset{}), do: none?(bitset.data)
  def none?(<<>>), do: true
  def none?(<<1::1, _rest::bits>>), do: false
  def none?(<<0::1, rest::bits>>), do: none?(rest)

  @doc "the number of bits set to true"
  @spec count(Bitset.t()) :: integer
  def count(bitset = %Bitset{}) do
    for(<<bit::1 <- bitset.data>>, do: bit) |> Enum.sum()
  end

  @doc "sets all bits to true"
  @spec set(Bitset.t()) :: Bitset.t()
  def set(bitset = %Bitset{}) do
    size = bitset.size
    # Integer exponentiation; :math.pow/2 goes through floats and loses
    # precision once the bitset is wider than 53 bits.
    value = Integer.pow(2, size) - 1
    new(<<value::size(size)>>, size)
  end

  @doc "sets the bit at the position to bit value"
  @spec set(Bitset.t(), integer, integer) :: Bitset.t()
  def set(bitset = %Bitset{}, pos, bit \\ @set_bit) do
    data = set_bit(bitset.data, pos, bit)
    new(data, bitset.size)
  end

  @doc "sets all bits to false"
  @spec reset(Bitset.t()) :: Bitset.t()
  def reset(bitset = %Bitset{}) do
    size = bitset.size
    new(<<@unset_bit::size(size)>>, size)
  end

  @doc "sets the bit at the position to false"
  @spec reset(Bitset.t(), integer) :: Bitset.t()
  def reset(bitset = %Bitset{}, pos) do
    data = set_bit(bitset.data, pos, @unset_bit)
    new(data, bitset.size)
  end

  @doc "toggle all bits"
  @spec flip(Bitset.t()) :: Bitset.t()
  def flip(bitset = %Bitset{}) do
    new(flip_bit(bitset.data, <<>>), bitset.size)
  end

  @doc "toggle the bit at the position"
  @spec flip(Bitset.t(), integer) :: Bitset.t()
  def flip(bitset = %Bitset{}, pos) when is_integer(pos) do
    <<prefix::size(pos), val::1, rest::bits>> = bitset.data
    new(<<prefix::size(pos), :erlang.band(val + 1, 1)::1, rest::bits>>, bitset.size)
  end

  @spec reverse(Bitset.t()) :: Bitset.t()
  def reverse(bitset = %Bitset{}), do: new(reverse_bit(bitset.data, <<>>), bitset.size)

  @doc "return string representation of the bitset"
  @spec to_string(bitstring() | Bitset.t(), bitstring()) :: binary()
  def to_string(_, acc \\ <<>>)

  def to_string(bitset = %Bitset{}, acc) do
    to_string(bitset.data, acc)
  end

  def to_string(<<>>, acc), do: acc

  def to_string(<<1::1, rest::bits>>, acc) do
    to_string(rest, acc <> "1")
  end

  def to_string(<<0::1, rest::bits>>, acc) do
    to_string(rest, acc <> "0")
  end

  @doc "return Bitset data binary"
  @spec to_data(Bitset.t()) :: bitstring()
  def to_data(bitset = %Bitset{}) do
    bitset.data
  end

  @doc "convert to binary format"
  @deprecated "Use to_bytes/1 instead"
  @spec to_binary(Bitset.t()) :: bitstring()
  def to_binary(bitset = %Bitset{}) do
    reverse_byte(bitset.data, <<>>)
  end

  @doc since: "0.2.1"
  @spec to_bytes(Bitset.t()) :: bitstring()
  def to_bytes(bitset = %Bitset{}) do
    reverse_byte(bitset.data, <<>>)
  end

  # Returns {prefix, bit, rest} for the bit at the given zero-based position.
  defp at(data, pos) do
    <<prefix::size(pos), bit::size(1), rest::bits>> = data
    {prefix, bit, rest}
  end

  defp set_bit(data, pos, bit) do
    <<prefix::size(pos), val::size(1), rest::bits>> = data

    if val != bit do
      <<prefix::size(pos), bit::size(1), rest::bits>>
    else
      data
    end
  end

  defp flip_bit(<<>>, acc), do: acc

  defp flip_bit(<<1::1, rest::bits>>, acc) do
    flip_bit(rest, <<acc::bits, 0::1>>)
  end

  defp flip_bit(<<0::1, rest::bits>>, acc) do
    flip_bit(rest, <<acc::bits, 1::1>>)
  end

  defp reverse_bit(<<>>, acc), do: acc

  defp reverse_bit(<<bit::1, rest::bits>>, acc) do
    reverse_bit(rest, <<bit::1, acc::bits>>)
  end

  defp reverse_byte(<<>>, acc), do: acc

  defp reverse_byte(<<bit::8, rest::bits>>, acc) do
    reverse_byte(rest, acc <> reverse_bit(<<bit::8>>, <<>>))
  end

  defp reverse_byte(<<rest::bits>>, acc) do
    padding = 8 - bit_size(rest)
    reverse_byte(<<rest::bits, 0::size(padding)>>, acc)
  end
end
lib/bitset.ex
0.676086
0.489686
bitset.ex
starcoder
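A quick tour of the API above; positions are zero-based from the most significant (left-most) bit, as the pattern match in `at/2` implies:

```elixir
bs = Bitset.new(8)              # eight bits, all unset
bs = Bitset.set(bs, 3)          # set the bit at position 3 (from the left)
Bitset.test?(bs, 3)             #=> true
Bitset.to_string(bs)            #=> "00010000"
Bitset.count(Bitset.flip(bs))   #=> 7
```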
defmodule NcsaHmac.Plug do
  import Keyword, only: [has_key?: 2]

  @moduledoc """
  Plug functions for loading and authorizing resources for the current request.

  The plugs all store data in conn.assigns (in Phoenix applications, keys in
  conn.assigns can be accessed with `@key_name` in templates).

  You must also specify the Ecto repo to use in your configuration:

  ```
  config :ncsa_hmac, repo: Project.Repo
  ```

  If you wish, you may also specify the key where NcsaHmac will look for the
  record to authorize against:

  ```
  config :ncsa_hmac, api_keys: :some_api_key_record
  ```

  You can specify a handler function (in this case, `Helpers.handle_unauthorized`)
  to be called when an action is unauthorized like so:

  ```elixir
  config :ncsa_hmac, unauthorized_handler: {Helpers, :handle_unauthorized}
  ```

  or to handle when a resource is not found:

  ```elixir
  config :ncsa_hmac, not_found_handler: {Helpers, :handle_not_found}
  ```

  NcsaHmac will pass the `conn` to the handler function.
  """

  @doc """
  Load the given resource.

  Load the resource with id given by `conn.params["id"]` (or
  `conn.params[opts[:id_name]]` if `opts[:id_name]` is specified) and ecto model
  given by `opts[:model]` into `conn.assigns.resource_name`.

  `resource_name` is either inferred from the model name or specified in the plug
  declaration with the `:as` key. To infer the `resource_name`, the most specific
  (right-most) name in the model's module name will be used, converted to
  underscore case. For example, `load_resource model: Some.Project.BlogPost` will
  load the resource into `conn.assigns.blog_post`.

  If the resource cannot be fetched, `conn.assigns.resource_name` is set to nil.

  Required opts:

  * `:model` - Specifies the module name of the model to load resources from

  Optional opts:

  * `:as` - Specifies the `resource_name` to use
  * `:only` - Specifies which actions to authorize
  * `:except` - Specifies which actions for which to skip authorization
  * `:preload` - Specifies association(s) to preload
  * `:id_name` - Specifies the name of the id in `conn.params`, defaults to "id"
  * `:id_field` - Specifies the name of the ID field in the database for searching :id_name value, defaults to "id".
  * `:persisted` - Specifies the resource should always be loaded from the database, defaults to false
  * `:not_found_handler` - Specify a handler function to be called if the resource is not found

  Examples:

  ```
  plug :load_resource, model: Post
  plug :load_resource, model: User, preload: :posts, as: :the_user
  plug :load_resource, model: User, only: [:index, :show], preload: :posts, as: :person
  plug :load_resource, model: User, except: [:destroy]
  plug :load_resource, model: Post, id_name: "post_id", only: [:new, :create], persisted: true
  plug :load_resource, model: Post, id_name: "slug", id_field: "slug", only: [:show], persisted: true
  ```
  """
  def load_resource(conn, opts) do
    conn
    |> action_valid?(opts)
    |> case do
      true ->
        conn
        |> _load_resource(opts)
        |> handle_not_found(opts)

      false ->
        conn
    end
  end

  defp _load_resource(conn, opts) do
    get_action(conn)
    loaded_resource = fetch_resource(conn, opts)
    %{conn | assigns: Map.put(conn.assigns, resource_name(opts), loaded_resource)}
  end

  @doc """
  Authorize the resource, assuming the matching db record is loaded into the conn.

  In order to use this function,

  1) `conn.assigns[Application.get_env(:ncsa_hmac, :record, :record)]` must be an
  ecto struct representing a record that has the signing_key and auth_id

  2) `conn.private` must be a map (this should not be a problem unless you
  explicitly modified it)

  If authorization succeeds, sets `conn.assigns.authorized` to true.

  After authorization, conn.assigns.resource_name is set to nil.

  Required opts:

  * `:model` - Specifies the module name of the model to authorize access to

  Optional opts:

  * `:only` - Specifies which actions to authorize
  * `:except` - Specifies which actions for which to skip authorization
  * `:preload` - Specifies association(s) to preload
  * `:id_name` - Specifies the name of the id in `conn.params`, defaults to "id"
  * `:id_field` - Specifies the name of the ID field in the database for searching :id_name value, defaults to "id".
  * `:persisted` - Specifies the resource should always be loaded from the database, defaults to false
  * `:unauthorized_handler` - Specify a handler function to be called if the action is unauthorized

  Examples:

  ```
  plug :authorize_resource, model: Post
  plug :authorize_resource, model: User, preload: :posts
  plug :authorize_resource, model: User, only: [:index, :show], preload: :posts
  plug :load_resource, model: Post, id_name: "post_id", only: [:index], persisted: true, preload: :comments
  plug :load_resource, model: Post, id_name: "slug", id_field: "slug", only: [:show], persisted: true
  ```
  """
  def authorize_resource(conn, opts) do
    conn
    |> action_valid?(opts)
    |> case do
      true ->
        conn
        |> _authorize_resource(opts)
        |> handle_unauthorized(opts)

      false ->
        conn
    end
  end

  defp _authorize_resource(conn, opts) do
    authentication = NcsaHmac.Authentication.authenticate!(conn, opts)

    case authentication do
      {:ok, true} ->
        conn
        |> Plug.Conn.assign(:authorized, true)
        |> purge_resource(opts)

      {:error, message} ->
        conn
        |> Plug.Conn.assign(:authorized, false)
        |> Plug.Conn.assign(:error_message, message)
        |> purge_resource(opts)
    end
  end

  @doc """
  Authorize the given resource and then load it if authorization succeeds.

  If the resource cannot be loaded or authorization fails,
  conn.assigns.resource_name is set to nil.

  The result of the authorization (true/false) is assigned to
  conn.assigns.authorized.

  After authorization, conn.assigns.resource_name is set to nil.

  Also, see the documentation for load_resource/2 and authorize_resource/2.

  Required opts:

  * `:model` - Specifies the module name of the model to load resources from

  Optional opts:

  * `:as` - Specifies the `resource_name` to use
  * `:only` - Specifies which actions to authorize
  * `:except` - Specifies which actions for which to skip authorization
  * `:preload` - Specifies association(s) to preload
  * `:id_name` - Specifies the name of the id in `conn.params`, defaults to "id"
  * `:id_field` - Specifies the name of the ID field in the database for searching :id_name value, defaults to "id".
  * `:unauthorized_handler` - Specify a handler function to be called if the action is unauthorized
  * `:not_found_handler` - Specify a handler function to be called if the resource is not found

  Note: If both an `:unauthorized_handler` and a `:not_found_handler` are
  specified for `load_and_authorize_resource`, and the request meets the criteria
  for both, the `:unauthorized_handler` will be called first.

  Examples:

  ```
  plug :load_and_authorize_resource, model: Post
  plug :load_and_authorize_resource, model: User, preload: :posts, as: :the_user
  plug :load_and_authorize_resource, model: User, only: [:index, :show], preload: :posts, as: :person
  plug :load_and_authorize_resource, model: User, except: [:destroy]
  plug :load_and_authorize_resource, model: Post, id_name: "slug", id_field: "slug", only: [:show], persisted: true
  ```
  """
  def load_and_authorize_resource(conn, opts) do
    conn
    |> action_valid?(opts)
    |> case do
      true -> _load_authorize_and_purge_resource(conn, opts)
      false -> conn
    end
  end

  defp _load_authorize_and_purge_resource(conn, opts) do
    conn
    |> load_resource(opts)
    |> authorize_resource(opts)
    |> purge_resource(opts)
  end

  defp purge_resource(conn, opts),
    do: %{conn | assigns: Map.put(conn.assigns, resource_name(opts), nil)}

  defp fetch_resource(conn, opts) do
    repo = opts[:repo] || Application.get_env(:ncsa_hmac, :repo)
    map_args = get_map_args(conn, opts)

    conn.assigns
    |> Map.fetch(resource_name(opts))
    # check if a resource is already loaded at the key
    |> case do
      :error ->
        repo.get_by(opts[:model], map_args)

      {:ok, nil} ->
        repo.get_by(opts[:model], map_args)

      {:ok, resource} ->
        case resource.__struct__ == opts[:model] do
          true ->
            # A resource of the type passed as opts[:model] is already loaded; do not clobber it
            resource

          false ->
            repo.get_by(opts[:model], map_args)
        end
    end
  end

  defp get_map_args(conn, opts) do
    field_name = opts[:id_field] || "id"
    resource_id = get_resource_id(conn, opts)

    resource =
      case resource_id do
        nil -> NcsaHmac.Authentication.auth_id(conn)
        _ -> resource_id
      end

    %{String.to_atom(field_name) => resource}
  end

  defp get_resource_id(conn, opts) do
    case opts[:id_name] do
      nil -> conn.params["id"]
      id_name -> conn.params[id_name]
    end
  end

  defp get_action(conn) do
    conn.assigns
    |> Map.fetch(:ncsa_hmac_action)
    |> case do
      {:ok, action} -> action
      _ -> conn.private.phoenix_action
    end
  end

  defp action_exempt?(conn, opts) do
    action = get_action(conn)

    (is_list(opts[:except]) && action in opts[:except])
    |> case do
      true -> true
      false -> action == opts[:except]
    end
  end

  defp action_included?(conn, opts) do
    action = get_action(conn)

    (is_list(opts[:only]) && action in opts[:only])
    |> case do
      true -> true
      false -> action == opts[:only]
    end
  end

  defp action_valid?(conn, opts) do
    cond do
      has_key?(opts, :except) && has_key?(opts, :only) -> false
      has_key?(opts, :except) -> !action_exempt?(conn, opts)
      has_key?(opts, :only) -> action_included?(conn, opts)
      true -> true
    end
  end

  def resource_name(opts) do
    case opts[:as] do
      nil ->
        opts[:model]
        |> Module.split()
        |> List.last()
        |> Macro.underscore()
        |> String.to_atom()

      as ->
        as
    end
  end

  defp handle_unauthorized(conn = %{assigns: %{authorized: true}}, _opts), do: conn

  defp handle_unauthorized(conn = %{assigns: %{authorized: false}}, opts),
    do: apply_error_handler(conn, :unauthorized_handler, opts)

  defp handle_not_found(conn, opts) do
    action = get_action(conn)

    case is_nil(Map.get(conn.assigns, resource_name(opts))) and
           action not in [:index, :new, :create] do
      true -> apply_error_handler(conn, :not_found_handler, opts)
      false -> conn
    end
  end

  defp apply_error_handler(conn, handler_key, opts) do
    handler = Keyword.get(opts, handler_key) || Application.get_env(:ncsa_hmac, handler_key)

    case handler do
      {mod, fun} -> apply(mod, fun, [conn])
      nil -> conn
    end
  end
end
lib/ncsa_hmac/plug.ex
0.810929
0.884688
plug.ex
starcoder
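The handler hooks in the moduledoc are plain `{module, function}` pairs that receive the `conn`. A sketch of one, matching the `{Helpers, :handle_unauthorized}` config shown above (the module name and response shape are illustrative):

```elixir
defmodule Helpers do
  import Plug.Conn

  # Called by apply_error_handler/3 with the conn when authorization fails.
  def handle_unauthorized(conn) do
    conn
    |> put_resp_content_type("application/json")
    |> send_resp(401, ~s({"error":"unauthorized"}))
    |> halt()
  end
end
```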
defmodule Membrane.Core.Child.PadModel do
  @moduledoc false

  # Utility functions for verifying and manipulating pads and their data.

  use Bunch

  alias Membrane.Core.Child
  alias Membrane.{Pad, UnknownPadError}

  @type bin_pad_data_t :: %Membrane.Bin.PadData{
          ref: Membrane.Pad.ref_t(),
          options: Membrane.ParentSpec.pad_options_t(),
          link_id: Membrane.Core.Parent.ChildLifeController.LinkHandler.link_id_t(),
          endpoint: Membrane.Core.Parent.Link.Endpoint.t(),
          linked?: boolean(),
          response_received?: boolean(),
          spec_ref: Membrane.Core.Parent.ChildLifeController.spec_ref_t(),
          accepted_caps: Membrane.Caps.Matcher.caps_specs_t(),
          availability: Pad.availability_t(),
          direction: Pad.direction_t(),
          mode: Pad.mode_t(),
          name: Pad.name_t(),
          demand_unit: Membrane.Buffer.Metric.unit_t() | nil
        }

  @type element_pad_data_t :: %Membrane.Element.PadData{
          accepted_caps: Membrane.Caps.Matcher.caps_specs_t(),
          availability: Pad.availability_t(),
          caps: Membrane.Caps.t() | nil,
          demand: integer() | nil,
          start_of_stream?: boolean(),
          end_of_stream?: boolean(),
          direction: Pad.direction_t(),
          mode: Pad.mode_t(),
          name: Pad.name_t(),
          ref: Pad.ref_t(),
          demand_unit: Membrane.Buffer.Metric.unit_t() | nil,
          other_demand_unit: Membrane.Buffer.Metric.unit_t() | nil,
          pid: pid,
          other_ref: Pad.ref_t(),
          sticky_messages: [Membrane.Event.t()],
          input_queue: Membrane.Core.Element.InputQueue.t() | nil,
          options: %{optional(atom) => any},
          toilet: Membrane.Core.Element.Toilet.t() | nil,
          demand_mode: :auto | :manual | nil,
          auto_demand_size: pos_integer() | nil,
          associated_pads: [Pad.ref_t()] | nil
        }

  @type pad_data_t :: bin_pad_data_t | element_pad_data_t

  @type pads_data_t :: %{Pad.ref_t() => pad_data_t}

  @type pad_info_t :: %{
          required(:accepted_caps) => any,
          required(:availability) => Pad.availability_t(),
          required(:direction) => Pad.direction_t(),
          required(:mode) => Pad.mode_t(),
          required(:name) => Pad.name_t(),
          optional(:demand_unit) => Membrane.Buffer.Metric.unit_t(),
          optional(:other_demand_unit) => Membrane.Buffer.Metric.unit_t(),
          optional(:demand_mode) => :auto | :manual
        }

  @type pads_info_t :: %{Pad.name_t() => pad_info_t}

  @spec assert_instance(Child.state_t(), Pad.ref_t()) :: :ok | {:error, :unknown_pad}
  def assert_instance(%{pads_data: data}, pad_ref) when is_map_key(data, pad_ref), do: :ok
  def assert_instance(_state, _pad_ref), do: {:error, :unknown_pad}

  @spec assert_instance!(Child.state_t(), Pad.ref_t()) :: :ok
  def assert_instance!(state, pad_ref) do
    :ok = assert_instance(state, pad_ref)
  end

  defmacro assert_data(state, pad_ref, pattern) do
    quote do
      use Bunch

      withl get: {:ok, data} <- unquote(__MODULE__).get_data(unquote(state), unquote(pad_ref)),
            match: unquote(pattern) <- data do
        :ok
      else
        get: {:error, :unknown_pad} -> {:error, :unknown_pad}
        match: _data -> {:error, :no_match}
      end
    end
  end

  defmacro assert_data!(state, pad_ref, pattern) do
    quote do
      pad_ref = unquote(pad_ref)
      state = unquote(state)

      case unquote(__MODULE__).get_data!(state, pad_ref) do
        unquote(pattern) ->
          :ok

        data ->
          raise Membrane.PadError, """
          Assertion on data of the pad #{inspect(pad_ref)} failed, pattern:
          #{unquote(Macro.to_string(pattern))}
          Pad data: #{inspect(data, pretty: true)}
          """
      end
    end
  end

  @spec filter_refs_by_data(Child.state_t(), constraints :: map) :: [Pad.ref_t()]
  def filter_refs_by_data(state, constraints \\ %{})

  def filter_refs_by_data(state, constraints) when constraints == %{} do
    state.pads_data |> Map.keys()
  end

  def filter_refs_by_data(state, constraints) do
    state.pads_data
    |> Enum.filter(fn {_name, data} -> data |> constraints_met?(constraints) end)
    |>
Keyword.keys() end @spec filter_data(Child.state_t(), constraints :: map) :: %{atom => pad_data_t} def filter_data(state, constraints \\ %{}) def filter_data(state, constraints) when constraints == %{} do state.pads_data end def filter_data(state, constraints) do state.pads_data |> Enum.filter(fn {_name, data} -> data |> constraints_met?(constraints) end) |> Map.new() end # TODO: leave the main branch only when we stop supporting elixir prior 1.14 if Version.match?(System.version(), ">= 1.14.0-dev") do alias Membrane.Core.Helper.FastMap require FastMap defmacro get_data(state, pad_ref, keys \\ []) do keys = Bunch.listify(keys) pad_data_var = Macro.unique_var(:pad_data, __MODULE__) quote do pad_ref_var = unquote(pad_ref) case unquote(state) do %{pads_data: %{^pad_ref_var => unquote(pad_data_var)}} -> {:ok, unquote(FastMap.generate_get_in!(pad_data_var, keys))} _state -> {:error, :unknown_pad} end end end defmacro get_data!(state, pad_ref, keys \\ []) do keys = Bunch.listify(keys) FastMap.generate_get_in!(state, [:pads_data, pad_ref] ++ keys) |> wrap_with_reraise(pad_ref, state) end defmacro set_data!(state, pad_ref, keys \\ [], value) do keys = Bunch.listify(keys) FastMap.generate_set_in!(state, [:pads_data, pad_ref] ++ keys, value) |> wrap_with_reraise(pad_ref, state) end defmacro set_data(state, pad_ref, keys \\ [], value) do keys = Bunch.listify(keys) {:ok, FastMap.generate_set_in!(state, [:pads_data, pad_ref] ++ keys, value)} |> wrap_with_pad_check(pad_ref, state) end defmacro update_data!(state, pad_ref, keys \\ [], f) do keys = Bunch.listify(keys) FastMap.generate_update_in!(state, [:pads_data, pad_ref] ++ keys, f) |> wrap_with_reraise(pad_ref, state) end defmacro update_data(state, pad_ref, keys \\ [], f) do keys = Bunch.listify(keys) FastMap.generate_get_and_update_in!(state, [:pads_data, pad_ref] ++ keys, f) |> wrap_with_pad_check(pad_ref, state) end defmacro get_and_update_data!(state, pad_ref, keys \\ [], f) do keys = Bunch.listify(keys) FastMap.generate_get_and_update_in!(state, [:pads_data, pad_ref] ++ keys, f) |> wrap_with_reraise(pad_ref, state) end defmacro get_and_update_data(state, pad_ref, keys \\ [], f) do FastMap.generate_get_and_update_in!(state, [:pads_data, pad_ref] ++ keys, f) |> wrap_with_pad_check(pad_ref, state) end else @spec get_data(Child.state_t(), Pad.ref_t()) :: {:ok, pad_data_t() | any} | {:error, :unknown_pad} def get_data(%{pads_data: data}, pad_ref) do case Map.fetch(data, pad_ref) do {:ok, pad_data} -> {:ok, pad_data} :error -> {:error, :unknown_pad} end end @spec get_data(Child.state_t(), Pad.ref_t(), keys :: atom | [atom]) :: {:ok, pad_data_t | any} | {:error, :unknown_pad} def get_data(%{pads_data: data}, pad_ref, keys) when is_map_key(data, pad_ref) and is_list(keys) do data |> get_in([pad_ref | keys]) ~> {:ok, &1} end def get_data(%{pads_data: data}, pad_ref, key) when is_map_key(data, pad_ref) and is_atom(key) do data |> get_in([pad_ref, key]) ~> {:ok, &1} end def get_data(_state, _pad_ref, _keys), do: {:error, :unknown_pad} @spec get_data!(Child.state_t(), Pad.ref_t()) :: pad_data_t | any def get_data!(state, pad_ref) do {:ok, pad_data} = get_data(state, pad_ref) pad_data end @spec get_data!(Child.state_t(), Pad.ref_t(), keys :: atom | [atom]) :: pad_data_t | any def get_data!(state, pad_ref, keys) do {:ok, pad_data} = get_data(state, pad_ref, keys) pad_data end @spec set_data(Child.state_t(), Pad.ref_t(), keys :: atom | [atom], value :: term()) :: Bunch.Type.stateful_t(:ok | {:error, :unknown_pad}, Child.state_t()) def set_data(state, pad_ref, 
keys \\ [], value) do case assert_instance(state, pad_ref) do :ok -> put_in(state, data_keys(pad_ref, keys), value) ~> {:ok, &1} {:error, reason} -> {{:error, reason}, state} end end @spec set_data!(Child.state_t(), Pad.ref_t(), keys :: atom | [atom], value :: term()) :: Child.state_t() def set_data!(state, pad_ref, keys \\ [], value) do {:ok, state} = set_data(state, pad_ref, keys, value) state end @spec update_data( Child.state_t(), Pad.ref_t(), keys :: atom | [atom], (data -> {:ok | error, data}) ) :: Bunch.Type.stateful_t(:ok | error | {:error, :unknown_pad}, Child.state_t()) when data: pad_data_t | any, error: {:error, reason :: any} def update_data(state, pad_ref, keys \\ [], f) do case assert_instance(state, pad_ref) do :ok -> state |> get_and_update_in(data_keys(pad_ref, keys), f) {:error, reason} -> {{:error, reason}, state} end end @spec update_data!(Child.state_t(), Pad.ref_t(), keys :: atom | [atom], (data -> data)) :: Child.state_t() when data: pad_data_t | any def update_data!(state, pad_ref, keys \\ [], f) do :ok = assert_instance(state, pad_ref) state |> update_in(data_keys(pad_ref, keys), f) end @spec get_and_update_data( Child.state_t(), Pad.ref_t(), keys :: atom | [atom], (data -> {success | error, data}) ) :: Bunch.Type.stateful_t(success | error | {:error, :unknown_pad}, Child.state_t()) when data: pad_data_t | any, success: {:ok, data}, error: {:error, reason :: any} def get_and_update_data(state, pad_ref, keys \\ [], f) do case assert_instance(state, pad_ref) do :ok -> state |> get_and_update_in(data_keys(pad_ref, keys), f) {:error, reason} -> {{:error, reason}, state} end end @spec get_and_update_data!( Child.state_t(), Pad.ref_t(), keys :: atom | [atom], (data -> {data, data}) ) :: Bunch.Type.stateful_t(data, Child.state_t()) when data: pad_data_t | any def get_and_update_data!(state, pad_ref, keys \\ [], f) do :ok = assert_instance(state, pad_ref) state |> get_and_update_in(data_keys(pad_ref, keys), f) end @spec data_keys(Pad.ref_t(), keys :: atom | [atom]) :: [atom] @compile {:inline, data_keys: 2} defp data_keys(pad_ref, keys) defp data_keys(pad_ref, keys) when is_list(keys) do [:pads_data, pad_ref | keys] end defp data_keys(pad_ref, key) do [:pads_data, pad_ref, key] end end @spec pop_data(Child.state_t(), Pad.ref_t()) :: {{:ok, pad_data_t} | {:error, :unknown_pad}, Child.state_t()} def pop_data(state, pad_ref) do with :ok <- assert_instance(state, pad_ref) do {data, state} = pop_in(state, [:pads_data, pad_ref]) {{:ok, data}, state} else {:error, :unknown_pad} -> {{:error, :unknown_pad}, state} end end @spec pop_data!(Child.state_t(), Pad.ref_t()) :: {pad_data_t, Child.state_t()} def pop_data!(state, pad_ref) do case pop_data(state, pad_ref) do {{:ok, pad_data}, state} -> {pad_data, state} {{:error, :unknown_pad}, state} -> raise UnknownPadError, pad: pad_ref, module: state.module end end @spec delete_data(Child.state_t(), Pad.ref_t()) :: {:ok | {:error, :unknown_pad}, Child.state_t()} def delete_data(state, pad_ref) do with {{:ok, _out}, state} <- pop_data(state, pad_ref) do {:ok, state} end end @spec delete_data!(Child.state_t(), Pad.ref_t()) :: Child.state_t() def delete_data!(state, pad_ref) do {_data, state} = pop_data!(state, pad_ref) state end @spec constraints_met?(pad_data_t, map) :: boolean defp constraints_met?(data, constraints) do constraints |> Enum.all?(fn {k, v} -> data[k] === v end) end defp wrap_with_pad_check(code, pad_ref, state) do quote do pad_ref_var = unquote(pad_ref) case unquote(state) do %{pads_data: %{^pad_ref_var => _pad_data}} -> 
unquote(code) state -> {{:error, :unknown_pad}, state} end end end defp wrap_with_reraise(code, pad_ref, state) do quote do try do unquote(code) rescue e in MatchError -> pad_ref = unquote(pad_ref) state = unquote(state) case unquote(__MODULE__).assert_instance(state, pad_ref) do :ok -> reraise e, __STACKTRACE__ {:error, :unknown_pad} -> reraise UnknownPadError, [pad: pad_ref, module: state.module], __STACKTRACE__ end end end end end
lib/membrane/core/child/pad_model.ex
0.767298
0.489198
pad_model.ex
starcoder
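PadModel is internal to Membrane Core, so the snippet below is illustrative only. It assumes `state` is a child state carrying a `:pads_data` map and `pad_ref` an existing pad; note that on Elixir >= 1.14 these helpers compile as macros (the FastMap branch above), so callers must `require` the module first:

```elixir
require Membrane.Core.Child.PadModel, as: PadModel

# Read one field of the pad's data, then write it back updated.
{:ok, demand} = PadModel.get_data(state, pad_ref, :demand)
state = PadModel.set_data!(state, pad_ref, :demand, demand + 10)

# Pattern assertions read the same way in both version branches.
:ok = PadModel.assert_data(state, pad_ref, %{direction: :input})
```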
defmodule Quantity do @moduledoc """ A data structure that encapsulates a decimal value with a unit. """ alias Quantity.Math @type t :: %__MODULE__{ value: Decimal.t(), unit: unit } @type unit :: base_unit | {:div | :mult, unit, unit} @type base_unit :: String.t() | 1 defstruct [ :value, :unit ] defdelegate add!(quantity_1, quantity_2), to: Math defdelegate add(quantity_1, quantity_2), to: Math defdelegate div(dividend, divisor), to: Math defdelegate inverse(quantity), to: Math defdelegate mult(quantity, quantity_or_scalar), to: Math defdelegate round(quantity, decimals), to: Math defdelegate sub!(quantity_1, quantity_2), to: Math defdelegate sub(quantity_1, quantity_2), to: Math defdelegate sum!(quantities), to: Math defdelegate sum!(quantities, exp, unit), to: Math defdelegate sum(quantities), to: Math defdelegate sum(quantities, exp, unit), to: Math @doc """ Builds a new Quantity from a Decimal and a unit """ @spec new(Decimal.t(), unit) :: t def new(value, unit) do unit = normalize_unit(unit) %__MODULE__{ value: value, unit: unit } end @doc """ Builds a new Quantity from a base value, exponent and unit """ @spec new(integer, integer, unit) :: t def new(base_value, exponent, unit) do sign = if base_value < 0, do: -1, else: 1 positive_base_value = abs(base_value) value = Decimal.new(sign, positive_base_value, exponent) new(value, unit) end @doc """ Parses a string representation of a quantity (perhaps generated with to_string/1) iex> Quantity.parse("99.0 red_balloons") {:ok, Quantity.new(~d[99.0], "red_balloons")} iex> Quantity.parse("15 bananas/monkey") {:ok, Quantity.new(~d[15], {:div, "bananas", "monkey"})} iex> Quantity.parse("15 m*m") {:ok, Quantity.new(~d[15], {:mult, "m", "m"})} iex> Quantity.parse("bogus") :error """ @spec parse(String.t()) :: {:ok, t} | :error def parse(input) do with {:ok, value_string, unit_string} <- parse_split_value_and_unit(input), {value, ""} <- Decimal.parse(value_string) do unit = parse_unit(unit_string) {:ok, new(value, unit)} else _ -> :error end end defp parse_split_value_and_unit(input) do case String.split(input, " ", parts: 2) do [value] -> {:ok, value, "1"} [value, unit] -> {:ok, value, unit} _ -> :error end end defp parse_unit(unit_string) do if unit_string =~ "/" do [:div | unit_string |> String.split("/", parts: 2) |> Enum.map(&parse_mult_unit/1)] |> List.to_tuple() else parse_mult_unit(unit_string) end end defp parse_mult_unit(unit_string) do unit_string |> String.split("*") |> Enum.map(&parse_base_unit/1) |> Enum.reduce(&{:mult, &1, &2}) end defp parse_base_unit("1"), do: 1 defp parse_base_unit(unit_string), do: unit_string @doc """ Same as parse/1, but raises if it could not parse """ @spec parse!(String.t()) :: t def parse!(input) do {:ok, quantity} = parse(input) quantity end @doc """ Encodes the quantity as a string. 

  The result is parsable with parse/1.

  If the exponent is positive, encode using the "raw" format to preserve precision

  iex> Quantity.new(42, -1, "db") |> Quantity.to_string()
  "4.2 db"

  iex> Quantity.new(42, 1, "db") |> Quantity.to_string()
  "42E1 db"

  iex> Quantity.new(~d[3600], {:div, "seconds", "hour"}) |> Quantity.to_string()
  "3600 seconds/hour"

  iex> Quantity.new(~d[34], {:mult, "m", "m"}) |> Quantity.to_string()
  "34 m*m"
  """
  @spec to_string(t) :: String.t()
  def to_string(quantity) do
    decimal_string = decimal_to_string(quantity.value)

    unit_string =
      case quantity.unit do
        1 -> ""
        unit -> " #{unit_to_string(unit)}"
      end

    "#{decimal_string}#{unit_string}"
  end

  defp unit_to_string(1), do: "1"
  defp unit_to_string(unit) when is_binary(unit), do: unit
  defp unit_to_string({:div, a, b}), do: "#{unit_to_string(a)}/#{unit_to_string(b)}"
  defp unit_to_string({:mult, a, b}), do: "#{unit_to_string(a)}*#{unit_to_string(b)}"

  @doc """
  Encodes a decimal as string. Uses either :raw (E-notation) or :normal based on
  exponent, so that precision is not lost

  iex> Quantity.decimal_to_string(~d[1.234])
  "1.234"

  iex> Quantity.decimal_to_string(~d[1E3])
  "1E3"
  """
  @spec decimal_to_string(Decimal.t()) :: String.t()
  def decimal_to_string(%Decimal{} = decimal) do
    if decimal.exp > 0 do
      Decimal.to_string(decimal, :raw)
    else
      Decimal.to_string(decimal, :normal)
    end
  end

  @doc """
  Tests if a quantity has zero value

  iex> Quantity.zero?(~Q[0.00 m^2])
  true

  iex> Quantity.zero?(~Q[0E7 m^2])
  true

  iex> Quantity.zero?(~Q[10 m^2])
  false
  """
  @spec zero?(t) :: boolean
  def zero?(quantity), do: quantity.value.coef == 0

  @doc """
  Test whether a Quantity is negative

  iex> ~Q[100.00 DKK] |> Quantity.negative?()
  false

  iex> ~Q[0.00 DKK] |> Quantity.negative?()
  false

  iex> ~Q[-1.93 DKK] |> Quantity.negative?()
  true
  """
  @spec negative?(t) :: boolean()
  def negative?(%{value: value}), do: Decimal.negative?(value)

  @doc """
  Test whether a Quantity is positive

  iex> ~Q[100.00 DKK] |> Quantity.positive?()
  true

  iex> ~Q[0.00 DKK] |> Quantity.positive?()
  false

  iex> ~Q[-1.93 DKK] |> Quantity.positive?()
  false
  """
  @spec positive?(t) :: boolean()
  def positive?(%{value: value}), do: Decimal.positive?(value)

  @doc """
  Returns true if the two quantities are numerically equal

  iex> Quantity.equals?(~Q[5 bananas], ~Q[5.0 bananas])
  true

  iex> Quantity.equals?(~Q[5 bananas], ~Q[5 apples])
  false
  """
  @spec equals?(t, t) :: boolean
  def equals?(q1, q2) do
    reduce(q1) == reduce(q2)
  end

  @doc """
  Reduces the value to the largest possible exponent without altering the numerical value

  iex> Quantity.reduce(~Q[1.200 m])
  ~Q[1.2 m]
  """
  @spec reduce(t) :: t
  def reduce(quantity) do
    %{quantity | value: Decimal.normalize(quantity.value)}
  end

  @doc """
  Return a quantity with a zero value and the same unit and precision as another Quantity

  iex> ~Q[123.99 EUR] |> Quantity.to_zero()
  ~Q[0.00 EUR]

  iex> ~Q[1 person] |> Quantity.to_zero()
  ~Q[0 person]

  iex> ~Q[-123 seconds] |> Quantity.to_zero()
  ~Q[0 seconds]
  """
  @spec to_zero(t) :: t
  def to_zero(%{unit: unit, value: %Decimal{exp: exp}}), do: Quantity.new(0, exp, unit)

  @doc """
  Converts the quantity to have a new unit. The new unit must be a whole
  10-exponent more or less than the original unit. The exponent given is the
  difference in exponents (new-exponent - old-exponent).
For example when converting from kWh to MWh: 6 (MWh) - 3 (kWh) = 3 iex> ~Q[1234E3 Wh] |> Quantity.convert_unit("MWh", 6) ~Q[1.234 MWh] iex> ~Q[25.2 m] |> Quantity.convert_unit("mm", -3) ~Q[252E2 mm] """ @spec convert_unit(t, String.t(), integer) :: t def convert_unit(quantity, new_unit, exponent) do new(Decimal.new(quantity.value.sign, quantity.value.coef, quantity.value.exp - exponent), new_unit) end @doc """ Compares two quantities with the same unit numerically iex> Quantity.compare(~Q[1.00 m], ~Q[2.00 m]) :lt iex> Quantity.compare(~Q[1.00 m], ~Q[1 m]) :eq iex> Quantity.compare(~Q[3.00 m], ~Q[2.9999999 m]) :gt """ @spec compare(t, t) :: :lt | :eq | :gt def compare(%{unit: unit} = q1, %{unit: unit} = q2) do Decimal.compare(q1.value, q2.value) end @doc """ Extracts the base value from the quantity """ @spec base_value(t) :: integer def base_value(quantity), do: quantity.value.coef * quantity.value.sign @doc """ Extracts the exponent from the quantity """ @spec exponent(t) :: integer def exponent(quantity), do: quantity.value.exp @doc """ Converts a 1-unit quantity to a decimal. If the quantity does not represent a decimal (a unit other than 1) it fails. iex> Quantity.to_decimal!(~Q[42]) ~d[42] """ @spec to_decimal!(t) :: Decimal.t() def to_decimal!(%{unit: 1} = quantity), do: quantity.value @doc """ Extracts the unit from the quantity """ @spec unit(t) :: unit def unit(quantity), do: quantity.unit defimpl String.Chars, for: __MODULE__ do def to_string(quantity) do @for.to_string(quantity) end end defimpl Inspect, for: __MODULE__ do def inspect(quantity, _options) do "~Q[#{@for.to_string(quantity)}]" end end # Normalizes unit to a standard form: # * Shorten unit as much as possible # * At most one :div (with possibly many :mults on each side) # * All :mult units are sorted # * Extra 1-units are removed defp normalize_unit(unit) do [numerators, denominators] = unit |> isolate_units([[], []]) |> shorten() |> Enum.map(&Enum.sort/1) case {numerators, denominators} do {[], []} -> 1 {nums, []} -> reduce_mults(nums) {[], dens} -> {:div, 1, reduce_mults(dens)} {nums, dens} -> {:div, reduce_mults(nums), reduce_mults(dens)} end end defp reduce_mults(units) do units |> Enum.reverse() |> Enum.reduce(&{:mult, &1, &2}) end # Remove common elements in numerator and denominator defp shorten([numerators, denominators]) do [numerators, denominators] = [numerators, denominators] # Can be replaced with Enum.frequencies/1 when we no longer support Elixir 1.9 |> Enum.map(fn list -> list |> Enum.group_by(& &1) |> Enum.into(%{}, fn {unit, count_list} -> {unit, length(count_list)} end) end) numerators |> Map.keys() |> Enum.reduce([numerators, denominators], fn key, [num, den] -> common = min(Map.fetch!(num, key), Map.get(den, key, 0)) num = Map.update!(num, key, &(&1 - common)) den = Map.update(den, key, 0, &(&1 - common)) [num, den] end) |> Enum.map(fn map -> map |> Enum.flat_map(fn {key, count} -> List.duplicate(key, count) end) end) end # Splits units in numerators and denominators, so they are of the form (a * b * ...) / (c * d * ...) defp isolate_units({:div, a, b}, [acc_n, acc_d]) do [acc_n, acc_d] = isolate_units(a, [acc_n, acc_d]) [acc_d, acc_n] = isolate_units(b, [acc_d, acc_n]) [acc_n, acc_d] end defp isolate_units({:mult, a, b}, acc), do: Enum.reduce([a, b], acc, &isolate_units/2) defp isolate_units(a, [acc_n, acc_d]) when is_binary(a), do: [[a | acc_n], acc_d] defp isolate_units(1, acc), do: acc end
lib/quantity.ex
0.888463
0.753557
quantity.ex
starcoder
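A few calls strung together from the doctests above (the `~Q` and `~d` sigils come from this library and must be imported; everything else is plain `Quantity`):

```elixir
a = Quantity.parse!("1.50 m")
b = Quantity.parse!("2 m")

Quantity.compare(a, b)   #=> :lt
Quantity.positive?(a)    #=> true
Quantity.unit(a)         #=> "m"
Quantity.to_string(b)    #=> "2 m"
```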
defmodule PlugSessionRedis.Store do
  @moduledoc """
  To configure and install, add code like the following to your plug pipeline:

  ```
  plug Plug.Session,
    store: PlugSessionRedis.Store,
    key: "_my_app_key",            # Cookie name where the id is stored
    table: :redis_sessions,        # Name of poolboy queue, make up anything
    signing_salt: "<PASSWORD>",    # Keep this private
    encryption_salt: "<PASSWORD>", # Keep this private
    ttl: 360,                      # Optional, defaults to :infinite
    serializer: CustomSerializer,  # Optional, defaults to `PlugSessionRedis.BinaryEncoder`
    path: &MyPath.path_for_sid/1   # Optional, defaults to the passed in session id only
  ```

  Custom serializers provide a way to encode and decode the data stored in Redis,
  for example when integrating with a legacy system. You provide the module name
  that implements `encode/1`, `encode!/1`, `decode/1`, and `decode!/1`, which is
  called by `Plug` when fetching and storing the session state back.

  The path function dictates under what key within Redis the session is stored.
  You will be passed a single session ID binary and expected to return another
  binary indicating where the key should be stored/fetched from. This allows you
  to store data under a nested key so that other data can be stored within the
  same database. (i.e. the key can become "sessions:" <> id instead)
  """
  alias PlugSessionRedis.BinaryEncoder

  @behaviour Plug.Session.Store

  def init(opts) do
    {
      Keyword.fetch!(opts, :table),
      Keyword.get(opts, :ttl, :infinite),
      Keyword.get(opts, :serializer, BinaryEncoder),
      Keyword.get(opts, :path, &__MODULE__.path/1)
    }
  end

  def get(_conn, sid, {table, _, serializer, path}) do
    case :poolboy.transaction(table, fn client ->
           :redo.cmd(client, ["GET", path.(sid)])
         end) do
      :undefined -> {nil, %{}}
      data -> {sid, serializer.decode!(data)}
    end
  end

  def put(_conn, nil, data, state) do
    put_new(data, state)
  end

  def put(_conn, sid, data, {table, _, serializer, path}) do
    :poolboy.transaction(table, fn client ->
      :redo.cmd(client, ["SET", path.(sid), serializer.encode!(data)])
    end)

    sid
  end

  def delete(_conn, sid, {table, _, _, path}) do
    :poolboy.transaction(table, fn client ->
      :redo.cmd(client, ["DEL", path.(sid)])
    end)

    :ok
  end

  def path(sid), do: sid

  @max_tries 5

  defp put_new(data, {table, ttl, serializer, path}, counter \\ 0) when counter < @max_tries do
    sid = :crypto.strong_rand_bytes(96) |> Base.encode64()

    case :poolboy.transaction(table, fn client ->
           store_data_with_ttl(client, ttl, path.(sid), serializer.encode!(data))
         end) do
      "OK" -> sid
      _ -> put_new(data, {table, ttl, serializer, path}, counter + 1)
    end
  end

  defp store_data_with_ttl(client, :infinite, sid, bin) do
    :redo.cmd(client, ["SET", sid, bin])
  end

  defp store_data_with_ttl(client, ttl, sid, bin) do
    [ret, _] = :redo.cmd(client, [["SET", sid, bin], ["EXPIRE", sid, ttl]])
    ret
  end
end
lib/plug_session_redis/store.ex
0.819533
0.716293
store.ex
starcoder
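A sketch of the optional `:path` hook from the moduledoc, namespacing session keys so other data can share the same Redis database (the module and function names are the illustrative ones used above):

```elixir
defmodule MyPath do
  # Receives the session id binary, returns the Redis key to use.
  def path_for_sid(sid), do: "sessions:" <> sid
end
```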
defmodule Timex.Parse.DateTime.Parser do @moduledoc """ This is the base plugin behavior for all Timex date/time string parsers. """ import Combine.Parsers.Base, only: [eof: 0, sequence: 1, map: 2, pipe: 2] alias Timex.Date alias Timex.Time alias Timex.DateTime alias Timex.TimezoneInfo alias Timex.Timezone alias Timex.Parse.ParseError alias Timex.Parse.DateTime.Tokenizers.Directive alias Timex.Parse.DateTime.Tokenizers.Default, as: DefaultTokenizer alias Timex.Date.Convert, as: DateConvert @doc """ Parses a date/time string using the default parser. ## Examples iex> use Timex ...> {:ok, dt} = #{__MODULE__}.parse("2014-07-29T00:20:41.196Z", "{ISOz}") ...> dt.year 2014 iex> dt.month 7 iex> dt.day 29 iex> dt.timezone.full_name "UTC" """ @spec parse(binary, binary) :: {:ok, %DateTime{}} | {:error, term} def parse(date_string, format_string) when is_binary(date_string) and is_binary(format_string), do: parse(date_string, format_string, DefaultTokenizer) @doc """ Parses a date/time string using the provided tokenizer. Tokenizers must implement the `Timex.Parse.DateTime.Tokenizer` behaviour. ## Examples iex> use Timex ...> {:ok, dt} = #{__MODULE__}.parse("2014-07-29T00:30:41.196-0200", "{ISO}", Timex.Parse.DateTime.Tokenizers.Default) ...> dt.year 2014 iex> dt.month 7 iex> dt.day 29 iex> dt.timezone.full_name "Etc/GMT+2" """ @spec parse(binary, binary, atom) :: {:ok, %DateTime{}} | {:error, term} def parse(date_string, format_string, tokenizer) when is_binary(date_string) and is_binary(format_string) do case tokenizer.tokenize(format_string) do {:error, err} -> {:error, {:format, err}} {:ok, []} -> {:error, "There were no parsing directives in the provided format string."} {:ok, directives} -> case date_string do "" -> {:error, "Input datetime string cannot be empty."} _ -> do_parse(date_string, directives, tokenizer) end end end @doc """ Same as `parse/2` and `parse/3`, but raises on error. 
""" @spec parse!(String.t, String.t, atom | nil) :: %DateTime{} | no_return def parse!(date_string, format_string, tokenizer \\ DefaultTokenizer) when is_binary(date_string) and is_binary(format_string) and is_atom(tokenizer) do case parse(date_string, format_string, tokenizer) do {:ok, result} -> result {:error, reason} -> raise ParseError, message: reason end end defp do_parse(str, directives, tokenizer) do parsers = directives |> Stream.map(fn %Directive{weight: weight, parser: parser} -> map(parser, &({&1, weight})) end) |> Stream.filter(fn nil -> false; _ -> true end) |> Enum.reverse case Combine.parse(str, pipe([eof|parsers] |> Enum.reverse, &(&1))) do [results] when is_list(results) -> results |> extract_parse_results |> Stream.with_index |> Enum.sort_by(fn # If :force_utc exists, make sure it is applied last {{{:force_utc, true}, _}, _} -> 9999 # Timezones must always be applied after other date/time tokens -> {{{tz, _}, _}, _} when tz in [:zname, :zoffs, :zoffs_colon, :zoffs_sec] -> 9998 # If no weight is set, use the index as its weight {{{_token, _value}, 0}, i} -> i # Use the directive weight {{{_token, _value}, weight}, _} -> weight end) |> Stream.flat_map(fn {{token, _}, _} -> [token] end) |> Enum.filter(&Kernel.is_tuple/1) |> apply_directives(tokenizer) {:error, _} = err -> err end end defp extract_parse_results(parse_results), do: extract_parse_results(parse_results, []) defp extract_parse_results([], acc), do: Enum.reverse(acc) defp extract_parse_results([{tokens, _}|rest], acc) when is_list(tokens) do extracted = Enum.reverse(extract_parse_results(tokens)) extract_parse_results(rest, extracted ++ acc) end defp extract_parse_results([{{token, value}, weight}|rest], acc) when is_atom(token) do extract_parse_results(rest, [{{token, value}, weight}|acc]) end defp extract_parse_results([{token, value}|rest], acc) when is_atom(token) do extract_parse_results(rest, [{{token, value}, 0}|acc]) end defp extract_parse_results([[{token, value}]|rest], acc) when is_atom(token) do extract_parse_results(rest, [{{token, value}, 0}|acc]) end defp extract_parse_results([h|rest], acc) when is_list(h) do extracted = Enum.reverse(extract_parse_results(h)) extract_parse_results(rest, extracted ++ acc) end defp extract_parse_results([_|rest], acc) do extract_parse_results(rest, acc) end # Constructs a DateTime from the parsed tokens defp apply_directives([], _), do: {:ok, %DateTime{}} defp apply_directives(tokens, tokenizer), do: apply_directives(tokens, %DateTime{}, tokenizer) defp apply_directives([], %DateTime{timezone: nil} = date, tokenizer) do apply_directives([], %{date | :timezone => %TimezoneInfo{}}, tokenizer) end defp apply_directives([], %DateTime{} = date, _), do: {:ok, date} defp apply_directives([{token, value}|tokens], %DateTime{} = date, tokenizer) do case update_date(date, token, value, tokenizer) do {:error, _} = error -> error updated -> apply_directives(tokens, updated, tokenizer) end end # Given a date, a token, and the value for that token, update the # date according to the rules for that token and the provided value defp update_date(%DateTime{year: year, hour: hh} = date, token, value, tokenizer) when is_atom(token) do case token do # Formats clock when clock in [:kitchen, :strftime_iso_kitchen] -> case apply_directives(value, Date.now, tokenizer) do {:error, _} = err -> err {:ok, date} when clock == :kitchen -> %{date | :second => 0, :ms => 0} {:ok, date} -> %{date | :ms => 0} end # Years :century -> century = Date.century(%{date | :year => year}) year_shifted = year + 
((value - century) * 100) %{date | :year => year_shifted} y when y in [:year2, :iso_year2] -> current_century = Date.century(Date.now) year_shifted = value + ((current_century - 1) * 100) %{date | :year => year_shifted} y when y in [:year4, :iso_year4] -> # Special case for UNIX format dates, where the year is parsed after the timezone, # so we must lookup the timezone again to ensure it's properly set case date do %DateTime{timezone: %Timex.TimezoneInfo{:full_name => tzname}} -> zone_date = DateConvert.to_erlang_datetime(%{date | :year => value}) %{date | :year => value, :timezone => Timezone.get(tzname, zone_date)} %DateTime{timezone: nil} -> %{date | :year => value} end # Months :month -> %{date | :month => value} month when month in [:mshort, :mfull] -> %{date | :month => Date.month_to_num(value)} # Days :day -> %{date | :day => value} :oday when is_integer(value) and value >= 0 -> Date.from_iso_day(value, date) :wday_mon -> current_day = Date.weekday(date) cond do current_day == value -> date current_day > value -> Date.shift(date, days: current_day - value) current_day < value -> Date.shift(date, days: value - current_day) end :wday_sun -> current_day = Date.weekday(date) - 1 cond do current_day == value -> date current_day > value -> Date.shift(date, days: current_day - value) current_day < value -> Date.shift(date, days: value - current_day) end day when day in [:wdshort, :wdfull] -> %{date | :day => Date.day_to_num(value)} # Weeks :iso_weeknum -> {year, _, weekday} = Date.iso_triplet(date) %DateTime{year: y, month: m, day: d} = Date.from_iso_triplet({year, value, weekday}) %{date | :year => y, :month => m, :day => d} week_num when week_num in [:week_mon, :week_sun] -> reset = %{date | :month => 1, :day => 1} reset |> Date.shift(weeks: value) # Hours hour when hour in [:hour24, :hour12] -> %{date | :hour => value} :min -> %{date | :minute => value} :sec -> %{date | :second => value} :sec_fractional -> case value do "" -> date n when is_number(n) -> %{date | :ms => n} end :us -> %{date | :ms => div(value, 1000)} :sec_epoch -> Date.from(value, :secs, :epoch) am_pm when am_pm in [:am, :AM] -> {converted, hemisphere} = Time.to_12hour_clock(hh) case value do am when am in ["am", "AM"]-> %{date | :hour => converted} pm when pm in ["pm", "PM"] and hemisphere == :am -> cond do converted + 12 == 24 -> %{date | :hour => 0} :else -> %{date | :hour => converted + 12} end _ -> %{date | :hour => converted} end # Timezones :zoffs -> zone_date = DateConvert.to_erlang_datetime(date) %{date | :timezone => Timezone.get(value, zone_date)} :zname -> zone_date = DateConvert.to_erlang_datetime(date) %{date | :timezone => Timezone.get(value, zone_date)} tz when tz in [:zoffs_colon, :zoffs_sec] -> case value do <<?-, h1::utf8, h2::utf8, _::binary>> -> zone_date = DateConvert.to_erlang_datetime(date) %{date | :timezone => Timezone.get(<<?-, h1::utf8, h2::utf8>>, zone_date)} <<?+, h1::utf8, h2::utf8, _::binary>> -> zone_date = DateConvert.to_erlang_datetime(date) %{date | :timezone => Timezone.get(<<?+, h1::utf8, h2::utf8>>, zone_date)} _ -> {:error, "#{token} not implemented"} end :force_utc -> case date do %DateTime{timezone: nil} -> %{date | :timezone => %Timex.TimezoneInfo{}} _ -> Timezone.convert(date, "UTC") end :literal -> date _ -> case tokenizer.apply(date, token, value) do {:ok, date} -> date {:error, _} = err -> err _ -> {:error, "Unrecognized token: #{token}"} end end end end
lib/parse/datetime/parser.ex
0.890702
0.567337
parser.ex
starcoder
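A brief usage sketch for the parser above. The fully qualified module name and the `{YYYY}`/`{0M}`/`{0D}` directive names are assumptions drawn from Timex conventions; only `parse/3` and `parse!/3` appear in this file.

```elixir
# Hypothetical call into the parser above; directive names assumed from Timex.
# parse!/2 uses the DefaultTokenizer and raises ParseError on bad input.
datetime = Timex.Parse.DateTime.Parser.parse!("2015-06-24", "{YYYY}-{0M}-{0D}")
```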
defmodule Sue.DB.Graph do alias :mnesia, as: Mnesia alias Sue.DB alias Sue.DB.Schema.Vertex @type result() :: {:ok, any()} | {:error, any()} @edge_table :edges # Public API @doc """ Check if a Vertex exists in our graph. """ @spec exists?(Vertex.t()) :: boolean() def exists?(v) do {:ok, res} = t_exists?(v) |> DB.exec() res end @spec t_exists?(Vertex.t()) :: fun() defp t_exists?(v) do fn -> DB.t_read({Vertex.label(v), Vertex.id(v)}).() != [] end end @doc """ Check if a uni-directional edge exists between two vertices. """ @spec exists_uni_edge?( Vertex.t() | Vertex.tuple_t(), Vertex.t() | Vertex.tuple_t() ) :: boolean() def exists_uni_edge?(v1, v2) when is_tuple(v1) and is_tuple(v2) do {:ok, res} = t_exists_uni_edge?(v1, v2) |> DB.exec() res end def exists_uni_edge?(v1, v2) do exists_uni_edge?( {Vertex.label(v1), Vertex.id(v1)}, {Vertex.label(v2), Vertex.id(v2)} ) end @spec t_exists_uni_edge?(Vertex.tuple_t(), Vertex.tuple_t()) :: fun() defp t_exists_uni_edge?({src_type, src_id}, {dst_type, dst_id}) do fn -> t_match_vertex(src_type, dst_type, src_id, dst_id, :_).() != [] end end @doc """ Check if a bi-directional edge exists between two vertices. """ @spec exists_bi_edge?( Vertex.t() | Vertex.tuple_t(), Vertex.t() | Vertex.tuple_t() ) :: boolean() def exists_bi_edge?(v1, v2) when is_tuple(v1) and is_tuple(v2) do {:ok, res} = t_exists_bi_edge?(v1, v2) |> DB.exec() res end def exists_bi_edge?(v1, v2) do exists_bi_edge?( {Vertex.label(v1), Vertex.id(v1)}, {Vertex.label(v2), Vertex.id(v2)} ) end @spec t_exists_bi_edge?(Vertex.tuple_t(), Vertex.tuple_t()) :: fun() defp t_exists_bi_edge?({src_type, src_id}, {dst_type, dst_id}) do fn -> t_exists_uni_edge?({src_type, src_id}, {dst_type, dst_id}).() and t_exists_uni_edge?({dst_type, dst_id}, {src_type, src_id}).() end end @doc """ Add a uni-directional edge to the graph. """ @spec add_uni_edge(Vertex.t(), Vertex.t(), Map.t()) :: result() def add_uni_edge(v1, v2, metadata \\ %{}) do t_add_uni_edge(v1, v2, metadata) |> DB.exec() end @spec t_add_uni_edge(Vertex.t(), Vertex.t(), Map.t()) :: fun() defp t_add_uni_edge(v1, v2, metadata) do DB.t_write({ @edge_table, Vertex.label(v1), Vertex.label(v2), Vertex.id(v1), Vertex.id(v2), metadata }) end @doc """ Add a bi-directional edge to the graph. """ @spec add_bi_edge(Vertex.t(), Vertex.t(), Map.t()) :: result() def add_bi_edge(v1, v2, metadata \\ %{}) do t_add_bi_edge(v1, v2, metadata) |> DB.exec() end @spec t_add_bi_edge(Vertex.t(), Vertex.t(), Map.t()) :: fun() defp t_add_bi_edge(v1, v2, metadata \\ %{}) do fn -> t_add_uni_edge(v1, v2, metadata).() t_add_uni_edge(v2, v1, metadata).() end end @doc """ Create a vertex. """ @spec add_vertex(Vertex.t()) :: result() def add_vertex(v) do t_add_vertex(v) |> DB.exec() end @spec t_add_vertex(Vertex.t()) :: fun() def t_add_vertex(v) do DB.t_write({Vertex.label(v), Vertex.id(v), v}) end @spec upsert_vertex(Vertex.t()) :: result() def upsert_vertex(v) do t_upsert_vertex(v) |> DB.exec() end @spec t_upsert_vertex(Vertex.t()) :: fun() defp t_upsert_vertex(v) do table = Vertex.label(v) id = Vertex.id(v) fn -> case Mnesia.read({table, id}) do [] -> Mnesia.write({table, id, v}) [_record] -> :exists end end end @spec upsert_bi_edge(Vertex.t(), Vertex.t()) :: result() def upsert_bi_edge(v1, v2) do t_upsert_bi_edge(v1, v2) |> DB.exec() end @spec t_upsert_bi_edge(Vertex.t(), Vertex.t()) :: fun() defp t_upsert_bi_edge(v1, v2) do fn -> t_upsert_vertex(v1).() t_upsert_vertex(v2).() t_add_bi_edge(v1, v2, %{}).() end end # TODO: Horrid naming, tbh. 
Redo in a more generic way that lets us specify
  # the max number of edges we are allowed to associate with a vertex.
  # Preferably we'd specify that number in the schema. (In this case, max=1
  # (thus the `if_unique`))
  @spec getsert_bi_edge_if_unique(Vertex.t(), Vertex.t()) :: result()
  def getsert_bi_edge_if_unique(v1, v2) do
    t_getsert_bi_edge_if_unique(v1, v2) |> DB.exec()
  end

  @spec t_getsert_bi_edge_if_unique(Vertex.t(), Vertex.t()) :: fun()
  defp t_getsert_bi_edge_if_unique(v1, v2) do
    fn ->
      case t_upsert_vertex(v1).() do
        # v1 already exists. Check if it links with another vert of v2's type.
        :exists ->
          case t_adjacent(v1, Vertex.label(v2)).() do
            [] ->
              t_add_vertex(v2).()
              t_add_bi_edge(v1, v2).()
              # Return the newly stored vertex so every branch yields v2's value.
              v2

            [{dst_type, dst_id}] ->
              [{_table, _key, val}] = DB.t_read({dst_type, dst_id}).()
              val
          end

        # v1 doesn't exist, can't have edges.
        :ok ->
          # perform the -sert
          t_upsert_bi_edge(v1, v2).()
          # perform the get-
          v2
      end
    end
  end

  @doc """
  Get 1-hop neighbors of vertex.
  """
  @spec adjacent(Vertex.t() | Vertex.tuple_t(), atom(), :_ | map()) :: result()
  def adjacent(v, dst_type \\ :_, metadata \\ :_)

  def adjacent(v, dst_type, metadata) do
    t_adjacent(v, dst_type, metadata) |> DB.exec()
  end

  @spec t_adjacent(Vertex.t() | Vertex.tuple_t(), atom(), :_ | map()) :: fun()
  defp t_adjacent(src_v, dst_type, metadata \\ :_)

  defp t_adjacent({src_type, src_id}, dst_type, metadata) do
    fn ->
      # Use fresh variable names in the match pattern; reusing `dst_type` here
      # would shadow the function parameter rather than filter on it.
      for {@edge_table, _src_type, adj_type, _src_id, adj_id, _metadata} <-
            t_match_vertex(src_type, dst_type, src_id, :_, metadata).() do
        {adj_type, adj_id}
      end
    end
  end

  defp t_adjacent(v, dst_type, metadata) do
    t_adjacent({Vertex.label(v), Vertex.id(v)}, dst_type, metadata)
  end

  @doc """
  Walk the graph, hop by hop, fetching neighbors of the specified hop types.
  """
  @spec path(Vertex.t() | Vertex.tuple_t(), [Vertex.module_name_t()]) :: result()
  def path(v_start, hop_layers), do: t_path(v_start, hop_layers) |> DB.exec()

  @spec t_path(Vertex.t() | Vertex.tuple_t(), [Vertex.module_name_t()]) :: fun()
  # The base case must also return a transaction fun, since callers invoke the
  # result with `.()`; returning the bare vertex would crash on the final hop.
  def t_path(v_start, []), do: fn -> [v_start] end

  def t_path(v_start, [hop_type | hop_layer_types]) do
    fn ->
      for next_hop <- t_adjacent(v_start, hop_type).() do
        t_path(next_hop, hop_layer_types).()
      end
      |> List.flatten()
      |> Enum.filter(fn v -> not Vertex.equals?(v_start, v) end)
    end
  end

  def match_vertex(src_type, dst_type, src_id, dst_id, metadata) do
    t_match_vertex(src_type, dst_type, src_id, dst_id, metadata) |> DB.exec()
  end

  defp t_match_vertex(src_type, dst_type, src_id, dst_id, metadata) do
    DB.t_match({@edge_table, src_type, dst_type, src_id, dst_id, metadata})
  end

  # Internal Methods :: Utility
  @spec elixirize_output({:aborted, any} | {:atomic, any}) :: {:error, any} | {:ok, any}
  def elixirize_output(out) do
    case out do
      {:atomic, res} -> {:ok, res}
      {:aborted, reason} -> {:error, reason}
    end
  end
end
apps/sue/lib/sue/db/graph.ex
0.706596
0.513485
graph.ex
starcoder
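A minimal sketch of composing the graph API above. It assumes the Mnesia tables have been created and that `alice` and `bob` are structs for which `Sue.DB.Schema.Vertex.label/1` and `Vertex.id/1` are implemented; none of that setup is shown in this file.

```elixir
# `alice` and `bob` are placeholder vertex structs.
{:ok, _} = Sue.DB.Graph.add_vertex(alice)
{:ok, _} = Sue.DB.Graph.add_vertex(bob)

# Link them in both directions, attaching arbitrary edge metadata.
{:ok, _} = Sue.DB.Graph.add_bi_edge(alice, bob, %{since: 2021})

true = Sue.DB.Graph.exists_bi_edge?(alice, bob)

# 1-hop neighbors, optionally filtered by destination vertex type.
{:ok, neighbors} = Sue.DB.Graph.adjacent(alice)
```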
defmodule Absinthe.Adapter do
  @moduledoc """
  Absinthe supports an adapter mechanism that allows developers to define their
  schema using one code convention (eg, `snake_cased` fields and arguments),
  but accept query documents and return results (including names in errors) in
  another (eg, `camelCase`).

  Adapters aren't a part of GraphQL, but a utility that Absinthe adds so that
  both client and server can use conventions most natural to them.

  Absinthe ships with four adapters:

  * `Absinthe.Adapter.LanguageConventions`, which expects schemas to be defined
    in `snake_case` (the standard Elixir convention), translating to/from
    `camelCase` for incoming query documents and outgoing results. (This is the
    default as of v0.3.)
  * `Absinthe.Adapter.Underscore`, which is similar to the
    `Absinthe.Adapter.LanguageConventions` adapter but converts all incoming
    identifiers to underscores and does not modify outgoing identifiers (since
    those are already expected to be underscores). Unlike
    `Absinthe.Adapter.Passthrough` this does not break introspection.
  * `Absinthe.Adapter.Passthrough`, which is a no-op adapter and makes no
    modifications. (Note at the current time this does not support
    introspection if you're using camelized conventions).
  * `Absinthe.Adapter.StrictLanguageConventions`, which expects schemas to be
    defined in `snake_case`, translating to `camelCase` for outgoing results.
    This adapter requires incoming query documents to use `camelCase`.

  To set an adapter, you pass a configuration option at runtime:

  For `Absinthe.run/3`:

  ```
  Absinthe.run(
    query,
    MyApp.Schema,
    adapter: YourApp.Adapter.TheAdapterName
  )
  ```

  For `Absinthe.Plug`:

  ```
  forward "/api",
    to: Absinthe.Plug,
    init_opts: [schema: MyAppWeb.Schema, adapter: YourApp.Adapter.TheAdapterName]
  ```

  For GraphiQL:

  ```
  forward "/graphiql",
    to: Absinthe.Plug.GraphiQL,
    init_opts: [schema: MyAppWeb.Schema, adapter: YourApp.Adapter.TheAdapterName]
  ```

  Check `Absinthe.Plug` for full documentation on using these plugs.

  Notably, this means you're able to switch adapters on a case-by-case basis.
  In a Phoenix application, this means you could even support using different
  adapters for different clients.

  A custom adapter module must merely implement the `Absinthe.Adapter`
  behaviour, in many cases with `use Absinthe.Adapter` and only overriding the
  desired functions.

  ## Writing Your Own

  All you may need to implement in your adapter is `to_internal_name/2` and
  `to_external_name/2`.

  Check out `Absinthe.Adapter.LanguageConventions` for a good example.

  Note that types that are defined externally to your application (including
  the introspection types) may not be compatible if you're using a different
  adapter.
  """

  @type t :: module

  defmacro __using__(_) do
    quote do
      @behaviour unquote(__MODULE__)

      def to_internal_name(external_name, _role) do
        external_name
      end

      def to_external_name(internal_name, _role) do
        internal_name
      end

      defoverridable to_internal_name: 2,
                     to_external_name: 2
    end
  end

  @typedoc "The lexical role of a name within the document/schema."
  @type role_t :: :operation | :field | :argument | :result | :type | :directive

  @doc """
  Convert a name from an external name to an internal name.

  ## Examples

  Prefix all names with their role, just for fun!

  ```
  def to_internal_name(external_name, role) do
    role_name = role |> to_string
    role_name <> "_" <> external_name
  end
  ```
  """
  @callback to_internal_name(binary | nil, role_t) :: binary | nil

  @doc """
  Convert a name from an internal name to an external name.
## Examples Remove the role-prefix (the inverse of what we did in `to_internal_name/2` above): ``` def to_external_name(internal_name, role) do internal_name |> String.replace(~r/^\#{role}_/, "") end ``` """ @callback to_external_name(binary | nil, role_t) :: binary | nil end
lib/absinthe/adapter.ex
0.900234
0.874185
adapter.ex
starcoder
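A sketch of a custom adapter built on the defaults provided by `use Absinthe.Adapter`; the module name and the dashed-name convention are hypothetical, chosen only to show both overridable callbacks.

```elixir
defmodule MyApp.Adapter.Dashed do
  use Absinthe.Adapter

  # Outgoing: snake_case internal names become dashed external names.
  def to_external_name(internal_name, _role) when is_binary(internal_name) do
    String.replace(internal_name, "_", "-")
  end

  def to_external_name(nil, _role), do: nil

  # Incoming: dashed external names map back to snake_case.
  def to_internal_name(external_name, _role) when is_binary(external_name) do
    String.replace(external_name, "-", "_")
  end

  def to_internal_name(nil, _role), do: nil
end
```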
defmodule Timex.Ecto.DateTime do @moduledoc """ Support for using Timex with :datetime fields """ use Timex @behaviour Ecto.Type def type, do: :datetime @doc """ We can let Ecto handle blank input """ defdelegate blank?(value), to: Ecto.Type @doc """ Handle casting to Timex.Ecto.DateTime """ def cast(%DateTime{timezone: nil} = datetime), do: {:ok, %{datetime | :timezone => %TimezoneInfo{}}} def cast(%DateTime{} = datetime), do: {:ok, datetime} # Support embeds_one/embeds_many def cast(%{"calendar" => _, "year" => y, "month" => m, "day" => d, "hour" => h, "minute" => mm, "second" => s, "ms" => ms, "timezone" => %{"full_name" => tz_abbr}}) do datetime = Timex.datetime({{y,m,d},{h,mm,s}}, tz_abbr) {:ok, %{datetime | :millisecond => ms}} end def cast(%{"calendar" => _, "year" => y, "month" => m, "day" => d, "hour" => h, "minute" => mm, "second" => s, "millisecond" => ms, "timezone" => %{"full_name" => tz_abbr}}) do datetime = Timex.datetime({{y,m,d},{h,mm,s}}, tz_abbr) {:ok, %{datetime | :millisecond => ms}} end def cast(input) do case Ecto.DateTime.cast(input) do {:ok, datetime} -> load({{datetime.year, datetime.month, datetime.day}, {datetime.hour, datetime.min, datetime.sec, datetime.usec}}) :error -> :error end end @doc """ Handle casting to Timex.Ecto.DateTime without returning a tuple """ def cast!(input) do case cast(input) do {:ok, datetime} -> datetime :error -> :error end end @doc """ Load from the native Ecto representation """ def load({{year, month, day}, {hour, min, sec, usec}}) do datetime = Timex.datetime({{year, month, day}, {hour, min, sec}}) {:ok, %{datetime | :millisecond => Time.from(usec, :microseconds) |> Time.to_milliseconds}} end def load(_), do: :error @doc """ Convert to native Ecto representation """ def dump(%DateTime{} = date) do %DateTime{year: y, month: m, day: d, hour: h, minute: min, second: s, millisecond: ms} = Timezone.convert(date, "UTC") {:ok, {{y, m, d}, {h, min, s, round(ms * 1_000)}}} end def dump(_), do: :error end
lib/types/datetime.ex
0.811153
0.402862
datetime.ex
starcoder
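A minimal sketch of wiring the type above into an Ecto schema; the schema, table, and field names are hypothetical.

```elixir
defmodule MyApp.Event do
  use Ecto.Schema

  schema "events" do
    # Stored as a :datetime column; loaded back as a Timex DateTime struct.
    field :starts_at, Timex.Ecto.DateTime
  end
end

# Casting accepts Timex DateTime structs as well as anything
# Ecto.DateTime.cast/1 understands (e.g. ISO 8601 strings).
changeset =
  Ecto.Changeset.cast(%MyApp.Event{}, %{"starts_at" => "2015-06-24 04:50:34"}, [:starts_at])
```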
defmodule ExAlgo.Queue do
  @moduledoc """
  A basic queue implementation.
  """
  @type underflow_error :: {:error, :underflow}
  @type value_type :: any()
  @type t :: %__MODULE__{left: [value_type()], right: [value_type()]}

  @doc """
  A queue consists of a left and a right list.
  """
  defstruct left: [], right: []

  @doc """
  Creates a new empty Queue.

  ## Example

      iex> Queue.new()
      %Queue{left: [], right: []}
  """
  @spec new() :: t()
  def new, do: %__MODULE__{left: [], right: []}

  @doc """
  Creates a queue from a list.

  ## Example

      iex> ExAlgo.Queue.from 1..3
      %Queue{left: [3, 2, 1], right: []}
  """
  @spec from(Enumerable.t()) :: t()
  def from(enumerable),
    do: %__MODULE__{left: enumerable |> Enum.to_list() |> Enum.reverse(), right: []}

  @doc """
  Enqueues an item at the left (back) of the queue.

  ## Example

      iex> Queue.new |> Queue.enqueue(10) |> Queue.enqueue(20)
      %Queue{left: [20, 10], right: []}
  """
  @spec enqueue(t(), value_type()) :: t()
  def enqueue(%__MODULE__{left: left} = queue, item), do: %{queue | left: [item | left]}

  @doc """
  Dequeues the item at the front of the queue, i.e. the element enqueued
  earliest (FIFO order).

  ## Example

      iex> 1..4 |> Queue.from() |> Queue.dequeue()
      {1, %Queue{left: [], right: [2, 3, 4]}}

      iex> Queue.new |> Queue.dequeue()
      {:error, :underflow}
  """
  @spec dequeue(t()) :: {value_type(), t()} | underflow_error()
  def dequeue(%__MODULE__{left: [], right: []}), do: {:error, :underflow}
  def dequeue(%__MODULE__{right: [value | rest]} = queue), do: {value, %{queue | right: rest}}

  def dequeue(%__MODULE__{left: left}),
    do: %__MODULE__{left: [], right: Enum.reverse(left)} |> dequeue()

  @doc """
  Inserts an item at the right (front) of the queue, making it the next item
  to be dequeued.

  ## Example

      iex> Queue.new |> Queue.append(10)
      %Queue{left: [], right: [10]}

      iex> {_, queue} = 1..4 |> Queue.from() |> Queue.dequeue()
      iex> queue |> Queue.enqueue(5) |> Queue.append(6)
      %Queue{left: [5], right: [6, 2, 3, 4]}
  """
  @spec append(t(), value_type()) :: t()
  def append(%__MODULE__{right: right} = queue, item), do: %{queue | right: [item | right]}

  @doc """
  Returns a list representation of the queue.

  ## Example

      iex> Queue.new() |> Queue.to_list()
      []

      iex> Queue.from(1..4) |> Queue.to_list()
      [4, 3, 2, 1]

      iex> Queue.from(1..4)
      ...> |> Queue.dequeue()
      ...> |> then(fn {_, queue} -> queue end)
      ...> |> Queue.enqueue(-1)
      ...> |> Queue.append(10)
      ...> |> Queue.to_list()
      [-1, 4, 3, 2, 10]
  """
  @spec to_list(t()) :: list()
  def to_list(%__MODULE__{left: left, right: []}), do: left
  def to_list(%__MODULE__{left: [], right: right}), do: Enum.reverse(right)
  def to_list(%__MODULE__{left: left, right: right}), do: left ++ Enum.reverse(right)
end
lib/ex_algo/queue/queue.ex
0.924253
0.567727
queue.ex
starcoder
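Beyond the doctests above, a common pattern with this structure is draining the queue in FIFO order until the `{:error, :underflow}` sentinel appears; a small sketch:

```elixir
# Drain a queue front-to-back using Stream.unfold/2, stopping on underflow.
drain = fn queue ->
  Stream.unfold(queue, fn q ->
    case ExAlgo.Queue.dequeue(q) do
      {:error, :underflow} -> nil
      {value, rest} -> {value, rest}
    end
  end)
  |> Enum.to_list()
end

drain.(ExAlgo.Queue.from(1..4))
#=> [1, 2, 3, 4]
```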
defmodule Predicator do
  @moduledoc """
  Documentation for Predicator.
  Lexer and Parser currently only compatible with 0.4.0 predicate syntax
  """
  alias Predicator.Evaluator

  @lexer :predicate_lexer
  @atom_parser :atom_instruction_parser
  @string_parser :string_instruction_parser

  @type token_key_t :: :atom_key_inst | :string_key_inst
  @type predicate :: String.t | charlist

  @doc """
  Currently only compatible with 0.4.0 predicate syntax

  leex_string/1 takes a string or charlist and returns a lexed tuple for parsing.

      iex> leex_string('10 > 5')
      {:ok, [{:lit, 1, 10}, {:comparator, 1, :GT}, {:lit, 1, 5}], 1}

      iex> leex_string("apple > 5532")
      {:ok, [{:load, 1, :apple}, {:comparator, 1, :GT}, {:lit, 1, 5532}], 1}
  """
  @spec leex_string(predicate) :: {:ok|:error, list|tuple, non_neg_integer()}
  def leex_string(str) when is_binary(str), do: str |> to_charlist |> leex_string
  def leex_string(str) when is_list(str), do: @lexer.string(str)

  @doc """
  Currently only compatible with 0.4.0 predicate syntax

  parse_lexed/2 takes lexed tokens (either the bare token list or the full
  lexer result tuple) and returns an instruction list. It can also take an
  optional atom selecting the type of token keys to return. Options are
  `:string_key_inst` & `:atom_key_inst`

      iex> parse_lexed({:ok, [{:load, 1, :apple}, {:comparator, 1, :GT}, {:lit, 1, 5532}], 1})
      {:ok, [["load", :apple], ["lit", 5532], ["comparator", "GT"]]}

      iex> parse_lexed({:ok, [{:load, 1, :apple}, {:comparator, 1, :GT}, {:lit, 1, 5532}], 1}, :string_key_inst)
      {:ok, [["load", :apple], ["lit", 5532], ["comparator", "GT"]]}

      iex> parse_lexed([{:load, 1, :apple}, {:comparator, 1, :GT}, {:lit, 1, 5532}], :atom_key_inst)
      {:ok, [[:load, :apple], [:lit, 5532], [:comparator, :GT]]}
  """
  @spec parse_lexed(list, token_key_t) :: {:ok|:error, list|tuple}
  def parse_lexed(token, opt \\ :string_key_inst)
  def parse_lexed(token, :string_key_inst) when is_list(token), do: @string_parser.parse(token)
  def parse_lexed({_, token, _}, :string_key_inst), do: @string_parser.parse(token)
  def parse_lexed(token, :atom_key_inst) when is_list(token), do: @atom_parser.parse(token)
  def parse_lexed({_, token, _}, :atom_key_inst), do: @atom_parser.parse(token)

  @doc """
  Currently only compatible with 0.4.0 predicate syntax

  leex_and_parse/2 takes a string or charlist, does all lexing and parsing,
  then returns the instruction list.

      iex> leex_and_parse("13 > 12")
      [["lit", 13], ["lit", 12], ["comparator", "GT"]]

      iex> leex_and_parse('532 == 532', :atom_key_inst)
      [[:lit, 532], [:lit, 532], [:comparator, :EQ]]
  """
  @spec leex_and_parse(predicate, token_key_t) :: list | {:error, any(), non_neg_integer}
  def leex_and_parse(str, token_type \\ :string_key_inst) do
    with {:ok, tokens, _} <- leex_string(str),
         {:ok, predicate} <- parse_lexed(tokens, token_type) do
      predicate
    end
  end

  @doc """
  eval/3 takes a predicate set, a context struct and options
  """
  def eval(inst, context \\ %{}, opts \\ [map_type: :string])
  def eval(inst, context, opts), do: Evaluator.execute(inst, context, opts)

  def compile(predicate) do
    with {:ok, tokens, _} <- leex_string(predicate),
         {:ok, predicate} <- parse_lexed(tokens, :string_key_inst) do
      {:ok, predicate}
    else
      {:error, _} = err -> err
      {:error, left, right} -> {:error, {left, right}}
    end
  end

  def matches?(predicate, context) when is_list(context) do
    matches?(predicate, Map.new(context))
  end

  def matches?(predicate, context) when is_binary(predicate) or is_list(predicate) do
    with {:ok, predicate} <- compile(predicate) do
      eval(predicate, context)
    end
  end
end
lib/predicator.ex
0.716715
0.524821
predicator.ex
starcoder
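A short end-to-end sketch of the pipeline above. How context keys are resolved depends on `Predicator.Evaluator`, which is not shown in this file; string keys are assumed here per the default `map_type: :string`.

```elixir
# Compile once, inspect the instruction list, then evaluate against a context.
{:ok, instructions} = Predicator.compile("age > 21")
#=> {:ok, [["load", :age], ["lit", 21], ["comparator", "GT"]]}

# matches?/2 compiles and evaluates in one step; keyword contexts are
# converted to maps before evaluation.
Predicator.matches?("age > 21", %{"age" => 30})
```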
defmodule TflDemo.YoloX do alias TflDemo.YoloX.Prediction def apply_yolox(img_file) do img = CImg.load(img_file) # yolox prediction {:ok, res} = Prediction.apply(img) # draw result box Enum.reduce(res, CImg.builder(img), &draw_object(&2, &1)) |> CImg.runit() end defp draw_object(builder, {_name, boxes}) do Enum.reduce(boxes, builder, fn [_score|box], img -> [x0, y0, x1, y1] = Enum.map(box, &round(&1)) CImg.draw_rect(img, x0, y0, x1, y1, {255,0,0}) end) end end defmodule TflDemo.YoloX.Prediction do use TflInterp, model: "priv/yolox_s.tflite", label: "priv/coco.label" @yolox_shape {640, 640} def apply(img) do # preprocess bin = CImg.dup(img) |> CImg.get_resize(@yolox_shape, :ul, 114) |> CImg.to_flat([{:range, {0.0, 255.0}}, :nchw, :bgr]) # prediction outputs = TflDemo.YoloX.Prediction |> TflInterp.set_input_tensor(0, bin.data) |> TflInterp.invoke() |> TflInterp.get_output_tensor(0) |> Nx.from_binary({:f, 32}) |> Nx.reshape({:auto, 85}) # postprocess boxes = extract_boxes(outputs, scale(img)) scores = extract_scores(outputs) TflInterp.non_max_suppression_multi_class(__MODULE__, Nx.shape(scores), Nx.to_binary(boxes), Nx.to_binary(scores) ) end defp extract_boxes(tensor, scale \\ 1.0) do {grid, strides} = grid_strides(@yolox_shape, [8, 16, 32]) [ Nx.add(Nx.slice_axis(tensor, 0, 2, 1), grid), Nx.exp(Nx.slice_axis(tensor, 2, 2, 1)) ] |> Nx.concatenate(axis: 1) |> Nx.multiply(strides) |> Nx.multiply(scale) end defp grid_strides({wsize, hsize}, block) do reso = Enum.map(block, fn x -> {div(hsize, x), div(wsize, x), x} end) { Enum.map(reso, &grid/1) |> Nx.concatenate(axis: 0), Enum.map(reso, &strides/1) |> Nx.concatenate(axis: 0) } end defp grid({hsize, wsize, _}) do xv = Nx.iota({wsize}) |> Nx.tile([hsize, 1]) yv = Nx.iota({hsize}) |> Nx.tile([wsize, 1]) |> Nx.transpose Nx.stack([xv, yv], axis: 2) |> Nx.reshape({:auto, 2}) end defp strides({hsize, wsize, stride}) do Nx.tensor(stride) |> Nx.tile([hsize*wsize, 1]) end defp extract_scores(tensor) do Nx.multiply(Nx.slice_axis(tensor, 4, 1, 1), Nx.slice_axis(tensor, 5, 80, 1)) end defp scale(img) do {w, h, _, _} = CImg.shape(img) {wsize, hsize} = @yolox_shape max(w/wsize, h/hsize) end end
tfl_demo/lib/tfl_demo/yolo_x.ex
0.635222
0.52208
yolo_x.ex
starcoder
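A usage sketch for the demo above. It assumes the TFLite model and label files ship under `priv/` as configured, and that `CImg.save/2` exists in the CImg binding; that call is an assumption, since only `CImg.load/1` and the drawing functions appear in this file.

```elixir
# Run detection on a local image and write out the annotated result.
# "dog.jpg" and "result.jpg" are placeholder paths.
result = TflDemo.YoloX.apply_yolox("dog.jpg")
CImg.save(result, "result.jpg")  # assumed CImg API
```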
defmodule XeeThemeScript do
  @moduledoc """
  A behaviour module for defining a Xee theme.

  ## Examples

      defmodule YourTheme do
        use XeeThemeScript

        # Callbacks
        def init, do: {:ok, %{ids: MapSet.new(), logs: []}}

        def receive_meta(data, %{host_id: _host_id, token: _token}) do
          {:ok, data}
        end

        def join(%{ids: ids} = data, id) do
          {:ok, %{data | ids: MapSet.put(ids, id)}}
        end

        def handle_message(data, message, token) do
          handle_received(data, message, token)
        end

        def handle_received(data, received) do
          handle_received(data, received, :host)
        end

        def handle_received(%{logs: logs} = data, received, id) do
          {:ok, %{data | logs: [{id, received} | logs]}}
        end
      end
  """

  @typedoc "Return values of `init/0`, `join/2`, and `handle_received/*` functions."
  @type result :: {:ok, new_state :: term} | :error | {:error, reason :: term}

  @doc """
  Invoked when the theme is loaded or reloaded.
  """
  @callback install :: :ok | :error | {:error, reason :: term}

  @doc """
  Invoked before the experiment is created.

  Returning `{:ok, new_state}` sets the initial state to `new_state`.

  Returning `:error` or `{:error, reason}` fails the creation of the experiment.
  """
  @callback init :: result

  @doc """
  Invoked just after the experiment is created.

  Returning `{:ok, new_state}` changes the state to `new_state`.

  Returning `:error` or `{:error, reason}` keeps the state.
  """
  @callback receive_meta(state :: term, meta :: %{host_id: term, token: term}) :: result

  @doc """
  Invoked when a participant loads the experiment page.

  Returning `{:ok, new_state}` changes the state to `new_state`.

  Returning `:error` or `{:error, reason}` keeps the state.
  """
  @callback join(state :: term, id :: term) :: result

  @doc """
  Invoked when the experiment receives data from a host.

  Returning `{:ok, new_state}` changes the state to `new_state`.

  Returning `:error` or `{:error, reason}` keeps the state.
  """
  @callback handle_received(data :: term, received :: term) :: result

  @doc """
  Invoked when the experiment receives data from a participant.

  Returning `{:ok, new_state}` changes the state to `new_state`.

  Returning `:error` or `{:error, reason}` keeps the state.
  """
  @callback handle_received(data :: term, received :: term, id :: term) :: result

  @doc """
  Invoked when the experiment receives a message from another experiment.

  Returning `{:ok, new_state}` changes the state to `new_state`.

  Returning `:error` or `{:error, reason}` keeps the state.
  """
  @callback handle_message(data :: term, message :: term, token :: term) :: result

  @doc false
  defmacro __using__(_) do
    quote location: :keep do
      import unquote(__MODULE__)
      @behaviour unquote(__MODULE__)
      Module.register_attribute(__MODULE__, :require_file, accumulate: true)
      @before_compile unquote(__MODULE__)

      @doc false
      def init, do: {:ok, nil}
      def script_type, do: :data
      def install, do: :ok

      def receive_meta(data, meta) do
        {:error, "There is no matched `receive_meta/2`. data = #{inspect data}, meta = #{inspect meta}"}
      end

      def handle_message(data, message, token) do
        {:error, "There is no matched `handle_message/3`. data = #{inspect data}, message = #{inspect message}, token = #{token}"}
      end

      def handle_received(data, received) do
        {:error, "There is no matched `handle_received/2`. data = #{inspect data}, received = #{inspect received}"}
      end

      def handle_received(data, received, id) do
        {:error, "There is no matched `handle_received/3`.
data = #{inspect data}, received = #{inspect received}, id = #{inspect id}"} end defoverridable [init: 0, install: 0, script_type: 0, handle_received: 2, handle_received: 3, handle_message: 3, receive_meta: 2] end end defmacro __before_compile__(_env) do quote do def require_files do IO.warn "Script-style themes are deprecated. You should use module-style themes." @require_file end end end defmacro require_file(file) do quote do @require_file unquote(file) end end end
lib/xeethemescript.ex
0.863622
0.450057
xeethemescript.ex
starcoder
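A sketch of the callback flow, reusing the `YourTheme` example module from the moduledoc above; the meta map and message payloads are arbitrary placeholders.

```elixir
# Drive the theme through its lifecycle, threading the state along.
{:ok, state} = YourTheme.init()
{:ok, state} = YourTheme.receive_meta(state, %{host_id: "host-1", token: "t0k3n"})
{:ok, state} = YourTheme.join(state, "participant-1")

# Data from a participant lands in the logs under their id...
{:ok, state} = YourTheme.handle_received(state, %{vote: :yes}, "participant-1")

# ...while the 2-arity variant attributes the entry to :host.
{:ok, _state} = YourTheme.handle_received(state, :reset)
```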
defmodule Donut.GraphQL.Identity.Contact do use Donut.GraphQL.Schema.Notation @desc "The priority of a contact" enum :contact_priority do value :primary value :secondary end @desc "A generic contact interface" mutable_interface :contact do immutable do field :priority, non_null(:contact_priority), description: "The priority of the contact" field :status, non_null(:verification_status), description: "The current verification status of the contact" field :presentable, non_null(:string), description: "The presentable information about the contact" end @desc "Change the priority of the contact" field :set_priority, type: result(:error) do arg :priority, non_null(:contact_priority) end field :remove, result(:error), description: "Remove the email contact" end @desc "An email contact" mutable_object :email_contact do immutable do field :priority, non_null(:contact_priority), description: "The priority of the email contact" field :status, non_null(:verification_status), description: "The current verification status of the email contact" field :presentable, non_null(:string), description: "The presentable information about the email contact" field :email, non_null(:string), description: "The email address" interface :contact is_type_of fn %{ email: _ } -> true _ -> false end end interface :mutable_contact @desc "Change the priority of the email contact" field :set_priority, type: result(:error) do arg :priority, non_null(:contact_priority) resolve fn %{ priority: priority }, %{ priority: priority }, _ -> { :ok, nil } %{ email: email }, %{ priority: priority }, %{ context: %{ identity: identity } } -> case Sherbet.API.Contact.Email.set_priority(identity, email, priority) do :ok -> { :ok, nil } { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } } end end end @desc "Remove the email contact" field :remove, type: result(:error) do resolve fn %{ email: email }, _, %{ context: %{ identity: identity } } -> case Sherbet.API.Contact.Email.remove(identity, email) do :ok -> { :ok, nil } { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } } end end end end @desc "A mobile contact" mutable_object :mobile_contact do immutable do field :priority, non_null(:contact_priority), description: "The priority of the mobile contact" field :status, non_null(:verification_status), description: "The current verification status of the mobile contact" field :presentable, non_null(:string), description: "The presentable information about the mobile contact" field :mobile, non_null(:string), description: "The mobile number" interface :contact is_type_of fn %{ mobile: _ } -> true _ -> false end end interface :mutable_contact @desc "Change the priority of the mobile contact" field :set_priority, type: result(:error) do arg :priority, non_null(:contact_priority) resolve fn %{ priority: priority }, %{ priority: priority }, _ -> { :ok, nil } %{ mobile: mobile }, %{ priority: priority }, %{ context: %{ identity: identity } } -> case Sherbet.API.Contact.Mobile.set_priority(identity, mobile, priority) do :ok -> { :ok, nil } { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } } end end end @desc "Remove the mobile contact" field :remove, type: result(:error) do resolve fn %{ mobile: mobile }, _, %{ context: %{ identity: identity } } -> case Sherbet.API.Contact.Mobile.remove(identity, mobile) do :ok -> { :ok, nil } { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } } end end end end @desc """ The collection of possible results from a contact 
request. If successful returns the `Contact` trying to be accessed, otherwise returns an error. """ result :contact, [:email_contact, :mobile_contact] @desc """ The collection of possible results from a contact mutate request. If successful returns the `MutableContact` trying to be modified, otherwise returns an error. """ result :mutable_contact, [:mutable_email_contact, :mutable_mobile_contact] mutable_object :contact_queries do immutable do @desc "The contacts associated with the identity" field :contacts, list_of(result(mutable(:contact))) do @desc "The status of the contacts to retrieve" arg :status, :verification_status @desc "The priority of the contacts to retrieve" arg :priority, :contact_priority resolve fn %{ id: identity }, args, env = %{ definition: %{ selections: selections } } -> contacts = Enum.reduce(selections, [], fn %Absinthe.Blueprint.Document.Fragment.Inline{ schema_node: %Absinthe.Type.Object{ identifier: object } }, acc when object in [mutable(:email_contact), mutable(:mobile_contact)] -> [object|acc] %Absinthe.Blueprint.Document.Fragment.Inline{ schema_node: %Absinthe.Type.Interface{ identifier: contact } }, acc when contact in [:contact, mutable(:contact)] -> [mutable(:email_contact), mutable(:mobile_contact)] ++ acc _, acc -> acc end) |> Enum.uniq |> Enum.reduce([], fn mutable(:email_contact), acc -> case Sherbet.API.Contact.Email.contacts(identity) do { :ok, contacts } -> filter_contacts(contacts, args, acc, fn { status, priority, email } -> mutable(%{ priority: priority, status: status, presentable: email, email: email }, env) end) { :error, reason } -> %Donut.GraphQL.Result.Error{ message: reason } end mutable(:mobile_contact), acc -> case Sherbet.API.Contact.Mobile.contacts(identity) do { :ok, contacts } -> filter_contacts(contacts, args, acc, fn { status, priority, mobile } -> mutable(%{ priority: priority, status: status, presentable: mobile, mobile: mobile }, env) end) { :error, reason } -> %Donut.GraphQL.Result.Error{ message: reason } end end) |> Enum.reverse { :ok, contacts } end end end end defp filter_contacts(contacts, %{ status: status, priority: priority }, acc, get_object) do Enum.reduce(contacts, acc, fn contact, acc -> case get_object.(contact) do object = %{ status: ^status, priority: ^priority } -> [object|acc] _ -> acc end end) end defp filter_contacts(contacts, %{ priority: priority }, acc, get_object) do Enum.reduce(contacts, acc, fn contact, acc -> case get_object.(contact) do object = %{ priority: ^priority } -> [object|acc] _ -> acc end end) end defp filter_contacts(contacts, %{ status: status }, acc, get_object) do Enum.reduce(contacts, acc, fn contact, acc -> case get_object.(contact) do object = %{ status: ^status } -> [object|acc] _ -> acc end end) end defp filter_contacts(contacts, _, acc, get_object) do Enum.reduce(contacts, acc, &([get_object.(&1)|&2])) end object :contact_mutations do @desc "Request a contact be removed from its associated identity" field :request_remove_contact, type: result(:error) do @desc "The email contact to request be removed" arg :email, :string @desc "The mobile contact to request be removed" arg :mobile, :string resolve fn args = %{ email: email }, _ when map_size(args) == 1 -> case Sherbet.API.Contact.Email.request_removal(email) do :ok -> { :ok, nil } { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } } end args = %{ mobile: mobile }, _ when map_size(args) == 1 -> case Sherbet.API.Contact.Mobile.request_removal(mobile) do :ok -> { :ok, nil } { :error, reason } -> { :ok, 
%Donut.GraphQL.Result.Error{ message: reason } } end %{}, _ -> { :error, "Missing contact" } _, _ -> { :error, "Only one contact can be specified" } end end @desc "Finalise a contact be removed from its associated identity" field :finalise_remove_contact, type: result(:error) do @desc "The email contact to be removed" arg :email, :string @desc "The mobile contact to be removed" arg :mobile, :string @desc "The confirmation key" arg :key, non_null(:string) resolve fn args = %{ email: email, key: key }, _ when map_size(args) == 2 -> case Sherbet.API.Contact.Email.finalise_removal(email, key) do :ok -> { :ok, nil } { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } } end args = %{ mobile: mobile, key: key }, _ when map_size(args) == 2 -> case Sherbet.API.Contact.Mobile.finalise_removal(mobile, key) do :ok -> { :ok, nil } { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } } end %{ key: _ }, _ -> { :error, "Missing contact" } _, _ -> { :error, "Only one contact can be specified" } end end end object :contact_identity_mutations do @desc "Add a contact to be associated with an identity" field :add_contact, type: result(:mutable_contact) do @desc "The email contact to be added" arg :email, :string @desc "The mobile contact to be added" arg :mobile, :string resolve fn %{ id: identity }, args = %{ email: email }, env when map_size(args) == 1 -> case Sherbet.API.Contact.Email.add(identity, email) do :ok -> case Sherbet.API.Contact.Email.contacts(identity) do { :ok, contacts } -> Enum.find_value(contacts, fn { status, priority, ^email } -> %{ priority: priority, status: status, presentable: email, email: email } _ -> false end) |> case do false -> { :ok, %Donut.GraphQL.Result.Error{ message: "Failed to retrieve newly added email contact" } } contact -> { :ok, mutable(contact, env) } end { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } } end { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } } end %{ id: identity }, args = %{ mobile: mobile }, env when map_size(args) == 1 -> case Sherbet.API.Contact.Mobile.add(identity, mobile) do :ok -> case Sherbet.API.Contact.Mobile.contacts(identity) do { :ok, contacts } -> Enum.find_value(contacts, fn { status, priority, ^mobile } -> %{ priority: priority, status: status, presentable: mobile, mobile: mobile } _ -> false end) |> case do false -> { :ok, %Donut.GraphQL.Result.Error{ message: "Failed to retrieve newly added mobile contact" } } contact -> { :ok, mutable(contact, env) } end { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } } end { :error, reason } -> { :ok, %Donut.GraphQL.Result.Error{ message: reason } } end _, %{}, _ -> { :error, "Missing contact" } _, _, _ -> { :error, "Only one contact can be specified" } end end end end
apps/donut_graphql/lib/donut.graphql/identity/contact.ex
0.751101
0.405213
contact.ex
starcoder
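A sketch of exercising the contact queries above through Absinthe. The schema module name, the top-level `identity` field, and the enum value are assumptions; only the `contacts` field, its arguments, and the `EmailContact`/`MobileContact` object types come from this file.

```elixir
# Hypothetical query; assumes a schema that mounts contact_queries under
# an identity field and puts the caller's identity in the context.
"""
{
  identity {
    contacts(priority: PRIMARY) {
      ... on EmailContact { email status priority }
      ... on MobileContact { mobile status priority }
    }
  }
}
"""
|> Absinthe.run(Donut.GraphQL.Schema, context: %{identity: identity_id})
```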