Dataset schema: code (string, 114 to 1.05M chars), path (string, 3 to 312 chars), quality_prob (float64, 0.5 to 0.99), learning_prob (float64, 0.2 to 1), filename (string, 3 to 168 chars), kind (string, 1 class)
defmodule Mix.Triplex do @moduledoc """ Useful functions for any triplex mix task. Here is the list of tasks we have for now: - [`mix triplex.gen.migration`](./Mix.Tasks.Triplex.Gen.Migration.html) - generates a tenant migration for the repo - [`mix triplex.migrate`](./Mix.Tasks.Triplex.Migrate.html) - runs the repository tenant migrations - [`mix triplex.migrations`](./Mix.Tasks.Triplex.Migrations.html) - displays the repository migration status - [`mix triplex.rollback`](./Mix.Tasks.Triplex.Rollback.html) - rolls back the repository tenant migrations """ alias Ecto.Migrator alias Logger.App alias Mix.Ecto alias Mix.Project @aliases [ r: :repo, n: :step ] @switches [ all: :boolean, step: :integer, to: :integer, start: :boolean, quiet: :boolean, prefix: :string, pool_size: :integer, log_sql: :boolean, strict_version_order: :boolean, repo: [:keep, :string], no_compile: :boolean, no_deps_check: :boolean ] @doc """ Ensures the migrations path exists for the given `repo`. Returns the path to the `repo` tenant migrations folder if it succeeds, or calls `Mix.raise/1` if it fails. """ def ensure_tenant_migrations_path(repo) do path = Path.join(source_repo_priv(repo), Triplex.config().migrations_path) if not Project.umbrella?() and not File.dir?(path) do raise_missing_migrations(Path.relative_to_cwd(path), repo) end path end defp raise_missing_migrations(path, repo) do Mix.raise(""" Could not find migrations directory #{inspect(path)} for repo #{inspect(repo)}. This may be because you are in a new project and the migration directory has not been created yet. Creating an empty directory at the path above will fix this error. If you expected existing migrations to be found, please make sure your repository has been properly configured and the configured path exists. """) end @doc """ Runs the tenant migrations with the given `args`, using the `migrator` function. """ def run_tenant_migrations(args, direction, migrator \\ &Migrator.run/4) do repos = Ecto.parse_repo(args) {opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases) opts = if opts[:to] || opts[:step] || opts[:all], do: opts, else: Keyword.put(opts, :step, 1) opts = if opts[:quiet], do: Keyword.merge(opts, log: false, log_sql: false), else: opts Enum.each(repos, &run_tenant_migrations(&1, args, opts, direction, migrator)) end defp run_tenant_migrations(repo, args, opts, direction, migrator) do Ecto.ensure_repo(repo, args) path = ensure_tenant_migrations_path(repo) {:ok, pid, apps} = ensure_started(repo, opts) pool = repo.config[:pool] Code.compiler_options(ignore_module_conflict: true) migrated = Enum.flat_map(Triplex.all(repo), fn tenant -> opts = Keyword.put(opts, :prefix, Triplex.to_prefix(tenant)) if function_exported?(pool, :unboxed_run, 2) do pool.unboxed_run(repo, fn -> migrator.(repo, path, direction, opts) end) else migrator.(repo, path, direction, opts) end end) Code.compiler_options(ignore_module_conflict: false) pid && repo.stop() restart_apps_if_migrated(apps, migrated) end @doc """ Returns the private repository path relative to the source. """ def source_repo_priv(repo) do config = repo.config() priv = config[:priv] || "priv/#{repo |> Module.split() |> List.last() |> Macro.underscore()}" app = Keyword.fetch!(config, :otp_app) Path.join(Project.deps_paths()[app] || File.cwd!(), priv) end @doc """ Ensures the given repository is started and running. 
""" @spec ensure_started(Ecto.Repo.t(), Keyword.t()) :: {:ok, pid | nil, [atom]} def ensure_started(repo, opts) do {:ok, started} = Application.ensure_all_started(:ecto_sql) # If we are starting EctoSQL just now, assume # the logger has not been properly booted yet. if :ecto_sql in started && Process.whereis(Logger) do backends = Application.get_env(:logger, :backends, []) try do App.stop() Application.put_env(:logger, :backends, [:console]) :ok = App.start() after Application.put_env(:logger, :backends, backends) end end {:ok, apps} = repo.__adapter__.ensure_all_started(repo.config(), :temporary) pool_size = Keyword.get(opts, :pool_size, 2) case repo.start_link(pool_size: pool_size) do {:ok, pid} -> {:ok, pid, apps} {:error, {:already_started, _pid}} -> {:ok, nil, apps} {:error, error} -> Mix.raise("Could not start repo #{inspect(repo)}, error: #{inspect(error)}") end end @doc """ Restarts the apps if there was any migration command. """ @spec restart_apps_if_migrated([atom], list()) :: :ok def restart_apps_if_migrated(_apps, []), do: :ok def restart_apps_if_migrated(apps, [_ | _]) do # Silence the logger to avoid application down messages. Logger.remove_backend(:console) for app <- Enum.reverse(apps) do Application.stop(app) end for app <- apps do Application.ensure_all_started(app) end :ok after Logger.add_backend(:console, flush: true) end end
lib/mix/triplex.ex
0.807764
0.533762
triplex.ex
starcoder
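The `Mix.Triplex` module above drives tenant migrations from task args. A minimal sketch of a direct invocation, assuming a hypothetical `MyApp.Repo` configured for Triplex (the flags map to the `@aliases`/`@switches` above):

```elixir
# Hypothetical call, as a `mix triplex.migrate` task would make it.
# "-r"/"-n" are the r: :repo and n: :step aliases defined above.
Mix.Triplex.run_tenant_migrations(["-r", "MyApp.Repo", "-n", "1"], :up)
```

Each tenant prefix is applied via `Triplex.to_prefix/1` before the migrator runs, so a single call migrates every tenant schema.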
defmodule ExchemaCoercion.Coercions.DateAndTime do @moduledoc """ Coercions for Date and Time types """ alias Exchema.Types, as: T @doc """ Converts an ISO 8601 string to a native date/time format """ @spec from_iso8601(any, Exchema.Type.t, [ExchemaCoercion.coercion]) :: ExchemaCoercion.result def from_iso8601(input, T.DateTime, _) when is_binary(input) do with {:ok, val, _} <- DateTime.from_iso8601(input) do {:ok, val} end end def from_iso8601(input, T.NaiveDateTime, _) when is_binary(input) do NaiveDateTime.from_iso8601(input) end def from_iso8601(input, T.Date, _) when is_binary(input) do Date.from_iso8601(input) end def from_iso8601(input, T.Time, _) when is_binary(input) do Time.from_iso8601(input) end def from_iso8601(_, _, _), do: :error defguard valid_time_unit(unit) when unit in [:second, :millisecond, :microsecond, :nanosecond] or (is_integer(unit) and unit >= 1) @doc """ This converts integers to datetimes (either naive or not). It supports an optional fourth argument to specify the unit (`:second` by default) """ @spec from_epoch(any, Exchema.Type.t, [ExchemaCoercion.coercion], System.time_unit()) :: ExchemaCoercion.result def from_epoch(value, type, _, unit \\ :second) def from_epoch(value, T.DateTime, _, unit) when valid_time_unit(unit) and is_integer(value) do DateTime.from_unix(value, unit) end def from_epoch(value, T.NaiveDateTime, _, unit) when valid_time_unit(unit) and is_integer(value) do with {:ok, date_time} <- DateTime.from_unix(value, unit) do {:ok, DateTime.to_naive(date_time)} end end def from_epoch(_, _, _, _), do: :error @doc """ Converts from types with more information to those with less. That is, it can convert from DateTime to everything, but doesn't convert from anything to DateTime, so we don't assume anything. The hierarchy is DateTime > NaiveDateTime > (Date & Time) """ @spec without_assumptions(any, Exchema.Type.t, [ExchemaCoercion.coercion]) :: ExchemaCoercion.result def without_assumptions(%DateTime{} = input, T.NaiveDateTime, _) do {:ok, DateTime.to_naive(input)} end def without_assumptions(%DateTime{} = input, T.Date, _) do {:ok, DateTime.to_date(input)} end def without_assumptions(%DateTime{} = input, T.Time, _) do {:ok, DateTime.to_time(input)} end def without_assumptions(%NaiveDateTime{} = input, T.Date, _) do {:ok, NaiveDateTime.to_date(input)} end def without_assumptions(%NaiveDateTime{} = input, T.Time, _) do {:ok, NaiveDateTime.to_time(input)} end def without_assumptions(_, _, _), do: :error @doc """ This creates some assumptions when converting date times. So if we try to convert a NaiveDateTime to a DateTime, it will assume the UTC time zone. If we try to convert a Date to DateTime or NaiveDateTime, it will assume the beginning of the day """ @spec with_assumptions(any, Exchema.Type.t, [ExchemaCoercion.coercion]) :: ExchemaCoercion.result def with_assumptions(%NaiveDateTime{} = input, T.DateTime, _) do DateTime.from_naive(input, "Etc/UTC") end def with_assumptions(%Date{} = input, T.DateTime, _) do with {:ok, naive} <- with_assumptions(input, T.NaiveDateTime, []) do DateTime.from_naive(naive, "Etc/UTC") end end def with_assumptions(%Date{} = input, T.NaiveDateTime, _) do with date <- Date.to_erl(input) do NaiveDateTime.from_erl({date, {0, 0, 0}}) end end def with_assumptions(_, _, _), do: :error end
lib/coercions/date_and_time.ex
0.87308
0.665626
date_and_time.ex
starcoder
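A quick sketch exercising the coercions above; the epoch value and sigil results assume the standard `Date`/`DateTime` modules:

```elixir
alias Exchema.Types, as: T
alias ExchemaCoercion.Coercions.DateAndTime

DateAndTime.from_iso8601("2021-06-01", T.Date, [])
#=> {:ok, ~D[2021-06-01]}

# Unix epoch in seconds (the default unit)
DateAndTime.from_epoch(1_622_505_600, T.DateTime, [])
#=> {:ok, ~U[2021-06-01 00:00:00Z]}

# DateTime carries more information than Date, so no assumption is needed
DateAndTime.without_assumptions(~U[2021-06-01 12:30:00Z], T.Date, [])
#=> {:ok, ~D[2021-06-01]}
```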
defmodule Mix.Tasks.Adify do @shortdoc ~s(Runs Adify task with given options) @task ~s(mix adify) @info """ #{@task} runs adify commands: Usage: $ `#{@task} --os osx` Options: ``` Option Alias Description -------------------------------------------------------------------------- --os NO-ALIAS The Operating System on which Adify needs to run. Can be one of the supported OSes. To get a list of all supported OSes, run: `$ mix adify.list_os` --noconfirm NO-ALIAS Determines whether there needs to be a confirmation before installing each tool --tools-dir -t Path to the Tools Directory. Check out docs to get the template of the tools directory expected by Adify --digest-file -d Path to where the Digest file will be generated after Adification. ``` """ @moduledoc """ This task runs an adification process. It calls the Adify module to run the process with the given inputs. ## Info: #{@info} """ use Mix.Task @switches [ digest_file: :string, help: :boolean, noconfirm: :boolean, os: :string, tools_dir: :string ] @aliases [ d: :digest_file, h: :help, t: :tools_dir ] @doc """ Runs the command ## Examples: # When help is given as an argument iex> Mix.Tasks.Adify.run(["-h"]) :ok # When help is not given as an argument iex> Mix.Tasks.Adify.run(["--noconfirm", "-t", "test/support/tools"]) iex> :noconfirm :noconfirm """ def run(args) do {parsed, _args, _} = OptionParser.parse(args, switches: @switches, aliases: @aliases) case parsed[:help] do true -> print_help() _ -> parsed |> prep_opts() |> Adify.run() end end defp print_help do Mix.Shell.IO.info(""" #{@shortdoc} #{@info} """) end defp prep_opts(adify_opts) do [confirm: !Keyword.get(adify_opts, :noconfirm, false)] |> Keyword.merge(adify_opts) end end
lib/adify/mix/adify.ex
0.797281
0.619759
adify.ex
starcoder
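For illustration, the `--noconfirm` flag is folded into a `confirm:` option before reaching `Adify.run/1`; the tools path below comes from the doctest above:

```elixir
# ["--noconfirm", "-t", "tools"] parses to [noconfirm: true, tools_dir: "tools"],
# and prep_opts/1 turns that into [confirm: false, noconfirm: true, tools_dir: "tools"].
Mix.Tasks.Adify.run(["--noconfirm", "-t", "test/support/tools"])
```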
defmodule Absinthe.Fixtures.PetsSchema do use Absinthe.Schema interface :being do field :name, :string do arg :surname, :boolean end end interface :pet do field :name, :string do arg :surname, :boolean end end interface :canine do field :name, :string do arg :surname, :boolean end end enum :dog_command do value :sit, as: 0 value :heel, as: 1 value :down, as: 2 end object :dog do is_type_of fn _ -> true end field :name, :string do arg :surname, :boolean end field :nickname, :string field :bark_volume, :integer field :barks, :boolean field :does_know_command, :boolean do arg :dog_command, :dog_command end field :is_housetrained, :boolean do arg :at_other_homes, :boolean, default_value: true end field :is_at_location, :boolean do arg :x, :integer arg :y, :integer end interfaces [:being, :pet, :canine] end object :cat do is_type_of fn _ -> true end field :name, :string do arg :surname, :boolean end field :nickname, :string field :meows, :boolean field :meow_volume, :integer field :fur_color, :fur_color interfaces [:being, :pet] end union :cat_or_dog do types [:dog, :cat] end interface :intelligent do field :iq, :integer end object :human do is_type_of fn _ -> true end interfaces [:being, :intelligent] field :name, :string do arg :surname, :boolean end field :pets, list_of(:pet) field :relatives, list_of(:human) field :iq, :integer end object :alien do is_type_of fn _ -> true end interfaces [:being, :intelligent] field :iq, :integer field :name, :string do arg :surname, :boolean end field :num_eyes, :integer end union :dog_or_human do types [:dog, :human] end union :human_or_alien do types [:human, :alien] end enum :fur_color do value :brown, as: 0 value :black, as: 1 value :tan, as: 2 value :spotted, as: 3 end input_object :complex_input do field :required_field, non_null(:boolean) field :int_field, :integer field :string_field, :string field :boolean_field, :boolean field :string_list_field, list_of(:string) end object :complicated_args do field :int_arg_field, :string do arg :int_arg, :integer end field :non_null_int_arg_field, :string do arg :non_null_int_arg, non_null(:integer) end field :string_arg_field, :string do arg :string_arg, :string end field :boolean_arg_field, :string do arg :boolean_arg, :boolean end field :float_arg_field, :string do arg :float_arg, :float end field :id_arg_field, :string do arg :id_arg, :id end field :string_list_arg_field, :string do arg :string_list_arg, list_of(:string) end field :string_list_of_list_arg_field, :string do arg :string_list_of_list_arg, list_of(list_of(:string)) end field :complex_arg_field, :string do arg :complex_arg, :complex_input arg :complex_arg_list, list_of(:complex_input) end field :multiple_reqs, :string do arg :req1, non_null(:integer) arg :req2, non_null(:integer) end field :multiple_opts, :string do arg :opt1, :integer, default_value: 0 arg :opt2, :integer, default_value: 0 end field :multiple_opt_and_req, :string do arg :req1, non_null(:integer) arg :req2, non_null(:integer) arg :opt1, :integer, default_value: 0 arg :opt2, :integer, default_value: 0 end end query do field :human, :human do arg :id, :id end field :alien, :alien field :dog, :dog field :cat, :cat field :pet, :pet field :cat_or_dog, :cat_or_dog field :dog_or_human, :dog_or_human field :human_or_alien, :human_or_alien field :complicated_args, :complicated_args end directive :on_query do on [:query] end directive :on_mutation do on [:mutation] end directive :on_subscription do on [:subscription] end directive :on_field do on [:field] end directive 
:on_fragment_definition do on [:fragment_definition] end directive :on_fragment_spread do on [:fragment_spread] end directive :on_inline_fragment do on [:inline_fragment] end directive :on_schema do on [:schema] end directive :on_scalar do on [:scalar] end directive :on_object do on [:object] end directive :on_field_definition do on [:field_definition] end directive :on_argument_definition do on [:argument_definition] end directive :on_interface do on [:interface] end directive :on_union do on [:union] end directive :on_enum do on [:enum] end directive :on_enum_value do on [:enum_value] end directive :on_input_object do on [:input_object] end directive :on_input_field_definition do on [:input_field_definition] end end
test/support/fixtures/pets_schema.ex
0.538012
0.645602
pets_schema.ex
starcoder
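A sketch of running a document against this fixture with stock `Absinthe.run/2`; since the fixture defines no resolvers, it is mostly useful for validating documents rather than producing data:

```elixir
# Field and argument names come from the query block above;
# SIT is the external form of the :dog_command enum value.
{:ok, result} =
  Absinthe.run(
    "{ dog { name barkVolume doesKnowCommand(dogCommand: SIT) } }",
    Absinthe.Fixtures.PetsSchema
  )
```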
defmodule Parent.Client do @moduledoc """ Functions for interacting with parent's children from other processes. All of these functions issue a call to the parent process. Therefore, they can't be used from inside the parent process. Use functions from the `Parent` module instead to interact with the children from within the process. Likewise these functions can't be invoked inside the child process during its initialization. Defer interacting with the parent to `c:GenServer.handle_continue/2`, or if you're using another behaviour which doesn't support such callback, send yourself a message to safely do the post-init interaction with the parent. If parent is configured with the `registry?: true` option, some query functions, such as `child_pid/2` will perform an ETS lookup instead of issuing a call, so the caveats above won't apply. """ alias Parent.Registry @doc """ Client interface to `Parent.children/0`. If the parent is a registry, the result will be obtained from the ETS table. """ @spec children(GenServer.server()) :: [Parent.child()] def children(parent) do case Registry.table(parent) do {:ok, table} -> Registry.children(table) :error -> call(parent, :children) end end @doc """ Client interface to `Parent.child_pid/1`. If the parent is a registry, the result will be obtained from the ETS table. """ @spec child_pid(GenServer.server(), Parent.child_id()) :: {:ok, pid} | :error def child_pid(parent, child_id) do case Registry.table(parent) do {:ok, table} -> Registry.child_pid(table, child_id) :error -> call(parent, :child_pid, [child_id]) end end @doc """ Client interface to `Parent.child_meta/1`. If the parent is a registry, the result will be obtained from the ETS table. """ @spec child_meta(GenServer.server(), Parent.child_ref()) :: {:ok, Parent.child_meta()} | :error def child_meta(parent, child_ref) do case Registry.table(parent) do {:ok, table} -> Registry.child_meta(table, child_ref) :error -> call(parent, :child_meta, [child_ref]) end end @doc "Client interface to `Parent.start_child/2`." @spec start_child(GenServer.server(), Parent.start_spec(), Keyword.t()) :: Parent.on_start_child() def start_child(parent, child_spec, overrides \\ []), do: call(parent, :start_child, [child_spec, overrides], :infinity) @doc "Client interface to `Parent.shutdown_child/1`." @spec shutdown_child(GenServer.server(), Parent.child_ref()) :: {:ok, Parent.stopped_children()} | :error def shutdown_child(parent, child_ref), do: call(parent, :shutdown_child, [child_ref], :infinity) @doc "Client interface to `Parent.restart_child/1`." @spec restart_child(GenServer.server(), Parent.child_ref()) :: :ok | :error def restart_child(parent, child_ref), do: call(parent, :restart_child, [child_ref], :infinity) @doc "Client interface to `Parent.shutdown_all/1`." @spec shutdown_all(GenServer.server(), any) :: Parent.stopped_children() def shutdown_all(server, reason \\ :shutdown), do: call(server, :shutdown_all, [reason], :infinity) @doc "Client interface to `Parent.return_children/1`." @spec return_children(GenServer.server(), Parent.stopped_children()) :: :ok def return_children(parent, stopped_children), do: call(parent, :return_children, [stopped_children], :infinity) @doc "Client interface to `Parent.update_child_meta/2`." 
@spec update_child_meta( GenServer.server(), Parent.child_id(), (Parent.child_meta() -> Parent.child_meta()) ) :: :ok | :error def update_child_meta(parent, child_ref, updater), do: call(parent, :update_child_meta, [child_ref, updater], :infinity) @doc false def whereis_name({parent, child_id}) do case child_pid(parent, child_id) do {:ok, pid} -> pid :error -> :undefined end end defp call(server, function, args \\ [], timeout \\ 5000) when (is_integer(timeout) and timeout >= 0) or timeout == :infinity do # This is the custom implementation of a call. We're not using standard GenServer calls to # ensure that this call won't end up in some custom behaviour's handle_call. request = {__MODULE__, function, args} case GenServer.whereis(server) do nil -> exit({:noproc, {__MODULE__, :call, [server, request, timeout]}}) pid when pid == self() -> exit({:calling_self, {__MODULE__, :call, [server, request, timeout]}}) pid -> try do :gen.call(pid, :"$parent_call", request, timeout) catch :exit, reason -> exit({reason, {__MODULE__, :call, [server, request, timeout]}}) else {:ok, res} -> res end end end end
lib/parent/client.ex
0.890501
0.556038
client.ex
starcoder
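A usage sketch; `MyParent` and `MyWorker` are hypothetical, and the child id is assumed to default to the module as in standard child specs:

```elixir
# From some process other than the parent itself:
{:ok, pid} = Parent.Client.start_child(MyParent, {MyWorker, :some_arg})

# An ETS lookup when the parent runs with registry?: true, otherwise a call:
{:ok, ^pid} = Parent.Client.child_pid(MyParent, MyWorker)
```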
defmodule Mix.Tasks.Hex.Outdated do use Mix.Task alias Hex.Registry.Server, as: Registry @shortdoc "Shows outdated Hex deps for the current project" @moduledoc """ Shows all Hex dependencies that have newer versions in the registry. mix hex.outdated [APP] By default, it only shows top-level packages explicitly listed in the `mix.exs` file. All outdated packages can be displayed by using the `--all` command line option. By default, `hex.outdated` will exit with a non-zero exit code (1) if there are any outdated dependencies. You can override this to respect the requirements as specified in your `mix.exs` file, with the `--within-requirements` command line option, so it only exits with non-zero exit code if the update is possible. For example, if your version requirement is "~> 2.0" but the latest version is `3.0`, with `--within-requirements` it will exit successfully, but if the latest version is `2.8`, then `--within-requirements` will exit with non-zero exit code (1). One scenario where this could be useful is to ensure you always have the latest version of your dependencies, except for major version bumps. If a dependency name is given all requirements on that dependency, from the entire dependency tree, are listed. This is useful if you are trying to figure out why a package isn't updating when you run `mix deps.update`. Note that when this task determines if a package is updatable it only looks at the project's current set of dependency requirements and what version they are locked to. When `mix deps.update` is called multiple packages may be updated that in turn update their own dependencies, which may cause the package you want to update to not be able to update. ## Command line options * `--all` - shows all outdated packages, including children of packages defined in `mix.exs` * `--pre` - include pre-releases when checking for newer versions * `--within-requirements` - exit with non-zero code only if requirements specified in `mix.exs` are met. """ @behaviour Hex.Mix.TaskDescription @switches [all: :boolean, pre: :boolean, within_requirements: :boolean] @impl true def run(args) do Hex.Mix.check_deps() Hex.start() {opts, args} = Hex.OptionParser.parse!(args, strict: @switches) Registry.open() lock = Mix.Dep.Lock.read() lock |> Hex.Mix.packages_from_lock() |> Hex.Registry.Server.prefetch() case args do [app] -> single(lock, app, opts) [] -> all(lock, opts) _ -> Mix.raise(""" Invalid arguments, expected: mix hex.outdated [APP] """) end end @impl true def tasks() do [ {"", "Shows outdated Hex deps for the current project"}, {"[APP]", "Shows outdated Hex deps for the given dependency"} ] end defp single(lock, app, opts) do app = String.to_atom(app) deps = Hex.Mix.top_level_deps() {repo, package, current} = case Hex.Utils.lock(lock[app]) do %{repo: repo, name: package, version: version} -> {repo, package, version} nil -> Mix.raise("Dependency #{app} not locked as a Hex package") end latest = latest_version(repo, package, current, opts[:pre]) outdated? = Hex.Version.compare(current, latest) == :lt lock_requirements = get_requirements_from_lock(app, lock) deps_requirements = get_requirements_from_deps(app, deps) requirements = deps_requirements ++ lock_requirements if outdated? 
do [ "There is a newer version of the dependency available ", [:bright, latest, " > ", current, :reset, "!"] ] |> IO.ANSI.format_fragment() |> Hex.Shell.info() else ["Current version ", :bright, current, :reset, " of dependency is up to date!"] |> IO.ANSI.format_fragment() |> Hex.Shell.info() end header = ["Source", "Requirement", "Up-to-date"] values = Enum.map(requirements, &format_single_row(&1, latest)) Hex.Shell.info("") Mix.Tasks.Hex.print_table(header, values) message = "Up-to-date indicates if the requirement matches the latest version." Hex.Shell.info(["\n", message]) if outdated?, do: Mix.Tasks.Hex.set_exit_code(1) end defp get_requirements_from_lock(app, lock) do Enum.flat_map(lock, fn {source, lock} -> case Hex.Utils.lock(lock) do %{deps: nil} -> [] %{deps: deps} -> Enum.flat_map(deps, fn {dep_app, req, _opts} -> if app == dep_app, do: [[Atom.to_string(source), req]], else: [] end) nil -> [] end end) end defp get_requirements_from_deps(app, deps) do # TODO: Path to umbrella child's mix.exs case Map.fetch(deps, app) do {:ok, deps} -> Enum.map(deps, fn {src, req, _opts} -> [Path.join([src, "mix.exs"]), req] end) :error -> [] end end defp format_single_row([source, req], latest) do req_matches? = version_match?(latest, req) req_color = if req_matches?, do: :green, else: :red up_to_date? = if req_matches?, do: "Yes", else: "No" [[:bright, source], [req_color, req || ""], [req_color, up_to_date?]] end defp all(lock, opts) do deps = Hex.Mix.top_level_deps() dep_names = if opts[:all], do: Map.keys(lock), else: Map.keys(deps) versions = dep_names |> Enum.sort() |> get_versions(deps, lock, opts[:pre]) values = Enum.map(versions, &format_all_row/1) diff_links = Enum.map(versions, &build_diff_link/1) |> Enum.reject(&is_nil/1) if Enum.empty?(values) do Hex.Shell.info("No hex dependencies") else header = ["Dependency", "Current", "Latest", "Status"] Mix.Tasks.Hex.print_table(header, values) base_message = "Run `mix hex.outdated APP` to see requirements for a specific dependency." diff_message = maybe_diff_message(diff_links) Hex.Shell.info(["\n", base_message, diff_message]) any_outdated? = any_outdated?(versions) req_met? = any_req_matches?(versions) cond do any_outdated? && opts[:within_requirements] && req_met? -> Mix.Tasks.Hex.set_exit_code(1) any_outdated? && opts[:within_requirements] && not req_met? -> nil any_outdated? -> Mix.Tasks.Hex.set_exit_code(1) true -> nil end end end defp get_versions(dep_names, deps, lock, pre?) do Enum.flat_map(dep_names, fn name -> case Hex.Utils.lock(lock[name]) do %{repo: repo, name: package, version: lock_version} -> latest_version = latest_version(repo, package, lock_version, pre?) lock_requirements = get_requirements_from_lock(name, lock) deps_requirements = get_requirements_from_deps(name, deps) requirements = (deps_requirements ++ lock_requirements) |> Enum.map(fn [_, req_version] -> req_version end) [[Atom.to_string(name), lock_version, latest_version, requirements]] _ -> [] end end) end defp latest_version(repo, package, default, pre?) do {:ok, default} = Hex.Version.parse(default) pre? = pre? || default.pre != [] latest = Registry.versions(repo, package) |> highest_version(pre?) latest || default end defp highest_version(versions, pre?) do versions = if pre? do versions else Enum.filter(versions, fn version -> {:ok, version} = Hex.Version.parse(version) version.pre == [] end) end List.last(versions) end defp format_all_row([package, lock, latest, requirements]) do outdated?
= Hex.Version.compare(lock, latest) == :lt latest_color = if outdated?, do: :red, else: :green req_matches? = req_matches?(requirements, latest) status = case {outdated?, req_matches?} do {true, true} -> [:yellow, "Update possible"] {true, false} -> [:red, "Update not possible"] {false, _} -> [:green, "Up-to-date"] end [ [:bright, package], lock, [latest_color, latest], status ] end defp build_diff_link([package, lock, latest, requirements]) do outdated? = Hex.Version.compare(lock, latest) == :lt req_matches? = Enum.all?(requirements, &version_match?(latest, &1)) case {outdated?, req_matches?} do {true, true} -> "diffs[]=#{package}:#{lock}:#{latest}" {_, _} -> nil end end defp version_match?(_version, nil), do: true defp version_match?(version, req), do: Hex.Version.match?(version, req) defp any_outdated?(versions) do Enum.any?(versions, fn [_package, lock, latest, _requirements] -> Hex.Version.compare(lock, latest) == :lt end) end defp maybe_diff_message([]), do: "" defp maybe_diff_message(diff_links) do "\n\nTo view the diffs in each available update, visit:\n" <> diff_link(diff_links) end defp diff_link(diff_links) do long_url = "https://diff.hex.pm/diffs?" <> Enum.join(diff_links, "&") if Hex.State.fetch!(:no_short_urls) do long_url else maybe_get_short_link(long_url) end end defp maybe_get_short_link(long_url) do case Hex.API.ShortURL.create(long_url) do :error -> long_url {:ok, short_url} -> short_url end end defp any_req_matches?(versions) do Enum.any?(versions, fn [_package, _lock, latest, requirements] -> req_matches?(requirements, latest) end) end defp req_matches?(requirements, latest) do Enum.all?(requirements, &version_match?(latest, &1)) end end
lib/mix/tasks/hex.outdated.ex
0.760295
0.416915
hex.outdated.ex
starcoder
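Typical invocations, using only flags defined in `@switches` above:

```
$ mix hex.outdated                        # top-level deps only
$ mix hex.outdated --all                  # include transitive deps
$ mix hex.outdated --within-requirements  # fail only when an update is possible
$ mix hex.outdated ecto                   # every requirement on one package
```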
defmodule Oban.Plugins.Gossip do @moduledoc """ Periodically broadcast queue activity to the gossip notification channel. ## Using the Plugin The following example demonstrates using the plugin without any configuration, which will broadcast the state of each local queue every 1 second: config :my_app, Oban, plugins: [Oban.Plugins.Gossip], ... Override the default options to broadcast every 5 seconds: config :my_app, Oban, plugins: [{Oban.Plugins.Gossip, interval: :timer.seconds(5)}], ... ## Options * `:interval` — the number of milliseconds between gossip broadcasts ## Instrumenting with Telemetry The `Oban.Plugins.Gossip` plugin adds the following metadata to the `[:oban, :plugin, :stop]` event: * `:gossip_count` - the number of queues that had activity broadcasted """ use GenServer alias Oban.{Config, Notifier} @type option :: {:conf, Config.t()} | {:name, GenServer.name()} | {:interval, pos_integer()} defmodule State do @moduledoc false defstruct [:conf, :name, :timer, interval: :timer.seconds(1)] end @doc false @spec start_link([option()]) :: GenServer.on_start() def start_link(opts) do GenServer.start_link(__MODULE__, opts, name: opts[:name]) end @impl GenServer def init(opts) do Process.flag(:trap_exit, true) state = State |> struct!(opts) |> schedule_gossip() {:ok, state} end @impl GenServer def terminate(_reason, state) do if is_reference(state.timer), do: Process.cancel_timer(state.timer) :ok end @impl GenServer def handle_info(:gossip, %State{} = state) do meta = %{conf: state.conf, plugin: __MODULE__} match = [{{{state.conf.name, {:producer, :_}}, :"$1", :_}, [], [:"$1"]}] :telemetry.span([:oban, :plugin], meta, fn -> checks = Oban.Registry |> Registry.select(match) |> Enum.map(&safe_check/1) |> Enum.reject(&is_nil/1) |> Enum.map(&sanitize_name/1) if Enum.any?(checks), do: Notifier.notify(state.conf, :gossip, checks) {:ok, Map.put(meta, :gossip_count, length(checks))} end) {:noreply, schedule_gossip(state)} end defp schedule_gossip(state) do %{state | timer: Process.send_after(self(), :gossip, state.interval)} end defp safe_check(pid) do if Process.alive?(pid), do: GenServer.call(pid, :check) catch :exit, _reason -> nil end defp sanitize_name(%{name: name} = check) when is_binary(name), do: check defp sanitize_name(%{name: name} = check), do: %{check | name: inspect(name)} end
lib/oban/plugins/gossip.ex
0.84626
0.591104
gossip.ex
starcoder
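Since the plugin emits `[:oban, :plugin, :stop]` telemetry with a `:gossip_count` key, a handler can observe it; the handler id and output below are illustrative:

```elixir
:telemetry.attach(
  "oban-gossip-logger",
  [:oban, :plugin, :stop],
  fn _event, _measurements, meta, _config ->
    # Other plugins emit the same event, so filter on the plugin module
    if meta[:plugin] == Oban.Plugins.Gossip do
      IO.puts("gossip: broadcast #{meta.gossip_count} queue checks")
    end
  end,
  nil
)
```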
defmodule CMS.Helpers do @moduledoc """ Various helper functions that aid in parsing CMS JSON data. """ alias CMS.API alias CMS.Config alias CMS.CustomHTML5Scrubber alias CMS.Field.{File, Image, Link} alias CMS.Partial.Paragraph alias Phoenix.HTML @doc """ Each CMS entity hosts a variety of fields which all follow common patterns, depending on the field type: - Long text field (optional: with summary): - value: raw database value from user input - processed: Drupal's post-processed (safer) value - summary: A plain-text value separate from above - Entity reference field (collapsed) - target_id: a integer value of another entity - Entity reference field (expanded) - the entire map is the page struct - default: most other fields - value: mixed type, but most common pattern """ @spec field_value(map, String.t()) :: any def field_value(entity, field) do case entity[field] do [%{"processed" => value}] -> value [%{"value" => value}] -> value [%{"target_id" => value}] -> value [%{"nid" => _} = node] -> node _ -> nil end end @doc """ Handles entity fields that support multiple values. Common patterns above apply here. """ @spec field_values(map, String.t()) :: [map] def field_values(entity, field) do entity |> Map.get(field, []) |> Enum.map(&Map.get(&1, "value")) end @spec handle_html(String.t() | nil) :: HTML.safe() def handle_html(html) do (html || "") |> CustomHTML5Scrubber.html5() |> rewrite_static_file_links |> HTML.raw() end @spec parse_body(map) :: HTML.safe() def parse_body(%{} = data) do data |> field_value("body") |> handle_html end @spec parse_files(map, String.t()) :: [File.t()] def parse_files(%{} = data, field) do data |> Map.get(field, []) |> Enum.map(&File.from_api/1) end @spec path_alias(map) :: String.t() | nil def path_alias(data) do data |> parse_path_alias() end @spec parse_path_alias(map) :: String.t() | nil def parse_path_alias(%{"path" => [%{"alias" => path_alias}]}), do: path_alias def parse_path_alias(_), do: nil @spec parse_image(map, String.t()) :: Image.t() | nil def parse_image(%{} = data, field) do case parse_images(data, field) do [image] -> image [] -> nil end end @spec parse_images(map, String.t()) :: [Image.t()] | [] def parse_images(%{} = data, field) do data |> Map.get(field, []) |> Enum.map(&Image.from_api/1) end @spec parse_iso_datetime(String.t()) :: DateTime.t() | nil def parse_iso_datetime(nil) do nil end def parse_iso_datetime(time) do case String.split(time, ":") do [_date_hr, _min, _sec] -> time |> Timex.parse("{ISOdate}T{ISOtime}") |> do_parse_iso_datetime(:deprecated) [_date_hr, _min, _sec, _tz] -> time |> Timex.parse("{ISO:Extended}") |> do_parse_iso_datetime(:extended) _ -> nil end end defp do_parse_iso_datetime({:ok, dt}, :deprecated), do: Timex.to_datetime(dt, "Etc/UTC") defp do_parse_iso_datetime({:ok, dt}, :extended), do: dt defp do_parse_iso_datetime(_, _), do: nil @spec parse_date(map, String.t()) :: Date.t() | nil def parse_date(data, field) do case data[field] do [%{"value" => date}] -> parse_date_string(date, "{YYYY}-{0M}-{0D}") _ -> nil end end @spec parse_date_string(String.t(), String.t()) :: Date.t() | nil defp parse_date_string(date, format_string) do case Timex.parse(date, format_string) do {:error, _message} -> nil {:ok, naive_datetime} -> NaiveDateTime.to_date(naive_datetime) end end @spec parse_link(map, String.t()) :: Link.t() | nil def parse_link(%{} = data, field) do case data[field] do [link] -> Link.from_api(link) _ -> nil end end @doc """ Gathers information about desired view mode based on incoming query parameters. 
Key presence is enough to count as true. - page: whether the page is in preview mode (drafts are rendered) - paragraphs: whether unpublished paragraphs should render """ @spec preview_opts(map) :: Keyword.t() def preview_opts(query_params \\ %{}) do [ page: Map.has_key?(query_params, "preview"), paragraphs: Map.has_key?(query_params, "paragraphs") ] end @doc """ Expects raw JSON data for a CMS object that contains a paragraphs field. This field value will always be a list of potential paragraphs. """ @spec parse_paragraphs(map, Keyword.t(), String.t()) :: [Paragraph.t()] def parse_paragraphs(data, preview_opts \\ [], target_field \\ "field_paragraphs") do data |> Map.get(target_field, []) |> Enum.filter(&show_paragraph?(&1, preview_opts)) |> Enum.map(&Paragraph.from_api(&1, preview_opts)) end @spec show_paragraph?(map | nil, Keyword.t()) :: boolean defp show_paragraph?(field_data, preview_opts) # Skip broken/missing paragraphs (CMS unable to load and returns NULL) defp show_paragraph?(nil, _) do false end # Reusable paragraph instances aren't automatically removed when their child # paragraphs are deleted from the database, so catch that here. defp show_paragraph?(%{"field_reusable_paragraph" => [nil]}, _) do false end # Reusable paragraphs are not directly renderable since they act as instance containers. # However, these instances can be unpublished. If unpublished, stop and return false. # If published, continue checking the nested child paragraph for publish status. defp show_paragraph?(%{"field_reusable_paragraph" => [child]} = parent, preview_opts) do %{"status" => [parent_status]} = parent %{"paragraphs" => [paragraph]} = child case parent_status do %{"value" => false} -> false _ -> show_paragraph?(paragraph, preview_opts) end end # In "preview" mode, allow unpublished paragraphs to be rendered if requested defp show_paragraph?(_, preview: true, paragraphs: true) do true end defp show_paragraph?(%{"status" => [%{"value" => value}]}, _) do value end @spec rewrite_static_file_links(String.t()) :: String.t() defp rewrite_static_file_links(body) do static_path = Config.static_path() Regex.replace(~r/"(#{static_path}[^"]+)"/, body, fn _, path -> ['"', Util.site_path(:static_url, [path]), '"'] end) end @spec rewrite_url(String.t()) :: String.t() def rewrite_url(url) when is_binary(url) do uri = URI.parse(url) path = if uri.query do "#{uri.path}?#{uri.query}" else uri.path end Util.site_path(:static_url, [path]) end @spec int_or_string_to_int(integer | String.t() | nil) :: integer | nil def int_or_string_to_int(nil), do: nil def int_or_string_to_int(num) when is_integer(num), do: num def int_or_string_to_int(str) when is_binary(str) do case Integer.parse(str) do {int, ""} -> int _ -> nil end end @doc """ Retrieves category from CMS data. If the "field_page_type" field is empty or not found, the returned category is an empty string. 
iex> category(%{"field_page_type" => [%{"name" => "Guides"}]}) "Guides" iex> category(%{"field_page_type" => []}) "" iex> category(%{}) "" iex> category(nil) "" """ @spec category(map | nil) :: String.t() def category(nil), do: "" def category(data) do data |> Map.get("field_page_type", [%{}]) |> Enum.at(0, %{}) |> Map.get("name", "") end @spec content_type(String.t()) :: API.type() | nil for atom <- ~w( diversion event news_entry page project project_update )a do str = Atom.to_string(atom) def content_type(unquote(str)), do: unquote(atom) end def content_type(_), do: nil @doc "Returns the text if present, otherwise returns nil" @spec content(String.t()) :: String.t() | nil @spec content(HTML.safe()) :: HTML.safe() | nil def content(nil) do nil end def content({:safe, string} = safe_html) do if content(string) do safe_html end end def content(string) do case String.trim(string) do "" -> nil string -> string end end @doc """ Parses the related_transit field for route-specific data. Could contain multiple routes. Contains mode, branch, and other info. """ @spec routes([map()]) :: [API.route_term()] def routes(route_data) do route_data |> Enum.map(&Map.get(&1, "data")) |> Enum.map(&route_metadata/1) end # Maps the tagged CMS route term, its group, and its parent mode. # For routes and misc. groupings like "local_bus," the CMS will have # mapped the appropriate GTFS mode to that tag prior to parsing here. @spec route_metadata(map()) :: API.route_term() defp route_metadata(route_data) do Map.new( id: Map.get(route_data, "gtfs_id"), group: Map.get(route_data, "gtfs_group"), mode: route_data |> Map.get("gtfs_ancestry") |> Map.get("mode") |> route_mode() ) end # Some CMS routes are actually custom groups that may not # have any single MBTA mode associated with them (mode: nil). # There should never be more than one, single mode in the list. @spec route_mode([String.t()] | nil) :: String.t() | nil defp route_mode(nil), do: nil defp route_mode([mode]), do: mode end
apps/cms/lib/helpers.ex
0.796134
0.490297
helpers.ex
starcoder
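The field shapes described in the moduledoc, exercised directly (the input map is illustrative):

```elixir
data = %{
  "title" => [%{"value" => "Fares"}],
  "field_image" => [%{"target_id" => 42}]
}

CMS.Helpers.field_value(data, "title")        #=> "Fares"
CMS.Helpers.field_value(data, "field_image")  #=> 42
CMS.Helpers.field_value(data, "missing")      #=> nil
CMS.Helpers.int_or_string_to_int("7")         #=> 7
```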
defmodule Phoenix.Component do @moduledoc """ API for function components. A function component is any function that receives an assigns map as argument and returns a rendered struct built with the `~H` sigil. Here is an example: defmodule MyComponent do use Phoenix.Component # Optionally also bring the HTML helpers # use Phoenix.HTML def greet(assigns) do ~H"\"" <p>Hello, <%= assigns.name %></p> "\"" end end The component can be invoked as a regular function: MyComponent.greet(%{name: "Jane"}) But it is typically invoked using the function component syntax from the `~H` sigil: ~H"\"" <MyComponent.greet name="Jane" /> "\"" If the `MyComponent` module is imported or if the function is defined locally, you can skip the module name: ~H"\"" <.greet name="Jane" /> "\"" Learn more about the `~H` sigil [in its documentation](`Phoenix.LiveView.Helpers.sigil_H/2`). ## `use Phoenix.Component` Modules that define function components should call `use Phoenix.Component` at the top. Doing so will import the functions found in the `Phoenix.LiveView` and `Phoenix.LiveView.Helpers` modules. Note it is not necessary to `use Phoenix.Component` inside `Phoenix.LiveView` and `Phoenix.LiveComponent`. ## Blocks It is also possible to pass HTML blocks to function components, as with regular HTML tags. For example, you could create a button component that is invoked like this: <.button> This goes <strong>inside</strong> the button! </.button> Where the function component would be defined as: def button(assigns) do ~H"\"" <button class="btn"> <%= render_block(@inner_block) %> </button> "\"" end Where `render_block` is defined at `Phoenix.LiveView.Helpers.render_block/2`. """ @doc false defmacro __using__(_) do quote do import Phoenix.LiveView import Phoenix.LiveView.Helpers end end end
lib/phoenix_component.ex
0.875747
0.544378
phoenix_component.ex
starcoder
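A test-style sketch of rendering the `greet` component from the moduledoc; it assumes `Phoenix.LiveViewTest.render_component/2` accepting a function capture (available in the same LiveView generation as this module), inside a test module with an `@endpoint` configured:

```elixir
import Phoenix.LiveViewTest

html = render_component(&MyComponent.greet/1, name: "Jane")
# html =~ "Hello, Jane"
```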
defmodule Model.Route do @moduledoc """ Route represents a line in the transit system. """ use Recordable, [ :id, :agency_id, :short_name, :long_name, :description, :fare_class, :type, :color, :text_color, :sort_order, :line_id, :listed_route, direction_names: [nil, nil], direction_destinations: [nil, nil] ] @typedoc """ The color must be a six-character hexadecimal number, for example, `00FFFF`. If no color is specified, the default route color is white (`FFFFFF`). """ @type color :: String.t() @type id :: String.t() @typedoc """ | Value | Name | Description | |-------|------------|-------------------------------------------------------------------| | `0` | Light Rail | Any light rail or street level system within a metropolitan area. | | `1` | Subway | Any underground rail system within a metropolitan area. | | `2` | Rail | Used for intercity or long-distance travel. | | `3` | Bus | Used for short- and long-distance bus routes. | | `4` | Ferry | Used for short- and long-distance boat service. | See [GTFS `routes.txt` `route_type`](https://github.com/google/transit/blob/master/gtfs/spec/en/reference.md#routestxt). """ @type route_type :: 0 | 1 | 2 | 3 | 4 @typedoc """ * `:id` - Unique ID * `:agency_id` - Unique ID of the agency * `:color` - A color that corresponds to the route, such as the line color on a map. The color difference between `:color` and `:text_color` should provide sufficient contrast when viewed on a black and white screen. The [W3C Techniques for Accessibility Evaluation And Repair Tools document](https://www.w3.org/TR/AERT#color-contrast) offers a useful algorithm for evaluating color contrast. There are also helpful online tools for choosing contrasting colors, including the [snook.ca Color Contrast Check application](http://snook.ca/technical/colour_contrast/colour.html#fg=33FF33,bg=333333). See [GTFS `routes.txt` `route_color`](https://github.com/google/transit/blob/master/gtfs/spec/en/reference.md#routestxt). * `:description` - Details about stops, schedule, and/or service. See [GTFS `routes.txt` `route_desc`](https://github.com/google/transit/blob/master/gtfs/spec/en/reference.md#routestxt). * `:long_name` - The full name of a route. This name is generally more descriptive than the `:short_name` and will often include the route's destination or stop. See [GTFS `routes.txt` `route_long_name`](https://github.com/google/transit/blob/master/gtfs/spec/en/reference.md#routestxt). * `:short_name` - This will often be a short, abstract identifier like "32", "100X", or "Green" that riders use to identify a route, but which doesn't give any indication of what places the route serves. At least one of `:short_name` or `:long_name` must be specified. See [GTFS `routes.txt` `route_short_name`](https://github.com/google/transit/blob/master/gtfs/spec/en/reference.md#routestxt). * `:fare_class` - Specifies the fare type of the route, which can differ from the service category. * `:sort_order` - routes are sorted in ascending order of this field. * `:text_color` - A legible color to use for text drawn against a background of `:color`. If no color is specified, the default text color is black (`000000`). The color difference between `:color` and `:text_color` should provide sufficient contrast when viewed on a black and white screen. See [GTFS `routes.txt` `route_text_color`](https://github.com/google/transit/blob/master/gtfs/spec/en/reference.md#routestxt). * `:type` - Type of vehicle used on route. 
See [GTFS `routes.txt` `route_type`](https://github.com/google/transit/blob/master/gtfs/spec/en/reference.md#routestxt). * `:line_id` - References `line_id` values from `lines.txt`. Indicates in which grouping of routes this route belongs * `:listed_route` - Indicates whether route should be included in a public-facing list of all routes. * `:direction_names` - names of direction ids for this route in ascending ordering starting at `0` for the first index. * `:direction_destinations` - destinations for direction ids for this route in ascending ordering starting at `0` for the first index. """ @type t :: %__MODULE__{ id: id, agency_id: id, color: color, description: String.t(), long_name: String.t() | nil, short_name: String.t() | nil, fare_class: String.t() | nil, sort_order: non_neg_integer, text_color: color, type: route_type, line_id: id, listed_route: boolean, direction_names: [String.t() | nil], direction_destinations: [String.t() | nil] } end
apps/model/lib/model/route.ex
0.943996
0.643574
route.ex
starcoder
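Assuming `use Recordable` defines a struct from the field list (as its use here suggests), building a route looks like this; the values are illustrative, shaped after GTFS `routes.txt`:

```elixir
%Model.Route{
  id: "Red",
  long_name: "Red Line",
  type: 1,                 # subway, per the route_type table above
  color: "DA291C",
  text_color: "FFFFFF",
  sort_order: 10_010,
  direction_names: ["South", "North"],
  direction_destinations: ["Ashmont/Braintree", "Alewife"]
}
```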
defmodule Exfmt.Comment do @moduledoc """ We leverage `Code.string_to_quoted/2` to get the AST from Elixir source code. This is great as it's maintained by the core team (i.e. not me). This is not great as it doesn't preserve comments, so we need to extract them ourselves and then merge them into the AST later. """ @type t :: {:"#", [line: non_neg_integer], [String.t]} @doc """ Extract comments from a string of Elixir source code. """ @spec extract_comments(String.t) :: {:ok, [t]} | :error def extract_comments(src) do case tokenize(src) do {:ok, _line, _column, tokens} -> comments = tokens |> Enum.filter(&match?({:comment, _, _}, &1)) |> Enum.map(&transform_comment/1) |> Enum.reverse() {:ok, comments} error -> error end end defp tokenize(src) do src |> String.to_charlist() |> :elixir_tokenizer.tokenize(1, preserve_comments: true) end defp transform_comment({:comment, {line, _, _}, chars}) do content = chars |> tl() |> to_string() {:"#", [line: line], [content]} end @doc """ Merge the given comments into an Elixir abstract syntax tree. iex> comments = [{:"#", [line: 1], []}] ...> ast = {:ok, [line: 1], []} ...> merge(comments, ast) {:"#comment_block", [], [{:ok, [line: 1], []}, {:"#", [line: 1], []}]} """ @spec merge([t], Macro.t) :: Macro.t def merge(comments, nil) do {:"#comment_block", [], Enum.reverse(comments)} end def merge(comments, {:__block__, _, [nil]}) do {:"#comment_block", [], Enum.reverse(comments)} end def merge(comments, ast) do case Macro.prewalk(ast, Enum.reverse(comments), &merge_node/2) do {merged, []} -> merged {merged, remaining_comments} -> {:"#comment_block", [], [merged | remaining_comments]} end end defp merge_node(ast, []) do {ast, []} end defp merge_node(ast, comments) do ast_line = line(ast) before_node = fn(c) -> line(c) < ast_line end case Enum.split_while(comments, before_node) do {[], _} -> {ast, comments} {earlier, rest} -> block = {:"#comment_block", [], earlier ++ [ast]} {block, rest} end end defp line({_, meta, _}) do meta[:line] || 0 end defp line(_) do 0 end end
lib/exfmt/comment.ex
0.535827
0.459864
comment.ex
starcoder
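End to end, extraction and merging compose like this (the results follow from the clauses above; note `transform_comment/1` drops the leading `#` but keeps the following space):

```elixir
{:ok, comments} = Exfmt.Comment.extract_comments("# TODO\n:ok\n")
#=> {:ok, [{:"#", [line: 1], [" TODO"]}]}

{:ok, ast} = Code.string_to_quoted(":ok")
Exfmt.Comment.merge(comments, ast)
#=> {:"#comment_block", [], [:ok, {:"#", [line: 1], [" TODO"]}]}
```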
defmodule MonEx do @moduledoc ~S""" MonEx implements the two most common monadic data types: * `MonEx.Result` - container for a result of operation or error. Result can be created using a constructor macro: `ok(value)` or `error(e)`, where the underlying structure is a tuple: `{:ok, value}` or `{:error, e}` respectively. * `MonEx.Option` - container for a value that might be present or missing. Use `some(value)` to create Option with value and `none()` to create an empty Option. Mind the parentheses, they are important when using it with pattern matching. * `MonEx` - collection of utility functions to work with both of these types. ## Result Result type fits perfectly with idiomatic Erlang/Elixir return values. When some library function returns either `{:ok, val}` or `{:error, err}`, you can use functions provided by MonEx right away. The most typical example, where MonEx shines, is a pipeline, where each operation can fail. Normally this would be organized in a form of nested case expressions: final = case op1(x) do {:ok, res1} -> case op2(res1) do {:ok, res2} -> op3(res2) {:error, e} -> {:error, e} end {:error, e} -> {:error, e} end With MonEx you can do the same using `flat_map` operation: final = op1(x) |> flat_map(&op2/1) |> flat_map(&op3/1) Once any of the operations returns `error(e)`, the following operations are skipped and the error is returned. You can either do something based on pattern matching or provide a fallback (can be a function or a default value). case final do ok(value) -> IO.puts(value) error(e) -> IO.puts("Oh, no, the error occured!") end final |> fallback(ok("No problem, I got it")) ## Option Option type wraps the value. If value is present, it's `some(value)`, if it's missing, `none()` is used instead. With Option type, you can use the same set of functions, such as `map`, `flat_map`, etc. find_user(id) |> map(&find_posts_by_user/1) This will only request for posts if the user was found. Then content of `some()` will be transformed into posts, or `none()` will be returned. See docs per Result and Option modules for details. """ import MonEx.{Option, Result} alias MonEx.{Option, Result} @typep m(a, b) :: Option.t(a) | Result.t(a, b) @doc """ Transforms the content of monadic type. Function is applied only if it's `ok` or `some`. Otherwise value stays intact. Example: f = fn (x) -> x * 2 end some(5) |> map(f) == some(10) none() |> map(f) == none() """ @spec map(m(a, b), (a -> c)) :: m(c, b) when a: any, b: any, c: any def map(some(x), f) when is_function(f, 1), do: some(f.(x)) def map(none(), f) when is_function(f, 1), do: none() def map(ok(x), f) when is_function(f, 1), do: ok(f.(x)) def map(error(m), f) when is_function(f, 1), do: error(m) @doc """ Applies function that returns monadic type itself to the content of the monadic type. This is useful in a chain of operations, where argument to the next op has to be unwrapped to proceed. Example: inverse = fn (x) -> if x == 0 do none() else some(1/x) end end some(5) |> flat_map(inverse) == some(1/5) some(0) |> flat_map(inverse) == none() """ @spec flat_map(m(a, b), (a -> m(c, b))) :: m(c, b) when a: any, b: any, c: any def flat_map(some(x), f) when is_function(f, 1), do: f.(x) def flat_map(none(), f) when is_function(f, 1), do: none() def flat_map(ok(x), f) when is_function(f, 1), do: f.(x) def flat_map(error(m), f) when is_function(f, 1), do: error(m) @doc """ Performs a calculation with the content of the monadic container and returns the argument intact. 
Even though the convention says to return nothing (Unit), this one passes the value along for convenience — this way we can perform more than one operation. some(5) |> foreach(fn x -> IO.inspect(x) end) |> foreach(fn x -> IO.inspect(2 * x) end) This will print: 5 10 """ @spec foreach(m(a, b), (a -> no_return)) :: m(a, b) when a: any, b: any def foreach(some(x) = res, f) when is_function(f, 1), do: (f.(x); res) def foreach(none() = z, _), do: z def foreach(ok(x) = res, f) when is_function(f, 1), do: (f.(x); res) def foreach(error(_) = z, _), do: z end
lib/monex.ex
0.95183
0.796688
monex.ex
starcoder
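A short pipeline in the moduledoc's style; `parse` is a hypothetical fallible operation:

```elixir
import MonEx
import MonEx.Result

parse = fn s ->
  case Integer.parse(s) do
    {n, ""} -> ok(n)
    _ -> error(:not_an_integer)
  end
end

ok("21") |> flat_map(parse) |> map(&(&1 * 2))
#=> {:ok, 42}

ok("oops") |> flat_map(parse) |> map(&(&1 * 2))
#=> {:error, :not_an_integer}
```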
defmodule Spell.Message do @moduledoc """ The `Spell.Message` module defines the struct and functions to back WAMP messages. Note that these are distinct from Elixir's messages. """ # Module Attributes defstruct [:type, :code, :args] @basic_codes [hello: 1, welcome: 2, abort: 3, challenge: 4, authenticate: 5, goodbye: 6, error: 8, publish: 16, published: 17, subscribe: 32, subscribed: 33, unsubscribe: 34, unsubscribed: 35, event: 36, call: 48, cancel: 49, result: 50, register: 64, registered: 65, unregister: 66, unregistered: 67, invocation: 68, interrupt: 69, yield: 70] # Type Specs # TODO - these types are probably an over-abstraction. cut? @typep type :: atom | nil @typep args :: [wamp_type] # Message Datatypes @type wamp_type :: wamp_integer | wamp_string | wamp_bool | wamp_dict | wamp_list | wamp_id | wamp_uri @type wamp_integer :: integer # non-negative @type wamp_string :: String.t @type wamp_bool :: boolean @type wamp_id :: wamp_integer # see id @type wamp_uri :: wamp_string # see uri @type wamp_list :: List.t(wamp_type) @type wamp_dict :: Dict.t(wamp_string, wamp_type) @type new_error :: :type_code_missing | :type_code_mismatch | {:args, :not_list} | {:code, :out_of_range | :bad_value} @type t :: %__MODULE__{ type: type, code: integer, args: args} # Public Macros @doc """ This macro expands familiar `:ok`/`:error` clauses to a receive clause for their respective WAMP message from `peer` given `type`. This macro is meant to be a convenience -- feel free to drop down to the underlying `receive`. ## Example def receive_subscribed(peer, subscribe_id) do receive_message peer, :subscribed do {:ok, [^subscribe_id, subscription]} -> {:ok, subscription} {:error, reason} -> {:error, reason} end end is expanded to def receive_subscribed(peer, subscribe_id) do receive do {Peer, ^peer, %Message{type: :subscribed, args: [^subscribe_id, subscription]}} -> {:ok, subscription} {Peer, ^peer, %Message{type: :error, args: [33, _, reason | _]}} -> {:error, reason} {Peer, ^peer, {:closed, reason}} -> {:closed, reason} after @timeout -> {:error, :timeout} end end """ defmacro receive_message(peer, type, body) do code = get_code_for_type(type) closed_branch = quote do {Spell.Peer, ^unquote(peer), {:closed, _} = closed} -> closed end branches = (body[:do] || []) |> Enum.map(fn {:->, _, [[match], branch_body]} -> {match, guards} = case match do {:when, _, [match, guards]} -> {match, guards} match -> {match, true} end case match do {:ok, args} -> quote do {Spell.Peer, ^unquote(peer), %Spell.Message{type: unquote(type), args: unquote(args)}} when unquote(guards) -> unquote(branch_body) end # TODO: Support matching against details, arguments, arguments_kw {:error, reason} -> quote do {Spell.Peer, ^unquote(peer), %Spell.Message{type: :error, args: [unquote(code), _details, unquote(reason) | _]}} when unquote(guards) -> unquote(branch_body) end end end) |> Enum.map(fn [branch] -> branch end) |> Enum.concat(closed_branch) quote do receive do unquote(branches) after 5000 -> {:error, :timeout} end end end # Public Functions @doc """ Returns a new message, or raises an exception. ## Options See `new/1`. 
""" @spec new!(Keyword.t) :: t def new!(options) do # `:_unknown` is used to prevent conflicts with existing types case new(options) do {:ok, message} -> message {:error, :type_code_missing} -> raise ArgumentError, message: ":type or :code must be present" {:error, :type_code_mismatch} -> raise ArgumentError, message: ":type is not consistent with `code`" {:error, {:code, :out_of_range}} -> raise ArgumentError, message: ":code is out of range [0 - 1024]" {:error, {:code, :bad_value}} -> raise ArgumentError, message: "bad value for :code" {:error, {:args, :not_list}} -> raise ArgumentError, message: ":args must be a list" end end @doc """ Returns a new message. ## Options There is a one to one mapping between `type` and `code`. Either `type` or `code` must be provided. If both are provided, they must be consistent. * `type :: atom` the name of the message type. If `type` isn't provided, it is be set by `get_type_for_integer(code)`. * `code :: integer` the integer code for the message type. If `code` isn't isn't provided it is set by `get_integer_for_type(type)`. `type` must have a valid code. * `args :: [wamp_type]` defaults to `[]`, the list of message arguments. """ @spec new([type: type, code: integer, args: [wamp_type]]) :: {:ok, t} | {:error, new_error} def new(options) do new(Dict.get(options, :type, :_unknown), Dict.get(options, :code, :_unknown), Dict.get(options, :args, [])) end @spec new(type | nil | :_unknown, integer | nil | :_unknown, [wamp_type]) :: {:ok, t} | {:error, new_error} defp new(:_unknown, :_unknown, _args) do {:error, :type_code_missing} end defp new(:_unknown, code, args) do new(get_type_for_code(code), code, args) end defp new(type, :_unknown, args) do new(type, get_code_for_type(type), args) end defp new(_type, _code, args) when not is_list(args) do {:error, {:args, :not_list}} end defp new(_type, nil, _args) do {:error, {:code, :bad_value}} end defp new(_type, code, _args) when code < 1 or code > 1024 do {:error, {:code, :out_of_range}} end defp new(type, code, args) do if code == get_code_for_type(type) do {:ok, %__MODULE__{type: type, code: code, args: args}} else {:error, :type_code_mismatch} end end @doc """ Return a new WAMP id. To ensure the uniqueness of the new id we use :crypto.rand_bytes to generate a random seed TODO: improve `:random.uniform` using a Mersenne Twister PRNG algorithm """ @spec new_id :: integer def new_id do << a :: 32, b :: 32, c :: 32 >> = :crypto.rand_bytes(12) :random.seed(a,b,c) ((:math.pow(2, 53) + 1) |> round |> :random.uniform) - 1 end @doc """ Get the `code` for the message `type`. """ @spec get_code_for_type(type, default) :: integer | nil | default when default: any def get_code_for_type(type, default \\ nil) for {type, code} <- @basic_codes do def get_code_for_type(unquote(type), _default), do: unquote(code) end def get_code_for_type(_, default), do: default @doc """ Get the message `type` for `code`. """ @spec get_type_for_code(integer, default) :: type | nil | default when default: any def get_type_for_code(type, default \\ nil) for {type, code} <- @basic_codes do def get_type_for_code(unquote(code), _default), do: unquote(type) end def get_type_for_code(_, default), do: default end
lib/spell/message.ex
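A minimal usage sketch for the constructors above (not part of the original file); the argument values are illustrative, while the type/code pairs come from `@basic_codes`:

{:ok, hello} = Spell.Message.new(type: :hello, args: ["realm", %{roles: %{}}])
1 = hello.code

# `new!/1` raises `ArgumentError` instead of returning an error tuple.
%Spell.Message{type: :goodbye, code: 6} = Spell.Message.new!(type: :goodbye, code: 6)

# The type/code mapping can be queried in both directions.
32 = Spell.Message.get_code_for_type(:subscribe)
:subscribed = Spell.Message.get_type_for_code(33)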
defmodule Cldr.DateAndTime.Backend do
  @moduledoc false

  def define_backend_modules(config) do
    backend = config.backend
    config = Macro.escape(config)

    quote location: :keep, bind_quoted: [config: config, backend: backend] do
      defmodule DateTime do
        @doc """
        Formats a DateTime according to a format string as defined in CLDR and
        described in [TR35](http://unicode.org/reports/tr35/tr35-dates.html)

        ## Arguments

        * `datetime` is a `%DateTime{}` or `%NaiveDateTime{}` struct or any map
          that contains the keys `:year`, `:month`, `:day`, `:calendar`, `:hour`,
          `:minute` and `:second` with optional `:microsecond`.

        * `options` is a keyword list of options for formatting.

        ## Options

        * `format:` `:short` | `:medium` | `:long` | `:full`, a format string or
          any of the keys returned by `Cldr.DateTime.available_format_names`.
          The default is `:medium`

        * `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
          or a `Cldr.LanguageTag` struct. The default is `Cldr.get_locale/0`

        * `number_system:` a number system into which the formatted date digits
          should be transliterated

        * `era: :variant` will use a variant for the era if one is available in
          the locale. In the "en" locale, for example, `era: :variant` will
          return "BCE" instead of "BC".

        * `period: :variant` will use a variant for the time period and flexible
          time period if one is available in the locale. For example, in the
          "en" locale `period: :variant` will return "pm" instead of "PM"

        ## Returns

        * `{:ok, formatted_datetime}` or

        * `{:error, reason}`

        ## Examples

            iex> {:ok, datetime} = DateTime.from_naive(~N[2000-01-01 23:59:59.0], "Etc/UTC")
            iex> #{inspect(__MODULE__)}.to_string datetime
            {:ok, "Jan 1, 2000, 11:59:59 PM"}
            iex> #{inspect(__MODULE__)}.to_string datetime, locale: "en"
            {:ok, "Jan 1, 2000, 11:59:59 PM"}
            iex> #{inspect(__MODULE__)}.to_string datetime, format: :long, locale: "en"
            {:ok, "January 1, 2000 at 11:59:59 PM UTC"}
            iex> #{inspect(__MODULE__)}.to_string datetime, format: :hms, locale: "en"
            {:ok, "11:59:59 PM"}
            iex> #{inspect(__MODULE__)}.to_string datetime, format: :full, locale: "en"
            {:ok, "Saturday, January 1, 2000 at 11:59:59 PM GMT"}
            iex> #{inspect(__MODULE__)}.to_string datetime, format: :full, locale: "fr"
            {:ok, "samedi 1 janvier 2000 à 23:59:59 UTC"}

        """
        @spec to_string(map, Keyword.t()) ::
                {:ok, String.t()} | {:error, {module, String.t()}}
        def to_string(date_time, options \\ []) do
          Cldr.DateTime.to_string(date_time, unquote(backend), options)
        end

        @doc """
        Formats a DateTime according to a format string as defined in CLDR and
        described in [TR35](http://unicode.org/reports/tr35/tr35-dates.html),
        returning a formatted string or raising on error.

        ## Arguments

        * `datetime` is a `%DateTime{}` or `%NaiveDateTime{}` struct or any map
          that contains the keys `:year`, `:month`, `:day`, `:calendar`, `:hour`,
          `:minute` and `:second` with optional `:microsecond`.

        * `options` is a keyword list of options for formatting.

        ## Options

        * `format:` `:short` | `:medium` | `:long` | `:full`, a format string or
          any of the keys returned by `Cldr.DateTime.available_format_names`.
          The default is `:medium`

        * `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
          or a `Cldr.LanguageTag` struct. The default is `Cldr.get_locale/0`

        * `number_system:` a number system into which the formatted date digits
          should be transliterated

        * `era: :variant` will use a variant for the era if one is available in
          the locale. In the "en" locale, for example, `era: :variant` will
          return "BCE" instead of "BC".
* `period: :variant` will use a variant for the time period and flexible time period if one is available in the locale. For example, in the "en" locale `period: :variant` will return "pm" instead of "PM" ## Returns * `formatted_datetime` or * raises an exception ## Examples iex> {:ok, datetime} = DateTime.from_naive(~N[2000-01-01 23:59:59.0], "Etc/UTC") iex> #{inspect(__MODULE__)}.to_string! datetime, locale: "en" "Jan 1, 2000, 11:59:59 PM" iex> #{inspect(__MODULE__)}.to_string! datetime, format: :long, locale: "en" "January 1, 2000 at 11:59:59 PM UTC" iex> #{inspect(__MODULE__)}.to_string! datetime, format: :full, locale: "en" "Saturday, January 1, 2000 at 11:59:59 PM GMT" iex> #{inspect(__MODULE__)}.to_string! datetime, format: :full, locale: "fr" "samedi 1 janvier 2000 à 23:59:59 UTC" """ @spec to_string!(map, Keyword.t()) :: String.t() | no_return def to_string!(date_time, options \\ []) do Cldr.DateTime.to_string!(date_time, unquote(backend), options) end end defmodule Date do @doc """ Formats a date according to a format string as defined in CLDR and described in [TR35](http://unicode.org/reports/tr35/tr35-dates.html) ## Arguments * `date` is a `%Date{}` struct or any map that contains the keys `year`, `month`, `day` and `calendar` * `options` is a keyword list of options for formatting. The valid options are: ## Options * `format:` `:short` | `:medium` | `:long` | `:full` or a format string. The default is `:medium` * `locale:` any locale returned by `Cldr.known_locale_names/1`. The default is `Cldr.get_locale()`. * `number_system:` a number system into which the formatted date digits should be transliterated ## Returns * `{:ok, formatted_string}` or * `{:error, reason}` ## Examples iex> #{inspect(__MODULE__)}.to_string ~D[2017-07-10], format: :medium, locale: "en" {:ok, "Jul 10, 2017"} iex> #{inspect(__MODULE__)}.to_string ~D[2017-07-10], locale: "en" {:ok, "Jul 10, 2017"} iex> #{inspect(__MODULE__)}.to_string ~D[2017-07-10], format: :full, locale: "en" {:ok, "Monday, July 10, 2017"} iex> #{inspect(__MODULE__)}.to_string ~D[2017-07-10], format: :short, locale: "en" {:ok, "7/10/17"} iex> #{inspect(__MODULE__)}.to_string ~D[2017-07-10], format: :short, locale: "fr" {:ok, "10/07/2017"} iex> #{inspect(__MODULE__)}.to_string ~D[2017-07-10], format: :long, locale: "af" {:ok, "10 Julie 2017"} """ @spec to_string(map, Keyword.t()) :: {:ok, String.t()} | {:error, {module, String.t()}} def to_string(date, options \\ []) do Cldr.Date.to_string(date, unquote(backend), options) end @doc """ Formats a date according to a format string as defined in CLDR and described in [TR35](http://unicode.org/reports/tr35/tr35-dates.html) ## Arguments * `date` is a `%Date{}` struct or any map that contains the keys `year`, `month`, `day` and `calendar` * `options` is a keyword list of options for formatting. ## Options * `format:` `:short` | `:medium` | `:long` | `:full` or a format string. The default is `:medium` * `locale` is any valid locale name returned by `Cldr.known_locale_names/0` or a `Cldr.LanguageTag` struct. The default is `Cldr.get_locale/0` * `number_system:` a number system into which the formatted date digits should be transliterated ## Returns * `formatted_date` or * raises an exception. ## Examples iex> #{inspect(__MODULE__)}.to_string! ~D[2017-07-10], format: :medium, locale: "en" "Jul 10, 2017" iex> #{inspect(__MODULE__)}.to_string! ~D[2017-07-10], locale: "en" "Jul 10, 2017" iex> #{inspect(__MODULE__)}.to_string! 
~D[2017-07-10], format: :full, locale: "en"
            "Monday, July 10, 2017"

            iex> #{inspect(__MODULE__)}.to_string! ~D[2017-07-10], format: :short, locale: "en"
            "7/10/17"

            iex> #{inspect(__MODULE__)}.to_string! ~D[2017-07-10], format: :short, locale: "fr"
            "10/07/2017"

            iex> #{inspect(__MODULE__)}.to_string! ~D[2017-07-10], format: :long, locale: "af"
            "10 Julie 2017"

        """
        @spec to_string!(map, Keyword.t()) :: String.t() | no_return
        def to_string!(date, options \\ []) do
          Cldr.Date.to_string!(date, unquote(backend), options)
        end
      end

      defmodule Time do
        @doc """
        Formats a time according to a format string as defined in CLDR and
        described in [TR35](http://unicode.org/reports/tr35/tr35-dates.html)

        ## Returns

        * `{:ok, formatted_time}` or

        * `{:error, reason}`.

        ## Arguments

        * `time` is a `%DateTime{}` or `%NaiveDateTime{}` struct or any map that
          contains the keys `hour`, `minute`, `second` and optionally `calendar`
          and `microsecond`

        * `options` is a keyword list of options for formatting.

        ## Options

        * `format:` `:short` | `:medium` | `:long` | `:full` or a format string.
          The default is `:medium`

        * `locale:` any locale returned by `Cldr.known_locale_names/1`. The
          default is `Cldr.get_locale/0`

        * `number_system:` a number system into which the formatted date digits
          should be transliterated

        * `era: :variant` will use a variant for the era if one is available in
          the locale. In the "en" locale, for example, `era: :variant` will
          return "BCE" instead of "BC".

        * `period: :variant` will use a variant for the time period and flexible
          time period if one is available in the locale. For example, in the
          "en" locale `period: :variant` will return "pm" instead of "PM"

        ## Examples

            iex> Cldr.Time.to_string ~T[07:35:13.215217]
            {:ok, "7:35:13 AM"}

            iex> Cldr.Time.to_string ~T[07:35:13.215217], format: :short
            {:ok, "7:35 AM"}

            iex> Cldr.Time.to_string ~T[07:35:13.215217], format: :medium, locale: "fr"
            {:ok, "07:35:13"}

            iex> Cldr.Time.to_string ~T[07:35:13.215217], format: :medium
            {:ok, "7:35:13 AM"}

            iex> {:ok, datetime} = DateTime.from_naive(~N[2000-01-01 23:59:59.0], "Etc/UTC")
            iex> Cldr.Time.to_string datetime, format: :long
            {:ok, "11:59:59 PM UTC"}

        """
        @spec to_string(map, Keyword.t()) ::
                {:ok, String.t()} | {:error, {module, String.t()}}
        def to_string(time, options \\ []) do
          Cldr.Time.to_string(time, unquote(backend), options)
        end

        @doc """
        Formats a time according to a format string as defined in CLDR and
        described in [TR35](http://unicode.org/reports/tr35/tr35-dates.html).

        ## Arguments

        * `time` is a `%DateTime{}` or `%NaiveDateTime{}` struct or any map that
          contains the keys `hour`, `minute`, `second` and optionally `calendar`
          and `microsecond`

        * `options` is a keyword list of options for formatting.

        ## Options

        * `format:` `:short` | `:medium` | `:long` | `:full` or a format string.
          The default is `:medium`

        * `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
          or a `Cldr.LanguageTag` struct. The default is `Cldr.get_locale/0`

        * `number_system:` a number system into which the formatted date digits
          should be transliterated

        * `era: :variant` will use a variant for the era if one is available in
          the locale. In the "en" locale, for example, `era: :variant` will
          return "BCE" instead of "BC".

        * `period: :variant` will use a variant for the time period and flexible
          time period if one is available in the locale. For example, in the
          "en" locale `period: :variant` will return "pm" instead of "PM"

        ## Returns

        * `formatted_time_string` or

        * raises an exception.

        ## Examples

            iex> #{inspect(__MODULE__)}.to_string!
~T[07:35:13.215217] "7:35:13 AM" iex> #{inspect(__MODULE__)}.to_string! ~T[07:35:13.215217], format: :short "7:35 AM" iex> #{inspect(__MODULE__)}.to_string ~T[07:35:13.215217], format: :short, period: :variant {:ok, "7:35 AM"} iex> #{inspect(__MODULE__)}.to_string! ~T[07:35:13.215217], format: :medium, locale: "fr" "07:35:13" iex> #{inspect(__MODULE__)}.to_string! ~T[07:35:13.215217], format: :medium "7:35:13 AM" iex> {:ok, datetime} = DateTime.from_naive(~N[2000-01-01 23:59:59.0], "Etc/UTC") iex> #{inspect(__MODULE__)}.to_string! datetime, format: :long "11:59:59 PM UTC" """ @spec to_string!(map, Keyword.t()) :: String.t() | no_return def to_string!(time, options \\ []) do Cldr.Time.to_string!(time, unquote(backend), options) end end end end end
lib/cldr/backend/date_time.ex
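A hedged sketch of how the quoted block above is typically consumed: a host backend module (here the assumed name `MyApp.Cldr`) compiles it, which defines `MyApp.Cldr.DateTime`, `MyApp.Cldr.Date` and `MyApp.Cldr.Time`. Expected values are taken from the doctests above:

{:ok, "Monday, July 10, 2017"} =
  MyApp.Cldr.Date.to_string(~D[2017-07-10], format: :full, locale: "en")

{:ok, "7:35 AM"} =
  MyApp.Cldr.Time.to_string(~T[07:35:13.215217], format: :short)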
defmodule ClusterGCE.Strategy.Labels do
  @moduledoc """
  Clustering strategy for Google Compute Engine.

  This strategy checks for the list of app versions that are currently
  receiving HTTP traffic. For each version that is listed, the list of
  instances running for that version is fetched. Once all of the instances
  have been received, they attempt to connect to each other.

  **Note**: This strategy only connects nodes that are able to receive GCP
  internal DNS traffic.

  To cluster an application running in Google Compute Engine, define a
  topology for `libcluster`.

  ```elixir
  config :libcluster,
    topologies: [
      my_app: [
        strategy: ClusterGCE.Strategy.Labels,
        config: [
          polling_interval: 10_000,
          project_id: "my-project",
          label: "my-app:environment-name"
        ]
      ]
    ]
  ```

  ## Configurable Options

  Options can be set for the strategy under the `:config` key when defining
  the topology.

  * `:polling_interval` - Interval for checking for the list of running
    instances. Defaults to `10_000`
  * `:project_id` - Google Cloud project name
  * `:label` - A key value map of instance labels to match

  ### Release Configuration

  Update your release's `vm.args` file to include the following lines.

  ```
  ## Name of the node
  -name <%= release_name%>@${GOOGLE_COMPUTE_ENGINE_INSTANCE}.${GOOGLE_COMPUTE_ENGINE_ZONE}.c.${GOOGLE_CLOUD_PROJECT}.internal

  ## Limit distributed erlang ports to a single port
  -kernel inet_dist_listen_min 9999
  -kernel inet_dist_listen_max 9999
  ```

  Run your application with the environment variable `REPLACE_OS_VARS=true`
  and forward the following tcp ports:

  - `4369 # epmd`
  - `9999 # erlang distribution`
  """

  use GenServer
  use Cluster.Strategy

  alias Cluster.Strategy.State

  def start_link(args) do
    GenServer.start_link(__MODULE__, args)
  end

  @impl true
  def init([%State{} = state]) do
    {:ok, load(state)}
  end

  @impl true
  def handle_info(:timeout, state) do
    handle_info(:load, state)
  end

  def handle_info(:load, %State{} = state) do
    {:noreply, load(state)}
  end

  def handle_info(_, state) do
    {:noreply, state}
  end

  defp load(%State{} = state) do
    connect = state.connect
    list_nodes = state.list_nodes
    topology = state.topology

    {:ok, nodes} = ClusterGCE.get_nodes(state)
    Cluster.Strategy.connect_nodes(topology, connect, list_nodes, nodes)

    Process.send_after(self(), :load, polling_interval(state))
    state
  end

  @default_polling_interval 10_000
  defp polling_interval(%State{config: config}) do
    Keyword.get(config, :polling_interval, @default_polling_interval)
  end
end
lib/cluster_gce/strategy/labels.ex
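A minimal sketch of wiring the strategy into a supervision tree, following the usual libcluster pattern; the application and supervisor names here are illustrative assumptions:

topologies = [
  gce: [
    strategy: ClusterGCE.Strategy.Labels,
    config: [
      polling_interval: 10_000,
      project_id: "my-project",
      label: "my-app:production"
    ]
  ]
]

children = [
  # Cluster.Supervisor takes the topologies plus its own options.
  {Cluster.Supervisor, [topologies, [name: MyApp.ClusterSupervisor]]}
]

Supervisor.start_link(children, strategy: :one_for_one)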
defmodule Plug.Conn.Cookies do @moduledoc """ Conveniences for encoding and decoding cookies. """ @doc """ Decodes the given cookies as given in either a request or response header. If a cookie is invalid, it is automatically discarded from the result. ## Examples iex> decode("key1=value1;key2=value2") %{"key1" => "value1", "key2" => "value2"} """ def decode(cookie) do do_decode(:binary.split(cookie, ";", [:global]), %{}) end defp do_decode([], acc), do: acc defp do_decode([h | t], acc) do case decode_kv(h) do {k, v} -> do_decode(t, Map.put(acc, k, v)) false -> do_decode(t, acc) end end defp decode_kv(""), do: false defp decode_kv(<<h, t::binary>>) when h in [?\s, ?\t], do: decode_kv(t) defp decode_kv(kv), do: decode_key(kv, "") defp decode_key("", _key), do: false defp decode_key(<<?=, _::binary>>, ""), do: false defp decode_key(<<?=, t::binary>>, key), do: decode_value(t, "", key, "") defp decode_key(<<h, _::binary>>, _key) when h in [?\s, ?\t, ?\r, ?\n, ?\v, ?\f], do: false defp decode_key(<<h, t::binary>>, key), do: decode_key(t, <<key::binary, h>>) defp decode_value("", _spaces, key, value), do: {key, value} defp decode_value(<<?\s, t::binary>>, spaces, key, value), do: decode_value(t, <<spaces::binary, ?\s>>, key, value) defp decode_value(<<h, _::binary>>, _spaces, _key, _value) when h in [?\t, ?\r, ?\n, ?\v, ?\f], do: false defp decode_value(<<h, t::binary>>, spaces, key, value), do: decode_value(t, "", key, <<value::binary, spaces::binary, h>>) @doc """ Encodes the given cookies as expected in a response header. """ def encode(key, opts \\ %{}) when is_map(opts) do value = Map.get(opts, :value) path = Map.get(opts, :path, "/") IO.iodata_to_binary([ "#{key}=#{value}; path=#{path}", emit_if(opts[:domain], &["; domain=", &1]), emit_if(opts[:max_age], &encode_max_age(&1, opts)), emit_if(Map.get(opts, :secure, false), "; secure"), emit_if(Map.get(opts, :http_only, true), "; HttpOnly"), emit_if(opts[:extra], &["; ", &1]) ]) end defp encode_max_age(max_age, opts) do time = Map.get(opts, :universal_time) || :calendar.universal_time() time = add_seconds(time, max_age) ["; expires=", rfc2822(time), "; max-age=", Integer.to_string(max_age)] end defp emit_if(value, fun_or_string) do cond do !value -> [] is_function(fun_or_string) -> fun_or_string.(value) is_binary(fun_or_string) -> fun_or_string end end defp pad(number) when number in 0..9, do: <<?0, ?0 + number>> defp pad(number), do: Integer.to_string(number) defp rfc2822({{year, month, day} = date, {hour, minute, second}}) do # Sat, 17 Apr 2010 14:00:00 GMT [ weekday_name(:calendar.day_of_the_week(date)), ?,, ?\s, pad(day), ?\s, month_name(month), ?\s, Integer.to_string(year), ?\s, pad(hour), ?:, pad(minute), ?:, pad(second), " GMT" ] end defp weekday_name(1), do: "Mon" defp weekday_name(2), do: "Tue" defp weekday_name(3), do: "Wed" defp weekday_name(4), do: "Thu" defp weekday_name(5), do: "Fri" defp weekday_name(6), do: "Sat" defp weekday_name(7), do: "Sun" defp month_name(1), do: "Jan" defp month_name(2), do: "Feb" defp month_name(3), do: "Mar" defp month_name(4), do: "Apr" defp month_name(5), do: "May" defp month_name(6), do: "Jun" defp month_name(7), do: "Jul" defp month_name(8), do: "Aug" defp month_name(9), do: "Sep" defp month_name(10), do: "Oct" defp month_name(11), do: "Nov" defp month_name(12), do: "Dec" defp add_seconds(time, seconds_to_add) do time_seconds = :calendar.datetime_to_gregorian_seconds(time) :calendar.gregorian_seconds_to_datetime(time_seconds + seconds_to_add) end end
lib/plug/conn/cookies.ex
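A quick sketch of the round trip implemented above; the cookie names and values are illustrative:

# Request header decoding: invalid pairs are discarded, leading whitespace trimmed.
%{"key1" => "value1", "key2" => "value2"} =
  Plug.Conn.Cookies.decode("key1=value1; key2=value2")

# Response header encoding: `path` defaults to "/" and `http_only` to true.
"session=abc123; path=/; secure; HttpOnly" =
  Plug.Conn.Cookies.encode("session", %{value: "abc123", secure: true})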
defmodule ICalex.Components.Component do @moduledoc false alias ICalex.Props.{Factory, Parameters, VText} alias ICalex.Parsers.{ContentLines, ContentLine} @enforce_keys [:name] defstruct name: nil, properties: %{}, components: [], required: [], singletons: [], multiple: [], exclusive: [], inclusive: [], canonical_order: [] def is_empty(%__MODULE__{properties: properties, components: components} = _params), do: properties === %{} and components === [] def sorted_keys(%__MODULE__{} = component) do keys = component.properties |> Map.keys() |> Enum.map(&String.downcase(&1)) canonical_keys = component.canonical_order |> Enum.filter(&(&1 in keys)) non_canonical_keys = keys -- canonical_keys canonical_keys ++ Enum.sort(non_canonical_keys) end def property_items( %__MODULE__{} = component, recursive \\ true, sorted \\ true ) do component_name = ICal.to_ical(%VText{value: component.name}) properties = [{"BEGIN", component_name}] property_names = if sorted, do: sorted_keys(component), else: Map.keys(component.properties) properties = property_names |> Enum.reduce(properties, fn name, acc -> values = Map.get(component.properties, name) if is_list(values) do acc ++ Enum.map(values, fn value -> {String.upcase(name), value} end) else acc ++ [{String.upcase(name), values}] end end) properties = if recursive do component.components |> Enum.reduce(properties, fn component, acc -> acc ++ property_items(component, recursive, sorted) end) else properties end properties ++ [{"END", component_name}] end def to_ical(%__MODULE__{} = component) do component |> property_items |> Enum.map(fn {name, value} -> params = if is_map(value) and Map.has_key?(value, :params), do: value.params, else: %Parameters{} ContentLine.from_parts(name, params, value, true) end) |> ContentLines.to_ical() end def add_component(%__MODULE__{} = component, sub_component) do Map.put(component, :components, component.components ++ [sub_component]) end def add( %__MODULE__{properties: properties} = component, name, value, parameters \\ nil, encode \\ true ) do name = String.downcase(name) value = case value do # set to UTC %NaiveDateTime{} when name in ["dtstamp", "created", "last-modified"] -> Timex.to_datetime(value) # TODO when TZID in Parameters or DateTime not utc _ -> value end value = if encode and is_list(value) and String.downcase(name) not in ["rdate", "exdate"] do for v <- value, do: encode(name, v, parameters, encode) else encode(name, value, parameters, encode) end value = if Map.has_key?(properties, name) do old_value = Map.get(properties, name) cond do is_list(old_value) and is_list(value) -> old_value ++ value is_list(old_value) -> old_value ++ [value] is_list(value) -> [old_value | value] true -> [old_value, value] end else value end Map.put(component, :properties, Map.put(properties, name, value)) end defp encode(_, value, _, false), do: value defp encode(name, value, nil, encode), do: encode(name, value, %Parameters{}, encode) defp encode(name, value, %Parameters{} = parameters, _encode) do if ICalex.Props.is_prop(value) do value else Factory.get_type_name(name) |> Factory.get_type(value, parameters) end end defp encode(name, value, %{} = parameters, encode), do: encode(name, value, %Parameters{parameters: parameters}, encode) def validate(%__MODULE__{} = component) do _keys = component.properties |> Map.keys() |> Enum.map(&String.downcase(&1)) # TODO validate end defimpl ICal do def to_ical(data), do: ICalex.Components.Component.to_ical(data) end end
lib/components/component.ex
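A hedged sketch of composing components with the helpers above; it assumes the `Factory` registry resolves the illustrative property names ("summary", "dtstart") and that the `ICal` protocol implementation at the bottom of the module renders the tree:

alias ICalex.Components.Component

event =
  %Component{name: "VEVENT"}
  |> Component.add("summary", "Team sync")
  |> Component.add("dtstart", ~U[2021-06-01 09:00:00Z])

calendar =
  %Component{name: "VCALENDAR"}
  |> Component.add_component(event)

# Renders BEGIN:VCALENDAR ... BEGIN:VEVENT ... END:VEVENT ... END:VCALENDAR
ical_text = ICal.to_ical(calendar)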
defmodule Exfile.ProcessorChain do @moduledoc """ A module to run a chain of processors on a file. """ alias Exfile.{LocalFile, ProcessorRegistry} @type uploadable :: %Exfile.File{} | %Exfile.LocalFile{} @type name :: String.t @type args :: [String.t, ...] @type opts :: [key: any] @type definition :: name | {name, args} | {name, args, opts} @doc """ Apply a chain of processors to an uploadable. If the list of processor definitions is empty, it will pass-through the uploadable argument untouched. If the list of processor definitions is not empty, it will coerce the uploadable argument in to a LocalFile, downloading it from the backend if necessary. The definition list accepts three different terms: * A string, representing the processor name to apply * A 2-element tuple: `{string, list of processor arguments}` * A 3-element tuple: `{string, list of processor arguments, list of processor options}` If a processor encounters an error, the chain is halted and the error is returned immediately in the format `{:error, reason}`. """ @spec apply_processors([], uploadable) :: {:ok, uploadable} def apply_processors([], uploadable) do # Pass-through {:ok, uploadable} end @spec apply_processors([definition, ...], uploadable) :: {:ok, LocalFile.t} | {:error, atom} def apply_processors(processors, uploadable) do local_file = coerce_to_local_file(uploadable) do_process(processors, {:ok, local_file}) end def coerce_to_local_file(%LocalFile{} = local_file), do: local_file def coerce_to_local_file(%Exfile.File{} = file) do case Exfile.File.open(file) do { :ok, local_file } -> local_file { :error, _ } -> %LocalFile{} end end defp do_process(_, {:error, _} = error_term), do: error_term defp do_process([{name, args} | other], file), do: do_process([{name, args, []} | other], file) defp do_process([name | other], file) when is_binary(name), do: do_process([{name, [], []} | other], file) defp do_process([{name, args, opts} | other], {:ok, file}) do process_result = ProcessorRegistry.process(name, file, args, opts) do_process(other, process_result) end defp do_process([], file), do: file end
lib/exfile/processor_chain.ex
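A short sketch of the three definition shapes accepted above; the processor names are illustrative and would need to be registered with `ProcessorRegistry` before the chain can succeed:

definitions = [
  "convert",                                # name only
  {"resize", ["100x100"]},                  # name + args
  {"resize", ["100x100"], [format: "jpg"]}  # name + args + opts
]

input = %Exfile.LocalFile{path: "/tmp/input.png"}

case Exfile.ProcessorChain.apply_processors(definitions, input) do
  {:ok, %Exfile.LocalFile{} = processed} -> processed
  {:error, reason} -> raise "processing failed: #{inspect(reason)}"
end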
defmodule ToxiproxyEx do
  alias ToxiproxyEx.{Proxy, Client, Toxic, ToxicCollection, ServerError}

  @external_resource "README.md"
  @moduledoc "README.md"
             |> File.read!()
             |> String.split("<!-- MDOC !-->")
             |> Enum.fetch!(1)

  @typedoc """
  A proxy that intercepts traffic to and from an upstream server.
  """
  @opaque proxy :: %Proxy{}

  @typedoc """
  A collection of proxies.
  """
  @opaque toxic_collection :: %ToxicCollection{}

  @typedoc """
  A hostname or IP address including a port number, e.g. `localhost:4539`.
  """
  @type host_with_port :: String.t()

  @typedoc """
  A map containing fields required to setup a proxy. Designed to be used with
  `ToxiproxyEx.populate!/1`.
  """
  @type proxy_map :: %{
          required(:name) => String.t(),
          required(:upstream) => host_with_port(),
          optional(:listen) => host_with_port(),
          optional(:enabled) => true | false
        }

  @doc """
  Creates a proxy on the toxiproxy server.

  Raises `ToxiproxyEx.ServerError` if the creation fails.

  ## Examples

  Create a new proxy:

      iex> ToxiproxyEx.create!(upstream: "localhost:3306", name: "test_mysql_master")

  Create a new proxy that listens on a specific port:

      iex> ToxiproxyEx.create!(upstream: "localhost:3306", listen: "localhost:5555", name: "test_mysql_master")

  Create a new proxy that is disabled by default:

      iex> ToxiproxyEx.create!(upstream: "localhost:3306", name: "test_mysql_master", enabled: false)
  """
  @spec create!(
          upstream: host_with_port(),
          name: String.t() | atom(),
          listen: host_with_port() | nil,
          enabled: true | false | nil
        ) :: proxy()
  def create!(options) do
    case Proxy.create(options) do
      {:ok, proxy} -> proxy
      :error -> raise ServerError, message: "Could not create proxy"
    end
  end

  @doc """
  Deletes one or multiple proxies on the toxiproxy server.

  Raises `ToxiproxyEx.ServerError` if the deletion fails.

  ## Examples

  Destroy a single proxy:

      iex> ToxiproxyEx.create!(upstream: "localhost:3456", name: :test_mysql_master)
      iex> proxy = ToxiproxyEx.get!(:test_mysql_master)
      iex> ToxiproxyEx.destroy!(proxy)
      :ok

  Destroy all proxies:

      iex> ToxiproxyEx.create!(upstream: "localhost:3456", name: :test_mysql_master)
      iex> proxies = ToxiproxyEx.all!()
      iex> ToxiproxyEx.destroy!(proxies)
      :ok
  """
  @spec destroy!(proxy() | toxic_collection()) :: :ok
  def destroy!(%Proxy{} = proxy) do
    destroy!(ToxicCollection.new(proxy))
  end

  def destroy!(%ToxicCollection{proxies: proxies}) do
    Enum.each(proxies, fn proxy ->
      case Proxy.destroy(proxy) do
        :ok -> nil
        :error -> raise ServerError, message: "Could not destroy proxy"
      end
    end)

    :ok
  end

  @doc """
  Retrieves a proxy from the toxiproxy server.

  Raises `ToxiproxyEx.ServerError` if the proxy could not be retrieved.
  Raises `ArgumentError` if the proxy does not exist.

  ## Examples

  Retrieve a proxy:

      iex> ToxiproxyEx.create!(upstream: "localhost:3306", name: "test_mysql_master")
      iex> ToxiproxyEx.get!(:test_mysql_master)
  """
  @spec get!(atom() | String.t()) :: proxy()
  def get!(name) when is_atom(name) do
    get!(Atom.to_string(name))
  end

  def get!(name) do
    case Enum.find(all!().proxies, &(&1.name == name)) do
      nil -> raise ArgumentError, message: "Unknown proxy with name '#{name}'"
      proxy -> proxy
    end
  end

  @doc """
  Retrieves a list of proxies from the toxiproxy server whose names match the
  specified regex.

  Raises `ToxiproxyEx.ServerError` if the list of proxies could not be retrieved.
  Raises `ArgumentError` if no proxy matches the specified regex.
  ## Examples

  Retrieve proxies matching a pattern:

      iex> ToxiproxyEx.create!(upstream: "localhost:3306", name: "test_mysql_master")
      iex> ToxiproxyEx.create!(upstream: "localhost:3307", name: "test_mysql_follower")
      iex> ToxiproxyEx.create!(upstream: "localhost:3308", name: "test_redis_master")
      iex> ToxiproxyEx.grep!(~r/master/)
  """
  @spec grep!(Regex.t()) :: toxic_collection()
  def grep!(pattern) do
    case Enum.filter(all!().proxies, &String.match?(&1.name, pattern)) do
      proxies = [_h | _t] -> ToxicCollection.new(proxies)
      [] -> raise ArgumentError, message: "No proxies found for regex '#{pattern}'"
    end
  end

  @doc """
  Retrieves a list of all proxies from the toxiproxy server.

  Raises `ToxiproxyEx.ServerError` if the list of proxies could not be retrieved.

  ## Examples

  Retrieve all proxies:

      iex> ToxiproxyEx.create!(upstream: "localhost:3306", name: "test_mysql_master")
      iex> ToxiproxyEx.create!(upstream: "localhost:3307", name: "test_redis_master")
      iex> ToxiproxyEx.all!()
  """
  @spec all!() :: toxic_collection()
  def all!() do
    case Client.list_proxies() do
      {:ok, %{body: proxies}} -> Enum.map(proxies, &parse_proxy/1)
      _ -> raise ServerError, message: "Could not fetch proxies."
    end
    |> ToxicCollection.new()
  end

  defp parse_proxy(
         {_proxy_name,
          %{
            "upstream" => upstream,
            "listen" => listen,
            "name" => name,
            "enabled" => enabled
          }}
       ) do
    %Proxy{upstream: upstream, listen: listen, name: name, enabled: enabled}
  end

  @doc """
  Adds an upstream toxic to the proxy or list of proxies that will be enabled
  when passed to `ToxiproxyEx.apply!/2`.

  ## Examples

  Add an upstream toxic to a proxy:

      iex> proxy = ToxiproxyEx.create!(upstream: "localhost:3306", name: "test_mysql_master")
      iex> proxies = ToxiproxyEx.upstream(proxy, :latency, latency: 1000)
      iex> ToxiproxyEx.apply!(proxies, fn ->
      ...>  # Do some testing
      ...>  nil
      ...> end)

  Add an upstream toxic to a list of proxies:

      iex> ToxiproxyEx.create!(upstream: "localhost:3306", name: "test_mysql_master")
      iex> ToxiproxyEx.create!(upstream: "localhost:3307", name: "test_mysql_follower")
      iex> proxies = ToxiproxyEx.all!()
      iex> proxies = ToxiproxyEx.upstream(proxies, :latency, latency: 1000)
      iex> ToxiproxyEx.apply!(proxies, fn ->
      ...>  # Do some testing
      ...>  nil
      ...> end)
  """
  @spec upstream(proxy() | toxic_collection(), atom(), []) :: toxic_collection()
  def upstream(proxy_or_collection, type, attrs \\ [])

  def upstream(proxy = %Proxy{}, type, attrs) do
    upstream(ToxicCollection.new(proxy), type, attrs)
  end

  def upstream(%ToxicCollection{proxies: proxies, toxics: toxics}, type, attrs) do
    name = Keyword.get(attrs, :name)
    toxicity = Keyword.get(attrs, :toxicity)

    attrs =
      attrs
      |> Keyword.delete(:name)
      |> Keyword.delete(:toxicity)

    new_toxics =
      Enum.map(proxies, fn proxy ->
        Toxic.new(
          name: name,
          type: type,
          proxy_name: proxy.name,
          stream: :upstream,
          toxicity: toxicity,
          attributes: attrs
        )
      end)

    %ToxicCollection{proxies: proxies, toxics: toxics ++ new_toxics}
  end

  @doc """
  Alias for `ToxiproxyEx.downstream/3`.
  """
  @spec toxic(proxy() | toxic_collection(), atom(), []) :: toxic_collection()
  def toxic(proxy_or_collection, type, attrs \\ []) do
    downstream(proxy_or_collection, type, attrs)
  end

  @doc """
  Alias for `ToxiproxyEx.downstream/3`.
  """
  @spec toxicate(proxy() | toxic_collection(), atom(), []) :: toxic_collection()
  def toxicate(proxy_or_collection, type, attrs \\ []) do
    downstream(proxy_or_collection, type, attrs)
  end

  @doc """
  Adds a downstream toxic to the proxy or list of proxies that will be enabled
  when passed to `ToxiproxyEx.apply!/2`.
  ## Examples

  Add a downstream toxic to a proxy:

      iex> proxy = ToxiproxyEx.create!(upstream: "localhost:3306", name: "test_mysql_master")
      iex> proxies = ToxiproxyEx.downstream(proxy, :latency, latency: 1000)
      iex> ToxiproxyEx.apply!(proxies, fn ->
      ...>  # Do some testing
      ...>  nil
      ...> end)

  Add a downstream toxic to a list of proxies:

      iex> ToxiproxyEx.create!(upstream: "localhost:3306", name: "test_mysql_master")
      iex> ToxiproxyEx.create!(upstream: "localhost:3307", name: "test_mysql_follower")
      iex> proxies = ToxiproxyEx.all!()
      iex> proxies = ToxiproxyEx.downstream(proxies, :latency, latency: 1000)
      iex> ToxiproxyEx.apply!(proxies, fn ->
      ...>  # Do some testing
      ...>  nil
      ...> end)
  """
  @spec downstream(proxy() | toxic_collection(), atom(), []) :: toxic_collection()
  def downstream(proxy_or_collection, type, attrs \\ [])

  def downstream(proxy = %Proxy{}, type, attrs) do
    downstream(ToxicCollection.new(proxy), type, attrs)
  end

  def downstream(%ToxicCollection{proxies: proxies, toxics: toxics}, type, attrs) do
    name = Keyword.get(attrs, :name)
    toxicity = Keyword.get(attrs, :toxicity)

    attrs =
      attrs
      |> Keyword.delete(:name)
      |> Keyword.delete(:toxicity)

    new_toxics =
      Enum.map(proxies, fn proxy ->
        Toxic.new(
          name: name,
          type: type,
          proxy_name: proxy.name,
          stream: :downstream,
          toxicity: toxicity,
          attributes: attrs
        )
      end)

    %ToxicCollection{proxies: proxies, toxics: toxics ++ new_toxics}
  end

  @doc """
  Applies all toxics previously defined on the list of proxies during the
  duration of the given function.

  Raises `ToxiproxyEx.ServerError` if the toxics could not be enabled and
  disabled again on the server.

  ## Examples

  Add toxics and apply them to a single proxy:

      iex> proxy = ToxiproxyEx.create!(upstream: "localhost:3306", name: "test_mysql_master")
      iex> proxies = ToxiproxyEx.downstream(proxy, :slow_close, delay: 100)
      iex> proxies = ToxiproxyEx.downstream(proxies, :latency, jitter: 300)
      iex> ToxiproxyEx.apply!(proxies, fn ->
      ...>  # All calls to mysql master are now slow at responding and closing.
      ...>  nil
      ...> end)

  Add toxics and apply them to a list of proxies:

      iex> ToxiproxyEx.create!(upstream: "localhost:3306", name: "test_mysql_master")
      iex> ToxiproxyEx.create!(upstream: "localhost:3306", name: "test_mysql_follower")
      iex> proxies = ToxiproxyEx.all!()
      iex> proxies = ToxiproxyEx.downstream(proxies, :slow_close, delay: 100)
      iex> proxies = ToxiproxyEx.downstream(proxies, :latency, jitter: 300)
      iex> ToxiproxyEx.apply!(proxies, fn ->
      ...>  # All calls to mysql master and follower are now slow at responding and closing.
      ...>  nil
      ...> end)
  """
  @spec apply!(toxic_collection(), (-> any())) :: :ok
  def apply!(%ToxicCollection{toxics: toxics}, fun) do
    dups =
      Enum.group_by(toxics, fn t -> [t.name, t.proxy_name] end)
      |> Enum.map(fn {_group, toxics} -> toxics end)
      |> Enum.filter(fn toxics -> length(toxics) > 1 end)

    if Enum.empty?(dups) do
      # Note: We probably don't care about the updated toxics here, but we
      # still use them rather than the ones passed into the function.
      toxics =
        Enum.map(toxics, fn toxic ->
          case Toxic.create(toxic) do
            {:ok, toxic} -> toxic
            :error -> raise ServerError, message: "Could not create toxic '#{toxic.name}'"
          end
        end)

      fun.()

      Enum.each(toxics, fn toxic ->
        case Toxic.destroy(toxic) do
          :ok -> nil
          :error -> raise ServerError, message: "Could not destroy toxic '#{toxic.name}'"
        end
      end)

      :ok
    else
      raise ArgumentError,
        message:
          "There are multiple toxics with the name '#{hd(hd(dups)).name}' for proxy '#{hd(hd(dups)).proxy_name}', please override the default name (<type>_<direction>)"
    end
  end

  @doc """
  Takes down the proxy or the list of proxies during the duration of the given
  function.

  Raises `ToxiproxyEx.ServerError` if the proxy or list of proxies could not
  be disabled and enabled again on the server.

  ## Examples

  Take down a single proxy:

      iex> proxy = ToxiproxyEx.create!(upstream: "localhost:3306", name: "test_mysql_master")
      iex> ToxiproxyEx.down!(proxy, fn ->
      ...>  # Takes mysql master down.
      ...>  nil
      ...> end)

  Take down a list of proxies:

      iex> ToxiproxyEx.create!(upstream: "localhost:3306", name: "test_mysql_master")
      iex> ToxiproxyEx.create!(upstream: "localhost:3306", name: "test_mysql_follower")
      iex> proxies = ToxiproxyEx.all!()
      iex> ToxiproxyEx.down!(proxies, fn ->
      ...>  # Takes mysql master and follower down.
      ...>  nil
      ...> end)
  """
  @spec down!(proxy() | toxic_collection(), (-> any())) :: :ok
  def down!(proxy = %Proxy{}, fun) do
    down!(ToxicCollection.new(proxy), fun)
  end

  def down!(%ToxicCollection{proxies: proxies}, fun) do
    Enum.each(proxies, fn proxy ->
      case Proxy.disable(proxy) do
        :ok -> nil
        :error -> raise ServerError, message: "Could not disable proxy '#{proxy.name}'"
      end
    end)

    fun.()

    Enum.each(proxies, fn proxy ->
      case Proxy.enable(proxy) do
        :ok -> nil
        :error -> raise ServerError, message: "Could not enable proxy '#{proxy.name}'"
      end
    end)

    :ok
  end

  @doc """
  Re-enables all proxies and disables all toxics on toxiproxy.

  Raises `ToxiproxyEx.ServerError` if the server could not be reset.

  ## Examples

  Reset toxiproxy:

      iex> ToxiproxyEx.reset!()
      :ok
  """
  @spec reset!() :: :ok
  def reset!() do
    case Client.reset() do
      {:ok, _} -> :ok
      _ -> raise ServerError, message: "Could not reset toxiproxy"
    end
  end

  @doc """
  Gets the version of the running toxiproxy server.

  Raises `ToxiproxyEx.ServerError` if the version could not be fetched from
  the server.

  ## Examples

  Get running toxiproxy version:

      iex> ToxiproxyEx.version!()
      "2.1.2"
  """
  @spec version!() :: String.t()
  def version!() do
    case Client.version() do
      {:ok, %{body: res}} -> res
      _ -> raise ServerError, message: "Could not fetch version"
    end
  end

  @doc """
  Creates proxies based on the passed data. This is useful to quickly create
  multiple proxies based on hardcoded values or values read from external
  sources such as a config file.

  Nonexistent proxies will be created and existing ones will be updated to
  match the passed data.

  Raises `ToxiproxyEx.ServerError` if the proxies could not be created on the
  server.
  ## Examples

  Creating proxies:

      iex> ToxiproxyEx.populate!([
      ...>  %{name: "test_mysql_master", upstream: "localhost:5765"},
      ...>  %{name: "test_mysql_follower", upstream: "localhost:5766", enabled: false}
      ...> ])
  """
  @spec populate!([proxy_map()]) :: toxic_collection()
  def populate!(proxies) when is_list(proxies) do
    Enum.map(proxies, fn proxy_attrs ->
      name = Map.get(proxy_attrs, :name)
      upstream = Map.get(proxy_attrs, :upstream)
      listen = Map.get(proxy_attrs, :listen)

      existing = Enum.find(all!().proxies, &(&1.name == name))

      if existing do
        if existing.upstream == upstream && existing.listen == listen do
          existing
        else
          destroy!(existing)

          Keyword.new(proxy_attrs)
          |> create!()
        end
      else
        Keyword.new(proxy_attrs)
        |> create!()
      end
    end)
    |> ToxicCollection.new()
  end
end
lib/toxiproxy_ex.ex
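An end-to-end sketch stitched from the calls above; the host/port values are illustrative and assume a running toxiproxy server:

proxy = ToxiproxyEx.create!(upstream: "localhost:3306", name: :test_mysql_master)

proxy
|> ToxiproxyEx.downstream(:latency, latency: 1_000, jitter: 250)
|> ToxiproxyEx.apply!(fn ->
  # Exercise the application against a slow MySQL here.
  nil
end)

# Remove every proxy again once the test run is done.
ToxiproxyEx.destroy!(ToxiproxyEx.all!())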
defmodule LoggerJSON.FormatterUtils do
  @moduledoc """
  This module contains functions that can be used across different
  `LoggerJSON.Formatter` implementations to provide common functionality.
  """

  import Jason.Helpers, only: [json_map: 1]

  @doc """
  Format an exception for use within a log entry
  """
  def format_process_crash(md) do
    if crash_reason = Keyword.get(md, :crash_reason) do
      initial_call = Keyword.get(md, :initial_call)

      json_map(
        initial_call: format_initial_call(initial_call),
        reason: format_crash_reason(crash_reason)
      )
    end
  end

  @doc """
  RFC3339 UTC "Zulu" format
  """
  def format_timestamp({date, time}) do
    [format_date(date), ?T, format_time(time), ?Z]
    |> IO.iodata_to_binary()
  end

  @doc """
  Provide a string output of the MFA log entry
  """
  def format_function(nil, function), do: function
  def format_function(module, function), do: "#{module}.#{function}"
  def format_function(module, function, arity), do: "#{module}.#{function}/#{arity}"

  @doc """
  Put a key-value pair into the given map, unless the value is `nil`.
  """
  def maybe_put(map, _key, nil), do: map
  def maybe_put(map, key, value), do: Map.put(map, key, value)

  defp format_initial_call(nil), do: nil

  defp format_initial_call({module, function, arity}),
    do: format_function(module, function, arity)

  defp format_crash_reason({:throw, reason}) do
    Exception.format(:throw, reason)
  end

  defp format_crash_reason({:exit, reason}) do
    Exception.format(:exit, reason)
  end

  defp format_crash_reason({%{} = exception, stacktrace}) do
    Exception.format(:error, exception, stacktrace)
  end

  defp format_crash_reason(other) do
    inspect(other)
  end

  defp format_time({hh, mi, ss, ms}) do
    [pad2(hh), ?:, pad2(mi), ?:, pad2(ss), ?., pad3(ms)]
  end

  defp format_date({yy, mm, dd}) do
    [Integer.to_string(yy), ?-, pad2(mm), ?-, pad2(dd)]
  end

  defp pad3(int) when int < 10, do: [?0, ?0, Integer.to_string(int)]
  defp pad3(int) when int < 100, do: [?0, Integer.to_string(int)]
  defp pad3(int), do: Integer.to_string(int)

  defp pad2(int) when int < 10, do: [?0, Integer.to_string(int)]
  defp pad2(int), do: Integer.to_string(int)
end
lib/logger_json/formatter_utils.ex
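A few illustrative calls against the helpers above:

# RFC3339 "Zulu" timestamps from Erlang-style date/time tuples.
"2020-01-02T03:04:05.006Z" =
  LoggerJSON.FormatterUtils.format_timestamp({{2020, 1, 2}, {3, 4, 5, 6}})

# `maybe_put/3` silently drops nil values.
%{a: 1} = LoggerJSON.FormatterUtils.maybe_put(%{a: 1}, :b, nil)
%{a: 1, b: 2} = LoggerJSON.FormatterUtils.maybe_put(%{a: 1}, :b, 2)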
defmodule Bolt.Sips.Internals.BoltProtocol do @moduledoc false # A library that handles Bolt Protocol (v1 and v2). # Note that for now, only Neo4j implements Bolt v2. # It handles all the protocol specific steps (i.e. # handshake, init) as well as sending and receiving messages and wrapping # them in chunks. # It abstracts transportation, expecting the transport layer to define # `send/2` and `recv/3` analogous to `:gen_tcp`. # ## Logging configuration # Logging can be enable / disable via config files (e.g, `config/config.exs`). # - `:log`: (bool) wether Bolt.Sips.Internals. should produce logs or not. Defaults to `false` # - `:log_hex`: (bool) wether Bolt.Sips.Internals. should produce logs hexadecimal counterparts. While this may be interesting, # note that all the hexadecimal data will be written and this can be very long, and thus can seriously impact performances. Defaults to `false` # For example, configuration to see the logs and their hexadecimal counterparts: # ``` # config :Bolt.Sips.Internals., # log: true, # log_hex: true # ``` # # #### Examples of logging (without log_hex) # iex> Bolt.Sips.Internals.test('localhost', 7687, "RETURN 1 as num", %{}, {"neo4j", "password"}) # C: HANDSHAKE ~ "<<0x60, 0x60, 0xB0, 0x17>> [2, 1, 0, 0]" # S: HANDSHAKE ~ 2 # C: INIT ~ ["BoltSips/1.1.0.rc2", %{credentials: "password", principal: "neo4j", scheme: "basic"}] # S: SUCCESS ~ %{"server" => "Neo4j/3.4.1"} # C: RUN ~ ["RETURN 1 as num", %{}] # S: SUCCESS ~ %{"fields" => ["num"], "result_available_after" => 1} # C: PULL_ALL ~ [] # S: RECORD ~ [1] # S: SUCCESS ~ %{"result_consumed_after" => 0, "type" => "r"} # [ # success: %{"fields" => ["num"], "result_available_after" => 1}, # record: [1], # success: %{"result_consumed_after" => 0, "type" => "r"} # ] # #### Examples of logging (with log_hex) # iex> Bolt.Sips.Internals.test('localhost', 7687, "RETURN 1 as num", %{}, {"neo4j", "password"}) # 13:32:23.882 [debug] C: HANDSHAKE ~ "<<0x60, 0x60, 0xB0, 0x17>> [2, 1, 0, 0]" # S: HANDSHAKE ~ <<0x0, 0x0, 0x0, 0x2>> # S: HANDSHAKE ~ 2 # C: INIT ~ ["BoltSips/1.1.0.rc2", %{credentials: "password", principal: "neo4j", scheme: "basic"}] # C: INIT ~ <<0x0, 0x42, 0xB2, 0x1, 0x8C, 0x42, 0x6F, 0x6C, 0x74, 0x65, 0x78, 0x2F, 0x30, 0x2E, 0x35, 0x2E, 0x30, 0xA3, 0x8B, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6E, 0x74, 0x69, 0x61, 0x6C, 0x73, 0x88, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6F, 0x72, 0x64, 0x89, 0x70, 0x72, 0x69, 0x6E, 0x63, 0x69, 0x70, 0x61, 0x6C, 0x85, 0x6E, 0x65, 0x6F, 0x34, 0x6A, 0x86, 0x73, 0x63, 0x68, 0x65, 0x6D, 0x65, 0x85, 0x62, 0x61, 0x73, 0x69, 0x63, 0x0, 0x0>> # S: SUCCESS ~ <<0xA1, 0x86, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x8B, 0x4E, 0x65, 0x6F, 0x34, 0x6A, 0x2F, 0x33, 0x2E, 0x34, 0x2E, 0x31>> # S: SUCCESS ~ %{"server" => "Neo4j/3.4.1"} # C: RUN ~ ["RETURN 1 as num", %{}] # C: RUN ~ <<0x0, 0x13, 0xB2, 0x10, 0x8F, 0x52, 0x45, 0x54, 0x55, 0x52, 0x4E, 0x20, 0x31, 0x20, 0x61, 0x73, 0x20, 0x6E, 0x75, 0x6D, 0xA0, 0x0, 0x0>> # S: SUCCESS ~ <<0xA2, 0xD0, 0x16, 0x72, 0x65, 0x73, 0x75, 0x6C, 0x74, 0x5F, 0x61, 0x76, 0x61, 0x69, 0x6C, 0x61, 0x62, 0x6C, 0x65, 0x5F, 0x61, 0x66, 0x74, 0x65, 0x72, 0x1, 0x86, 0x66, 0x69, 0x65, 0x6C, 0x64, 0x73, 0x91, 0x83, 0x6E, 0x75, 0x6D>> # S: SUCCESS ~ %{"fields" => ["num"], "result_available_after" => 1} # C: PULL_ALL ~ [] # C: PULL_ALL ~ <<0x0, 0x2, 0xB0, 0x3F, 0x0, 0x0>> # S: RECORD ~ <<0x91, 0x1>> # S: RECORD ~ [1] # S: SUCCESS ~ <<0xA2, 0xD0, 0x15, 0x72, 0x65, 0x73, 0x75, 0x6C, 0x74, 0x5F, 0x63, 0x6F, 0x6E, 0x73, 0x75, 0x6D, 0x65, 0x64, 0x5F, 0x61, 0x66, 0x74, 0x65, 0x72, 0x0, 0x84, 
  # 0x74, 0x79, 0x70, 0x65, 0x81, 0x72>>
  # S: SUCCESS ~ %{"result_consumed_after" => 0, "type" => "r"}
  # [
  #   success: %{"fields" => ["num"], "result_available_after" => 1},
  #   record: [1],
  #   success: %{"result_consumed_after" => 0, "type" => "r"}
  # ]

  # ## Shared options

  # Functions that allow for options accept these default options:
  #   * `recv_timeout`: The timeout for receiving a response from the Neo4j
  #     server (default: #{@recv_timeout})

  alias Bolt.Sips.Metadata
  alias Bolt.Sips.Internals.BoltProtocolV1
  alias Bolt.Sips.Internals.BoltProtocolV3

  defdelegate handshake(transport, port, options \\ []), to: BoltProtocolV1
  defdelegate init(transport, port, version, auth \\ {}, options \\ []), to: BoltProtocolV1
  defdelegate hello(transport, port, version, auth \\ {}, options \\ []), to: BoltProtocolV3
  defdelegate goodbye(transport, port, version), to: BoltProtocolV3
  defdelegate ack_failure(transport, port, bolt_version, options \\ []), to: BoltProtocolV1
  defdelegate reset(transport, port, bolt_version, options \\ []), to: BoltProtocolV1
  defdelegate discard_all(transport, port, bolt_version, options \\ []), to: BoltProtocolV1

  defdelegate begin(transport, port, bolt_version, metadata \\ %Metadata{}, options \\ []),
    to: BoltProtocolV3

  defdelegate commit(transport, port, bolt_version, options \\ []), to: BoltProtocolV3
  defdelegate rollback(transport, port, bolt_version, options \\ []), to: BoltProtocolV3
  defdelegate pull_all(transport, port, bolt_version, options \\ []), to: BoltProtocolV1

  @doc """
  run for all Bolt versions, but the call differs.

  For Bolt <= 2, use:
  run(transport, port, bolt_version, statement, params, options)

  For Bolt >= 3:
  run(transport, port, bolt_version, statement, params, metadata, options)

  Note that Bolt V2 calls work with Bolt V3, but it is preferable to update them.
  """
  @spec run(
          atom(),
          port(),
          integer(),
          String.t(),
          map(),
          nil | Keyword.t() | Bolt.Sips.Metadata.t(),
          nil | Keyword.t()
        ) :: {:ok, tuple()} | Bolt.Sips.Internals.Error.t()
  def run(
        transport,
        port,
        bolt_version,
        statement,
        params \\ %{},
        options_or_metadata \\ [],
        options \\ []
      )

  def run(transport, port, bolt_version, statement, params, options_or_metadata, _)
      when bolt_version <= 2 do
    BoltProtocolV1.run(
      transport,
      port,
      bolt_version,
      statement,
      params,
      options_or_metadata || []
    )
  end

  def run(transport, port, bolt_version, statement, params, metadata, options)
      when bolt_version >= 3 do
    metadata =
      case metadata do
        [] -> %{}
        metadata -> metadata
      end

    {metadata, options} = manage_metadata_and_options(metadata, options)
    BoltProtocolV3.run(transport, port, bolt_version, statement, params, metadata, options)
  end

  defp manage_metadata_and_options([], options) do
    {:ok, empty_metadata} = Metadata.new(%{})
    {empty_metadata, options}
  end

  defp manage_metadata_and_options([_ | _] = metadata, options) do
    {:ok, empty_metadata} = Metadata.new(%{})
    {empty_metadata, metadata ++ options}
  end

  defp manage_metadata_and_options(metadata, options) do
    {metadata, options}
  end

  @doc """
  run_statement for all Bolt versions, but the call differs.

  For Bolt <= 2, use:
  run_statement(transport, port, bolt_version, statement, params, options)

  For Bolt >= 3:
  run_statement(transport, port, bolt_version, statement, params, metadata, options)

  Note that Bolt V2 calls work with Bolt V3, but it is preferable to update them.
""" @spec run_statement( atom(), port(), integer(), String.t(), map(), nil | Keyword.t() | Bolt.Sips.Metadata.t(), nil | Keyword.t() ) :: list() | Bolt.Sips.Internals.Error.t() def run_statement( transport, port, bolt_version, statement, params \\ %{}, options_v2_or_metadata_v3 \\ [], options_v3 \\ [] ) def run_statement(transport, port, bolt_version, statement, params, options_or_metadata, _) when bolt_version <= 2 do BoltProtocolV1.run_statement( transport, port, bolt_version, statement, params, options_or_metadata || [] ) end def run_statement(transport, port, bolt_version, statement, params, metadata, options) when bolt_version >= 2 do metadata = case metadata do [] -> %{} metadata -> metadata end BoltProtocolV3.run_statement( transport, port, bolt_version, statement, params, metadata, options ) end end
lib/bolt_sips/internals/bolt_protocol.ex
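A hedged sketch of the two call shapes that `run/7` dispatches on. The handshake/init (or hello) steps documented above are elided here, so this would only return successfully against an already-negotiated connection:

alias Bolt.Sips.Internals.BoltProtocol

# A raw socket to a local Neo4j; handshake/init would normally follow.
{:ok, socket} = :gen_tcp.connect('localhost', 7687, [:binary, active: false])

# Bolt <= 2: the sixth argument is a plain options keyword list.
BoltProtocol.run(:gen_tcp, socket, 2, "RETURN 1 AS num", %{}, recv_timeout: 15_000)

# Bolt >= 3: metadata comes first, options last.
BoltProtocol.run(:gen_tcp, socket, 3, "RETURN 1 AS num", %{}, %{}, recv_timeout: 15_000)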
defmodule Notifications.ExceptionInfo do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{ class: String.t(), title: String.t(), msg: String.t(), backtrace: [String.t()] } defstruct [:class, :title, :msg, :backtrace] field :class, 1, type: :string field :title, 2, type: :string field :msg, 3, type: :string field :backtrace, 4, repeated: true, type: :string end defmodule Notifications.TimeInfo do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{ start_time: String.t(), end_time: String.t() } defstruct [:start_time, :end_time] field :start_time, 1, type: :string field :end_time, 2, type: :string end defmodule Notifications.Profile do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{ name: String.t(), title: String.t(), version: String.t(), summary: String.t(), maintainer: String.t(), license: String.t(), copyright: String.t(), copyright_email: String.t(), sha256: String.t(), supports: [Notifications.PlatformSupport.t()], attributes: [Notifications.Profile.Attribute.t()], failed_controls: [Notifications.Profile.Control.t()], stats: Notifications.Profile.ControlTotals.t() | nil } defstruct [ :name, :title, :version, :summary, :maintainer, :license, :copyright, :copyright_email, :sha256, :supports, :attributes, :failed_controls, :stats ] field :name, 1, type: :string field :title, 2, type: :string field :version, 3, type: :string field :summary, 4, type: :string field :maintainer, 5, type: :string field :license, 6, type: :string field :copyright, 7, type: :string field :copyright_email, 8, type: :string field :sha256, 9, type: :string field :supports, 10, repeated: true, type: Notifications.PlatformSupport field :attributes, 11, repeated: true, type: Notifications.Profile.Attribute field :failed_controls, 12, repeated: true, type: Notifications.Profile.Control field :stats, 13, type: Notifications.Profile.ControlTotals end defmodule Notifications.Profile.Control do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{ id: String.t(), impact: float, title: String.t(), code: String.t(), desc: String.t(), source_location: Notifications.SourceLocation.t() | nil, refs: [Notifications.Refs.t()], failed_results: [Notifications.Profile.Control.Result.t()], stats: Notifications.Profile.Control.ResultTotals.t() | nil } defstruct [:id, :impact, :title, :code, :desc, :source_location, :refs, :failed_results, :stats] field :id, 1, type: :string field :impact, 2, type: :float field :title, 3, type: :string field :code, 4, type: :string field :desc, 5, type: :string field :source_location, 6, type: Notifications.SourceLocation field :refs, 7, repeated: true, type: Notifications.Refs field :failed_results, 9, repeated: true, type: Notifications.Profile.Control.Result field :stats, 10, type: Notifications.Profile.Control.ResultTotals end defmodule Notifications.Profile.Control.Result do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{ status: String.t(), code_desc: String.t(), run_time: float, start_time: String.t(), message: String.t(), skip_message: String.t() } defstruct [:status, :code_desc, :run_time, :start_time, :message, :skip_message] field :status, 1, type: :string field :code_desc, 2, type: :string field :run_time, 3, type: :float field :start_time, 4, type: :string field :message, 5, type: :string field :skip_message, 6, type: :string end defmodule Notifications.Profile.Control.ResultTotals do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{ num_tests: integer, 
num_failed_tests: integer, num_skipped_tests: integer, num_passed_tests: integer } defstruct [:num_tests, :num_failed_tests, :num_skipped_tests, :num_passed_tests] field :num_tests, 1, type: :int32 field :num_failed_tests, 2, type: :int32 field :num_skipped_tests, 3, type: :int32 field :num_passed_tests, 4, type: :int32 end defmodule Notifications.Profile.Attribute do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{ name: String.t(), options: Notifications.Profile.Attribute.Options.t() | nil } defstruct [:name, :options] field :name, 1, type: :string field :options, 2, type: Notifications.Profile.Attribute.Options end defmodule Notifications.Profile.Attribute.Options do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{ description: String.t() } defstruct [:description] field :description, 1, type: :string end defmodule Notifications.Profile.ControlTotals do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{ num_tests: integer, num_failed_tests: integer, num_skipped_tests: integer, num_passed_tests: integer } defstruct [:num_tests, :num_failed_tests, :num_skipped_tests, :num_passed_tests] field :num_tests, 1, type: :int32 field :num_failed_tests, 2, type: :int32 field :num_skipped_tests, 3, type: :int32 field :num_passed_tests, 4, type: :int32 end defmodule Notifications.SourceLocation do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{ ref: String.t(), line: integer } defstruct [:ref, :line] field :ref, 1, type: :string field :line, 2, type: :int32 end defmodule Notifications.Refs do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{ ref: String.t(), uri: String.t(), url: String.t() } defstruct [:ref, :uri, :url] field :ref, 1, type: :string field :uri, 2, type: :string field :url, 3, type: :string end defmodule Notifications.PlatformSupport do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{ inspec: String.t(), os_name: String.t(), os_family: String.t(), release: String.t() } defstruct [:inspec, :os_name, :os_family, :release] field :inspec, 1, type: :string field :os_name, 2, type: :string field :os_family, 3, type: :string field :release, 4, type: :string end defmodule Notifications.ComplianceSuccess do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{ id: String.t(), compliance_url: String.t(), node_name: String.t(), node_id: String.t(), end_time: String.t(), timestamp: String.t() } defstruct [:id, :compliance_url, :node_name, :node_id, :end_time, :timestamp] field :id, 1, type: :string field :compliance_url, 2, type: :string field :node_name, 3, type: :string field :node_id, 4, type: :string field :end_time, 5, type: :string field :timestamp, 6, type: :string end defmodule Notifications.ComplianceFailure do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{ id: String.t(), compliance_url: String.t(), node_name: String.t(), node_id: String.t(), inspec_version: String.t(), test_totals: Notifications.ComplianceFailure.ControlTotals.t() | nil, failed_profiles: [Notifications.Profile.t()], end_time: String.t(), timestamp: String.t() } defstruct [ :id, :compliance_url, :node_name, :node_id, :inspec_version, :test_totals, :failed_profiles, :end_time, :timestamp ] field :id, 1, type: :string field :compliance_url, 2, type: :string field :node_name, 3, type: :string field :node_id, 4, type: :string field :inspec_version, 5, type: :string field :test_totals, 6, type: Notifications.ComplianceFailure.ControlTotals field 
:failed_profiles, 7, repeated: true, type: Notifications.Profile field :end_time, 8, type: :string field :timestamp, 9, type: :string end defmodule Notifications.ComplianceFailure.ControlTotals do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{ skipped: integer, passed: integer, failed: integer, critical: integer, critical_failed: integer } defstruct [:skipped, :passed, :failed, :critical, :critical_failed] field :skipped, 1, type: :int32 field :passed, 2, type: :int32 field :failed, 3, type: :int32 field :critical, 4, type: :int32 field :critical_failed, 5, type: :int32 end defmodule Notifications.CCRFailure do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{ run_id: String.t(), node_name: String.t(), node_url: String.t(), run_url: String.t(), cookbook: String.t(), recipe: String.t(), time: Notifications.TimeInfo.t() | nil, exception: Notifications.ExceptionInfo.t() | nil, timestamp: String.t() } defstruct [ :run_id, :node_name, :node_url, :run_url, :cookbook, :recipe, :time, :exception, :timestamp ] field :run_id, 1, type: :string field :node_name, 2, type: :string field :node_url, 3, type: :string field :run_url, 4, type: :string field :cookbook, 5, type: :string field :recipe, 6, type: :string field :time, 7, type: Notifications.TimeInfo field :exception, 8, type: Notifications.ExceptionInfo field :timestamp, 9, type: :string end defmodule Notifications.CCRSuccess do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{ run_id: String.t(), node_name: String.t(), run_url: String.t(), time: Notifications.TimeInfo.t() | nil, updated_resource_count: integer, timestamp: String.t() } defstruct [:run_id, :node_name, :run_url, :time, :updated_resource_count, :timestamp] field :run_id, 1, type: :string field :node_name, 2, type: :string field :run_url, 3, type: :string field :time, 4, type: Notifications.TimeInfo field :updated_resource_count, 5, type: :int32 field :timestamp, 6, type: :string end defmodule Notifications.Response do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{} defstruct [] end defmodule Notifications.Event do @moduledoc false use Protobuf, syntax: :proto3 @type t :: %__MODULE__{ event: {atom, any}, id: String.t() } defstruct [:event, :id] oneof :event, 0 field :id, 1, type: :string field :CCRSuccess, 2, type: Notifications.CCRSuccess, oneof: 0 field :CCRFailure, 3, type: Notifications.CCRFailure, oneof: 0 field :ComplianceSuccess, 4, type: Notifications.ComplianceSuccess, oneof: 0 field :ComplianceFailure, 5, type: Notifications.ComplianceFailure, oneof: 0 end
components/notifications-service/server/lib/pb/notifications.pb.ex
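A small sketch of building and round-tripping one of the generated structs; `new/1`, `encode/1` and `decode/1` are the helpers protobuf-elixir injects via `use Protobuf` (hedged: helper names can vary across library versions):

success =
  Notifications.CCRSuccess.new(
    run_id: "run-123",
    node_name: "web-1",
    run_url: "https://automate.example/runs/run-123",
    updated_resource_count: 4
  )

# The `event` oneof is set as a tagged tuple.
event = Notifications.Event.new(id: "run-123", event: {:CCRSuccess, success})

binary = Notifications.Event.encode(event)
%Notifications.Event{id: "run-123"} = Notifications.Event.decode(binary)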
defmodule TrademarkFreeStrategicLandWarfare.Players.Sam do
  alias TrademarkFreeStrategicLandWarfare.{Board, Player, Piece}

  @behaviour Player

  @type direction() :: :north | :west | :east | :south
  @type count() :: integer()
  @type state() :: any()

  @spec name() :: binary()
  def name() do
    "<NAME>"
  end

  # should return a list with 4 lists of 10 piece-name atoms (:miner, :colonel, etc) per list
  @spec initial_pieces_placement() :: nonempty_list([atom(), ...])
  def initial_pieces_placement() do
    pieces =
      Board.piece_name_counts()
      |> Enum.flat_map(fn {type, count} ->
        for _ <- 1..count, do: type
      end)

    {flag_defense_strategy, flag_protection} =
      case :rand.uniform() do
        val when val < 0.15 -> {:right_side_defense, [:flag, :bomb, :bomb, :bomb]}
        val when val < 0.30 -> {:left_side_defense, [:flag, :bomb, :bomb, :bomb]}
        val when val < 0.5 -> {:corner_defense, [:flag, :bomb, :bomb]}
        _ -> {:bottom_defense, [:flag, :bomb, :bomb, :bomb]}
      end

    pieces
    |> Kernel.--(flag_protection)
    |> Enum.shuffle()
    |> insert_defense(flag_defense_strategy)
    |> Enum.chunk_every(10)
  end

  @spec turn(
          %TrademarkFreeStrategicLandWarfare.Board{},
          %TrademarkFreeStrategicLandWarfare.Player{},
          state()
        ) :: {binary(), direction(), count(), state()}
  def turn(%Board{rows: rows} = board, %Player{number: number}, state) do
    # find all eligible pieces
    move_partitioned_pieces =
      rows
      |> List.flatten()
      |> Enum.flat_map(fn
        %Piece{player: ^number, name: name} = piece when name not in [:bomb, :flag] -> [piece]
        _ -> []
      end)
      |> partition_by_move(board)

    # select from them, biasing towards pieces that can win, then those that can advance,
    # then west/east, then move backward
    eligible_moves =
      Enum.shuffle([
        Map.get(move_partitioned_pieces, :win, []),
        Map.get(move_partitioned_pieces, :win, []),
        Map.get(move_partitioned_pieces, :win, []),
        Map.get(
          move_partitioned_pieces,
          Board.maybe_invert_player_direction(:north, number),
          []
        ),
        Map.get(move_partitioned_pieces, :west, []) ++
          Map.get(move_partitioned_pieces, :east, []),
        Map.get(
          move_partitioned_pieces,
          Board.maybe_invert_player_direction(:south, number),
          []
        )
      ])
      |> Enum.find(fn list -> length(list) > 0 end)

    # randomly select one from the list returned
    case eligible_moves do
      nil ->
        raise "no move possible"

      moves ->
        moves
        |> Enum.random()
        |> Tuple.append(state)
    end
  end

  defp partition_by_move(pieces, board) do
    # TODO: reduce_while and halt when preferred one found (win, progressing forward)
    Enum.reduce(pieces, %{}, fn piece, acc ->
      Enum.reduce([:north, :west, :east, :south], acc, fn direction, dir_acc ->
        case Board.move(board, piece.player, piece.uuid, direction, 1) do
          {:ok, :win, _} ->
            # this shouldn't ever get hit, because we'll never know as a player
            # where the opponent's flag is without trying to capture it. putting
            # this here for that note, and just in case.
            Map.update(
              dir_acc,
              :win,
              [{piece.uuid, direction, 1}],
              &[{piece.uuid, direction, 1} | &1]
            )

          {:error, :unknown_result} ->
            # allowed move, but masked piece. include in the possibles.
Map.update( dir_acc, direction, [{piece.uuid, direction, 1}], &[{piece.uuid, direction, 1} | &1] ) {:ok, %Board{}} -> # allowed move -- no differentiation on whether attack happened Map.update( dir_acc, direction, [{piece.uuid, direction, 1}], &[{piece.uuid, direction, 1} | &1] ) _ -> dir_acc end end) end) end defp insert_defense(positions, :bottom_defense) do flag_col = Enum.random(1..8) positions |> List.insert_at(20 + flag_col, :bomb) |> List.insert_at(30 + flag_col - 1, :bomb) |> List.insert_at(30 + flag_col, :flag) |> List.insert_at(30 + flag_col + 1, :bomb) end defp insert_defense(positions, :corner_defense) do case Enum.random([0, 9]) do 0 -> positions |> List.insert_at(20, :bomb) |> List.insert_at(30, :flag) |> List.insert_at(31, :bomb) 9 -> positions |> List.insert_at(29, :bomb) |> List.insert_at(38, :bomb) |> List.insert_at(39, :flag) end end defp insert_defense(positions, :left_side_defense) do case Enum.random(1..2) do 1 -> positions |> List.insert_at(0, :bomb) |> List.insert_at(10, :flag) |> List.insert_at(11, :bomb) |> List.insert_at(20, :bomb) 2 -> positions |> List.insert_at(10, :bomb) |> List.insert_at(20, :flag) |> List.insert_at(21, :bomb) |> List.insert_at(30, :bomb) end end defp insert_defense(positions, :right_side_defense) do case Enum.random(1..2) do 1 -> positions |> List.insert_at(9, :bomb) |> List.insert_at(18, :bomb) |> List.insert_at(19, :flag) |> List.insert_at(29, :bomb) 2 -> positions |> List.insert_at(19, :bomb) |> List.insert_at(28, :bomb) |> List.insert_at(29, :flag) |> List.insert_at(39, :bomb) end end end
lib/trademark_free_strategic_land_warfare/players/sam.ex
0.520253
0.511595
sam.ex
starcoder
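A quick sanity-check sketch of the placement contract above, assuming the standard 40-piece distribution from `Board.piece_name_counts/0`:

```elixir
alias TrademarkFreeStrategicLandWarfare.Players.Sam

placement = Sam.initial_pieces_placement()

4 = length(placement)                             # four rows...
true = Enum.all?(placement, &(length(&1) == 10))  # ...of ten pieces each
true = :flag in List.flatten(placement)           # the flag is always placed
```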
defmodule Feedback.Message do @moduledoc """ Information for a customer support message. """ # The integration with HEAT only accepts certain values for the message type # and subject. Only "Complaint", "Suggestion", "Inquiry" and "Commendation" # are supported service options, and each has a list of allowed subjects. # Other values will cause HEAT to throw an error and reject the ticket @type service_name :: String.t() @type service_value :: String.t() @type subject_value :: String.t() @type service_option_with_subjects :: {service_name(), service_value(), [subject_value()]} @service_options_with_subjects [ {"Complaint", "Complaint", [ "Bus Stop", "CharlieCards & Tickets", "Employee Complaint", "Fare Evasion", "Maintenance Complaint", "Mobile Ticketing", "Parking", "Service Complaint", "TAlerts/Countdowns/Apps", "Other" ]}, {"Comment", "Suggestion", [ "Bus Stop", "CharlieCards & Tickets", "Fare Policy", "Maintenance", "MBTA Projects/Programs", "Parking", "Schedules", "Service Inquiry", "Website", "Other" ]}, {"Question", "Inquiry", [ "CharlieCards & Tickets", "Disability ID Cards", "Fare Policy", "Maintenance", "Mobile Ticketing", "Parking", "Schedules", "Senior ID Cards", "Service Inquiry", "Trip Planner", "Website", "Other" ]}, {"Compliment", "Commendation", [ "Employee", "Maintenance", "MBTA Projects/Programs", "Service", "Other" ]} ] @enforce_keys [:comments, :service, :no_request_response] defstruct [ :email, :phone, :first_name, :last_name, :comments, :service, :subject, :incident_date_time, :no_request_response, :photos, :ticket_number, :mode, :line, :vehicle ] @type t :: %__MODULE__{ email: String.t() | nil, phone: String.t() | nil, first_name: String.t(), last_name: String.t(), comments: String.t(), service: String.t(), subject: String.t(), no_request_response: boolean, incident_date_time: DateTime.t(), photos: [Plug.Upload.t()] | nil, ticket_number: String.t() | nil, mode: String.t() | nil, line: String.t() | nil, vehicle: String.t() | nil } @spec service_options() :: [service_option_with_subjects()] def service_options do @service_options_with_subjects end @spec valid_service?(service_value()) :: boolean() def valid_service?(value) do value in Enum.map(service_options(), &elem(&1, 1)) end @spec valid_subject_for_service?(subject_value(), service_value()) :: boolean() def valid_subject_for_service?(subject_value, service_value) do case service(service_value) do nil -> false service -> subject_value in subjects(service) end end @spec service(service_value()) :: service_option_with_subjects() | nil defp service(value) do Enum.find(service_options(), &(elem(&1, 1) == value)) end @spec subjects(service_option_with_subjects()) :: [subject_value()] defp subjects(service), do: elem(service, 2) end
apps/feedback/lib/message.ex
0.762778
0.401306
message.ex
starcoder
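A usage sketch of the whitelist checks above, with values taken from the module's own `@service_options_with_subjects`:

```elixir
alias Feedback.Message

true = Message.valid_service?("Complaint")
false = Message.valid_service?("Rant")

true = Message.valid_subject_for_service?("Parking", "Complaint")
false = Message.valid_subject_for_service?("Trip Planner", "Complaint")
```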
defmodule FCSupport.Normalization do
  def to_utc_iso8601(datetime) do
    datetime
    |> Timex.Timezone.convert("UTC")
    |> DateTime.to_iso8601()
  end

  def from_utc_iso8601(nil), do: nil

  def from_utc_iso8601(iso8601) do
    Timex.parse!(iso8601, "{ISO:Extended}")
  end

  def normalize_by(map, root_key, key, test_func, normalize_func) do
    value =
      map
      |> Map.get(root_key)
      |> Map.get(key)

    if test_func.(value) do
      root_value =
        map
        |> Map.get(root_key)
        |> Map.put(key, normalize_func.(value))

      Map.put(map, root_key, root_value)
    else
      map
    end
  end

  @doc """
  Trims all string values in the struct.
  Non-string values will be safely ignored.
  """
  def trim_strings(struct) do
    Enum.reduce(Map.keys(struct), struct, fn(k, acc) ->
      v = Map.get(struct, k)

      # the is_binary/1 guard is required: String.valid?/1 raises on non-binaries
      if is_binary(v) and String.valid?(v) do
        Map.put(acc, k, String.trim(v))
      else
        acc
      end
    end)
  end

  @doc """
  Downcases values of the given keys in the struct.
  Non-string values will be safely ignored.

  If `keys` is not provided, defaults to all keys of the given `struct`.
  """
  def downcase_strings(struct, keys \\ nil) do
    keys = keys || Map.keys(struct)

    Enum.reduce(keys, struct, fn(k, acc) ->
      v = Map.get(struct, k)

      if is_binary(v) and String.valid?(v) do
        Map.put(acc, k, String.downcase(v))
      else
        acc
      end
    end)
  end

  def atomize_keys(m, permitted \\ nil) do
    permitted_atom = permitted || Map.keys(m)
    permitted_string = stringify_list(permitted_atom)

    Enum.reduce(m, %{}, fn({k, v}, acc) ->
      cond do
        is_binary(k) && Enum.member?(permitted_string, k) ->
          Map.put(acc, String.to_existing_atom(k), v)

        is_atom(k) && Enum.member?(permitted_atom, k) ->
          Map.put(acc, k, v)

        true ->
          acc
      end
    end)
  end

  def stringify_list(l) do
    Enum.reduce(l, [], fn(item, acc) ->
      if is_atom(item) do
        acc ++ [Atom.to_string(item)]
      else
        acc ++ [item]
      end
    end)
  end

  def atomize_list(l) do
    Enum.reduce(l, [], fn(item, acc) ->
      if is_binary(item) do
        acc ++ [String.to_existing_atom(item)]
      else
        acc ++ [item]
      end
    end)
  end
end
base/fc_support/lib/fc_support/normalization.ex
0.700383
0.484075
normalization.ex
starcoder
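A small sketch of the normalization helpers on plain maps (input values are made up):

```elixir
alias FCSupport.Normalization

Normalization.trim_strings(%{name: "  Joe  ", age: 42})
#=> %{name: "Joe", age: 42}

Normalization.downcase_strings(%{email: "Joe@Example.COM"}, [:email])
#=> %{email: "joe@example.com"}

Normalization.atomize_keys(%{"name" => "Joe", "junk" => true}, [:name])
#=> %{name: "Joe"}
```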
defmodule Openflow.Action.NxRegMove do
  @moduledoc """
  Copies src[src_ofs:src_ofs+n_bits] to dst[dst_ofs:dst_ofs+n_bits], where
  a[b:c] denotes the bits within 'a' numbered 'b' through 'c' (not including
  bit 'c'). Bit numbering starts at 0 for the least-significant bit, 1 for
  the next most significant bit, and so on.

  The following nxm_header values are potentially acceptable as 'src':

  - `:in_port`
  - `:eth_dst`
  - `:eth_src`
  - `:eth_type`
  - `:vlan_tci`
  - `:ip_tos`
  - `:ip_proto`
  - `:ip_src`
  - `:ip_dst`
  - `:tcp_src`
  - `:tcp_dst`
  - `:udp_src`
  - `:udp_dst`
  - `:icmp_type`
  - `:icmp_code`
  - `:arp_op`
  - `:arp_spa`
  - `:arp_tpa`
  - `:tun_id`
  - `:arp_sha`
  - `:arp_tha`
  - `:icmpv6_type`
  - `:icmpv6_code`
  - `:nd_sll`
  - `:nd_tll`
  - `:reg(idx)` for idx in the switch's accepted range.
  - `:pkt_mark`
  - `:tun_ipv4_src`
  - `:tun_ipv4_dst`

  The following nxm_header values are potentially acceptable as 'dst':

  - `:eth_dst`
  - `:eth_src`
  - `:ip_tos`
  - `:ip_src`
  - `:ip_dst`
  - `:tcp_src`
  - `:tcp_dst`
  - `:udp_src`
  - `:udp_dst`
  - `:icmp_type`
  - `:icmp_code`
  - `:icmpv6_type`
  - `:icmpv6_code`
  - `:arp_sha`
  - `:arp_tha`
  - `:arp_op`
  - `:arp_spa`
  - `:arp_tpa`

  Modifying any of the above fields changes the corresponding packet header.

  - `:in_port`
  - `:reg(idx)` for idx in the switch's accepted range.
  - `:pkt_mark`
  - `:vlan_tci`. Modifying this field's value has side effects on the packet's
    802.1Q header. Setting a value with CFI=0 removes the 802.1Q header (if any),
    ignoring the other bits. Setting a value with CFI=1 adds or modifies the
    802.1Q header appropriately, setting the TCI field to the field's new value
    (with the CFI bit masked out).
  - `:tun_id`, `:tun_ipv4_src`, `:tun_ipv4_dst`. Modifying any of these values
    modifies the corresponding tunnel header field used for the packet's next
    tunnel encapsulation, if allowed by the configuration of the output tunnel
    port.

  A given nxm_header value may be used as 'src' or 'dst' only on a flow whose
  nx_match satisfies its prerequisites. For example, NXM_OF_IP_TOS may be used
  only if the flow's nx_match includes an nxm_entry that specifies
  nxm_type=NXM_OF_ETH_TYPE, nxm_hasmask=0, and nxm_value=0x0800.

  The switch will reject actions for which src_ofs+n_bits is greater than the
  width of 'src' or dst_ofs+n_bits is greater than the width of 'dst' with
  error type OFPET_BAD_ACTION, code OFPBAC_BAD_ARGUMENT. This action behaves
  properly when 'src' overlaps with 'dst', that is, it behaves as if 'src'
  were copied out to a temporary buffer, then the temporary buffer copied to
  'dst'.
  """

  defstruct(
    n_bits: 0,
    src_offset: 0,
    dst_offset: 0,
    src_field: nil,
    dst_field: nil
  )

  @experimenter 0x00002320
  @nxast 6

  alias __MODULE__
  alias Openflow.Action.Experimenter

  @type t :: %NxRegMove{
          n_bits: 0..0xFFFF,
          src_offset: 0..0xFFFF,
          dst_offset: 0..0xFFFF,
          src_field: atom(),
          dst_field: atom()
        }

  @doc """
  Creates a new reg_move action struct

  ## Options:

  - n_bits: Number of bits
  - src_offset: Starting bit offset in source
  - dst_offset: Starting bit offset in destination
  - src_field: oxm/nxm field name for source field
  - dst_field: oxm/nxm field name for destination field

  ## Example

  ```elixir
  # 1. move:NXM_OF_IN_PORT[]->NXM_OF_VLAN_TCI[]
  iex> NxRegMove.new(src_field: :nx_in_port, dst_field: :nx_vlan_tci)

  # 2. move:NXM_NX_TUN_ID[40..57]->NXM_NX_REG1[0..17]
  iex> NxRegMove.new(
  iex>   src_field: :tun_id,
  iex>   src_offset: 40,
  iex>   dst_field: :reg1,
  iex>   dst_offset: 0,
  iex>   n_bits: 18
  iex> )
  ```
  """
  @spec new(
          n_bits: 0..0xFFFF,
          src_offset: 0..0xFFFF,
          dst_offset: 0..0xFFFF,
          src_field: atom(),
          dst_field: atom()
        ) :: t()
  def new(options \\ []) do
    src_field = options[:src_field] || raise "src_field must be specified"
    dst_field = options[:dst_field] || raise "dst_field must be specified"
    default_n_bits = Openflow.Match.n_bits_of(dst_field)

    %NxRegMove{
      n_bits: options[:n_bits] || default_n_bits,
      src_offset: options[:src_offset] || 0,
      dst_offset: options[:dst_offset] || 0,
      src_field: src_field,
      dst_field: dst_field
    }
  end

  @spec to_binary(t()) :: binary()
  def to_binary(%NxRegMove{} = move) do
    src_field_bin = Openflow.Match.codec_header(move.src_field)
    dst_field_bin = Openflow.Match.codec_header(move.dst_field)

    Experimenter.pack_exp_header(<<
      @experimenter::32,
      @nxast::16,
      move.n_bits::16,
      move.src_offset::16,
      move.dst_offset::16,
      src_field_bin::4-bytes,
      dst_field_bin::4-bytes
    >>)
  end

  @spec read(binary()) :: t()
  def read(<<@experimenter::32, @nxast::16, body::bytes>>) do
    <<n_bits::16, src_ofs::16, dst_ofs::16, src_field_bin::4-bytes, dst_field_bin::4-bytes>> =
      body

    src_field = Openflow.Match.codec_header(src_field_bin)
    dst_field = Openflow.Match.codec_header(dst_field_bin)

    %NxRegMove{
      n_bits: n_bits,
      src_offset: src_ofs,
      dst_offset: dst_ofs,
      src_field: src_field,
      dst_field: dst_field
    }
  end
end
lib/openflow/actions/nx_reg_move.ex
0.873417
0.884788
nx_reg_move.ex
starcoder
defmodule BtrzAuth.Plug.VerifyProviders do @moduledoc """ Looks for and validates if there are `provider_ids` in the query params and if they are valids with the account data under `conn.private.account` saved by `BtrzAuth.Plug.VerifyApiKey` (the order of the plugs is very important!) This plug will look for `providerId` or `provider_id` or a list of comma separated ids in `providerIds` or `provider_ids`. This, like all other Guardian plugs, requires a Guardian pipeline to be setup. It requires an error handler as `error_handler`. These can be set either: 1. Upstream on the connection with `plug Guardian.Pipeline` 2. Upstream on the connection with `Guardian.Pipeline.{put_module, put_error_handler, put_key}` 3. Inline with an option of `:module`, `:error_handler`, `:key` If any provider is invalid, the pipeline will be halted and an error with status 400 will be set in the `conn.resp_body` like this: ```elixir %{ "status" => 400, "code" => "INVALID_PROVIDER_ID", "message" => "Error getting provider" } ``` ### Example ```elixir plug BtrzAuth.Plug.VerifyProviders ``` """ import Plug.Conn alias Guardian.Plug.Pipeline require Logger @spec init(Keyword.t()) :: Keyword.t() def init(opts), do: opts @spec call(Plug.Conn.t(), Keyword.t()) :: Plug.Conn.t() def call(conn, opts) do Logger.debug("accessing VerifyProviders plug..") conn |> get_provider_ids_from_query() |> validate_provider_ids(conn, Pipeline.fetch_error_handler!(conn, opts)) end defp get_provider_ids_from_query(conn) do conn = fetch_query_params(conn) (conn.query_params["providerIds"] || conn.query_params["provider_ids"] || conn.query_params["providerId"] || conn.query_params["provider_id"] || "") |> String.split(",", trim: true) |> Enum.map(&String.trim/1) end defp is_a_valid_provider?(account, provider_id) do nil != account |> Map.get("providers", %{}) |> Map.get(provider_id) end defp validate_provider_ids([], conn, _error_handler), do: conn defp validate_provider_ids(provider_ids, %{private: %{account: account}} = conn, error_handler) when is_map(account) do if Enum.all?(provider_ids, &is_a_valid_provider?(account, &1)) do conn else respond_error(conn, error_handler) end end defp validate_provider_ids(_provider_id, conn, error_handler), do: respond_error(conn, error_handler) defp respond_error(conn, error_handler) do error_handler |> apply(:validation_error, [conn]) |> halt() end end
lib/plug/verify_providers.ex
0.838812
0.837952
verify_providers.ex
starcoder
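A pipeline sketch showing where this plug sits; the `MyApp.*` modules are hypothetical, and `VerifyApiKey` must run first so the account is present:

```elixir
defmodule MyApp.ApiAuthPipeline do
  use Guardian.Plug.Pipeline,
    otp_app: :my_app,
    module: MyApp.Guardian,
    error_handler: MyApp.AuthErrorHandler  # must implement validation_error/1

  plug BtrzAuth.Plug.VerifyApiKey      # puts the account under conn.private.account
  plug BtrzAuth.Plug.VerifyProviders   # then checks providerId(s) from the query string
end
```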
defmodule Kalevala.Character.View do @moduledoc """ Render output from the game """ defmacro __using__(_opts) do quote do import Kalevala.Character.View.Macro alias Kalevala.Character.View end end @doc """ "Join" an IO data list with a separator string Similar to Enum.join, but leaves as an IO data list """ def join([], _separator), do: [] def join([line], _separator), do: [line] def join([line | lines], separator) do [line, separator | join(lines, separator)] end @doc """ Trim empty lines from io data Detects empty lines in io data (for example when rendered via EEx templating), and removes them. This way the template string works as expected, returning an empty string from a sub-render will strip the line. """ def trim_lines([]), do: [] def trim_lines(["\n", "", "\n" | segments]), do: ["\n" | trim_lines(segments)] def trim_lines(["\n", nil, "\n" | segments]), do: ["\n" | trim_lines(segments)] def trim_lines([segment | segments]), do: [segment | trim_lines(segments)] end defmodule Kalevala.Character.View.Macro do @moduledoc """ Imported into views """ alias Kalevala.Character.View @doc """ Creates ~E which runs through EEx templating """ defmacro sigil_E({:<<>>, _, [expr]}, opts) do string = EEx.compile_string(expr, line: __CALLER__.line + 1, sigil_opts: opts, engine: Kalevala.Character.View.EExKalevala ) quote do View.trim_lines(unquote(string)) end end @doc """ Creates ~i to create IO lists that look like standard interpolation """ defmacro sigil_i({:<<>>, _, text}, _) do Enum.map(text, &sigil_i_unwrap/1) end defp sigil_i_unwrap({:"::", _, interpolation}) do [text | _] = interpolation {_, _, [text]} = text quote do to_string(unquote(text)) end end defp sigil_i_unwrap(text) when is_binary(text) do :elixir_interpolation.unescape_string(text) end end defmodule Kalevala.Character.View.EExKalevala do @moduledoc """ An EEx Engine that returns IO data instead of a string Taken from [Phoenix.HTML.Engine](https://github.com/phoenixframework/phoenix_html/blob/master/lib/phoenix_html/engine.ex) """ @behaviour EEx.Engine @impl true def init(_opts) do %{ iodata: [], dynamic: [], vars_count: 0 } end @impl true def handle_begin(state) do %{state | iodata: [], dynamic: []} end @impl true def handle_end(quoted) do handle_body(quoted) end @impl true def handle_body(state) do %{iodata: iodata, dynamic: dynamic} = state iodata = Enum.reverse(iodata) {:__block__, [], Enum.reverse([iodata | dynamic])} end @impl true def handle_text(state, _meta, text) do %{iodata: iodata} = state %{state | iodata: [text | iodata]} end @impl true def handle_expr(state, "=", ast) do ast = Macro.prewalk(ast, &EEx.Engine.handle_assign/1) %{iodata: iodata, dynamic: dynamic, vars_count: vars_count} = state var = Macro.var(:"arg#{vars_count}", __MODULE__) ast = quote do: unquote(var) = unquote(ast) %{state | dynamic: [ast | dynamic], iodata: [var | iodata], vars_count: vars_count + 1} end def handle_expr(state, "", ast) do ast = Macro.prewalk(ast, &EEx.Engine.handle_assign/1) %{dynamic: dynamic} = state %{state | dynamic: [ast | dynamic]} end def handle_expr(state, marker, ast) do EEx.Engine.handle_expr(state, marker, ast) end end
lib/kalevala/character/view.ex
0.796292
0.532
view.ex
starcoder
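A small sketch of the helpers above: `~i` builds an IO list that looks like ordinary interpolation, and `join/2` separates IO data without flattening it:

```elixir
defmodule ExampleView do
  use Kalevala.Character.View

  def greeting(name) do
    ~i(Hello, #{name}!)
  end
end

ExampleView.greeting("Nissa")
#=> ["Hello, ", "Nissa", "!"]

Kalevala.Character.View.join(["one", "two"], ", ")
#=> ["one", ", ", "two"]
```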
defmodule Styx.SchemaRegistry.Definition.Schema do alias Styx.SchemaRegistry.Definition.Schema, as: SchemaDef @moduledoc """ Implements macros to generate an avro schema Available macros: * schema/2 * set_namespace/1 * attribute_accessors/0 * register/0 ## Use: ``` use Styx.SchemaRegistry.Definition.Schema schema "edu.uwsp.banderson.styx" do required :username, :string optional :age, :boolean end ``` """ @doc """ Adds the schema and set_namespace macros. * Uses Styx.SchemaRegistry.Definition.Value * Sets @namespace to nil """ defmacro __using__(_) do quote location: :keep do use Styx.SchemaRegistry.Definition.Value import Styx.SchemaRegistry.Definition.Schema, only: [schema: 2, set_namespace: 1] @namespace nil end end @doc """ sets @namespace if it has not already been set """ defmacro set_namespace(namespace) do quote do if @namespace == nil do @namespace unquote(namespace) end end end @doc """ sets @namespace, evaluates the fields block if given """ defmacro schema(namespace, [do: block]) do quote do set_namespace(unquote(namespace)) try do unquote(block) after SchemaDef.attribute_accessors() SchemaDef.register() SchemaDef.perform() :ok end end end @doc """ generates the fields() and namespace() functions """ defmacro attribute_accessors() do quote do def fields, do: @fields def namespace, do: @namespace end end @doc """ generates the register(postfix \\ "value") function """ defmacro register() do quote do require Logger def register(postfix \\ "value") do avro_schema = Styx.SchemaRegistry.Avro.Schema.generate(@namespace, @fields) {status, _} = Styx.SchemaRegistry.API.register( Styx.SchemaRegistry.Request.host(), "#{@namespace}-" <> postfix, avro_schema ) if status == :ok, do: Logger.info("Schema #{@namespace}-#{postfix} registered.") end end end @doc """ generates a default perform() function """ defmacro perform() do quote do require Logger def perform() do Logger.warn("Default perform() called in #{__MODULE__}") end end end end
lib/styx/schema_registry/definition/schema.ex
0.697094
0.563858
schema.ex
starcoder
defmodule LevelWeb.Schema.Enums do @moduledoc false use Absinthe.Schema.Notation enum :user_state do value :active, as: "ACTIVE" value :disabled, as: "DISABLED" end enum :space_user_state do value :active, as: "ACTIVE" value :disabled, as: "DISABLED" end enum :space_user_role do value :member, as: "MEMBER" value :admin, as: "ADMIN" value :owner, as: "OWNER" end enum :space_user_order_field do value :space_name value :last_name end enum :space_state do value :active, as: "ACTIVE" value :disabled, as: "DISABLED" end enum :space_setup_state do value :create_groups value :invite_users value :complete end enum :post_state do value :open, as: "OPEN" value :closed, as: "CLOSED" end enum :post_subscription_state do value :not_subscribed, as: "NOT_SUBSCRIBED" value :subscribed, as: "SUBSCRIBED" value :unsubscribed, as: "UNSUBSCRIBED" end enum :inbox_state do value :excluded, as: "EXCLUDED" value :dismissed, as: "DISMISSED" value :read, as: "READ" value :unread, as: "UNREAD" end enum :user_order_field do value :last_name end enum :space_order_field do value :name end enum :invitation_order_field do value :email end enum :group_order_field do value :name end enum :group_membership_state do value :not_subscribed, as: "NOT_SUBSCRIBED" value :subscribed, as: "SUBSCRIBED" end enum :post_order_field do value :posted_at value :last_pinged_at value :last_activity_at end enum :reply_order_field do value :posted_at end enum :order_direction do value(:asc) value(:desc) end enum :group_state do value :open, as: "OPEN" value :closed, as: "CLOSED" end enum :ping_filter do value :has_pings value :has_no_pings value :all end enum :watching_filter do value :is_watching value :all end enum :inbox_filter do value :unread value :read value :dismissed value :unread_or_read value :all end end
lib/level_web/schema/enums.ex
0.599251
0.444444
enums.ex
starcoder
defmodule Day12.Part2 do @doc """ iex> Day12.Part2.part2("day12-sample.txt") 286 """ def part2(filename) do parse_input(filename) |> Enum.reduce({0, 0, 10, -1}, &move_from/2) |> distance_to({0, 0}) end @doc """ iex> Day12.Part2.part2 28885 """ def part2, do: part2("day12.txt") def parse_input(filename) do "inputs/#{filename}" |> File.stream!() |> Stream.map(&String.trim/1) |> Stream.map(&parse_instruction/1) end def parse_instruction(str) do {instruction, quantity} = String.split_at(str, 1) {instruction, String.to_integer(quantity)} end def move_from({"N", quantity}, {srcx, srcy, wpx, wpy}), do: {srcx, srcy, wpx, wpy - quantity} def move_from({"S", quantity}, {srcx, srcy, wpx, wpy}), do: {srcx, srcy, wpx, wpy + quantity} def move_from({"E", quantity}, {srcx, srcy, wpx, wpy}), do: {srcx, srcy, wpx + quantity, wpy} def move_from({"W", quantity}, {srcx, srcy, wpx, wpy}), do: {srcx, srcy, wpx - quantity, wpy} def move_from({"F", quantity}, {srcx, srcy, wpx, wpy}), do: {srcx + wpx * quantity, srcy + wpy * quantity, wpx, wpy} @doc """ iex> Day12.Part2.move_from({"L", 90}, {0,0,1,2}) {0,0,2,-1} iex> Day12.Part2.move_from({"L", 180}, {0,0,1,2}) {0,0,-1,-2} iex> Day12.Part2.move_from({"L", 270}, {0,0,1,2}) {0,0,-2,1} iex> Day12.Part2.move_from({"R", 90}, {0,0,1,2}) {0,0,-2,1} iex> Day12.Part2.move_from({"R", 180}, {0,0,1,2}) {0,0,-1,-2} iex> Day12.Part2.move_from({"R", 270}, {0,0,1,2}) {0,0,2,-1} """ def move_from({"L", quantity}, {srcx, srcy, wpx, wpy}) do case rem(quantity, 360) do 90 -> {srcx, srcy, wpy, -wpx} 180 -> {srcx, srcy, -wpx, -wpy} 270 -> {srcx, srcy, -wpy, wpx} end end def move_from({"R", quantity}, {srcx, srcy, wpx, wpy}) do case rem(quantity, 360) do 90 -> {srcx, srcy, -wpy, wpx} 180 -> {srcx, srcy, -wpx, -wpy} 270 -> {srcx, srcy, wpy, -wpx} end end @doc """ iex> Day12.Part2.distance_to({0,0,0,0}, {17, 8}) 25 """ def distance_to({srcx, srcy, _, _}, {destx, desty}), do: abs(destx - srcx) + abs(desty - srcy) end
lib/day12/part2.ex
0.529993
0.434821
part2.ex
starcoder
alias Treex.Tree

defmodule Treex.Traverse do
  @moduledoc """
  Basic tree traversal algorithms, implementing depth-first and breadth-first
  traversal.
  """
  @moduledoc since: "0.1.0"

  @type traverse :: :dfs | :bfs
  @type tree :: Tree.t
  @type history :: [any]
  @type result :: {:continue, any} | {:stop, any}
  @type operation :: (tree, history -> result)
  @type stack :: list(tree) | []
  @type queue :: :queue.queue
  @type collection :: stack | queue

  @doc """
  Traverse the given tree, invoking the given operation function on each node.

  Takes the tree, the operation function, and the algorithm to use (one of
  `:bfs` or `:dfs`).

  An operation function must have the type:

  `(t:Treex.Tree.t/0, t:Treex.Traverse.history/0 -> t:Treex.Traverse.result/0)`

  with the form:

  `fn node, history -> body end`

  where `node` is the current node and `history` is the accumulated list of
  results from already-visited nodes.

  Returns: `t:Treex.Traverse.history/0`

  ## Examples

      iex> Treex.Traverse.traverse(nil, fn x, _ -> {:continue, x} end, :bfs)
      []

      iex> Treex.Traverse.traverse(
      ...>   %Treex.Tree{value: 1,
      ...>               children: [%Treex.Tree{value: 2},
      ...>                          %Treex.Tree{value: 3},
      ...>                          %Treex.Tree{value: 4}]},
      ...>   fn x, _ -> {:continue, x} end,
      ...>   :bfs)
      [4, 3, 2, 1]
  """
  @doc since: "0.1.0"
  @spec traverse(tree, operation, traverse) :: history
  def traverse(tree, operation, :bfs), do: tree |> tree_insert(new_queue()) |> bfs(operation, [])
  def traverse(tree, operation, :dfs), do: tree |> tree_insert(new_stack()) |> dfs(operation, [])

  @doc false
  @spec apply_operation(operation, tree, history) :: result
  defp apply_operation(operation, node, history) do
    arity = :erlang.fun_info(operation)[:arity]

    if arity != 2 do
      raise "Function #{operation |> inspect} has invalid arity. Expected 2, got #{arity}."
    else
      operation.(node, history)
    end
  end

  @doc false
  @spec bfs(queue, operation, history) :: history
  defp bfs({[], []}, _, history), do: history

  defp bfs(queue, operation, history) do
    {{:value, node}, new_queue} = :queue.out(queue)
    new_queue |> next(&bfs/3, node, operation, history)
  end

  @doc false
  @spec dfs(stack, operation, history) :: history
  defp dfs(stack, operation, history)
  defp dfs([], _, history), do: history
  defp dfs([node | stack], operation, history), do: stack |> next(&dfs/3, node, operation, history)

  @doc false
  @spec next(collection, function, tree, operation, history) :: history
  defp next(collection, named_function, node, operation, history) do
    case apply_operation(operation, node, history) do
      {:continue, res} ->
        node.children
        |> Enum.reduce(collection, &tree_insert/2)
        |> named_function.(operation, [res | history])

      {:stop, res} ->
        [res | history]
    end
  end

  @doc false
  @spec new_stack :: stack
  defp new_stack, do: []

  @doc false
  @spec new_queue :: queue
  defp new_queue, do: :queue.new

  @doc false
  @spec tree_insert(tree, collection) :: collection
  defp tree_insert(tree, collection)
  defp tree_insert(nil, collection), do: collection
  defp tree_insert(tree, stack) when is_list(stack), do: [tree | stack]
  defp tree_insert(tree, queue), do: tree |> :queue.in(queue)
end
lib/treex/traverse.ex
0.924381
0.61086
traverse.ex
starcoder
defmodule FarmbotOS.SysCalls.Movement do @moduledoc false require FarmbotOS.Logger alias FarmbotOS.Firmware.Command alias FarmbotOS.BotState def get_current_x do get_position(:x) end def get_current_y do get_position(:y) end def get_current_z do get_position(:z) end def get_cached_x do get_cached_position(:x) end def get_cached_y do get_cached_position(:y) end def get_cached_z do get_cached_position(:z) end def zero(axis) do case Command.set_zero(assert_axis!(axis)) do {:ok, _} -> :ok {:error, reason} -> FarmbotOS.SysCalls.give_firmware_reason("zero()", reason) end end def get_position() do # Update read cache case Command.report_current_position() do {:ok, %{x: x, y: y, z: z}} -> [x: x, y: y, z: z] reason -> FarmbotOS.SysCalls.give_firmware_reason("get_position", reason) end end def get_position(axis) do case get_position() do {:error, _} = error -> error position -> Keyword.fetch!(position, assert_axis!(axis)) end end def get_cached_position() do %{x: x, y: y, z: z} = BotState.fetch().location_data.position [x: x, y: y, z: z] end def get_cached_position(axis) do axis = assert_axis!(axis) Keyword.fetch!(get_cached_position(), axis) end def move_absolute(x, y, z, speed) do do_move_absolute(x, y, z, speed, speed, speed) end def move_absolute(x, y, z, speed_x, speed_y, speed_z) do do_move_absolute(x, y, z, speed_x, speed_y, speed_z) end defp do_move_absolute(x, y, z, speed_x, speed_y, speed_z) do %{x: x, y: y, z: z, a: speed_x, b: speed_y, c: speed_z} |> Command.move_abs() |> finish_movement() end def finish_movement(:ok), do: :ok def finish_movement({:ok, _}), do: :ok def finish_movement({:error, reason}), do: finish_movement(reason) def finish_movement(nil), do: {:error, "Movement error. See logs for details."} def finish_movement(reason) do msg = "Movement failed. #{inspect(reason)}" FarmbotOS.Logger.error(1, msg) {:error, msg} end def calibrate(axis) do case Command.find_length(assert_axis!(axis)) do {:ok, _} -> :ok reason -> FarmbotOS.SysCalls.give_firmware_reason("calibrate()", reason) end end def find_home(axis) do case Command.find_home(assert_axis!(axis)) do {:ok, _} -> :ok {:error, reason} -> FarmbotOS.SysCalls.give_firmware_reason("find_home", reason) end end def home(axis, _speed) do case Command.go_home(assert_axis!(axis)) do {:ok, _} -> :ok {:error, reason} -> FarmbotOS.SysCalls.give_firmware_reason("home", reason) end end defp assert_axis!(axis) when is_atom(axis), do: axis defp assert_axis!(axis) when axis in ~w(x y z), do: String.to_existing_atom(axis) defp assert_axis!(axis) do raise("unknown axis #{axis}") end end
lib/os/sys_calls/movement.ex
0.646906
0.494934
movement.ex
starcoder
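A usage sketch for the movement sys-calls above; it is only meaningful on a running device with the firmware up, and the return values are illustrative:

```elixir
alias FarmbotOS.SysCalls.Movement

# read the last known position without hitting the firmware
[x: x, y: y, z: z] = Movement.get_cached_position()

# nudge the gantry 10mm along x at speed 100, keeping y/z
:ok = Movement.move_absolute(x + 10.0, y, z, 100)
```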
defmodule ExtrText do @moduledoc """ ExtrText is an Elixir library for extracting text and meta information from `.docx`, `.xlsx`, `.pptx` files. """ @doc """ Extracts properties (metadata) from the specified OOXML data. """ @spec get_metadata(binary()) :: {:ok, ExtrText.Metadata.t()} | {:error, String.t()} def get_metadata(data) do case unzip(data) do {:ok, subdir, paths} -> do_get_metadata(subdir, paths) {:error, reason} -> {:error, reason} end end @doc """ Extracts plain texts from the body of specified OOXML data. The return value is a double nested list of strings. Each element of outer list represents the sheets of `.xsls` data and the slides of `.pptx` data. For `.docx` data, the outer list has only one element. Each element of inner list represents the paragraphs or lines of a spreadsheet. """ def get_texts(data) do case unzip(data) do {:ok, subdir, paths} -> do_get_texts(subdir, paths) {:error, reason} -> {:error, reason} end end defp unzip(data) do tmpdir = System.tmp_dir!() now = DateTime.utc_now() {usec, _} = now.microsecond subdir = tmpdir <> "/extr-text-" <> Integer.to_string(usec) case File.mkdir_p(subdir) do :ok -> do_unzip(data, subdir) {:error, _reason} -> {:error, "Can't create #{subdir}."} end end defp do_unzip(data, subdir) do case :zip.unzip(data, cwd: String.to_charlist(subdir)) do {:ok, paths} -> {:ok, subdir, Enum.map(paths, &List.to_string/1)} {:error, _reason} -> {:error, "Can't unzip the given data."} end end defp get_worksheets(subdir, paths) do Enum.filter(paths, fn path -> String.starts_with?(path, subdir <> "/xl/worksheets/") && String.ends_with?(path, ".xml") end) end defp get_slides(subdir, paths) do Enum.filter(paths, fn path -> String.starts_with?(path, subdir <> "/ppt/slides/") && String.ends_with?(path, ".xml") end) end defp do_get_metadata(subdir, _paths) do result = case File.read(Path.join(subdir, "docProps/core.xml")) do {:ok, xml} -> extract_metadata(xml) {:error, _} -> {:error, "Can't read docProps/core.xml."} end File.rm_rf!(subdir) result end defp extract_metadata(xml) do {:ok, %{metadata: metadata}} = Saxy.parse_string(xml, ExtrText.MetadataHandler, %{ name: nil, metadata: %ExtrText.Metadata{} }) {:ok, metadata} end defp do_get_texts(subdir, paths) do type = cond do Enum.any?(paths, fn path -> path == subdir <> "/word/document.xml" end) -> :docx Enum.any?(paths, fn path -> path == subdir <> "/xl/workbook.xml" end) -> :xlsx Enum.any?(paths, fn path -> path == subdir <> "/ppt/presentation.xml" end) -> :pptx true -> :unknown end result = do_get_texts(subdir, paths, type) File.rm_rf!(subdir) result end defp do_get_texts(_subdir, _paths, :unknown) do {:error, "Could not find a target XML file."} end defp do_get_texts(subdir, paths, :xlsx) do strings = if File.exists?(subdir <> "/xl/sharedStrings.xml") do ss_xml = File.read!(subdir <> "/xl/sharedStrings.xml") {:ok, strings} = Saxy.parse_string(ss_xml, ExtrText.ExcelSharedStringsHandler, []) Enum.reverse(strings) else [] end st_xml = File.read!(subdir <> "/xl/styles.xml") {:ok, %{num_formats: num_formats, cell_style_xfs: cell_style_xfs}} = Saxy.parse_string(st_xml, ExtrText.ExcelStylesHandler, %{ num_formats: [], cell_style_xfs: [], name: nil }) num_formats = Enum.reverse(num_formats) cell_style_xfs = Enum.reverse(cell_style_xfs) worksheets = get_worksheets(subdir, paths) text_sets = worksheets |> Enum.map(fn path -> case File.read(path) do {:ok, xml} -> extract_texts(:xslx, xml, strings, num_formats, cell_style_xfs) {:error, _} -> nil end end) |> Enum.reject(fn doc -> is_nil(doc) end) {:ok, 
text_sets} end defp do_get_texts(subdir, paths, type) when type in ~w(docx pptx)a do {handler, paths} = case type do :docx -> {ExtrText.WordDocumentHandler, [subdir <> "/word/document.xml"]} :pptx -> {ExtrText.PresentationSlideHandler, get_slides(subdir, paths)} end text_sets = paths |> Enum.map(fn path -> case File.read(path) do {:ok, xml} -> extract_texts(handler, xml) {:error, _} -> nil end end) |> Enum.reject(fn doc -> is_nil(doc) end) {:ok, text_sets} end defp extract_texts(:xslx, xml, strings, num_formats, cell_style_xfs) do {:ok, %{texts: texts}} = Saxy.parse_string(xml, ExtrText.ExcelWorksheetHandler, %{ texts: [], buffer: [], strings: strings, num_formats: num_formats, cell_style_xfs: cell_style_xfs, type: nil, style: nil }) Enum.reverse(texts) end defp extract_texts(handler, xml) do {:ok, %{texts: texts}} = Saxy.parse_string(xml, handler, %{texts: [], buffer: []}) Enum.reverse(texts) end end
lib/extr_text.ex
0.787237
0.41185
extr_text.ex
starcoder
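A usage sketch for the extractor above; `report.docx` is a placeholder path:

```elixir
data = File.read!("report.docx")

{:ok, metadata} = ExtrText.get_metadata(data)
# metadata is an %ExtrText.Metadata{} struct built from docProps/core.xml

{:ok, text_sets} = ExtrText.get_texts(data)
# for .docx there is exactly one text set: a list of paragraph strings
[paragraphs] = text_sets
```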
defmodule TypeCheck.Type.StreamData do @moduledoc """ Transforms types into StreamData generators. With the exception of `wrap_with_gen/2`, methods in this module are only compiled when the optional dependency `:stream_data` is added to your project's dependencies. """ defstruct [:type, :generator_function] @doc """ Customizes a type with a _custom_ generator. `generator_function` can be a arity-zero function (in which case it should simply return a StreamData generator) or a arity-one function, in which case it is passed the value that would be generated by default and it can be altered by e.g. using `StreamData.map/2` or `StreamData.bind/2`. Note that these functions _must_ be of the form `&Module.function/arity` because this is the only form of function capture that can be stored at compile-time. ## Example: iex> defmodule IntString do ...> use TypeCheck ...> import TypeCheck.Type.StreamData ...> @type! t() :: ((val :: binary()) when Integer.parse(val) != :error) ...> |> wrap_with_gen(&IntString.gen/0) ...> ...> def gen() do ...> StreamData.integer() ...> |> StreamData.map(&to_string/1) ...> end ...> end ...> ...> IntString.t() |> TypeCheck.Type.StreamData.to_gen() |> StreamData.seeded(42) |> Enum.take(10) ["0", "2", "1", "-3", "-5", "-4", "-3", "-4", "3", "-6"] """ def wrap_with_gen(type, generator_function) when is_function(generator_function, 0) or is_function(generator_function, 1) do %__MODULE__{type: type, generator_function: generator_function} end defimpl TypeCheck.Protocols.ToCheck do def to_check(s, param) do TypeCheck.Protocols.ToCheck.to_check(s.type, param) end end defimpl TypeCheck.Protocols.Inspect do def inspect(s, opts) do TypeCheck.Protocols.Inspect.inspect(s.type, opts) end end if Code.ensure_loaded?(StreamData) do defimpl TypeCheck.Protocols.ToStreamData do def to_gen(s) do if is_function(s.generator_function, 0) do s.generator_function.() else s.type |> TypeCheck.Protocols.ToStreamData.to_gen() |> s.generator_function.() end end end @doc """ When given a type, it is transformed to a StreamData generator that can be used in a property test. 
iex> import TypeCheck.Type.StreamData iex> generator = TypeCheck.Type.build({:ok | :error, integer()}) |> to_gen() iex> StreamData.seeded(generator, 42) |> Enum.take(10) [ {:ok, -1}, {:ok, 2}, {:ok, -2}, {:ok, -4}, {:ok, 1}, {:ok, 1}, {:ok, 2}, {:ok, 4}, {:ok, -7}, {:ok, 5} ] """ def to_gen(type) do TypeCheck.Protocols.ToStreamData.to_gen(type) end def arbitrary_primitive_type_gen do choices = primitive_types_list() Elixir.StreamData.one_of(choices) end defp primitive_types_list() do import TypeCheck.Builtin simple = [ any(), atom(), binary(), bitstring(), boolean(), float(), function(), integer(), number() ] |> Enum.map(&Elixir.StreamData.constant/1) lit = Elixir.StreamData.term() |> Elixir.StreamData.map(&literal/1) [lit | simple] end def arbitrary_type_gen() do # TODO WIP StreamData.one_of( primitive_types_list() ++ [list_gen(), map_gen(), fixed_list_gen(), fixed_tuple_gen()] ) end defp list_gen() do lazy_type_gen() |> StreamData.map(&TypeCheck.Builtin.list/1) end defp map_gen() do {lazy_type_gen(), lazy_type_gen()} |> StreamData.map(fn {key_type, value_type} -> TypeCheck.Builtin.map(key_type, value_type) end) end def fixed_list_gen() do lazy_type_gen() |> StreamData.list_of() |> StreamData.map(&TypeCheck.Builtin.fixed_list/1) end defp fixed_tuple_gen() do lazy_type_gen() |> StreamData.list_of(max_length: 255) |> StreamData.map(&TypeCheck.Builtin.fixed_tuple/1) end defp lazy_type_gen() do # Lazily call content generator # To prevent infinite expansion recursion StreamData.constant({}) |> StreamData.bind(fn _ -> arbitrary_type_gen() |> StreamData.scale(fn size -> div(size, 2) end) end) end else def arbitrary_type_gen() do raise ArgumentError, """ `arbitrary_type_gen/0` depends on the optional library `:stream_data`. To use this functionality, add `:stream_data` to your application's deps. """ end end end
lib/type_check/type/stream_data.ex
0.79166
0.501709
stream_data.ex
starcoder
defmodule PTA.Parser do @moduledoc """ TODO """ # Partition `tokens` at the first token for which `pred` evaluates to true. # If `inclusive` is true, the token that matched `pred` will be included in the second returned partition. def _eat(tokens, pred \\ fn p -> p == "\n" end, inclusive \\ false) do case tokens do [h | tail] -> if pred.(h) do if inclusive do {[], [h] ++ tail} else {[], tail} end else {acc, remaining} = _eat(tail, pred, inclusive) {[h | acc], remaining} end _ -> {[], tokens} end end @doc """ Parses the journal in `src` and returns a `PTA.Journal` containing the accounts and transactions found. """ @spec parse(String.t()) :: {:ok, PTA.Journal.t()} @spec parse(String.t()) :: {:error, String.t()} def parse(src) do tokens = _tokenize(String.split(src, "", trim: true)) _journal(tokens) end @spec _journal(list(String.t())) :: {:ok, PTA.Journal.t()} @spec _journal(list(String.t())) :: {:error, String.t()} def _journal(tokens) do case tokens do [_ | _] -> case _journal_item(tokens) do {:ok, %PTA.Account{} = a, remaining} -> case _journal(remaining) do {:ok, j} -> {:ok, %PTA.Journal{accounts: [a | j.accounts], transactions: j.transactions}} {:error, reason} -> {:error, reason} end {:ok, %PTA.Transaction{} = t, remaining} -> case _journal(remaining) do {:ok, j} -> {:ok, %PTA.Journal{accounts: j.accounts, transactions: [t | j.transactions]}} {:error, reason} -> {:error, reason} end {:ok, remaining} -> case _journal(remaining) do {:ok, j} -> {:ok, j} {:error, reason} -> {:error, reason} end {:error, reason} -> {:error, reason} end [] -> {:ok, %PTA.Journal{accounts: [], transactions: []}} _ -> {:error, "missing token list"} end end @spec _journal_item(list(String.t())) :: {:ok, PTA.Transaction.t(), list(String.t())} @spec _journal_item(list(String.t())) :: {:ok, PTA.Account.t(), list(String.t())} @spec _journal_item(list(String.t())) :: {:ok, list(String.t())} @spec _journal_item(list(String.t())) :: {:error, String.t()} def _journal_item(tokens) do case tokens do [head | tail] -> cond do String.match?(head, ~r/\d{4}\/\d{2}\/\d{1,2}/) -> case _transaction(tokens) do {:ok, t, remaining} -> {:ok, t, remaining} {:error, reason} -> {:error, reason} end head == "\n" -> {:ok, tail} head == ";" -> {_, remaining} = _eat(tokens) {:ok, remaining} head == "account" -> # TODO: process account stanzas {_, remaining} = _eat(tokens) {:ok, remaining} head == "include" -> # TODO: process includes {_, remaining} = _eat(tokens) {:ok, remaining} true -> {:error, "got unknown token"} end _ -> {:error, "missing token list"} end end @spec _transaction(list(String.t())) :: {:ok, PTA.Transaction.t(), list(String.t())} @spec _transaction(list(String.t())) :: {:error, String.t()} def _transaction(tokens) do case tokens do [head | tail] -> {acc, leftover} = _eat(tail) case _postings(leftover) do {:ok, postings, remaining} -> { :ok, %PTA.Transaction{ date: head, cleared: hd(acc) != "!", payee: case acc do [h | t] when h == "!" 
or h == "*" -> Enum.join(t, " ")
                _ -> Enum.join(acc, " ")
              end,
            postings: postings
          },
          remaining
        }

        {:error, reason} ->
          {:error, reason}
      end

      _ ->
        {:error, "missing token list"}
    end
  end

  @spec _postings(list(String.t())) :: {:ok, list(PTA.Posting.t()), list(String.t())}
  @spec _postings(list(String.t())) :: {:txn_end, list(String.t())}
  @spec _postings(list(String.t())) :: {:error, String.t()}
  def _postings(tokens) do
    tokens =
      case tokens do
        ["\n" | tail] -> tail
        _ -> tokens
      end

    case _posting(tokens) do
      {:ok, p, remaining} ->
        case _postings(remaining) do
          {:ok, postings, leftover} -> {:ok, [p | postings], leftover}
          {:txn_end, remaining} -> {:ok, [p], remaining}
          {:error, reason} -> {:error, reason}
        end

      :txn_end ->
        {:txn_end, tokens}

      {:error, reason} ->
        {:error, reason}
    end
  end

  @spec _posting(list(String.t())) :: {:ok, PTA.Posting.t(), list(String.t())}
  @spec _posting(list(String.t())) :: :txn_end
  @spec _posting(list(String.t())) :: {:error, String.t()}
  def _posting(tokens) do
    {acc, remaining} = _eat(tokens)

    cond do
      acc == [] ->
        :txn_end

      String.match?(hd(acc), ~r/\d{4}\/\d{2}\/\d{1,2}/) ->
        :txn_end

      true ->
        # TODO: parse tags from `comment_parts`
        {posting_parts, comment_parts} = _eat(acc, fn p -> p == ";" end)

        comment =
          if length(comment_parts) > 0 do
            Enum.join(comment_parts, " ")
          else
            nil
          end

        {account_parts, amount_parts} =
          _eat(posting_parts, fn p -> String.match?(p, ~r/^(-?\d+\.?\d*)(.*)$/) end, true)

        account =
          case account_parts do
            [] -> nil
            _ -> Enum.join(account_parts, " ")
          end

        posting_parts =
          case account_parts do
            [] -> amount_parts
            _ -> [account] ++ amount_parts
          end

        case posting_parts do
          [account] ->
            {:ok, %PTA.Posting{account: account, comment: comment}, remaining}

          [account, trailer] ->
            case Regex.run(~r/^(-?\d+\.?\d*)(.*)$/, trailer) do
              [_, quant, commodity] ->
                # Float.parse/1 returns {float, rest}; keep only the number
                {quantity, _} = Float.parse(quant)

                {:ok,
                 %PTA.Posting{
                   account: account,
                   amount: %PTA.Amount{quantity: quantity, commodity: commodity},
                   comment: comment
                 }, remaining}

              _ ->
                {:error, "invalid posting amount"}
            end

          [account, quant, commodity] ->
            {quantity, _} = Float.parse(quant)

            {:ok,
             %PTA.Posting{
               account: account,
               amount: %PTA.Amount{quantity: quantity, commodity: commodity},
               comment: comment
             }, remaining}

          [] when comment != nil ->
            {:ok, %PTA.Posting{comment: comment}, remaining}

          _ ->
            {:error, "invalid posting"}
        end
    end
  end

  # Splits the given character list into tokens.
  # String.split/2 would be perfect here, but we need to treat newlines as separate tokens.
  @spec _tokenize(list(String.t()), list(String.t())) :: list(String.t())
  def _tokenize(chars, acc \\ []) do
    case chars do
      [head | tail] ->
        cond do
          head == " " or head == "\t" ->
            case acc do
              [_ | _] -> [for(c <- acc, into: "", do: c) | _tokenize(tail, [])]
              _ -> _tokenize(tail, [])
            end

          head == "\n" or head == ";" ->
            case acc do
              [_ | _] -> [for(c <- acc, into: "", do: c)] ++ [head | _tokenize(tail, [])]
              _ -> [head | _tokenize(tail, [])]
            end

          true ->
            _tokenize(tail, acc ++ [head])
        end

      _ ->
        acc
    end
  end
end
lib/pta/parser.ex
0.606382
0.55911
parser.ex
starcoder
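A sketch of parsing a minimal two-posting journal with the module above:

```elixir
src = """
2021/01/15 * Grocery Store
    expenses:food  42.50 USD
    assets:cash
"""

{:ok, %PTA.Journal{transactions: [txn]}} = PTA.Parser.parse(src)

txn.payee             #=> "Grocery Store"
txn.cleared           #=> true
length(txn.postings)  #=> 2
```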
defmodule Roman.Validators.Sequence do
  @moduledoc false

  @type sequence :: [Roman.Decoder.decoded_numeral()]

  @doc """
  Validates the given sequence of decoded numerals.

  Runs all other validators defined in this module, returning `{:ok, sequence}`
  on success or `{:error, reason}` on validation failure.
  """
  @spec validate(sequence) :: {:ok, sequence} | Roman.error()
  def validate(seq) do
    with {:ok, seq} <- increasing_value_order(seq),
         {:ok, seq} <- subtraction_bounds_following_values(seq) do
      {:ok, seq}
    else
      {:error, _} = error -> error
    end
  end

  @spec increasing_value_order(sequence) :: {:ok, sequence} | Roman.error()
  defp increasing_value_order(seq) when is_list(seq) do
    case check_increasing_value_order(seq) do
      :ok -> {:ok, seq}
      {:error, _} = error -> error
    end
  end

  @spec check_increasing_value_order(sequence) :: :ok | Roman.error()
  defp check_increasing_value_order([]), do: :ok
  defp check_increasing_value_order([_]), do: :ok

  defp check_increasing_value_order([{_, %{value: a}}, {_, %{value: b}} | _] = [_ | t])
       when a >= b do
    check_increasing_value_order(t)
  end

  defp check_increasing_value_order([{num_l, %{value: val_l}}, {num_r, %{value: val_r}} | _]) do
    {:error,
     {:sequence_increasing,
      "larger numerals must be placed to the left of smaller numerals, but " <>
        "encountered #{num_l} (#{val_l}) before #{num_r} (#{val_r})"}}
  end

  # Once a value has been subtracted from another, no further numeral or pair
  # may match or exceed the subtracted value. This disallows values such as
  # MCMD or CMC.
  @spec subtraction_bounds_following_values(sequence) :: {:ok, sequence} | Roman.error()
  defp subtraction_bounds_following_values(seq) do
    case check_subtraction_bound(seq, nil) do
      :ok -> {:ok, seq}
      {:error, _} = error -> error
    end
  end

  @spec check_subtraction_bound(
          sequence,
          nil | {integer, Roman.numeral()}
        ) :: :ok | Roman.error()
  defp check_subtraction_bound([], _), do: :ok

  defp check_subtraction_bound([{num, %{delta: d}} | t], nil) do
    check_subtraction_bound(t, {d, num})
  end

  defp check_subtraction_bound([_ | t], nil) do
    check_subtraction_bound(t, nil)
  end

  defp check_subtraction_bound([{n, %{value: v, delta: d}} | t], {delta, _}) when v < delta do
    check_subtraction_bound(t, {d, n})
  end

  defp check_subtraction_bound([{_, %{value: v}} | t], {delta, _} = acc) when v < delta do
    check_subtraction_bound(t, acc)
  end

  defp check_subtraction_bound([{num, %{value: v}} | _], {delta, delta_num}) when v >= delta do
    {:error,
     {:value_greater_than_subtraction,
      "once a value has been subtracted from another, no further numeral or " <>
        "pair may match or exceed the subtracted value, but encountered " <>
        "#{num} (#{v}) after having previously subtracted #{delta} " <>
        "(in #{delta_num})"}}
  end
end
lib/roman/validators/sequence.ex
0.859339
0.573111
sequence.ex
starcoder
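A sketch of the validator on the decoded shape it expects, hand-built here for "MCM" (real input comes from `Roman.Decoder`):

```elixir
seq = [
  {"M", %{value: 1000}},
  {"CM", %{value: 900, delta: 100}}
]

{:ok, ^seq} = Roman.Validators.Sequence.validate(seq)

# out of order: a smaller numeral before a larger one fails
{:error, {:sequence_increasing, _msg}} =
  Roman.Validators.Sequence.validate([{"I", %{value: 1}}, {"M", %{value: 1000}}])
```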
defmodule Survey.Handler do @moduledoc """ Handles HTTP requests. """ @pages_path Path.expand("pages", File.cwd!) import Survey.Plugins, only: [rewrite_path: 1, log: 1, track: 1] import Survey.Parser, only: [parse: 1] import Survey.FileHandler, only: [handle_file: 2] import Survey.View, only: [render: 3] alias Survey.Conv alias Survey.BearController alias Survey.FourOhFourCounter, as: Counter @doc """ Transforms the request into a response. """ def handle(request) do request |> parse |> rewrite_path |> log |> route |> track |> put_content_length |> format_response end def route(%Conv{method: "GET", path: "/404s"} = conv) do counts = Counter.get_counts() %Conv{ conv | status: 200, resp_body: inspect counts } end def route(%Conv{method: "POST", path: "/pledges"} = conv) do Survey.PledgeController.create(conv, conv.params) end def route(%Conv{method: "GET", path: "/pledges"} = conv) do Survey.PledgeController.index(conv) end def route(%Conv{method: "GET", path: "/pledges/new"} = conv) do Survey.PledgeController.new(conv) end def route(%Conv{ method: "GET", path: "/sensors" } = conv) do sensor_data = Survey.SensorServer.get_sensor_data() render(conv, "sensors.eex", snapshots: sensor_data.snapshots, location: sensor_data.location) end def route(%Conv{method: "GET", path: "/kaboom" }) do raise "Kaboom!" end def route(%Conv{method: "GET", path: "/hibernate/" <> time } = conv) do time |> String.to_integer |> :timer.sleep %Conv{ conv | status: 200, resp_body: "Awake!" } end def route(%Conv{method: "DELETE", path: "/bears/" <> id} = conv) do params = Map.put(conv.params, "id", id) BearController.delete(conv, params) end def route(%Conv{method: "GET", path: "/wildlife"} = conv) do %Conv{conv | path: "/wildthings"} |> route end def route(%Conv{method: "GET", path: "/wildthings"} = conv) do %Conv{ conv | status: 200, resp_body: "Bears, Lions, Tigers" } end def route(%Conv{method: "GET", path: "/bears"} = conv) do BearController.index(conv) end def route(%Conv{method: "GET", path: "/api/bears"} = conv) do Survey.Api.BearController.index(conv) end def route(%Conv{method: "POST", path: "/api/bears"} = conv) do Survey.Api.BearController.create(conv, conv.params) end def route(%Conv{method: "POST", path: "/bears"} = conv) do BearController.create(conv, conv.params) end def route(%Conv{method: "GET", path: "/about"} = conv) do @pages_path |> Path.join("about.html") |> File.read |> handle_file(conv) end def route(%Conv{method: "GET", path: "/bears/new"} = conv) do @pages_path |> Path.join("form.html") |> File.read |> handle_file(conv) end def route(%Conv{method: "GET", path: "/bears/" <> id} = conv) do params = Map.put(conv.params, "id", id) BearController.show(conv, params) end def route(%Conv{method: "GET", path: "/pages/" <> name} = conv) do IO.inspect(name) @pages_path |> Path.join("#{name}.md") |> File.read |> handle_file(conv) |> markdown_to_html end def route(%Conv{} = conv) do %Conv{ conv | status: 404, resp_body: "No #{conv.path} here!" 
} end defp markdown_to_html(%Conv{status: 200} = conv) do %{ conv | resp_body: Earmark.as_html!(conv.resp_body) } end defp markdown_to_html(%Conv{} = conv), do: conv def put_content_length(%Conv{} = conv) do resp_headers = Map.put(conv.resp_headers, "Content-Length", byte_size(conv.resp_body)) %Conv{ conv | resp_headers: resp_headers} end def format_response(%Conv{} = conv) do """ HTTP/1.1 #{Conv.full_status(conv)}\r #{format_response_headers(conv)} \r #{conv.resp_body} """ end defp format_response_headers(conv) do Enum.map(conv.resp_headers, fn({key, value}) -> "#{key}: #{value}\r" end) |> Enum.sort |> Enum.reverse |> Enum.join("\n") end end
survey/lib/survey/handler.ex
0.725551
0.457682
handler.ex
starcoder
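Driving the handler end to end with a raw request string (a sketch; it assumes the request-string format that `Survey.Parser.parse/1`, not shown here, accepts):

```elixir
request = """
GET /wildthings HTTP/1.1\r
Host: example.com\r
User-Agent: ExampleBrowser/1.0\r
Accept: */*\r
\r
"""

response = Survey.Handler.handle(request)
# expected to start with "HTTP/1.1 200 OK" and include "Bears, Lions, Tigers"
```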
defmodule Abbr.Mnesia.Local do
  @moduledoc """
  Access to underlying Mnesia stored data.
  """

  alias Abbr.Mnesia.Url, as: Table
  alias Abbr.Url
  alias Memento.Query

  require Logger

  @behaviour Abbr.Cache

  @doc """
  Retrieves stored shortened URL.

  There are a few ways to do this.
  The usual would be to just `Query.read(Table, short)`.
  But, that doesn't work in the face of network splits, since the table might
  not be accessible during cluster healing. So, we need to wait for the table.

  One option, the simplest one, is just to wait for it every time:

  ```
  defp lookup(short) do
    lookup = fn -> Query.read(Table, short) end

    case execute(lookup, true) do
      {:ok, nil} -> nil
      {:ok, data} -> struct(Url, Map.from_struct(data))
      {:error, _reason} -> nil
    end
  end
  ```

  But, that does degrade the performance a bit.

  Another idea is to use dirty reading:

  ```
  def lookup(short) do
    fun = fn -> :mnesia.dirty_read(Table, short) end

    case execute_dirty_with_table(fun) do
      [{Table, short, original}] -> %Url{short: short, original: original}
      [] -> nil
    end
  catch
    reason ->
      Logger.error("Could not read data for \#{short}, reason: \#{inspect(reason)}")
      nil
  end
  ```

  This of course works, but I couldn't measure any meaningful performance gain.
  It introduces the "dirty" usage of Mnesia, with no apparent gain.
  Hence, decided to stay with the transaction approach.
  """
  @spec lookup(Url.short()) :: Url.t() | nil
  def lookup(short) do
    lookup = fn -> Query.read(Table, short) end

    case execute_with_table(lookup) do
      {:ok, nil} ->
        nil

      {:ok, data} ->
        struct(Url, Map.from_struct(data))

      {:error, reason} ->
        Logger.error("Could not read data for #{short}, reason: #{inspect(reason)}")
        nil
    end
  end

  @spec save(Url.t()) :: :ok | :error
  def save(url) do
    save = fn ->
      Table
      |> struct(Map.from_struct(url))
      |> Query.write()
    end

    case execute_with_table(save) do
      {:ok, _} ->
        :ok

      {:error, reason} ->
        Logger.error("Failed to save #{inspect(url)}, reason: #{inspect(reason)}")
        :error
    end
  end

  defp execute_with_table(fun) do
    case execute(fun) do
      {:error, {:no_exists, Table}} -> execute(fun, true)
      result -> result
    end
  end

  defp execute(fun, wait_for_table \\ false) do
    Memento.transaction(fn ->
      if wait_for_table, do: Memento.wait([Table])
      fun.()
    end)
  end

  @doc """
  Exports the entire locally stored data.

  There are a few "in transaction" ways to achieve the same.
  Simplest being just `&Memento.Query.all/1` or something like:

  ```
  {:ok, data} =
    Memento.transaction(fn ->
      :mnesia.foldl(fn record, acc -> [record | acc] end, [], Table)
    end)
  ```

  But, both export and merge need to be fast, to sync the nodes as soon as
  possible. Hence, decided to use the dirty variant of the match idea.
  """
  @spec export :: list(any())
  def export do
    Table
    |> :mnesia.dirty_match_object({:_, :_, :_})
    |> Enum.map(fn {_, key, value} -> %Url{short: key, original: value} end)
  end

  @doc """
  Merges the supplied URL list to existing local data.

  As with `&export/0`, there were a few "in transaction" ways to do this, e.g.

  ```
  Memento.transaction(fn ->
    for url <- list do
      :ok = save(url)
    end
  end)
  ```

  But, again, for performance reasons, decided to go with the dirty variant.
""" @spec merge(list(any())) :: :ok def merge(list) do :mnesia.ets(fn -> Enum.each(list, &save_dirty/1) end) :ok end defp save_dirty(%{short: short, original: original}) do execute_dirty_with_table(fn -> :ok = :mnesia.dirty_write({Table, short, original}) end) end defp execute_dirty_with_table(fun) do execute_dirty(fun) rescue e in MatchError -> case e.term do {:EXIT, {:aborted, {:node_not_running, _}}} -> execute_dirty(fun, true) _ -> raise(e) end catch :exit, {:aborted, {:no_exists, Table}} -> execute_dirty(fun, true) end defp execute_dirty(fun, wait_for_table \\ false) do if wait_for_table, do: Memento.wait([Table]) fun.() end end
lib/abbr/mnesia/local.ex
0.807195
0.90053
local.ex
starcoder
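A save/lookup round trip sketch (assumes the Mnesia schema and the `Abbr.Mnesia.Url` table already exist on a started node):

```elixir
url = %Abbr.Url{short: "abc123", original: "https://example.test/some/long/path"}

:ok = Abbr.Mnesia.Local.save(url)
^url = Abbr.Mnesia.Local.lookup("abc123")
nil = Abbr.Mnesia.Local.lookup("missing")
```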
defmodule Urn do @moduledoc """ Documentation for Urn. ## Examples ### parse iex> Urn.parse("urn:mycoll:143") %Urn.Schema{collection: "mycoll", identifier: "143", namespace: "urn"} ### verify iex> Urn.verify("urn:mycoll:143", "urn:mycoll:143") true ### verify_namespace iex> Urn.verify_namespace("urn:mycoll:143", "urn") true ### verify_collection iex> Urn.verify_collection("urn:mycoll:143", "mycoll") true """ defmodule Schema do @moduledoc """ URN Schema Follows the format identified [here](http://philcalcado.com/2017/03/22/pattern_using_seudo-uris_with_microservices.html#creating-a-good-enough-puri-spec) ## Example: "namespace:collection:identifier" ```elixir %Schema{ namespace: "namespace", collection: "collection", identifier: "identifier" } ``` """ defstruct namespace: nil, collection: nil, identifier: nil end def parse([ namespace, collection, identifier ]) do %Schema{ namespace: namespace, collection: collection, identifier: identifier, } end def parse(str) when is_binary(str) do parse(String.split(str, ":")) end def parse (str) do raise "A valid URN is required in this format: namespace:collection:identifier, received: #{str}" end def verify(a, a), do: true def verify(a, b) do raise "Validation failed: #{a} does not match #{b}" end def verify_namespace(str, namespace) when is_binary(str) do parsed = parse str verify_namespace parsed, namespace end def verify_namespace(%Schema{ namespace: namespace }, nsp) when namespace == nsp, do: true def verify_namespace(%Schema{ namespace: namespace }, nsp) do raise "Validation failed: #{namespace} does not match #{nsp}" end def verify_collection(str, namespace) when is_binary(str) do parsed = parse str verify_collection parsed, namespace end def verify_collection(%Schema{ collection: collection }, col) when collection == col, do: true def verify_collection(%Schema{ collection: collection }, col) do raise "Validation failed: #{collection} does not match #{col}" end end
lib/urn.ex
0.754734
0.642489
urn.ex
starcoder
defmodule Jsox.Encoder.Helper do
  @moduledoc """
  A helper module for the encoding.
  """

  import Jsox.Encoder

  use Bitwise

  @compile {:inline, unicode: 1, _map: 1, _map_item: 1, _collection: 1, _collection_item: 1}

  @escape_map %{
    ?\\ => '\\\\',
    ?\" => '\\"',
    ?\n => '\\n',
    ?\r => '\\r',
    ?\b => '\\b',
    ?\f => '\\f',
    ?\t => '\\t',
    ?\/ => '\\/'
  }

  @spec escape(String.t) :: iolist()
  def escape(str), do: [?", escape(str, []), ?"]

  defp escape("", chars), do: Enum.reverse(chars)

  for {char, seq} <- Map.to_list(@escape_map) do
    defp escape(<<unquote(char)>> <> data, chars) do
      escape(data, [unquote(seq) | chars])
    end
  end

  defp escape(<<char>> <> data, chars) when char <= 0x1F or char == 0x7F do
    escape(data, [unicode(char) | chars])
  end

  defp escape(<<char::utf8>> <> data, chars) when char in 0x80..0x9F do
    escape(data, [unicode(char) | chars])
  end

  defp escape(<<char::utf8>> <> data, chars) when char in 0xA0..0xFFFF do
    escape(data, [unicode(char) | chars])
  end

  defp escape(<<char::utf8>> <> data, chars) when char > 0xFFFF do
    code = char - 0x10000
    esc = [unicode(0xD800 ||| (code >>> 10)), unicode(0xDC00 ||| (code &&& 0x3FF))]
    escape(data, [esc | chars])
  end

  defp escape(<<char>> <> data, chars) do
    escape(data, [char | chars])
  end

  defp unicode(char) do
    code = Integer.to_charlist(char, 16)

    case length(code) do
      1 -> ["\\u000", code]
      2 -> ["\\u00", code]
      3 -> ["\\u0", code]
      4 -> ["\\u", code]
    end
  end

  @spec list(list) :: iolist
  # an empty list encodes to "[]"; without this clause the clauses below
  # raise a FunctionClauseError
  def list([]), do: '[]'
  def list([item]), do: [?[, to_json(item, iodata: true), ?]]
  def list([head | tail]), do: [?[, list(tail, [?,, to_json(head, iodata: true)]), ?]]

  defp list([item], acc), do: Enum.reverse([to_json(item, iodata: true) | acc])
  defp list([head | tail], acc), do: list(tail, [[to_json(head, iodata: true), ?,] | acc])

  @spec map(map) :: iodata
  # an empty map encodes to "{}"; tl/1 on the empty item list would raise
  def map(map) when map_size(map) == 0, do: '{}'
  def map(map), do: [?{, tl(_map(map)), ?}]

  defp _map(map), do: map |> Map.to_list |> Enum.flat_map(&_map_item/1)

  defp _map_item({key, value}), do: [?,, escape(to_string key), ?:, to_json(value, iodata: true)]

  @spec collection(Enum.t) :: iodata
  # guard against empty collections for the same reason as map/1 above
  def collection(collection) do
    case _collection(collection) do
      [] -> '[]'
      [_comma | items] -> [?[, items, ?]]
    end
  end

  defp _collection(collection), do: Enum.flat_map(collection, &_collection_item/1)

  defp _collection_item(item), do: [?,, to_json(item, iodata: true)]

  @spec struct(struct()) :: iodata()
  def struct(struct), do: struct |> Map.from_struct |> map
end
lib/jsox/encoder/helper.ex
0.58948
0.430506
helper.ex
starcoder
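Assuming the helper compiles inside the Jsox project, `escape/1` returns iodata wrapped in quotes, which can be flattened for inspection:

```elixir
# escape/1 builds an iolist; convert it to a binary to see the JSON string.
iodata = Jsox.Encoder.Helper.escape("say \"hi\"\n")
IO.iodata_to_binary(iodata)
#=> "\"say \\\"hi\\\"\\n\""
```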
defmodule Zipper do alias Zipper, as: Z alias BinTree, as: BT defstruct [:focus, :path] @doc """ Get a zipper focused on the root node. """ @spec from_tree(BinTree.t()) :: Zipper.t() def from_tree(bin_tree), do: %Z{focus: bin_tree, path: []} @doc """ Get the complete tree from a zipper. """ @spec to_tree(Zipper.t()) :: BinTree.t() def to_tree(%Z{focus: focus, path: []}), do: focus def to_tree(zipper), do: zipper |> up() |> to_tree() @doc """ Get the value of the focus node. """ @spec value(Zipper.t()) :: any def value(%Z{focus: %BT{value: value}}), do: value @doc """ Get the left child of the focus node, if any. """ @spec left(Zipper.t()) :: Zipper.t() | nil def left(%Z{focus: %BT{left: nil}}), do: nil def left(%Z{focus: focus = %BT{left: left}, path: path}), do: %Z{focus: left, path: [{:left, focus} | path]} @doc """ Get the right child of the focus node, if any. """ @spec right(Zipper.t()) :: Zipper.t() | nil def right(%Z{focus: %BT{right: nil}}), do: nil def right(%Z{focus: focus = %BT{right: right}, path: path}), do: %Z{focus: right, path: [{:right, focus} | path]} @doc """ Get the parent of the focus node, if any. """ @spec up(Zipper.t()) :: Zipper.t() | nil def up(%Z{path: []}), do: nil def up(%Z{focus: focus, path: [{:left, node} | path]}), do: %Z{focus: %{node | left: focus}, path: path} def up(%Z{focus: focus, path: [{:right, node} | path]}), do: %Z{focus: %{node | right: focus}, path: path} @doc """ Set the value of the focus node. """ @spec set_value(Zipper.t(), any) :: Zipper.t() def set_value(zipper = %Z{focus: focus}, value), do: %{zipper | focus: %{focus | value: value}} @doc """ Replace the left child tree of the focus node. """ @spec set_left(Zipper.t(), BinTree.t() | nil) :: Zipper.t() def set_left(zipper = %Z{focus: focus}, left), do: %{zipper | focus: %{focus | left: left}} @doc """ Replace the right child tree of the focus node. """ @spec set_right(Zipper.t(), BinTree.t() | nil) :: Zipper.t() def set_right(zipper = %Z{focus: focus}, right), do: %{zipper | focus: %{focus | right: right}} end
elixir/zipper/lib/zipper.ex
0.862134
0.6219
zipper.ex
starcoder
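A short walk-through of the zipper above, assuming `BinTree` is a struct whose `:value`, `:left` and `:right` fields default to `nil` (which the pattern matches imply):

```elixir
tree = %BinTree{value: 1, left: %BinTree{value: 2}, right: %BinTree{value: 3}}

# Focus the root, descend left, rewrite the focused value, rebuild the tree.
tree
|> Zipper.from_tree()
|> Zipper.left()
|> Zipper.set_value(4)
|> Zipper.to_tree()
#=> %BinTree{value: 1, left: %BinTree{value: 4, ...}, right: %BinTree{value: 3, ...}}
```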
defmodule Kinesis.Buffer.Memory do
  @behaviour Kinesis.Buffer

  defstruct impl: :queue.new(), size: 0, count: 0

  @impl Kinesis.Buffer
  def new(size) when size > 0 do
    %__MODULE__{
      impl: :queue.new(),
      size: size,
      count: 0
    }
  end

  @impl Kinesis.Buffer
  def new(input, size) when size > 0 do
    valid_inps = Enum.reverse(input) |> Enum.take(size)
    inp_length = length(valid_inps)

    %__MODULE__{
      impl: :queue.from_list(valid_inps),
      size: size,
      count: inp_length
    }
  end

  @impl Kinesis.Buffer
  def size(buf) do
    buf.size
  end

  @impl Kinesis.Buffer
  def empty?(buf) do
    :queue.is_empty(buf.impl)
  end

  @impl Kinesis.Buffer
  def insert(buf, item) do
    if buf.count < buf.size do
      %{
        buf
        | impl: :queue.cons(item, buf.impl),
          count: buf.count + 1
      }
    else
      # At capacity: drop the oldest item (rear of the queue) before adding.
      new_q = :queue.drop_r(buf.impl)
      %{buf | impl: :queue.cons(item, new_q)}
    end
  end

  @impl Kinesis.Buffer
  def peek(buf) do
    case :queue.peek_r(buf.impl) do
      {:value, value} -> value
      :empty -> :empty
    end
  end

  @impl Kinesis.Buffer
  def pop(buf) do
    {peek(buf), delete(buf)}
  end

  @impl Kinesis.Buffer
  def delete(buf) do
    # The original had two identical non-zero clauses; one suffices.
    case buf.count do
      0 -> buf
      _ -> %{buf | impl: :queue.drop_r(buf.impl), count: buf.count - 1}
    end
  end

  @impl Kinesis.Buffer
  def to_list(buf) do
    :queue.reverse(buf.impl) |> :queue.to_list()
  end

  @impl Kinesis.Buffer
  def read(buf, num) when num <= 0, do: {[], buf}

  def read(buf, num) do
    # Reduce over exactly `num` iterations; the original 0..num range
    # popped one item too many.
    {items, nbuf} =
      Enum.reduce(1..num, {[], buf}, fn _i, {items, b} ->
        case pop(b) do
          {:empty, _} -> {items, b}
          {i, nb} -> {[i | items], nb}
        end
      end)

    {Enum.reverse(items), nbuf}
  end

  @impl Kinesis.Buffer
  def count(buf) do
    buf.count
  end

  defimpl Collectable, for: Kinesis.Buffer.Memory do
    def into(original) do
      collector_fun = fn
        # Return the updated buffer; the original discarded the result of
        # insert/2, making collection a no-op.
        buf, {:cont, val} -> Kinesis.Buffer.Memory.insert(buf, val)
        buf, :done -> buf
        _buf, :halt -> :ok
      end

      {original, collector_fun}
    end
  end

  defimpl Enumerable, for: Kinesis.Buffer.Memory do
    def count(buf), do: {:ok, buf.count}

    def reduce(buf, acc, fun),
      do: Enumerable.List.reduce(Kinesis.Buffer.Memory.to_list(buf), acc, fun)

    def slice(_buf), do: {:error, __MODULE__}

    # The protocol expects {:ok, boolean}, not a bare boolean.
    def member?(buf, item),
      do: {:ok, Enum.member?(Kinesis.Buffer.Memory.to_list(buf), item)}
  end

  defimpl Inspect, for: Kinesis.Buffer.Memory do
    import Inspect.Algebra

    def inspect(buf, opts) do
      concat(["#MemoryBuffer<", to_doc(Kinesis.Buffer.Memory.to_list(buf), opts), ">"])
    end
  end
end
lib/kinesis/buffer/memory.ex
0.713132
0.584153
memory.ex
starcoder
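The buffer above behaves as a fixed-capacity ring: once full, `insert/2` drops the oldest item. A sketch, relying on `read/2` popping exactly `num` items as fixed above:

```elixir
buf =
  Enum.reduce(1..4, Kinesis.Buffer.Memory.new(3), fn i, b ->
    Kinesis.Buffer.Memory.insert(b, i)
  end)

# Inserting 4 into a full capacity-3 buffer evicted the oldest item (1).
Kinesis.Buffer.Memory.to_list(buf)
#=> [2, 3, 4]

{items, _rest} = Kinesis.Buffer.Memory.read(buf, 2)
items
#=> [2, 3]
```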
defmodule TimeZoneInfo.DataPersistence.Priv do @moduledoc """ An implementation for the behaviour `TimeZoneInfo.DataPersistence` to persist data in the `priv` dir. """ @behaviour TimeZoneInfo.DataPersistence alias File.Stat alias TimeZoneInfo.ExternalTermFormat @impl true def put(data) do with {:ok, path} <- fetch_path(), {:ok, data} <- ExternalTermFormat.encode(data) do File.write(path, data) end end @impl true def fetch do with {:ok, path} <- fetch_path(), {:ok, data} <- File.read(path) do ExternalTermFormat.decode(data) end end @impl true def checksum do with {:ok, path} <- fetch_path(), {:ok, data} <- File.read(path) do ExternalTermFormat.checksum(data) end end @impl true def fetch_last_update do with {:ok, path} <- fetch_path(), {:ok, %Stat{mtime: mtime}} <- File.stat(path, time: :posix) do {:ok, mtime} end end @impl true def put_last_update(time) do with {:ok, path} <- fetch_path() do case File.exists?(path) do true -> File.touch(path, time) false -> {:error, :enoent} end end end @impl true def info do with {:ok, path} <- fetch_path(), {:ok, stat} <- File.stat(path), {:ok, data} <- File.read(path) do %{ stat: stat, path: path, checksum: ExternalTermFormat.checksum(data) } end end @spec fetch_path :: {:ok, Path.t()} | {:error, {:invalid_config, Keyword.key() | [Keyword.key()]}} defp fetch_path do with {:ok, path} <- fetch_env(:path) do path = :time_zone_info |> :code.priv_dir() |> Path.join(path) {:ok, path} end end defp fetch_env(:path) do with {:priv, {:ok, priv}} <- {:priv, Application.fetch_env(:time_zone_info, :priv)}, {:path, {:ok, path}} when is_binary(path) <- {:path, Keyword.fetch(priv, :path)} do {:ok, path} else {:priv, :error} -> {:error, {:invalid_config, :priv}} {:path, :error} -> {:error, {:invalid_config, [:priv, :path]}} {:path, {:ok, path}} -> {:error, {:invalid_config, [priv: [path: path]]}} end end end
lib/time_zone_info/data_persistence/priv.ex
0.835986
0.474449
priv.ex
starcoder
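`fetch_path/0` above expects a `:priv` keyword with a binary `:path` in the `:time_zone_info` application env, resolved under the app's `priv` directory via `:code.priv_dir/1`. A minimal config sketch; the file name is an example:

```elixir
# config/config.exs -- "data.etf" ends up at priv/data.etf of :time_zone_info
import Config

config :time_zone_info,
  priv: [path: "data.etf"]
```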
defmodule Ash.Dsl.Transformer do
  @moduledoc """
  A transformer manipulates and/or validates the entire DSL state of a resource.

  Its `transform/2` callback takes a `map`, which is just the values/configurations at each point
  of the DSL. Don't manipulate it directly, if possible, instead use functions like
  `get_entities/3` and `replace_entity/5` to manipulate it.

  Use the `after?/1` and `before?/1` callbacks to ensure that your transformer
  runs either before or after some other transformer.

  The pattern for requesting information from other modules that use the DSL and are also
  currently compiling has not yet been determined. If you have that requirement you will need
  extra utilities to ensure that some other DSL based module has either completed or reached
  a certain point in its transformers. These utilities have not yet been written.
  """
  @callback transform(module, map) :: {:ok, map} | {:error, term} | :halt
  @callback before?(module) :: boolean
  @callback after?(module) :: boolean

  defmacro __using__(_) do
    quote do
      @behaviour Ash.Dsl.Transformer

      def before?(_), do: false
      def after?(_), do: false

      defoverridable before?: 1, after?: 1
    end
  end

  def persist(dsl, key, value) do
    Map.update(dsl, :persist, %{key => value}, &Map.put(&1, key, value))
  end

  def build_entity(extension, path, name, opts) do
    do_build_entity(extension.sections(), path, name, opts)
  end

  defp do_build_entity(sections, [section_name], name, opts) do
    section = Enum.find(sections, &(&1.name == section_name))
    entity = Enum.find(section.entities, &(&1.name == name))

    case NimbleOptions.validate(opts, entity.schema) do
      {:ok, opts} -> {:ok, struct(entity.target, opts)}
      {:error, error} -> {:error, error}
    end
  end

  defp do_build_entity(sections, [section_name | rest], name, opts) do
    section = Enum.find(sections, &(&1.name == section_name))
    do_build_entity(section.sections, rest, name, opts)
  end

  def add_entity(dsl_state, path, entity) do
    Map.update(dsl_state, path, %{entities: [entity], opts: []}, fn config ->
      Map.update(config, :entities, [entity], fn entities -> [entity | entities] end)
    end)
  end

  def get_entities(dsl_state, path) do
    dsl_state
    |> Map.get(path, %{entities: []})
    |> Map.get(:entities, [])
  end

  def get_option(dsl_state, path, option) do
    dsl_state
    |> Map.get(path, %{opts: []})
    |> Map.get(:opts)
    |> Kernel.||([])
    |> Keyword.get(option)
  end

  def replace_entity(dsl_state, path, replacement, matcher) do
    Map.update(dsl_state, path, %{entities: [replacement], opts: []}, fn config ->
      Map.update(config, :entities, [replacement], fn entities ->
        replace_match(entities, replacement, matcher)
      end)
    end)
  end

  defp replace_match(entities, replacement, matcher) do
    Enum.map(entities, fn entity ->
      if matcher.(entity) do
        replacement
      else
        entity
      end
    end)
  end

  def sort(transformers) do
    Enum.reduce(transformers, [], fn transformer, list ->
      put_transformer_in(list, transformer)
    end)
  end

  defp put_transformer_in([], transformer), do: [transformer]

  defp put_transformer_in([first | rest] = remaining, transformer) do
    if transformer.before?(first) or first.after?(transformer) do
      [transformer | remaining]
    else
      [first | put_transformer_in(rest, transformer)]
    end
  end
end
lib/ash/dsl/transformer.ex
0.786991
0.627566
transformer.ex
starcoder
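A hedged sketch of a custom transformer built on the module above. The `[:attributes]` section path, the `ValidateAttributes` module, and the entity's `:name` field are all hypothetical; only the `Transformer` helper calls come from the source:

```elixir
defmodule MyApp.Transformers.DefaultName do
  use Ash.Dsl.Transformer

  alias Ash.Dsl.Transformer

  # Run after a (hypothetical) validation transformer.
  def after?(MyApp.Transformers.ValidateAttributes), do: true
  def after?(_), do: false

  def transform(_resource, dsl_state) do
    case Transformer.get_entities(dsl_state, [:attributes]) do
      [] ->
        {:ok, dsl_state}

      [first | _] ->
        # Swap one entity out, matching it by identity.
        updated = %{first | name: first.name || :id}
        {:ok, Transformer.replace_entity(dsl_state, [:attributes], updated, &(&1 == first))}
    end
  end
end
```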
defmodule PhoenixDSK3LO.Lms do
  # Purpose: Abstraction, Repo-like methods for accessing Blackboard Learn.
  # Instead of a database, we have the Blackboard Learn LMS.

  require Logger

  @doc """
  Get all the dataSources as a list of Learn.DSK structs.
  This behavior is analogous to a Repo.
  2017.04.18 - Can't generalize here because we are calling the particular
  get method for the given structure type. Hence there is an all method for
  Learn.Dsk, and another all method for Learn.User, etc.
  Example Usage:
  iex(1)> fqdn = "bd-partner-a-original.blackboard.com"
  "bd-partner-a-original.blackboard.com"
  iex(2)> PhoenixDSK3LO.Lms.all(fqdn, Learn.Dsk)
  {:ok,
   [%Learn.Dsk{description: "Internal data source used for associating records
     that are created for use by the Bb system.", externalId: "INTERNAL", id: "_1_1"},
    %Learn.Dsk{description: "System data source used for associating records
     that are created via web browser.", externalId: "SYSTEM", id: "_2_1"}, ...
  """
  def all(fqdn, Learn.Dsk) do
    {:ok, dskResponseMap, _dskMapUnused} = LearnRestClient.get_data_sources(fqdn)
    {:ok, dskList} = LearnRestUtil.listofmaps_to_structs(Learn.Dsk, dskResponseMap["results"])
    {:ok, dskList}
  end

  @doc """
  Get all the users as a list of Learn.User structs.
  This behavior is analogous to a Repo.
  """
  def all(fqdn, Learn.User) do
    {:ok, usersResponseMap} = LearnRestClient.get_users(fqdn)
    {:ok, userList} = LearnRestUtil.listofmaps_to_structs(Learn.User, usersResponseMap["results"])
    {:ok, userList}
  end

  @doc """
  Get all the courses as a list of Learn.Course structs.
  This behavior is analogous to a Repo.
  """
  def all(fqdn, Learn.Course) do
    {:ok, coursesResponseMap} = LearnRestClient.get_courses(fqdn)
    {:ok, courseList} = LearnRestUtil.listofmaps_to_structs(Learn.Course, coursesResponseMap["results"])
    {:ok, courseList}
  end

  def all(fqdn, Learn.Dsk, "allpages") do
    {:ok, %Learn.DskResults{paging: paging, results: dsk_maps}} = get(fqdn, Learn.DskResults)
    # TODO 2018.02.14!!
    # dsk_maps is a list of maps
    dsk_maps = all_paging(fqdn, Learn.Dsk, paging, dsk_maps)
    {:ok, dskList} = LearnRestUtil.listofmaps_to_structs(Learn.Dsk, dsk_maps)
    {:ok, dskList}
  end

  @doc """
  Get all the memberships as a list of Learn.Membership structs.
  This behavior is analogous to a Repo.
  iex(4)> PhoenixDSK3LO.Lms.all(fqdn, Learn.Membership, courseId)
  """
  def all(fqdn, Learn.Membership, courseId) do
    {:ok, %Learn.MembershipResults{paging: paging, results: membership_maps}} =
      get(fqdn, Learn.MembershipResults, courseId)

    # membership_maps is a list of maps
    membership_maps = all_paging(fqdn, Learn.Membership, paging, membership_maps)
    {:ok, memberships} = LearnRestUtil.listofmaps_to_structs(Learn.Membership, membership_maps)
    memberships_with_user = Enum.map(memberships, &fetch_user_of_membership(fqdn, &1))
    {:ok, memberships_with_user}
  end

  def fetch_user_of_membership(fqdn, membership) do
    user_id = membership.userId
    {:ok, user_response} = LearnRestClient.get_user(fqdn, user_id)
    user = LearnRestUtil.to_struct(Learn.User, user_response)
    %Learn.Membership{membership | user: user}
  end

  @doc """
  Recursive all_paging, required because `while` doesn't exist in Elixir;
  any variable we would loop on with a `while` is immutable.
  """
  def all_paging(_fqdn, Learn.Membership, paging, membership_maps) when paging == nil do
    membership_maps
  end

  def all_paging(fqdn, Learn.Membership, paging, membership_maps_in) do
    {:ok, %Learn.MembershipResults{paging: paging, results: membership_maps}} =
      get(fqdn, Learn.MembershipResults, "ignored", paging)

    all_paging(fqdn, Learn.Membership, paging, Enum.concat(membership_maps_in, membership_maps))
  end

  def all_paging(_fqdn, Learn.Dsk, paging, dsk_maps) when paging == nil do
    dsk_maps
  end

  def all_paging(fqdn, Learn.Dsk, paging, dsk_maps_in) do
    {:ok, %Learn.DskResults{paging: paging, results: dsk_maps}} = get(fqdn, Learn.DskResults, paging)
    all_paging(fqdn, Learn.Dsk, paging, Enum.concat(dsk_maps_in, dsk_maps))
  end

  # Elixir warns us if we don't group all of the gets with the same arity
  # together, so the get/2, get/3 and get/4 clauses are kept contiguous below.

  def get(fqdn, Learn.DskResults) do
    {:ok, dsk_response, _dskMapUnused} = LearnRestClient.get_data_sources(fqdn)
    dsk_results = LearnRestUtil.to_struct(Learn.DskResults, dsk_response)
    {:ok, dsk_results}
  end

  @doc """
  Get a user with the given userName. userName is in the format mkauffman.
  This behavior is analogous to a Repo.
  """
  def get(fqdn, Learn.User, userName) do
    {:ok, userResponse} = LearnRestClient.get_user_with_userName(fqdn, userName)
    user = LearnRestUtil.to_struct(Learn.User, userResponse)
    {:ok, user}
  end

  @doc """
  Get a course with the given courseId. courseId is in the format abc-123,
  no spaces! Learn does not allow spaces in a courseId.
  This behavior is analogous to a Repo.
  """
  def get(fqdn, Learn.Course, courseId) do
    {:ok, courseResponse} = LearnRestClient.get_course_with_courseId(fqdn, courseId)
    course = LearnRestUtil.to_struct(Learn.Course, courseResponse)
    {:ok, course}
  end

  @doc """
  Get the memberships for a given courseId. courseId is in the format abc-123,
  no spaces! Learn does not allow spaces in a courseId.
  """
  def get(fqdn, Learn.MembershipResults, courseId) do
    {:ok, membership_response} = LearnRestClient.get_memberships_for_courseId(fqdn, courseId)
    membership_results = LearnRestUtil.to_struct(Learn.MembershipResults, membership_response)
    {:ok, membership_results}
  end

  def get(fqdn, Learn.DskResults, paging) do
    {:ok, dsk_response} = LearnRestClient.get_nextpage_of_dsks(fqdn, paging["nextPage"])
    dsk_results = LearnRestUtil.to_struct(Learn.DskResults, dsk_response)
    {:ok, dsk_results}
  end

  def get(fqdn, Learn.Membership, courseId, userName) do
    {:ok, membershipResponse} = LearnRestClient.get_membership(fqdn, courseId, userName)
    membership = LearnRestUtil.to_struct(Learn.Membership, membershipResponse)
    Logger.info "Got membership for #{courseId} #{userName}"
    {:ok, membership}
  end

  @doc """
  Get the memberships using the paging link given from the prior get request.
  The courseId argument is ignored. Learn does not allow spaces in a courseId.
  """
  def get(fqdn, Learn.MembershipResults, _courseId, paging) do
    {:ok, membership_response} = LearnRestClient.get_nextpage_of_memberships(fqdn, paging["nextPage"])
    membership_results = LearnRestUtil.to_struct(Learn.MembershipResults, membership_response)
    {:ok, membership_results}
  end
end
lib/phoenixDSK/lms.ex
0.576423
0.536495
lms.ex
starcoder
defmodule Donatebox.Donations do
  @moduledoc """
  The Donations context.
  """

  import Ecto.Query, warn: false

  alias Donatebox.Repo
  alias Donatebox.History
  alias Donatebox.Donations.Pendingdonation

  @doc """
  Returns the list of pendingdonations.

  ## Examples

      iex> list_pendingdonations()
      [%Pendingdonation{}, ...]

  """
  def list_pendingdonations do
    Repo.all(Pendingdonation)
  end

  @doc """
  Gets a single pendingdonation.

  Raises `Ecto.NoResultsError` if the Pendingdonation does not exist.

  ## Examples

      iex> get_pendingdonation!(123)
      %Pendingdonation{}

      iex> get_pendingdonation!(456)
      ** (Ecto.NoResultsError)

  """
  def get_pendingdonation!(id), do: Repo.get!(Pendingdonation, id)

  @doc """
  Creates a pendingdonation.

  ## Examples

      iex> create_pendingdonation(%{field: value})
      {:ok, %Pendingdonation{}}

      iex> create_pendingdonation(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_pendingdonation(attrs \\ %{}) do
    %Pendingdonation{}
    |> Pendingdonation.changeset(attrs)
    |> Repo.insert()
  end

  def create_history(admin, c) do
    donation = %{
      donorname: c.username,
      title: c.title,
      type: c.type,
      quantity: c.quantity,
      req_date: c.inserted_at,
      accept_date: DateTime.utc_now(),
      meetlocation: c.location,
      adminname: admin.username,
      admincontact: "123",
      donationstatus: "Accepted"
    }

    %History{}
    |> History.changeset(donation)
    |> Repo.insert()
  end

  @doc """
  Updates a pendingdonation.

  ## Examples

      iex> update_pendingdonation(pendingdonation, %{field: new_value})
      {:ok, %Pendingdonation{}}

      iex> update_pendingdonation(pendingdonation, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_pendingdonation(%Pendingdonation{} = pendingdonation, attrs) do
    pendingdonation
    |> Pendingdonation.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a Pendingdonation.

  ## Examples

      iex> delete_pendingdonation(pendingdonation)
      {:ok, %Pendingdonation{}}

      iex> delete_pendingdonation(pendingdonation)
      {:error, %Ecto.Changeset{}}

  """
  def delete_pendingdonation(%Pendingdonation{} = pendingdonation) do
    Repo.delete(pendingdonation)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking pendingdonation changes.

  ## Examples

      iex> change_pendingdonation(pendingdonation)
      %Ecto.Changeset{source: %Pendingdonation{}}

  """
  def change_pendingdonation(%Pendingdonation{} = pendingdonation) do
    Pendingdonation.changeset(pendingdonation, %{})
  end
end
lib/donatebox/donations/donations.ex
0.724481
0.425516
donations.ex
starcoder
defmodule Stripe.Subscriptions do
  @moduledoc """
  Main API for working with Subscriptions at Stripe. Through this API you can:
  - create
  - change
  - retrieve
  - cancel
  - cancel_all
  - list all
  - count all

  Supports Connect workflow by allowing to pass in any API key explicitly
  (vs using the one from env/config).

  (API ref https://stripe.com/docs/api#subscriptions)
  """

  @endpoint "customers"

  @doc """
  Starts a subscription for the specified customer.

  ## Example

  ```
  new_sub = [
    plan: plan_id,
    metadata: [
    ...
    ]
  ]
  {:ok, sub} = Stripe.Subscriptions.create customer_id, new_sub
  ```
  """
  def create(customer_id, opts) do
    create customer_id, opts, Stripe.config_or_env_key
  end

  @doc """
  Starts a subscription for the specified customer using given api key.

  ## Example

  ```
  new_sub = [
    plan: plan_id,
    metadata: [
    ...
    ]
  ]
  {:ok, sub} = Stripe.Subscriptions.create customer_id, opts, key
  ```
  """
  def create(customer_id, opts, key) do
    Stripe.make_request_with_key(:post, "#{@endpoint}/#{customer_id}/subscriptions", key, opts)
    |> Stripe.Util.handle_stripe_response
  end

  @doc """
  Returns a subscription; customer_id and subscription_id are required.

  ## Example

  ```
  {:ok, customer} = Stripe.Customers.get_subscription "customer_id", "subscription_id"
  ```
  """
  def get(customer_id, sub_id) do
    get customer_id, sub_id, Stripe.config_or_env_key
  end

  @doc """
  Returns a subscription using given api key; customer_id and subscription_id are required.

  ## Example

  ```
  {:ok, sub} = Stripe.Subscriptions.get "customer_id", "subscription_id", key
  ```
  """
  def get(customer_id, sub_id, key) do
    Stripe.make_request_with_key(:get, "#{@endpoint}/#{customer_id}/subscriptions/#{sub_id}", key)
    |> Stripe.Util.handle_stripe_response
  end

  @doc """
  Changes a customer's subscription (plan, description, etc - see Stripe API for acceptable options).
  Customer ID and Subscription ID are required for this.

  ## Example

  ```
  Stripe.Subscriptions.change "customer_id", "subscription_id", "plan_id"
  ```
  """
  def change(customer_id, sub_id, plan_id) do
    change customer_id, sub_id, plan_id, Stripe.config_or_env_key
  end

  @doc """
  Changes a customer's subscription (plan, description, etc - see Stripe API for acceptable options).
  Customer ID and Subscription ID are required for this.
  Using a given stripe key to apply against the account associated.

  ## Example

  ```
  Stripe.Customers.change_subscription "customer_id", "subscription_id", "plan_id", key
  ```
  """
  def change(customer_id, sub_id, plan_id, key) when is_binary(plan_id) do
    change(customer_id, sub_id, [plan: plan_id], key)
  end

  @doc """
  Changes a customer's subscription using given api key (plan, description, etc - see Stripe API for acceptable options).
  Customer ID, Subscription ID, opts and api key are required for this.

  ## Example

  ```
  Stripe.Subscriptions.change "customer_id", "subscription_id", "plan_id", key
  ```
  """
  def change(customer_id, sub_id, opts, key) do
    Stripe.make_request_with_key(:post, "#{@endpoint}/#{customer_id}/subscriptions/#{sub_id}", key, opts)
    |> Stripe.Util.handle_stripe_response
  end

  @doc """
  Cancels a subscription.

  ## Example

  ```
  Stripe.Subscriptions.cancel "customer_id", "subscription_id"
  Stripe.Subscriptions.cancel "customer_id", "subscription_id", [at_period_end: true]
  ```
  """
  def cancel(customer_id, sub_id, opts \\ []) do
    cancel customer_id, sub_id, opts, Stripe.config_or_env_key
  end

  @doc """
  Cancels a subscription with given api key.
## Example ``` Stripe.Subscriptions.cancel "customer_id", "subscription_id", key ``` """ def cancel(customer_id, sub_id, opts, key) do Stripe.make_request_with_key(:delete, "#{@endpoint}/#{customer_id}/subscriptions/#{sub_id}", key, opts) |> Stripe.Util.handle_stripe_response end @doc """ Cancel all subscriptions for account. #Example ``` Stripe.Subscriptions.cancel_all customer_id ``` """ def cancel_all(customer_id,opts) do cancel_all customer_id, opts, Stripe.config_or_env_key end @doc """ Cancel all subscriptions for account using given api key. #Example ``` Stripe.Subscriptions.cancel_all customer_id, key ``` """ def cancel_all(customer_id, opts, key) do case all(customer_id, [], "", key) do {:ok, subs} -> Enum.each subs, fn sub -> cancel(customer_id, sub["id"], opts, key) end {:error, err} -> raise err end end @doc """ Changes the payment source for a subscription. #Example ``` source = [object: "card", number: "4111111111111111", exp_month: 01, exp_year: 2018, cvc: 123] Stripe.Subscriptions.change_payment_source("customer_id", "subscription_id", source) ``` """ def change_payment_source(customer_id, sub_id, source) do data = [source: source] Stripe.make_request_with_key(:post, "#{@endpoint}/#{customer_id}/subscriptions/#{sub_id}", Stripe.config_or_env_key, data) |> Stripe.Util.handle_stripe_response end @max_fetch_size 100 @doc """ List all subscriptions. ##Example ``` {:ok, subscriptions} = Stripe.Subscriptions.all customer_id ``` """ def all( customer_id, accum \\ [], starting_after \\ "") do all customer_id, accum, starting_after, Stripe.config_or_env_key end @doc """ List all subscriptions using given api key. ##Example ``` {:ok, subscriptions} = Stripe.Subscriptions.all customer_id, [], "", key ``` """ def all( customer_id, accum, starting_after, key) do case Stripe.Util.list_raw("#{@endpoint}/#{customer_id}/subscriptions", key,@max_fetch_size, starting_after) do {:ok, resp} -> case resp[:has_more] do true -> last_sub = List.last( resp[:data] ) all( customer_id, resp[:data] ++ accum, last_sub["id"], key ) false -> result = resp[:data] ++ accum {:ok, result} end end end @doc """ Count total number of subscriptions. ## Example ``` {:ok, count} = Stripe.Subscriptions.count customer_id ``` """ def count(customer_id) do count customer_id, Stripe.config_or_env_key end @doc """ Count total number of subscriptions using given api key. ## Example ``` {:ok, count} = Stripe.Subscriptions.count customer_id, key ``` """ def count(customer_id, key) do Stripe.Util.count "#{@endpoint}/#{customer_id}/subscriptions", key end end
lib/stripe/subscriptions.ex
0.82176
0.590779
subscriptions.ex
starcoder
defmodule Tirexs.HTTP do @standard_http_adapter Tirexs.HTTP.Standard @moduledoc """ A set of functions for working over HTTP. This bare-bone module provides all you need for getting things done over elasticsearch REST API. The functions designed to be handful for various possible use cases. A set of request functions such as `head/1-3`, `get/1-3`, `put/1-4`, `post/1-4` and `delete/1-3` also available with bangs functions. The bang functions raise `RuntimeError` exceptions. iex> get("unknown") {:error, 404, ... } iex> get!("unknown") ** (RuntimeError) {"error":{"root_cause": [{"type":"index_not_found_exception" ... }] The common set of allowed parameters are `path`, `params`, `uri` and `body`. iex> get("/bear_test/my_type/1") iex> get("/articles/document/1", [_source: false]) iex> put("/bear_test/my_type/1?version=2", [user: "kimchy"]) iex> put("/bear_test/my_type/1", [version: 2], [user: "kimchy"]) iex> put("/bear_test/my_type/1", [version: 2], %URI{ host: "example.com" }, [user: "kimchy"]) The `uri` parameter has a special purpose. You can use it for overriding any fields from application's environment uri (see `Tirexs.ENV.get_uri_env/0`). iex> Tirexs.get_uri_env() %URI{host: "127.0.0.1", port: 9200, scheme: "http"} iex> url("/articles/document/1") "http://127.0.0.1:9200/articles/document/1" iex> url("/articles/document/1", %URI{ port: 92, query: "_source=false" }) "http://127.0.0.1:92/articles/document/1?_source=false" A query `params` could be as a part of `path` or used as a standalone `params`. The `params` param is allowed to be a `%{}` or `[]`. iex> put("/bear_test/my_type/1?version=2", [user: "kimchy"]) iex> put("/bear_test/my_type/1", [version: 2], [user: "kimchy"]) iex> put("/bear_test/my_type/1", %{version: 2}, %{user: "kimchy"}) A request `body` is allowed to be a `Keyword` or `Map` instances or event just as `String`. """ @doc "Sends a HEAD request." def head(path, params, %URI{} = uri) when is_binary(path) do do_request(:head, url(path, params, uri)) end def head(path, %URI{} = uri) when is_binary(path) do do_request(:head, url(path, uri)) end def head(url_or_path_or_uri, params) do do_request(:head, url(url_or_path_or_uri, params)) end def head(url_or_path_or_uri) do do_request(:head, url(url_or_path_or_uri)) end @doc "Sends a HEAD request and raise an exception if something wrong." def head!(a, b, c), do: ok!(head(a, b, c)) def head!(a, b), do: ok!(head(a, b)) def head!(a), do: ok!(head(a)) @doc "Sends a GET request." def get(path, params, %URI{} = uri) when is_binary(path) do do_request(:get, url(path, params, uri)) end def get(path, %URI{} = uri) when is_binary(path) do do_request(:get, url(path, uri)) end def get(url_or_path_or_uri, params) do do_request(:get, url(url_or_path_or_uri, params)) end def get(url_or_path_or_uri) do do_request(:get, url(url_or_path_or_uri)) end @doc "Sends a GET request and raise an exception if something wrong." def get!(a, b, c), do: ok!(get(a, b, c)) def get!(a, b), do: ok!(get(a, b)) def get!(a), do: ok!(get(a)) @doc "Sends a PUT request." 
def put(path, params, %URI{} = uri, []) when is_binary(path) do do_request(:put, url(path, params, uri)) end def put(path, params, %URI{} = uri, body) when is_binary(path) and body == %{} do do_request(:put, url(path, params, uri)) end def put(path, params, %URI{} = uri, body) when is_binary(path) and is_list(body) do put(path, params, uri, encode(body)) end def put(path, params, %URI{} = uri, body) when is_binary(path) and is_map(body) do put(path, params, uri, encode(body)) end def put(path, params, %URI{} = uri, body) when is_binary(path) and is_binary(body) do do_request(:put, url(path, params, uri), body) end def put(path, %URI{} = uri, []) when is_binary(path) do do_request(:put, url(path, uri)) end def put(path, %URI{} = uri, body) when is_binary(path) and body == %{} do do_request(:put, url(path, uri)) end def put(path, %URI{} = uri, body) when is_binary(path) and is_list(body) do put(path, uri, encode(body)) end def put(path, %URI{} = uri, body) when is_binary(path) and is_map(body) do put(path, uri, encode(body)) end def put(path, %URI{} = uri, body) when is_binary(path) and is_binary(body) do do_request(:put, url(path, uri), body) end def put(path, params, %URI{} = uri) when is_binary(path) do do_request(:put, url(path, params, uri)) end def put(url_or_path_or_uri, params, []) do do_request(:put, url(url_or_path_or_uri, params)) end def put(url_or_path_or_uri, params, body) when body == %{} do do_request(:put, url(url_or_path_or_uri, params)) end def put(url_or_path_or_uri, params, body) when is_list(body) do put(url_or_path_or_uri, params, encode(body)) end def put(url_or_path_or_uri, params, body) when is_map(body) do put(url_or_path_or_uri, params, encode(body)) end def put(url_or_path_or_uri, params, body) when is_binary(body) do do_request(:put, url(url_or_path_or_uri, params), body) end def put(path, %URI{} = uri) when is_binary(path) do do_request(:put, url(path, uri)) end def put(url_or_path_or_uri, []) do do_request(:put, url(url_or_path_or_uri)) end def put(url_or_path_or_uri, body) when body == %{} do do_request(:put, url(url_or_path_or_uri)) end def put(url_or_path_or_uri, body) when is_list(body) do put(url_or_path_or_uri, encode(body)) end def put(url_or_path_or_uri, body) when is_map(body) do put(url_or_path_or_uri, encode(body)) end def put(url_or_path_or_uri, body) when is_binary(body) do do_request(:put, url(url_or_path_or_uri), body) end def put(url_or_path_or_uri, params) do do_request(:put, url(url_or_path_or_uri, params)) end def put(url_or_path_or_uri) do do_request(:put, url(url_or_path_or_uri)) end @doc "Sends a PUT request and raise an exception if something wrong." def put!(a, b, c, d), do: ok!(put(a, b, c, d)) def put!(a, b, c), do: ok!(put(a, b, c)) def put!(a, b), do: ok!(put(a, b)) def put!(a), do: ok!(put(a)) @doc "Sends a POST request." 
def post(path, params, %URI{} = uri, []) when is_binary(path) do do_request(:post, url(path, params, uri)) end def post(path, params, %URI{} = uri, body) when is_binary(path) and body == %{} do do_request(:post, url(path, params, uri)) end def post(path, params, %URI{} = uri, body) when is_binary(path) and is_list(body) do post(path, params, uri, encode(body)) end def post(path, params, %URI{} = uri, body) when is_binary(path) and is_map(body) do post(path, params, uri, encode(body)) end def post(path, params, %URI{} = uri, body) when is_binary(path) and is_binary(body) do do_request(:post, url(path, params, uri), body) end def post(path, %URI{} = uri, []) when is_binary(path) do do_request(:post, url(path, uri)) end def post(path, %URI{} = uri, body) when is_binary(path) and body == %{} do do_request(:post, url(path, uri)) end def post(path, %URI{} = uri, body) when is_binary(path) and is_list(body) do post(path, uri, encode(body)) end def post(path, %URI{} = uri, body) when is_binary(path) and is_map(body) do post(path, uri, encode(body)) end def post(path, %URI{} = uri, body) when is_binary(path) and is_binary(body) do do_request(:post, url(path, uri), body) end def post(path, params, %URI{} = uri) when is_binary(path) do do_request(:post, url(path, params, uri)) end def post(url_or_path_or_uri, params, []) do do_request(:post, url(url_or_path_or_uri, params)) end def post(url_or_path_or_uri, params, body) when body == %{} do do_request(:post, url(url_or_path_or_uri, params)) end def post(url_or_path_or_uri, params, body) when is_list(body) do post(url_or_path_or_uri, params, encode(body)) end def post(url_or_path_or_uri, params, body) when is_map(body) do post(url_or_path_or_uri, params, encode(body)) end def post(url_or_path_or_uri, params, body) when is_binary(body) do do_request(:post, url(url_or_path_or_uri, params), body) end def post(path, %URI{} = uri) when is_binary(path) do do_request(:post, url(path, uri)) end def post(url_or_path_or_uri, []) do do_request(:post, url(url_or_path_or_uri)) end def post(url_or_path_or_uri, body) when body == %{} do do_request(:post, url(url_or_path_or_uri)) end def post(url_or_path_or_uri, body) when is_list(body) do post(url_or_path_or_uri, encode(body)) end def post(url_or_path_or_uri, body) when is_map(body) do post(url_or_path_or_uri, encode(body)) end def post(url_or_path_or_uri, body) when is_binary(body) do do_request(:post, url(url_or_path_or_uri), body) end def post(url_or_path_or_uri, params) do do_request(:post, url(url_or_path_or_uri, params)) end def post(url_or_path_or_uri) do do_request(:post, url(url_or_path_or_uri)) end @doc "Sends a POST request and raise an exception if something wrong." def post!(a, b, c, d), do: ok!(post(a, b, c, d)) def post!(a, b, c), do: ok!(post(a, b, c)) def post!(a, b), do: ok!(post(a, b)) def post!(a), do: ok!(post(a)) @doc "Sends a DELETE request." def delete(path, params, %URI{} = uri) when is_binary(path) do do_request(:delete, url(path, params, uri)) end def delete(path, %URI{} = uri) when is_binary(path) do do_request(:delete, url(path, uri)) end def delete(url_or_path_or_uri, params) do do_request(:delete, url(url_or_path_or_uri, params)) end def delete(url_or_path_or_uri) do do_request(:delete, url(url_or_path_or_uri)) end @doc "Sends a DELETE request and raise an exception if something wrong." def delete!(a, b, c), do: ok!(delete(a, b, c)) def delete!(a, b), do: ok!(delete(a, b)) def delete!(a), do: ok!(delete(a)) @doc """ Composes a complete URL by given params. 
## Examples: iex> url() "http://127.0.0.1:9200" iex> url("/articles/document/1") "http://127.0.0.1:9200/articles/document/1" iex> url("/articles/document/1", %URI{ host: "example.com" }) "http://example.com:9200/articles/document/1" Also see `Tirexs.Resources.urn/1`. """ def url(path, params, %URI{} = uri) when is_binary(path) and is_binary(params) do url(path, %URI{ uri | query: params }) end def url(path, params, %URI{} = uri) when is_binary(path) do url(path, %URI{ uri | query: URI.encode_query(params) }) end def url(path, %URI{} = uri) when is_binary(path) do { default, given } = { Tirexs.get_uri_env(), __normalize_path__(uri) } %URI{ __merge__(default, given) | path: __normalize_path__(path) } |> to_string end def url(url_or_path, params) when is_binary(url_or_path) and is_binary(params) do location = %URI{ URI.parse(url_or_path) | query: params } { default, given } = { Tirexs.get_uri_env(), __normalize_path__(location) } __merge__(default, given) |> to_string end def url(url_or_path, params) when is_binary(url_or_path) do location = %URI{ URI.parse(url_or_path) | query: URI.encode_query(params) } { default, given } = { Tirexs.get_uri_env(), __normalize_path__(location) } __merge__(default, given) |> to_string end def url(url_or_path) when is_binary(url_or_path) do { default, given } = { Tirexs.get_uri_env(), __normalize_path__(URI.parse(url_or_path)) } __merge__(default, given) |> to_string end def url(%URI{} = uri) do { default, given } = { Tirexs.get_uri_env(), __normalize_path__(uri) } __merge__(default, given) |> to_string end def url(), do: url(Tirexs.get_uri_env()) @doc """ Returns `false` if `{ :error, _, _ } = response`, otherwise returns `true`. """ def ok?(:error), do: false def ok?({ :error, _, _ }), do: false def ok?({ :ok, _, _ }), do: true @doc """ Raises `RuntimeError` if `{ :error, _, _ } = response`, otherwise returns `response` back. """ def ok!({ :error, _, error }), do: raise inspect(error) def ok!({ :ok, _, _ } = response), do: response def do_request(method, url, body \\ []) do http_adapter = Tirexs.ENV.get_env(:http_adapter, @standard_http_adapter) http_adapter.do_request(method, url, body) |> __response__() end @doc false def encode(term, opts \\ []) do JSX.encode!(term, opts) end @doc false def decode(json, opts \\ [{:labels, :atom}]) do with binary <- IO.iodata_to_binary(json), {:ok, decoded_json} <- JSX.decode(binary, opts) do decoded_json else {:error, msg} -> raise "Response is invalid JSON. Response: \"#{json}\". JSX Error: \"#{msg}\"" end end defp __merge__(map1, map2) do Map.merge(map1, map2, fn(_k, v1, v2) -> v2 || v1 end) end defp __normalize_path__(uri) when is_map(uri) do %URI{ uri | path: __normalize_path__(uri.path) } end defp __normalize_path__(path) when is_binary(path) do if String.starts_with?(path, "/"), do: path, else: "/" <> path end defp __normalize_path__(path), do: path defp __response__({state, status, []}), do: { state, status, [] } defp __response__({state, status, body}), do: { state, status, decode(body) } defp __response__(:error), do: :error end
lib/tirexs/http.ex
0.718199
0.434881
http.ex
starcoder
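A usage sketch combining the request helpers above; the index name and host are placeholders:

```elixir
uri = %URI{host: "search.internal", port: 9200}

# Check for the index, then either fetch a document or create the index.
case Tirexs.HTTP.head("/articles", uri) do
  {:ok, _status, _} ->
    Tirexs.HTTP.get!("/articles/document/1", [_source: true], uri)

  {:error, _status, _} ->
    Tirexs.HTTP.put!("/articles", uri)
end
```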
defmodule CSSEx.Helpers.AtParser do
  @moduledoc false

  import CSSEx.Helpers.Shared, only: [inc_col: 1]
  import CSSEx.Helpers.Error, only: [error_msg: 1]
  import CSSEx.Parser, only: [add_error: 2]

  @line_terminators CSSEx.Helpers.LineTerminators.code_points()
  @var_replacement_split ~r/(?<maybe_var_1>\$::)?.+(?<split>:).+(?<maybe_var_2>\$::)|(?<split_2>:)/
  @parenthesis_separators ~r/\s(and|or|not)\s/

  @enforce_keys [:type]
  defstruct [:type, column: 0, acc: [], parenthesis: 0, p_acc: %{}]

  def parse(rem, data, type) do
    case do_parse(rem, data, %__MODULE__{type: type}) do
      {:ok, {_, _} = result} -> result
      {:error, new_data} -> {rem, new_data}
    end
  end

  def do_parse([], data, %{acc: acc, parenthesis: 0} = _state) do
    parsed = IO.chardata_to_string(acc)
    {:ok, {parsed, data}}
  end

  # we found an opening parenthesis (
  def do_parse([40 | rem], data, %{parenthesis: p, p_acc: p_acc} = state) do
    new_p = p + 1
    new_p_acc = Map.put(p_acc, new_p, [])
    do_parse(rem, inc_col(data), %{state | parenthesis: new_p, p_acc: new_p_acc})
  end

  # we found a closing parenthesis )
  def do_parse([41 | rem], data, %{acc: acc, parenthesis: p, p_acc: p_acc} = state) when p > 0 do
    {accumulator, new_p_acc} = Map.pop(p_acc, p)

    try do
      processed = process_parenthesis_content(accumulator, data, state)
      previous_p = p - 1
      new_state = inc_col(%{state | parenthesis: previous_p})

      new_state_2 =
        case Map.pop(new_p_acc, previous_p) do
          # this means we're on the first opened parenthesis
          {nil, new_p_acc_2} ->
            %{new_state | p_acc: new_p_acc_2, acc: [acc, [40, processed, 41]]}

          {previous_acc, new_p_acc_2} ->
            %{
              new_state
              | p_acc: Map.put(new_p_acc_2, previous_p, [previous_acc, [40, processed, 41]])
            }
        end

      do_parse(rem, data, new_state_2)
    catch
      {:error, _} = error ->
        {:error, add_error(data, error_msg(error))}
    end
  end

  # a closing parenthesis with no matching opening one is an error; this
  # clause must use a cons pattern ([41 | _rem], not the two-element list
  # [41, _rem]) and come before the catch-all accumulator clause below,
  # otherwise it is never reachable
  def do_parse([41 | _rem], data, %{parenthesis: 0}),
    do: {:error, add_error(data, error_msg({:unexpected, ")"}))}

  Enum.each(@line_terminators, fn char ->
    def do_parse([unquote(char) | _rem], data, %{type: type} = _state),
      do: {:error, add_error(data, error_msg({:invalid, "@#{type}", :newline}))}
  end)

  def do_parse(
        [char | rem],
        data,
        %{parenthesis: p, p_acc: p_acc} = state
      )
      when p > 0 do
    p_acc_inner = Map.fetch!(p_acc, p)
    new_p_acc = Map.put(p_acc, p, [p_acc_inner, char])
    do_parse(rem, data, inc_col(%{state | p_acc: new_p_acc}))
  end

  def do_parse([char | rem], data, %{parenthesis: 0, acc: acc} = state),
    do: do_parse(rem, data, inc_col(%{state | acc: [acc, char]}))

  def process_parenthesis_content(accumulator, data, %{type: type} = _state) do
    case type do
      :media -> process_parenthesis_base(accumulator, data)
      :page -> throw({:error, :no_page})
      :supports -> process_parenthesis_base(accumulator, data)
    end
  end

  def process_parenthesis_base(accumulator, data) do
    acc = IO.chardata_to_string(accumulator)

    Regex.split(@parenthesis_separators, acc, include_captures: true)
    |> Enum.map(fn value ->
      trimmed = String.trim(value)

      case trimmed do
        "and" ->
          "and"

        "or" ->
          "or"

        "not" ->
          "not"

        declaration ->
          String.split(declaration, @var_replacement_split, trim: true, on: [:split, :split_2])
          |> Enum.map(fn token ->
            case CSSEx.Helpers.Interpolations.maybe_replace_val(String.trim(token), data) do
              {:ok, new_token} -> new_token
              {:error, _} = error -> throw(error)
            end
          end)
          |> Enum.join(":")
      end
    end)
    |> Enum.join(" ")
  end
end
lib/helpers/at_parser.ex
0.565659
0.504089
at_parser.ex
starcoder
defmodule Ash.Query.Operator.In do @moduledoc """ left in [1, 2, 3] this predicate matches if the left is in the list on the right For comparison, this simplifies to a set of "or equals", e.g `{:or, {:or, {:or, left == 1}, left == 2}, left == 3}` """ use Ash.Query.Operator, operator: :in, predicate?: true, types: [[:any, {:array, :same}]] @inspect_items_limit 10 @dialyzer {:nowarn_function, compare: 2} def new(%Ash.Query.Ref{} = left, right) when is_list(right) do {:ok, %__MODULE__{left: left, right: MapSet.new(right)}} end def new(left, right), do: {:ok, %__MODULE__{left: left, right: right}} def evaluate(%{left: nil}), do: {:ok, nil} def evaluate(%{right: nil}), do: {:ok, nil} def evaluate(%{left: left, right: right}) do {:known, Enum.any?(right, &Comp.equal?(&1, left))} end def compare(%__MODULE__{left: left, right: %MapSet{} = left_right}, %__MODULE__{ left: left, right: %MapSet{} = right_right }) do if MapSet.equal?(left_right, right_right) do :mutually_inclusive else if MapSet.disjoint?(left_right, right_right) do :mutually_exclusive else :unknown end end end def compare(%__MODULE__{}, %Ash.Query.Operator.Eq{ right: %Ref{} }), do: false def compare(%__MODULE__{left: left, right: %MapSet{} = left_right}, %Ash.Query.Operator.Eq{ left: left, right: value }) do if MapSet.member?(left_right, value) do :left_implies_right else :mutually_exclusive end end def compare(_, _), do: :unknown def to_string(%{right: %Ref{}} = op, opts), do: super(op, opts) def to_string(%{left: left, right: mapset}, opts) do import Inspect.Algebra list_doc = case Enum.split(mapset, @inspect_items_limit) do {left, []} -> to_doc(left, opts) {left, _} -> concat(to_doc(left, opts), "...") end concat([ to_doc(left, opts), " in ", list_doc ]) end end
lib/ash/query/operator/in.ex
0.831074
0.713931
in.ex
starcoder
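The `evaluate/1` semantics sketched with a bare struct; constructing the operator directly like this is illustrative only (normally `new/2` builds it from a `Ref` and a list):

```elixir
op = %Ash.Query.Operator.In{left: 2, right: MapSet.new([1, 2, 3])}
Ash.Query.Operator.In.evaluate(op)
#=> {:known, true}
```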
use Croma defmodule RaftKV.Config do @default_stats_collection_interval (if Mix.env() == :test, do: 2_000, else: 60_000) @default_workflow_execution_interval (if Mix.env() == :test, do: 2_000, else: 60_000) @default_workflow_lock_period (if Mix.env() == :test, do: 2_000, else: 30_000) @default_shard_ineligible_period_after_split_or_merge 2 * @default_stats_collection_interval @moduledoc """ `RaftKV` defines the following application configs: - `:stats_collection_interval`: Interval (in milliseconds) between collections of the following metrics of all shards: - Number of keys in a shard - Aggregated size of all keys in a shard - Aggregated load which a shard has experienced since the last stats collection By using smaller value you can adjust (split/merge) number of shards more quickly, with higher overhead. Defaults to `#{@default_stats_collection_interval}`. - `:workflow_execution_interval`: Interval (in milliseconds) between executions of workflow tasks. By using smaller value you can execute workflow tasks more quickly, with higher overhead. Defaults to `#{@default_workflow_execution_interval}`. Workflow task here means: - removing a keyspace - splitting 1 shard into 2 - merging 2 consecutive shards into 1 - `:workflow_lock_period`: When executing a workflow task it is locked for this period (in milliseconds) in order to avoid running the same task simultaneously. Defaults to `#{@default_workflow_lock_period}`. - `:shard_ineligible_period_after_split_or_merge`: When a shard has just been split/merged, stats of the affected shard(s) become stale. To prevent from incorrectly splitting/merging based on the stale stats, shards that have been split/merged within this period (in milliseconds) are excluded from next split/merge candidates. Defaults to `#{@default_shard_ineligible_period_after_split_or_merge}`. Note that each `raft_kv` process uses application configs stored in the local node. If you want to configure the options above you must set them on all nodes in your cluster. In addition to the configurations above, the following configurations defined by the underlying libraries are also available: - `RaftedValue.make_config/2` and `RaftedValue.change_config/2`. - `RaftFleet.Config`. """ defun stats_collection_interval() :: pos_integer do Application.get_env(:raft_kv, :stats_collection_interval, @default_stats_collection_interval) end defun workflow_execution_interval() :: pos_integer do Application.get_env(:raft_kv, :workflow_execution_interval, @default_workflow_execution_interval) end defun workflow_lock_period() :: pos_integer do Application.get_env(:raft_kv, :workflow_lock_period, @default_workflow_lock_period) end defun shard_ineligible_period_after_split_or_merge() :: pos_integer do Application.get_env(:raft_kv, :shard_ineligible_period_after_split_or_merge, @default_shard_ineligible_period_after_split_or_merge) end end
lib/raft_kv/config.ex
0.768038
0.540985
config.ex
starcoder
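Since each `raft_kv` process reads the application env of its local node, the options above must be set on every node in the cluster. A config sketch:

```elixir
# config/config.exs -- shorter intervals mean faster shard adjustment
# at the cost of more overhead.
import Config

config :raft_kv,
  stats_collection_interval: 30_000,
  workflow_execution_interval: 30_000
```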
defmodule Sanbase.Model.Project.ListSelector do import Sanbase.DateTimeUtils alias Sanbase.Model.Project @doc ~s""" Return a list of projects described by the selector object. See `args_to_opts/1` for description of the argument format. """ def projects(args) do opts = args_to_opts(args) {:ok, Project.List.projects(opts)} end @doc ~s""" Transform a selector to a keyword list that can be passed to the functions in the `Project.List` module to apply filtering/ordering/pagination. The argument is a map in the following format: %{ selector: %{ filters: [ %{ metric: "daily_active_addresses", from: ~U[2020-04-22 00:00:00Z], to: ~U[2020-04-29 00:00:00Z], aggregation: :avg, operator: :greater_than, threshold: 10 } ], order_by: %{ metric: "circulation", from: ~U[2020-04-25 00:00:00Z], to: ~U[2020-04-29 00:00:00Z], aggregation: :last direction: :desc }, pagination: %{page: 1, page_size: 10} } } """ def args_to_opts(args) do filters = get_in(args, [:selector, :filters]) order_by = get_in(args, [:selector, :order_by]) pagination = get_in(args, [:selector, :pagination]) included_slugs = filters |> included_slugs_by_filters() ordered_slugs = order_by |> ordered_slugs_by_order_by(included_slugs) [ has_selector?: not is_nil(args[:selector]), has_order?: not is_nil(order_by), has_filters?: not is_nil(filters), has_pagination?: not is_nil(pagination), pagination: pagination, min_volume: Map.get(args, :min_volume), included_slugs: included_slugs, ordered_slugs: ordered_slugs ] end defp included_slugs_by_filters(nil), do: :all defp included_slugs_by_filters([]), do: :all defp included_slugs_by_filters(filters) when is_list(filters) do filters |> Sanbase.Parallel.map( fn filter -> cache_key = {:included_slugs_by_filters, %{filter | from: round_datetime(filter.from), to: round_datetime(filter.to)}} |> Sanbase.Cache.hash() {:ok, slugs} = Sanbase.Cache.get_or_store(cache_key, fn -> Sanbase.Metric.slugs_by_filter( filter.metric, filter.from, filter.to, filter.operator, filter.threshold, filter.aggregation ) end) slugs |> MapSet.new() end, ordered: false, max_concurrency: 8 ) |> Enum.reduce(&MapSet.intersection(&1, &2)) |> Enum.to_list() end defp ordered_slugs_by_order_by(nil, slugs), do: slugs defp ordered_slugs_by_order_by(order_by, slugs) do %{metric: metric, from: from, to: to, direction: direction} = order_by aggregation = Map.get(order_by, :aggregation) {:ok, ordered_slugs} = Sanbase.Metric.slugs_order(metric, from, to, direction, aggregation) case slugs do :all -> ordered_slugs ^slugs when is_list(slugs) -> slugs_mapset = slugs |> MapSet.new() Enum.filter(ordered_slugs, &(&1 in slugs_mapset)) end end end
lib/sanbase/model/project/list/list_selector.ex
0.793826
0.442396
list_selector.ex
starcoder
defmodule Oli.Delivery.Gating.ConditionTypes.Schedule do @moduledoc """ Schedule strategy provides a temporal based gating condition. A schedule condition can define a start and/or end datetime for a resource to be available. """ alias Oli.Delivery.Gating.GatingCondition alias Oli.Delivery.Gating.GatingConditionData alias Oli.Delivery.Sections alias Oli.Publishing.DeliveryResolver @behaviour Oli.Delivery.Gating.ConditionTypes.ConditionType def type do :schedule end def open?(%GatingCondition{ data: %GatingConditionData{start_datetime: start_datetime, end_datetime: end_datetime} }) do now = DateTime.utc_now() case {start_datetime, end_datetime} do {nil, nil} -> true {start_datetime, nil} -> DateTime.compare(start_datetime, now) == :lt {nil, end_datetime} -> DateTime.compare(now, end_datetime) == :lt {start_datetime, end_datetime} -> DateTime.compare(start_datetime, now) == :lt and DateTime.compare(now, end_datetime) == :lt end end def details( %GatingCondition{ section_id: section_id, resource_id: resource_id, data: %GatingConditionData{ start_datetime: start_datetime, end_datetime: end_datetime } }, opts \\ [] ) do section = Sections.get_section!(section_id) revision = DeliveryResolver.from_resource_id(section.slug, resource_id) now = DateTime.utc_now() format_datetime = Keyword.get(opts, :format_datetime, &format_datetime_default/1) cond do start_datetime != nil && DateTime.compare(start_datetime, now) != :lt -> "#{revision.title} is scheduled to start #{format_datetime.(start_datetime)}" end_datetime != nil && DateTime.compare(now, end_datetime) != :lt -> "#{revision.title} is scheduled to end #{format_datetime.(end_datetime)}" true -> nil end end defp format_datetime_default(%DateTime{} = dt) do Timex.format!(dt, "{M}/{D}/{YYYY} {h12}:{m}:{s} {AM} {Zabbr}") end end
lib/oli/delivery/gating/condition_types/schedule.ex
0.737253
0.434161
schedule.ex
starcoder
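A minimal sketch of the temporal check in `open?/1`; building the structs directly with only `:data` set is for illustration and assumes no enforced keys:

```elixir
alias Oli.Delivery.Gating.{GatingCondition, GatingConditionData}
alias Oli.Delivery.Gating.ConditionTypes.Schedule

condition = %GatingCondition{
  data: %GatingConditionData{
    start_datetime: ~U[2023-01-01 00:00:00Z],
    end_datetime: ~U[2023-06-01 00:00:00Z]
  }
}

# true while "now" falls strictly inside the window, false otherwise
Schedule.open?(condition)
```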
defmodule Kino.Ecto do @moduledoc """ A widget for interactively viewing `Ecto` query results. The data must be an enumerable of records, where each record is either map, struct, keyword list or tuple. ## Examples The widget primarily allows for viewing a database table given a schema: Kino.Ecto.new(Weather, Repo) However, the first argument can be any queryable, so you can pipe arbitrary queries directly to the widget: from(w in Weather, where: w.city == "New York") |> Kino.Ecto.new(Repo) """ @behaviour Kino.Table alias Kino.Utils @type t :: Kino.Table.t() @doc """ Starts a widget process with the given queryable as the data source. """ @spec new(Ecto.Queryable.t(), Ecto.Repo.t()) :: t() def new(queryable, repo) when is_atom(repo) do unless queryable?(queryable) do raise ArgumentError, "expected a term implementing the Ecto.Queryable protocol, got: #{inspect(queryable)}" end Kino.Table.new(__MODULE__, {repo, queryable}) end defp queryable?(term) do Ecto.Queryable.impl_for(term) != nil end @impl true def init({repo, queryable}) do name = queryable |> query_source() |> to_string() features = Kino.Utils.truthy_keys( refetch: true, pagination: true, # If the user specifies custom select, the record keys # are not valid "order by" fields, so we disable sorting sorting: default_select_query?(queryable) ) info = %{name: name, features: features} {:ok, info, %{repo: repo, queryable: queryable}} end @impl true def get_data(rows_spec, state) do {total_rows, records} = get_records(state.repo, state.queryable, rows_spec) keys = case keys_from_queryable(state.queryable) do [] -> Utils.Table.keys_for_records(records) keys -> keys end columns = Utils.Table.keys_to_columns(keys) rows = Enum.map(records, &Utils.Table.record_to_row(&1, keys)) {:ok, %{columns: columns, rows: rows, total_rows: total_rows}, state} end defp get_records(repo, queryable, rows_spec) do count = repo.aggregate(queryable, :count) query = prepare_query(queryable, rows_spec) records = repo.all(query) {count, records} end defp query_source(queryable) do %{from: %{source: {source, _schema}}} = Ecto.Queryable.to_query(queryable) source end defp default_select_query?(queryable) do query = Ecto.Queryable.to_query(queryable) query.select == nil end defp keys_from_queryable(queryable) do schema = Utils.Table.ecto_schema(queryable) if schema != nil and default_select_query?(queryable) do schema.__schema__(:fields) else [] end end if Code.ensure_loaded?(Ecto.Query) do defp prepare_query(queryable, rows_spec) do import Ecto.Query, only: [from: 2] query = from(q in queryable, limit: ^rows_spec.limit, offset: ^rows_spec.offset) if rows_spec[:order_by] do query = Ecto.Query.exclude(query, :order_by) order_by = [{rows_spec.order, rows_spec.order_by}] from(q in query, order_by: ^order_by) else query end end else defp prepare_query(_queryable, _rows_spec), do: raise("Ecto is missing") end end
lib/kino/ecto.ex
0.906203
0.626481
ecto.ex
starcoder
# Simple functions to load a file, following the hashing rules.
defmodule Scenic.Cache.Term do
  alias Scenic.Cache
  alias Scenic.Cache.Hash

  # --------------------------------------------------------
  def load(path, hash, opts \\ [])

  # Insecure loading. Loads the file blindly even if it has been altered.
  # We don't recommend doing this in production. Better to embed the expected
  # hashes. It is also slower, because it has to load the file and compute the
  # hash to use as a key even if the term is already loaded into the cache.
  def load(path, :insecure, opts) do
    with {:ok, data} <- Cache.File.read(path, :insecure, opts),
         {:ok, hash} <- Hash.binary(data, opts[:hash] || :sha) do
      case Cache.claim(hash, opts[:scope]) do
        true ->
          {:ok, hash}

        false ->
          case do_read_term(data, opts) do
            {:ok, term} -> Cache.put(hash, term, opts[:scope])
            err -> err
          end
      end
    else
      err -> err
    end
  end

  # Preferred, more secure load. The expected hash signature is supplied.
  # Also faster if the item is already loaded, as then it can skip loading
  # the file entirely.
  def load(path, hash, opts) do
    case Cache.claim(hash, opts[:scope]) do
      true ->
        {:ok, hash}

      false ->
        # need to read and verify the file
        case read(path, hash, opts) do
          {:ok, data} -> Cache.put(hash, data, opts[:scope])
          err -> err
        end
    end
  end

  # --------------------------------------------------------
  def read(path, hash, opts \\ [])

  # Insecure read. Reads the file blindly even if it has been altered.
  # We don't recommend doing this in production. Better to embed the
  # expected hashes.
  def read(path, :insecure, opts) do
    with {:ok, data} <- File.read(path) do
      do_read_term(data, opts)
    else
      err -> err
    end
  end

  def read(path, hash, opts) do
    with {:ok, data} <- File.read(path),
         {:ok, data} <- Hash.verify(data, hash, opts[:hash] || :sha) do
      do_read_term(data, opts)
    else
      err -> err
    end
  end

  # --------------------------------------------------------
  # Decode the binary into a term. Passes the :safe option to
  # :erlang.binary_to_term/2 unless opts[:safe] is explicitly false.
  defp do_read_term(data, opts) do
    opts =
      case opts[:safe] do
        false -> []
        _ -> [:safe]
      end

    try do
      {:ok, :erlang.binary_to_term(data, opts)}
    rescue
      _ -> {:error, :invalid_term}
    end
  end
end
lib/scenic/cache/term.ex
0.576304
0.514522
term.ex
starcoder
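Usage sketch for the two load modes above; the path and hash literal are placeholders:

```elixir
path = "priv/data/config.term"

# Preferred: supply the expected hash, which doubles as the cache key.
{:ok, hash} = Scenic.Cache.Term.load(path, "precomputed-hash-here")

# Development only: derive the hash from the file itself.
{:ok, _hash} = Scenic.Cache.Term.load(path, :insecure)
```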
defmodule Mix.Tasks.Gen.Client do
  @moduledoc """
  Generates a new client for an endpoint based on a given endpoint name.

  ## Example

  ```
  $ mix gen.client expenses expense
  ```
  """
  use Mix.Task

  @shortdoc "Generates a Freshbooks client module for an endpoint"

  def run([endpoint | [singular_endpoint | _]]) do
    endpoint = String.downcase(endpoint)

    Mix.Generator.create_file(
      "lib/client/#{endpoint}_client.ex",
      template(endpoint, singular_endpoint)
    )

    noun = String.capitalize(endpoint)

    Mix.Generator.create_file("test/client/#{endpoint}_client_test.exs", """
    defmodule Frex.Client.#{noun}Test do
      use ExUnit.Case, async: true

      alias Frex.Client.#{noun}
      import Credentials

      test "#{noun}.get calls are successful" do
        {status, _} = #{noun}.get(credentials, 1)

        assert status == :ok
      end

      test "#{noun}.list calls are successful" do
        {status, _, _} = #{noun}.list(credentials)

        assert status == :ok
      end
    end
    """)
  end

  def run(_), do: usage()

  defp template(endpoint, singular_endpoint) do
    """
    defmodule Frex.Client.#{String.capitalize(endpoint)} do
      @moduledoc \"\"\"
      Client module for interacting with the Freshbooks #{endpoint} endpoint.
      \"\"\"
      use Frex.Endpoint, :client

      @doc \"\"\"
      Sends a get request to the Freshbooks API #{endpoint} endpoint.

      ## Parameters

        * `#{singular_endpoint}_id` (**required**) -- #{singular_endpoint} ID
      \"\"\"
      def get(credentials, #{singular_endpoint}_id) do
        "#{singular_endpoint}.get"
        |> Request.build({:#{singular_endpoint}_id, %{}, #{singular_endpoint}_id})
        |> HTTP.request!(credentials)
      end

      @doc \"\"\"
      Sends a list request to the Freshbooks API #{endpoint} endpoint.

      ## Parameters

        * `filters` (**optional**) -- a map of filters for the list request
        * `first_one`
      \"\"\"
      def list(credentials, filters \\\\ %{}) do
        opts = opts_to_builder(filters)

        "#{singular_endpoint}.list"
        |> Request.build(opts)
        |> HTTP.request!(credentials)
      end
    end
    """
  end

  defp usage,
    do: IO.puts("Usage: mix gen.client <endpoint> <singular_endpoint>, e.g. mix gen.client expenses expense")
end
lib/mix/tasks/client_generator.ex
0.864525
0.529689
client_generator.ex
starcoder
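# For illustration, this is roughly the client module the task above would
# generate from `mix gen.client expenses expense`, assuming the Frex project
# conventions (Frex.Endpoint, Request, HTTP) it relies on.
defmodule Frex.Client.Expenses do
  use Frex.Endpoint, :client

  def get(credentials, expense_id) do
    "expense.get"
    |> Request.build({:expense_id, %{}, expense_id})
    |> HTTP.request!(credentials)
  end

  def list(credentials, filters \\ %{}) do
    opts = opts_to_builder(filters)

    "expense.list"
    |> Request.build(opts)
    |> HTTP.request!(credentials)
  end
end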
defmodule Nx.Defn.Expr do
  @moduledoc """
  The expression used by `Nx.Defn.Compiler`.

  `Nx.Defn.Compiler` changes `Nx` default backend from `Nx.BinaryBackend`
  to `Nx.Defn.Expr`. It is a struct with the following fields:

    * `:id` - a unique identifier
    * `:op` - the operation name
    * `:args` - the operation arguments
    * `:context` - the context of the expression

  Convenience functions for traversing expressions can be found
  in `Nx.Defn.Tree`.

  ## Syntax nodes

  Most nodes are created directly via the `Nx` module and therefore map
  directly to `Nx.Tensor` callbacks. However the following syntax nodes exist:

    * `parameter(integer)`
    * `tensor(Nx.Tensor.t)`
    * `fun(parameters, t, fun)`
    * `cond(clauses, otherwise)`
    * `metadata(expr, metadata)`
    * `elem(tuple, pos, size)` - created automatically from expressions that
      return tuples. Note the elements may themselves be tuples, which means
      tuples can be nested

  Custom compilers must handle said nodes accordingly.
  """

  alias Nx.Defn.{Expr, Tree}
  alias Nx.Tensor, as: T

  import Nx.Shared

  @enforce_keys [:id, :op, :args, :context]
  defstruct [:id, :op, :args, :context]

  ## Public API

  @doc """
  Builds a tensor expression from the given tensor.
  """
  def tensor(tensor), do: to_expr(tensor)

  @doc """
  Creates a tensor expression parameter at `pos` based on the given tensor expression.
  """
  def parameter(%T{data: %Expr{context: context}} = tensor, pos) do
    parameter(tensor, context, pos)
  end

  @doc """
  Creates a tensor expression parameter at `pos` based on the given `tensor` and `context`.
  """
  def parameter(tensor, context, pos) when is_integer(pos) and pos >= 0 do
    expr(tensor, context, :parameter, [pos])
  end

  @doc """
  Creates a tensor expression parameter at `pos` with the given `context`, `type`,
  `shape`, and `pos`.
  """
  def parameter(context, type, shape, pos) do
    names = List.duplicate(nil, tuple_size(shape))
    expr(%T{type: type, shape: shape, names: names}, context, :parameter, [pos])
  end

  @doc """
  Creates a tensor expression metadata node wrapping the given tensor expression.
  """
  def metadata(expr, metadata) when is_map(metadata) do
    expr = to_expr(expr)
    expr(expr, expr.data.context, :metadata, [expr, metadata])
  end

  @doc """
  Creates a tensor expression function node with the given args and anonymous function.
  """
  def fun(args, fun) when is_function(fun, length(args)) do
    out = to_expr(apply(fun, args))
    expr(out, out.data.context, :fun, [args, out, fun])
  end

  @doc """
  Creates a composite type, possibly recursively, by executing the given
  function for each element.

  Currently only tuples are supported as composite types. If a non-composite
  type is given, it is passed to the function as is.
  """
  def composite(tuple, context, fun) when is_tuple(tuple) do
    size = tuple_size(tuple)
    expr = fun.(%T{shape: {}, names: [], type: {:tuple, size}})

    # TODO: Use Enum.with_index on Elixir v1.12
    tuple
    |> Tuple.to_list()
    |> Enum.with_index()
    |> Enum.map(fn {tensor, i} ->
      fun = &expr(&1, context, :elem, [expr, i, size])
      composite(tensor, context, fun)
    end)
    |> List.to_tuple()
  end

  def composite(tensor, _context, fun), do: fun.(tensor)

  @doc """
  Creates a `cond` tensor expression.
  """
  def cond(clauses, last) do
    {preds, exprs} = Enum.unzip(clauses)
    {preds, context} = to_exprs(preds)
    [last | exprs] = cond_clauses(last, exprs)
    clauses = Enum.zip(preds, exprs)
    composite(last, context, &expr(&1, context, :cond, [clauses, last]))
  end

  defp cond_clauses(last, exprs) when is_tuple(last) do
    size = tuple_size(last)

    for expr <- exprs,
        not is_tuple(expr) or tuple_size(expr) != size,
        do: branch_mismatch!(expr, last)

    # TODO: Use Enum.with_index on Elixir v1.12
    list_of_lists =
      last
      |> Tuple.to_list()
      |> Enum.with_index()
      |> Enum.map(fn {last, index} ->
        exprs = Enum.map(exprs, &elem(&1, index))
        cond_clauses(last, exprs)
      end)

    {last_and_exprs, _} =
      Enum.map_reduce([last | exprs], list_of_lists, fn _, list_of_lists ->
        unzip_cons(list_of_lists, [], [])
      end)

    last_and_exprs
  end

  defp cond_clauses(last, exprs) do
    %{type: type, shape: shape, names: names} = last = to_expr(last)

    {exprs, {type, shape, names}} =
      Enum.map_reduce(exprs, {type, shape, names}, fn expr, {type, shape, names} ->
        if is_tuple(expr), do: branch_mismatch!(expr, last)
        type = binary_type(type, expr)
        expr = to_expr(expr)
        {shape, names} = Nx.Shape.binary_broadcast(shape, names, expr.shape, expr.names)
        {expr, {type, shape, names}}
      end)

    for expr <- [last | exprs] do
      expr
      |> Nx.as_type(type)
      |> Nx.broadcast(shape, names: names)
    end
  end

  defp unzip_cons([[head | tail] | rest], heads, tails),
    do: unzip_cons(rest, [head | heads], [tail | tails])

  defp unzip_cons([], heads, tails),
    do: {heads |> Enum.reverse() |> List.to_tuple(), Enum.reverse(tails)}

  defp branch_mismatch!(left, right) do
    raise ArgumentError,
          "cond/if expects all branches to return tensors or tuples of the same size, " <>
            "got #{inspect(left)} and #{inspect(right)}"
  end

  ## Nx.Defn AST callbacks

  @doc false
  def id(), do: System.unique_integer()

  @doc false
  def cond(file, clauses, last) do
    clauses =
      for {meta, {pred, expr}} <- clauses do
        pred = to_expr(pred)

        if pred.shape != {} do
          raise CompileError,
            line: meta[:line],
            file: file,
            description: "condition must be a scalar tensor, got: #{inspect(pred.shape)}"
        end

        {pred, expr}
      end

    cond(clauses, last)
  end

  ## Nx.Backend Callbacks

  @behaviour Nx.Backend

  @impl true
  def eye(out) do
    expr(out, nil, :eye, [])
  end

  @impl true
  def iota(out, axis) do
    expr(out, nil, :iota, [axis])
  end

  @impl true
  def random_uniform(out, min, max) do
    expr(out, nil, :random_uniform, [min, max])
  end

  @impl true
  def random_normal(out, mu, sigma) do
    expr(out, nil, :random_normal, [mu, sigma])
  end

  unary_ops =
    [:exp, :expm1, :log, :log1p, :logistic, :cos, :sin, :tan, :cosh, :sinh, :tanh] ++
      [:acosh, :asinh, :atanh, :sqrt, :rsqrt, :cbrt, :negate, :sign, :abs, :bitwise_not] ++
      [:population_count, :count_leading_zeros, :floor, :ceil, :round, :as_type] ++
      [:erf, :erfc, :erf_inv, :acos, :asin, :atan, :bitcast]

  for op <- unary_ops do
    @impl true
    def unquote(op)(out, tensor) do
      tensor = to_expr(tensor)
      expr(out, tensor.data.context, unquote(op), [tensor])
    end
  end

  binary_ops =
    [:add, :subtract, :multiply, :divide, :power, :remainder, :atan2, :max, :min, :quotient] ++
      [:bitwise_and, :bitwise_or, :bitwise_xor, :left_shift, :right_shift] ++
      [:equal, :not_equal, :greater, :less, :less_equal, :greater_equal] ++
      [:logical_and, :logical_or, :logical_xor] ++
      [:outer]

  for op <- binary_ops do
    @impl true
    def unquote(op)(out, t1, t2) do
      {[t1, t2], context} = to_exprs([t1, t2])
      expr(out, context, unquote(op), [t1, t2])
    end
  end

  aggregate_ops = [:all?, :any?, :argmax, :argmin, :sum, :product, :reduce_min, :reduce_max]

  for op <- aggregate_ops do
    @impl true
    def unquote(op)(out, tensor, opts) do
tensor = to_expr(tensor) expr(out, tensor.data.context, unquote(op), [tensor, opts]) end end window_aggregate_ops = [:window_sum, :window_product, :window_max, :window_min] for op <- window_aggregate_ops do @impl true def unquote(op)(out, tensor, window_dimensions, opts) do tensor = to_expr(tensor) expr(out, tensor.data.context, unquote(op), [tensor, window_dimensions, opts]) end end @impl true def reduce(%{type: type} = out, tensor, acc, opts, fun) do args = [parameter(:reduce, type, {}, 0), parameter(:reduce, type, {}, 1)] {[tensor, acc], context} = to_exprs([tensor, acc]) fun = fun(args, fun) if fun.shape != {} do raise "reduce function must return a scalar tensor, got: #{inspect(fun.shape)}" end expr(out, context, :reduce, [tensor, acc, opts, fun]) end @impl true def reduce_window( %{type: type} = out, tensor, acc, window_dims, opts, fun ) do args = [parameter(:reduce_window, type, {}, 0), parameter(:reduce_window, type, {}, 1)] {[tensor, acc], context} = to_exprs([tensor, acc]) fun = fun(args, fun) if fun.shape != {} do raise "reduce_window function must return a scalar tensor, got: #{inspect(fun.shape)}" end expr(out, context, :reduce_window, [tensor, acc, window_dims, opts, fun]) end @impl true def map(%{type: type} = out, tensor, fun) do args = [parameter(:map, type, {}, 0)] tensor = to_expr(tensor) expr(out, tensor.data.context, :map, [tensor, fun(args, fun)]) end @impl true def scatter_window_max(out, tensor, source, window_dims, opts, init_value) do {[tensor, source, init_value], context} = to_exprs([tensor, source, init_value]) expr(out, context, :scatter_window_max, [ tensor, source, window_dims, opts, init_value ]) end @impl true def scatter_window_min(out, tensor, source, window_dims, opts, init_value) do {[tensor, source, init_value], context} = to_exprs([tensor, source, init_value]) expr(out, context, :scatter_window_min, [ tensor, source, window_dims, opts, init_value ]) end @impl true def reshape(out, tensor, shape) do tensor = to_expr(tensor) expr(out, tensor.data.context, :reshape, [tensor, shape]) end @impl true def squeeze(out, tensor, axes) do tensor = to_expr(tensor) # If we are in a sequence of squeezes, we collapse them. # This helps us fuse the access syntax. 
with %T{data: %Expr{op: :squeeze, args: [tensor, inner_axes]}} <- tensor do axes = merge_squeeze(Enum.sort(inner_axes), Enum.sort(axes), 0) expr(out, tensor.data.context, :squeeze, [tensor, axes]) else _ -> expr(out, tensor.data.context, :squeeze, [tensor, axes]) end end defp merge_squeeze([inner_axis | inner_axes], [axis | axes], extra) when inner_axis <= axis + extra, do: [inner_axis | merge_squeeze(inner_axes, [axis | axes], extra + 1)] defp merge_squeeze(inner_axes, [axis | axes], extra), do: [axis + extra | merge_squeeze(inner_axes, axes, extra)] defp merge_squeeze([], [], _extra), do: [] @impl true def transpose(out, tensor, axes) do tensor = to_expr(tensor) expr(out, tensor.data.context, :transpose, [tensor, axes]) end @impl true def broadcast(out, tensor, shape, axes) do tensor = to_expr(tensor) expr(out, tensor.data.context, :broadcast, [tensor, shape, axes]) end @impl true def dot(out, t1, a1, t2, a2) do {[t1, t2], context} = to_exprs([t1, t2]) expr(out, context, :dot, [t1, a1, t2, a2]) end @impl true def conv(out, inp, kernel, opts) do {[inp, kernel], context} = to_exprs([inp, kernel]) expr(out, context, :conv, [inp, kernel, opts]) end @impl true def pad(out, expr, value, config) do {[expr, value], context} = to_exprs([expr, value]) expr(out, context, :pad, [expr, value, config]) end @impl true def select(out, pred, on_true, on_false) do {[pred, on_true, on_false], context} = to_exprs([pred, on_true, on_false]) expr(out, context, :select, [pred, on_true, on_false]) end @impl true def clip(out, operand, min, max) do {[operand, min, max], context} = to_exprs([operand, min, max]) expr(out, context, :clip, [operand, min, max]) end @impl true def slice(out, tensor, start, lengths, strides) do tensor = to_expr(tensor) # If we are in a sequence of slices, it is the access syntax, # so we compact them into a single slice. 
    with true <- ones_stride?(strides),
         {slice, axes} <- maybe_squeeze(tensor),
         %T{data: %Expr{op: :slice, args: [tensor, inner_start, inner_lengths, strides]}} <-
           slice,
         true <- ones_stride?(strides) do
      {start, lengths} =
        0
        |> merge_slice(axes, inner_start, start, inner_lengths, lengths)
        |> Enum.unzip()

      tensor
      |> Nx.slice(start, lengths)
      |> Nx.squeeze(axes: axes)
    else
      _ ->
        expr(out, tensor.data.context, :slice, [tensor, start, lengths, strides])
    end
  end

  defp ones_stride?(strides), do: Enum.all?(strides, &(&1 == 1))

  defp maybe_squeeze(%T{data: %Expr{op: :squeeze, args: [slice, axes]}}), do: {slice, axes}
  defp maybe_squeeze(slice), do: {slice, []}

  defp merge_slice(_axis, _axes, [], [], [], []), do: []

  defp merge_slice(axis, axes, [is | inner_start], start, [il | inner_lengths], lengths) do
    # This is one of the erased axes, so we need to get coordinates from inner
    if axis in axes do
      [{is, il} | merge_slice(axis + 1, axes, inner_start, start, inner_lengths, lengths)]
    else
      [s | start] = start
      [l | lengths] = lengths
      [{is + s, l} | merge_slice(axis + 1, axes, inner_start, start, inner_lengths, lengths)]
    end
  end

  @impl true
  def reverse(out, tensor, axes) do
    tensor = to_expr(tensor)
    expr(out, tensor.data.context, :reverse, [tensor, axes])
  end

  @impl true
  def concatenate(out, tensors, axis) do
    {tensors, context} = to_exprs(tensors)
    expr(out, context, :concatenate, [tensors, axis])
  end

  @impl true
  def cholesky(out, tensor) do
    tensor = to_expr(tensor)
    expr(out, tensor.data.context, :cholesky, [tensor])
  end

  @impl true
  def lu({p, l, u}, tensor, opts) do
    tensor = to_expr(tensor)
    context = tensor.data.context
    composite({p, l, u}, context, &expr(&1, context, :lu, [{p, l, u}, tensor, opts]))
  end

  @impl true
  def qr({q, r}, tensor, opts) do
    tensor = to_expr(tensor)
    context = tensor.data.context
    composite({q, r}, context, &expr(&1, context, :qr, [{q, r}, tensor, opts]))
  end

  @impl true
  def svd({u, s, vt}, tensor, opts) do
    tensor = to_expr(tensor)
    context = tensor.data.context
    composite({u, s, vt}, context, &expr(&1, context, :svd, [{u, s, vt}, tensor, opts]))
  end

  @impl true
  def sort(out, tensor, opts) do
    comparator = opts[:comparator]
    %{type: type} = out
    tensor = to_expr(tensor)

    args = [parameter(:sort, type, {}, 0), parameter(:sort, type, {}, 1)]
    comparator = to_nx_comparator(comparator)
    fun = fun(args, comparator)

    if fun.shape != {} do
      raise "sort comparator must return a scalar tensor, got: #{inspect(fun.shape)}"
    end

    if fun.type != {:u, 8} do
      raise "sort comparator must return a predicate type, got: #{inspect(fun.type)}"
    end

    expr(out, tensor.data.context, :sort, [tensor, opts, fun])
  end

  defp to_nx_comparator(:desc), do: &Nx.less/2
  defp to_nx_comparator(:asc), do: &Nx.greater/2
  defp to_nx_comparator(comp) when is_function(comp, 2), do: comp

  defp to_nx_comparator(_),
    do: raise(ArgumentError, "comparator must be either :desc or :asc or a function with arity 2")

  ## Undefined

  ops =
    [backend_copy: 3, backend_deallocate: 1, backend_transfer: 3] ++
      [from_binary: 3, to_binary: 2, to_batched_list: 2]

  for {op, arity} <- ops do
    args = Macro.generate_arguments(arity, __MODULE__)

    @impl true
    def unquote(op)(unquote_splicing(args)) do
      raise ArgumentError, """
      cannot invoke #{unquote(op)}/#{unquote(arity)} on Nx.Defn.Expr.
This typically means you are invoking an unsupported Nx function by code inside `defn` or JIT/AOT compiled code """ end end ## Helpers defp expr(tensor, context, op, args) do %{tensor | data: %Expr{id: id(), op: op, args: args, context: context}} end defp to_expr(%T{data: %Expr{}} = t), do: t defp to_expr(%T{} = t), do: expr(t, nil, :tensor, [t]) defp to_expr(number) when is_number(number), do: to_expr(Nx.tensor(number, backend: Nx.BinaryBackend)) defp to_expr(other) do raise ArgumentError, "unable to build tensor expression, expected a tensor or a number, " <> "got: #{inspect(other)}" end defp to_exprs(list) do Enum.map_reduce(list, nil, fn tensor, acc -> %{data: %{context: context}} = expr = to_expr(tensor) if context != acc and context != nil and acc != nil do raise """ cannot build defn because expressions come from different contexts: \ #{inspect(context)} and #{inspect(acc)}. This typically happens on anonymous functions, which do not behave \ like closures inside defn. For example, this is not valid: defn example(t, amplifier) do Nx.reduce(t, 0, fn val, acc -> val * amplifier + acc end) end In the example above, "amplifier" is a variable defined outside of \ the anonymous function, which is not allowed in defn. """ end {expr, context || acc} end) end ## Inspect import Inspect.Algebra @impl true def inspect(tensor, opts) do {_, acc} = inspect_expr(tensor, {[], [], %{}}) {_, {exprs, params, _var_map}} = Tree.traverse_args(tensor, acc, &inspect_expr/2) all = params |> Enum.reverse() |> Kernel.++(Enum.reverse(exprs)) header = concat(line(), color("Nx.Defn.Expr", :map, opts)) length = Enum.reduce(all, 0, fn {str, _tensor}, acc -> max(byte_size(str), acc) end) all |> Enum.map(fn {str, tensor} -> String.pad_trailing(str, length, " ") <> " " <> to_type_shape(tensor) end) |> Enum.uniq() |> Enum.reduce(header, &concat(&2, concat(line(), &1))) end # Scalars and funs are shown as is defp inspect_expr(%T{data: %Expr{op: :tensor}, shape: {}} = t, acc), do: {t, acc} defp inspect_expr(%T{data: %Expr{op: :fun}} = t, acc), do: {t, acc} defp inspect_expr(%T{data: %Expr{op: op, id: id}} = t, {exprs, params, var_map}) when op in [:tensor, :parameter] do {var, var_map} = var_for_id(var_map, id) param = Atom.to_string(op) <> " " <> var {t, {exprs, [{param, t} | params], var_map}} end defp inspect_expr(%T{} = t, acc) do %{data: %Expr{id: id, op: op, args: args}} = t {_, {exprs, params, var_map}} = Tree.traverse_args(t, acc, &inspect_expr/2) {var, var_map} = var_for_id(var_map, id) args_str = inspect_args(op, args, var_map) expr_str = var <> " = " <> Atom.to_string(op) <> " [ " <> args_str <> " ]" {t, {[{expr_str, t} | exprs], params, var_map}} end defp inspect_args(:cond, [clauses, last], var_map) do clauses = Enum.map(clauses, fn {pred, expr} -> [inspect_arg(pred, var_map), " -> ", inspect_arg(expr, var_map), ", "] end) IO.iodata_to_binary([clauses, ":otherwise -> ", inspect_arg(last, var_map)]) end defp inspect_args(:metadata, [expr, metadata], var_map) do IO.iodata_to_binary([inspect_arg(expr, var_map), ", ", inspect(Map.keys(metadata))]) end defp inspect_args(_op, args, var_map), do: inspect_args(args, var_map) defp inspect_args(args, var_map) do Enum.map_join(args, ", ", &inspect_arg(&1, var_map)) end defp inspect_arg(arg, var_map) do case arg do %T{data: %Expr{op: :fun, args: [_, _, fun]}} -> inspect(fun) %T{data: %Expr{op: :tensor, args: [t]}, shape: {}} -> try do t |> Nx.to_scalar() |> to_string() rescue _ -> "SCALAR" end %T{data: %Expr{id: id}} -> Map.fetch!(var_map, id) _ -> cond do 
Keyword.keyword?(arg) and arg != [] -> Enum.map_join(arg, ", ", fn {k, v} -> "#{k}: #{inspect(v)}" end) is_list(arg) -> [?[, inspect_args(arg, var_map), ?]] is_tuple(arg) -> [?{, inspect_args(Tuple.to_list(arg), var_map), ?}] true -> inspect(arg) end end end defp var_for_id(var_map, id) do case var_map do %{^id => var} -> {var, var_map} %{} -> var = IO.iodata_to_binary(counter_to_name(map_size(var_map))) {var, Map.put(var_map, id, var)} end end defp counter_to_name(counter) when counter >= 26 do [counter_to_name(div(counter, 26)) | counter_to_name(rem(counter, 26))] end defp counter_to_name(counter), do: [Enum.at(?a..?z, counter)] defp to_type_shape(%{type: type, shape: shape}) do brackets = shape |> Tuple.to_list() |> Enum.map(&[?[, Integer.to_string(&1), ?]]) IO.iodata_to_binary([Nx.Type.to_string(type) | brackets]) end end
lib/nx/defn/expr.ex
0.755637
0.871092
expr.ex
starcoder
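# A small sketch of how this backend records operations instead of computing
# them, assuming Nx is available. The parameters and the arithmetic below are
# illustrative; inspecting the result prints the expression graph.
a = Nx.Defn.Expr.parameter(:example, {:f, 32}, {2, 2}, 0)
b = Nx.Defn.Expr.parameter(:example, {:f, 32}, {2, 2}, 1)

# Dispatches through the Nx.Backend callbacks defined above, building
# add/multiply nodes rather than numeric results.
graph = Nx.add(a, Nx.multiply(b, 2))
IO.inspect(graph)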
defmodule ExUnit.FailuresManifest do @moduledoc false @type test_id :: {module, name :: atom} @opaque t :: %{test_id => test_file :: Path.t()} @manifest_vsn 1 @spec new() :: t def new, do: %{} @spec files_with_failures(t) :: MapSet.t(Path.t()) def files_with_failures(%{} = manifest) do manifest |> Map.values() |> MapSet.new() end @spec failed_test_ids(t) :: MapSet.t(test_id) def failed_test_ids(%{} = manifest) do manifest |> Map.keys() |> MapSet.new() end @spec put_test(t, ExUnit.Test.t()) :: t def put_test(%{} = manifest, %ExUnit.Test{state: {ignored_state, _}}) when ignored_state in [:skipped, :excluded], do: manifest def put_test(%{} = manifest, %ExUnit.Test{state: nil} = test) do Map.delete(manifest, {test.module, test.name}) end def put_test(%{} = manifest, %ExUnit.Test{state: {failed_state, _}} = test) when failed_state in [:failed, :invalid] do Map.put(manifest, {test.module, test.name}, test.tags.file) end @spec write!(t, Path.t()) :: :ok def write!(manifest, file) when is_binary(file) do manifest = prune_deleted_tests(manifest) binary = :erlang.term_to_binary({@manifest_vsn, manifest}) Path.dirname(file) |> File.mkdir_p!() File.write!(file, binary) end @spec read(Path.t()) :: t def read(file) when is_binary(file) do with {:ok, binary} <- File.read(file), {:ok, {@manifest_vsn, manifest}} when is_map(manifest) <- safe_binary_to_term(binary) do manifest else _ -> new() end end defp safe_binary_to_term(binary) do {:ok, :erlang.binary_to_term(binary)} rescue ArgumentError -> :error end defp prune_deleted_tests(manifest) do Map.drop(manifest, find_deleted_tests(Enum.to_list(manifest), %{}, [])) end defp find_deleted_tests([], _file_existence, deleted_tests), do: deleted_tests defp find_deleted_tests([{{mod, name} = id, file} | rest] = all, file_existence, acc) do file_exists = Map.fetch(file_existence, file) cond do file_exists == :error -> # This is the first time we've looked up the existence of the file. # Cache the result and try again. file_existence = Map.put(file_existence, file, File.regular?(file)) find_deleted_tests(all, file_existence, acc) file_exists == {:ok, false} -> # The file does not exist, so the test has been deleted. find_deleted_tests(rest, file_existence, [id | acc]) :code.is_loaded(mod) != false and not function_exported?(mod, name, 1) -> # The test module has been loaded, but the test no longer exists. find_deleted_tests(rest, file_existence, [id | acc]) true -> # The file exists and the test module was not loaded (which means the test # *might* still exist) or the function is exported (which means the test # *definitely* still exists). Either way, we do not want to prune it. find_deleted_tests(rest, file_existence, acc) end end end
lib/ex_unit/lib/ex_unit/failures_manifest.ex
0.682785
0.414188
failures_manifest.ex
starcoder
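# A sketch of how ExUnit consults this (private, @moduledoc false) module
# between runs; the manifest path below is hypothetical and the API is
# internal, so it may change between Elixir versions.
manifest = ExUnit.FailuresManifest.read("_build/test/.ex_unit_failures")

# Test ids that failed on the previous run, used by `mix test --failed`.
failed_ids = ExUnit.FailuresManifest.failed_test_ids(manifest)

# Files that contained at least one failing test.
files = ExUnit.FailuresManifest.files_with_failures(manifest)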
defmodule Sanbase.Clickhouse.Label do @moduledoc """ Labeling addresses """ import Sanbase.Utils.Transform, only: [maybe_apply_function: 2] import Sanbase.Metric.SqlQuery.Helper, only: [label_id_by_label_fqn_filter: 2, label_id_by_label_key_filter: 2] @type label :: %{ name: String.t(), metadata: String.t() } def list_all(:all = _blockchain) do query = """ SELECT DISTINCT(label) FROM blockchain_address_labels """ Sanbase.ClickhouseRepo.query_transform(query, [], fn [label] -> label end) end def list_all(blockchain) do query = """ SELECT DISTINCT(label) FROM blockchain_address_labels PREWHERE blockchain = ?1 """ Sanbase.ClickhouseRepo.query_transform(query, [blockchain], fn [label] -> label end) end def addresses_by_labels(label_fqn_or_fqns, opts \\ []) def addresses_by_labels(label_fqn_or_fqns, opts) do blockchain = Keyword.get(opts, :blockchain) label_fqns = label_fqn_or_fqns |> List.wrap() |> Enum.map(&String.downcase/1) {query, args} = addresses_by_label_fqns_query(label_fqns, blockchain) Sanbase.ClickhouseRepo.query_reduce( query, args, %{}, fn [address, blockchain, label_fqn], acc -> Map.update(acc, {address, blockchain}, [label_fqn], &[label_fqn | &1]) end ) |> maybe_apply_function(fn address_blockchain_labels_map -> apply_addresses_labels_combinator(address_blockchain_labels_map, label_fqns, opts) end) end def addresses_by_label_keys(label_key_or_keys, opts \\ []) def addresses_by_label_keys(label_key_or_keys, opts) do blockchain = Keyword.get(opts, :blockchain) label_keys = label_key_or_keys |> List.wrap() |> Enum.map(&String.downcase/1) {query, args} = addresses_by_label_keys_query(label_keys, blockchain) Sanbase.ClickhouseRepo.query_reduce( query, args, %{}, fn [address, blockchain, label_fqn], acc -> Map.update(acc, {address, blockchain}, [label_fqn], &[label_fqn | &1]) end ) |> maybe_apply_function(fn address_blockchain_labels_map -> apply_addresses_labels_combinator(address_blockchain_labels_map, label_keys, []) end) end defp apply_addresses_labels_combinator( address_blockchain_labels_map, label_fqns, opts ) do case Keyword.get(opts, :labels_combinator, :or) do :or -> address_blockchain_labels_map :and -> # Reject all addresses that don't have all the required label_fqns Enum.reject(address_blockchain_labels_map, fn {_address_blockchain, address_label_fqns} -> Enum.any?(label_fqns, &(&1 not in address_label_fqns)) end) end |> Enum.map(fn {{address, blockchain}, _labels} -> %{ address: address, infrastructure: Sanbase.BlockchainAddress.infrastructure_from_blockchain(blockchain) } end) end def add_labels(_, []), do: {:ok, []} def add_labels(slug, maps) when is_list(maps) do addresses = get_list_of_addresses(maps) blockchain = slug_to_blockchain(slug) {query, args} = addresses_labels_query(slug, blockchain, addresses) result = Sanbase.ClickhouseRepo.query_reduce( query, args, %{}, fn [address, label, metadata], acc -> label = %{name: label, metadata: metadata, origin: "santiment"} Map.update(acc, address, [label], &[label | &1]) end ) case result do {:ok, labels_map} -> {:ok, do_add_labels(maps, labels_map)} {:error, reason} -> {:error, reason} end end def get_address_labels(_slug, []), do: {:ok, %{}} def get_address_labels(slug, addresses) when is_list(addresses) do blockchain = slug_to_blockchain(slug) {query, args} = addresses_labels_query(slug, blockchain, addresses) Sanbase.ClickhouseRepo.query_reduce( query, args, %{}, fn [address, label, metadata], acc -> label = %{name: label, metadata: metadata, origin: "santiment"} Map.update(acc, address, [label], &[label | &1]) end ) 
end # Private functions # For backwards compatibility, if the slug is nil treat it as ethereum blockchain def slug_to_blockchain(nil), do: "ethereum" def slug_to_blockchain(slug), do: Sanbase.Model.Project.slug_to_blockchain(slug) def addresses_by_label_fqns_query(label_fqns, _blockchain = nil) do query = """ SELECT address, blockchain, dictGetString('default.labels_dict', 'fqn', label_id) AS label_fqn FROM label_addresses PREWHERE #{label_id_by_label_fqn_filter(label_fqns, argument_position: 1)} GROUP BY address, blockchain, label_id LIMIT 20000 """ args = [label_fqns] {query, args} end def addresses_by_label_fqns_query(label_fqns, blockchain) do query = """ SELECT address, blockchain, dictGetString('default.labels_dict', 'fqn', label_id) AS label_fqn FROM label_addresses PREWHERE #{label_id_by_label_fqn_filter(label_fqns, argument_position: 1)} AND blockchain = ?2 GROUP BY address, blockchain, label_id LIMIT 20000 """ args = [label_fqns, blockchain] {query, args} end def addresses_by_label_keys_query(label_keys, _blockchain = nil) do query = """ SELECT address, blockchain, dictGetString('default.labels_dict', 'fqn', label_id) AS label_fqn FROM label_addresses PREWHERE #{label_id_by_label_key_filter(label_keys, argument_position: 1)} GROUP BY address, blockchain, label_id LIMIT 20000 """ args = [label_keys] {query, args} end def addresses_by_label_keys_query(label_keys, blockchain) do query = """ SELECT address, blockchain, dictGetString('default.labels_dict', 'fqn', label_id) AS label_fqn FROM label_addresses PREWHERE #{label_id_by_label_key_filter(label_keys, argument_position: 1)} AND blockchain = ?2 GROUP BY address, blockchain, label_id LIMIT 20000 """ args = [label_keys, blockchain] {query, args} end defp addresses_labels_query(slug, "ethereum", addresses) do query = create_addresses_labels_query(slug) args = case slug do nil -> [addresses] _ -> [addresses, slug] end {query, args} end defp addresses_labels_query(_slug, blockchain, addresses) do query = """ SELECT address, label, metadata FROM( SELECT address, label, argMax(metadata, version) AS metadata, argMax(sign, version) AS sign FROM blockchain_address_labels PREWHERE blockchain = ?1 AND address IN (?2) GROUP BY blockchain, asset_id, label, address HAVING sign = 1 ) """ {query, [blockchain, addresses]} end defp get_list_of_addresses(maps) do maps |> Enum.flat_map(fn map -> [ Map.get(map, :address) && map.address.address, Map.get(map, :from_address) && map.from_address.address, Map.get(map, :to_address) && map.to_address.address ] end) |> Enum.uniq() |> Enum.reject(&is_nil/1) end defp do_add_labels(maps, labels_map) do add_labels = fn # In this case the address type does not exist, so the result is not used nil -> nil map -> labels = Map.get(labels_map, map.address, []) |> Enum.sort_by(& &1.name) Map.put(map, :labels, labels) end maps |> Enum.map(fn %{} = map -> map |> Map.replace(:address, add_labels.(Map.get(map, :address))) |> Map.replace(:from_address, add_labels.(Map.get(map, :from_address))) |> Map.replace(:to_address, add_labels.(Map.get(map, :to_address))) end) end defp create_addresses_labels_query(slug) do """ SELECT address, label, concat('\{', '"owner": "', owner, '"\}') as metadata FROM ( SELECT address, arrayJoin(labels_owners_filtered) as label_owner, label_owner.1 as label_raw, label_owner.2 as owner, multiIf( owner = 'uniswap router', 'Uniswap Router', label_raw='uniswap_ecosystem', 'Uniswap Ecosystem', label_raw='cex_dex_trader', 'CEX & DEX Trader', label_raw='centralized_exchange', 'CEX', 
        label_raw='decentralized_exchange', 'DEX',
        label_raw='withdrawal', 'CEX Trader',
        label_raw='dex_trader', 'DEX Trader',
        #{whale_filter(slug, position: 2)}
        label_raw='deposit', 'CEX Deposit',
        label_raw='defi', 'DeFi',
        label_raw='deployer', 'Deployer',
        label_raw='stablecoin', 'Stablecoin',
        label_raw='uniswap_ecosystem', 'Uniswap',
        label_raw='makerdao-cdp-owner', 'MakerDAO CDP Owner',
        label_raw='makerdao-bite-keeper', 'MakerDAO Bite Keeper',
        label_raw='genesis', 'Genesis',
        label_raw='proxy', 'Proxy',
        label_raw='system', 'System',
        label_raw='miner', 'Miner',
        label_raw='contract_factory', 'Contract Factory',
        label_raw='derivative_token', 'Derivative Token',
        label_raw='eth2stakingcontract', 'ETH2 Staking Contract',
        label_raw
      ) as label
    FROM (
      SELECT address_hash,
             address,
             asset_id,
             splitByChar(',', labels) as label_arr,
             splitByChar(',', owners) as owner_arr,
             arrayZip(label_arr, owner_arr) as labels_owners,
             multiIf(
               -- if there is the `system` label for an address, we exclude other labels
               has(label_arr, 'system'), arrayFilter(x -> x.1 = 'system', labels_owners),
               -- if an address has a `centralized_exchange` label and at least one of the `deposit` and
               -- `withdrawal` labels, we exclude the `deposit` and `withdrawal` labels.
               has(label_arr, 'centralized_exchange') AND hasAny(label_arr, ['deposit', 'withdrawal']),
               arrayFilter(x -> x.1 NOT IN ('deposit', 'withdrawal'), labels_owners),
               -- if there are the `dex_trader` and `decentralized_exchange` labels for an address, we exclude the `dex_trader` label
               hasAll(label_arr, ['dex_trader', 'decentralized_exchange']),
               arrayFilter(x -> x.1 != 'dex_trader', labels_owners),
               -- if there are the `deposit` and `withdrawal` labels for an address, we exclude the `withdrawal` label
               hasAll(label_arr, ['deposit', 'withdrawal']),
               arrayFilter(x -> x.1 != 'withdrawal', labels_owners),
               -- if there are the `dex_trader` and `withdrawal` labels for an address, we replace them with the `cex_dex_trader` label
               hasAll(label_arr, ['dex_trader', 'withdrawal']),
               arrayPushFront(
                 arrayFilter(x -> x.1 NOT IN ['dex_trader', 'withdrawal'], labels_owners),
                 ('cex_dex_trader', arrayFilter(x -> x.1 == 'withdrawal', labels_owners)[1].2)
               ),
               labels_owners
             ) as labels_owners_filtered
      FROM eth_labels_final
      ANY INNER JOIN (
        SELECT cityHash64(address) as address_hash,
               address
        FROM (
          SELECT lower(arrayJoin([?1])) as address
        )
      )
      USING address_hash
      PREWHERE address_hash IN (
        SELECT cityHash64(address)
        FROM (
          SELECT lower(arrayJoin([?1])) as address
        )
      )
    )
    ANY LEFT JOIN (
      select asset_id, name from asset_metadata
    ) USING asset_id
    )
    WHERE label != 'whale_wrong'
    """
  end

  defp whale_filter(nil, _) do
    """
    label_raw='whale', concat('Whale, token:', name),
    """
  end

  defp whale_filter(slug, opts) when is_binary(slug) do
    position = Keyword.fetch!(opts, :position)

    """
    label_raw='whale' AND asset_id = (SELECT asset_id FROM asset_metadata FINAL PREWHERE name = ?#{position}), 'Whale',
    label_raw='whale' AND asset_id != (SELECT asset_id FROM asset_metadata FINAL PREWHERE name = ?#{position}), 'whale_wrong',
    """
  end
end
lib/sanbase/clickhouse/label/label.ex
0.724968
0.410284
label.ex
starcoder
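# Hypothetical call shapes for the module above, assuming a configured
# ClickhouseRepo; the label fqn and blockchain literals are illustrative only.
{:ok, address_infos} =
  Sanbase.Clickhouse.Label.addresses_by_labels(
    "santiment/centralized_exchange:v1",
    blockchain: "ethereum",
    labels_combinator: :or
  )

# Given a list of transfer maps (with :from_address / :to_address keys) built
# elsewhere, enrich each address with its labels.
{:ok, labeled_transfers} = Sanbase.Clickhouse.Label.add_labels("ethereum", transfers)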
defmodule ExTwilio.Account do @moduledoc """ Represents an Account or Subaccount resource. - [Account docs](https://www.twilio.com/docs/iam/api/account) - [Subaccount docs](https://www.twilio.com/docs/api/rest/subaccounts) ## Examples An ExTwilio.Account can represent either an Account or a SubAccount. To see all accounts and subaccounts that your auth_token has access to, run: ExTwilio.Account.all If you want to find a SubAccount, use `find/1`. ExTwilio.Account.find("sid") If you want to see items associated with a SubAccount, you can do so by passing in an `account:` option in all other ExTwilio resources. For example: ExTwilio.Call.stream(account: "subaccount_sid") """ defstruct sid: nil, owner_account_sid: nil, date_created: nil, date_updated: nil, friendly_name: nil, type: nil, status: nil, auth_token: nil, uri: nil, subresource_uris: nil use ExTwilio.Resource, import: [:stream, :all, :find, :create, :update] @doc """ Suspend an Account by updating its status to "suspended". - [Twilio Docs](https://www.twilio.com/docs/api/rest/subaccounts#suspending-subaccounts) ## Example {:ok, account} = ExTwilio.Account.find("<sid>") ExTwilio.Account.suspend(account) """ @spec suspend(map | String.t()) :: Parser.success() | Parser.error() def suspend(%{sid: sid}), do: suspend(sid) def suspend(sid), do: update(sid, status: "suspended") @doc """ Reactivate a suspended Account by updating its status to "active". - [Twilio Docs](https://www.twilio.com/docs/api/rest/subaccounts#suspending-subaccounts) ## Example {:ok, account} = ExTwilio.Account.find("<sid>") ExTwilio.Account.reactivate(account) """ @spec reactivate(map | String.t()) :: Parser.success() | Parser.error() def reactivate(%{sid: sid}), do: reactivate(sid) def reactivate(sid), do: update(sid, status: "active") @doc """ Permanently close an Account by updating its status to "closed". This cannot be undone, so use it carefully! - [Twilio Docs](https://www.twilio.com/docs/api/rest/subaccounts#closing-subaccounts) ## Example {:ok, account} = ExTwilio.Account.find("<sid>") ExTwilio.Account.close(account) """ @spec close(map | String.t()) :: Parser.success() | Parser.error() def close(%{sid: sid}), do: close(sid) def close(sid), do: update(sid, status: "closed") def parents, do: [:account] end
lib/ex_twilio/resources/account.ex
0.832203
0.490236
account.ex
starcoder
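# A short sketch of the subaccount lifecycle using the helpers above, assuming
# ExTwilio is configured with valid credentials; the SID is a placeholder.
{:ok, account} = ExTwilio.Account.find("ACxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx")

{:ok, _suspended} = ExTwilio.Account.suspend(account)
{:ok, _active} = ExTwilio.Account.reactivate(account)

# Closing is permanent and cannot be undone.
{:ok, _closed} = ExTwilio.Account.close(account)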
defmodule AWS.Elasticsearch do @moduledoc """ Amazon Elasticsearch Configuration Service Use the Amazon Elasticsearch Configuration API to create, configure, and manage Elasticsearch domains. For sample code that uses the Configuration API, see the [Amazon Elasticsearch Service Developer Guide](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-configuration-samples.html). The guide also contains [sample code for sending signed HTTP requests to the Elasticsearch APIs](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-request-signing.html). The endpoint for configuration service requests is region-specific: es.*region*.amazonaws.com. For example, es.us-east-1.amazonaws.com. For a current list of supported regions and endpoints, see [Regions and Endpoints](http://docs.aws.amazon.com/general/latest/gr/rande.html#elasticsearch-service-regions). """ @doc """ Allows the destination domain owner to accept an inbound cross-cluster search connection request. """ def accept_inbound_cross_cluster_search_connection(client, cross_cluster_search_connection_id, input, options \\ []) do path_ = "/2015-01-01/es/ccs/inboundConnection/#{URI.encode(cross_cluster_search_connection_id)}/accept" headers = [] query_ = [] request(client, :put, path_, query_, headers, input, options, nil) end @doc """ Attaches tags to an existing Elasticsearch domain. Tags are a set of case-sensitive key value pairs. An Elasticsearch domain may have up to 10 tags. See [ Tagging Amazon Elasticsearch Service Domains for more information.](http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-managedomains.html#es-managedomains-awsresorcetagging) """ def add_tags(client, input, options \\ []) do path_ = "/2015-01-01/tags" headers = [] query_ = [] request(client, :post, path_, query_, headers, input, options, nil) end @doc """ Associates a package with an Amazon ES domain. """ def associate_package(client, domain_name, package_i_d, input, options \\ []) do path_ = "/2015-01-01/packages/associate/#{URI.encode(package_i_d)}/#{URI.encode(domain_name)}" headers = [] query_ = [] request(client, :post, path_, query_, headers, input, options, nil) end @doc """ Cancels a scheduled service software update for an Amazon ES domain. You can only perform this operation before the `AutomatedUpdateDate` and when the `UpdateStatus` is in the `PENDING_UPDATE` state. """ def cancel_elasticsearch_service_software_update(client, input, options \\ []) do path_ = "/2015-01-01/es/serviceSoftwareUpdate/cancel" headers = [] query_ = [] request(client, :post, path_, query_, headers, input, options, nil) end @doc """ Creates a new Elasticsearch domain. For more information, see [Creating Elasticsearch Domains](http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomains) in the *Amazon Elasticsearch Service Developer Guide*. """ def create_elasticsearch_domain(client, input, options \\ []) do path_ = "/2015-01-01/es/domain" headers = [] query_ = [] request(client, :post, path_, query_, headers, input, options, nil) end @doc """ Creates a new cross-cluster search connection from a source domain to a destination domain. """ def create_outbound_cross_cluster_search_connection(client, input, options \\ []) do path_ = "/2015-01-01/es/ccs/outboundConnection" headers = [] query_ = [] request(client, :post, path_, query_, headers, input, options, nil) end @doc """ Create a package for use with Amazon ES domains. 
""" def create_package(client, input, options \\ []) do path_ = "/2015-01-01/packages" headers = [] query_ = [] request(client, :post, path_, query_, headers, input, options, nil) end @doc """ Permanently deletes the specified Elasticsearch domain and all of its data. Once a domain is deleted, it cannot be recovered. """ def delete_elasticsearch_domain(client, domain_name, input, options \\ []) do path_ = "/2015-01-01/es/domain/#{URI.encode(domain_name)}" headers = [] query_ = [] request(client, :delete, path_, query_, headers, input, options, nil) end @doc """ Deletes the service-linked role that Elasticsearch Service uses to manage and maintain VPC domains. Role deletion will fail if any existing VPC domains use the role. You must delete any such Elasticsearch domains before deleting the role. See [Deleting Elasticsearch Service Role](http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-vpc.html#es-enabling-slr) in *VPC Endpoints for Amazon Elasticsearch Service Domains*. """ def delete_elasticsearch_service_role(client, input, options \\ []) do path_ = "/2015-01-01/es/role" headers = [] query_ = [] request(client, :delete, path_, query_, headers, input, options, nil) end @doc """ Allows the destination domain owner to delete an existing inbound cross-cluster search connection. """ def delete_inbound_cross_cluster_search_connection(client, cross_cluster_search_connection_id, input, options \\ []) do path_ = "/2015-01-01/es/ccs/inboundConnection/#{URI.encode(cross_cluster_search_connection_id)}" headers = [] query_ = [] request(client, :delete, path_, query_, headers, input, options, nil) end @doc """ Allows the source domain owner to delete an existing outbound cross-cluster search connection. """ def delete_outbound_cross_cluster_search_connection(client, cross_cluster_search_connection_id, input, options \\ []) do path_ = "/2015-01-01/es/ccs/outboundConnection/#{URI.encode(cross_cluster_search_connection_id)}" headers = [] query_ = [] request(client, :delete, path_, query_, headers, input, options, nil) end @doc """ Delete the package. """ def delete_package(client, package_i_d, input, options \\ []) do path_ = "/2015-01-01/packages/#{URI.encode(package_i_d)}" headers = [] query_ = [] request(client, :delete, path_, query_, headers, input, options, nil) end @doc """ Returns domain configuration information about the specified Elasticsearch domain, including the domain ID, domain endpoint, and domain ARN. """ def describe_elasticsearch_domain(client, domain_name, options \\ []) do path_ = "/2015-01-01/es/domain/#{URI.encode(domain_name)}" headers = [] query_ = [] request(client, :get, path_, query_, headers, nil, options, nil) end @doc """ Provides cluster configuration information about the specified Elasticsearch domain, such as the state, creation date, update version, and update date for cluster options. """ def describe_elasticsearch_domain_config(client, domain_name, options \\ []) do path_ = "/2015-01-01/es/domain/#{URI.encode(domain_name)}/config" headers = [] query_ = [] request(client, :get, path_, query_, headers, nil, options, nil) end @doc """ Returns domain configuration information about the specified Elasticsearch domains, including the domain ID, domain endpoint, and domain ARN. 
""" def describe_elasticsearch_domains(client, input, options \\ []) do path_ = "/2015-01-01/es/domain-info" headers = [] query_ = [] request(client, :post, path_, query_, headers, input, options, nil) end @doc """ Describe Elasticsearch Limits for a given InstanceType and ElasticsearchVersion. When modifying existing Domain, specify the ` `DomainName` ` to know what Limits are supported for modifying. """ def describe_elasticsearch_instance_type_limits(client, elasticsearch_version, instance_type, domain_name \\ nil, options \\ []) do path_ = "/2015-01-01/es/instanceTypeLimits/#{URI.encode(elasticsearch_version)}/#{URI.encode(instance_type)}" headers = [] query_ = [] query_ = if !is_nil(domain_name) do [{"domainName", domain_name} | query_] else query_ end request(client, :get, path_, query_, headers, nil, options, nil) end @doc """ Lists all the inbound cross-cluster search connections for a destination domain. """ def describe_inbound_cross_cluster_search_connections(client, input, options \\ []) do path_ = "/2015-01-01/es/ccs/inboundConnection/search" headers = [] query_ = [] request(client, :post, path_, query_, headers, input, options, nil) end @doc """ Lists all the outbound cross-cluster search connections for a source domain. """ def describe_outbound_cross_cluster_search_connections(client, input, options \\ []) do path_ = "/2015-01-01/es/ccs/outboundConnection/search" headers = [] query_ = [] request(client, :post, path_, query_, headers, input, options, nil) end @doc """ Describes all packages available to Amazon ES. Includes options for filtering, limiting the number of results, and pagination. """ def describe_packages(client, input, options \\ []) do path_ = "/2015-01-01/packages/describe" headers = [] query_ = [] request(client, :post, path_, query_, headers, input, options, nil) end @doc """ Lists available reserved Elasticsearch instance offerings. """ def describe_reserved_elasticsearch_instance_offerings(client, max_results \\ nil, next_token \\ nil, reserved_elasticsearch_instance_offering_id \\ nil, options \\ []) do path_ = "/2015-01-01/es/reservedInstanceOfferings" headers = [] query_ = [] query_ = if !is_nil(reserved_elasticsearch_instance_offering_id) do [{"offeringId", reserved_elasticsearch_instance_offering_id} | query_] else query_ end query_ = if !is_nil(next_token) do [{"nextToken", next_token} | query_] else query_ end query_ = if !is_nil(max_results) do [{"maxResults", max_results} | query_] else query_ end request(client, :get, path_, query_, headers, nil, options, nil) end @doc """ Returns information about reserved Elasticsearch instances for this account. """ def describe_reserved_elasticsearch_instances(client, max_results \\ nil, next_token \\ nil, reserved_elasticsearch_instance_id \\ nil, options \\ []) do path_ = "/2015-01-01/es/reservedInstances" headers = [] query_ = [] query_ = if !is_nil(reserved_elasticsearch_instance_id) do [{"reservationId", reserved_elasticsearch_instance_id} | query_] else query_ end query_ = if !is_nil(next_token) do [{"nextToken", next_token} | query_] else query_ end query_ = if !is_nil(max_results) do [{"maxResults", max_results} | query_] else query_ end request(client, :get, path_, query_, headers, nil, options, nil) end @doc """ Dissociates a package from the Amazon ES domain. 
""" def dissociate_package(client, domain_name, package_i_d, input, options \\ []) do path_ = "/2015-01-01/packages/dissociate/#{URI.encode(package_i_d)}/#{URI.encode(domain_name)}" headers = [] query_ = [] request(client, :post, path_, query_, headers, input, options, nil) end @doc """ Returns a list of upgrade compatible Elastisearch versions. You can optionally pass a ` `DomainName` ` to get all upgrade compatible Elasticsearch versions for that specific domain. """ def get_compatible_elasticsearch_versions(client, domain_name \\ nil, options \\ []) do path_ = "/2015-01-01/es/compatibleVersions" headers = [] query_ = [] query_ = if !is_nil(domain_name) do [{"domainName", domain_name} | query_] else query_ end request(client, :get, path_, query_, headers, nil, options, nil) end @doc """ Retrieves the complete history of the last 10 upgrades that were performed on the domain. """ def get_upgrade_history(client, domain_name, max_results \\ nil, next_token \\ nil, options \\ []) do path_ = "/2015-01-01/es/upgradeDomain/#{URI.encode(domain_name)}/history" headers = [] query_ = [] query_ = if !is_nil(next_token) do [{"nextToken", next_token} | query_] else query_ end query_ = if !is_nil(max_results) do [{"maxResults", max_results} | query_] else query_ end request(client, :get, path_, query_, headers, nil, options, nil) end @doc """ Retrieves the latest status of the last upgrade or upgrade eligibility check that was performed on the domain. """ def get_upgrade_status(client, domain_name, options \\ []) do path_ = "/2015-01-01/es/upgradeDomain/#{URI.encode(domain_name)}/status" headers = [] query_ = [] request(client, :get, path_, query_, headers, nil, options, nil) end @doc """ Returns the name of all Elasticsearch domains owned by the current user's account. """ def list_domain_names(client, options \\ []) do path_ = "/2015-01-01/domain" headers = [] query_ = [] request(client, :get, path_, query_, headers, nil, options, nil) end @doc """ Lists all Amazon ES domains associated with the package. 
""" def list_domains_for_package(client, package_i_d, max_results \\ nil, next_token \\ nil, options \\ []) do path_ = "/2015-01-01/packages/#{URI.encode(package_i_d)}/domains" headers = [] query_ = [] query_ = if !is_nil(next_token) do [{"nextToken", next_token} | query_] else query_ end query_ = if !is_nil(max_results) do [{"maxResults", max_results} | query_] else query_ end request(client, :get, path_, query_, headers, nil, options, nil) end @doc """ List all Elasticsearch instance types that are supported for given ElasticsearchVersion """ def list_elasticsearch_instance_types(client, elasticsearch_version, domain_name \\ nil, max_results \\ nil, next_token \\ nil, options \\ []) do path_ = "/2015-01-01/es/instanceTypes/#{URI.encode(elasticsearch_version)}" headers = [] query_ = [] query_ = if !is_nil(next_token) do [{"nextToken", next_token} | query_] else query_ end query_ = if !is_nil(max_results) do [{"maxResults", max_results} | query_] else query_ end query_ = if !is_nil(domain_name) do [{"domainName", domain_name} | query_] else query_ end request(client, :get, path_, query_, headers, nil, options, nil) end @doc """ List all supported Elasticsearch versions """ def list_elasticsearch_versions(client, max_results \\ nil, next_token \\ nil, options \\ []) do path_ = "/2015-01-01/es/versions" headers = [] query_ = [] query_ = if !is_nil(next_token) do [{"nextToken", next_token} | query_] else query_ end query_ = if !is_nil(max_results) do [{"maxResults", max_results} | query_] else query_ end request(client, :get, path_, query_, headers, nil, options, nil) end @doc """ Lists all packages associated with the Amazon ES domain. """ def list_packages_for_domain(client, domain_name, max_results \\ nil, next_token \\ nil, options \\ []) do path_ = "/2015-01-01/domain/#{URI.encode(domain_name)}/packages" headers = [] query_ = [] query_ = if !is_nil(next_token) do [{"nextToken", next_token} | query_] else query_ end query_ = if !is_nil(max_results) do [{"maxResults", max_results} | query_] else query_ end request(client, :get, path_, query_, headers, nil, options, nil) end @doc """ Returns all tags for the given Elasticsearch domain. """ def list_tags(client, a_r_n, options \\ []) do path_ = "/2015-01-01/tags/" headers = [] query_ = [] query_ = if !is_nil(a_r_n) do [{"arn", a_r_n} | query_] else query_ end request(client, :get, path_, query_, headers, nil, options, nil) end @doc """ Allows you to purchase reserved Elasticsearch instances. """ def purchase_reserved_elasticsearch_instance_offering(client, input, options \\ []) do path_ = "/2015-01-01/es/purchaseReservedInstanceOffering" headers = [] query_ = [] request(client, :post, path_, query_, headers, input, options, nil) end @doc """ Allows the destination domain owner to reject an inbound cross-cluster search connection request. """ def reject_inbound_cross_cluster_search_connection(client, cross_cluster_search_connection_id, input, options \\ []) do path_ = "/2015-01-01/es/ccs/inboundConnection/#{URI.encode(cross_cluster_search_connection_id)}/reject" headers = [] query_ = [] request(client, :put, path_, query_, headers, input, options, nil) end @doc """ Removes the specified set of tags from the specified Elasticsearch domain. """ def remove_tags(client, input, options \\ []) do path_ = "/2015-01-01/tags-removal" headers = [] query_ = [] request(client, :post, path_, query_, headers, input, options, nil) end @doc """ Schedules a service software update for an Amazon ES domain. 
""" def start_elasticsearch_service_software_update(client, input, options \\ []) do path_ = "/2015-01-01/es/serviceSoftwareUpdate/start" headers = [] query_ = [] request(client, :post, path_, query_, headers, input, options, nil) end @doc """ Modifies the cluster configuration of the specified Elasticsearch domain, setting as setting the instance type and the number of instances. """ def update_elasticsearch_domain_config(client, domain_name, input, options \\ []) do path_ = "/2015-01-01/es/domain/#{URI.encode(domain_name)}/config" headers = [] query_ = [] request(client, :post, path_, query_, headers, input, options, nil) end @doc """ Allows you to either upgrade your domain or perform an Upgrade eligibility check to a compatible Elasticsearch version. """ def upgrade_elasticsearch_domain(client, input, options \\ []) do path_ = "/2015-01-01/es/upgradeDomain" headers = [] query_ = [] request(client, :post, path_, query_, headers, input, options, nil) end @spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) :: {:ok, map() | nil, map()} | {:error, term()} defp request(client, method, path, query, headers, input, options, success_status_code) do client = %{client | service: "es"} host = build_host("es", client) url = host |> build_url(path, client) |> add_query(query, client) additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}] headers = AWS.Request.add_headers(additional_headers, headers) payload = encode!(client, input) headers = AWS.Request.sign_v4(client, method, url, headers, payload) perform_request(client, method, url, payload, headers, options, success_status_code) end defp perform_request(client, method, url, payload, headers, options, success_status_code) do case AWS.Client.request(client, method, url, payload, headers, options) do {:ok, %{status_code: status_code, body: body} = response} when is_nil(success_status_code) and status_code in [200, 202, 204] when status_code == success_status_code -> body = if(body != "", do: decode!(client, body)) {:ok, body, response} {:ok, response} -> {:error, {:unexpected_response, response}} error = {:error, _reason} -> error end end defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do endpoint end defp build_host(_endpoint_prefix, %{region: "local"}) do "localhost" end defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do "#{endpoint_prefix}.#{region}.#{endpoint}" end defp build_url(host, path, %{:proto => proto, :port => port}) do "#{proto}://#{host}:#{port}#{path}" end defp add_query(url, [], _client) do url end defp add_query(url, query, client) do querystring = encode!(client, query, :query) "#{url}?#{querystring}" end defp encode!(client, payload, format \\ :json) do AWS.Client.encode!(client, payload, format) end defp decode!(client, payload) do AWS.Client.decode!(client, payload, :json) end end
lib/aws/generated/elasticsearch.ex
0.814459
0.404184
elasticsearch.ex
starcoder
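# A minimal sketch of calling the generated client above. It assumes the
# aws-elixir `AWS.Client.create/3` constructor; the credentials, region, and
# domain name are placeholders.
client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")

{:ok, domains, _response} = AWS.Elasticsearch.list_domain_names(client)
{:ok, domain, _response} = AWS.Elasticsearch.describe_elasticsearch_domain(client, "my-domain")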
defmodule Readtome.Creators do @moduledoc """ The Creators context. """ import Ecto.Query, warn: false alias Readtome.Repo alias Readtome.Creators.{Creator, UserInterestCreator} @doc """ Returns the list of creators. ## Examples iex> list_creators() [%Creator{}, ...] """ def list_creators do Repo.all(Creator) end @doc """ Gets a single creator. Raises `Ecto.NoResultsError` if the Creator does not exist. ## Examples iex> get_creator!(123) %Creator{} iex> get_creator!(456) ** (Ecto.NoResultsError) """ def get_creator!(id), do: Repo.get!(Creator, id) def get_creator(id), do: Repo.get(Creator, id) @doc """ Creates a creator. ## Examples iex> create_creator(%{field: value}) {:ok, %Creator{}} iex> create_creator(%{field: bad_value}) {:error, %Ecto.Changeset{}} """ def create_creator(attrs \\ %{}) do %Creator{} |> Creator.changeset(attrs) |> Repo.insert() end def add_by_name(name) do with nil <- Repo.get_by(Creator, name: name), {:ok, creator} <- create_creator(%{name: name}) do creator else creator when is_struct(creator, Creator) -> creator _ -> nil end end @doc """ Updates a creator. ## Examples iex> update_creator(creator, %{field: new_value}) {:ok, %Creator{}} iex> update_creator(creator, %{field: bad_value}) {:error, %Ecto.Changeset{}} """ def update_creator(%Creator{} = creator, attrs) do creator |> Creator.changeset(attrs) |> Repo.update() end @doc """ Deletes a Creator. ## Examples iex> delete_creator(creator) {:ok, %Creator{}} iex> delete_creator(creator) {:error, %Ecto.Changeset{}} """ def delete_creator(%Creator{} = creator) do Repo.delete(creator) end @doc """ Returns an `%Ecto.Changeset{}` for tracking creator changes. ## Examples iex> change_creator(creator) %Ecto.Changeset{source: %Creator{}} """ def change_creator(%Creator{} = creator) do Creator.changeset(creator, %{}) end def create_user_interest_creator(attrs \\ %{}) do %UserInterestCreator{} |> UserInterestCreator.changeset(attrs) |> Repo.insert() end def data() do Dataloader.Ecto.new(Repo) end end
lib/readtome/creators/creators.ex
0.812161
0.506103
creators.ex
starcoder
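# Context usage sketch, assuming the Readtome repo is started; the attrs are
# illustrative.
{:ok, creator} = Readtome.Creators.create_creator(%{name: "Ursula K. Le Guin"})

# add_by_name/1 is idempotent: it returns the existing record if one matches.
creator = Readtome.Creators.add_by_name("Ursula K. Le Guin")

{:ok, updated} = Readtome.Creators.update_creator(creator, %{name: "U. K. Le Guin"})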
defmodule Genotype do
  @moduledoc """
  This module provides functions for constructing a Genotype for a neural
  network. The Genotype is a data structure which provides all of the
  information needed to construct the network.
  """

  alias Genotype.Id, as: Id
  alias Genotype.Cortex, as: Cortex
  alias Genotype.Neuron, as: Neuron
  alias Genotype.Actuator, as: Actuator
  alias Genotype.Sensor, as: Sensor

  @doc """
  Build a genotype for a standard feed-forward neural network.

  ## Example
      iex> gen = Genotype.for_network(1, 1, [2])
      iex> length(gen)
      6
  """
  def for_network(num_sensors, num_actuators, layer_sizes \\ [])
      when is_number(num_sensors) and is_number(num_actuators) and is_list(layer_sizes) do
    cortex = Cortex.new()
    sensors = build_sensors(cortex.id, num_sensors)
    neurons = build_neurons(cortex.id, layer_sizes, num_actuators)
    actuators = build_actuators(cortex.id, num_actuators)

    layers =
      ([sensors] ++ neurons ++ [actuators])
      |> Pairlist.map(&link/2)

    List.flatten([cortex, layers])
  end

  # Link each layer to the next: returns the `back` layer with its inputs
  # wired to the `front` layer.
  defp link(front, back) when is_list(front) and is_list(back) do
    for neuron <- back do
      link_single(front, neuron)
    end
  end

  # Link every item in the layer to the neuron. Assumes that the layer's
  # outputs are all 1-dimensional.
  defp link_single(layer, %Neuron{} = neuron) when is_list(layer) do
    Enum.reduce(layer, neuron, fn source, target ->
      weights =
        case source do
          %Sensor{} -> [rand_weight(), rand_weight()]
          %Neuron{} -> [rand_weight()]
        end

      Neuron.with_input(target, source.id, weights)
    end)
  end

  defp link_single(layer, %Actuator{} = actuator) when is_list(layer) do
    Enum.reduce(layer, actuator, fn source, target ->
      Actuator.with_neuron(target, source.id)
    end)
  end

  defp build_sensors(cortex_id, num_sensors) do
    for _ <- 1..num_sensors, do: Sensor.new(cortex_id)
  end

  defp build_neurons(cortex_id, layer_sizes, num_actuators) do
    neurons = for size <- layer_sizes, do: build_layer(cortex_id, size)
    neurons ++ [build_layer(cortex_id, num_actuators)]
  end

  defp build_actuators(cortex_id, num_actuators) do
    for _ <- 1..num_actuators, do: Actuator.new(cortex_id)
  end

  defp build_layer(cortex_id, size) do
    for _ <- 1..size, do: Neuron.new(cortex_id)
  end

  # Random weight in the open interval (-2, 2), centered on 0.
  defp rand_weight do
    2 * (:rand.uniform() - :rand.uniform())
  end
end
lib/genotype/genotype.ex
0.8398
0.536313
genotype.ex
starcoder
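# Reproducing the module's own doctest, assuming the Genotype.* helper modules
# are available: one sensor, one actuator, and a hidden layer of two neurons
# yields six elements (cortex + 1 sensor + 2 hidden + 1 output + 1 actuator).
genotype = Genotype.for_network(1, 1, [2])
6 = length(genotype)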
defmodule Snitch.Domain.Payment do
  @moduledoc """
  Helper functions and utilities for handling payments.
  """

  use Snitch.Domain

  alias Snitch.Data.Model.HostedPayment
  alias SnitchPayments.PaymentMethodCode
  alias Snitch.Data.Schema.{Order, PaymentMethod, Payment}

  @hosted_payment PaymentMethodCode.hosted_payment()
  @cod_payment PaymentMethodCode.cash_on_delivery()

  @doc """
  Creates a payment record in the `pending` state.

  The function handles creation of both the payment record and its subtype in
  the same transaction. Payment subtypes are differentiated by the `:code`
  field of the `payment_method` struct.

  ## See `Snitch.Data.Schema.PaymentMethod`

  The `params` map expects the attributes for the subtype and the payment
  under the `:subtype_params` and `:payment_params` keys.

  ## Example

  For the `hosted payment` subtype the `params` map would be:

      iex> params = %{
        subtype_params: %{},
        payment_params: %{
          amount: Money.new(10, :USD)
        }
      }
  """
  @spec create_payment(map, PaymentMethod.t(), Order.t()) ::
          {:ok, map} | {:error, Ecto.Changeset.t()}
  def create_payment(params, payment_method, order) do
    slug = get_slug()
    payment_params = params[:payment_params]
    subtype_params = params[:subtype_params]

    create_payment_with_subtype(
      payment_method.code,
      subtype_params,
      payment_params,
      slug,
      order.id,
      payment_method.id
    )
  end

  defp create_payment_with_subtype(
         @hosted_payment,
         hosted_params,
         payment_params,
         slug,
         order_id,
         payment_method_id
       ) do
    HostedPayment.create(
      slug,
      order_id,
      payment_params,
      hosted_params,
      payment_method_id
    )
  end

  defp create_payment_with_subtype(
         @cod_payment,
         _,
         payment_params,
         slug,
         order_id,
         payment_method_id
       ) do
    payment = struct(Payment, payment_params)

    more_payment_params = %{
      order_id: order_id,
      payment_type: PaymentMethodCode.cash_on_delivery(),
      payment_method_id: payment_method_id,
      slug: slug
    }

    changeset = Payment.create_changeset(payment, more_payment_params)
    Repo.insert(changeset)
  end

  # Generates a unique slug using nanoid.
  # TODO: look for a better alternative.
  defp get_slug() do
    "payment_slug-#{Nanoid.generate()}"
  end
end
apps/snitch_core/lib/core/domain/payment/payment.ex
0.766905
0.456713
payment.ex
starcoder
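A hedged sketch of calling `create_payment/3` above; `payment_method` and `order` stand in for records loaded elsewhere, and `Money.new/2` follows the doc example:

params = %{
  subtype_params: %{},
  payment_params: %{amount: Money.new(10, :USD)}
}

case Snitch.Domain.Payment.create_payment(params, payment_method, order) do
  {:ok, payment} -> payment
  {:error, %Ecto.Changeset{} = changeset} -> changeset.errors
end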
defmodule CSV do
  use CSV.Defaults

  alias CSV.Decoding.Preprocessing
  alias CSV.Decoding.Decoder
  alias CSV.Encoding.Encoder
  alias CSV.EscapeSequenceError
  alias CSV.StrayQuoteError

  @moduledoc ~S"""
  RFC 4180 compliant CSV parsing and encoding for Elixir. Allows specifying
  other separators, so it could also be named TSV, but it isn't.
  """

  @doc """
  Decode a stream of comma-separated lines into a stream of tuples. Decoding
  errors will be inlined into the stream.

  ## Options

  These are the options:

    * `:separator`           – The separator token to use, defaults to `?,`.
      Must be a codepoint (syntax: ? + (your separator)).
    * `:strip_fields`        – When set to true, will strip whitespace from
      cells. Defaults to false.
    * `:preprocessor`        – Which preprocessor to use:
      :lines (default) -> Will preprocess line by line input respecting
      escape sequences
      :none -> Will not preprocess input and expects line by line input
      with multiple line escape sequences aggregated to one line
    * `:validate_row_length` – If set to `false`, will disable validation for
      row length. This will allow for rows with variable length. Defaults to `true`.
    * `:escape_max_lines`    – How many lines to maximally aggregate for
      multiline escapes. Defaults to 1000.
    * `:num_workers`         – The number of parallel operations to run when
      producing the stream.
    * `:worker_work_ratio`   – The available work per worker, defaults to 5.
      Higher rates will mean more work sharing, but might also lead to work
      fragmentation slowing down the queues.
    * `:headers`             – When set to `true`, will take the first row of
      the csv and use it as header values. When set to a list, will use the
      given list as header values. When set to `false` (default), will use
      no header values. When set to anything but `false`, the resulting rows
      in the matrix will be maps instead of lists.

  ## Examples

  Convert a filestream into a stream of rows in order of the given stream:

      iex> \"../test/fixtures/docs/valid.csv\"
      iex> |> Path.expand(__DIR__)
      iex> |> File.stream!
      iex> |> CSV.decode
      iex> |> Enum.take(2)
      [ok: [\"a\",\"b\",\"c\"], ok: [\"d\",\"e\",\"f\"]]

  Errors will show up as error tuples:

      iex> \"../test/fixtures/docs/escape-errors.csv\"
      iex> |> Path.expand(__DIR__)
      iex> |> File.stream!
      iex> |> CSV.decode
      iex> |> Enum.take(2)
      [
        ok: [\"a\",\"b\",\"c\"],
        error: "Escape sequence started on line 2 near \\"d,e,f\\n\\" did \
      not terminate.\\n\\nEscape sequences are allowed to span up to 1000 lines. \
      This threshold avoids collecting the whole file into memory when an escape \
      sequence does not terminate. You can change it using the escape_max_lines \
      option: https://hexdocs.pm/csv/CSV.html#decode/2"
      ]

  Map an existing stream of lines separated by a token to a stream of rows
  with a header row:

      iex> [\"a;b\",\"c;d\", \"e;f\"]
      iex> |> Stream.map(&(&1))
      iex> |> CSV.decode(separator: ?;, headers: true)
      iex> |> Enum.take(2)
      [
        ok: %{\"a\" => \"c\", \"b\" => \"d\"},
        ok: %{\"a\" => \"e\", \"b\" => \"f\"}
      ]

  Map an existing stream of lines separated by a token to a stream of rows
  with a given header row:

      iex> [\"a;b\",\"c;d\", \"e;f\"]
      iex> |> Stream.map(&(&1))
      iex> |> CSV.decode(separator: ?;, headers: [:x, :y])
      iex> |> Enum.take(2)
      [
        ok: %{:x => \"a\", :y => \"b\"},
        ok: %{:x => \"c\", :y => \"d\"}
      ]
  """
  def decode(stream, options \\ []) do
    stream
    |> preprocess(options)
    |> Decoder.decode(options)
    |> inline_errors!(options)
  end

  @doc """
  Decode a stream of comma-separated lines into a stream of tuples. Errors
  when decoding will get raised immediately.

  ## Options

  These are the options:

    * `:separator`           – The separator token to use, defaults to `?,`.
      Must be a codepoint (syntax: ? + (your separator)).
    * `:strip_fields`        – When set to true, will strip whitespace from
      cells. Defaults to false.
    * `:preprocessor`        – Which preprocessor to use:
      :lines (default) -> Will preprocess line by line input respecting
      escape sequences
      :none -> Will not preprocess input and expects line by line input
      with multiple line escape sequences aggregated to one line
    * `:escape_max_lines`    – How many lines to maximally aggregate for
      multiline escapes. Defaults to 1000.
    * `:validate_row_length` – If set to `false`, will disable validation for
      row length. This will allow for rows with variable length. Defaults to `true`.
    * `:num_workers`         – The number of parallel operations to run when
      producing the stream.
    * `:worker_work_ratio`   – The available work per worker, defaults to 5.
      Higher rates will mean more work sharing, but might also lead to work
      fragmentation slowing down the queues.
    * `:headers`             – When set to `true`, will take the first row of
      the csv and use it as header values. When set to a list, will use the
      given list as header values. When set to `false` (default), will use
      no header values. When set to anything but `false`, the resulting rows
      in the matrix will be maps instead of lists.

  ## Examples

  Convert a filestream into a stream of rows in order of the given stream:

      iex> \"../test/fixtures/docs/valid.csv\"
      iex> |> Path.expand(__DIR__)
      iex> |> File.stream!
      iex> |> CSV.decode!
      iex> |> Enum.take(2)
      [[\"a\",\"b\",\"c\"], [\"d\",\"e\",\"f\"]]

  Errors will be raised:

      iex> \"../test/fixtures/docs/row-length-errors.csv\"
      iex> |> Path.expand(__DIR__)
      iex> |> File.stream!
      iex> |> CSV.decode!
      iex> |> Enum.take(2)
      ** (CSV.RowLengthError) Row has length 3 - expected length 2 on line 2

  Map an existing stream of lines separated by a token to a stream of rows
  with a header row:

      iex> [\"a;b\",\"c;d\", \"e;f\"]
      iex> |> Stream.map(&(&1))
      iex> |> CSV.decode!(separator: ?;, headers: true)
      iex> |> Enum.take(2)
      [
        %{\"a\" => \"c\", \"b\" => \"d\"},
        %{\"a\" => \"e\", \"b\" => \"f\"}
      ]

  Map an existing stream of lines separated by a token to a stream of rows
  with a given header row:

      iex> [\"a;b\",\"c;d\", \"e;f\"]
      iex> |> Stream.map(&(&1))
      iex> |> CSV.decode!(separator: ?;, headers: [:x, :y])
      iex> |> Enum.take(2)
      [
        %{:x => \"a\", :y => \"b\"},
        %{:x => \"c\", :y => \"d\"}
      ]
  """
  def decode!(stream, options \\ []) do
    stream
    |> preprocess(options)
    |> Decoder.decode(options)
    |> raise_errors!(options)
  end

  defp preprocess(stream, options) do
    case options |> Keyword.get(:preprocessor) do
      :none ->
        stream |> Preprocessing.None.process(options)

      _ ->
        stream |> Preprocessing.Lines.process(options)
    end
  end

  defp raise_errors!(stream, options) do
    escape_max_lines = options |> Keyword.get(:escape_max_lines, @escape_max_lines)

    stream |> Stream.map(&yield_or_raise!(&1, escape_max_lines))
  end

  defp yield_or_raise!({:error, EscapeSequenceError, escape_sequence, index}, escape_max_lines) do
    raise EscapeSequenceError,
      escape_sequence: escape_sequence,
      line: index + 1,
      escape_max_lines: escape_max_lines
  end

  defp yield_or_raise!({:error, StrayQuoteError, field, index}, _) do
    raise StrayQuoteError, field: field, line: index + 1
  end

  defp yield_or_raise!({:error, mod, message, index}, _) do
    raise mod, message: message, line: index + 1
  end

  defp yield_or_raise!({:ok, row}, _), do: row

  defp inline_errors!(stream, options) do
    escape_max_lines = options |> Keyword.get(:escape_max_lines, @escape_max_lines)

    stream |> Stream.map(&yield_or_inline!(&1, escape_max_lines))
  end

  defp yield_or_inline!({:error, EscapeSequenceError, escape_sequence, index}, escape_max_lines) do
    {:error,
     EscapeSequenceError.exception(
       escape_sequence: escape_sequence,
       line: index + 1,
       escape_max_lines: escape_max_lines
     ).message}
  end

  defp yield_or_inline!({:error, StrayQuoteError, field, index}, _) do
    {:error, StrayQuoteError.exception(field: field, line: index + 1).message}
  end

  defp yield_or_inline!({:error, errormod, message, index}, _) do
    {:error, errormod.exception(message: message, line: index + 1).message}
  end

  defp yield_or_inline!(value, _), do: value

  @doc """
  Encode a table stream into a stream of RFC 4180 compliant CSV lines for
  writing to a file or other IO.

  ## Options

  These are the options:

    * `:separator` – The separator token to use, defaults to `?,`.
      Must be a codepoint (syntax: ? + (your separator)).
    * `:delimiter` – The delimiter token to use, defaults to `\\r\\n`.
      Must be a string.

  ## Examples

  Convert a stream of rows with cells into a stream of lines:

      iex> [~w(a b), ~w(c d)]
      iex> |> CSV.encode
      iex> |> Enum.take(2)
      [\"a,b\\r\\n\", \"c,d\\r\\n\"]

  Convert a stream of rows with cells with escape sequences into a stream of lines:

      iex> [[\"a\\nb\", \"\\tc\"], [\"de\", \"\\tf\\\"\"]]
      iex> |> CSV.encode(separator: ?\\t, delimiter: \"\\n\")
      iex> |> Enum.take(2)
      [\"\\\"a\\nb\\\"\\t\\\"\\tc\\\"\\n\", \"de\\t\\\"\\tf\\\"\\\"\\\"\\n\"]
  """
  def encode(stream, options \\ []) do
    Encoder.encode(stream, options)
  end
end
lib/csv.ex
0.938124
0.437403
csv.ex
starcoder
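A short sketch exercising the documented `decode/2` and `encode/2` paths on in-memory data (no fixture files needed); the outputs follow the module's own doctests:

["a;b", "c;d", "e;f"]
|> Stream.map(& &1)
|> CSV.decode(separator: ?;, headers: true)
|> Enum.to_list()
# => [ok: %{"a" => "c", "b" => "d"}, ok: %{"a" => "e", "b" => "f"}]

[~w(a b), ~w(c d)]
|> CSV.encode()
|> Enum.to_list()
# => ["a,b\r\n", "c,d\r\n"]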
defmodule CTE.DataCase do
  @moduledoc """
  This module defines the setup for tests requiring
  access to the application's data layer.

  You may define functions here to be used as helpers in
  your tests.

  Finally, if the test case interacts with the database,
  it cannot be async. For this reason, every test runs
  inside a transaction which is reset at the beginning
  of the test unless the test case is marked as async.
  """

  use ExUnit.CaseTemplate

  @start_apps [
    :ecto,
    :ecto_sql,
    :postgrex
  ]

  using do
    quote do
      alias CTE.{Repo, Author, Comment, TreePath}

      import Ecto
      import Ecto.Changeset
      import Ecto.Query
      import CTE.DataCase
    end
  end

  setup_all do
    Application.put_env(:ecto, :ecto_repos, [CTE.Repo])

    Application.put_env(:cte, CTE.Repo,
      name: :cte_repo,
      hostname: "localhost",
      username: "postgres",
      password: "<PASSWORD>",
      database: "ct_ecto_test",
      pool: Ecto.Adapters.SQL.Sandbox
    )

    Enum.each(@start_apps, &Application.ensure_all_started/1)
    {:ok, _pid} = start_supervised(CTE.Repo)

    on_exit(fn ->
      [cte: CTE.Repo, ecto: :ecto_repos]
      |> Enum.each(fn {app, key} -> Application.delete_env(app, key) end)
    end)
  end

  setup tags do
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(CTE.Repo)

    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(CTE.Repo, {:shared, self()})
    end

    :ok
  end

  @doc """
  A helper that transforms changeset errors into a map of messages.

      assert {:error, changeset} = Accounts.create_user(%{password: "<PASSWORD>"})
      assert "password <PASSWORD>" in errors_on(changeset).password
      assert %{password: ["<PASSWORD>"]} = errors_on(changeset)

  """
  def errors_on(changeset) do
    Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
      Enum.reduce(opts, message, fn {key, value}, acc ->
        String.replace(acc, "%{#{key}}", to_string(value))
      end)
    end)
  end
end
test/support/data_case.ex
0.668447
0.461866
data_case.ex
starcoder
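A minimal test module using the case template above; the schema-less changeset keeps the sketch self-contained, though the `setup` blocks still expect a reachable Postgres instance:

defmodule CTE.ErrorsOnTest do
  use CTE.DataCase

  test "errors_on/1 interpolates changeset messages" do
    changeset =
      {%{}, %{name: :string}}
      |> Ecto.Changeset.cast(%{}, [:name])
      |> Ecto.Changeset.validate_required([:name])

    assert %{name: ["can't be blank"]} = errors_on(changeset)
  end
end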
defmodule JSON.Parser.Bitstring.Object do
  @doc """
  parses a valid JSON object value, returns its elixir representation

  ## Examples

      iex> JSON.Parser.Bitstring.Object.parse ""
      {:error, :unexpected_end_of_buffer}

      iex> JSON.Parser.Bitstring.Object.parse "face0ff"
      {:error, {:unexpected_token, "face0ff"}}

      iex> JSON.Parser.Bitstring.Object.parse "[] "
      {:error, {:unexpected_token, "[] "}}

      iex> JSON.Parser.Bitstring.Object.parse "[]"
      {:error, {:unexpected_token, "[]"}}

      iex> JSON.Parser.Bitstring.Object.parse "[\\\"foo\\\", 1, 2, 1.5] lala"
      {:error, {:unexpected_token, "[\\\"foo\\\", 1, 2, 1.5] lala"}}

      iex> JSON.Parser.Bitstring.Object.parse "{\\\"result\\\": \\\"this will be a elixir result\\\"} lalal"
      {:ok, Enum.into([{"result", "this will be a elixir result"}], Map.new), " lalal"}
  """
  def parse(<<?{, rest::binary>>) do
    JSON.Parser.Bitstring.trim(rest)
    |> parse_object_contents
  end

  def parse(<<>>), do: {:error, :unexpected_end_of_buffer}
  def parse(json), do: {:error, {:unexpected_token, json}}

  # Object parsing

  defp parse_object_key(json) do
    case JSON.Parser.Bitstring.String.parse(json) do
      {:error, error_info} ->
        {:error, error_info}

      {:ok, key, after_key} ->
        case JSON.Parser.Bitstring.trim(after_key) do
          <<?:, after_colon::binary>> ->
            {:ok, key, JSON.Parser.Bitstring.trim(after_colon)}

          <<>> ->
            {:error, :unexpected_end_of_buffer}

          _ ->
            {:error, {:unexpected_token, JSON.Parser.Bitstring.trim(after_key)}}
        end
    end
  end

  defp parse_object_value(acc, key, after_key) do
    case JSON.Parser.Bitstring.parse(after_key) do
      {:error, error_info} ->
        {:error, error_info}

      {:ok, value, after_value} ->
        acc = Map.put(acc, key, value)
        after_value = JSON.Parser.Bitstring.trim(after_value)

        case after_value do
          <<?,, after_comma::binary>> ->
            parse_object_contents(acc, JSON.Parser.Bitstring.trim(after_comma))

          _ ->
            parse_object_contents(acc, after_value)
        end
    end
  end

  defp parse_object_contents(json), do: parse_object_contents(Map.new(), json)

  defp parse_object_contents(acc, <<?", _::binary>> = bin) do
    case parse_object_key(bin) do
      {:error, error_info} -> {:error, error_info}
      {:ok, key, after_key} -> parse_object_value(acc, key, after_key)
    end
  end

  defp parse_object_contents(acc, <<?}, rest::binary>>), do: {:ok, acc, rest}
  defp parse_object_contents(_, <<>>), do: {:error, :unexpected_end_of_buffer}
  defp parse_object_contents(_, json), do: {:error, {:unexpected_token, json}}
end
elixir/codes-from-books/little-elixir/cap4/metex/deps/json/lib/json/parser/bitstring/object.ex
0.726717
0.413803
object.ex
starcoder
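A usage sketch matching the doctests above; the unparsed remainder of the buffer is handed back so callers can keep going:

{:ok, object, rest} =
  JSON.Parser.Bitstring.Object.parse(~s({"result": "ok"} trailing))

# object == %{"result" => "ok"}
# rest   == " trailing"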
defmodule Etop.Monitor do
  @moduledoc """
  Etop Monitors.

  Add `:summary` or `:process` monitors to Etop.

  These monitors are checked on each run. If the threshold condition is met,
  the monitor's callback is called.

  Monitors are added with either the `Etop.monitor/4` or `Etop.add_monitor/4` calls.
  """
  require Logger

  @doc """
  Run the monitor checks.
  """
  @spec run(any(), map()) :: {any(), map()}
  def run(params, %{monitors: monitors, stats: %{procs: procs}} = state)
      when is_nil(monitors) or is_nil(procs) or monitors == [] do
    {params, state}
  end

  def run(params, %{monitors: monitors, stats: stats} = state) when is_list(monitors) do
    {params, Enum.reduce(monitors, state, &check_and_run(&1, params, stats, &2))}
  end

  @spec run_monitors(any(), map()) :: {any(), map()}
  def run_monitors(params, %{monitors: monitor, stats: stats} = state) when is_tuple(monitor) do
    {params, check_and_run(monitor, params, stats, state)}
  end

  defp check_and_run({:process, field, threshold, callback}, {_, prev}, %{procs: curr}, state) do
    prev = Enum.into(prev, %{})

    Enum.reduce(curr, state, fn {pid, info}, state ->
      info = put_in(info, [:pid], pid)
      prev_info = if item = prev[pid], do: put_in(item, [:pid], pid), else: nil

      if exceeds_threshold?(state, info, field, threshold) and
           exceeds_threshold?(state, prev_info, field, threshold) do
        run_callback(info, info[field], callback, state)
      else
        state
      end
    end)
  end

  defp check_and_run({:summary, fields, threshold, callback}, {curr, _}, prev, state) do
    if exceeds_threshold?(state, curr, fields, threshold) and
         exceeds_threshold?(state, prev, fields, threshold) do
      curr
      |> get_in([hd(fields)])
      |> run_callback(get_in(curr, fields), callback, state)
    else
      state
    end
  end

  defp exceeds_threshold?(state, info, field, {fun, threshold}) when is_function(fun, 2) do
    exceeds_threshold?(state, info, field, fn value -> fun.(value, threshold) end)
  end

  defp exceeds_threshold?(state, info, field, {fun, threshold}) when is_function(fun, 3) do
    exceeds_threshold?(state, info, field, fn value -> fun.(value, threshold, state) end)
  end

  defp exceeds_threshold?(state, info, field, fun) when is_function(fun, 2) do
    exceeds_threshold?(state, info, field, fn value -> fun.(value, state) end)
  end

  defp exceeds_threshold?(state, info, field, fun) when is_function(fun, 3) do
    exceeds_threshold?(state, info, field, fn value -> fun.(value, info, state) end)
  end

  defp exceeds_threshold?(state, info, field, threshold) when not is_function(threshold) do
    exceeds_threshold?(state, info, field, &(&1 >= threshold))
  end

  defp exceeds_threshold?(_state, info, field, comparator)
       when is_function(comparator, 1) and is_atom(field) and (is_list(info) or is_map(info)) do
    comparator.(!!info[field] && info[field])
  end

  defp exceeds_threshold?(_state, stats, fields, comparator)
       when is_function(comparator, 1) and is_list(fields) and (is_list(stats) or is_map(stats)) do
    value = get_in(stats, fields)
    comparator.(!!value && value)
  end

  defp exceeds_threshold?(_, _, _, _), do: false

  defp run_callback(info, value, callback, state) when is_function(callback, 3) do
    try_callback(info, value, callback, state)
  end

  defp run_callback(info, value, {mod, fun}, state) do
    if function_exported?(mod, fun, 3) do
      try_callback(info, value, &apply(mod, fun, [&1, &2, &3]), state)
    else
      Logger.warn("&#{mod}.#{fun}/3 is not a valid callback")
      state
    end
  end

  # Safely run a monitor callback.
  #
  # Run the callback and check the return for something that resembles a state
  # map. If so, return that map, otherwise return the original state map.
  defp try_callback(info, value, callback, state) do
    try do
      case callback.(info, value, state) do
        %{monitors: _, file: _, format: _} = state -> state
        _ -> state
      end
    rescue
      e ->
        Logger.warn("monitor callback exception: #{inspect(e)}, callback: #{inspect(callback)}")
        state
    end
  end
end
lib/etop/monitor.ex
0.699973
0.544922
monitor.ex
starcoder
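A hedged sketch of a monitor tuple in the shape `check_and_run/4` destructures above ({kind, field, threshold, callback}); how it is registered is up to `Etop.monitor/4` or `Etop.add_monitor/4`, which are not shown here:

# Fire when a process' reductions stay above the threshold for two
# consecutive samples; the callback must return the (possibly updated) state.
monitor =
  {:process, :reductions, 1_000_000,
   fn info, value, state ->
     IO.puts("pid #{inspect(info[:pid])} is busy: #{value} reductions")
     state
   end}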
defmodule Linguist.Vocabulary do
  alias Linguist.Compiler

  @moduledoc """
  Defines lookup functions for given translation locales, binding interpolation.

  Locales are defined with the `locale/2` macro, accepting a locale name and
  either a keyword list of translations or a String path to evaluate for the
  translations list.

  For example, given the following translations:

      locale "en", [
        flash: [
          notice: [
            hello: "hello %{first} %{last}",
          ]
        ],
        users: [
          title: "Users",
        ]
      ]

      locale "fr", Path.join([__DIR__, "fr.exs"])

  this module will compile this down to these functions:

      def t("en", "flash.notice.hello", bindings \\\\ []), do: # ...
      def t("en", "users.title", bindings \\\\ []), do: # ...
      def t("fr", "flash.notice.hello", bindings \\\\ []), do: # ...
  """

  @doc """
  Compiles all the translations and injects the functions created into the
  current module.
  """
  defmacro __using__(_options) do
    quote do
      Module.register_attribute(__MODULE__, :locales, accumulate: true, persist: false)
      import unquote(__MODULE__)
      @before_compile unquote(__MODULE__)
    end
  end

  defmacro __before_compile__(env) do
    Compiler.compile(Module.get_attribute(env.module, :locales))
  end

  @doc """
  Embeds locales from the provided source.

  * name - The String name of the locale, ie "en", "fr"
  * source -
    1. The String file path to eval that returns a keyword list of translations
    2. The Keyword List of translations

  Examples

      locale "en", [
        flash: [
          notice: [
            hello: "hello %{first} %{last}",
          ]
        ]
      ]

      locale "fr", Path.join([__DIR__, "fr.exs"])
  """
  defmacro locale(name, source) do
    quote bind_quoted: [name: name, source: source] do
      loaded_source =
        cond do
          is_binary(source) && String.ends_with?(source, [".yml", ".yaml"]) ->
            Linguist.Vocabulary._load_yaml_file(source)

          is_binary(source) ->
            @external_resource source
            source |> Code.eval_file() |> elem(0)

          true ->
            source
        end

      name = name |> to_string()
      @locales {name, loaded_source}
    end
  end

  @doc """
  Function used internally to load a yaml file.

  Please use the `locale` macro with a path to a yaml file - this function
  will not work as expected if called directly.
  """
  def _load_yaml_file(source) do
    {:ok, [result]} = YamlElixir.read_all_from_file(source)
    result |> Enum.reduce([], &Linguist.Vocabulary._yaml_reducer/2)
  end

  @doc """
  Recursive function used internally for loading yaml files.
  Not intended for external use.
  """
  # sobelow_skip ["DOS.StringToAtom"]
  def _yaml_reducer({key, value}, acc) when is_binary(value) do
    [{String.to_atom(key), value} | acc]
  end

  # sobelow_skip ["DOS.StringToAtom"]
  def _yaml_reducer({key, value}, acc) do
    [{String.to_atom(key), Enum.reduce(value, [], &Linguist.Vocabulary._yaml_reducer/2)} | acc]
  end
end
lib/linguist/vocabulary.ex
0.841256
0.553928
vocabulary.ex
starcoder
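A sketch of the `locale/2` macro above with an inline keyword list; the generated `t/3` return shape is an assumption based on the compiled lookup functions the moduledoc describes:

defmodule I18n do
  use Linguist.Vocabulary

  locale("en", flash: [notice: [hello: "hello %{first} %{last}"]])
end

I18n.t("en", "flash.notice.hello", first: "Ada", last: "Lovelace")
# assumed => {:ok, "hello Ada Lovelace"}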
defmodule Ada.Schema.ScheduledTask do
  @moduledoc """
  Represents a boilerplate for the recurring execution of a workflow.

  Captures the workflow to run, its frequency and params. See `t:t/0` for
  more details.
  """
  use Ecto.Schema

  alias Ada.{Schema.Frequency, Workflow}

  @task_version 1

  schema "scheduled_tasks" do
    field :version, :integer, null: false, default: @task_version
    field :workflow_name, Ecto.Module, null: false
    field :params, :map, null: false, default: %{}
    field :transport, Ecto.Atom, null: false, default: :email

    embeds_one :frequency, Frequency, on_replace: :update

    timestamps()
  end

  @typedoc """
  A scheduled task is mainly defined by:

  - a workflow name, deciding the workflow that needs to be run
  - a frequency, determining how often the task is run (see `Ada.Schema.Frequency`)
  - a map of params, which are going to be passed to the workflow when run
  - a transport, deciding the transport used to communicate the workflow
    result to the relevant user
  """
  @type t :: %__MODULE__{
          __meta__: term(),
          id: String.t(),
          version: pos_integer(),
          workflow_name: Workflow.t(),
          params: map(),
          transport: Workflow.transport(),
          frequency: Frequency.t(),
          inserted_at: DateTime.t(),
          updated_at: DateTime.t()
        }

  @doc """
  Returns a changeset, starting from a scheduled task and a map of attributes
  to change.
  """
  @spec changeset(t, map) :: Ecto.Changeset.t()
  def changeset(scheduled_task, params \\ %{}) do
    scheduled_task
    |> Ecto.Changeset.cast(params, [:version, :workflow_name, :params, :transport])
    |> Ecto.Changeset.cast_embed(:frequency)
    |> Ecto.Changeset.validate_required([:frequency, :workflow_name, :transport])
    |> Ecto.Changeset.validate_inclusion(:transport, Workflow.transports())
    |> Ecto.Changeset.validate_number(:version, equal_to: @task_version)
    |> Ecto.Changeset.validate_change(:workflow_name, workflow_name_validator())
  end

  @doc false
  defguard is_valid_hourly_spec?(minute, second) when minute in 0..59 and second in 0..59

  @doc false
  defguard is_valid_daily_spec?(hour, minute) when hour in 0..23 and minute in 0..59

  @doc """
  Returns true for an hourly task.
  """
  @spec hourly?(t) :: bool()
  def hourly?(%__MODULE__{frequency: frequency}), do: Frequency.hourly?(frequency)

  @doc """
  Returns true for a daily task.
  """
  @spec daily?(t) :: bool()
  def daily?(%__MODULE__{frequency: frequency}), do: Frequency.daily?(frequency)

  @doc """
  Returns true for a weekly task.
  """
  @spec weekly?(t) :: bool()
  def weekly?(%__MODULE__{frequency: frequency}), do: Frequency.weekly?(frequency)

  @doc """
  Returns true for a task that matches a given datetime, where matching is
  defined as:

  - same day of the week, hour and zero minutes and seconds for a weekly task
  - same hour, same minute and zero seconds for a daily task
  - same minute and second for an hourly task
  """
  @spec matches_time?(t, DateTime.t()) :: bool()
  def matches_time?(st, datetime), do: Frequency.matches_time?(st.frequency, datetime)

  @doc """
  Runs a scheduled task resolving the contained workflow.
  """
  @spec run(t, Keyword.t()) :: Workflow.run_result()
  def run(st, ctx \\ []) do
    Workflow.run(st.workflow_name, st.params, st.transport, ctx)
  end

  @doc """
  Previews the results of a scheduled task by looking at its raw data.
  """
  @spec preview(t, Keyword.t()) :: Workflow.raw_data_result()
  def preview(st, ctx \\ []) do
    Workflow.raw_data(st.workflow_name, st.params, ctx)
  end

  defp workflow_name_validator do
    fn :workflow_name, workflow_name ->
      if Workflow.valid_name?(workflow_name) do
        []
      else
        [workflow_name: "workflow name is invalid"]
      end
    end
  end

  defimpl Jason.Encoder do
    def encode(scheduled_task, opts) do
      scheduled_task
      |> Map.drop([:__struct__, :__meta__])
      |> Map.update!(:workflow_name, &Workflow.normalize_name/1)
      |> Map.put(:workflow_human_name, scheduled_task.workflow_name.human_name())
      |> Map.update!(:params, fn params ->
        Enum.map(params, fn {name, value} -> %{name: name, value: value} end)
      end)
      |> Jason.Encode.map(opts)
    end
  end
end
lib/ada/schema/scheduled_task.ex
0.917635
0.572812
scheduled_task.ex
starcoder
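A hedged changeset sketch for the schema above; `MyApp.Workflows.SendNews` is a hypothetical workflow module and the `frequency` keys are assumptions about `Ada.Schema.Frequency`:

attrs = %{
  workflow_name: MyApp.Workflows.SendNews,
  transport: :email,
  params: %{},
  frequency: %{"type" => "daily", "hour" => 9}
}

changeset = Ada.Schema.ScheduledTask.changeset(%Ada.Schema.ScheduledTask{}, attrs)
changeset.valid?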
defmodule Stripe.Plan do
  @moduledoc """
  Work with Stripe plan objects.

  You can:

  - Create a plan
  - Retrieve a plan
  - Update a plan
  - Delete a plan

  Does not yet render lists or take options.

  Stripe API reference: https://stripe.com/docs/api#plan

  Example:

  ```
  {
    "id": "ivory-extended-580",
    "object": "plan",
    "active": true,
    "aggregate_usage": null,
    "amount": 999,
    "billing_scheme": "per_unit",
    "created": 1531234812,
    "currency": "usd",
    "interval": "month",
    "interval_count": 1,
    "livemode": false,
    "metadata": {},
    "nickname": null,
    "product": "prod_DCmtkptv7qHXGE",
    "tiers": null,
    "tiers_mode": null,
    "transform_usage": null,
    "trial_period_days": null,
    "usage_type": "licensed"
  }
  ```
  """

  use Stripe.Entity
  import Stripe.Request

  @type t :: %__MODULE__{
          id: Stripe.id(),
          object: String.t(),
          active: boolean,
          aggregate_usage: String.t() | nil,
          amount: non_neg_integer | nil,
          billing_scheme: String.t() | nil,
          created: Stripe.timestamp(),
          currency: String.t(),
          deleted: boolean | nil,
          interval: String.t(),
          interval_count: pos_integer,
          livemode: boolean,
          metadata: Stripe.Types.metadata(),
          name: String.t(),
          nickname: String.t() | nil,
          product: Stripe.id() | Stripe.Product.t(),
          tiers: Stripe.List.t(map) | nil,
          tiers_mode: boolean | nil,
          transform_usage: map | nil,
          trial_period_days: non_neg_integer | nil,
          usage_type: String.t() | nil
        }

  defstruct [
    :id,
    :object,
    :active,
    :aggregate_usage,
    :amount,
    :billing_scheme,
    :created,
    :currency,
    :deleted,
    :interval,
    :interval_count,
    :livemode,
    :metadata,
    :name,
    :nickname,
    :product,
    :tiers,
    :tiers_mode,
    :transform_usage,
    :trial_period_days,
    :usage_type
  ]

  @plural_endpoint "plans"

  @doc """
  Create a plan.
  """
  @spec create(params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
        when params:
               %{
                 :currency => String.t(),
                 :interval => String.t(),
                 :product => Stripe.id() | Stripe.Product.t(),
                 optional(:id) => String.t(),
                 optional(:amount) => non_neg_integer,
                 optional(:active) => boolean,
                 optional(:billing_scheme) => String.t(),
                 optional(:interval_count) => pos_integer,
                 optional(:metadata) => Stripe.Types.metadata(),
                 optional(:nickname) => String.t(),
                 optional(:tiers) => Stripe.List.t(),
                 optional(:tiers_mode) => String.t(),
                 optional(:transform_usage) => map,
                 optional(:trial_period_days) => non_neg_integer,
                 optional(:usage_type) => String.t()
               }
               | %{}
  def create(%{currency: _, interval: _, product: _} = params, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint)
    |> put_params(params)
    |> put_method(:post)
    |> make_request()
  end

  @doc """
  Retrieve a plan.
  """
  @spec retrieve(Stripe.id() | t, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
  def retrieve(id, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
    |> put_method(:get)
    |> make_request()
  end

  @doc """
  Update a plan.

  Takes the `id` and a map of changes.
  """
  @spec update(Stripe.id() | t, params, Stripe.options()) ::
          {:ok, t} | {:error, Stripe.Error.t()}
        when params:
               %{
                 optional(:active) => boolean,
                 optional(:metadata) => Stripe.Types.metadata(),
                 optional(:nickname) => String.t(),
                 optional(:product) => Stripe.id() | Stripe.Product.t(),
                 optional(:trial_period_days) => non_neg_integer
               }
               | %{}
  def update(id, params, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
    |> put_method(:post)
    |> put_params(params)
    |> make_request()
  end

  @doc """
  Delete a plan.
  """
  @spec delete(Stripe.id() | t, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
  def delete(id, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
    |> put_method(:delete)
    |> make_request()
  end

  @doc """
  List all plans.
  """
  @spec list(params, Stripe.options()) :: {:ok, Stripe.List.t(t)} | {:error, Stripe.Error.t()}
        when params:
               %{
                 optional(:active) => boolean,
                 optional(:created) => Stripe.date_query(),
                 optional(:ending_before) => t | Stripe.id(),
                 optional(:limit) => 1..100,
                 optional(:product) => Stripe.Product.t() | Stripe.id(),
                 optional(:starting_after) => t | Stripe.id()
               }
               | %{}
  def list(params \\ %{}, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint)
    |> put_method(:get)
    |> put_params(params)
    |> cast_to_id([:ending_before, :starting_after])
    |> make_request()
  end
end
lib/stripe/subscriptions/plan.ex
0.851506
0.654215
plan.ex
starcoder
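A usage sketch for the client above; it assumes a configured Stripe API key and an existing product id (the `prod_...` value here is just the placeholder from the moduledoc example):

{:ok, plan} =
  Stripe.Plan.create(%{
    currency: "usd",
    interval: "month",
    product: "prod_DCmtkptv7qHXGE",
    amount: 999
  })

{:ok, _same_plan} = Stripe.Plan.retrieve(plan.id)
{:ok, _updated} = Stripe.Plan.update(plan.id, %{nickname: "ivory"})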
defmodule OT.Server.Adapter do
  @moduledoc """
  An adapter behaviour for interacting with persisted data in an operational
  transformation system.
  """

  alias OT.Server

  @doc """
  Call a function inside of a transaction.

  This is useful for adapters that use databases that support transactions.
  All of the other adapter functions (other than `c:handle_submit_error/3`)
  will be called in the function passed to this function.

  This is a good place to implement locking to ensure that only a single
  operation is processed at a time per document, a requirement of this OT
  system.
  """
  @callback transact(id :: Server.datum_id(), (() -> any)) :: {:ok, any} | {:error, any}

  @doc """
  Roll a transaction back.

  This will be called when the attempt to submit an operation fails. For
  adapters without real transaction support, they must choose how to repair
  their data at this stage, since `c:update_datum/2` may have been called,
  but `c:insert_operation/3` may have failed.
  """
  @callback rollback(any) :: no_return

  @doc """
  Get the datum identified by the ID.
  """
  @callback get_datum(id :: Server.datum_id()) :: {:ok, Server.datum()} | {:error, any}

  @doc """
  Get any conflicting operations for the given datum at the given version.

  In a proper OT system, this means any operation for the given datum whose
  version is greater than or equal to the given version. The function must
  return a list of `t:OT.Server.operation_info/0`s.
  """
  @callback get_conflicting_operations(datum :: Server.datum(), Server.version()) ::
              [Server.operation_info()]

  @doc """
  Update the `t:OT.Server.datum/0` with the given content and increment its
  `t:OT.Server.version/0`.
  """
  @callback update_datum(datum :: Server.datum(), any) :: {:ok, Server.datum()} | {:error, any}

  @doc """
  Insert the given `t:OT.Server.operation/0` into persistence.

  Any metadata that was originally passed to `OT.Server.submit_operation/3`
  will also be passed to the adapter. On a successful submission, this value
  is what will be returned from `OT.Server.submit_operation/3`.
  """
  @callback insert_operation(datum :: Server.datum(), Server.operation_info(), any) ::
              {:ok, any} | {:error, any}

  @doc """
  Handle a submission error.

  If the error passed to this function constitutes a scenario in which the
  submission should be tried again, return `:retry`. Otherwise, return a
  tagged error tuple and the call to `OT.Server.submit_operation/3` will fail.
  """
  @callback handle_submit_error(any, any, Server.operation_info()) :: :retry | {:error, any}
end
lib/ot/server/adapter.ex
0.849129
0.566588
adapter.ex
starcoder
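A skeletal in-memory adapter satisfying the behaviour above; this is a sketch (no real locking or persistence), just enough to show the callback shapes:

defmodule MyApp.MemoryAdapter do
  @behaviour OT.Server.Adapter

  @impl true
  def transact(_id, fun), do: {:ok, fun.()}

  @impl true
  def rollback(reason), do: throw({:rollback, reason})

  @impl true
  def get_datum(id), do: {:ok, %{id: id, content: "", version: 0}}

  @impl true
  def get_conflicting_operations(_datum, _version), do: []

  @impl true
  def update_datum(datum, content),
    do: {:ok, %{datum | content: content, version: datum.version + 1}}

  @impl true
  def insert_operation(_datum, operation_info, _meta), do: {:ok, operation_info}

  @impl true
  def handle_submit_error(_error, _id, _operation_info), do: {:error, :rejected}
end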
defmodule Chatter do
  @moduledoc """
  Functions and data for numerically approximating 'chatter'.

  'Chatter' is when an infinite sequence of impacts accumulates in a finite
  time on a 'sticking' impact. It is the analogue in this system to a
  real-world situation in which the mass judders against the stop. To handle
  it numerically it is necessary to detect when it is happening and then
  extrapolate forward to the accumulation point.
  """

  # Approximates the acceleration at low velocity.
  @spec low_velocity_acceleration(float, float, float) :: float
  defp low_velocity_acceleration(t, x, omega) do
    :math.cos(omega * t) - x
  end

  @doc """
  Returns the `:StateOfMotion` corresponding to the limit of a sequence of
  chatter impacts.

  `:state`: the `:StateOfMotion` corresponding to a starting impact

  `:parameters`: system parameters for the oscillator

  **Precondition** `:state` is assumed to correspond to a low velocity impact
  (i.e. `:state.x` == `:parameters.sigma` and `:state.v` small) but this is
  not checked. If these conditions are not met, the return value will be
  meaningless.

  **Precondition** chatter cannot occur for `:parameters.r` >= 1. This will
  result in an error condition.
  """
  @spec accumulation_state(%StateOfMotion{}, %SystemParameters{}) ::
          {:ok, %StateOfMotion{}} | {:error, String.t()}
  def accumulation_state(%StateOfMotion{} = state, %SystemParameters{} = parameters) do
    g = low_velocity_acceleration(state.t, parameters.sigma, parameters.omega)

    cond do
      parameters.r >= 1 ->
        {:error, "Chatter cannot occur for coefficient of restitution >= 1"}

      parameters.r < 0 ->
        {:error, "Chatter cannot occur for coefficient of restitution < 0"}

      g < 0 ->
        {:error, "Chatter will not occur outside the sticking region"}

      g == 0 ->
        with {:ok, region} <- StickingRegion.derive(parameters),
             do: {:ok, StickingRegion.next_impact_state(state.t, parameters.sigma, region)}

      true ->
        {:ok,
         %StateOfMotion{
           t: state.t - 2 * state.v / g / (1 - parameters.r),
           x: parameters.sigma,
           v: 0
         }}
    end
  end

  @doc """
  The number of successive low velocity impacts after which the test for
  chatter will be applied.

  TODO: make configurable
  """
  @const_low_v_count_threshold 10
  def const_low_v_count_threshold, do: @const_low_v_count_threshold

  @doc """
  Counts successive low velocity impacts and flags when a threshold number
  have been reached. Used in detecting chatter.
  """
  @spec count_low_v(integer()) :: (number() -> {boolean(), (integer() -> any())})
  def count_low_v(counter \\ 0) do
    require ImposcConstants

    fn v ->
      if v != 0 && v < ImposcConstants.const_smallish() do
        if counter < const_low_v_count_threshold() do
          {false, count_low_v(counter + 1)}
        else
          {true, count_low_v(0)}
        end
      else
        {false, count_low_v(0)}
      end
    end
  end
end
apps/imposc/lib/dynamics/chatter.ex
0.91266
0.805479
chatter.ex
starcoder
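A hedged sketch of `accumulation_state/2` above; the struct fields are the ones the module itself reads (t, x, v and omega, r, sigma), while the numeric values are illustrative only:

parameters = %SystemParameters{omega: 2.8, r: 0.8, sigma: 0.0}
state = %StateOfMotion{t: 0.0, x: parameters.sigma, v: 1.0e-4}

case Chatter.accumulation_state(state, parameters) do
  # t_acc is the extrapolated time of the chatter accumulation point.
  {:ok, %StateOfMotion{t: t_acc}} -> t_acc
  {:error, reason} -> reason
end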
defmodule Appsignal.Tracer do
  alias Appsignal.Span

  @monitor Application.get_env(:appsignal, :appsignal_monitor, Appsignal.Monitor)
  @table :"$appsignal_registry"

  @type option :: {:pid, pid} | {:start_time, integer}
  @type options :: [option]

  @doc false
  def start_link do
    Agent.start_link(
      fn -> :ets.new(@table, [:named_table, :public, :duplicate_bag]) end,
      name: __MODULE__
    )
  end

  @doc """
  Creates a new root span.

  ## Example

      Appsignal.Tracer.create_span("http_request")
  """
  @spec create_span(String.t()) :: Span.t() | nil
  def create_span(namespace), do: create_span(namespace, nil, [])

  @doc """
  Creates a new child span.

  ## Example

      parent = Appsignal.Tracer.current_span()
      Appsignal.Tracer.create_span("http_request", parent)
  """
  @spec create_span(String.t(), Span.t() | nil) :: Span.t() | nil
  def create_span(namespace, parent), do: create_span(namespace, parent, [])

  @doc """
  Creates a new span, with an optional parent or pid.

  ## Example

      parent = Appsignal.Tracer.current_span()
      Appsignal.Tracer.create_span("http_request", parent, start_time: :os.system_time(), pid: self())
  """
  @spec create_span(String.t(), Span.t() | nil, options) :: Span.t() | nil
  def create_span(namespace, nil, options) do
    pid = Keyword.get(options, :pid, self())

    if running?() && !ignored?(pid) do
      span =
        case Keyword.get(options, :start_time) do
          nil -> Span.create_root(namespace, pid)
          timestamp -> Span.create_root(namespace, pid, timestamp)
        end

      register(span)
    end
  end

  def create_span(_namespace, parent, options) do
    pid = Keyword.get(options, :pid, self())

    if running?() && !ignored?(pid) do
      span =
        case Keyword.get(options, :start_time) do
          nil -> Span.create_child(parent, pid)
          timestamp -> Span.create_child(parent, pid, timestamp)
        end

      register(span)
    end
  end

  @doc """
  Finds the span in the registry table.
  """
  @spec lookup(pid()) :: list()
  def lookup(pid) do
    if running?(), do: :ets.lookup(@table, pid)
  end

  @doc """
  Returns the current span in the current process.
  """
  @spec current_span() :: Span.t() | nil
  def current_span, do: current_span(self())

  @doc """
  Returns the current span in the passed pid's process.
  """
  @spec current_span(pid()) :: Span.t() | nil
  def current_span(pid) do
    pid
    |> lookup()
    |> current()
  end

  @doc """
  Returns the root span in the current process.
  """
  @spec root_span() :: Span.t() | nil
  def root_span, do: root_span(self())

  @doc """
  Returns the root span in the passed pid's process.
  """
  @spec root_span(pid()) :: Span.t() | nil
  def root_span(pid) do
    pid
    |> lookup()
    |> root()
  end

  @doc false
  def child_spec(_) do
    %{
      id: Appsignal.Tracer,
      start: {Appsignal.Tracer, :start_link, []}
    }
  end

  defp current({_pid, :ignore}), do: nil
  defp current({_pid, span}), do: span

  defp current(spans) when is_list(spans) do
    spans
    |> List.last()
    |> current()
  end

  defp current(_), do: nil

  defp root([{_pid, %Span{} = root} | _]), do: root
  defp root(_), do: nil

  @spec close_span(Span.t() | nil) :: :ok | nil
  @doc """
  Closes a span and deregisters it.

  ## Example

      Appsignal.Tracer.current_span()
      |> Appsignal.Tracer.close_span()
  """
  def close_span(%Span{} = span) do
    if running?() do
      span
      |> Span.close()
      |> deregister()
    end

    :ok
  end

  def close_span(nil), do: nil

  @spec close_span(Span.t() | nil, list()) :: :ok | nil
  @doc """
  Closes a span and deregisters it. Takes an options list, which currently
  only accepts a `List` with an `:end_time` integer.

  ## Example

      Appsignal.Tracer.current_span()
      |> Appsignal.Tracer.close_span(end_time: :os.system_time())
  """
  def close_span(span, options)

  def close_span(%Span{} = span, end_time: end_time) do
    if running?() do
      span
      |> Span.close(end_time)
      |> deregister()
    end

    :ok
  end

  def close_span(nil, _options), do: nil

  @doc """
  Ignores the current process.
  """
  @spec ignore() :: :ok | nil
  def ignore do
    if running?() do
      pid = self()
      delete(pid)
      :ets.insert(@table, {pid, :ignore})
      @monitor.add()
    end

    :ok
  end

  @doc """
  Removes the process' spans from the registry.
  """
  @spec delete(pid()) :: :ok
  def delete(pid) do
    if running?(), do: :ets.delete(@table, pid)
    :ok
  end

  defp register(%Span{pid: pid} = span) do
    :ets.insert(@table, {pid, span})
    @monitor.add()
    span
  end

  defp register(nil), do: nil

  defp deregister(%Span{pid: pid} = span) do
    :ets.delete_object(@table, {pid, span})
  end

  defp ignored?(pid) when is_pid(pid) do
    pid
    |> lookup()
    |> ignored?()
  end

  defp ignored?([{_pid, :ignore}]), do: true
  defp ignored?(_), do: false

  defp running? do
    is_pid(Process.whereis(__MODULE__))
  end
end
lib/appsignal/tracer.ex
0.82741
0.544559
tracer.ex
starcoder
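A span lifecycle sketch built from the docs above (it assumes the tracer's Agent and span registry are running, e.g. under the AppSignal supervision tree):

root = Appsignal.Tracer.create_span("http_request")
child = Appsignal.Tracer.create_span("http_request", root)

Appsignal.Tracer.close_span(child)

Appsignal.Tracer.root_span()
|> Appsignal.Tracer.close_span(end_time: :os.system_time())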
defmodule Day07 do
  @moduledoc "Day 7: Recursive Circus"

  def part1(input) do
    input
    |> Enum.map(&String.split/1)
    |> Enum.map(&parse_line/1)
    |> Enum.reduce({[], []}, fn {program, _, sub}, {progs, subs} ->
      {[program | progs], subs ++ sub}
    end)
    |> (fn {progs, subs} -> progs -- subs end).()
    |> hd
  end

  def part2(input) do
    top_program = part1(input)

    input
    |> Enum.map(&String.split/1)
    |> Enum.map(&parse_line/1)
    |> Enum.reduce(%{}, fn {prog, weight, subs}, map -> Map.put(map, prog, {weight, subs}) end)
    |> calc_prog_weight(top_program)
    |> find_imbalance(top_program)
  end

  defp calc_prog_weight(tree, prog) do
    {weight, subs} = Map.get(tree, prog)

    case subs do
      [] ->
        {tree, weight}

      _ ->
        {new_tree, subs_sum} = sum_weight(tree, subs)
        {Map.put(new_tree, prog, {weight + subs_sum, subs}), weight + subs_sum}
    end
  end

  defp sum_weight(_, _, sum \\ 0)
  defp sum_weight(tree, [], sum), do: {tree, sum}

  defp sum_weight(tree, [s | r], sum) do
    val = calc_prog_weight(tree, s)
    {new_tree, acc_weight_of_sub} = val
    sum_weight(new_tree, r, sum + acc_weight_of_sub)
  end

  defp find_imbalance({tree, _}, prog), do: find_imbalance(tree, prog, nil)

  defp find_imbalance(tree, prog, target) do
    {_, subs} = Map.get(tree, prog)
    weight_groups = group_by_weight(tree, subs)

    if balanced?(weight_groups) do
      target - (subs |> Enum.map(&(tree |> Map.get(&1) |> elem(0))) |> Enum.sum())
    else
      {sub, correct_weight} = unbalanced_sub_correct_weight(weight_groups, tree)
      find_imbalance(tree, sub, correct_weight)
    end
  end

  defp group_by_weight(tree, subs) do
    subs
    |> Enum.map(&{&1, Map.get(tree, &1)})
    |> Enum.map(fn {prog, {weight, _}} -> {prog, weight} end)
    |> Enum.reduce(%{}, fn {prog, weight}, acc ->
      Map.put(acc, weight, [prog | Map.get(acc, weight, [])])
    end)
  end

  defp balanced?(weight_groups) do
    weight_groups |> Map.keys() |> Enum.count() == 1
  end

  defp unbalanced_sub_correct_weight(weight_groups, tree) do
    weight_groups
    |> find_unbalanced_sub
    |> find_correct_weight(weight_groups, tree)
  end

  defp find_unbalanced_sub(weight_count) do
    weight_count
    |> Map.keys()
    |> Enum.map(&Map.get(weight_count, &1))
    |> Enum.filter(&(Enum.count(&1) == 1))
    |> hd
    |> hd
  end

  defp find_correct_weight(unbalance_sub, weight_groups, tree) do
    # The correct weight is the accumulated weight (element 0 of the tree
    # entry) shared by the balanced siblings, i.e. any group key other than
    # the unbalanced sub's own accumulated weight.
    {unbalance_sub,
     weight_groups
     |> Map.keys()
     |> Enum.filter(fn key -> key != tree |> Map.get(unbalance_sub) |> elem(0) end)
     |> hd}
  end

  defp parse_line([program, weight]) do
    {program, weight |> weight_to_int, []}
  end

  defp parse_line([program, weight, "->" | subprograms]) do
    {program, weight |> weight_to_int,
     subprograms |> Enum.map(&String.replace_trailing(&1, ",", ""))}
  end

  defp weight_to_int(weight) do
    weight
    |> String.replace_leading("(", "")
    |> String.replace_trailing(")", "")
    |> String.to_integer()
  end
end
apps/day07/lib/day07.ex
0.592313
0.464719
day07.ex
starcoder
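A tiny input in the puzzle's line format for `part1/1` above; "c" is the only program that is not listed as anyone's child, so it is the bottom of the tower:

input = [
  "a (5)",
  "b (6)",
  "c (7) -> a, b"
]

"c" = Day07.part1(input)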
defmodule Intcode do
  use GenServer

  def run_computer(name, inputs \\ [], timeout \\ 5000) do
    case GenServer.call(name, {:run, inputs}, timeout) do
      {:output, _code} -> run_computer(name, [], timeout)
      {:exit, code} -> code
    end
  end

  @empty_state %{
    program: [],
    position: 0,
    inputs: [],
    diagnostic: 0,
    relative_base: 0
  }

  @impl true
  def init(program) do
    {:ok, %{@empty_state | program: program}}
  end

  @impl true
  def handle_call({:run, new_inputs}, _from, %{inputs: inputs} = state) do
    {exit_type, state} = run_program(%{state | inputs: new_inputs ++ inputs})
    {:reply, {exit_type, state.diagnostic}, state, :hibernate}
  end

  @impl true
  def handle_call({:get_state}, _from, state) do
    {:reply, state, state, :hibernate}
  end

  @impl true
  def handle_call({:set_state, state}, _from, _) do
    {:reply, :ok, Map.merge(@empty_state, state), :hibernate}
  end

  def run_program(state) do
    Opcode.at_position(state.program, state.position, state.relative_base)
    |> execute_opcode(state)
  end

  # Opcodes that interrupt program execution.
  def execute_opcode(%{code: 4, params: [diagnostic], next_pos: next_pos}, state) do
    {:output, %{state | position: next_pos, diagnostic: diagnostic}}
  end

  def execute_opcode(%{code: 99}, state) do
    {:exit, state}
  end

  # Opcode for reading input.
  def execute_opcode(
        %{code: 3, params: [loc], next_pos: next_pos},
        %{program: program, inputs: [input | inputs]} = state
      ) do
    program = List.replace_at(program, loc, input)
    run_program(%{state | program: program, position: next_pos, inputs: inputs})
  end

  # Opcodes that change auxiliary program state.
  def execute_opcode(
        %{code: 9, next_pos: next_pos, params: [shift]},
        %{relative_base: relative_base} = state
      ) do
    run_program(%{state | position: next_pos, relative_base: relative_base + shift})
  end

  # Opcodes that shift the program's current position.
  def execute_opcode(%{code: code} = opcode, %{program: program} = state)
      when code == 5 or code == 6 do
    run_program(%{state | program: program, position: get_next_pos(opcode)})
  end

  # Opcodes for program manipulation.
  def execute_opcode(
        %{code: code, params: params, next_pos: next_pos},
        %{program: program} = state
      ) do
    program =
      cond do
        code == 1 -> execute_addition(program, params)
        code == 2 -> execute_multiplication(program, params)
        code == 7 -> execute_lt_substitution(program, params)
        code == 8 -> execute_eq_substitution(program, params)
        true -> program
      end

    run_program(%{state | program: program, position: next_pos})
  end

  def extend_program(program, index) do
    if index >= length(program), do: program ++ for(_ <- 0..index, do: nil), else: program
  end

  defp get_next_pos(%{code: 5, params: [check, pos], next_pos: default_pos}),
    do: if(check != 0, do: pos, else: default_pos)

  defp get_next_pos(%{code: 6, params: [check, pos], next_pos: default_pos}),
    do: if(check == 0, do: pos, else: default_pos)

  defp execute_addition(program, [addend1, addend2, index]),
    do: program |> extend_program(index) |> List.replace_at(index, addend1 + addend2)

  defp execute_multiplication(program, [factor1, factor2, index]),
    do: program |> extend_program(index) |> List.replace_at(index, factor1 * factor2)

  defp execute_lt_substitution(program, [arg1, arg2, loc]),
    do: program |> extend_program(loc) |> List.replace_at(loc, if(arg1 < arg2, do: 1, else: 0))

  defp execute_eq_substitution(program, [arg1, arg2, loc]),
    do: program |> extend_program(loc) |> List.replace_at(loc, if(arg1 == arg2, do: 1, else: 0))
end
lib/intcode/intcode.ex
0.50293
0.443299
intcode.ex
starcoder
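A usage sketch for the server above, assuming the companion `Opcode` module it calls is compiled too. The program `[3, 0, 4, 0, 99]` reads one input, outputs it, and halts, so the input value comes back as the final diagnostic code:

{:ok, pid} = GenServer.start_link(Intcode, [3, 0, 4, 0, 99])
42 = Intcode.run_computer(pid, [42])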
defmodule BitPal.Crypto.Base58 do
  @moduledoc """
  This module implements the Base58 scheme used in various places in Bitcoin
  and Bitcoin Cash, for example. Note that there are multiple versions of the
  Base58 encoding. This module implements the encoding specific to Bitcoin
  (and used in other places). The difference is the order of the characters
  used.

  In addition to raw encoding/decoding, this module also implements
  checksumming, so that the user of the module doesn't have to worry about
  that.
  """

  use Bitwise

  @doc """
  Encode the binary into base58. No checksumming.
  """
  def encode(data) do
    data |> to_base58 |> to_ascii
  end

  @doc """
  Encode the binary into base58, optionally adding a checksum.

  Checksums available are:
  - :none - no checksum
  - :doublesha - double sha256 - used in Bitcoin
  """
  def encode(data, checksum) do
    encode(data <> hash_message(checksum, data))
  end

  @doc """
  Decode the string from base58 into a binary. No checksumming.
  """
  def decode(string) do
    string |> from_ascii |> from_base58
  end

  @doc """
  Decode a string with a specified checksum. Returns :error on checksum failure.

  Checksums available are:
  - :none - no checksum
  - :doublesha - double sha256 - used in Bitcoin
  """
  def decode(string, checksum) do
    data = decode(string)
    payload_size = byte_size(data) - hash_size(checksum)

    if payload_size <= 0 do
      :error
    else
      <<payload::binary-size(payload_size), hash::binary>> = data

      if hash == hash_message(checksum, payload) do
        payload
      else
        :error
      end
    end
  end

  # Size of the hash.
  defp hash_size(:none), do: 0
  defp hash_size(:doublesha), do: 4

  # Compute the hash.
  defp hash_message(:none, _message), do: <<>>

  defp hash_message(:doublesha, message) do
    <<hash::binary-size(4), _::binary>> = :crypto.hash(:sha256, :crypto.hash(:sha256, message))
    hash
  end

  # Convert a binary to base 58. We interpret "data" as an integer with the
  # most significant byte first. We need to be wary of any leading zero bytes,
  # as they will otherwise be ignored when we convert to an integer.
  #
  # Note: I have seen quite a few implementations that do not seem to handle
  # this case very well, they simply convert to an integer and call it a day
  # (thus discarding leading zeros).
  defp to_base58(data) do
    case data do
      <<0x00, rest::binary>> -> <<0>> <> to_base58(rest)
      <<>> -> <<>>
      d -> to_base58_int(:binary.decode_unsigned(d, :big))
    end
  end

  defp to_base58_int(number) do
    if number > 0 do
      to_base58_int(div(number, 58)) <> <<rem(number, 58)>>
    else
      <<>>
    end
  end

  # Convert a binary with numbers (0-57) into a "regular" binary.
  defp from_base58(data) do
    case data do
      <<0, rest::binary>> ->
        <<0x00>> <> from_base58(rest)

      <<>> ->
        <<>>

      d ->
        num = from_base58_int(d, byte_size(d) - 1)
        :binary.encode_unsigned(num, :big)
    end
  end

  defp from_base58_int(data, pos) do
    result = :binary.at(data, pos)

    if pos == 0 do
      result
    else
      result + from_base58_int(data, pos - 1) * 58
    end
  end

  # Convert from ASCII to a binary of digits.
  defp from_ascii(binary) do
    binary
    |> :binary.bin_to_list()
    |> Enum.map(fn x -> digit_to_num(x) end)
    |> :binary.list_to_bin()
  end

  # Convert from a binary of digits to ASCII.
  defp to_ascii(binary) do
    binary
    |> :binary.bin_to_list()
    |> Enum.map(fn x -> num_to_digit(x) end)
    |> :binary.list_to_bin()
  end

  @alphabet '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'

  # Decode a single base58 digit into an integer.
  defp digit_to_num(value) do
    case Enum.find_index(@alphabet, &(&1 == value)) do
      nil -> raise("Unknown character in base58 string.")
      x -> x
    end
  end

  # Encode a single base58 digit.
  defp num_to_digit(value) do
    Enum.at(@alphabet, value)
  end
end
lib/bitpal/crypto/base58.ex
0.791539
0.571916
base58.ex
starcoder
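A round-trip sketch for the checksummed API above; the leading zero byte is exactly the case the module's comments single out, and it survives the trip:

payload = <<0, 255, 16, 32>>
encoded = BitPal.Crypto.Base58.encode(payload, :doublesha)

^payload = BitPal.Crypto.Base58.decode(encoded, :doublesha)

# A corrupted string should fail the checksum check.
:error = BitPal.Crypto.Base58.decode(encoded <> "1", :doublesha)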
defmodule StepFlow.Step.Launch do
  @moduledoc """
  The Step launcher context.
  """

  require Logger

  alias StepFlow.Amqp.CommonEmitter
  alias StepFlow.Jobs
  alias StepFlow.Notifications.Notification
  alias StepFlow.Step.Helpers
  alias StepFlow.Step.LaunchParams
  alias StepFlow.Workflows

  def launch_step(workflow, step) do
    dates = Helpers.get_dates()

    # Refresh the workflow to get recently stored parameters on it.
    workflow = Workflows.get_workflow!(workflow.id)

    step_id = StepFlow.Map.get_by_key_or_atom(step, :id)
    step_name = StepFlow.Map.get_by_key_or_atom(step, :name)
    step_mode = StepFlow.Map.get_by_key_or_atom(step, :mode, "one_for_one")

    source_paths = get_source_paths(workflow, step, dates)

    case {source_paths, step_mode} do
      {_, "notification"} ->
        Logger.debug("Notification step")

        Notification.process(
          workflow,
          dates,
          step_name,
          step,
          step_id,
          source_paths
        )

      {[], _} ->
        Logger.debug("job one for one path")
        Jobs.create_skipped_job(workflow, step_id, step_name)

      {source_paths, "one_for_one"} when is_list(source_paths) ->
        first_file =
          source_paths
          |> Enum.sort()
          |> List.first()

        launch_params = LaunchParams.new(workflow, step, dates, first_file)

        case StepFlow.Map.get_by_key_or_atom(step, :multiple_jobs) do
          nil ->
            start_job_one_for_one(source_paths, launch_params)

          multiple_jobs_parameter ->
            start_multiple_jobs_one_for_one(source_paths, multiple_jobs_parameter, launch_params)
        end

      {source_paths, "one_for_many"} when is_list(source_paths) ->
        Logger.debug("job one for many paths")
        launch_params = LaunchParams.new(workflow, step, dates)
        start_job_one_for_many(source_paths, launch_params)

      {_, _} ->
        Jobs.create_skipped_job(workflow, step_id, step_name)
    end
  end

  defp start_job_one_for_one([], _launch_params), do: {:ok, "started"}

  defp start_job_one_for_one([source_path | source_paths], launch_params) do
    message = generate_message_one_for_one(source_path, launch_params)

    case CommonEmitter.publish_json(
           LaunchParams.get_step_name(launch_params),
           LaunchParams.get_step_id(launch_params),
           message
         ) do
      :ok -> start_job_one_for_one(source_paths, launch_params)
      _ -> {:error, "unable to publish message"}
    end
  end

  defp start_multiple_jobs_one_for_one(source_paths, multiple_jobs_parameter, launch_params) do
    segments =
      Helpers.get_value_in_parameters_with_type(
        launch_params.workflow,
        multiple_jobs_parameter,
        "array_of_media_segments"
      )
      |> List.first()

    case segments do
      nil ->
        start_job_one_for_one(source_paths, launch_params)

      segments ->
        start_jobs_one_for_one_for_segments(segments, source_paths, launch_params)
    end
  end

  defp start_jobs_one_for_one_for_segments([], _source_paths, _launch_params),
    do: {:ok, "started"}

  defp start_jobs_one_for_one_for_segments([segment | segments], source_paths, launch_params) do
    launch_params = %{launch_params | segment: segment}

    _result = start_job_one_for_one_with_segment(source_paths, launch_params)

    start_jobs_one_for_one_for_segments(segments, source_paths, launch_params)
  end

  defp start_job_one_for_one_with_segment([], _launch_params), do: {:ok, "started"}

  defp start_job_one_for_one_with_segment([source_path | source_paths], launch_params) do
    new_parameters =
      StepFlow.Map.get_by_key_or_atom(launch_params.step, :parameters, [])
      |> Enum.concat([
        %{
          "id" => "sdk_start_index",
          "type" => "integer",
          "value" => StepFlow.Map.get_by_key_or_atom(launch_params.segment, :start)
        },
        %{
          "id" => "sdk_stop_index",
          "type" => "integer",
          "value" => StepFlow.Map.get_by_key_or_atom(launch_params.segment, :end)
        }
      ])

    updated_step = StepFlow.Map.replace_by_atom(launch_params.step, :parameters, new_parameters)
    launch_params = %{launch_params | step: updated_step}

    parameters = generate_job_parameters_one_for_one(source_path, launch_params)

    step_name = LaunchParams.get_step_name(launch_params)
    step_id = LaunchParams.get_step_id(launch_params)

    job_params = %{
      name: step_name,
      step_id: step_id,
      workflow_id: launch_params.workflow.id,
      parameters: parameters
    }

    {:ok, job} = Jobs.create_job(job_params)
    message = Jobs.get_message(job)

    case CommonEmitter.publish_json(step_name, step_id, message) do
      :ok -> start_job_one_for_one_with_segment(source_paths, launch_params)
      _ -> {:error, "unable to publish message"}
    end
  end

  def get_source_paths(workflow, step, dates) do
    input_filter = Helpers.get_value_in_parameters(step, "input_filter")

    case StepFlow.Map.get_by_key_or_atom(step, :parent_ids, []) do
      [] ->
        Helpers.get_value_in_parameters(step, "source_paths")
        |> List.flatten()
        |> Helpers.templates_process(workflow, step, dates)
        |> Helpers.filter_path_list(input_filter)

      parent_ids ->
        workflow.jobs
        |> Enum.filter(fn job -> job.step_id in parent_ids end)
        |> Helpers.get_jobs_destination_paths()
        |> Helpers.filter_path_list(input_filter)
    end
  end

  def start_job_one_for_many(source_paths, launch_params) do
    message = generate_message_one_for_many(source_paths, launch_params)

    case CommonEmitter.publish_json(
           LaunchParams.get_step_name(launch_params),
           LaunchParams.get_step_id(launch_params),
           message
         ) do
      :ok -> {:ok, "started"}
      _ -> {:error, "unable to publish message"}
    end
  end

  def generate_message_one_for_one(source_path, launch_params) do
    parameters = generate_job_parameters_one_for_one(source_path, launch_params)

    job_params = %{
      name: LaunchParams.get_step_name(launch_params),
      step_id: LaunchParams.get_step_id(launch_params),
      workflow_id: launch_params.workflow.id,
      parameters: parameters
    }

    {:ok, job} = Jobs.create_job(job_params)
    Jobs.get_message(job)
  end

  defp generate_job_parameters_one_for_one(source_path, launch_params) do
    destination_path_templates =
      Helpers.get_value_in_parameters_with_type(
        launch_params.step,
        "destination_path",
        "template"
      )

    destination_filename_templates =
      Helpers.get_value_in_parameters_with_type(
        launch_params.step,
        "destination_filename",
        "template"
      )

    base_directory = Helpers.get_base_directory(launch_params.workflow, launch_params.step)

    {required_paths, destination_path} =
      build_requirements_and_destination_path(
        destination_path_templates,
        destination_filename_templates,
        launch_params.workflow,
        launch_params.step,
        launch_params.dates,
        base_directory,
        source_path,
        launch_params.required_file
      )

    requirements =
      Helpers.get_step_requirements(launch_params.workflow.jobs, launch_params.step)
      |> Helpers.add_required_paths(required_paths)

    destination_path_parameter =
      if StepFlow.Map.get_by_key_or_atom(launch_params.step, :skip_destination_path, false) do
        []
      else
        [
          %{
            "id" => "destination_path",
            "type" => "string",
            "value" => destination_path
          }
        ]
      end

    filter_and_pre_compile_parameters(launch_params, source_path)
    |> Enum.concat(destination_path_parameter)
    |> Enum.concat([
      %{
        "id" => "source_path",
        "type" => "string",
        "value" => source_path
      },
      %{
        "id" => "requirements",
        "type" => "requirements",
        "value" => requirements
      }
    ])
  end

  def generate_message_one_for_many(source_paths, launch_params) do
    select_input =
      StepFlow.Map.get_by_key_or_atom(launch_params.step, :parameters, [])
      |> Enum.filter(fn param ->
        StepFlow.Map.get_by_key_or_atom(param, :type) == "select_input"
      end)
      |> Enum.map(fn param ->
        id = StepFlow.Map.get_by_key_or_atom(param, :id)
        value = StepFlow.Map.get_by_key_or_atom(param, :value)

        Logger.warn("source paths: #{inspect(source_paths)} // value: #{inspect(value)}")

        path =
          Helpers.filter_path_list(source_paths, [value])
          |> List.first()

        %{
          id: id,
          type: "string",
          value: path
        }
      end)

    destination_filename_templates =
      Helpers.get_value_in_parameters_with_type(
        launch_params.step,
        "destination_filename",
        "template"
      )

    select_input =
      case destination_filename_templates do
        [destination_filename_template] ->
          if StepFlow.Map.get_by_key_or_atom(launch_params.step, :skip_destination_path, false) do
            select_input
          else
            filename =
              destination_filename_template
              |> Helpers.template_process(
                launch_params.workflow,
                launch_params.step,
                launch_params.dates,
                source_paths
              )
              |> Path.basename()

            destination_path =
              Helpers.get_base_directory(launch_params.workflow, launch_params.step) <> filename

            Enum.concat(select_input, [
              %{
                id: "destination_path",
                type: "string",
                value: destination_path
              }
            ])
          end

        _ ->
          select_input
      end

    source_paths =
      get_source_paths(
        launch_params.workflow,
        launch_params.dates,
        launch_params.step,
        source_paths
      )

    requirements =
      Helpers.get_step_requirements(launch_params.workflow.jobs, launch_params.step)
      |> Helpers.add_required_paths(source_paths)

    parameters =
      filter_and_pre_compile_parameters(launch_params, source_paths)
      |> Enum.concat(select_input)
      |> Enum.concat([
        %{
          "id" => "source_paths",
          "type" => "array_of_strings",
          "value" => source_paths
        },
        %{
          "id" => "requirements",
          "type" => "requirements",
          "value" => requirements
        }
      ])

    job_params = %{
      name: LaunchParams.get_step_name(launch_params),
      step_id: LaunchParams.get_step_id(launch_params),
      workflow_id: launch_params.workflow.id,
      parameters: parameters
    }

    {:ok, job} = Jobs.create_job(job_params)
    Jobs.get_message(job)
  end

  # All clauses take the same eight arguments as the single call site; the
  # first clause ignores the base directory since the full destination path
  # comes from the template.
  def build_requirements_and_destination_path(
        [destination_path_template],
        _,
        workflow,
        step,
        dates,
        _base_directory,
        source_path,
        _first_file
      ) do
    destination_path =
      Helpers.template_process(destination_path_template, workflow, step, dates, source_path)

    {[], destination_path}
  end

  def build_requirements_and_destination_path(
        _,
        [destination_filename_template],
        workflow,
        step,
        dates,
        base_directory,
        source_path,
        first_file
      ) do
    filename =
      Helpers.template_process(destination_filename_template, workflow, step, dates, source_path)
      |> Path.basename()

    required_paths =
      if source_path != first_file do
        base_directory <> Path.basename(first_file)
      else
        []
      end

    {required_paths, base_directory <> filename}
  end

  def build_requirements_and_destination_path(
        _,
        _,
        _workflow,
        _step,
        _dates,
        base_directory,
        source_path,
        first_file
      ) do
    required_paths =
      if source_path != first_file do
        base_directory <> Path.basename(first_file)
      else
        []
      end

    {required_paths, base_directory <> Path.basename(source_path)}
  end

  defp get_source_paths(workflow, dates, step, source_paths) do
    source_paths_templates =
      Helpers.get_value_in_parameters_with_type(step, "source_paths", "array_of_templates")
      |> List.flatten()

    case source_paths_templates do
      nil ->
        source_paths

      [] ->
        source_paths

      templates ->
        Enum.map(
          templates,
          fn template -> Helpers.template_process(template, workflow, step, dates, nil) end
        )
    end
  end

  defp filter_and_pre_compile_parameters(launch_params, source_paths) do
    StepFlow.Map.get_by_key_or_atom(launch_params.step, :parameters, [])
    |> Enum.map(fn param ->
      case StepFlow.Map.get_by_key_or_atom(param, :type) do
        "template" ->
          value =
            StepFlow.Map.get_by_key_or_atom(
              param,
              :value,
              StepFlow.Map.get_by_key_or_atom(param, :default)
            )
            |> Helpers.template_process(
              launch_params.workflow,
              launch_params.step,
              launch_params.dates,
              source_paths
            )

          {_, filtered_map} =
            StepFlow.Map.replace_by_atom(param, :type, "string")
            |> StepFlow.Map.replace_by_atom(:value, value)
            |> Map.pop("default")

          filtered_map

        "array_of_templates" ->
          filter_and_pre_compile_array_of_templates_parameter(
            param,
            launch_params.workflow,
            launch_params.step,
            launch_params.dates
          )

        _ ->
          param
      end
    end)
    |> Enum.filter(fn param ->
      StepFlow.Map.get_by_key_or_atom(param, :type) != "filter" &&
        StepFlow.Map.get_by_key_or_atom(param, :type) != "template" &&
        StepFlow.Map.get_by_key_or_atom(param, :type) != "select_input" &&
        StepFlow.Map.get_by_key_or_atom(param, :type) != "array_of_templates"
    end)
    |> Enum.filter(fn param ->
      StepFlow.Map.get_by_key_or_atom(param, :id) != "source_paths" ||
        StepFlow.Map.get_by_key_or_atom(param, :type) != "array_of_strings" ||
        StepFlow.Map.get_by_key_or_atom(launch_params.step, :keep_source_paths, true)
    end)
  end

  defp filter_and_pre_compile_array_of_templates_parameter(param, workflow, step, dates) do
    case StepFlow.Map.get_by_key_or_atom(param, :id) do
      "source_paths" ->
        param

      _ ->
        value =
          StepFlow.Map.get_by_key_or_atom(
            param,
            :value,
            StepFlow.Map.get_by_key_or_atom(param, :default)
          )
          |> Helpers.templates_process(workflow, step, dates)

        {_, filtered_map} =
          StepFlow.Map.replace_by_atom(param, :type, "array_of_strings")
          |> StepFlow.Map.replace_by_atom(:value, value)
          |> Map.pop("default")

        filtered_map
    end
  end
end
lib/step_flow/step/launch.ex
0.607197
0.44342
launch.ex
starcoder
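The destination-path resolution in the entry above relies on multi-clause pattern matching: a `destination_path` template wins outright, then a `destination_filename` template, and finally the source file's own basename. A minimal, self-contained sketch of that dispatch (names are illustrative; the real clauses also thread workflow, step and date context through `Helpers.template_process/5`):

```elixir
defmodule DestinationDemo do
  # A single destination_path template wins outright.
  def resolve([path_template], _filename_templates, _base, _source), do: path_template
  # Otherwise a filename template is appended to the base directory.
  def resolve(_, [filename_template], base, _source), do: base <> filename_template
  # Fallback: keep the source file's own basename.
  def resolve(_, _, base, source), do: base <> Path.basename(source)
end

DestinationDemo.resolve([], [], "/out/", "/in/movie.mp4")
# => "/out/movie.mp4"
```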
defmodule Annex.Data.List2D do
  @moduledoc """
  List2D is a 2 dimensional list of lists of floats.
  """

  use Annex.Data
  require Annex.Data.List1D

  alias Annex.{
    AnnexError,
    Data.List1D
  }

  @type t :: [[float(), ...], ...]

  @doc """
  Given flat `data` (or a 2-D list of lists of floats) and a valid 2-D `shape`
  in the form of `[rows, columns]`, returns a 2-D list of lists of floats.
  """
  @impl Data
  @spec cast(Data.flat_data() | t(), Shape.t()) :: t()
  def cast(data, [_, _] = shape) when List1D.is_list1D(data) do
    cast([data], shape)
  end

  def cast(data, [_, _] = shape) do
    flat_data =
      data
      |> type_check()
      |> List.flatten()

    elements_count = length(flat_data)
    elements_expected = Shape.product(shape)

    if elements_count != elements_expected do
      raise %AnnexError{
        message: """
        The number of items in the provided data did not match the required
        number of items of the given shape.
        """,
        details: [
          shape: shape,
          expected_count: elements_expected,
          actual_count: elements_count,
          data: data
        ]
      }
    end

    Data.flat_data_to_tensor(flat_data, shape)
  end

  @doc """
  Returns true for a list of lists of floats.
  """
  @impl Data
  @spec is_type?(Data.data()) :: boolean
  def is_type?(data), do: is_list2D(data)

  @doc """
  The shape of a List2D can be calculated thus:

  `rows` is the number of elements in the outermost list.
  `columns` is the count of the elements of the first row.
  """
  @impl Data
  def shape(data) do
    [row_of_floats | _] = type_check(data)
    [length(data), length(row_of_floats)]
  end

  @doc """
  Flattens a List2D into a flat list of floats.
  """
  @impl Data
  def to_flat_list(data) do
    data
    |> type_check()
    |> List.flatten()
  end

  @impl Data
  @spec apply_op(t(), Data.op(), Data.args()) :: Data.flat_data()
  def apply_op(data, op, args) do
    case {op, args} do
      {:map, [func]} -> map(data, func)
    end
  end

  def map(data, func) do
    Enum.map(data, fn row -> Enum.map(row, func) end)
  end

  @spec transpose(t()) :: t()
  def transpose(data) do
    do_transpose(data)
  end

  defp do_transpose([]) do
    []
  end

  # An empty first row means every column has been emitted; without this
  # clause the recursive step below would call hd/1 on an empty list.
  defp do_transpose([[] | _]) do
    []
  end

  defp do_transpose(data) do
    [Enum.map(data, &hd/1) | data |> Enum.map(&tl/1) |> do_transpose()]
  end

  defp type_check(data) do
    if not is_list2D(data) do
      raise ArgumentError,
        message: """
        #{inspect(Annex.Data.List2D)} requires data to be a list of lists of floats.
        data: #{inspect(data)}
        """
    end

    data
  end

  defp is_list2D([[f | _] | _]) when is_float(f), do: true
  defp is_list2D(_), do: false
end
lib/annex/data/list_2d.ex
0.865196
0.813164
list_2d.ex
starcoder
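A short usage sketch for the module above (assumes the annex application is compiled; the expected values follow directly from `cast/2` reshaping by `[rows, columns]`):

```elixir
rows = Annex.Data.List2D.cast([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], [2, 3])
# => [[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]

Annex.Data.List2D.shape(rows)
# => [2, 3]

Annex.Data.List2D.transpose(rows)
# => [[1.0, 4.0], [2.0, 5.0], [3.0, 6.0]]
```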
defmodule AstroEx.Unit.HMS do @moduledoc """ Hours:Minutes:Seconds """ alias AstroEx.Unit.{Arcmin, Arcsec, Degrees, DMS, Radian} alias AstroEx.Utils.Math @enforce_keys [:value] defstruct [:value] @typep hours :: 12..23 @typep minutes :: 0..59 @typep seconds :: number() @typep hms :: {hours(), minutes(), seconds()} @type t :: %__MODULE__{value: hms()} @doc """ ## Examples iex> AstroEx.Unit.HMS.new("12:00:00.0") #AstroEx.Unit.HMS<12:00:00.0> """ def new(str) when is_binary(str), do: str |> parse_hms() |> new() def new({hrs, min, sec} = hms) when is_integer(hrs) and is_integer(min) and is_number(sec), do: %__MODULE__{value: hms} def new(hrs, min, sec), do: new({hrs, min, sec}) @doc """ ## Examples iex> "12:00:00.0" ...> |> AstroEx.Unit.HMS.new() ...> |> AstroEx.Unit.HMS.to_degrees() #AstroEx.Unit.Degrees<180.0> iex> "12:00:00.0" ...> |> AstroEx.Unit.HMS.new() ...> |> AstroEx.Unit.cast(AstroEx.Unit.Degrees) #AstroEx.Unit.Degrees<180.0> """ def to_degrees(%__MODULE__{value: value}), do: value |> convert_to_degrees() |> Degrees.new() @doc """ ## Examples iex> "12:00:00.0" ...> |> AstroEx.Unit.HMS.new() ...> |> AstroEx.Unit.HMS.to_radian() #AstroEx.Unit.Radian<3.141593> iex> "12:00:00.0" ...> |> AstroEx.Unit.HMS.new() ...> |> AstroEx.Unit.cast(AstroEx.Unit.Radian) #AstroEx.Unit.Radian<3.141593> """ def to_radian(%__MODULE__{} = value), do: value |> to_degrees() |> Degrees.to_radian() @doc """ ## Examples iex> "12:00:00.0" ...> |> AstroEx.Unit.HMS.new() ...> |> AstroEx.Unit.HMS.to_arcsec() #AstroEx.Unit.Arcsec<648000.0> iex> "12:00:00.0" ...> |> AstroEx.Unit.HMS.new() ...> |> AstroEx.Unit.cast(AstroEx.Unit.Arcsec) #AstroEx.Unit.Arcsec<648000.0> """ def to_arcsec(%__MODULE__{} = value), do: value |> to_degrees() |> Arcsec.from_degrees() @doc """ ## Examples iex> "12:00:00.0" ...> |> AstroEx.Unit.HMS.new() ...> |> AstroEx.Unit.HMS.to_arcmin() #AstroEx.Unit.Arcmin<10800.0> iex> "12:00:00.0" ...> |> AstroEx.Unit.HMS.new() ...> |> AstroEx.Unit.cast(AstroEx.Unit.Arcmin) #AstroEx.Unit.Arcmin<10800.0> """ def to_arcmin(%__MODULE__{} = value), do: value |> to_degrees() |> Arcmin.from_degrees() @doc """ ## Examples iex> "12:00:00.0" ...> |> AstroEx.Unit.HMS.new() ...> |> AstroEx.Unit.HMS.to_dms() #AstroEx.Unit.DMS<180:00:00.0> iex> "12:00:00.0" ...> |> AstroEx.Unit.HMS.new() ...> |> AstroEx.Unit.cast(AstroEx.Unit.DMS) #AstroEx.Unit.DMS<180:00:00.0> """ def to_dms(%__MODULE__{} = value), do: value |> to_degrees() |> DMS.from_degrees() @doc """ ## Examples iex> 180 ...> |> AstroEx.Unit.Degrees.new() ...> |> AstroEx.Unit.HMS.from_degrees() #AstroEx.Unit.HMS<12:00:00.0> """ def from_degrees(%Degrees{value: value}), do: from_degrees(value) def from_degrees(val) when is_integer(val), do: from_degrees(val * 1.0) def from_degrees(val) when is_float(val), do: val |> calculate_hms() |> new() defp parse_hms(str) do [hrs, min, sec] = String.split(str, ":") hrs = String.to_integer(hrs) min = String.to_integer(min) sec = String.to_float(sec) {hrs, min, sec} end defp calculate_hms(degrees) do degrees |> calculate_hrs() |> calculate_min() |> calculate_sec() end defp calculate_hrs(val), do: Math.divmod(val, 15) defp calculate_min({hrs, remdr}) do remdr |> (&(&1 * 60)).() |> Math.divmod(15) |> Tuple.insert_at(0, hrs) end defp calculate_sec({hrs, min, remdr}), do: {hrs, min, remdr * 60.0 * 15.0} defp convert_to_degrees({hrs, min, sec}), do: convert_to_degrees({min, sec}, hrs) * 15 defp convert_to_degrees({min, sec}, sum), do: convert_to_degrees({sec}, sum + min / 60.0) defp convert_to_degrees({sec}, sum), do: sum + sec / 60.0 
/ 60.0

  defimpl AstroEx.Unit, for: __MODULE__ do
    alias AstroEx.Unit.{Arcmin, Arcsec, Degrees, DMS, HMS, Radian}

    def cast(%HMS{} = hms, Arcmin), do: HMS.to_arcmin(hms)
    def cast(%HMS{} = hms, Arcsec), do: HMS.to_arcsec(hms)
    def cast(%HMS{} = hms, Degrees), do: HMS.to_degrees(hms)
    def cast(%HMS{} = hms, DMS), do: HMS.to_dms(hms)
    def cast(%HMS{} = hms, HMS), do: hms
    def cast(%HMS{} = hms, Radian), do: HMS.to_radian(hms)
    def cast(%{value: value}, Float), do: value
    def cast(%{value: value}, Integer), do: trunc(value)

    def to_string(%{value: val}), do: val |> format_values() |> to_s()

    def from_degrees(val), do: HMS.from_degrees(val)

    defp to_s({hrs, min, sec}), do: "#{hrs}:#{min}:#{sec}"

    defp format_values({hrs, min, sec}), do: {pad(trunc(hrs)), pad(trunc(min)), pad(sec)}

    defp pad(val) when is_float(val) and val < 10, do: val |> Float.round(1) |> Float.to_string() |> pad(4)
    defp pad(val) when is_integer(val) and val < 10, do: val |> Integer.to_string() |> pad(2)
    defp pad(val) when is_integer(val), do: val |> Integer.to_string()
    defp pad(val) when is_float(val), do: val |> Float.round(2) |> Float.to_string()

    # pad/2 takes an explicit width: declaring a `\\ 2` default here would
    # define a pad/1 that clashes with the pad/1 clauses above and fail to compile.
    defp pad(val, n) when is_binary(val), do: String.pad_leading(val, n, "0")
  end

  defimpl Inspect, for: __MODULE__ do
    alias AstroEx.Unit
    import Inspect.Algebra

    def inspect(value, _opts) do
      value = Unit.to_string(value)
      concat(["#AstroEx.Unit.HMS<", value, ">"])
    end
  end
end
lib/astro_ex/unit/hms.ex
0.782912
0.432663
hms.ex
starcoder
defmodule WaoBirthday.Birthday do
  use Memento.Table, attributes: [:uid, :day, :month, :by_owner]

  alias Memento.Query
  alias Alchemy.Embed
  require Embed

  def read_birthday(id) do
    result = Memento.transaction fn ->
      Query.read __MODULE__, id
    end

    case result do
      {:ok, nil} -> {:error, :not_found}
      {:ok, birthday} -> {:ok, birthday}
      other -> IO.inspect other
    end
  end

  def write_birthday(id, day_str, month_str, owner) do
    with {month, _} when is_integer(month) <- Integer.parse(month_str),
         {day, _} when is_integer(day) <- Integer.parse(day_str),
         {:ok, _date} <- Date.new(2000, month, day) do
      {:ok,
       Query.write(%__MODULE__{
         uid: id,
         day: day,
         month: month,
         by_owner: owner
       })}
    else
      :error -> {:error, "Invalid Date"}
      {:error, _} -> {:error, "Invalid Date"}
    end
  end

  def for_day(%{day: day, month: month} \\ Date.utc_today()) do
    Query.select(__MODULE__, [
      {:==, :day, day},
      {:==, :month, month}
    ])
  end

  def until %{day: day, month: month} do
    now = Date.utc_today()
    {:ok, next} = Date.new(now.year, month, day)

    next = if Date.diff(next, now) < 0 do
      %{next | year: next.year + 1}
    else
      next
    end

    Date.diff(next, now)
  end

  def from_now birthday do
    case until birthday do
      0 -> "TODAY!"
      days ->
        days
        |> Timex.Duration.from_days()
        |> Timex.Format.Duration.Formatter.format(:humanized)
    end
  end

  def embed birthday, user do
    %Embed{}
    |> Embed.title("#{WaoBirthday.Utils.maybe_s(user.username)} is on the #{to_string(birthday)}")
    |> Embed.description("That's in #{from_now(birthday)}")
    |> Embed.color(0xc13621)
  end

  defimpl String.Chars do
    def to_string %{day: day, month: month} do
      # 11th, 12th and 13th take "th" even though they end in 1, 2 and 3.
      suffix = cond do
        day in 11..13 -> "th"
        rem(day, 10) == 1 -> "st"
        rem(day, 10) == 2 -> "nd"
        rem(day, 10) == 3 -> "rd"
        true -> "th"
      end

      "#{day}#{suffix} #{Timex.month_name(month)}"
    end
  end
end
lib/wao_birthday/birthday.ex
0.513425
0.424949
birthday.ex
starcoder
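The `until/1` wrap-around logic above can be exercised with the standard library alone; a minimal sketch with fixed dates (no Memento or Timex needed):

```elixir
now = ~D[2024-12-30]
{:ok, next} = Date.new(now.year, 1, 2)

# A birthday that already passed this calendar year rolls over to next year.
next = if Date.diff(next, now) < 0, do: %{next | year: next.year + 1}, else: next

Date.diff(next, now)
# => 3 (2025-01-02 is three days after 2024-12-30)
```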
defmodule Composer.DSL do @moduledoc """ Provies methods to generate custom AST from JSON DSL. The DSL is made of expressions and each expression has a type, arguments Example: `{ "type": "string", "arguments": [ "factor18" ]}` Currently the following expressions are allowed: type `null` accepts no arguments `{ "type": "null" }` type `atom` accepts a single string as an argument `{ "type": "atom", "arguments" => [ "a" ] }` type `string` accepts a single string as an argument `{ "type": "string", "arguments" => [ "a" ] }` type `var` accepts an atom expression as an argument `{ "type": "var", "arguments" => [{ "type": "atom", "arguments" => [ "a" ] }] }` type `!` accepts an expression as an argument type `+` accepts an expression as an argument type `-` accepts an expression as an argument type `abs` accepts an expression as an argument type `=` accepts two expressions as arguments type `!=` accepts two expressions as arguments type `!==` accepts two expressions as arguments type `&&` accepts two expressions as arguments type `||` accepts two expressions as arguments type `*` accepts two expressions as arguments type `++` accepts two expressions as arguments type `+` accepts two expressions as arguments type `--` accepts two expressions as arguments type `-` accepts two expressions as arguments type `/` accepts two expressions as arguments type `<` accepts two expressions as arguments type `<=` accepts two expressions as arguments type `<>` accepts two expressions as arguments type `==` accepts two expressions as arguments type `===` accepts two expressions as arguments type `>` accepts two expressions as arguments type `>=` accepts two expressions as arguments type `rem` accepts two expressions as arguments type `if` accepts two/three expressions as arguments The first argument is the conditions, second expression is executed if the conditions evaluate to true and the third argument is evaluated when the conditions evaluate to false type `list` accepts a finite number of expressions as arguments type `sum` accepts a finite number of expressions as arguments type `block` accepts a finite number of expressions as arguments ## Example DSL ```elixir { "type": "block", "arguments": [ { "type": "=", "arguments": [ { "type": "var", "arguments": [{ "type": "atom", "arguments": [ "acc" ] }] }, { "type": "sum", "arguments": [ { "type": "var", "arguments": [{ "type": "atom", "arguments": [ "a" ] }] }, { "type": "var", "arguments": [{ "type": "atom", "arguments": [ "b" ] }] }, { "type": "var", "arguments": [{ "type": "atom", "arguments": [ "c" ] }] } ] } ] }, { "type": "block", "arguments": [ { "type": "=", "arguments": [ { "type": "var", "arguments": [{ "type": "atom", "arguments": [ "a" ] }] }, 100 ] }, { "type": "sum", "arguments": [ { "type": "var", "arguments": [{ "type": "atom", "arguments": [ "acc" ] }] }, { "type": "var", "arguments": [{ "type": "atom", "arguments": [ "a" ] }] } ] } ] } ] } ``` """ @doc """ Parses the DSL given in a json file and generates custom AST which can be consumed by Composer.AST to generate elixir AST ## Example iex> json = File.read!("test/support/var.json") iex> Composer.DSL.convert(json) {:var, :a} """ def convert(json) do json |> Poison.decode! 
|> do_convert end @doc """ Converts the DST to custom AST ## Example iex> Composer.DSL.do_convert(%{ "type" => "+", "arguments" => [ 10, 20 ]}) { :+, [10, 20] } """ def do_convert(%{ "type" => "null" }) do nil end def do_convert(%{ "type" => "atom", "arguments" => [ word ] }) do String.to_atom(word) end def do_convert(%{ "type" => "string", "arguments" => [ word ] }) do word end def do_convert(%{ "type" => "var", "arguments" => [ var ] }) do { :var, do_convert(var) } end def do_convert(%{ "type" => "!", "arguments" => [ var ] }) do { :!, [ do_convert(var) ] } end def do_convert(%{ "type" => "+", "arguments" => [ var ] }) do { :+, [ do_convert(var) ] } end def do_convert(%{ "type" => "-", "arguments" => [ var ] }) do { :-, [ do_convert(var) ] } end def do_convert(%{ "type" => "abs", "arguments" => [ var ] }) do { :abs, [ do_convert(var) ] } end def do_convert(%{ "type" => "=", "arguments" => [ left, right ] }) do { :=, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => "!=", "arguments" => [ left, right ] }) do { :!=, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => "!==", "arguments" => [ left, right ] }) do { :!==, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => "&&", "arguments" => [ left, right ] }) do { :&&, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => "||", "arguments" => [ left, right ] }) do { :||, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => "*", "arguments" => [ left, right ] }) do { :*, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => "++", "arguments" => [ left, right ] }) do { :++, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => "+", "arguments" => [ left, right ] }) do { :+, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => "--", "arguments" => [ left, right ] }) do { :--, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => "-", "arguments" => [ left, right ] }) do { :-, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => "/", "arguments" => [ left, right ] }) do { :/, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => "<", "arguments" => [ left, right ] }) do { :<, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => "<=", "arguments" => [ left, right ] }) do { :<=, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => "<>", "arguments" => [ left, right ] }) do { :<>, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => "==", "arguments" => [ left, right ] }) do { :==, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => "===", "arguments" => [ left, right ] }) do { :===, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => ">", "arguments" => [ left, right ] }) do { :>, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => ">=", "arguments" => [ left, right ] }) do { :>=, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => "rem", "arguments" => [ left, right ] }) do { :rem, [ do_convert(left), do_convert(right) ] } end def do_convert(%{ "type" => "if", "arguments" => [ conditions, first_clause ] }) do do_convert(%{ "type" => "if", "arguments" => [ conditions, first_clause, %{ "type" => "null" } ] }) end def do_convert(%{ "type" => "if", "arguments" => [ conditions, first_clause, second_clause ] }) do { :if, [ do_convert(conditions), do_convert(first_clause), 
do_convert(second_clause) ] } end def do_convert(%{ "type" => "list", "arguments" => elements }) do { :list, Enum.map(elements, &do_convert/1) } end def do_convert(%{ "type" => "sum", "arguments" => arguments }) do { :sum, Enum.map(arguments, &do_convert/1) } end def do_convert(%{ "type" => "block", "arguments" => arguments }) do { :block, Enum.map(arguments, &do_convert/1) } end def do_convert(true), do: true def do_convert(false), do: false def do_convert(x) when is_binary(x), do: x def do_convert(x) when is_number(x), do: x end
apps/composer/lib/dsl.ex
0.903443
0.935524
dsl.ex
starcoder
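An end-to-end sketch of the conversion above (assumes Poison is available, as `convert/1` requires):

```elixir
json = ~s({
  "type": "+",
  "arguments": [
    { "type": "var", "arguments": [{ "type": "atom", "arguments": [ "a" ] }] },
    2
  ]
})

Composer.DSL.convert(json)
# => {:+, [{:var, :a}, 2]}
```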
defmodule DataDaemon.Plug do @moduledoc false @doc false @spec safe_in(any, [atom | String.t()]) :: any def safe_in(data, []), do: data def safe_in(data, [field | fields]), do: safe_in(Map.get(data, field), fields) @doc false @spec enable :: term def enable do quote location: :keep do @behaviour Plug import DataDaemon.Plug, only: [safe_in: 2] import Plug.Conn, only: [register_before_send: 2] @doc false @spec init(Keyword.t()) :: atom() | binary() | [atom() | binary() | [any()] | number() | tuple() | map()] | number() | tuple() | %{ (atom() | binary() | [any()] | number() | tuple() | map()) => atom() | binary() | [any()] | number() | tuple() | map() } @impl Plug def init(opts) do tags = opts |> Keyword.get(:tags, []) |> Enum.map(fn conn = {:conn, field} when is_list(field) -> {:conn, field} {:conn, field} -> {:conn, [field]} env = {:system, _} -> env {k, {:conn, field}} when is_list(field) -> {to_string(k), {:conn, field}} {k, {:conn, field}} -> {to_string(k), {:conn, [field]}} {k, env = {:system, _}} -> {to_string(k), env} {k, v} -> {to_string(k), to_string(v)} v -> to_string(v) end) |> Enum.group_by(fn {:conn, _} -> :conn_tags {_, {:conn, _}} -> :conn_tags _ -> :tags end) %{ metric: opts[:metric] || raise("Need to set metric name."), tags: Map.get(tags, :tags, []), conn_tags: Map.get(tags, :conn_tags, []), exclude: opts[:exclude] || [] } end @doc false @spec call(Plug.Conn.t(), map) :: Plug.Conn.t() @impl Plug def call(conn = %{request_path: path}, %{ metric: metric, tags: tags, conn_tags: conn_tags, exclude: exclude }) do if path in exclude do conn else start_time = :erlang.monotonic_time(:milli_seconds) register_before_send(conn, fn conn -> time = :erlang.monotonic_time(:milli_seconds) - start_time conn_tags = Enum.map(conn_tags, fn {:conn, conn_tag} -> safe_in(conn, conn_tag) {k, {:conn, conn_tag}} -> {k, safe_in(conn, conn_tag)} end) spawn(fn -> timing( metric, time, tags: tags ++ conn_tags ) end) conn end) end end end end end
lib/data_daemon/plug.ex
0.80271
0.448064
plug.ex
starcoder
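The `init/1` above normalizes the `:tags` option and splits it into static tags versus connection-derived tags; a sketch of what it returns for a typical keyword list (values illustrative):

```elixir
init(
  metric: "http.request",
  tags: [env: "prod", method: {:conn, :method}],
  exclude: ["/health"]
)
# => %{
#      metric: "http.request",
#      tags: [{"env", "prod"}],
#      conn_tags: [{"method", {:conn, [:method]}}],
#      exclude: ["/health"]
#    }
```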
defmodule Rolodex.Schema do @moduledoc """ Exposes functions and macros for working with request and response parameters. It includes two macros. Used together, they will setup reuseable schemas for things like API responses. - `schema/3` - for declaring a schema - `field/3` - for declaring schema fields It also exposes the following functions: - `is_schema_module?/1` - determines if the provided item is a module that has defined a reuseable schema - `to_map/1` - serializes a schema module into a map for use by a `Rolodex.Processor` behaviour - `new_field/1` - parses a schema field into a map of metadata. The `field/3` macro uses this function to parse the metadata passed in. This function is also called when parsing all controller action `@doc` parameter annotations - `get_refs/1` - takes a schema or map and traverses it, looking for any nested references to schemas within """ defmacro __using__(_opts) do quote do import Rolodex.Schema end end @doc """ Opens up the schema definition for the current module. Will name the schema and generate metadata for the schema based on subsequent calls to `field/3` **Accepts** - `name` - the schema name - `opts` - a keyword list of options (currently, only looks for a `desc` key) - `block` - the inner schema definition with one or more calls to `field/3` ## Example defmodule MySchema do use Rolodex.Schema schema "MySchema", desc: "Example schema" do # Atomic field with no description field :id, :uuid # Atomic field with a description field :name, :string, desc: "The object's name" # A field that refers to another, nested object field :other, OtherSchema # A field that is an array of items of one-or-more types field :multi, :list, of: [:string, OtherSchema] # A field that is one of the possible provided types field :any, :one_of, of: [:string, OtherSchema] end end """ defmacro schema(name, opts \\ [], do: block) do quote do Module.register_attribute(__MODULE__, :fields, accumulate: true) unquote(block) def __schema__(:name), do: unquote(name) def __schema__(:desc), do: unquote(Keyword.get(opts, :desc, nil)) def __schema__(:fields), do: @fields |> Enum.reverse() end end @doc """ Adds a new field to the schema. Will generate a method `__field__/1` where the one argument is the field `identifier`. This can be used to fetch the field metadata later. 
Accepts - `identifier` - field name - `type` - either an atom or another Rolodex.Schema module - `opts` - a keyword list of options, looks for `desc` and `of` (for array types) ## Example defmodule MySchema do use Rolodex.Schema schema "MySchema", desc: "Example schema" do # Atomic field with no description field :id, :uuid # Atomic field with a description field :name, :string, desc: "The object's name" # A field that refers to another, nested object field :other, OtherSchema # A field that is an array of items of one-or-more types field :multi, :list, of: [:string, OtherSchema] # A field that is one of the possible provided types field :any, :one_of, of: [:string, OtherSchema] end end """ defmacro field(identifier, type, opts \\ []) do quote do @fields unquote(identifier) def __field__(unquote(identifier)) do field = ([type: unquote(type)] ++ unquote(opts)) |> new_field() {unquote(identifier), field} end end end @doc """ Determines if an arbitrary item is a module that has defined a reusable schema via `Rolodex.Schema` macros ## Example iex> defmodule SimpleSchema do ...> use Rolodex.Schema ...> schema "SimpleSchema", desc: "Demo schema" do ...> field :id, :uuid ...> end ...> end iex> iex> # Validating a schema module iex> Rolodex.Schema.is_schema_module?(SimpleSchema) true iex> # Validating some other module iex> Rolodex.Schema.is_schema_module?(OtherModule) false """ @spec is_schema_module?(any()) :: boolean() def is_schema_module?(item) def is_schema_module?(module) when is_atom(module) do try do module.__info__(:functions) |> Keyword.has_key?(:__schema__) rescue # Any error means that `module` isn't a module and so we can just say `false` _ -> false end end def is_schema_module?(_), do: false @doc """ Serializes the `Rolodex.Schema` metadata defined for the given module into an object, using the `new_field/1` helper. ## Example iex> defmodule OtherSchema do ...> use Rolodex.Schema ...> ...> schema "OtherSchema" do ...> field :id, :uuid ...> end ...> end iex> iex> defmodule MySchema do ...> use Rolodex.Schema ...> ...> schema "MySchema", desc: "An example" do ...> # Atomic field with no description ...> field :id, :uuid ...> ...> # Atomic field with a description ...> field :name, :string, desc: "The schema's name" ...> ...> # A field that refers to another, nested object ...> field :other, OtherSchema ...> ...> # A field that is an array of items of one-or-more types ...> field :multi, :list, of: [:string, OtherSchema] ...> ...> # A field that is one of the possible provided types ...> field :any, :one_of, of: [:string, OtherSchema] ...> end ...> end iex> iex> Rolodex.Schema.to_map(MySchema) %{ type: :object, desc: "An example", properties: %{ id: %{type: :uuid}, name: %{desc: "The schema's name", type: :string}, other: %{type: :ref, ref: Rolodex.SchemaTest.OtherSchema}, multi: %{ type: :list, of: [ %{type: :string}, %{type: :ref, ref: Rolodex.SchemaTest.OtherSchema} ] }, any: %{ type: :one_of, of: [ %{type: :string}, %{type: :ref, ref: Rolodex.SchemaTest.OtherSchema} ] } } } """ @spec to_map(module()) :: map() def to_map(schema) do desc = schema.__schema__(:desc) props = schema.__schema__(:fields) |> Map.new(&schema.__field__/1) new_field(type: :object, properties: props, desc: desc) end @doc """ Parses data for schema fields and controller action parameter annotations. Resolves references to any nested `Rolodex.Schema` modules within. Generates a new map representing the field in a standardized format. Every field within the map returned will have a `type`. 
Some fields, like lists and objects, have other data nested within. Other fields hold references (called `refs`) to `Rolodex.Schema` modules. You can think of the output as an AST of parameter data that a `Rolodex.Processor` behaviour can accept for writing out to a destination. ## Examples ### Parsing primitive data types (e.g. `string`) # Creating a simple field with a primitive type iex> Rolodex.Schema.new_field(:string) %{type: :string} # With additional options iex> Rolodex.Schema.new_field(type: :string, desc: "My string") %{type: :string, desc: "My string"} ### Parsing collections: objects and lists # Create an object iex> Rolodex.Schema.new_field(type: :object, properties: %{id: :uuid, name: :string}) %{ type: :object, properties: %{ id: %{type: :uuid}, name: %{type: :string} } } # Shorthand for creating an object: a top-level map or keyword list iex> Rolodex.Schema.new_field(%{id: :uuid, name: :string}) %{ type: :object, properties: %{ id: %{type: :uuid}, name: %{type: :string} } } # Create a list iex> Rolodex.Schema.new_field(type: :list, of: [:string, :uuid]) %{ type: :list, of: [ %{type: :string}, %{type: :uuid} ] } # Shorthand for creating a list: a list of types iex> Rolodex.Schema.new_field([:string, :uuid]) %{ type: :list, of: [ %{type: :string}, %{type: :uuid} ] } ### Arbitrary collections Use the `one_of` type to describe a field that can be one of the provided types iex> Rolodex.Schema.new_field(type: :one_of, of: [:string, :uuid]) %{ type: :one_of, of: [ %{type: :string}, %{type: :uuid} ] } ### Working with schemas iex> defmodule DemoSchema do ...> use Rolodex.Schema ...> ...> schema "DemoSchema" do ...> field :id, :uuid ...> end ...> end iex> iex> # Creating a field with a `Rolodex.Schema` as the top-level type iex> Rolodex.Schema.new_field(DemoSchema) %{type: :ref, ref: Rolodex.SchemaTest.DemoSchema} iex> iex> # Creating a collection field with various members, including a nested schema iex> Rolodex.Schema.new_field(type: :list, of: [:string, DemoSchema]) %{ type: :list, of: [ %{type: :string}, %{type: :ref, ref: Rolodex.SchemaTest.DemoSchema} ] } """ @spec new_field(atom() | module() | list() | map()) :: map() def new_field(opts) def new_field(type) when is_atom(type), do: new_field(type: type) def new_field(opts) when is_list(opts) do case Keyword.keyword?(opts) do true -> opts |> Map.new() |> new_field() # List shorthand: if a plain list is provided, turn it into a `type: :list` field false -> new_field(%{type: :list, of: opts}) end end def new_field(opts) when is_map(opts), do: create_field(opts) defp create_field(%{type: :object, properties: props} = metadata) do resolved_props = Map.new(props, fn {k, v} -> {k, new_field(v)} end) %{metadata | properties: resolved_props} end defp create_field(%{type: :list, of: items} = metadata) do resolved_items = Enum.map(items, &new_field/1) %{metadata | of: resolved_items} end defp create_field(%{type: :one_of, of: items} = metadata) do resolved_items = Enum.map(items, &new_field/1) %{metadata | of: resolved_items} end defp create_field(%{type: type} = metadata) do case is_schema_module?(type) do true -> Map.merge(metadata, %{type: :ref, ref: type}) false -> metadata end end # Object shorthand: if a map is provided without a reserved `type: <type>` # identifier, turn it into a `type: :object` field defp create_field(data) when is_map(data) do new_field(%{type: :object, properties: data}) end @doc """ Returns a unique list of all nested `Rolodex.Schema` refs within the current field map or schema module. 
## Examples iex> defmodule NestedSchema do ...> use Rolodex.Schema ...> ...> schema "NestedSchema" do ...> field :id, :uuid ...> end ...> end iex> iex> defmodule TopSchema do ...> use Rolodex.Schema ...> ...> schema "TopSchema", desc: "An example" do ...> # Atomic field with no description ...> field :id, :uuid ...> ...> # Atomic field with a description ...> field :name, :string, desc: "The schema's name" ...> ...> # A field that refers to another, nested object ...> field :other, NestedSchema ...> ...> # A field that is an array of items of one-or-more types ...> field :multi, :list, of: [:string, NestedSchema] ...> ...> # A field that is one of the possible provided types ...> field :any, :one_of, of: [:string, NestedSchema] ...> end ...> end iex> iex> # Searching for refs in a formatted map iex> Rolodex.Schema.new_field(type: :list, of: [TopSchema, NestedSchema]) ...> |> Rolodex.Schema.get_refs() [Rolodex.SchemaTest.NestedSchema, Rolodex.SchemaTest.TopSchema] iex> iex> # Searching for refs in an arbitrary map iex> Rolodex.Schema.get_refs(%{id: :uuid, nested: TopSchema}) [Rolodex.SchemaTest.NestedSchema] iex> iex> # Search for refs in a schema iex> Rolodex.Schema.get_refs(TopSchema) [Rolodex.SchemaTest.NestedSchema] """ @spec get_refs(module() | map()) :: [module()] def get_refs(field) def get_refs(%{of: items}) when is_list(items) do items |> Enum.reduce(MapSet.new(), &collect_refs_for_item/2) |> Enum.to_list() end def get_refs(%{type: :object, properties: props}) when is_map(props) do props |> Enum.reduce(MapSet.new(), fn {_, item}, refs -> collect_refs_for_item(item, refs) end) |> Enum.to_list() end def get_refs(%{type: :ref, ref: object}) when is_atom(object) do [object] end def get_refs(field) when is_map(field) do field |> Enum.reduce(MapSet.new(), fn {_, value}, refs -> collect_refs_for_item(value, refs) end) |> Enum.to_list() end def get_refs(schema) when is_atom(schema) do case is_schema_module?(schema) do true -> schema |> to_map() |> get_refs() false -> [] end end def get_refs(_), do: [] defp collect_refs_for_item(item, refs) do case get_refs(item) do [] -> refs objects -> objects |> MapSet.new() |> MapSet.union(refs) end end end
lib/rolodex/schema.ex
0.879613
0.67555
schema.ex
starcoder
defmodule Optimal.SchemaHelpers do @moduledoc """ Helpers for building and working with schemas. """ @doc """ The schema of the opts for making an Optimal schema. """ @spec schema_schema() :: Optimal.Schema.t() def schema_schema() do %Optimal.Schema{ opts: [ :opts, :required, :defaults, :extra_keys?, :custom, :describe ], types: [ opts: [{:list, :atom}, :keyword], required: {:list, :atom}, defaults: :keyword, extra_keys?: :boolean, custom: :keyword, describe: :keyword ], defaults: [ opts: [], required: [], describe: [], defaults: [], extra_keys?: false, custom: [] ], describe: [ opts: "A list of opts accepted, or a keyword of opt name to opt type", required: "A list of required opts (all of which must be in `opts` as well)", defaults: "A keyword list of option name to a default value. Values must pass type rules", extra_keys?: "If enabled, extra keys not specified by the schema do not fail validation", custom: "A keyword list of option name (for errors) and custom validations. See README", describe: "A keyword list of option names to short descriptions (like these)" ], extra_keys?: false, custom: [ opts: &Optimal.Type.validate_types/4 ] } end @doc """ The schema of the opts for merging two optimal schemas """ @spec merge_schema() :: Optimal.Schema.t() def merge_schema() do %Optimal.Schema{ opts: [ :annotate, :add_required? ], types: [ annotate: :string, add_required?: :boolean ], defaults: [ add_required?: true ], describe: [ annotate: "Annotates the source of the opt, to be used in displaying documentation.", add_required?: "If true, all required fields from left/right are marked as required. " <> "Otherwise, only takes required fields from the left." ] } end end
lib/optimal/schema_helpers.ex
0.832237
0.409457
schema_helpers.ex
starcoder
defmodule Toby.App.Views.System do @moduledoc """ A component that displays summarized information about the Erlang VM. """ import Ratatouille.View import Toby.Util.Formatting def render(%{ data: %{ cpu: cpu, limits: limits, memory: memory, statistics: statistics, system: system } }) do row do column(size: 12) do row do column(size: 6) do panel(title: "System and Architecture") do table do table_row do table_cell(content: "System Version:") table_cell(content: to_string(system.otp_release)) end table_row do table_cell(content: "ERTS Version:") table_cell(content: to_string(system.erts_version)) end table_row do table_cell(content: "Compiled for:") table_cell(content: to_string(system.compiled_for)) end table_row do table_cell(content: "Emulator Wordsize:") table_cell(content: to_string(system.emulator_wordsize)) end table_row do table_cell(content: "Process Wordsize:") table_cell(content: to_string(system.process_wordsize)) end table_row do table_cell(content: "SMP Support:") table_cell(content: to_string(system.smp_support?)) end table_row do table_cell(content: "Thread Support:") table_cell(content: to_string(system.thread_support?)) end table_row do table_cell(content: "Async thread pool size:") table_cell(content: to_string(system.async_thread_pool_size)) end end end end column(size: 6) do panel(title: "Memory Usage") do table do table_row do table_cell(content: "Total") table_cell(content: format_bytes(memory.total)) end table_row do table_cell(content: "Processes") table_cell(content: format_bytes(memory.processes)) end table_row do table_cell(content: "Processes (used)") table_cell(content: format_bytes(memory.processes_used)) end table_row do table_cell(content: "System") table_cell(content: format_bytes(memory.system)) end table_row do table_cell(content: "Atoms") table_cell(content: format_bytes(memory.atom)) end table_row do table_cell(content: "Atoms (used)") table_cell(content: format_bytes(memory.atom_used)) end table_row do table_cell(content: "Binaries") table_cell(content: format_bytes(memory.binary)) end table_row do table_cell(content: "Code") table_cell(content: format_bytes(memory.code)) end table_row do table_cell(content: "ETS") table_cell(content: format_bytes(memory.ets)) end end end end end row do column(size: 6) do panel(title: "CPUs & Threads") do table do table_row do table_cell(content: "Logical CPUs:") table_cell(content: to_string(cpu.logical_cpus)) end table_row do table_cell(content: "Online Logical CPUs:") table_cell(content: to_string(cpu.online_logical_cpus)) end table_row do table_cell(content: "Available Logical CPUs:") table_cell(content: to_string(cpu.available_logical_cpus)) end table_row do table_cell(content: "Schedulers:") table_cell(content: to_string(cpu.schedulers)) end table_row do table_cell(content: "Online schedulers:") table_cell(content: to_string(cpu.online_schedulers)) end table_row do table_cell(content: "Available schedulers:") table_cell(content: to_string(cpu.available_schedulers)) end end end end column(size: 6) do panel(title: "Statistics") do table do table_row do table_cell(content: "Uptime:") table_cell(content: format_ms(statistics.uptime_ms)) end table_row do table_cell(content: "Run Queue:") table_cell(content: to_string(statistics.run_queue)) end table_row do table_cell(content: "IO Input:") table_cell(content: format_bytes(statistics.io_input_bytes)) end table_row do table_cell(content: "IO Output:") table_cell(content: format_bytes(statistics.io_output_bytes)) end end end end end row do column(size: 12) do panel(title: 
"System statistics / limit") do table do table_row do table_cell(content: "Atoms:") table_cell(content: format_limit(limits.atoms)) end table_row do table_cell(content: "Processes:") table_cell(content: format_limit(limits.procs)) end table_row do table_cell(content: "Ports:") table_cell(content: format_limit(limits.ports)) end table_row do table_cell(content: "ETS:") table_cell(content: format_limit(limits.ets)) end table_row do table_cell(content: "Distribution buffer busy limit:") table_cell(content: to_string(limits.dist_buffer_busy)) end end end end end end end end defp format_limit(%{count: count, limit: limit, percent_used: percent}) do "#{count} / #{limit} (#{percent}% used)" end end
lib/toby/app/views/system.ex
0.62498
0.405213
system.ex
starcoder
defmodule Canvas.Resources.EnrollmentTerms do @moduledoc """ Provides functions to interact with the [enrollment term endpoints](https://canvas.instructure.com/doc/api/enrollment_terms). """ alias Canvas.{Client, Listing, Response} alias Canvas.Resources.EnrollmentTerm def create_enrollment_term() do end def update_enrollment_term() do end def delete_enrollment_term() do end @doc """ Retrieve a paginated list of terms in this account. See: - https://canvas.instructure.com/doc/api/accounts.html#method.accounts.courses_api ## Examples: client = %Canvas.Client{access_token: "<KEY>", base_url: "https://instructure.test"} {:ok, response} = Canvas.Resources.EnrollmentTerms.list_enrollment_terms(client, account_id = 1) {:ok, response} = Canvas.Resources.EnrollmentTerms.list_enrollment_terms(client, account_id = 1, per_page: 20, page: 2) """ @spec list_enrollment_terms(Client.t(), String.t() | integer, Keyword.t()) :: {:ok | :error, Response.t()} def list_enrollment_terms(client, account_id, options \\ []) do url = Client.versioned("/accounts/#{account_id}/terms") Listing.get(client, url, options) |> Response.parse(%{"enrollment_terms" => [%EnrollmentTerm{}]}, :enrollment_terms) end @doc """ List all terms in an account automatically paginating if necessary. This function will automatically page through all pages, returning all assignments. ## Examples: client = %Canvas.Client{access_token: "<KEY>", base_url: "https://instructure.test"} {:ok, response} = Canvas.Resources.EnrollmentTerms.all_enrollment_terms(client, account_id = 1) """ @spec all_enrollment_terms(Client.t(), String.t() | integer, Keyword.t()) :: {:ok, list(%EnrollmentTerm{})} | {:error, Response.t()} def all_enrollment_terms(client, account_id, options \\ []) do Listing.get_all(__MODULE__, :list_enrollment_terms, [client, account_id, options]) end @doc """ Retrieves the details for an enrollment term in the account. Includes overrides by default. ## Examples: client = %Canvas.Client{access_token: "<KEY>", base_url: "https://instructure.test"} {:ok, response} = Canvas.Resources.EnrollmentTerms.get_enrollment_term(client, account_id = 1, term_id = 10) """ @spec get_enrollment_term(Client.t(), String.t() | integer, String.t() | integer, Keyword.t()) :: {:ok, %EnrollmentTerm{}} | {:error, Response.t()} def get_enrollment_term(client, account_id, id, options \\ []) do url = Client.versioned("/accounts/#{account_id}/terms/#{id}") Listing.get(client, url, options) |> Response.parse(%EnrollmentTerm{}) end end
lib/canvas/resources/enrollment_terms.ex
0.821152
0.489564
enrollment_terms.ex
starcoder
defmodule Searchex.Command do @moduledoc """ Main Searchex workflow results <- query <- index <- catalog <- params This workflow establishes a dependency chain, with higher level steps depending on the outputs of lower level steps. Each step generates an intermediate output which can be cached to minimize re-execution of compute-intensive steps. The processing middleware is based on `Shake`. An LRU Cache is used, with auto-expiration of old keys. Cache keys are digests of the content produced at each step of the build chain. Note that all of these functions take a `cfg_snip` argument. The `cfg_snip` is a wildcard string which matches against the repo/collection name. For example, if the repo/collection name is `sample/genesis`, any of these cfg_snips would match (`sample/genesis`, `genesis`, `sampgeni`, `geni`) All of these functions return simple Elixir terms with no output formatting. See `Searchex.Render` for a list of functions that perform special handling on command output. """ alias Util.Cache @doc """ Generate the catalog for `cfg_snip` The catalog is a Map that contains all configuration data, document text and meta-data. The catalog is generated from a config file, stored at `~/.searchex/repo/<cfg_snip>.yml`. """ def catalog(cfg_snip) do Searchex.Command.Catalog.exec(cfg_snip) |> Cache.save end @doc """ Generate the index for `cfg_snip` The index is a data structure used for fast search and retrieval. The index lives in a Process Tree, one worker for each keyword. """ def index(cfg_snip) do Searchex.Command.Index.exec(cfg_snip) |> Cache.save end @doc """ Generate both the catalog and the index for `cfg_snip` in one step """ def build(cfg_snip) do Searchex.Command.Build.exec(cfg_snip) |> Cache.save end @doc """ Return info about the collection - Number of documents - Generation date - Average size of documents - etc. """ def info(cfg_snip) do Searchex.Command.Info.exec(cfg_snip) end @doc """ Query the collection, and return query scores. """ def query(cfg_snip, query) do Searchex.Command.Query.exec(cfg_snip, query) |> Cache.save end @doc """ Show last results """ def results(cfg_snip) do Searchex.Command.Results.exec(cfg_snip) end @doc """ Show document text """ def show(cfg_snip, tgt_id) do Searchex.Command.Show.exec(cfg_snip, tgt_id) end end
lib/searchex/command.ex
0.835484
0.579043
command.ex
starcoder
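A hypothetical session using the commands above (collection names are illustrative; `cfg_snip` is the wildcard matcher described in the moduledoc):

```elixir
Searchex.Command.build("geni")          # catalog + index for e.g. "sample/genesis"
Searchex.Command.query("geni", "light") # run a query, caching the scores
Searchex.Command.results("geni")        # show the last query's results
Searchex.Command.show("geni", 1)        # display one document's text
```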
defmodule SunTimes do
  @moduledoc """
  Documentation for SunTimes.
  """

  @default_zenith 90.83333
  @degrees_per_hour 360.0 / 24.0

  @doc """
  Calculates the sunrise.
  """
  def rise(date, lat, lon) do
    calculate(:rise, date, lat, lon)
  end

  @doc """
  Calculates the sunset.
  """
  def set(date, lat, lon) do
    calculate(:set, date, lat, lon)
  end

  defp calculate(event, date, lat, lon) do
    datetime = date |> Timex.to_datetime

    # lngHour
    longitude_hour = lon / @degrees_per_hour

    # t
    base_time = if event == :rise, do: 6.0, else: 18.0
    approximate_time = day_of_year(datetime) + (base_time - longitude_hour) / 24.0

    # M
    mean_sun_anomaly = (0.9856 * approximate_time) - 3.289

    # L
    sun_true_longitude =
      mean_sun_anomaly +
        (1.916 * Math.sin(Math.deg2rad(mean_sun_anomaly))) +
        (0.020 * Math.sin(2 * Math.deg2rad(mean_sun_anomaly))) +
        282.634

    sun_true_longitude = coerce_degrees(sun_true_longitude)

    # RA
    tan_right_ascension = 0.91764 * Math.tan(Math.deg2rad(sun_true_longitude))
    sun_right_ascension = Math.rad2deg(Math.atan(tan_right_ascension))
    sun_right_ascension = coerce_degrees(sun_right_ascension)

    # right ascension value needs to be in the same quadrant as L
    sun_true_longitude_quadrant = Float.floor(sun_true_longitude / 90.0) * 90.0
    sun_right_ascension_quadrant = Float.floor(sun_right_ascension / 90.0) * 90.0
    sun_right_ascension = sun_right_ascension + (sun_true_longitude_quadrant - sun_right_ascension_quadrant)

    # RA = RA / 15
    sun_right_ascension_hours = sun_right_ascension / @degrees_per_hour

    sin_declination = 0.39782 * Math.sin(Math.deg2rad(sun_true_longitude))
    cos_declination = Math.cos(Math.asin(sin_declination))

    cos_local_hour_angle =
      (Math.cos(Math.deg2rad(@default_zenith)) - (sin_declination * Math.sin(Math.deg2rad(lat)))) /
        (cos_declination * Math.cos(Math.deg2rad(lat)))

    # the sun never rises on this location (on the specified date)
    if cos_local_hour_angle > 1 do
      {:error, "Never rises"}
    else
      # the sun never sets on this location (on the specified date)
      if cos_local_hour_angle < -1 do
        {:error, "Never sets"}
      else
        # H
        suns_local_hour =
          if event == :rise do
            360 - Math.rad2deg(Math.acos(cos_local_hour_angle))
          else
            Math.rad2deg(Math.acos(cos_local_hour_angle))
          end

        # H = H / 15
        suns_local_hour_hours = suns_local_hour / @degrees_per_hour

        # T = H + RA - (0.06571 * t) - 6.622
        local_mean_time = suns_local_hour_hours + sun_right_ascension_hours - (0.06571 * approximate_time) - 6.622

        # UT = T - lngHour
        gmt_hours = local_mean_time - longitude_hour
        gmt_hours = if gmt_hours > 24, do: gmt_hours - 24.0, else: gmt_hours
        gmt_hours = if gmt_hours < 0, do: gmt_hours + 24.0, else: gmt_hours

        hour = Float.floor(gmt_hours)
        hour_remainder = (gmt_hours - hour) * 60.0
        minute = Float.floor(hour_remainder)
        seconds = Float.floor((hour_remainder - minute) * 60.0)

        Timex.to_datetime(
          {{datetime.year, datetime.month, datetime.day}, {round(hour), round(minute), round(seconds)}},
          "Etc/UTC"
        )
      end
    end
  end

  defp coerce_degrees(d) when d < 0, do: coerce_degrees(d + 360)
  defp coerce_degrees(d) when d >= 360, do: coerce_degrees(d - 360)
  defp coerce_degrees(d), do: d

  # Ordinal day of the year (1..366), counted from January 1st. ISO week
  # arithmetic cannot be used here: ISO weeks do not align with the calendar
  # year unless January 1st falls on a Monday.
  defp day_of_year(d) do
    :calendar.date_to_gregorian_days(d.year, d.month, d.day) -
      :calendar.date_to_gregorian_days(d.year, 1, 1) + 1
  end

  defp next_day(datetime) do
    Timex.shift(datetime, days: 1) |> Timex.to_date
  end

  defp prev_day(datetime) do
    Timex.shift(datetime, days: -1) |> Timex.to_date
  end

  defp to_datetime(d) do
    t = Timex.now
    d
    |> Timex.to_datetime
    |> Timex.shift(hour: t.hour, minute: t.minute, second: t.second)
  end
end
lib/sun_times.ex
0.79999
0.469885
sun_times.ex
starcoder
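A usage sketch for the module above (assumes the Timex and Math dependencies it uses; coordinates are Greenwich, and the exact timestamps depend on the algorithm's approximations):

```elixir
SunTimes.rise(~D[2024-06-21], 51.48, 0.0)
# => a DateTime in Etc/UTC, roughly 03:40 UTC

SunTimes.set(~D[2024-06-21], 51.48, 0.0)
# => a DateTime in Etc/UTC, roughly 20:20 UTC

# Inside the polar circles the guard clauses short-circuit:
SunTimes.rise(~D[2024-06-21], 89.0, 0.0)
# => {:error, "Never sets"}
```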
defmodule Membrane.RTC.Engine.Track do @moduledoc """ Module representing media track. Media track is a single audio or video. Tracks that are related to each other (e.g. audio from microphone that corresponds to video from a web cam) can be grouped into the same stream by assigning each of them the same stream id. """ alias ExSDP.Attribute.FMTP @enforce_keys [:type, :stream_id, :id, :fmtp] defstruct @enforce_keys ++ [encoding: nil, format: nil, active?: true, metadata: nil, ctx: %{}] @type id :: String.t() @type encoding :: atom() @type format :: [atom()] @typedoc """ This module contains: * `type` - audio or video, * `stream_id` - media stream this track belongs to. Relationship between tracks (e.g. audio and video) can be indicated by assigning each of them the same `stream_id`. One `stream_id` can be assign to any number of tracks. * `id` - track id * `encoding` - track encoding * `format` - list of available track formats. At this moment max two formats can be specified. One of them has to be `:raw` which indicates that other Endpoints will receive this track in format of `encoding`. The other one can be any atom (e.g. `:RTP`). * `fmtp` - struct describing format specific parameters e.g. for H264 it contains `profile_level_id` * `active?` - indicates whether track is still available or not (because peer left a room) * `metadata` - any data passed by user to be linked with this track * `ctx` - any data Endpoints need to associate with `#{inspect(__MODULE__)}.t()` for internal usage """ @type t :: %__MODULE__{ type: :audio | :video, stream_id: String.t(), id: id, encoding: encoding, format: format, fmtp: FMTP, active?: boolean(), metadata: any(), ctx: map() } @doc """ Creates a new track. Tracks belonging to the same stream should have the same `stream_id`, that can be generated with `stream_id/0`. """ @spec new( :audio | :video, stream_id :: String.t(), id: String.t(), encoding: encoding, format: format, fmtp: FMTP, metadata: any(), ctx: map() ) :: t def new(type, stream_id, opts \\ []) do id = Keyword.get(opts, :id, Base.encode16(:crypto.strong_rand_bytes(8))) %__MODULE__{ type: type, stream_id: stream_id, id: id, encoding: Keyword.get(opts, :encoding), format: Keyword.get(opts, :format), fmtp: Keyword.get(opts, :fmtp), metadata: Keyword.get(opts, :metadata), ctx: Keyword.get(opts, :ctx, %{}) } end @doc """ Generates stream id, that can be used to mark tracks belonging to the same stream. """ @spec stream_id() :: String.t() def stream_id(), do: UUID.uuid4() end
lib/membrane_rtc_engine/track.ex
0.858363
0.460653
track.ex
starcoder
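Creating a pair of related tracks per the moduledoc above (the encoding atoms are illustrative):

```elixir
alias Membrane.RTC.Engine.Track

stream_id = Track.stream_id()

audio = Track.new(:audio, stream_id, encoding: :OPUS, format: [:raw])
video = Track.new(:video, stream_id, encoding: :H264, format: [:raw, :RTP])

audio.stream_id == video.stream_id
# => true — the shared stream id marks them as one related stream
```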
defmodule Statux do @moduledoc """ """ defdelegate child_spec(opts), to: Statux.Tracker def load_rule_set!(path), do: Statux.RuleSet.load_json!(path) def init(init_arg) do {:ok, init_arg} end @doc """ Simply evaluates the given value for the given status based on its value. This function can be used to have a simple rule evaluation without any additional constraints. Therefore, 'constraints' are ignored and only the 'value' requirements are evaluated. """ def valid_states(status_name, value, rule_set) do status_options = rule_set[status_name][:status] case Statux.ValueRules.should_be_ignored?(value, rule_set[status_name]) do true -> [] _ -> Statux.ValueRules.find_possible_valid_status(value, status_options) end end @doc """ Pass a new value to Statux to be evaluated against the given rule_set Feedback is provided asynchronously, either through the configured PubSub Module or by calling the callbacks given in the rule set. Statux.put("my_device", :battery_voltage, 12.4) """ def put(id, status_name, value, rule_set \\ :default) do Statux.Tracker.put(id, status_name, value, rule_set) end def put_for(server, id, status_name, value, rule_set \\ :default) do Statux.Tracker.put(server, id, status_name, value, rule_set) end @doc """ Retrieve the current status for a given ID. iex> Statux.get("my_device") %{ battery_voltage: %Statux.Models.Status{ current: :ok, history: [:ok, :low], transition_count: 2, transitioned_at: DateTime%{} }, other_status: %Statux.Models.Status{...}, ... } """ def get(id) do Statux.Tracker.get(id) end def get_for(server, id) do Statux.Tracker.get(server, id) end @doc """ Forcefully sets the state of a given id and status to an option. This allows to create options that can not be left automatically, for example a :critical or :warning status that has to be acknowledged manually. iex> Statux.put("my_device", :battery_voltage, :ok) %Statux.Models.Status{ current: :ok, history: [:ok, :low], transition_count: 2, transitioned_at: DateTime%{} # now } iex> Statux.put("my_device", :battery_voltage, :some_random_option) {:error, :invalid_option} """ def set(id, status_name, option) do Statux.Tracker.set(id, status_name, option) end def set_for(server, id, status_name, option) do Statux.Tracker.set(server, id, status_name, option) end end
lib/statux.ex
0.809238
0.483222
statux.ex
starcoder
defmodule Midifile.Event do use Bitwise defstruct symbol: :off, delta_time: 0, bytes: [] # data bytes, including status byte def status(%Midifile.Event{bytes: [st|_]}) when st < 0xf0, do: band(st, 0xf0) def status(%Midifile.Event{bytes: [st|_]}), do: st def data(%Midifile.Event{bytes: [_|data]}), do: data def channel?(%Midifile.Event{bytes: [st|_]}) when st < 0xf0, do: true def channel?(_), do: false def note?(%Midifile.Event{bytes: [st|_]}) when st < 0xb0, do: true def note?(_), do: false def channel(%Midifile.Event{bytes: [st|_]}) when st < 0xf0, do: band(st, 0x0f) def note(%Midifile.Event{bytes: [st, n, _]}) when st < 0xb0, do: n def velocity(%Midifile.Event{bytes: [st, _, v]}) when st < 0xb0, do: v @doc """ Returns a list of start times (not delta times) of each event. """ def start_times(events) do {_, start_times} = events |> Enum.reduce({0, []}, fn(e, {prev_delta, sts}) -> {prev_delta + e.delta_time, [prev_delta + e.delta_time | sts]} end) Enum.reverse(start_times) end @doc """ Given a list of start times, returns a list of delta times. """ def delta_times(sts) do {_, deltas} = sts |> Enum.reduce({0, []}, fn(start_time, {prev_start_time, deltas}) -> {start_time, [start_time - prev_start_time | deltas]} end) Enum.reverse(deltas) end # Return a list of events where every event has been quantized. # We quantize start times, then convert back to deltas. def quantize(events, n) do quantized_delta_times = events |> start_times |> Enum.map(&quantize_to(&1, n)) |> delta_times Enum.zip(events, quantized_delta_times) |> Enum.map(fn({e, t}) -> %{e | delta_time: t} end) end def quantize_to(t, n) do modulo = rem(t, n) if modulo >= n / 2 do t + n - modulo else t - modulo end end @doc """ Merges two lists of events in the proper order. """ def merge(es1, es2) do merged = [Enum.zip(es1, start_times(es1)), Enum.zip(es2, start_times(es2))] |> Enum.concat |> Enum.sort(fn({_, start1}, {_, start2}) -> start1 < start2 end) {_, es} = merged |> Enum.reduce({0, []}, fn({e, start}, {prev_start_time, es2}) -> delta = start - prev_start_time {start, [%{e | delta_time: delta} | es2]} end) Enum.reverse(es) end def to_string(%Midifile.Event{bytes: [st|data]} = e) do "#{e.delta_time}: ch #{band(st, 0x0f)} #{e.symbol} #{inspect data}" end end
lib/midifile/event.ex
0.714927
0.628051
event.ex
starcoder
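A worked example of `quantize/2` above: start times snap to the nearest multiple of `n` (halfway rounds up), then are converted back to delta times:

```elixir
make = fn dt -> %Midifile.Event{symbol: :on, delta_time: dt, bytes: [0x90, 60, 100]} end
events = [make.(0), make.(92), make.(101)]

Midifile.Event.start_times(events)
# => [0, 92, 193]

Midifile.Event.quantize(events, 96) |> Enum.map(& &1.delta_time)
# => [0, 96, 96]   (start times 0, 92, 193 snap to 0, 96, 192)
```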
defmodule ExMatchers.Size do @moduledoc false import ExUnit.Assertions import ExMatchers.Custom defprotocol SizeMatcher do @fallback_to_any true def to_match(value) def to_match(value, size) def to_not_match(value) def to_not_match(value, size) end defimpl SizeMatcher, for: Map do def to_match(value) do to_match(value, 0) end def to_match(value, size) do assert map_size(value) == size end def to_not_match(value) do to_not_match(value, 0) end def to_not_match(value, size) do refute map_size(value) == size end end defimpl SizeMatcher, for: Tuple do def to_match(value) do to_match(value, 0) end def to_match(value, size) do assert tuple_size(value) == size end def to_not_match(value) do to_not_match(value, 0) end def to_not_match(value, size) do refute tuple_size(value) == size end end defimpl SizeMatcher, for: BitString do def to_match(value) do to_match(value, 0) end def to_match(value, size) do assert byte_size(String.trim(value)) == size end def to_not_match(value) do to_not_match(value, 0) end def to_not_match(value, size) do refute byte_size(String.trim(value)) == size end end defimpl SizeMatcher, for: List do def to_match(value) do to_match(value, 0) end def to_match(value, size) do assert length(value) == size end def to_not_match(value) do to_not_match(value, 0) end def to_not_match(value, size) do refute length(value) == size end end defimpl SizeMatcher, for: MapSet do def to_match(value) do to_match(value, 0) end def to_match(value, size) do assert MapSet.size(value) == size end def to_not_match(value) do to_not_match(value, 0) end def to_not_match(value, size) do refute MapSet.size(value) == size end end defimpl SizeMatcher, for: Range do def to_match(value) do to_match(value, 0) end def to_match(value, size) do assert Enum.count(value) == size end def to_not_match(value) do to_not_match(value, 0) end def to_not_match(value, size) do refute Enum.count(value) == size end end defimpl SizeMatcher, for: Atom do def to_match(nil), do: true def to_match(nil, _size), do: true def to_not_match(nil), do: flunk "Nil is empty" def to_not_match(nil, _size), do: flunk "Nil is empty" end defimpl SizeMatcher, for: Any do def to_match(value) do flunk "Size not supported for #{inspect(value)}" end def to_match(value, size) do flunk "Size not supported for #{inspect(value)} with size #{size}" end def to_not_match(value) do flunk "Size not supported for #{inspect(value)}" end def to_not_match(value, size) do flunk "Size not supported for #{inspect(value)} with size #{size}" end end defmatcher be_empty, matcher: SizeMatcher defmatcher have_items(number), matcher: SizeMatcher end
lib/ex_matchers/size.ex
0.704872
0.859958
size.ex
starcoder
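The protocol above can be exercised directly inside an ExUnit test; each implementation delegates to the size function appropriate for its type:

```elixir
alias ExMatchers.Size.SizeMatcher

SizeMatcher.to_match(%{}, 0)             # map_size/1
SizeMatcher.to_match({:a, :b, :c}, 3)    # tuple_size/1
SizeMatcher.to_match("  hi ", 2)         # byte_size/1 of the trimmed string
SizeMatcher.to_match([1, 2], 2)          # length/1
SizeMatcher.to_not_match(1..3, 5)        # Enum.count/1
```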
defmodule Solace.Prismic.Types.Prismic do
  @moduledoc """
  Parses Prismic rich-text JSON into typed structs and renders span markup.
  """

  alias Solace.Prismic.Types.Prismic.StructuredText
  alias Solace.Prismic.Types.Prismic.Paragraph
  alias Solace.Prismic.Types.Prismic.ListItem
  alias Solace.Prismic.Types.Prismic.OListItem
  alias Solace.Prismic.Types.Prismic.Heading1
  alias Solace.Prismic.Types.Prismic.Heading2
  alias Solace.Prismic.Types.Prismic.Heading3
  alias Solace.Prismic.Types.Prismic.Heading4
  alias Solace.Prismic.Types.Prismic.Heading5
  alias Solace.Prismic.Types.Prismic.Heading6
  alias Solace.Prismic.Types.Prismic.LinkImage
  alias Solace.Prismic.Types.Prismic.Embed
  alias Solace.Prismic.Types.Prismic.Video
  alias Solace.Prismic.Types.Prismic.Slice
  alias Solace.Prismic.Types.Prismic.SliceZone
  alias Solace.Prismic.Types.Prismic.Group
  alias Solace.Prismic.Types.Prismic.Image
  alias Solace.Prismic.Types.Prismic.ImageMeta
  alias Solace.Prismic.Types.Prismic.LinkWeb
  alias Solace.Prismic.Types.Prismic.Text
  alias Solace.Prismic.Types.Prismic.Preformatted
  alias Solace.Prismic.Types.Prismic.Spans

  def process_spans(text, nil = _spans), do: text
  def process_spans(text, [] = _spans), do: text

  def process_spans(text, spans) when is_list(spans) do
    # Ensure spans are sorted by start offset, ties broken by end offset.
    spans =
      Enum.sort(spans, fn a, b ->
        a.start < b.start || (a.start == b.start && a.end < b.end)
      end)

    # Build a map of text offset => markup to emit at that offset. Closing
    # tags are prepended so inner spans close before outer ones.
    instructions =
      List.foldl(spans, %{}, fn x, acc ->
        o = Solace.PrismicSpanProtocol.open(x)
        c = Solace.PrismicSpanProtocol.close(x)

        acc
        |> Map.update(x.start, o, fn p -> p <> o end)
        |> Map.update(x.end, c, fn p -> c <> p end)
      end)

    slices = instructions |> Map.keys() |> Enum.sort()

    {pos, acc} =
      List.foldl(slices, {0, ""}, fn i, {pos, acc} ->
        # Append the text from pos up to i, then the markup at i; update pos.
        if i == pos do
          {i, acc <> instructions[i]}
        else
          {i, acc <> String.slice(text, pos..(i - 1)) <> instructions[i]}
        end
      end)

    acc <> String.slice(text, pos..-1)
  end

  def process_newlines(text) do
    # The original emitted "</br>", which is not a valid HTML tag; "<br/>" is
    # the self-closing line break.
    text |> String.replace("\n", "<br/>")
  end

  def parse_item(json) when is_list(json) do
    for j <- json do
      parse_item(j)
    end
  end

  def parse_item(json) do
    case json["type"] do
      "StructuredText" -> StructuredText.new(json)
      "paragraph" -> Paragraph.new(json)
      "list-item" -> ListItem.new(json)
      "o-list-item" -> OListItem.new(json)
      "heading1" -> Heading1.new(json)
      "heading2" -> Heading2.new(json)
      "heading3" -> Heading3.new(json)
      "heading4" -> Heading4.new(json)
      "heading5" -> Heading5.new(json)
      "heading6" -> Heading6.new(json)
      "Link.image" -> LinkImage.new(json)
      "Link.web" -> LinkWeb.new(json)
      "SliceZone" -> SliceZone.new(json)
      "Slice" -> Slice.new(json)
      "Group" -> Group.new(json)
      "Image" -> ImageMeta.new(json)
      "image" -> Image.new(json)
      "Text" -> Text.new(json)
      "preformatted" -> Preformatted.new(json)
      "embed" -> Embed.new(json)
      "video" -> Video.new(json)
      _ ->
        IO.puts("""
        ===================
        #{json["type"]}
        =================
        #{inspect(json, pretty: true)}
        =====================================================
        """)

        %{unknown: json["type"], data: json}
    end
  end

  def parse_spans(nil), do: []

  def parse_spans(spans) when is_list(spans) do
    for span <- spans do
      parse_span(span)
    end
  end

  def parse_span(span) do
    case span["type"] do
      "strong" -> Spans.Strong.new(span)
      "em" -> Spans.Em.new(span)
      "hyperlink" -> Spans.HyperLink.new(span)
      _ ->
        IO.puts("""
        ================= SPAN: #{inspect(span)} =============
        #{inspect(span, pretty: true)}
        =====================================================
        """)

        %{unknown_span: span["type"], data: span}
    end
  end
end
lib/prismic/types/prismic.ex
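# Usage sketch (not part of the original file). parse_item/1 dispatches on the
# Prismic "type" field and falls back to a tagged map (printing a diagnostic
# block) for unknown types; process_newlines/1 swaps newlines for HTML breaks.
Solace.Prismic.Types.Prismic.process_newlines("line one\nline two")
#=> "line one<br/>line two"

Solace.Prismic.Types.Prismic.parse_item(%{"type" => "mystery"})
#=> %{unknown: "mystery", data: %{"type" => "mystery"}}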
defmodule Kira2.Task do
  require Kira2.TaskDefinition, as: TaskDefinition
  require Kira.Util, as: Util

  @moduledoc false

  defstruct [
    :state,
    :definition,
    :awaiting,
    :blocking,
    :awaiting_unapply,
    :blocking_unapply
  ]

  @type errors :: [{any, DateTime.t()}]

  @type state() ::
          :not_started
          | {:running_apply, pid(), errors}
          | {:running_apply_retry, pid(), errors}
          | {:running_unapply, pid(), any, errors}
          | {:running_unapply_retry, pid(), any, errors}
          | {:failed, errors}
          | {:done_applied, any}
          | :done_unapplied

  @type definitions :: %{required(atom()) => TaskDefinition.t()}

  @type t() :: %__MODULE__{
          state: state(),
          definition: TaskDefinition.t(),
          awaiting: MapSet.t(atom()),
          blocking: MapSet.t(atom()),
          awaiting_unapply: MapSet.t(atom()),
          blocking_unapply: MapSet.t(atom())
        }

  @spec create(task_def :: TaskDefinition.t(), definitions :: definitions()) :: t()
  def create(task_def, definitions) do
    # The tasks blocking this task.
    awaiting = Enum.into(task_def.dependencies, MapSet.new())

    # The tasks blocked by this task.
    blocking =
      for {t, other_def} <- definitions,
          Enum.member?(other_def.dependencies, task_def.name),
          into: MapSet.new(),
          do: t

    %__MODULE__{
      state: :not_started,
      definition: task_def,
      awaiting: awaiting,
      blocking: blocking,
      awaiting_unapply: MapSet.new(),
      blocking_unapply: awaiting
    }
  end

  @spec apply_ready?(task :: t()) :: boolean()
  def apply_ready?(task) do
    MapSet.size(task.awaiting) == 0
  end

  @spec unapply_ready?(task :: t()) :: boolean()
  def unapply_ready?(task) do
    MapSet.size(task.awaiting_unapply) == 0 && is_complete(task)
  end

  @spec set_state(task :: t(), state :: state) :: t()
  def set_state(task, state) do
    %{task | state: state}
  end

  @spec get_completed(task :: t()) :: Util.result(any)
  def get_completed(task) do
    case task.state do
      {:done_applied, value} ->
        {:ok, value}

      # :running_unapply states are 4-tuples ({tag, pid, value, errors}); the
      # original 3-tuple pattern here could never match.
      {:running_unapply, _, value, _} ->
        {:ok, value}

      _ ->
        {:error, {:unable_to_get_task_value, task.definition.name, task.state}}
    end
  end

  @spec is_complete(task :: t()) :: boolean
  def is_complete(task) do
    case task.state do
      {:done_applied, _value} -> true
      _ -> false
    end
  end

  @spec get_pid(task :: t()) :: Util.result(pid)
  def get_pid(task) do
    case task.state do
      {:running_apply, pid, _} -> {:ok, pid}
      {:running_apply_retry, pid, _} -> {:ok, pid}
      {:running_unapply, pid, _, _} -> {:ok, pid}
      {:running_unapply_retry, pid, _, _} -> {:ok, pid}
      _ -> {:error, {:unable_to_get_task_pid, task.definition.name, task.state}}
    end
  end

  @spec get_errors(task :: t) :: errors
  def get_errors(task) do
    case task.state do
      {:running_apply, _, errors} -> errors
      {:running_apply_retry, _, errors} -> errors
      {:running_unapply, _, _, errors} -> errors
      {:running_unapply_retry, _, _, errors} -> errors
      {:failed, errors} -> errors
      _ -> []
    end
  end

  def put_in_awaiting(task, other_task_name) do
    %{task | awaiting: MapSet.put(task.awaiting, other_task_name)}
  end

  def put_in_awaiting_unapply(task, other_task_name) do
    %{task | awaiting_unapply: MapSet.put(task.awaiting_unapply, other_task_name)}
  end

  def put_in_blocking(task, other_task_name) do
    %{task | blocking: MapSet.put(task.blocking, other_task_name)}
  end

  def put_in_blocking_unapply(task, other_task_name) do
    %{task | blocking_unapply: MapSet.put(task.blocking_unapply, other_task_name)}
  end

  def drop_from_awaiting(task, other_task_name) do
    %{task | awaiting: MapSet.delete(task.awaiting, other_task_name)}
  end

  def drop_from_awaiting_unapply(task, other_task_name) do
    %{task | awaiting_unapply: MapSet.delete(task.awaiting_unapply, other_task_name)}
  end

  def drop_from_blocking(task, other_task_name) do
    %{task | blocking: MapSet.delete(task.blocking, other_task_name)}
  end

  def drop_from_blocking_unapply(task, other_task_name) do
    %{task | blocking_unapply: MapSet.delete(task.blocking_unapply, other_task_name)}
  end
end
lib/kira_2/task.ex
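# Usage sketch (not part of the original file); assumes Kira2.TaskDefinition is
# a struct with :name and :dependencies fields, as create/2 implies.
defs = %{
  a: %Kira2.TaskDefinition{name: :a, dependencies: []},
  b: %Kira2.TaskDefinition{name: :b, dependencies: [:a]}
}

task_a = Kira2.Task.create(defs.a, defs)
task_a.blocking                  #=> MapSet.new([:b]), since :b depends on :a
Kira2.Task.apply_ready?(task_a)  #=> true, :a awaits nothing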
defmodule Namex.Tokenize do
  @moduledoc """
  Tokenizes personal-name strings into typed tokens (titles, suffixes,
  appellations, upper/lower-case words, nicknames), threading the scanner
  state through a `%Namex.Tokenize{}` struct.
  """

  alias Namex.Tokenize, as: State

  defstruct commas: 0,
            words: 0,
            initials: 0,
            suffices: 0,
            stack: [],
            # The remaining input. This must be a binary (the original default
            # was the charlist '', which the String functions below reject).
            string: ""

  @comma ~r/^\s*,\s*/
  @stops ",;"
  @seperator ~r/^\s*(\band\b|\&|;)\s*/i
  @title ~r/^\s*\b(sir|lord|count(ess)?|(gen|adm|col|maj|capt|cmdr|lt|sgt|cpl|pvt|pastor|pr|reverend|rev|elder|deacon|deaconess|father|fr|rabbi|cantor|vicar|esq|esquire|prof|dr|md|ph\.?d)\.?)(\s+|$)/i
  @suffix ~r/^\s*\b(JR|Jr|jr|SR|Sr|sr|[IVX]{2,})(\.|\b)/
  @appellation ~r/^\s*\b((mrs?|ms|fr|hr)\.?|miss|herr|frau)(\s+|$)/i
  @uword ~r/^((\\\w+)?\{[^\}]*\})*[[:upper:]][^\s#{@stops}]*/
  @lword ~r/^((\\\w+)?\{[^\}]*\})*[[:lower:]][^\s#{@stops}]*/
  @pword ~r/^(\\\w+)?\{[^\}]*\}[^\s#{@stops}]*/
  # Grouped so the ^ anchor applies to both alternatives; the original
  # anchored only the single-quoted form.
  @nick ~r/^(('[^'\n]+')|("[^"\n]+"))/

  def call(string) do
    next_token(%State{string: string}).stack
    |> Enum.reverse()
    |> Enum.map(fn {token, value} -> {token, 1, value} end)
  end

  defp next_token(%State{string: ""} = state), do: state
  defp next_token(%State{string: nil} = state), do: state

  defp next_token(state) do
    state
    |> scan(@seperator, &consume_seperator/2)
    |> scan(@comma, &handle_comma/2)
    |> scan(~r/^\s+/, fn _, new_state -> next_token(new_state) end)
    |> scan(@title, &consume_word(:TITLE, String.trim(&1), &2))
    |> scan(@suffix, &consume_word(:SUFFIX, String.trim(&1), &2))
    |> scan(@appellation, &handle_appellation/2)
    |> scan(@uword, &consume_word(:UWORD, String.trim(&1), &2))
    |> scan(@lword, &consume_word(:LWORD, String.trim(&1), &2))
    |> scan(@pword, &consume_word(:PWORD, String.trim(&1), &2))
    |> scan(@nick, &consume_word(:NICK, &1 |> String.trim("\"") |> String.trim("'"), &2))
  end

  defp scan(%State{string: ""} = state, _, _), do: state
  defp scan(%State{string: nil} = state, _, _), do: state

  defp scan(state, regex, callback) do
    case Regex.run(regex, state.string, return: :index) do
      nil ->
        state

      [{_, offset} | _] ->
        # Note: regex offsets are byte-based while String.split_at counts
        # graphemes, so the anchored patterns above assume ASCII input.
        {match, string} = String.split_at(state.string, offset)
        callback.(match, %State{state | string: string})
    end
  end

  defp handle_comma(_match, state) do
    if state.commas == 0 || (state.commas == 1 && suffix?(state)) do
      %State{state | commas: state.commas + 1, stack: [{:COMMA, :COMMA} | state.stack]}
      |> next_token
    else
      consume_seperator(nil, state)
    end
  end

  defp handle_appellation(match, %State{words: 0} = state) do
    %State{state | stack: [{:APPELLATION, String.trim(match)} | state.stack]}
    |> next_token
  end

  defp handle_appellation(match, state), do: consume_word(:UWORD, match, state)

  defp consume_seperator(_match, state) do
    if seen_seperator?(state) do
      next_token(state)
    else
      %State{
        state
        | commas: 0,
          words: 0,
          initials: 0,
          suffices: 0,
          stack: [{:AND, :AND} | state.stack]
      }
      |> next_token
    end
  end

  defp consume_word(type, word, state) do
    state = %State{state | words: state.words + 1, stack: [{type, word} | state.stack]}

    state =
      case type do
        :UWORD ->
          if Regex.match?(~r/^[[:upper:]]+\b/, word) do
            %State{state | initials: state.initials + 1}
          else
            state
          end

        :SUFFIX ->
          %State{state | suffices: state.suffices + 1}

        _ ->
          state
      end

    next_token(state)
  end

  defp seen_seperator?(%State{stack: []}), do: false
  # The stack stores upper-case {:AND, :AND} tokens (see consume_seperator/2);
  # the original matched lower-case {:and, :and}, which never occurs.
  defp seen_seperator?(%State{stack: [{:AND, :AND} | _]}), do: true
  defp seen_seperator?(_), do: false

  defp suffix?(%State{suffices: 0} = state), do: will_see_suffix?(state)
  defp suffix?(_), do: true

  defp will_see_suffix?(%State{string: string}) do
    suffix =
      string
      |> String.slice(0..8)
      |> String.trim()
      |> String.split(~r/\s+/)
      |> List.first()

    Regex.match?(@suffix, suffix)
  end
end
lib/namex/tokenize.ex
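# Usage sketch (not part of the original file). call/1 yields {token, 1, value}
# triples; "Dr." matches the @title pattern and the capitalised names @uword:
Namex.Tokenize.call("Dr. Ada Lovelace")
#=> [{:TITLE, 1, "Dr."}, {:UWORD, 1, "Ada"}, {:UWORD, 1, "Lovelace"}]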
defmodule Telnyx.MessagingPhoneNumbers do
  @moduledoc """
  Context for managing messaging phone numbers.
  """

  alias Telnyx.Client

  @doc """
  Lists all messaging phone numbers.

  ## Examples

  ```
  api_key = "YOUR_API_KEY"
  Telnyx.MessagingPhoneNumbers.list(api_key, page: %{size: 10})
  ```

  Example response:

  ```
  {:ok,
   [
     %{
       "MMS" => %{
         "domestic_two_way" => true,
         "enabled" => false,
         "international_inbound" => false,
         "international_outbound" => false
       },
       "SMS" => %{
         "domestic_two_way" => true,
         "enabled" => false,
         "international_inbound" => false,
         "international_outbound" => false
       },
       "country_code" => "US",
       "created_at" => "2020-04-27T15:33:17.098Z",
       "eligible_messaging_products" => ["A2P", "P2P"],
       "features" => %{
         "mms" => %{
           "domestic_two_way" => true,
           "international_inbound" => false,
           "international_outbound" => false
         },
         "sms" => %{
           "domestic_two_way" => true,
           "international_inbound" => false,
           "international_outbound" => false
         }
       },
       "health" => %{
         "inbound_outbound_ratio" => 0.06521739130434782,
         "message_count" => 98.0,
         "spam_ratio" => 0.0,
         "success_ratio" => 1.0
       },
       "id" => "8787893573721994846",
       "messaging_product" => "A2P",
       "messaging_profile_id" => "21994846-5bdf-4fd6-b75f-6d2f633b08bd",
       "organization_id" => "21994846-d17b-4cde-8fbf-e37bd8baa4b1",
       "phone_number" => "+18665552368",
       "phone_number_country" => "US",
       "phone_number_type" => "longcode",
       "record_type" => "messaging_phone_number",
       "traffic_type" => "A2P",
       "type" => "longcode",
       "updated_at" => "2020-04-27T15:33:17.098Z",
       "user_id" => "21994846-d17b-4cde-8fbf-e37bd8baa4b1"
     }
   ]}
  ```
  """
  @spec list(String.t(), Keyword.t()) :: {:ok, [map]} | {:error, %Telnyx.Error{}}
  def list(api_key, opts \\ []) do
    # Flattens nested option maps (e.g. page: %{size: 10}) into bracketed
    # query params. An empty opts list yields an empty query string rather
    # than the nil the original interpolated.
    query =
      opts
      |> Enum.flat_map(fn {type, map} ->
        Enum.map(map, fn {key, value} -> build_query_string(type, key, value) end)
      end)
      |> Enum.join("&")

    Client.get(api_key, "/messaging_phone_numbers?#{query}")
  end

  @doc """
  Retrieves a messaging phone number.

  ## Examples

  ```
  api_key = "YOUR_API_KEY"
  Telnyx.MessagingPhoneNumbers.retrieve("+18665552368", api_key)
  ```

  Example response:

  ```
  {:ok,
   %{
     "MMS" => %{
       "domestic_two_way" => true,
       "enabled" => false,
       "international_inbound" => false,
       "international_outbound" => false
     },
     "SMS" => %{
       "domestic_two_way" => true,
       "enabled" => false,
       "international_inbound" => false,
       "international_outbound" => false
     },
     "country_code" => "US",
     "created_at" => "2020-04-27T15:33:17.098Z",
     "eligible_messaging_products" => ["A2P", "P2P"],
     "features" => %{
       "mms" => %{
         "domestic_two_way" => true,
         "international_inbound" => false,
         "international_outbound" => false
       },
       "sms" => %{
         "domestic_two_way" => true,
         "international_inbound" => false,
         "international_outbound" => false
       }
     },
     "health" => %{
       "inbound_outbound_ratio" => 0.06521739130434782,
       "message_count" => 98.0,
       "spam_ratio" => 0.0,
       "success_ratio" => 1.0
     },
     "id" => "8787893573721994846",
     "messaging_product" => "A2P",
     "messaging_profile_id" => "21994846-5bdf-4fd6-b75f-6d2f633b08bd",
     "organization_id" => "21994846-d17b-4cde-8fbf-e37bd8baa4b1",
     "phone_number" => "+18665552368",
     "phone_number_country" => "US",
     "phone_number_type" => "longcode",
     "record_type" => "messaging_phone_number",
     "traffic_type" => "A2P",
     "type" => "longcode",
     "updated_at" => "2020-04-27T15:33:17.098Z",
     "user_id" => "21994846-d17b-4cde-8fbf-e37bd8baa4b1"
   }}
  ```

  See https://developers.telnyx.com/docs/api/v2/messaging/Phone-Numbers#retrieveMessagingPhoneNumber
  """
  @spec retrieve(String.t(), String.t()) :: {:ok, map} | {:error, %Telnyx.Error{}}
  def retrieve(phone, api_key) do
    Client.get(api_key, "/messaging_phone_numbers/#{phone}")
  end

  @doc """
  Updates a messaging phone number.

  ## Examples

  ```
  api_key = "YOUR_API_KEY"
  {:ok, number} = Telnyx.MessagingPhoneNumbers.retrieve("+18665552368", api_key)

  %{messaging_profile_id: "uuid"}
  |> Telnyx.MessagingPhoneNumbers.update(number["id"], api_key)
  ```

  Example response:

  ```
  {:ok,
   %{
     "MMS" => %{
       "domestic_two_way" => true,
       "enabled" => false,
       "international_inbound" => false,
       "international_outbound" => false
     },
     "SMS" => %{
       "domestic_two_way" => true,
       "enabled" => false,
       "international_inbound" => false,
       "international_outbound" => false
     },
     "country_code" => "US",
     "created_at" => "2020-04-27T15:33:17.098Z",
     "eligible_messaging_products" => ["A2P", "P2P"],
     "features" => %{
       "mms" => %{
         "domestic_two_way" => true,
         "international_inbound" => false,
         "international_outbound" => false
       },
       "sms" => %{
         "domestic_two_way" => true,
         "international_inbound" => false,
         "international_outbound" => false
       }
     },
     "health" => %{
       "inbound_outbound_ratio" => 0.06521739130434782,
       "message_count" => 98.0,
       "spam_ratio" => 0.0,
       "success_ratio" => 1.0
     },
     "id" => "8787893573721994846",
     "messaging_product" => "A2P",
     "messaging_profile_id" => "uuid",
     "organization_id" => "21994846-d17b-4cde-8fbf-e37bd8baa4b1",
     "phone_number" => "+18665552368",
     "phone_number_country" => "US",
     "phone_number_type" => "longcode",
     "record_type" => "messaging_phone_number",
     "traffic_type" => "A2P",
     "type" => "longcode",
     "updated_at" => "2020-04-27T15:33:17.098Z",
     "user_id" => "21994846-d17b-4cde-8fbf-e37bd8baa4b1"
   }}
  ```

  See https://developers.telnyx.com/docs/api/v2/messaging/Phone-Numbers#updateMessagingPhoneNumber
  """
  @spec update(map, String.t(), String.t()) :: {:ok, map} | {:error, %Telnyx.Error{}}
  def update(params = %{}, uuid, api_key) do
    Client.patch(api_key, params, "/messaging_phone_numbers/#{uuid}")
  end

  defp build_query_string(type, key, value), do: "#{type}[#{key}]=#{value}"
end
lib/telnyx/messaging_phone_numbers.ex
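# Usage sketch (not part of the original file; the API key is a placeholder).
# Nested option maps become bracketed query params via build_query_string/3:
Telnyx.MessagingPhoneNumbers.list("YOUR_API_KEY", page: %{size: 10})
# issues GET /messaging_phone_numbers?page[size]=10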
defmodule ExHal.Transcoder do
  @moduledoc """
  Helps to build transcoders for HAL documents.

  Given a document like

  ```json
  {
    "name": "<NAME>",
    "mailingAddress": "123 Main St",
    "_links": {
      "app:department": { "href": "http://example.com/dept/42" },
      "app:manager":    { "href": "http://example.com/people/84" }
    }
  }
  ```

  We can define a transcoder for it.

  ```elixir
  defmodule PersonTranscoder do
    use ExHal.Transcoder

    defproperty "name"
    defproperty "mailingAddress", param: :address
    deflink "app:department", param: :department_url
    deflink "app:manager", param: :manager_id, value_converter: PersonUrlConverter
  end
  ```

  `PersonUrlConverter` is a module that has adopted the `ExHal.ValueConverter`
  behaviour.

  ```elixir
  defmodule PersonUrlConverter do
    @behaviour ExHal.ValueConverter

    def from_hal(person_url) do
      to_string(person_url)
      |> String.split("/")
      |> List.last()
    end

    def to_hal(person_id) do
      "http://example.com/people/\#{person_id}"
    end
  end
  ```

  We can use this transcoder to extract the pertinent parts of the document
  into a map.

  ```elixir
  iex> PersonTranscoder.decode!(doc)
  %{name: "<NAME>",
    address: "123 Main St",
    department_url: "http://example.com/dept/42",
    manager_id: 84}

  iex> PersonTranscoder.encode!(%{name: "<NAME>",
                                  address: "123 Main St",
                                  department_url: "http://example.com/dept/42",
                                  manager_id: 84})
  ~s(
  {
    "name": "<NAME>",
    "mailingAddress": "123 Main St",
    "_links": {
      "app:department": { "href": "http://example.com/dept/42" },
      "app:manager":    { "href": "http://example.com/people/84" }
    }
  }
  )
  ```
  """

  @type t :: module

  @doc """
  Returns a decoded version of a HAL document merged with the initial params.

  initial_params - the initial params with which the newly extracted info should be merged
  src_doc - the document to interpret
  opts - options for use by modules adopting the ExHal.ValueConverterWithOptions behaviour
  """
  @callback decode!(ExHal.Document.t()) :: %{}
  @callback decode!(%{}, ExHal.Document.t()) :: %{}
  @callback decode!(ExHal.Document.t(), keyword) :: %{}
  @callback decode!(%{}, ExHal.Document.t(), keyword) :: %{}

  @doc """
  Returns a HAL version of the params provided, combined with the initial doc.

  initial_doc - the initial document with which the newly encoded info should be merged
  src_params - the params to encode into HAL
  opts - options for use by modules adopting the ExHal.ValueConverterWithOptions behaviour
  """
  @callback encode!(%{}) :: ExHal.Document.t()
  @callback encode!(ExHal.Document.t(), %{}) :: ExHal.Document.t()
  @callback encode!(%{}, keyword) :: ExHal.Document.t()
  @callback encode!(ExHal.Document.t(), %{}, keyword) :: ExHal.Document.t()

  @doc """
  Updates an existing object, such as one created by ExHal.Transcoder.decode!

  initial_object - a map containing properties, links etc
  patch_ops - a list of JSON-patch operations (https://tools.ietf.org/html/rfc6902)

  Supported are "replace" for properties, links, and collections of links, and
  "add" for collections of links. For example:

      [
        # replace a property
        %{"op" => "replace", "path" => "/panicLevel", "value" => 42},
        # replace a link collection
        %{"op" => "replace", "path" => "/_links/answers",
          "value" => [%{"href" => "urn:earth"}, %{"href" => "urn:universe"}]},
        # append to a link collection
        %{"op" => "add", "path" => "/_links/answers/-", "value" => %{"href" => "urn:everything"}}
      ]
  """
  @callback patch!(%{}, [%{}]) :: %{}
  @callback patch!(%{}, [%{}], keyword) :: %{}

  defmacro __using__(_opts) do
    quote do
      import unquote(__MODULE__)

      Module.register_attribute(__MODULE__, :extractors, accumulate: true, persist: false)
      Module.register_attribute(__MODULE__, :injectors, accumulate: true, persist: false)
      Module.register_attribute(__MODULE__, :patchers, accumulate: true, persist: false)

      @before_compile unquote(__MODULE__)
    end
  end

  defmacro __before_compile__(_env) do
    quote do
      @behaviour ExHal.Transcoder

      def decode!(initial_params, %ExHal.Document{} = doc, opts) do
        @extractors
        |> Enum.reduce(initial_params, &apply(__MODULE__, &1, [doc, &2, opts]))
      end

      def decode!(doc, [_ | _] = opts), do: decode!(%{}, doc, opts)
      def decode!(initial_params, %ExHal.Document{} = doc), do: decode!(initial_params, doc, [])
      def decode!(doc), do: decode!(%{}, doc, [])

      def encode!(%ExHal.Document{} = initial_doc, params, opts) do
        @injectors
        |> Enum.reduce(initial_doc, &apply(__MODULE__, &1, [&2, params, opts]))
      end

      def encode!(params, [_ | _] = opts), do: encode!(%ExHal.Document{}, params, opts)
      def encode!(%ExHal.Document{} = initial_doc, params), do: encode!(initial_doc, params, [])
      def encode!(params), do: encode!(%ExHal.Document{}, params, [])

      def patch!(initial_object, patch_ops), do: patch!(initial_object, patch_ops, [])
      def patch!(initial_object, [], _opts), do: initial_object

      def patch!(initial_object, [patch_op | remaining_ops], opts) do
        # The original wrapped opts in a list ([opts]) here, handing the
        # patchers a nested keyword list; they expect opts itself.
        @patchers
        |> Enum.reduce(initial_object, &apply(__MODULE__, &1, [&2, patch_op, opts]))
        |> patch!(remaining_ops, opts)
      end
    end
  end

  defmodule ValueConverter do
    @type t :: module

    @doc """
    Returns the Elixir representation of a HAL value.

    hal_value - The HAL representation of the value to convert.
    """
    @callback from_hal(any) :: any

    @doc """
    Returns the HAL representation of an Elixir value.

    elixir_value - The Elixir representation of the value to convert.
    """
    @callback to_hal(any) :: any
  end

  defmodule ValueConverterWithOptions do
    @type t :: module

    @doc """
    Returns the Elixir representation of a HAL value.

    hal_value - The HAL representation of the value to convert.
    opts - Options to be used by the converter.
    """
    @callback from_hal(any, keyword) :: any

    @doc """
    Returns the HAL representation of an Elixir value.

    elixir_value - The Elixir representation of the value to convert.
    opts - Options to be used by the converter.
    """
    @callback to_hal(any, keyword) :: any
  end

  defmodule IdentityConverter do
    @behaviour ValueConverter

    def from_hal(it), do: it
    def to_hal(it), do: it
  end

  defp interpret_opts(options, name) do
    unique_string = (:rand.uniform() * 100_000_000) |> trunc |> Integer.to_string()

    param_names =
      options
      |> Keyword.get(:param, String.to_atom(name))
      |> List.wrap()

    templated = options |> Keyword.get(:templated, false)
    value_converter = Keyword.get(options, :value_converter, IdentityConverter)

    extractor_name = :"extract_#{unique_string}_#{Enum.join(param_names, ".")}"
    injector_name = :"inject_#{unique_string}_#{Enum.join(param_names, ".")}"
    patcher_name = :"patch_#{unique_string}_#{Enum.join(param_names, ".")}"

    {param_names, value_converter, extractor_name, injector_name, patcher_name, templated}
  end

  @doc """
  Define a property extractor and injector.

   * name - the name of the property in HAL
   * options - Keyword arguments
     - :param - the key(s) in the param structure that map to this property.
       Default is `String.to_atom(name)`.
     - :value_converter - a `ExHal.Transcoder.ValueConverter` with which to
       convert the value to and from HAL
  """
  defmacro defproperty(name, options \\ []) do
    {param_names, value_converter, extractor_name, injector_name, patcher_name, _} =
      interpret_opts(options, name)

    quote do
      def unquote(extractor_name)(doc, params, opts) do
        ExHal.get_lazy(doc, unquote(name), fn -> nil end)
        |> decode_value(unquote(value_converter), opts)
        |> put_param(params, unquote(param_names))
      end

      @extractors unquote(extractor_name)

      def unquote(injector_name)(doc, params, opts) do
        get_in(params, unquote(param_names))
        |> encode_value(unquote(value_converter), opts)
        |> put_property(doc, unquote(name))
      end

      @injectors unquote(injector_name)

      unquote do
        unless Keyword.get(options, :protected) do
          quote do
            def unquote(patcher_name)(
                  obj,
                  %{"op" => "replace", "path" => "/#{unquote(name)}", "value" => value},
                  opts
                ) do
              value
              |> decode_value(unquote(value_converter), opts)
              |> put_param(obj, unquote(param_names))
            end
          end
        end
      end

      def unquote(patcher_name)(obj, _patch_op, _opts), do: obj

      @patchers unquote(patcher_name)
    end
  end

  @doc """
  Define a link extractor & injector.

   * rel - the rel of the link in HAL
   * options - Keyword arguments
     - :param - the key(s) in the param structure that maps to this link. Required.
     - :templated - a boolean that adds a `templated: true` parameter if true
     - :value_converter - a `ExHal.Transcoder.ValueConverter` with which to
       convert the link target when en/decoding HAL
  """
  defmacro deflink(rel, options \\ []) do
    {param_names, value_converter, extractor_name, injector_name, patcher_name, templated} =
      interpret_opts(options, rel)

    quote do
      def unquote(extractor_name)(doc, params, opts) do
        ExHal.get_links_lazy(doc, unquote(rel), fn -> [] end)
        |> Enum.map(&Map.get(&1, :href))
        |> List.first()
        |> decode_value(unquote(value_converter), opts)
        |> put_param(params, unquote(param_names))
      end

      @extractors unquote(extractor_name)

      def unquote(injector_name)(doc, params, opts) do
        get_in(params, unquote(param_names))
        |> encode_value(unquote(value_converter), opts)
        |> put_link(doc, unquote(rel), unquote(templated))
      end

      @injectors unquote(injector_name)

      unquote do
        unless Keyword.get(options, :protected) do
          quote do
            def unquote(patcher_name)(
                  obj,
                  %{
                    "op" => "replace",
                    "path" => "/_links/#{unquote(rel)}",
                    "value" => %{"href" => href}
                  },
                  opts
                ) do
              href
              |> decode_value(unquote(value_converter), opts)
              |> put_param(obj, unquote(param_names))
            end
          end
        end
      end

      def unquote(patcher_name)(obj, _patch_op, _opts), do: obj

      @patchers unquote(patcher_name)
    end
  end

  @doc """
  Define a link extractor & injector for links that may have more than one item.

   * rel - the rel of the link in HAL
   * options - Keyword arguments
     - :param - the key(s) in the param structure that maps to this link. Required.
     - :value_converter - a `ExHal.Transcoder.ValueConverter` with which to
       convert the link target when en/decoding HAL
  """
  defmacro deflinks(rel, options \\ []) do
    {param_names, value_converter, extractor_name, injector_name, patcher_name, _} =
      interpret_opts(options, rel)

    quote do
      def unquote(extractor_name)(doc, params, opts) do
        ExHal.link_targets_lazy(doc, unquote(rel), fn -> nil end)
        |> decode_value(unquote(value_converter), opts)
        |> put_param(params, unquote(param_names))
      end

      @extractors unquote(extractor_name)

      def unquote(injector_name)(doc, params, opts) do
        get_in(params, unquote(param_names))
        |> encode_value(unquote(value_converter), opts)
        |> Enum.reduce(doc, &put_link(&1, &2, unquote(rel)))
      end

      @injectors unquote(injector_name)

      unquote do
        unless Keyword.get(options, :protected) do
          quote do
            def unquote(patcher_name)(
                  obj,
                  %{
                    "op" => "add",
                    "path" => "/_links/#{unquote(rel)}/-",
                    "value" => %{"href" => href}
                  },
                  _opts
                ) do
              update_in(obj, unquote(param_names), fn
                nil -> [href]
                links -> [href | links]
              end)
            end

            def unquote(patcher_name)(
                  obj,
                  %{"op" => "replace", "path" => "/_links/#{unquote(rel)}", "value" => links},
                  _opts
                ) do
              hrefs = links |> List.wrap() |> Enum.map(&Map.get(&1, "href"))
              update_in(obj, unquote(param_names), fn _ -> hrefs end)
            end
          end
        end
      end

      def unquote(patcher_name)(obj, _patch_op, _opts), do: obj

      @patchers unquote(patcher_name)
    end
  end

  # The original nil clause had arity 2 (decode_value(nil, _opts)) and could
  # never match the arity-3 calls above.
  def decode_value(nil, _converter, _opts), do: nil

  def decode_value(raw_value, converter, opts) do
    if :erlang.function_exported(converter, :from_hal, 2) do
      converter.from_hal(raw_value, opts)
    else
      converter.from_hal(raw_value)
    end
  end

  def put_param(nil, params, _), do: params

  def put_param(value, params, param_names) do
    params = build_out_containers(params, param_names)
    put_in(params, param_names, value)
  end

  def encode_value(nil, _, _opts), do: nil

  def encode_value(raw_value, converter, opts) do
    if :erlang.function_exported(converter, :to_hal, 2) do
      converter.to_hal(raw_value, opts)
    else
      converter.to_hal(raw_value)
    end
  end

  # The original also defined a bare put_link/3 nil clause, which conflicts
  # with the put_link/3 generated by the default argument below and would not
  # compile; the nil guard now lives in the /4 clauses only.
  def put_link(target, doc, rel, templated \\ false)
  def put_link(nil, doc, _rel, _templated), do: doc

  def put_link(target, doc, rel, templated) do
    ExHal.Document.put_link(doc, rel, target, templated)
  end

  def put_property(nil, doc, _), do: doc

  def put_property(value, doc, prop_name) do
    ExHal.Document.put_property(doc, prop_name, value)
  end

  defp build_out_containers(params, [_h] = _param_names), do: params

  defp build_out_containers(params, param_names) do
    # Ensure every intermediate container exists before put_in/3 descends.
    1..(Enum.count(param_names) - 1)
    |> Enum.map(&Enum.take(param_names, &1))
    |> Enum.reduce(params, fn c, acc ->
      case get_in(acc, c) do
        nil -> put_in(acc, c, %{})
        _ -> acc
      end
    end)
  end
end
lib/exhal/transcoder.ex
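# Usage sketch (not part of the original file), reusing the PersonTranscoder
# example from the moduledoc: patch!/2 walks the generated patchers, so a
# JSON-patch op on "mailingAddress" updates the :address param it maps to.
params = %{name: "Ada", address: "123 Main St"}

PersonTranscoder.patch!(params, [
  %{"op" => "replace", "path" => "/mailingAddress", "value" => "1 Elm St"}
])
#=> %{name: "Ada", address: "1 Elm St"}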
defmodule ExAlgebra.Matrix do
  @moduledoc """
  The ExAlgebra Matrix module is a collection of functions that perform
  computations on matrices. Matrices are represented by lists of lists of
  numbers, where the inner lists represent the rows of the matrix.
  """

  import :math, only: [pow: 2]
  alias ExAlgebra.Vector, as: Vector

  @doc """
  Returns the dimensions of a matrix: the number of rows and the number of
  columns, as a map. (Note this is the matrix's size, not its
  linear-algebraic rank.)

  ##### Examples

      iex> ExAlgebra.Matrix.rank([[1, 2], [3, 4], [4, 3]])
      %{rows: 3, columns: 2}
  """
  @spec rank([[number]]) :: map
  def rank([first_row | _] = matrix) do
    %{rows: length(matrix), columns: length(first_row)}
  end

  @doc """
  Computes the addition of two matrices. This is a new matrix with entries
  equal to the sum of the two matrices' corresponding entries. The input
  matrices must have the same dimensions.

  ##### Examples

      iex> ExAlgebra.Matrix.add([[1, 3, 1], [1, 0, 0]], [[0, 0, 5], [7, 5, 0]])
      [[1, 3, 6], [8, 5, 0]]
  """
  @spec add([[number]], [[number]]) :: [[number]]
  def add([], []), do: []

  def add([a_first_row | a_remaining_rows], [b_first_row | b_remaining_rows]) do
    [Vector.add(a_first_row, b_first_row) | add(a_remaining_rows, b_remaining_rows)]
  end

  @doc """
  Computes the subtraction of two matrices. This is a new matrix with entries
  equal to the difference of the pair of matrices' corresponding entries. The
  input matrices must have the same dimensions.

  ##### Examples

      iex> ExAlgebra.Matrix.subtract([[1, 3, 1], [1, 0, 0]], [[0, 0, 5], [7, 5, 0]])
      [[1, 3, -4], [-6, -5, 0]]
  """
  @spec subtract([[number]], [[number]]) :: [[number]]
  def subtract([], []), do: []

  def subtract([a_first_row | a_remaining_rows], [b_first_row | b_remaining_rows]) do
    [Vector.subtract(a_first_row, b_first_row) | subtract(a_remaining_rows, b_remaining_rows)]
  end

  @doc """
  Computes the product of a matrix and a scalar value.

  ##### Examples

      iex> ExAlgebra.Matrix.scalar_multiply([[1, 3, 1], [1, 0, 0]], 2.5)
      [[2.5, 7.5, 2.5], [2.5, 0.0, 0.0]]
  """
  @spec scalar_multiply([[number]], number) :: [[number]]
  def scalar_multiply([], _scalar), do: []

  def scalar_multiply(matrix, scalar) do
    matrix |> Enum.map(&Vector.scalar_multiply(&1, scalar))
  end

  @doc """
  Computes the transpose of a matrix. This is the matrix A<sup>t</sup> built
  from the matrix A where the entries A<sub>ij</sub> have been mapped to
  A<sub>ji</sub>.

  ##### Examples

      iex> ExAlgebra.Matrix.transpose([[1, 3, 1], [1, 0, 0]])
      [[1, 1], [3, 0], [1, 0]]
  """
  @spec transpose([[number]]) :: [[number]]
  def transpose(matrix) do
    matrix |> List.zip() |> Enum.map(&Tuple.to_list(&1))
  end

  @doc """
  Computes the multiplication of two matrices. If matrix A has dimensions
  `n x m`, then matrix B must have dimensions `m x p`.

  ##### Examples

      iex> ExAlgebra.Matrix.multiply([[2, 3, 4], [1, 0, 0]], [[0, 1000], [1, 100], [0, 10]])
      [[3, 2340], [0, 1000]]
  """
  @spec multiply([[number]], [[number]]) :: [[number]]
  def multiply(a, b) do
    naive_multiply(a, transpose(b))
  end

  @doc """
  Returns the `(i, j)` submatrix of a matrix. This is the matrix with the
  i<sup>th</sup> row and j<sup>th</sup> column removed.

  ##### Examples

      iex> ExAlgebra.Matrix.submatrix([[2, 3, 4], [1, 0, 0], [3, 4, 5]], 2, 3)
      [[2, 3], [3, 4]]
  """
  @spec submatrix([[number]], number, number) :: [[number]]
  def submatrix(matrix, i, j) do
    matrix |> remove_row(i) |> remove_column(j)
  end

  @doc """
  Removes the j<sup>th</sup> column of a matrix.

  ##### Examples

      iex> ExAlgebra.Matrix.remove_column([[2, 3, 4], [1, 0, 0], [3, 4, 5]], 2)
      [[2, 4], [1, 0], [3, 5]]
  """
  @spec remove_column([[number]], number) :: [[number]]
  def remove_column(matrix, j) do
    matrix |> Enum.map(&List.delete_at(&1, j - 1))
  end

  @doc """
  Removes the i<sup>th</sup> row of a matrix.

  ##### Examples

      iex> ExAlgebra.Matrix.remove_row([[2, 3, 4], [1, 0, 0], [3, 4, 5]], 2)
      [[2, 3, 4], [3, 4, 5]]
  """
  @spec remove_row([[number]], number) :: [[number]]
  def remove_row(matrix, i) do
    matrix |> List.delete_at(i - 1)
  end

  @doc """
  Computes the determinant of a matrix by expanding along the first row: each
  element of the first row is multiplied by its cofactor and the results are
  summed.

  ##### Examples

      iex> ExAlgebra.Matrix.det([[6, 1, 1], [4, -2, 5], [2, 8, 7]])
      -306.0
  """
  @spec det([[number]]) :: number
  def det([[a]]), do: a

  def det([first_row | _] = matrix) do
    first_row
    |> Enum.with_index()
    |> List.foldl(0, fn {row_element, row_index}, acc ->
      acc + row_element * cofactor(matrix, 1, row_index + 1)
    end)
  end

  @doc """
  Computes the `(i, j)` cofactor of a matrix. This is equal to the `(i, j)`
  minor of a matrix multiplied by `-1` raised to the power of `i + j`.

  ##### Examples

      iex> ExAlgebra.Matrix.cofactor([[2, 3, 4], [1, 0, 0], [3, 4, 5]], 1, 2)
      -5.0
  """
  @spec cofactor([[number]], number, number) :: number
  def cofactor(matrix, i, j), do: minor(matrix, i, j) * pow(-1, i + j)

  @doc """
  Computes the `(i, j)` minor of a matrix. This is the determinant of the
  matrix whose i<sup>th</sup> row and j<sup>th</sup> column have been removed.

  ##### Examples

      iex> ExAlgebra.Matrix.minor([[2, 3, 4], [1, 0, 0], [3, 4, 5]], 1, 2)
      5.0
  """
  @spec minor([[number]], number, number) :: number
  def minor(matrix, i, j), do: matrix |> submatrix(i, j) |> det

  @doc """
  Computes the trace of a matrix. This is the sum of the elements across the
  main diagonal of a matrix.

  ##### Examples

      iex> ExAlgebra.Matrix.trace([[6, 1, 1], [4, -2, 5], [2, 8, 7]])
      11
  """
  @spec trace([[number]]) :: number
  def trace(matrix) do
    matrix
    |> Enum.with_index()
    |> Enum.map(fn {row, index} -> Enum.at(row, index) end)
    |> Enum.sum()
  end

  @doc """
  Computes the Hadamard product of two matrices of equal dimensions.

  ##### Examples

      iex> ExAlgebra.Matrix.hadamard_product([[1, 2, 3], [2, 3, 4]], [[1, 3, 4], [1, 2, 3]])
      [[1, 6, 12], [2, 6, 12]]
  """
  @spec hadamard_product([[number]], [[number]]) :: [[number]]
  def hadamard_product(matrix_one, matrix_two) do
    matrix_one
    |> Enum.zip(matrix_two)
    |> Enum.map(fn {u, v} -> Vector.hadamard_product(u, v) end)
  end

  @spec naive_multiply([[number]], [[number]]) :: [[number]]
  defp naive_multiply(matrix_one, matrix_two) do
    # matrix_two arrives pre-transposed, so each product entry is a row-row dot.
    matrix_one
    |> List.foldl([], fn row, acc ->
      [matrix_two |> Enum.map(&Vector.dot(&1, row)) | acc]
    end)
    |> Enum.reverse()
  end
end
lib/Matrix/matrix.ex
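# Usage sketch (not part of the original file): a worked 2x2 determinant via
# the first-row cofactor expansion above (pow/2 returns floats, hence -2.0).
ExAlgebra.Matrix.det([[1, 2], [3, 4]])
#=> 1 * 4.0 + 2 * -3.0 = -2.0

ExAlgebra.Matrix.multiply([[1, 2], [3, 4]], [[1, 0], [0, 1]])
#=> [[1, 2], [3, 4]]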