hexsha
stringlengths
40
40
size
int64
2
991k
ext
stringclasses
2 values
lang
stringclasses
1 value
max_stars_repo_path
stringlengths
4
208
max_stars_repo_name
stringlengths
6
106
max_stars_repo_head_hexsha
stringlengths
40
40
max_stars_repo_licenses
list
max_stars_count
int64
1
33.5k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
4
208
max_issues_repo_name
stringlengths
6
106
max_issues_repo_head_hexsha
stringlengths
40
40
max_issues_repo_licenses
list
max_issues_count
int64
1
16.3k
max_issues_repo_issues_event_min_datetime
stringlengths
24
24
max_issues_repo_issues_event_max_datetime
stringlengths
24
24
max_forks_repo_path
stringlengths
4
208
max_forks_repo_name
stringlengths
6
106
max_forks_repo_head_hexsha
stringlengths
40
40
max_forks_repo_licenses
list
max_forks_count
int64
1
6.91k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
content
stringlengths
2
991k
avg_line_length
float64
1
36k
max_line_length
int64
1
977k
alphanum_fraction
float64
0
1
ffb781aa6ba7a8e426b4d6d513400f995b1fdcec
2,556
ex
Elixir
lib/arc_ecto/schema.ex
massetech/arc_ecto
428bfc0fb2573b037dc856929117e161a82eb9e8
[ "Apache-2.0" ]
null
null
null
lib/arc_ecto/schema.ex
massetech/arc_ecto
428bfc0fb2573b037dc856929117e161a82eb9e8
[ "Apache-2.0" ]
null
null
null
lib/arc_ecto/schema.ex
massetech/arc_ecto
428bfc0fb2573b037dc856929117e161a82eb9e8
[ "Apache-2.0" ]
null
null
null
defmodule Arc.Ecto.Schema do
  @moduledoc """
  Provides `cast_attachments/4`, an attachment-aware variant of
  `Ecto.Changeset.cast/3` for schemas using Arc uploaders.

  `use Arc.Ecto.Schema` in a schema module to import the macro.
  """

  defmacro __using__(_) do
    quote do
      import Arc.Ecto.Schema
    end
  end

  @doc """
  Casts the attachment params in `allowed` onto `changeset_or_data`.

  Uploads are wrapped as `{upload, scope}` tuples so the Arc type can use
  the schema struct (the scope) when building storage paths. Accepted
  values per field: `nil`, a `Plug.Upload`, a `%{filename: ..., binary: ...}`
  map, or — only when `options` contains `allow_paths: true` — a binary
  filesystem path.

  Because this expands via `bind_quoted`, all arguments are evaluated once
  in the caller's context, and the final `cast/3` call resolves to whatever
  `cast` is in scope at the call site (normally `Ecto.Changeset.cast/3`).
  """
  defmacro cast_attachments(changeset_or_data, params, allowed, options \\ []) do
    quote bind_quoted: [changeset_or_data: changeset_or_data, params: params, allowed: allowed, options: options] do
      # If given a changeset, apply the changes to obtain the underlying data
      # (the scope passed alongside each upload). Plain schema structs
      # (anything with __meta__) are used as-is.
      scope = case changeset_or_data do
        %Ecto.Changeset{} -> Ecto.Changeset.apply_changes(changeset_or_data)
        %{__meta__: _} -> changeset_or_data
      end

      # Cast supports both atom and string keys, ensure we're matching on both.
      allowed_param_keys = Enum.map(allowed, fn key ->
        case key do
          key when is_binary(key) -> key
          key when is_atom(key) -> Atom.to_string(key)
        end
      end)

      arc_params = case params do
        # :invalid is Ecto's sentinel for "no params given"; pass it through
        # so cast/3 can mark the changeset accordingly.
        :invalid ->
          :invalid

        %{} ->
          params
          |> Arc.Ecto.Schema.convert_params_to_binary
          |> Map.take(allowed_param_keys)
          |> Enum.reduce([], fn
            # Don't wrap nil casts in the scope object
            {field, nil}, fields -> [{field, nil} | fields]

            # Allow casting Plug.Uploads
            {field, upload = %{__struct__: Plug.Upload}}, fields -> [{field, {upload, scope}} | fields]

            # Allow casting binary data structs
            {field, upload = %{filename: filename, binary: binary}}, fields when is_binary(filename) and is_binary(binary) -> [{field, {upload, scope}} | fields]

            # If casting a binary (path), ensure we've explicitly allowed paths
            {field, path}, fields when is_binary(path) ->
              if Keyword.get(options, :allow_paths, false) do
                [{field, {path, scope}} | fields]
              else
                # Paths not allowed: silently drop the param rather than cast it.
                fields
              end
          end)
          |> Enum.into(%{})
      end

      cast(changeset_or_data, arc_params, allowed)
    end
  end

  @doc """
  Normalizes a params map to string keys.

  A map with only string keys is returned unchanged; a map with only atom
  keys has every key converted via `Atom.to_string/1`. Mixed keys raise
  `ArgumentError`, mirroring Ecto's own casting behavior.
  """
  def convert_params_to_binary(params) do
    # Accumulator protocol: `nil` means "all string keys so far" (no copy
    # needed); a map means atom keys were seen and conversion is underway.
    Enum.reduce(params, nil, fn
      {key, _value}, nil when is_binary(key) ->
        nil

      {key, _value}, _ when is_binary(key) ->
        raise ArgumentError,
          "expected params to be a map with atoms or string keys, " <>
          "got a map with mixed keys: #{inspect params}"

      {key, value}, acc when is_atom(key) ->
        Map.put(acc || %{}, Atom.to_string(key), value)
    end) || params
  end
end
33.194805
103
0.566119
ffb7cdfc6a78230a6adecd186ebe402d159daa68
1,325
ex
Elixir
lib/remedy/schema/voice_state.ex
bdanklin/nostrum
554ebd6cff1d0f68c874aa92f475dabf1aed5512
[ "MIT" ]
3
2021-09-05T09:44:02.000Z
2022-01-26T15:31:50.000Z
lib/remedy/schema/voice_state.ex
bdanklin/remedy
554ebd6cff1d0f68c874aa92f475dabf1aed5512
[ "MIT" ]
null
null
null
lib/remedy/schema/voice_state.ex
bdanklin/remedy
554ebd6cff1d0f68c874aa92f475dabf1aed5512
[ "MIT" ]
null
null
null
defmodule Remedy.Schema.VoiceState do
  @moduledoc """
  Voice State Update Event
  """
  use Remedy.Schema

  @typedoc "A user's voice connection state within a guild/channel."
  @type t :: %__MODULE__{
          guild_id: Snowflake.t(),
          channel_id: Snowflake.t(),
          user_id: Snowflake.t(),
          member: Member.t() | nil,
          session_id: String.t(),
          deaf?: boolean(),
          mute?: boolean(),
          self_deaf?: boolean(),
          self_mute?: boolean(),
          self_stream?: boolean(),
          self_video?: boolean(),
          suppress?: boolean(),
          # Time at which the user requested to speak, if applicable.
          request_to_speak_timestamp: ISO8601.t() | nil
        }

  @primary_key false
  embedded_schema do
    field :guild_id, Snowflake
    field :channel_id, Snowflake
    field :user_id, Snowflake
    # Fixed: was declared as `:boolean`, but the typespec above declares
    # `session_id: String.t()` — the session id is a string token, and a
    # boolean cast would reject every real value.
    field :session_id, :string
    field :deaf?, :boolean
    field :mute?, :boolean
    field :self_deaf?, :boolean
    field :self_mute?, :boolean
    field :self_stream?, :boolean
    field :self_video?, :boolean
    field :suppress?, :boolean
    field :request_to_speak_timestamp, ISO8601
    embeds_one :member, Member
  end

  @doc """
  Builds a changeset from `params`, casting every plain schema field and
  the embedded `:member`.
  """
  def changeset(model \\ %__MODULE__{}, params) do
    model
    # Cast all non-embed fields; the embed is cast separately below.
    |> cast(params, __MODULE__.__schema__(:fields) -- __MODULE__.__schema__(:embeds))
    |> cast_embed(:member)
  end
end
27.604167
85
0.621132
ffb7f2d7251283f06d6f40856bec60622f38d66c
3,753
ex
Elixir
lib/asciinema/asciicasts/asciicast.ex
philschatz/asciinema-server
ed69e0c1737f76ef639ecd9f3ef0e59c3e66250b
[ "Apache-2.0" ]
null
null
null
lib/asciinema/asciicasts/asciicast.ex
philschatz/asciinema-server
ed69e0c1737f76ef639ecd9f3ef0e59c3e66250b
[ "Apache-2.0" ]
null
null
null
lib/asciinema/asciicasts/asciicast.ex
philschatz/asciinema-server
ed69e0c1737f76ef639ecd9f3ef0e59c3e66250b
[ "Apache-2.0" ]
null
null
null
defmodule Asciinema.Asciicasts.Asciicast do
  @moduledoc """
  Ecto schema for a recorded terminal session (asciicast), plus changeset
  builders and helpers for file-store paths, snapshot timing, theme
  resolution, and PNG-preview parameters.
  """

  use Ecto.Schema
  import Ecto.Changeset
  alias Asciinema.Accounts.User
  alias Asciinema.Asciicasts.Asciicast
  alias Asciinema.PngGenerator.PngParams

  # Defaults used by png_params/2 and theme_name/2 when nothing is set.
  @default_png_scale 2
  @default_theme "asciinema"

  schema "asciicasts" do
    field :version, :integer
    field :file, :string
    field :terminal_columns, :integer
    field :terminal_lines, :integer
    field :terminal_type, :string
    field :stdout_data, :string
    field :stdout_timing, :string
    field :stdout_frames, :string
    field :private, :boolean
    field :featured, :boolean
    field :secret_token, :string
    field :duration, :float
    field :title, :string
    field :description, :string
    field :theme_name, :string
    field :theme_fg, :string
    field :theme_bg, :string
    field :theme_palette, :string
    field :snapshot_at, :float
    field :snapshot, Asciinema.Ecto.Type.JsonArray
    field :command, :string
    field :shell, :string
    field :uname, :string
    field :user_agent, :string
    field :recorded_at, Timex.Ecto.DateTime
    field :idle_time_limit, :float

    timestamps(inserted_at: :created_at)

    belongs_to :user, User
  end

  # URL parameter: private casts are addressed by secret token,
  # public ones by numeric id.
  defimpl Phoenix.Param, for: Asciicast do
    def to_param(%Asciicast{private: true, secret_token: secret_token}) do
      secret_token
    end

    def to_param(%Asciicast{id: id}) do
      Integer.to_string(id)
    end
  end

  # Base changeset shared by create and update; only :private is required.
  defp changeset(struct, attrs) do
    struct
    |> cast(attrs, [:title, :private, :snapshot_at])
    |> validate_required([:private])
  end

  @doc """
  Changeset for inserting a new asciicast: casts the recording metadata,
  requires the core terminal fields, and assigns a fresh secret token.
  """
  def create_changeset(struct, attrs) do
    struct
    |> changeset(attrs)
    |> cast(attrs, [:version, :file, :duration, :terminal_columns, :terminal_lines, :terminal_type, :command, :shell, :uname, :user_agent, :recorded_at, :theme_fg, :theme_bg, :theme_palette, :idle_time_limit])
    |> validate_required([:user_id, :version, :duration, :terminal_columns, :terminal_lines])
    |> generate_secret_token
  end

  @doc """
  Changeset for user-editable metadata (description, theme) on top of the
  base changeset fields.
  """
  def update_changeset(struct, attrs) do
    struct
    |> changeset(attrs)
    |> cast(attrs, [:description, :theme_name])
  end

  @doc """
  Changeset that replaces only the cached :snapshot JSON array.
  """
  def snapshot_changeset(struct, snapshot) do
    cast(struct, %{snapshot: snapshot}, [:snapshot])
  end

  # NOTE(review): `Crypto` is not aliased or imported in this module —
  # presumably a top-level project module (e.g. Asciinema's Crypto);
  # confirm it resolves at compile time.
  defp generate_secret_token(changeset) do
    put_change(changeset, :secret_token, Crypto.random_token(25))
  end

  # Path of the JSON document: prefer the v1+ :file, fall back to the
  # legacy :stdout_frames file.
  def json_store_path(%Asciicast{file: v} = asciicast) when is_binary(v) do
    file_store_path(asciicast, :file)
  end

  def json_store_path(%Asciicast{stdout_frames: v} = asciicast) when is_binary(v) do
    file_store_path(asciicast, :stdout_frames)
  end

  def file_store_path(%Asciicast{id: id, file: fname}, :file) do
    file_store_path(:file, id, fname)
  end

  def file_store_path(%Asciicast{id: id, stdout_frames: fname}, :stdout_frames) do
    file_store_path(:stdout_frames, id, fname)
  end

  # NOTE(review): :stdout_data deliberately maps to the "stdout" path
  # segment (not "stdout_data") — looks like a legacy on-disk layout;
  # confirm before "fixing".
  def file_store_path(%Asciicast{id: id, stdout_data: fname}, :stdout_data) do
    file_store_path(:stdout, id, fname)
  end

  def file_store_path(%Asciicast{id: id, stdout_timing: fname}, :stdout_timing) do
    file_store_path(:stdout_timing, id, fname)
  end

  def file_store_path(type, id, fname) when is_binary(fname) do
    "asciicast/#{type}/#{id}/#{fname}"
  end

  # Missing filename: no path (callers must handle nil).
  def file_store_path(_type, _id, _fname) do
    nil
  end

  @doc """
  Time (seconds) at which the preview snapshot is taken: the explicit
  :snapshot_at if set, otherwise the midpoint of the recording.
  """
  def snapshot_at(%Asciicast{snapshot_at: snapshot_at, duration: duration}) do
    snapshot_at || duration / 2
  end

  @doc """
  Resolves the effective theme: asciicast override, then the user's
  preference, then the package default.
  """
  def theme_name(%Asciicast{theme_name: a_theme_name}, %User{theme_name: u_theme_name}) do
    a_theme_name || u_theme_name || @default_theme
  end

  @doc """
  Builds the parameter struct for PNG preview generation.
  """
  def png_params(%Asciicast{} = asciicast, %User{} = user) do
    %PngParams{snapshot_at: snapshot_at(asciicast),
               theme: theme_name(asciicast, user),
               scale: @default_png_scale}
  end
end
29.551181
209
0.706368
ffb7f6ec1725237d2493366c9db493b2ecd0a1a3
1,513
ex
Elixir
apps/language_server/lib/language_server/dialyzer/utils.ex
ngscheurich/elixir-ls
4d671e5fbcab69df70e490d17cfb7affcf4d1383
[ "Apache-2.0" ]
null
null
null
apps/language_server/lib/language_server/dialyzer/utils.ex
ngscheurich/elixir-ls
4d671e5fbcab69df70e490d17cfb7affcf4d1383
[ "Apache-2.0" ]
null
null
null
apps/language_server/lib/language_server/dialyzer/utils.ex
ngscheurich/elixir-ls
4d671e5fbcab69df70e490d17cfb7affcf4d1383
[ "Apache-2.0" ]
null
null
null
defmodule ElixirLS.LanguageServer.Dialyzer.Utils do
  # Seconds between year 0 of the Gregorian calendar and the Unix epoch.
  @epoch_gregorian_seconds 62_167_219_200

  @doc """
  Returns true when `module` has a loadable .beam file from which
  dialyzer can extract Core Erlang.
  """
  def dialyzable?(module) do
    beam = :code.which(module)
    is_list(beam) and match?({:ok, _}, :dialyzer_utils.get_core_from_beam(beam))
  end

  @doc """
  Derives a module atom from a .beam file path (e.g. "Elixir.Foo.beam" -> :"Elixir.Foo").
  """
  def pathname_to_module(path) do
    path
    |> Path.basename(".beam")
    |> String.to_atom()
  end

  @doc """
  Transitively collects `modules` and every module they call, skipping
  anything in `exclude` or not dialyzable. Returns a MapSet.
  """
  def expand_references(modules, exclude \\ [], result \\ MapSet.new())

  def expand_references([], _exclude, acc), do: acc

  def expand_references([mod | rest], exclude, acc) do
    acc =
      cond do
        mod in acc -> acc
        mod in exclude -> acc
        not dialyzable?(mod) -> acc
        true ->
          # Recurse into this module's callees before moving on.
          expand_references(module_references(mod), exclude, MapSet.put(acc, mod))
      end

    expand_references(rest, exclude, acc)
  end

  # Mix.Utils.last_modified/1 returns a posix time, so we normalize to a :calendar.universal_time()
  def normalize_timestamp(timestamp) when is_integer(timestamp),
    do: :calendar.gregorian_seconds_to_datetime(@epoch_gregorian_seconds + timestamp)

  # Lists the distinct modules this module makes remote calls to, by
  # scanning its abstract forms. Best-effort: any failure yields [].
  defp module_references(mod) do
    try do
      remote_call? = fn
        {:call, _, {:remote, _, {:atom, _, _}, _}, _} -> true
        _ -> false
      end

      mod
      |> :forms.read()
      |> then(&:forms.filter(remote_call?, &1))
      |> Enum.map(fn {:call, _, {:remote, _, {:atom, _, target}, _}, _} -> target end)
      |> Enum.uniq()
    rescue
      _ -> []
    catch
      _ -> []
    end
  end
end
26.54386
99
0.635823
ffb81c68f0a74aa877f5934e213dbdf04aa3de41
1,363
exs
Elixir
test/swapper_test.exs
dougal/scatter_swap.ex
f596196330081265d4838e884ce73ac2143cd66e
[ "MIT" ]
null
null
null
test/swapper_test.exs
dougal/scatter_swap.ex
f596196330081265d4838e884ce73ac2143cd66e
[ "MIT" ]
null
null
null
test/swapper_test.exs
dougal/scatter_swap.ex
f596196330081265d4838e884ce73ac2143cd66e
[ "MIT" ]
null
null
null
defmodule ScatterSwap.SwapperTest do
  use ExUnit.Case
  doctest ScatterSwap.Swapper

  alias ScatterSwap.Swapper

  # Builds the canonical ten-digit input: nine zeros followed by `last`.
  defp digits(last), do: List.duplicate(0, 9) ++ [last]

  test "swap() uses a unique map to swap out one number for another" do
    assert Swapper.swap(digits(1)) == [9, 0, 1, 2, 3, 4, 5, 6, 7, 9]
    assert Swapper.swap(digits(2)) == [9, 0, 1, 2, 3, 4, 5, 6, 7, 2]
  end

  test "swap() with a salt scrambles the results" do
    assert Swapper.swap(digits(1), 14) == [3, 4, 1, 2, 9, 0, 7, 8, 5, 0]
    assert Swapper.swap(digits(2), 26) == [5, 6, 3, 4, 9, 0, 7, 8, 7, 6]
  end

  test "unswap() uses the map to return the original list" do
    assert Swapper.unswap([9, 0, 1, 2, 3, 4, 5, 6, 7, 9]) == digits(1)
    assert Swapper.unswap([9, 0, 1, 2, 3, 4, 5, 6, 7, 2]) == digits(2)
  end

  test "unswap() with a salt returns the original list" do
    assert Swapper.unswap([3, 4, 1, 2, 9, 0, 7, 8, 5, 0], 14) == digits(1)
    assert Swapper.unswap([5, 6, 3, 4, 9, 0, 7, 8, 7, 6], 26) == digits(2)
  end

  test "unswap() reverses the actions of swap()" do
    salt = 84
    input = digits(1)

    round_tripped =
      input
      |> Swapper.swap(salt)
      |> Swapper.unswap(salt)

    assert round_tripped == input
  end
end
36.837838
95
0.526779
ffb824523c997d6b7fcc390315f3d05e3a3958c4
2,718
ex
Elixir
lib/fun_with_flags/store/cache.ex
gburd/fun_with_flags
db44e02278d7c448c3a06327affe33ab0473a1da
[ "MIT" ]
null
null
null
lib/fun_with_flags/store/cache.ex
gburd/fun_with_flags
db44e02278d7c448c3a06327affe33ab0473a1da
[ "MIT" ]
null
null
null
lib/fun_with_flags/store/cache.ex
gburd/fun_with_flags
db44e02278d7c448c3a06327affe33ab0473a1da
[ "MIT" ]
null
null
null
defmodule FunWithFlags.Store.Cache do
  @moduledoc """
  The in-memory cache for the feature flag, backed by an ETS table.

  This module is not meant to be used directly, but some of its functions
  can be useful to debug flag state.
  """

  @type ttl :: integer
  @type cached_at :: integer

  @doc false
  use GenServer

  alias FunWithFlags.Config
  alias FunWithFlags.Flag
  alias FunWithFlags.Timestamps

  # :protected + read_concurrency: any process may read directly, but
  # only the owning GenServer may write.
  @table_name :fun_with_flags_cache
  @table_options [
    :set, :protected, :named_table, {:read_concurrency, true}
  ]

  @doc false
  # Child spec for the supervision tree; nil when caching is disabled,
  # so the supervisor simply skips this worker.
  def worker_spec do
    if FunWithFlags.Config.cache? do
      %{
        id: __MODULE__,
        start: {__MODULE__, :start_link, []},
        restart: :permanent,
        type: :worker,
      }
    end
  end

  @doc false
  def start_link do
    GenServer.start_link(__MODULE__, :ok, [name: __MODULE__])
  end

  # We lookup without going through the GenServer
  # for concurrency and performance.
  #
  @doc false
  # Returns {:ok, flag} on a fresh hit, or a {:miss, reason, stale_or_nil}
  # tuple on expiry, invalid entry, or absence.
  def get(flag_name) do
    case :ets.lookup(@table_name, flag_name) do
      [{^flag_name, {flag, timestamp, ttl}}] ->
        validate(flag_name, flag, timestamp, ttl)
      _ ->
        {:miss, :not_found, nil}
    end
  end

  # The repeated `name` binding pins the check that the cached Flag's own
  # name matches the key it was stored under; a fresh entry is :ok, an
  # expired one is returned as a miss with the stale flag attached.
  defp validate(name, flag = %Flag{name: name}, timestamp, ttl) do
    if Timestamps.expired?(timestamp, ttl) do
      {:miss, :expired, flag}
    else
      {:ok, flag}
    end
  end

  # Key/name mismatch or non-Flag value: treat as an invalid entry.
  defp validate(_name, _flag, _timestamp, _ttl) do
    {:miss, :invalid, nil}
  end

  # We want to always write serially through the
  # GenServer to avoid race conditions.
  #
  @doc false
  def put(flag = %Flag{}) do
    GenServer.call(__MODULE__, {:put, flag})
  end

  @doc """
  Clears the cache. It will be rebuilt gradually as the public interface
  of the package is queried.
  """
  @spec flush() :: true
  def flush do
    GenServer.call(__MODULE__, :flush)
  end

  @doc """
  Returns the contents of the cache ETS table, for inspection.
  """
  @spec dump() :: [{atom, {FunWithFlags.Flag.t, cached_at, ttl}}]
  def dump do
    :ets.tab2list(@table_name)
  end

  # ------------------------------------------------------------
  # GenServer callbacks

  @doc false
  # Creates the named ETS table owned by this process; the ^tab_name match
  # asserts :ets.new/2 returned the expected name.
  def init(:ok) do
    tab_name = @table_name
    ^tab_name = :ets.new(@table_name, @table_options)
    {:ok, %{tab_name: tab_name, ttl: Config.cache_ttl}}
  end

  @doc false
  def handle_call({:put, flag = %Flag{name: name}}, _from, state = %{ttl: ttl}) do
    # writing to an ETS table will either return true or raise
    :ets.insert(@table_name, {name, {flag, Timestamps.now, ttl}})
    {:reply, {:ok, flag}, state}
  end

  @doc false
  def handle_call(:flush, _from, state) do
    {:reply, :ets.delete_all_objects(@table_name), state}
  end
end
22.097561
82
0.633186
ffb84fc66212d22939f796c10fc8ca81856f9eea
19,607
ex
Elixir
lib/json/ld/context.ex
alexgleason/jsonld-ex
3205dbef2a9010c29a02813afcbc6586d37a2138
[ "MIT" ]
12
2020-06-18T18:23:34.000Z
2022-01-05T15:00:09.000Z
lib/json/ld/context.ex
alexgleason/jsonld-ex
3205dbef2a9010c29a02813afcbc6586d37a2138
[ "MIT" ]
5
2020-06-16T16:18:14.000Z
2021-08-19T20:23:03.000Z
lib/json/ld/context.ex
alexgleason/jsonld-ex
3205dbef2a9010c29a02813afcbc6586d37a2138
[ "MIT" ]
6
2020-06-17T22:25:51.000Z
2021-08-19T19:55:49.000Z
defmodule JSON.LD.Context do import JSON.LD.{IRIExpansion, Utils} alias JSON.LD.Context.TermDefinition alias JSON.LD.Options alias RDF.IRI @type local :: map | String.t() | nil @type remote :: [map] @type value :: map | String.t() | nil @type t :: %__MODULE__{ term_defs: map, default_language: String.t() | nil, vocab: nil, base_iri: String.t() | boolean | nil, api_base_iri: String.t() | nil } defstruct term_defs: %{}, default_language: nil, vocab: nil, base_iri: false, api_base_iri: nil @spec base(t) :: String.t() | nil def base(%__MODULE__{base_iri: false, api_base_iri: api_base_iri}), do: api_base_iri def base(%__MODULE__{base_iri: base_iri}), do: base_iri @spec new(Options.convertible()) :: t def new(options \\ %Options{}), do: %__MODULE__{api_base_iri: Options.new(options).base} @spec create(map, Options.convertible()) :: t def create(%{"@context" => json_ld_context}, options), do: options |> new() |> update(json_ld_context, [], options) @spec update(t, [local] | local, remote, Options.convertible()) :: t def update(active, local, remote \\ [], options \\ %Options{}) def update(%__MODULE__{} = active, local, remote, %Options{} = options) when is_list(local) do Enum.reduce(local, active, fn local, result -> do_update(result, local, remote, options) end) end # 2) If local context is not an array, set it to an array containing only local context. def update(%__MODULE__{} = active, local, remote, %Options{} = options), do: update(active, [local], remote, options) def update(%__MODULE__{} = active, local, remote, options), do: update(active, local, remote, Options.new(options)) # 3.1) If context is null, set result to a newly-initialized active context and continue with the next context. The base IRI of the active context is set to the IRI of the currently being processed document (which might be different from the currently being processed context), if available; otherwise to null. If set, the base option of a JSON-LD API Implementation overrides the base IRI. 
@spec do_update(t, local, remote, Options.t()) :: t defp do_update(%__MODULE__{}, nil, _remote, options), do: new(options) # 3.2) If context is a string, [it's interpreted as a remote context] defp do_update(%__MODULE__{} = active, local, remote, options) when is_binary(local) do # 3.2.1) local = absolute_iri(local, base(active)) # 3.2.2) if local in remote do raise JSON.LD.RecursiveContextInclusionError, message: "Recursive context inclusion: #{local}" end remote = remote ++ [local] # 3.2.3) document_loader = options.document_loader || JSON.LD.DocumentLoader.Default document = case apply(document_loader, :load, [local, options]) do {:ok, result} -> result.document {:error, reason} -> raise JSON.LD.LoadingRemoteContextFailedError, message: "Could not load remote context (#{local}): #{inspect(reason)}" end document = cond do is_map(document) -> document is_binary(document) -> case Jason.decode(document) do {:ok, result} -> result {:error, reason} -> raise JSON.LD.InvalidRemoteContextError, message: "Context is not a valid JSON document: #{inspect(reason)}" end true -> raise JSON.LD.InvalidRemoteContextError, message: "Context is not a valid JSON object: #{inspect(document)}" end local = document["@context"] || raise JSON.LD.InvalidRemoteContextError, message: "Invalid remote context: No @context key in #{inspect(document)}" # 3.2.4) - 3.2.5) update(active, local, remote, options) end # 3.4) - 3.8) defp do_update(%__MODULE__{} = active, local, remote, _) when is_map(local) do with {base, local} <- Map.pop(local, "@base", false), {vocab, local} <- Map.pop(local, "@vocab", false), {language, local} <- Map.pop(local, "@language", false) do active |> set_base(base, remote) |> set_vocab(vocab) |> set_language(language) |> create_term_definitions(local) end end # 3.3) If context is not a JSON object, an invalid local context error has been detected and processing is aborted. 
defp do_update(_, local, _, _) do raise JSON.LD.InvalidLocalContextError, message: "#{inspect(local)} is not a valid @context value" end @spec set_base(t, boolean, remote) :: t defp set_base(active, false, _), do: active defp set_base(active, _, remote) when is_list(remote) and length(remote) > 0, do: active defp set_base(active, base, _) do cond do # TODO: this slightly differs from the spec, due to our false special value for base_iri; add more tests is_nil(base) or IRI.absolute?(base) -> %__MODULE__{active | base_iri: base} active.base_iri -> %__MODULE__{active | base_iri: absolute_iri(base, active.base_iri)} true -> raise JSON.LD.InvalidBaseIRIError, message: "#{inspect(base)} is a relative IRI, but no active base IRI defined" end end @spec set_vocab(t, boolean | nil) :: t defp set_vocab(active, false), do: active defp set_vocab(active, vocab) do if is_nil(vocab) or IRI.absolute?(vocab) or blank_node_id?(vocab) do %__MODULE__{active | vocab: vocab} else raise JSON.LD.InvalidVocabMappingError, message: "#{inspect(vocab)} is not a valid vocabulary mapping" end end @spec set_language(t, boolean | nil) :: t defp set_language(active, false), do: active defp set_language(active, nil), do: %__MODULE__{active | default_language: nil} defp set_language(active, language) when is_binary(language), do: %__MODULE__{active | default_language: String.downcase(language)} defp set_language(_, language) do raise JSON.LD.InvalidDefaultLanguageError, message: "#{inspect(language)} is not a valid language" end @spec language(t, String.t()) :: String.t() | nil def language(active, term) do case Map.get(active.term_defs, term, %TermDefinition{}).language_mapping do false -> active.default_language language -> language end end @spec create_term_definitions(t, map, map) :: t defp create_term_definitions(active, local, defined \\ %{}) do {active, _} = Enum.reduce(local, {active, defined}, fn {term, value}, {active, defined} -> create_term_definition(active, local, term, value, defined) 
end) active end @doc """ Expands the given input according to the steps in the JSON-LD Create Term Definition Algorithm. see <https://www.w3.org/TR/json-ld-api/#create-term-definition> """ @spec create_term_definition(t, map, String.t(), value, map) :: {t, map} def create_term_definition(active, local, term, value, defined) def create_term_definition(active, _, "@base", _, defined), do: {active, defined} def create_term_definition(active, _, "@vocab", _, defined), do: {active, defined} def create_term_definition(active, _, "@language", _, defined), do: {active, defined} def create_term_definition(active, local, term, value, defined) do # 3) if term in JSON.LD.keywords() do raise JSON.LD.KeywordRedefinitionError, message: "#{inspect(term)} is a keyword and can not be defined in context" end # 1) case defined[term] do true -> {active, defined} # , message: "#{inspect term} .." false -> raise JSON.LD.CyclicIRIMappingError nil -> # 2) do_create_term_definition(active, local, term, value, Map.put(defined, term, false)) end end @spec do_create_term_definition(t, map, String.t(), value, map) :: {t, map} defp do_create_term_definition(active, _local, term, nil, defined) do { # (if Map.has_key?(active.term_defs, term), # do: put_in(active, [:term_defs, term], nil), # else: raise "NotImplemented"), %__MODULE__{active | term_defs: Map.put(active.term_defs, term, nil)}, Map.put(defined, term, true) } end defp do_create_term_definition(active, local, term, %{"@id" => nil}, defined), do: do_create_term_definition(active, local, term, nil, defined) defp do_create_term_definition(active, local, term, value, defined) when is_binary(value), do: do_create_term_definition(active, local, term, %{"@id" => value}, defined) defp do_create_term_definition(active, local, term, %{} = value, defined) do # 9) definition = %TermDefinition{} {definition, active, defined} = do_create_type_definition(definition, active, local, value, defined) {done, definition, active, defined} = 
do_create_reverse_definition(definition, active, local, value, defined) {definition, active, defined} = unless done do {definition, active, defined} = do_create_id_definition(definition, active, local, term, value, defined) definition = do_create_container_definition(definition, value) definition = do_create_language_definition(definition, value) {definition, active, defined} else {definition, active, defined} end # 18 / 11.6) Set the term definition of term in active context to definition and set the value associated with defined's key term to true. { %__MODULE__{active | term_defs: Map.put(active.term_defs, term, definition)}, Map.put(defined, term, true) } end defp do_create_term_definition(_, _, _, value, _) do raise JSON.LD.InvalidTermDefinitionError, message: "#{inspect(value)} is not a valid term definition" end # 10.1) # TODO: RDF.rb implementation says: "SPEC FIXME: @type may be nil" @spec do_create_type_definition(TermDefinition.t(), map, map, value, map) :: {TermDefinition.t(), t, map} defp do_create_type_definition(_, _, _, %{"@type" => type}, _) when not is_binary(type) do raise JSON.LD.InvalidTypeMappingError, message: "#{inspect(type)} is not a valid type mapping" end # 10.2) and 10.3) defp do_create_type_definition(definition, active, local, %{"@type" => type}, defined) do {expanded_type, active, defined} = expand_iri(type, active, false, true, local, defined) if IRI.absolute?(expanded_type) or expanded_type in ~w[@id @vocab] do {%TermDefinition{definition | type_mapping: expanded_type}, active, defined} else raise JSON.LD.InvalidTypeMappingError, message: "#{inspect(type)} is not a valid type mapping" end end defp do_create_type_definition(definition, active, _, _, defined), do: {definition, active, defined} @spec do_create_reverse_definition(TermDefinition.t(), t, map, value, map) :: {boolean, TermDefinition.t(), t, map} # 11) If value contains the key @reverse defp do_create_reverse_definition( definition, active, local, %{"@reverse" => reverse} 
= value, defined ) do cond do # 11.1) Map.has_key?(value, "@id") -> raise JSON.LD.InvalidReversePropertyError, message: "#{inspect(reverse)} is not a valid reverse property" # 11.2) not is_binary(reverse) -> raise JSON.LD.InvalidIRIMappingError, message: "Expected String for @reverse value. got #{inspect(reverse)}" # 11.3) true -> {expanded_reverse, active, defined} = expand_iri(reverse, active, false, true, local, defined) definition = if IRI.absolute?(expanded_reverse) or blank_node_id?(expanded_reverse) do %TermDefinition{definition | iri_mapping: expanded_reverse} else raise JSON.LD.InvalidIRIMappingError, message: "Non-absolute @reverse IRI: #{inspect(reverse)}" end # 11.4) definition = case Map.get(value, "@container", {false}) do {false} -> definition container when is_nil(container) or container in ~w[@set @index] -> %TermDefinition{definition | container_mapping: container} _ -> raise JSON.LD.InvalidReversePropertyError, message: "#{inspect(reverse)} is not a valid reverse property; reverse properties only support set- and index-containers" end # 11.5) & 11.6) {true, %TermDefinition{definition | reverse_property: true}, active, defined} end end defp do_create_reverse_definition(definition, active, _, _, defined), do: {false, definition, active, defined} # 13) @spec do_create_id_definition(TermDefinition.t(), t, map, String.t(), map, map) :: {TermDefinition.t(), t, map} defp do_create_id_definition(definition, active, local, term, %{"@id" => id}, defined) when id != term do # 13.1) if is_binary(id) do # 13.2) {expanded_id, active, defined} = expand_iri(id, active, false, true, local, defined) cond do expanded_id == "@context" -> raise JSON.LD.InvalidKeywordAliasError, message: "cannot alias @context" JSON.LD.keyword?(expanded_id) or IRI.absolute?(expanded_id) or blank_node_id?(expanded_id) -> {%TermDefinition{definition | iri_mapping: expanded_id}, active, defined} true -> raise JSON.LD.InvalidIRIMappingError, message: "#{inspect(id)} is not a valid IRI 
mapping; resulting IRI mapping should be a keyword, absolute IRI or blank node" end else raise JSON.LD.InvalidIRIMappingError, message: "expected value of @id to be a string, but got #{inspect(id)}" end end defp do_create_id_definition(definition, active, local, term, _, defined) do # 14) # TODO: The W3C spec seems to contain an error by requiring only to check for a collon. What's when an absolute IRI is given and an "http" term is defined in the context? if String.contains?(term, ":") do case compact_iri_parts(term) do [prefix, suffix] -> prefix_mapping = local[prefix] {active, defined} = if prefix_mapping do do_create_term_definition(active, local, prefix, prefix_mapping, defined) else {active, defined} end if prefix_def = active.term_defs[prefix] do {%TermDefinition{definition | iri_mapping: prefix_def.iri_mapping <> suffix}, active, defined} else {%TermDefinition{definition | iri_mapping: term}, active, defined} end nil -> {%TermDefinition{definition | iri_mapping: term}, active, defined} end # 15) else if active.vocab do {%TermDefinition{definition | iri_mapping: active.vocab <> term}, active, defined} else raise JSON.LD.InvalidIRIMappingError, message: "#{inspect(term)} is not a valid IRI mapping; relative term definition without vocab mapping" end end end # 16.1) @spec do_create_container_definition(TermDefinition.t(), map) :: TermDefinition.t() defp do_create_container_definition(_, %{"@container" => container}) when container not in ~w[@list @set @index @language] do raise JSON.LD.InvalidContainerMappingError, message: "#{inspect(container)} is not a valid container mapping; @container must be either @list, @set, @index, or @language" end # 16.2) defp do_create_container_definition(definition, %{"@container" => container}), do: %TermDefinition{definition | container_mapping: container} defp do_create_container_definition(definition, _), do: definition # 17) @spec do_create_language_definition(TermDefinition.t(), map) :: TermDefinition.t() defp 
do_create_language_definition(definition, %{"@language" => language} = value) do unless Map.has_key?(value, "@type") do case language do language when is_binary(language) -> %TermDefinition{definition | language_mapping: String.downcase(language)} language when is_nil(language) -> %TermDefinition{definition | language_mapping: nil} _ -> raise JSON.LD.InvalidLanguageMappingError, message: "#{inspect(language)} is not a valid language mapping; @language must be a string or null" end end end defp do_create_language_definition(definition, _), do: definition @doc """ Inverse Context Creation algorithm Details at <https://www.w3.org/TR/json-ld-api/#inverse-context-creation> """ @spec inverse(t) :: map def inverse(%__MODULE__{} = context) do # 2) Initialize default language to @none. If the active context has a default language, set default language to it. default_language = context.default_language || "@none" # 3) For each key term and value term definition in the active context, ordered by shortest term first (breaking ties by choosing the lexicographically least term) context.term_defs |> Enum.sort_by(fn {term, _} -> String.length(term) end) |> Enum.reduce(%{}, fn {term, term_def}, result -> # 3.1) If the term definition is null, term cannot be selected during compaction, so continue to the next term. if term_def do # 3.2) Initialize container to @none. If there is a container mapping in term definition, set container to its associated value. container = term_def.container_mapping || "@none" # 3.3) Initialize iri to the value of the IRI mapping for the term definition. 
iri = term_def.iri_mapping type_map = get_in(result, [iri, container, "@type"]) || %{} language_map = get_in(result, [iri, container, "@language"]) || %{} {type_map, language_map} = case term_def do # 3.8) If the term definition indicates that the term represents a reverse property %TermDefinition{reverse_property: true} -> {Map.put_new(type_map, "@reverse", term), language_map} # 3.9) Otherwise, if term definition has a type mapping %TermDefinition{type_mapping: type_mapping} when type_mapping != false -> {Map.put_new(type_map, type_mapping, term), language_map} # 3.10) Otherwise, if term definition has a language mapping (might be null) %TermDefinition{language_mapping: language_mapping} when language_mapping != false -> language = language_mapping || "@null" {type_map, Map.put_new(language_map, language, term)} # 3.11) Otherwise _ -> language_map = Map.put_new(language_map, default_language, term) language_map = Map.put_new(language_map, "@none", term) type_map = Map.put_new(type_map, "@none", term) {type_map, language_map} end result |> Map.put_new(iri, %{}) |> Map.update(iri, %{}, fn container_map -> Map.put(container_map, container, %{"@type" => type_map, "@language" => language_map}) end) else result end end) end @spec empty?(t) :: boolean def empty?(%__MODULE__{term_defs: term_defs, vocab: nil, base_iri: false, default_language: nil}) when map_size(term_defs) == 0, do: true def empty?(_), do: false end
36.376623
392
0.645178
ffb86860a6448fad67441edb6e6919f775536e3d
4,113
ex
Elixir
lib/absinthe/phase/subscription/subscribe_self.ex
zoldar/absinthe
72ff9f91fcc0a261f9965cf8120c7c72ff6e4c7c
[ "MIT" ]
4,101
2016-03-02T03:49:20.000Z
2022-03-31T05:46:01.000Z
lib/absinthe/phase/subscription/subscribe_self.ex
zoldar/absinthe
72ff9f91fcc0a261f9965cf8120c7c72ff6e4c7c
[ "MIT" ]
889
2016-03-02T16:06:59.000Z
2022-03-31T20:24:12.000Z
lib/absinthe/phase/subscription/subscribe_self.ex
zoldar/absinthe
72ff9f91fcc0a261f9965cf8120c7c72ff6e4c7c
[ "MIT" ]
564
2016-03-02T07:49:59.000Z
2022-03-06T14:40:59.000Z
defmodule Absinthe.Phase.Subscription.SubscribeSelf do use Absinthe.Phase alias Absinthe.Phase @moduledoc false alias Absinthe.Blueprint @spec run(any, Keyword.t()) :: {:ok, Blueprint.t()} def run(blueprint, options) do with %{type: :subscription} = op <- Blueprint.current_operation(blueprint) do do_subscription(op, blueprint, options) else _ -> {:ok, blueprint} end end def do_subscription(%{type: :subscription} = op, blueprint, options) do context = blueprint.execution.context pubsub = ensure_pubsub!(context) %{selections: [field]} = op with {:ok, config} <- get_config(field, context, blueprint) do field_keys = get_field_keys(field, config) subscription_id = get_subscription_id(config, blueprint, options) for field_key <- field_keys, do: Absinthe.Subscription.subscribe(pubsub, field_key, subscription_id, blueprint) {:replace, blueprint, [ {Phase.Subscription.Result, topic: subscription_id}, {Phase.Telemetry, Keyword.put(options, :event, [:execute, :operation, :stop])} ]} else {:error, error} -> blueprint = update_in(blueprint.execution.validation_errors, &[error | &1]) error_pipeline = [ {Phase.Document.Result, options} ] {:replace, blueprint, error_pipeline} end end defp get_config( %{schema_node: schema_node, argument_data: argument_data} = field, context, blueprint ) do name = schema_node.identifier config = case Absinthe.Type.function(schema_node, :config) do fun when is_function(fun, 2) -> apply(fun, [argument_data, %{context: context, document: blueprint}]) fun when is_function(fun, 1) -> IO.write( :stderr, "Warning: 1-arity topic functions are deprecated, upgrade to 2 arity before 1.4.0 release" ) apply(fun, [argument_data]) nil -> {:ok, topic: Atom.to_string(name)} end case config do {:ok, config} -> {:ok, config} {:error, msg} -> error = %Phase.Error{ phase: __MODULE__, message: msg, locations: [field.source_location] } {:error, error} val -> raise """ Invalid return from config function! A config function must return `{:ok, config}` or `{:error, msg}`. 
You returned: #{inspect(val)} """ end end defp get_field_keys(%{schema_node: schema_node} = _field, config) do name = schema_node.identifier find_field_keys!(config) |> Enum.map(fn key -> {name, key} end) end defp ensure_pubsub!(context) do case Absinthe.Subscription.extract_pubsub(context) do {:ok, pubsub} -> pubsub _ -> raise """ Pubsub not configured! Subscriptions require a configured pubsub module. """ end end defp find_field_keys!(config) do topic = config[:topic] || raise """ Subscription config must include a non null topic! #{inspect(config)} """ case topic do [] -> raise """ Subscription config must not provide an empty list of topics! #{inspect(config)} """ val -> List.wrap(val) |> Enum.map(&to_string/1) end end defp get_subscription_id(config, blueprint, options) do context_id = get_context_id(config) document_id = get_document_id(config, blueprint, options) "__absinthe__:doc:#{context_id}:#{document_id}" end defp get_context_id(config) do context_id = config[:context_id] || :erlang.unique_integer() to_string(context_id) end defp get_document_id(config, blueprint, options) do case config[:document_id] do nil -> binary = {blueprint.source || blueprint.input, options[:variables] || %{}} |> :erlang.term_to_binary() :crypto.hash(:sha256, binary) |> Base.encode16() val -> val end end end
24.628743
102
0.601751
ffb86d6eac0069bb4909d3fcb31437bd0e808fa7
513
ex
Elixir
deps/absinthe/lib/absinthe/blueprint/document/fragment/spread.ex
JoakimEskils/elixir-absinthe
d81e24ec7c7b1164e6d152101dd50422f192d7e9
[ "MIT" ]
3
2017-06-22T16:33:58.000Z
2021-07-07T15:21:09.000Z
lib/absinthe/blueprint/document/fragment/spread.ex
bruce/absinthe
19b63d3aaa9fb75aad01ffd5e91d89e0b30d7f91
[ "MIT" ]
null
null
null
lib/absinthe/blueprint/document/fragment/spread.ex
bruce/absinthe
19b63d3aaa9fb75aad01ffd5e91d89e0b30d7f91
[ "MIT" ]
null
null
null
defmodule Absinthe.Blueprint.Document.Fragment.Spread do @moduledoc false alias Absinthe.Blueprint @enforce_keys [:name] defstruct [ :name, directives: [], source_location: nil, # Populated by phases complexity: nil, flags: %{}, errors: [], ] @type t :: %__MODULE__{ directives: [Blueprint.Directive.t], errors: [Absinthe.Phase.Error.t], name: String.t, flags: Blueprint.flags_t, source_location: nil | Blueprint.Document.SourceLocation.t, } end
19
63
0.658869
ffb89291f241a95a9b80d36d090a1f719b24e2b7
361
exs
Elixir
test/ua_inspector/short_code_map/client_browsers_test.exs
elixytics/ua_inspector
11fd98f69b7853b70529ee73355ef57851248572
[ "Apache-2.0" ]
57
2015-04-07T03:10:45.000Z
2019-03-11T01:01:40.000Z
test/ua_inspector/short_code_map/client_browsers_test.exs
elixytics/ua_inspector
11fd98f69b7853b70529ee73355ef57851248572
[ "Apache-2.0" ]
16
2015-03-09T19:56:17.000Z
2019-03-16T14:24:02.000Z
test/ua_inspector/short_code_map/client_browsers_test.exs
elixytics/ua_inspector
11fd98f69b7853b70529ee73355ef57851248572
[ "Apache-2.0" ]
15
2015-02-02T23:14:00.000Z
2019-03-16T13:15:05.000Z
defmodule UAInspector.ShortCodeMap.ClientBrowsersTest do use ExUnit.Case, async: true alias UAInspector.ShortCodeMap.ClientBrowsers test "client browser" do assert "PU" = ClientBrowsers.to_short("Puffin") end test "client browser not convertible" do browser = "--unknown--" assert ^browser = ClientBrowsers.to_short(browser) end end
22.5625
56
0.745152
ffb89bbfdb52391716a770e39c63c6cdfd1d72fb
952
ex
Elixir
lib/blockchain_node/socket_handler.ex
dominicletz/blockchain-node
85837c8c37477f6a145c40eab45d9d99ea606ffe
[ "Apache-2.0" ]
1
2021-12-15T07:09:12.000Z
2021-12-15T07:09:12.000Z
lib/blockchain_node/socket_handler.ex
dominicletz/blockchain-node
85837c8c37477f6a145c40eab45d9d99ea606ffe
[ "Apache-2.0" ]
null
null
null
lib/blockchain_node/socket_handler.ex
dominicletz/blockchain-node
85837c8c37477f6a145c40eab45d9d99ea606ffe
[ "Apache-2.0" ]
null
null
null
defmodule BlockchainNode.SocketHandler do @behaviour :cowboy_websocket_handler def init(_, _req, _opts) do {:upgrade, :protocol, :cowboy_websocket} end # Called on websocket connection initialization. def websocket_init(_type, req, _opts) do :pg2.join(:websocket_connections, self()) state = %{} {:ok, req, state} end # Handle 'ping' messages from the browser - reply def websocket_handle({:text, "ping"}, req, state) do {:reply, {:text, "pong"}, req, state} end # Handle other messages from the browser - don't reply def websocket_handle({:text, message}, req, state) do IO.puts(message) {:ok, req, state} end # Format and forward elixir messages to client def websocket_info(message, req, state) do {:reply, {:text, message}, req, state} end # No matter why we terminate, remove all of this pids subscriptions def websocket_terminate(_reason, _req, _state) do :ok end end
26.444444
69
0.688025
ffb8dc0b9c27d747fb86edb83b1734117ba1b04d
696
exs
Elixir
config/test.exs
andrebrito16/lyov
9f1cf7b85745db3b8acc6dbe14d1633853a2311b
[ "MIT" ]
1
2022-01-03T02:24:58.000Z
2022-01-03T02:24:58.000Z
config/test.exs
andrebrito16/lyov
9f1cf7b85745db3b8acc6dbe14d1633853a2311b
[ "MIT" ]
1
2022-01-04T12:59:33.000Z
2022-01-04T12:59:33.000Z
config/test.exs
andrebrito16/lyov
9f1cf7b85745db3b8acc6dbe14d1633853a2311b
[ "MIT" ]
1
2022-01-02T17:29:39.000Z
2022-01-02T17:29:39.000Z
use Mix.Config # Configure your database # # The MIX_TEST_PARTITION environment variable can be used # to provide built-in test partitioning in CI environment. # Run `mix help test` for more information. config :cgnaflightsapi, Cgnaflightsapi.Repo, username: "postgres", password: "postgres", database: "cgnaflightsapi_test#{System.get_env("MIX_TEST_PARTITION")}", hostname: "localhost", pool: Ecto.Adapters.SQL.Sandbox # We don't run a server during test. If one is required, # you can enable the server option below. config :cgnaflightsapi, CgnaflightsapiWeb.Endpoint, http: [port: 4002], server: false # Print only warnings and errors during test config :logger, level: :warn
30.26087
73
0.761494
ffb8facc38e85ad34bcecffc8bd3b1b397368255
4,494
ex
Elixir
lib/aws/generated/pi.ex
onno-vos-dev/aws-elixir
00f02c2bce689b932948b6a4d603fd44bb5fc0e9
[ "Apache-2.0" ]
null
null
null
lib/aws/generated/pi.ex
onno-vos-dev/aws-elixir
00f02c2bce689b932948b6a4d603fd44bb5fc0e9
[ "Apache-2.0" ]
null
null
null
lib/aws/generated/pi.ex
onno-vos-dev/aws-elixir
00f02c2bce689b932948b6a4d603fd44bb5fc0e9
[ "Apache-2.0" ]
null
null
null
# WARNING: DO NOT EDIT, AUTO-GENERATED CODE! # See https://github.com/aws-beam/aws-codegen for more details. defmodule AWS.PI do @moduledoc """ Amazon RDS Performance Insights Amazon RDS Performance Insights enables you to monitor and explore different dimensions of database load based on data captured from a running DB instance. The guide provides detailed information about Performance Insights data types, parameters and errors. When Performance Insights is enabled, the Amazon RDS Performance Insights API provides visibility into the performance of your DB instance. Amazon CloudWatch provides the authoritative source for Amazon Web Services service-vended monitoring metrics. Performance Insights offers a domain-specific view of DB load. DB load is measured as average active sessions. Performance Insights provides the data to API consumers as a two-dimensional time-series dataset. The time dimension provides DB load data for each time point in the queried time range. Each time point decomposes overall load in relation to the requested dimensions, measured at that time point. Examples include SQL, Wait event, User, and Host. * To learn more about Performance Insights and Amazon Aurora DB instances, go to the [ Amazon Aurora User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/USER_PerfInsights.html). * To learn more about Performance Insights and Amazon RDS DB instances, go to the [ Amazon RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_PerfInsights.html). """ alias AWS.Client alias AWS.Request def metadata do %AWS.ServiceMetadata{ abbreviation: "AWS PI", api_version: "2018-02-27", content_type: "application/x-amz-json-1.1", credential_scope: nil, endpoint_prefix: "pi", global?: false, protocol: "json", service_id: "PI", signature_version: "v4", signing_name: "pi", target_prefix: "PerformanceInsightsv20180227" } end @doc """ For a specific time period, retrieve the top `N` dimension keys for a metric. 
Each response element returns a maximum of 500 bytes. For larger elements, such as SQL statements, only the first 500 bytes are returned. """ def describe_dimension_keys(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "DescribeDimensionKeys", input, options) end @doc """ Get the attributes of the specified dimension group for a DB instance or data source. For example, if you specify a SQL ID, `GetDimensionKeyDetails` retrieves the full text of the dimension `db.sql.statement`cassociated with this ID. This operation is useful because `GetResourceMetrics` and `DescribeDimensionKeys` don't support retrieval of large SQL statement text. """ def get_dimension_key_details(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetDimensionKeyDetails", input, options) end @doc """ Retrieve the metadata for different features. For example, the metadata might indicate that a feature is turned on or off on a specific DB instance. """ def get_resource_metadata(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetResourceMetadata", input, options) end @doc """ Retrieve Performance Insights metrics for a set of data sources, over a time period. You can provide specific dimension groups and dimensions, and provide aggregation and filtering criteria for each group. Each response element returns a maximum of 500 bytes. For larger elements, such as SQL statements, only the first 500 bytes are returned. """ def get_resource_metrics(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "GetResourceMetrics", input, options) end @doc """ Retrieve the dimensions that can be queried for each specified metric type on a specified DB instance. 
""" def list_available_resource_dimensions(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "ListAvailableResourceDimensions", input, options) end @doc """ Retrieve metrics of the specified types that can be queried for a specified DB instance. """ def list_available_resource_metrics(%Client{} = client, input, options \\ []) do Request.request_post(client, metadata(), "ListAvailableResourceMetrics", input, options) end end
39.078261
136
0.742323
ffb90294d9c498e05317539e4eba0b18caa06ebb
3,350
exs
Elixir
config/prod.exs
harry-gao/ex-cart
573e7f977bb3b710d11618dd215d4ddd8f819fb3
[ "Apache-2.0" ]
null
null
null
config/prod.exs
harry-gao/ex-cart
573e7f977bb3b710d11618dd215d4ddd8f819fb3
[ "Apache-2.0" ]
null
null
null
config/prod.exs
harry-gao/ex-cart
573e7f977bb3b710d11618dd215d4ddd8f819fb3
[ "Apache-2.0" ]
null
null
null
use Mix.Config # For production, we configure the host to read the PORT # from the system environment. Therefore, you will need # to set PORT=80 before running your server. # # You should also configure the url host to something # meaningful, we use this information when generating URLs. # # Finally, we also include the path to a manifest # containing the digested version of static files. This # manifest is generated by the mix phoenix.digest task # which you typically run after static files are built. config :nectar, Nectar.Endpoint, http: [port: {:system, "PORT"}], url: [scheme: "https", host: System.get_env("APP_URL"), port: 443], force_ssl: [rewrite_on: [:x_forwarded_proto]], cache_static_manifest: "priv/static/manifest.json", secret_key_base: System.get_env("SECRET_KEY_BASE") # Configure your database config :nectar, Nectar.Repo, adapter: Ecto.Adapters.Postgres, url: System.get_env("DATABASE_URL"), pool_size: 10 # Do not print debug messages in production config :logger, level: :info # config :nectar, :stripe, # type: Commerce.Billing.Gateways.Stripe, # credentials: {System.get_env("STRIPE_TEST_KEY"), ""}, # default_currency: "USD" # config :nectar, :braintree, # type: Nectar.Billing.Gateways.BraintreeImpl # config :braintree, # merchant_id: System.get_env("BRAINTREE_MERCHANT_ID"), # public_key: System.get_env("BRAINTREE_PUBLIC_KEY"), # private_key: System.get_env("BRAINTREE_PRIVATE_KEY") config :arc, bucket: System.get_env("AWS_S3_BUCKET"), virtual_host: true config :ex_aws, debug_requests: true, access_key_id: [{:system, "AWS_ACCESS_KEY_ID"}, :instance_role], secret_access_key: [{:system, "AWS_SECRET_ACCESS_KEY"}, :instance_role], region: "ap-southeast-1", s3: [ scheme: "https://", host: "s3.ap-southeast-1.amazonaws.com/", region: "ap-southeast-1" ] config :ex_aws, :hackney_opts, follow_redirect: true, recv_timeout: 30_000 # ## SSL Support # # To get SSL working, you will need to add the `https` key # to the previous section and set your `:url` port to 443: 
# # config :nectar, Nectar.Endpoint, # ... # url: [host: "example.com", port: 443], # https: [port: 443, # keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"), # certfile: System.get_env("SOME_APP_SSL_CERT_PATH")] # # Where those two env variables return an absolute path to # the key and cert in disk or a relative path inside priv, # for example "priv/ssl/server.key". # # We also recommend setting `force_ssl`, ensuring no data is # ever sent via http, always redirecting to https: # # config :nectar, Nectar.Endpoint, # force_ssl: [hsts: true] # # Check `Plug.SSL` for all available options in `force_ssl`. # ## Using releases # # If you are doing OTP releases, you need to instruct Phoenix # to start the server for all endpoints: # # config :phoenix, :serve_endpoints, true # # Alternatively, you can configure exactly which server to # start per endpoint: # # config :nectar, Nectar.Endpoint, server: true # # You will also need to set the application root to `.` in order # for the new static assets to be served after a hot upgrade: # # config :nectar, Nectar.Endpoint, root: "." # Finally import the config/prod.secret.exs # which should be versioned separately. # import_config "prod.secret.exs"
31.603774
74
0.714328
ffb921fd854d4d9d770d98185e4cdfd4b39ffea8
3,261
exs
Elixir
test/spear/connection/configuration_test.exs
NFIBrokerage/spear
611223d2cdba97daa63f2ddd81a08b4f55e72198
[ "Apache-2.0" ]
34
2021-04-15T22:16:38.000Z
2022-03-11T10:20:11.000Z
test/spear/connection/configuration_test.exs
NFIBrokerage/spear
611223d2cdba97daa63f2ddd81a08b4f55e72198
[ "Apache-2.0" ]
32
2021-04-05T18:07:35.000Z
2021-11-04T19:32:58.000Z
test/spear/connection/configuration_test.exs
NFIBrokerage/spear
611223d2cdba97daa63f2ddd81a08b4f55e72198
[ "Apache-2.0" ]
1
2021-10-11T15:53:42.000Z
2021-10-11T15:53:42.000Z
defmodule Spear.Connection.ConfigurationTest do use ExUnit.Case, async: true import ExUnit.CaptureLog alias Spear.Connection.Configuration, as: Config test "a minimal connection string produces valid config" do config = Config.new(connection_string: "esdb://localhost:2113") assert config.valid? == true assert config.tls? == false assert config.scheme == :http assert config.keep_alive_interval == 10_000 assert config.keep_alive_timeout == 10_000 end test "a tls connection string passes validation" do config = Config.new( connection_string: "esdb://admin:changeit@localhost:2113?tls=true&keepAliveInterval=30000&keepAliveTimeout=15000" ) assert config.valid? == true assert config.tls? == true assert config.scheme == :https assert config.username == "admin" assert config.password == "changeit" end test "keep-alive interval/timeout can be disabled with -1" do config = Config.new( connection_string: "esdb://localhost:2113?tls=true&keepAliveInterval=-1&keepAliveTimeout=-1" ) assert config.valid? == true assert config.keep_alive_interval == false assert config.keep_alive_interval == false end test "small keep-alive times emit warning log messages" do log = capture_log([level: :warn], fn -> config = Config.new( connection_string: "esdb://localhost:2113?tls=true&keepAliveInterval=1000&keepAliveTimeout=1000" ) assert config.valid? == true assert config.keep_alive_interval == 1_000 assert config.keep_alive_timeout == 1_000 end) assert log =~ "keepAliveInterval" assert log =~ "keepAliveTimeout" assert log =~ "less than recommended 10_000ms" end test "negative keep-alive values emit errors" do config = Config.new( connection_string: "esdb://localhost:2113?tls=true&keepAliveInterval=-500&keepAliveTimeout=-500" ) assert config.valid? 
== false assert [_ | _] = config.errors for {key, error_msg} <- config.errors do assert key in ~w[keep_alive_interval keep_alive_timeout]a assert error_msg =~ "must be greater than 1" end end test "port number 0 gets rejected by validation" do config = Config.new(connection_string: "esdb://localhost:0") assert config.valid? == false assert [{:port, error_msg}] = config.errors assert error_msg =~ "0 is not a valid port number" end test "connection params can be entirely crafted without the connection string" do config = Config.new( scheme: :http, username: "admin", password: "changeit", host: "localhost", port: 2113 ) assert config.valid? == true end test "mint protocols and mode options cannot be overriden" do config = Config.new( scheme: :http, username: "admin", password: "changeit", host: "localhost", port: 2113, mint_opts: [protocols: [:http2, :http], mode: :passive] ) assert config.valid? == true assert config.mint_opts[:protocols] == [:http2] assert config.mint_opts[:mode] == :active end end
28.356522
104
0.651334
ffb9a559ace683404d012f985a95d5c84c76bacb
918
exs
Elixir
mix.exs
shiryel/pawex
b6f08871eb5a971f0bd4226ea18044732ba0e15f
[ "MIT" ]
null
null
null
mix.exs
shiryel/pawex
b6f08871eb5a971f0bd4226ea18044732ba0e15f
[ "MIT" ]
null
null
null
mix.exs
shiryel/pawex
b6f08871eb5a971f0bd4226ea18044732ba0e15f
[ "MIT" ]
null
null
null
defmodule Pawex.MixProject do use Mix.Project def project do [ app: :pawex, version: "0.1.0", elixir: "~> 1.11", start_permanent: Mix.env() == :prod, deps: deps(), # ex_docs: docs: fn -> {result, _} = Code.eval_file("docs.exs") result end, # excoveralls: test_coverage: [tool: ExCoveralls], preferred_cli_env: [coveralls: :test] ] end # Run "mix help compile.app" to learn about applications. def application do [ extra_applications: [:logger], mod: {Pawex.Application, []} ] end # Run "mix help deps" to learn about dependencies. defp deps do [ {:credo, "~> 1.5", only: [:dev, :test], runtime: false}, {:dialyxir, "~> 1.0", only: [:dev], runtime: false}, {:ex_doc, "~> 0.23", only: :dev, runtime: false}, {:excoveralls, "~> 0.10", only: :test} ] end end
22.95
62
0.545752
ffb9e8b7f91f2c1667dc57189f7773aa39d699f9
2,355
exs
Elixir
test/support/ecto_helper.exs
TigerWolf/sentinel
d8b9c948bb7b9cafe3868e3b66b3fa008994e39e
[ "MIT" ]
null
null
null
test/support/ecto_helper.exs
TigerWolf/sentinel
d8b9c948bb7b9cafe3868e3b66b3fa008994e39e
[ "MIT" ]
null
null
null
test/support/ecto_helper.exs
TigerWolf/sentinel
d8b9c948bb7b9cafe3868e3b66b3fa008994e39e
[ "MIT" ]
null
null
null
defmodule Sentinel.TestRepo do use Ecto.Repo, otp_app: :sentinel end alias Sentinel.TestRepo defmodule Sentinel.Case do use ExUnit.CaseTemplate setup_all do Ecto.Adapters.SQL.begin_test_transaction(TestRepo, []) on_exit fn -> Ecto.Adapters.SQL.rollback_test_transaction(TestRepo, []) end :ok end setup do Ecto.Adapters.SQL.restart_test_transaction(TestRepo, []) :ok end end defmodule UsersMigration do use Ecto.Migration def change do create table(:users) do add :email, :text add :username, :text add :role, :text add :hashed_password, :text add :hashed_confirmation_token, :text add :confirmed_at, :datetime add :hashed_password_reset_token, :text add :unconfirmed_email, :text end create index(:users, [:email], unique: true) create index(:users, [:username], unique: true) end end defmodule GuardianDbMigration do use Ecto.Migration def up do create table(:guardian_tokens, primary_key: false) do add :jti, :string, primary_key: true add :typ, :string add :aud, :string add :iss, :string add :sub, :string add :exp, :bigint add :jwt, :text add :claims, :map timestamps end end def down do drop table(:guardian_tokens) end end # Load up the repository, start it, and run migrations Ecto.Storage.down(TestRepo) :ok = Ecto.Storage.up(TestRepo) {:ok, _pid} = TestRepo.start_link :ok = Ecto.Migrator.up(TestRepo, 0, UsersMigration, log: false) :ok = Ecto.Migrator.up(TestRepo, 1, GuardianDbMigration, log: false) defmodule Sentinel.User do use Ecto.Model schema "users" do field :email, :string field :username, :string field :role, :string field :hashed_password, :string field :hashed_confirmation_token, :string field :confirmed_at, Ecto.DateTime field :hashed_password_reset_token, :string field :unconfirmed_email, :string end @required_fields ~w() @optional_fields ~w() def changeset(model, params \\ :empty) do model |> cast(params, @required_fields, @optional_fields) end def permissions(role \\ "user") do permissions_env = Application.get_env(:sentinel, :permissions) end end
25.053191
79
0.655202
ffba211642569470028755f4277b6e2015bb4f72
1,848
ex
Elixir
clients/compute/lib/google_api/compute/v1/model/location_policy.ex
jechol/elixir-google-api
0290b683dfc6491ca2ef755a80bc329378738d03
[ "Apache-2.0" ]
null
null
null
clients/compute/lib/google_api/compute/v1/model/location_policy.ex
jechol/elixir-google-api
0290b683dfc6491ca2ef755a80bc329378738d03
[ "Apache-2.0" ]
null
null
null
clients/compute/lib/google_api/compute/v1/model/location_policy.ex
jechol/elixir-google-api
0290b683dfc6491ca2ef755a80bc329378738d03
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.Compute.V1.Model.LocationPolicy do @moduledoc """ Configuration for location policy among multiple possible locations (e.g. preferences for zone selection among zones in a single region). ## Attributes * `locations` (*type:* `%{optional(String.t) => GoogleApi.Compute.V1.Model.LocationPolicyLocation.t}`, *default:* `nil`) - Location configurations mapped by location name. Currently only zone names are supported and must be represented as valid internal URLs, like: zones/us-central1-a. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :locations => %{optional(String.t()) => GoogleApi.Compute.V1.Model.LocationPolicyLocation.t()} | nil } field(:locations, as: GoogleApi.Compute.V1.Model.LocationPolicyLocation, type: :map) end defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.LocationPolicy do def decode(value, options) do GoogleApi.Compute.V1.Model.LocationPolicy.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.LocationPolicy do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
38.5
290
0.746753
ffba22f9bcee4cbabaaa63a48fde10817207177d
468
ex
Elixir
lib/web/socket/state.ex
SwiftAusterity/gossip
d79c53acd02fcb9905acb9730e59065efdd5a589
[ "MIT" ]
null
null
null
lib/web/socket/state.ex
SwiftAusterity/gossip
d79c53acd02fcb9905acb9730e59065efdd5a589
[ "MIT" ]
null
null
null
lib/web/socket/state.ex
SwiftAusterity/gossip
d79c53acd02fcb9905acb9730e59065efdd5a589
[ "MIT" ]
null
null
null
defmodule Web.Socket.State do @moduledoc """ Struct for socket state """ @doc """ - status: "active" or "inactive" - game: the connected game when active - supports: list of features the socket supporst - channels: list of channels the socket is subscribed to - players: list of connected players - hearbeat_count: the count of heartbeats with no response """ defstruct [:status, :game, :supports, :channels, :players, heartbeat_count: 0] end
29.25
80
0.707265
ffba25c2187e9a92af870c54c8a7b8515025d64a
1,983
ex
Elixir
lib/tables/hmtx.ex
bengtson/fonts
b41daac7e010ec95eb62f2f68c5987429d771e33
[ "MIT" ]
null
null
null
lib/tables/hmtx.ex
bengtson/fonts
b41daac7e010ec95eb62f2f68c5987429d771e33
[ "MIT" ]
null
null
null
lib/tables/hmtx.ex
bengtson/fonts
b41daac7e010ec95eb62f2f68c5987429d771e33
[ "MIT" ]
null
null
null
defmodule Fonts.Tables.Hmtx do def glyph_metrics_for_index(index, font) do horiz_metrics_table = font["Tables"]["hmtx"]["Horizontal Metrics Table"] offset = index * 4 << _::binary-size(offset), advance_width::unsigned-integer-size(16), left_side_bearing::signed-integer-size(16), _rest::binary >> = horiz_metrics_table # IO.inspect left_side_bearing {advance_width, left_side_bearing} end def parse_table(checksum, offset, length, state) do # Needs hhea and maxp Tables table_map = state["Font"]["Tables"] hhea_table = table_map["hhea"] maxp_table = table_map["maxp"] cond do hhea_table == nil -> # IO.puts "'hmtx' : Skipping - Wait For 'hhea'" {:skip} maxp_table == nil -> # IO.puts "'hmtx' : Skipping - Wait For 'maxp'" {:skip} true -> gen_table(checksum, offset, length, state) end end def gen_table(checksum, offset, length, state) do # IO.puts "'hmtx'" <> " : Parsing" %{"Binary" => binary, "Parser" => _parser, "Font" => font} = state # Get number of h metrics # Get number of Glyphs num_h_metrics = font["Tables"]["hhea"]["Number Of H Metrics"] num_glyphs = font["Tables"]["maxp"]["Number Of Glyphs"] metrics_size = num_h_metrics * 4 glyphs_size = (num_glyphs - num_h_metrics) * 2 << _::binary-size(offset), long_hor_metric::binary-size(metrics_size), left_side_bearing::binary-size(glyphs_size), _rest::binary >> = binary table = %{ "hmtx" => %{ "Table Length" => length, "Table Offset" => offset, "Table Checksum" => checksum, "Horizontal Metrics Table" => long_hor_metric, "Left Side Bearing Table" => left_side_bearing } } %{"Tables" => tables} = font tables = Map.merge(tables, table) state = put_in(state, ["Font", "Tables"], tables) {:ok, state} end end
27.164384
76
0.598084
ffba2e23dab4dc7501c70b82362b5e145ec4161a
1,065
exs
Elixir
test/model/build_method_test.exs
szTheory/neo4j_sips_models
70c7ad45da4aec97dc56dcf951df878b9e801e47
[ "MIT" ]
8
2016-04-22T03:13:02.000Z
2021-01-01T02:49:28.000Z
test/model/build_method_test.exs
szTheory/neo4j_sips_models
70c7ad45da4aec97dc56dcf951df878b9e801e47
[ "MIT" ]
2
2016-10-06T07:20:19.000Z
2020-03-05T02:04:52.000Z
test/model/build_method_test.exs
szTheory/neo4j_sips_models
70c7ad45da4aec97dc56dcf951df878b9e801e47
[ "MIT" ]
5
2016-07-21T04:31:24.000Z
2020-03-03T20:23:01.000Z
defmodule Model.BuildMethodTest do use ExUnit.Case test "defines a method for building models without specifying attributes" do person = Person.build assert person.name == nil assert person.age == nil assert person.email == nil end test "defines a method for building models by specifying required attributes" do person = Person.build(name: "John DOE", email: "johndoe@example.com") assert person.name == "John DOE" assert person.age == nil assert person.email == "johndoe@example.com" end test "defines a method for building models by specifying one attribute at the time" do person = Person.build(name: "John DOE") assert person.name == "John DOE" assert person.age == nil assert person.email == nil person = Person.build(email: "johndoe@example.com") assert person.name == nil assert person.age == nil assert person.email == "johndoe@example.com" person = Person.build(age: 30) assert person.name == nil assert person.age == 30 assert person.email == nil end end
30.428571
88
0.688263
ffba82d66737c17fef74c6dabadf3fa3d0c50653
3,397
exs
Elixir
apps/artemis/test/artemis/contexts/user/list_users_test.exs
chrislaskey/atlas_platform
969aea95814f62d3471f93000ee5ad77edb9d1bf
[ "MIT" ]
10
2019-07-05T19:59:20.000Z
2021-05-23T07:36:11.000Z
apps/artemis/test/artemis/contexts/user/list_users_test.exs
chrislaskey/atlas_platform
969aea95814f62d3471f93000ee5ad77edb9d1bf
[ "MIT" ]
3
2019-03-05T23:55:09.000Z
2019-05-29T13:46:34.000Z
apps/artemis/test/artemis/contexts/user/list_users_test.exs
chrislaskey/atlas_platform
969aea95814f62d3471f93000ee5ad77edb9d1bf
[ "MIT" ]
4
2019-07-05T20:04:08.000Z
2021-05-13T16:28:33.000Z
defmodule Artemis.ListUsersTest do use Artemis.DataCase import Artemis.Factories alias Artemis.ListUsers alias Artemis.Repo alias Artemis.User describe "access permissions" do setup do insert_list(3, :user) {:ok, []} end test "returns empty list with no permissions" do user = Mock.user_without_permissions() result = ListUsers.call(user) assert length(result) == 0 end test "requires access:self permission to return own record" do user = Mock.user_with_permission("users:access:self") result = ListUsers.call(user) assert length(result) == 1 end test "requires access:all permission to return other records" do user = Mock.user_with_permission("users:access:all") result = ListUsers.call(user) total = Repo.all(User) assert length(result) == length(total) end end describe "call" do test "returns empty list when no users exist" do Repo.delete_all(User) assert ListUsers.call(Mock.system_user()) == [] end test "returns a list of users" do start = length(Repo.all(User)) count = 3 insert_list(count, :user) users = ListUsers.call(Mock.system_user()) assert length(users) == start + count end end describe "call - params" do setup do user = insert(:user) {:ok, user: user} end test "order" do insert_list(3, :user) params = %{order: "name"} ascending = ListUsers.call(params, Mock.system_user()) params = %{order: "-name"} descending = ListUsers.call(params, Mock.system_user()) assert ascending == Enum.reverse(descending) end test "paginate" do params = %{ paginate: true } response_keys = ListUsers.call(params, Mock.system_user()) |> Map.from_struct() |> Map.keys() pagination_keys = [ :entries, :page_number, :page_size, :total_entries, :total_pages ] assert response_keys == pagination_keys end test "preload" do users = ListUsers.call(Mock.system_user()) user = hd(users) assert !is_list(user.user_roles) assert user.user_roles.__struct__ == Ecto.Association.NotLoaded params = %{ preload: [:user_roles] } users = ListUsers.call(params, Mock.system_user()) user = hd(users) assert 
is_list(user.user_roles) end test "query - search" do insert(:user, name: "Four Six", email: "four-six") insert(:user, name: "Four Two", email: "four-two") insert(:user, name: "Five Six", email: "five-six") users = ListUsers.call(Mock.system_user()) assert length(users) > 2 # Succeeds when given a word part of a larger phrase params = %{ query: "Six" } users = ListUsers.call(params, Mock.system_user()) assert length(users) == 2 # Succeeds with partial value when it is start of a word params = %{ query: "four-" } users = ListUsers.call(params, Mock.system_user()) assert length(users) == 2 # Fails with partial value when it is not the start of a word params = %{ query: "our" } users = ListUsers.call(params, Mock.system_user()) assert length(users) == 0 end end end
21.23125
69
0.604062
ffba8899c227b00c0f0e7f85d334f18262fc2330
9,147
ex
Elixir
lib/teiserver/clans.ex
icexuick/teiserver
22f2e255e7e21f977e6b262acf439803626a506c
[ "MIT" ]
6
2021-02-08T10:42:53.000Z
2021-04-25T12:12:03.000Z
lib/teiserver/clans.ex
icexuick/teiserver
22f2e255e7e21f977e6b262acf439803626a506c
[ "MIT" ]
14
2021-08-01T02:36:14.000Z
2022-01-30T21:15:03.000Z
lib/teiserver/clans.ex
icexuick/teiserver
22f2e255e7e21f977e6b262acf439803626a506c
[ "MIT" ]
7
2021-05-13T12:55:28.000Z
2022-01-14T06:39:06.000Z
defmodule Teiserver.Clans do @moduledoc """ The Clans context. """ import Ecto.Query, warn: false alias Central.Helpers.QueryHelpers alias Central.Repo alias Teiserver.Clans.Clan alias Teiserver.Clans.ClanLib def clan_query(args) do clan_query(nil, args) end def clan_query(id, args) do ClanLib.query_clans() |> ClanLib.search(%{id: id}) |> ClanLib.search(args[:search]) |> ClanLib.preload(args[:preload]) |> ClanLib.order_by(args[:order_by]) |> QueryHelpers.select(args[:select]) end @doc """ Returns the list of clans. ## Examples iex> list_clans() [%Clan{}, ...] """ def list_clans(args \\ []) do clan_query(args) |> QueryHelpers.limit_query(args[:limit] || 50) |> Repo.all() end @doc """ Gets a single clan. Raises `Ecto.NoResultsError` if the Clan does not exist. ## Examples iex> get_clan!(123) %Clan{} iex> get_clan!(456) ** (Ecto.NoResultsError) """ def get_clan!(id) when not is_list(id) do ConCache.get_or_store(:teiserver_clan_cache_bang, id, fn -> clan_query(id, []) |> Repo.one!() end) end def get_clan!(args) do clan_query(nil, args) |> Repo.one!() end def get_clan!(id, args) do clan_query(id, args) |> Repo.one!() end # Uncomment this if needed, default files do not need this function # @doc """ # Gets a single clan. # Returns `nil` if the Clan does not exist. # ## Examples # iex> get_clan(123) # %Clan{} # iex> get_clan(456) # nil # """ # def get_clan(id, args \\ []) when not is_list(id) do # clan_query(id, args) # |> Repo.one # end @doc """ Creates a clan. ## Examples iex> create_clan(%{field: value}) {:ok, %Clan{}} iex> create_clan(%{field: bad_value}) {:error, %Ecto.Changeset{}} """ def create_clan(attrs \\ %{}) do %Clan{} |> Clan.changeset(attrs) |> Repo.insert() end @doc """ Updates a clan. 
## Examples iex> update_clan(clan, %{field: new_value}) {:ok, %Clan{}} iex> update_clan(clan, %{field: bad_value}) {:error, %Ecto.Changeset{}} """ def update_clan(%Clan{} = clan, attrs) do ConCache.dirty_delete(:teiserver_clan_cache_bang, clan.id) clan |> Clan.changeset(attrs) |> Repo.update() end @doc """ Deletes a Clan. ## Examples iex> delete_clan(clan) {:ok, %Clan{}} iex> delete_clan(clan) {:error, %Ecto.Changeset{}} """ def delete_clan(%Clan{} = clan) do Repo.delete(clan) end @doc """ Returns an `%Ecto.Changeset{}` for tracking clan changes. ## Examples iex> change_clan(clan) %Ecto.Changeset{source: %Clan{}} """ def change_clan(%Clan{} = clan) do Clan.changeset(clan, %{}) end alias Teiserver.Clans.ClanInvite alias Teiserver.Clans.ClanInviteLib @doc """ Returns the list of clan_invites. ## Examples iex> list_clan_invites() [%Location{}, ...] """ def list_clan_invites_by_clan(clan_id, args \\ []) do ClanInviteLib.get_clan_invites() |> ClanInviteLib.search(clan_id: clan_id) |> ClanInviteLib.search(args[:search]) |> ClanInviteLib.preload(args[:joins]) # |> ClanInviteLib.order_by(args[:order_by]) |> QueryHelpers.select(args[:select]) |> Repo.all() end def list_clan_invites_by_user(user_id, args \\ []) do ClanInviteLib.get_clan_invites() |> ClanInviteLib.search(user_id: user_id) |> ClanInviteLib.search(args[:search]) |> ClanInviteLib.preload(args[:joins]) # |> ClanInviteLib.order_by(args[:order_by]) |> QueryHelpers.select(args[:select]) |> Repo.all() end @doc """ Gets a single clan_invite. Raises `Ecto.NoResultsError` if the ClanInvite does not exist. 
## Examples iex> get_clan_invite!(123) %ClanInvite{} iex> get_clan_invite!(456) ** (Ecto.NoResultsError) """ def get_clan_invite!(clan_id, user_id) do ClanInviteLib.get_clan_invites() |> ClanInviteLib.search(%{clan_id: clan_id, user_id: user_id}) |> Repo.one!() end def get_clan_invite(clan_id, user_id) do ClanInviteLib.get_clan_invites() |> ClanInviteLib.search(%{clan_id: clan_id, user_id: user_id}) |> Repo.one() end @doc """ Creates a clan_invite. ## Examples iex> create_clan_invite(%{field: value}) {:ok, %ClanInvite{}} iex> create_clan_invite(%{field: bad_value}) {:error, %Ecto.Changeset{}} """ def create_clan_invite(attrs) do %ClanInvite{} |> ClanInvite.changeset(attrs) |> Repo.insert() end def create_clan_invite(clan_id, user_id) do %ClanInvite{} |> ClanInvite.changeset(%{ clan_id: clan_id, user_id: user_id }) |> Repo.insert() end @doc """ Updates a ClanInvite. ## Examples iex> update_clan_invite(clan_invite, %{field: new_value}) {:ok, %Ruleset{}} iex> update_clan_invite(clan_invite, %{field: bad_value}) {:error, %Ecto.Changeset{}} """ def update_clan_invite(%ClanInvite{} = clan_invite, attrs) do clan_invite |> ClanInvite.changeset(attrs) |> Repo.update() end @doc """ Deletes a ClanInvite. ## Examples iex> delete_clan_invite(clan_invite) {:ok, %ClanInvite{}} iex> delete_clan_invite(clan_invite) {:error, %Ecto.Changeset{}} """ def delete_clan_invite(%ClanInvite{} = clan_invite) do Repo.delete(clan_invite) end @doc """ Returns an `%Ecto.Changeset{}` for tracking clan_invite changes. ## Examples iex> change_clan_invite(clan_invite) %Ecto.Changeset{source: %ClanInvite{}} """ def change_clan_invite(%ClanInvite{} = clan_invite) do ClanInvite.changeset(clan_invite, %{}) end alias Teiserver.Clans.ClanMembership alias Teiserver.Clans.ClanMembershipLib @doc """ Returns the list of clan_memberships. ## Examples iex> list_clan_memberships() [%Location{}, ...] 
""" def list_clan_memberships_by_clan(clan_id, args \\ []) do ClanMembershipLib.get_clan_memberships() |> ClanMembershipLib.search(clan_id: clan_id) |> ClanMembershipLib.search(args[:search]) |> ClanMembershipLib.preload(args[:joins]) # |> ClanMembershipLib.order_by(args[:order_by]) |> QueryHelpers.select(args[:select]) |> Repo.all() end def list_clan_memberships_by_user(user_id, args \\ []) do ClanMembershipLib.get_clan_memberships() |> ClanMembershipLib.search(user_id: user_id) |> ClanMembershipLib.search(args[:search]) |> ClanMembershipLib.preload(args[:joins]) # |> ClanMembershipLib.order_by(args[:order_by]) |> QueryHelpers.select(args[:select]) |> Repo.all() end @doc """ Gets a single clan_membership. Raises `Ecto.NoResultsError` if the ClanMembership does not exist. ## Examples iex> get_clan_membership!(123) %ClanMembership{} iex> get_clan_membership!(456) ** (Ecto.NoResultsError) """ def get_clan_membership!(clan_id, user_id) do ClanMembershipLib.get_clan_memberships() |> ClanMembershipLib.search(%{clan_id: clan_id, user_id: user_id}) |> Repo.one!() end @spec get_clan_membership(Integer.t(), Integer.t()) :: ClanMembership.t() | nil def get_clan_membership(clan_id, user_id) do ClanMembershipLib.get_clan_memberships() |> ClanMembershipLib.search(%{clan_id: clan_id, user_id: user_id}) |> Repo.one() end @doc """ Creates a clan_membership. ## Examples iex> create_clan_membership(%{field: value}) {:ok, %ClanMembership{}} iex> create_clan_membership(%{field: bad_value}) {:error, %Ecto.Changeset{}} """ def create_clan_membership(attrs) do %ClanMembership{} |> ClanMembership.changeset(attrs) |> Repo.insert() end def create_clan_membership(clan_id, user_id) do %ClanMembership{} |> ClanMembership.changeset(%{ clan_id: clan_id, user_id: user_id }) |> Repo.insert() end @doc """ Updates a ClanMembership. 
## Examples iex> update_clan_membership(clan_membership, %{field: new_value}) {:ok, %Ruleset{}} iex> update_clan_membership(clan_membership, %{field: bad_value}) {:error, %Ecto.Changeset{}} """ def update_clan_membership(%ClanMembership{} = clan_membership, attrs) do clan_membership |> ClanMembership.changeset(attrs) |> Repo.update() end @doc """ Deletes a ClanMembership. ## Examples iex> delete_clan_membership(clan_membership) {:ok, %ClanMembership{}} iex> delete_clan_membership(clan_membership) {:error, %Ecto.Changeset{}} """ def delete_clan_membership(%ClanMembership{} = clan_membership) do Repo.delete(clan_membership) end @doc """ Returns an `%Ecto.Changeset{}` for tracking clan_membership changes. ## Examples iex> change_clan_membership(clan_membership) %Ecto.Changeset{source: %ClanMembership{}} """ def change_clan_membership(%ClanMembership{} = clan_membership) do ClanMembership.changeset(clan_membership, %{}) end end
21.573113
81
0.640975
ffbaa1d8ca3020faa520d8309dfab581980d1feb
697
ex
Elixir
lib/changelog/meta/feeds.ex
kentcdodds/changelog.com
e1c0d7ee5d47dc83dd443d623adb0f07e4acb28d
[ "MIT" ]
null
null
null
lib/changelog/meta/feeds.ex
kentcdodds/changelog.com
e1c0d7ee5d47dc83dd443d623adb0f07e4acb28d
[ "MIT" ]
null
null
null
lib/changelog/meta/feeds.ex
kentcdodds/changelog.com
e1c0d7ee5d47dc83dd443d623adb0f07e4acb28d
[ "MIT" ]
null
null
null
defmodule Changelog.Meta.Feeds do alias Changelog.{PostView} import Changelog.Router.Helpers def rss_feeds(assigns), do: assigns |> get defp get(%{podcast: podcast}) do shared() ++ [%{url: feed_url(conn(), :podcast, podcast.slug), title: "#{podcast.name} Podcast Feed"}] end defp get(%{view_module: PostView}) do shared() ++ [%{url: feed_url(conn(), :posts), title: "Posts Feed"}] end defp get(_), do: shared() defp shared do [ %{url: feed_url(conn(), :all), title: "Fire Hose Feed (All Shows + All Posts)"}, %{url: feed_url(conn(), :podcast, "master"), title: "Master Feed (All Shows)"} ] end defp conn do Changelog.Endpoint end end
24.034483
105
0.629842
ffbabc7703352bff7386e35a9ddc3034d052f915
2,085
ex
Elixir
clients/content/lib/google_api/content/v21/model/accounts_link_request.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/content/lib/google_api/content/v21/model/accounts_link_request.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/content/lib/google_api/content/v21/model/accounts_link_request.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.Content.V21.Model.AccountsLinkRequest do @moduledoc """ ## Attributes * `action` (*type:* `String.t`, *default:* `nil`) - Action to perform for this link. The "request" action is only available to select merchants. Acceptable values are: - "approve" - "remove" - "request" * `linkType` (*type:* `String.t`, *default:* `nil`) - Type of the link between the two accounts. Acceptable values are: - "channelPartner" - "eCommercePlatform" * `linkedAccountId` (*type:* `String.t`, *default:* `nil`) - The ID of the linked account. * `services` (*type:* `list(String.t)`, *default:* `nil`) - List of provided services. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :action => String.t(), :linkType => String.t(), :linkedAccountId => String.t(), :services => list(String.t()) } field(:action) field(:linkType) field(:linkedAccountId) field(:services, type: :list) end defimpl Poison.Decoder, for: GoogleApi.Content.V21.Model.AccountsLinkRequest do def decode(value, options) do GoogleApi.Content.V21.Model.AccountsLinkRequest.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.Content.V21.Model.AccountsLinkRequest do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
32.076923
148
0.686331
ffbac27a320057219eccd70b2c68a023741d7082
2,182
ex
Elixir
lib/on_flow/crypto.ex
dangdennis/on_flow
cf3e59cd47bcdf77ee02131e3f269e71c7277556
[ "MIT" ]
3
2021-04-09T06:31:43.000Z
2021-08-10T02:36:08.000Z
lib/on_flow/crypto.ex
dangdennis/on_flow
cf3e59cd47bcdf77ee02131e3f269e71c7277556
[ "MIT" ]
3
2021-04-09T07:09:32.000Z
2022-03-13T18:38:00.000Z
lib/on_flow/crypto.ex
dangdennis/on_flow
cf3e59cd47bcdf77ee02131e3f269e71c7277556
[ "MIT" ]
3
2021-08-14T16:17:07.000Z
2022-03-13T05:38:50.000Z
defmodule OnFlow.Crypto do use Bitwise @doc """ Generates a key pair. """ @spec generate_keys() :: %{public_key: String.t(), private_key: String.t()} def generate_keys do {<<4>> <> public_key, private_key} = :crypto.generate_key(:ecdh, :secp256r1) public_key = Base.encode16(public_key, case: :lower) private_key = Base.encode16(private_key, case: :lower) %{public_key: public_key, private_key: private_key} end @doc """ Signs the message with the given private key. Options are: * `:hash`, which defaults to `:sha3_256` * `:sign`, which defaults to `:secp256r1` """ def sign(msg, private_key, opts \\ []) do msg |> signature(private_key, opts) |> rs_pair() end @doc false def signature(msg, private_key, opts) do hash = Keyword.get(opts, :hash, :sha3_256) sign = Keyword.get(opts, :sign, :secp256r1) :crypto.sign(:ecdsa, hash, msg, [private_key, sign]) end @doc false def rs_pair(signature) do at = fn index -> <<n>> = binary_part(signature, index, 1) n end start_r = if (at.(1) &&& 0x80) == 1, do: 3, else: 2 length_r = at.(start_r + 1) start_s = start_r + 2 + length_r length_s = at.(start_s + 1) r = binary_part(signature, start_r + 2, length_r) s = binary_part(signature, start_s + 2, length_s) # 256 >> 3 n = 32 final_signature = :binary.copy(<<0>>, n * 2) offset_r = max(n - byte_size(r), 0) start_r = max(0, byte_size(r) - n) final_signature = copy_into(final_signature, r, offset_r, start_r) offset_s = max(2 * n - byte_size(s), n) start_s = max(0, byte_size(s) - n) final_signature = copy_into(final_signature, s, offset_s, start_s) final_signature end @doc false def copy_into(destination, src, destination_offset \\ 0, start_index \\ 0) do prefix = :binary.part(destination, 0, destination_offset) src = :binary.part(src, start_index, byte_size(src) - start_index) suffix_length = byte_size(destination) - destination_offset - byte_size(src) suffix = :binary.part(destination, destination_offset + byte_size(src), suffix_length) prefix <> src <> suffix end end
28.710526
90
0.647571
ffbac3bfd3786a4b6edb551c0e2af9d7f671de07
212
exs
Elixir
priv/repo/migrations/20210203074554_update_accounts_index.exs
jackjoe/mailgun_logger
7d5a1989afdeb215bcd3753671c61bc25ed4e522
[ "MIT" ]
64
2020-02-10T20:42:46.000Z
2021-11-16T10:47:50.000Z
priv/repo/migrations/20210203074554_update_accounts_index.exs
jackjoe/mailgun_logger
7d5a1989afdeb215bcd3753671c61bc25ed4e522
[ "MIT" ]
16
2020-02-10T20:45:57.000Z
2022-03-04T12:53:34.000Z
priv/repo/migrations/20210203074554_update_accounts_index.exs
jackjoe/mailgun_logger
7d5a1989afdeb215bcd3753671c61bc25ed4e522
[ "MIT" ]
4
2020-04-03T17:13:19.000Z
2020-07-17T12:56:31.000Z
defmodule MailgunLogger.Repo.Migrations.UpdateAccountsIndex do use Ecto.Migration def change do drop(unique_index(:accounts, :api_key)) create(unique_index(:accounts, [:api_key, :domain])) end end
23.555556
62
0.754717
ffbacc7c4e86b36fb056a4befa3e7ba2cd804256
692
ex
Elixir
apps/ewallet_config/lib/ewallet_config/vault.ex
amadeobrands/ewallet
505b7822721940a7b892a9b35c225e80cc8ac0b4
[ "Apache-2.0" ]
1
2018-12-07T06:21:21.000Z
2018-12-07T06:21:21.000Z
apps/ewallet_config/lib/ewallet_config/vault.ex
amadeobrands/ewallet
505b7822721940a7b892a9b35c225e80cc8ac0b4
[ "Apache-2.0" ]
null
null
null
apps/ewallet_config/lib/ewallet_config/vault.ex
amadeobrands/ewallet
505b7822721940a7b892a9b35c225e80cc8ac0b4
[ "Apache-2.0" ]
null
null
null
defmodule EWalletConfig.Vault do @moduledoc false use Cloak.Vault, otp_app: :ewallet_db @impl Cloak.Vault def init(config) do env = Mix.env() config = Keyword.put( config, :ciphers, default: {Cloak.Ciphers.AES.GCM, tag: "AES.GCM.V1", key: secret_key(env)} ) {:ok, config} end defp secret_key(:prod), do: decode_env("EWALLET_SECRET_KEY") defp secret_key(_), do: <<126, 194, 0, 33, 217, 227, 143, 82, 252, 80, 133, 89, 70, 211, 139, 150, 209, 103, 94, 240, 194, 108, 166, 100, 48, 144, 207, 242, 93, 244, 27, 144>> defp decode_env(var) do var |> System.get_env() |> Base.decode64!() end end
20.969697
94
0.586705
ffbadd1b6ecabccdd300f3541c0c62e0e9d49879
1,734
ex
Elixir
lib/archery_competition_web.ex
barnaba/archery-competition
cd5d302431429218aeb72c71fa96981667d8d95c
[ "MIT" ]
null
null
null
lib/archery_competition_web.ex
barnaba/archery-competition
cd5d302431429218aeb72c71fa96981667d8d95c
[ "MIT" ]
6
2018-07-11T21:01:51.000Z
2018-07-11T21:06:07.000Z
lib/archery_competition_web.ex
barnaba/archery-competition
cd5d302431429218aeb72c71fa96981667d8d95c
[ "MIT" ]
null
null
null
defmodule ArcheryCompetitionWeb do @moduledoc """ The entrypoint for defining your web interface, such as controllers, views, channels and so on. This can be used in your application as: use ArcheryCompetitionWeb, :controller use ArcheryCompetitionWeb, :view The definitions below will be executed for every view, controller, etc, so keep them short and clean, focused on imports, uses and aliases. Do NOT define functions inside the quoted expressions below. Instead, define any helper function in modules and import those modules here. """ def controller do quote do use Phoenix.Controller, namespace: ArcheryCompetitionWeb import Plug.Conn import ArcheryCompetitionWeb.Router.Helpers import ArcheryCompetitionWeb.Gettext end end def view do quote do use Phoenix.View, root: "lib/archery_competition_web/templates", namespace: ArcheryCompetitionWeb # Import convenience functions from controllers import Phoenix.Controller, only: [get_flash: 2, view_module: 1] # Use all HTML functionality (forms, tags, etc) use Phoenix.HTML import ArcheryCompetitionWeb.Router.Helpers import ArcheryCompetitionWeb.ErrorHelpers import ArcheryCompetitionWeb.Gettext end end def router do quote do use Phoenix.Router import Plug.Conn import Phoenix.Controller end end def channel do quote do use Phoenix.Channel import ArcheryCompetitionWeb.Gettext end end @doc """ When used, dispatch to the appropriate controller/view/etc. """ defmacro __using__(which) when is_atom(which) do apply(__MODULE__, which, []) end end
25.5
70
0.710496
ffbb40494e0e0c55c246e7c4bbf54273c5d135b7
1,424
ex
Elixir
lib/google_api/you_tube/v3/model/video_suggestions.ex
jesteracer/ytb
67e3cab899e4f69e586383f7be2c3855c6beea49
[ "Apache-2.0" ]
null
null
null
lib/google_api/you_tube/v3/model/video_suggestions.ex
jesteracer/ytb
67e3cab899e4f69e586383f7be2c3855c6beea49
[ "Apache-2.0" ]
null
null
null
lib/google_api/you_tube/v3/model/video_suggestions.ex
jesteracer/ytb
67e3cab899e4f69e586383f7be2c3855c6beea49
[ "Apache-2.0" ]
null
null
null
# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the &quot;License&quot;); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an &quot;AS IS&quot; BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This class is auto generated by the swagger code generator program. # https://github.com/swagger-api/swagger-codegen.git # Do not edit the class manually. defmodule GoogleApi.YouTube.V3.Model.VideoSuggestions do @moduledoc """ Specifies suggestions on how to improve video content, including encoding hints, tag suggestions, and editor suggestions. """ @derive [Poison.Encoder] defstruct [ :"editorSuggestions", :"processingErrors", :"processingHints", :"processingWarnings", :"tagSuggestions" ] end defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.VideoSuggestions do import GoogleApi.YouTube.V3.Deserializer def decode(value, options) do value |> deserialize(:"tagSuggestions", :list, GoogleApi.YouTube.V3.Model.VideoSuggestionsTagSuggestion, options) end end
33.116279
123
0.754213
ffbb5b0614a0cf16a62856e17d6d066d99e1efef
3,574
ex
Elixir
lib/ecto/repo/supervisor.ex
rbishop/ecto
a8a3215c9e2e35f7556f54c8d47d78a3670796d8
[ "Apache-2.0" ]
1
2015-08-24T06:01:51.000Z
2015-08-24T06:01:51.000Z
deps/ecto/lib/ecto/repo/supervisor.ex
Thrashmandicoot/my-first-phoenix-app
7cdfe34a1d874cbce8dba17e9824a5c91e3b47da
[ "MIT" ]
null
null
null
deps/ecto/lib/ecto/repo/supervisor.ex
Thrashmandicoot/my-first-phoenix-app
7cdfe34a1d874cbce8dba17e9824a5c91e3b47da
[ "MIT" ]
null
null
null
defmodule Ecto.Repo.Supervisor do @moduledoc false use Supervisor @doc """ Starts the repo supervisor. """ def start_link(repo, otp_app, adapter, opts) do name = opts[:name] || Application.get_env(otp_app, repo)[:name] || repo Supervisor.start_link(__MODULE__, {name, repo, otp_app, adapter, opts}, [name: name]) end @doc """ Retrieves and normalizes the configuration for `repo` in `otp_app`. """ def config(repo, otp_app, custom) do if config = Application.get_env(otp_app, repo) do config = Keyword.merge(config, custom) {url, config} = Keyword.pop(config, :url) [otp_app: otp_app, repo: repo] ++ Keyword.merge(config, parse_url(url || "")) else raise ArgumentError, "configuration for #{inspect repo} not specified in #{inspect otp_app} environment" end end @doc """ Parses the OTP configuration for compile time. """ def parse_config(repo, opts) do otp_app = Keyword.fetch!(opts, :otp_app) config = Application.get_env(otp_app, repo, []) adapter = opts[:adapter] || config[:adapter] unless adapter do raise ArgumentError, "missing :adapter configuration in " <> "config #{inspect otp_app}, #{inspect repo}" end unless Code.ensure_loaded?(adapter) do raise ArgumentError, "adapter #{inspect adapter} was not compiled, " <> "ensure it is correct and it is included as a project dependency" end {otp_app, adapter, pool(repo, config), config} end defp pool(repo, config) do pool = Keyword.get(config, :pool, Ecto.Pools.Poolboy) name = Keyword.get(config, :pool_name, default_pool_name(repo, config)) timeout = Keyword.get(config, :timeout, 5000) {pool, name, timeout} end defp default_pool_name(repo, config) do Module.concat(Keyword.get(config, :name, repo), Pool) end @doc """ Parses an Ecto URL allowed in configuration. 
The format must be: "ecto://username:password@hostname:port/database" or {:system, "DATABASE_URL"} """ def parse_url(""), do: [] def parse_url({:system, env}) when is_binary(env) do parse_url(System.get_env(env) || "") end def parse_url(url) when is_binary(url) do info = url |> URI.decode() |> URI.parse() unless info.host do raise Ecto.InvalidURLError, url: url, message: "host is not present" end unless String.match? info.path, ~r"^/([^/])+$" do raise Ecto.InvalidURLError, url: url, message: "path should be a database name" end if info.userinfo do destructure [username, password], String.split(info.userinfo, ":") end database = String.slice(info.path, 1, String.length(info.path)) opts = [username: username, password: password, database: database, hostname: info.host, port: info.port] Enum.reject(opts, fn {_k, v} -> is_nil(v) end) end ## Callbacks def init({name, repo, otp_app, adapter, opts}) do opts = config(repo, otp_app, opts) {default_pool, _, _} = repo.__pool__ opts = opts |> Keyword.delete(:name) |> Keyword.put_new(:pool, default_pool) |> Keyword.put_new(:pool_name, Module.concat(name, Pool)) children = [ supervisor(adapter, [repo, opts]) ] if Keyword.get(opts, :query_cache_owner, repo == repo.__query_cache__) do :ets.new(repo.__query_cache__, [:set, :public, :named_table, read_concurrency: true]) end supervise(children, strategy: :one_for_one) end end
28.141732
92
0.63906
ffbb6d8714ec82ca5a61d345fd6a65abf39a914d
1,651
ex
Elixir
clients/calendar/lib/google_api/calendar/v3/model/conference_properties.ex
matehat/elixir-google-api
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
[ "Apache-2.0" ]
1
2018-12-03T23:43:10.000Z
2018-12-03T23:43:10.000Z
clients/calendar/lib/google_api/calendar/v3/model/conference_properties.ex
matehat/elixir-google-api
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
[ "Apache-2.0" ]
null
null
null
clients/calendar/lib/google_api/calendar/v3/model/conference_properties.ex
matehat/elixir-google-api
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the &quot;License&quot;); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an &quot;AS IS&quot; BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This class is auto generated by the elixir code generator program. # Do not edit the class manually. defmodule GoogleApi.Calendar.V3.Model.ConferenceProperties do @moduledoc """ ## Attributes * `allowedConferenceSolutionTypes` (*type:* `list(String.t)`, *default:* `nil`) - The types of conference solutions that are supported for this calendar. The possible values are: - "eventHangout" - "eventNamedHangout" - "hangoutsMeet" Optional. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :allowedConferenceSolutionTypes => list(String.t()) } field(:allowedConferenceSolutionTypes, type: :list) end defimpl Poison.Decoder, for: GoogleApi.Calendar.V3.Model.ConferenceProperties do def decode(value, options) do GoogleApi.Calendar.V3.Model.ConferenceProperties.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.Calendar.V3.Model.ConferenceProperties do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
32.372549
157
0.73834
ffbb898b42e514a102440db2c0234de24fa79575
28,101
ex
Elixir
lib/ash/api/api.ex
michaelst/ash
650b256b2a4396cf068ca31d5364e43fa3c10032
[ "MIT" ]
null
null
null
lib/ash/api/api.ex
michaelst/ash
650b256b2a4396cf068ca31d5364e43fa3c10032
[ "MIT" ]
null
null
null
lib/ash/api/api.ex
michaelst/ash
650b256b2a4396cf068ca31d5364e43fa3c10032
[ "MIT" ]
null
null
null
defmodule Ash.Api do @moduledoc """ An Api allows you to interact with your resources, and holds non-resource-specific configuration. For example, the json api extension adds an api extension that lets you toggle authorization on/off for all resources in that Api. You include them in an Api like so: ```elixir defmodule MyApp.Api do use Ash.Api resources do resource OneResource resource SecondResource end end ``` Then you can interact through that Api with the actions that those resources expose. For example: `MyApp.Api.create(changeset)`, or `MyApp.Api.read(query)`. Corresponding actions must be defined in your resources in order to call them through the Api. ## Interface The functions documented here can be used to call any action on any resource in the Api. For example, `MyApi.read(Myresource, [...])`. Additionally, you can define a `code_interface` on each resource to be exposed in the Api module. See the resource DSL documentation for more. """ use Ash.Dsl, default_extensions: [extensions: [Ash.Api.Dsl]] import Ash.OptionsHelpers, only: [merge_schemas: 3] alias Ash.Actions.{Create, Destroy, Read, Update} alias Ash.Error.Invalid.{ NoPrimaryAction, NoSuchAction, NoSuchResource } alias Ash.Error.Query.NotFound alias Ash.Dsl.Extension require Ash.Query @type t() :: module @type page_request :: :next | :prev | :first | :last | integer @global_opts [ verbose?: [ type: :boolean, default: false, doc: "Log engine operations (very verbose!)" ], action: [ type: :any, doc: "The action to use, either an Action struct or the name of the action" ], authorize?: [ type: {:in, [true, false, nil]}, doc: "If an actor option is provided (even if it is `nil`), authorization happens automatically. If not, this flag can be used to authorize with no user." ], stacktraces?: [ type: :boolean, default: true, doc: "For Ash errors, wether or not each error has a stacktrace. See the error_handling guide for more." 
], tenant: [ type: :any, doc: "A tenant to set on the query or changeset" ], actor: [ type: :any, doc: "If an actor is provided, it will be used in conjunction with the authorizers of a resource to authorize access" ] ] @read_opts_schema merge_schemas( [ page: [ doc: "Nested pagination options, see the section on pagination for more", type: {:custom, __MODULE__, :page_opts, []} ], return_query?: [ type: :boolean, doc: """ If `true`, the query that was ultimately used is returned as a third tuple element. The query goes through many potential changes during a request, potentially adding authorization filters, or replacing relationships for other data layers with their corresponding ids. This option can be used to get the true query that was sent to the data layer. """, default: false ] ], @global_opts, "Global Options" ) @doc false def read_opts_schema, do: @read_opts_schema @offset_page_opts [ offset: [ type: :non_neg_integer, doc: "The number of records to skip from the beginning of the query" ], limit: [ type: :pos_integer, doc: "The number of records to include in the page" ], filter: [ type: :any, doc: """ A filter to apply for pagination purposes, that should not be considered in the full count. This is used by the liveview paginator to only fetch the records that were *already* on the page when refreshing data, to avoid pages jittering. """ ], count: [ type: :boolean, doc: "Whether or not to return the page with a full count of all records" ] ] @keyset_page_opts [ before: [ type: :string, doc: "Get records that appear before the provided keyset (mutually exclusive with `after`)" ], after: [ type: :string, doc: "Get records that appear after the provided keyset (mutually exclusive with `before`)" ], limit: [ type: :pos_integer, doc: "How many records to include in the page" ], filter: [ type: :any, doc: "See the `filter` option for offset pagination, this behaves the same." 
], count: [ type: :boolean, doc: "Whether or not to return the page with a full count of all records" ] ] @doc false def page_opts(page_opts) do if page_opts == false do {:ok, false} else if page_opts[:after] || page_opts[:before] do validate_or_error(page_opts, @keyset_page_opts) else if page_opts[:offset] do validate_or_error(page_opts, @offset_page_opts) else validate_or_error(page_opts, @keyset_page_opts) end end end end defp validate_or_error(opts, schema) do case Ash.OptionsHelpers.validate(opts, schema) do {:ok, value} -> {:ok, value} {:error, error} -> {:error, Exception.message(error)} end end @load_opts_schema merge_schemas([], @global_opts, "Global Options") @get_opts_schema [ load: [ type: :any, doc: "Fields or relationships to load in the query. See `Ash.Query.load/2`" ], tenant: [ type: :any, doc: "The tenant to set on the query being run" ], action: [ type: :atom, doc: "The action to use for reading the data" ], context: [ type: :any, doc: "Context to be set on the query being run" ] ] |> merge_schemas(@global_opts, "Global Options") @shared_created_update_and_destroy_opts_schema [ return_notifications?: [ type: :boolean, default: false, doc: """ Use this if you're running ash actions in your own transaction and you want notifications to happen still. If a transaction is ongoing, and this is false, notifications will be discarded, otherwise the return value is `{:ok, result, notifications}` (or `{:ok, notifications}`) To send notifications later, use `Ash.Notifier.notify(notifications)`. It sends any notifications that can be sent, and returns the rest. """ ] ] @create_opts_schema [ upsert?: [ type: :boolean, default: false, doc: "If a conflict is found based on the primary key, the record is updated in the database (requires upsert support)" ], upsert_identity: [ type: :atom, doc: "The identity to use when detecting conflicts for upsert?. By default, the primary key is used. 
Has no effect if `upsert?: true` is not provided" ] ] |> merge_schemas(@global_opts, "Global Options") |> merge_schemas( @shared_created_update_and_destroy_opts_schema, "Shared create/update/destroy Options" ) @doc false def create_opts_schema, do: @create_opts_schema @update_opts_schema [] |> merge_schemas(@global_opts, "Global Options") |> merge_schemas( @shared_created_update_and_destroy_opts_schema, "Shared create/update/destroy Options" ) @doc false def update_opts_schema, do: @update_opts_schema @destroy_opts_schema [] |> merge_schemas(@global_opts, "Global Opts") |> merge_schemas( @shared_created_update_and_destroy_opts_schema, "Shared create/update/destroy Options" ) def destroy_opts_schema, do: @destroy_opts_schema @doc """ Get a record by a primary key. See `c:get/3` for more. """ @callback get!( resource :: Ash.Resource.t(), id_or_filter :: term(), params :: Keyword.t() ) :: Ash.Resource.record() | no_return @doc """ Get a record by a primary key. For a resource with a composite primary key, pass a keyword list, e.g `MyApi.get(MyResource, first_key: 1, second_key: 2)` #{Ash.OptionsHelpers.docs(@get_opts_schema)} """ @callback get( resource :: Ash.Resource.t(), id_or_filter :: term(), params :: Keyword.t() ) :: {:ok, Ash.Resource.record()} | {:error, term} @doc """ Run an ash query, raising on more than one result. See `c:read_one/2` for more. """ @callback read_one!(Ash.Query.t() | Ash.Resource.t(), params :: Keyword.t()) :: Ash.Resource.record() | {Ash.Resource.record(), Ash.Query.t()} | no_return @doc """ Run a query on a resource, but fail on more than one result This is useful if you have a query that doesn't include a primary key but you know that it will only ever return a single result """ @callback read_one(Ash.Query.t() | Ash.Resource.t(), params :: Keyword.t()) :: {:ok, Ash.Resource.record()} | {:ok, Ash.Resource.record(), Ash.Query.t()} | {:error, term} @doc """ Run an ash query. See `c:read/2` for more. 
""" @callback read!(Ash.Query.t() | Ash.Resource.t(), params :: Keyword.t()) :: list(Ash.Resource.record()) | {list(Ash.Resource.record()), Ash.Query.t()} | no_return @doc """ Run a query on a resource. For more information, on building a query, see `Ash.Query`. #{Ash.OptionsHelpers.docs(@read_opts_schema)} ## Pagination #### Limit/offset pagination #{Ash.OptionsHelpers.docs(@offset_page_opts)} #### Keyset pagination #{Ash.OptionsHelpers.docs(@keyset_page_opts)} """ @callback read(Ash.Query.t(), params :: Keyword.t()) :: {:ok, list(Ash.Resource.record())} | {:ok, list(Ash.Resource.record()), Ash.Query.t()} | {:error, term} @doc """ Fetch a page relative to the provided page. """ @callback page!(Ash.Page.page(), page_request) :: Ash.Page.page() | no_return @doc """ Fetch a page relative to the provided page. A page is the return value of a paginated action called via `c:read/2`. """ @callback page(Ash.Page.page(), page_request) :: {:ok, Ash.Page.page()} | {:error, term} @type load_statement :: Ash.Query.t() | [atom] | atom | Keyword.t() | list(atom | {atom, atom | Keyword.t()}) @doc """ Load fields or relationships on already fetched records. See `c:load/3` for more information. """ @callback load!( record_or_records :: Ash.Resource.record() | [Ash.Resource.record()], query :: load_statement(), opts :: Keyword.t() ) :: Ash.Resource.record() | [Ash.Resource.record()] | no_return @doc """ Load fields or relationships on already fetched records. Accepts a list of non-loaded fields and loads them on the provided records or a query, in which case the loaded fields of the query are used. Relationship loads can be nested, for example: `MyApi.load(record, [posts: [:comments]])`. #{Ash.OptionsHelpers.docs(@load_opts_schema)} """ @callback load( record_or_records :: Ash.Resource.record() | [Ash.Resource.record()], query :: load_statement(), opts :: Keyword.t() ) :: {:ok, Ash.Resource.record() | [Ash.Resource.record()]} | {:error, term} @doc """ Create a record. 
See `c:create/2` for more information. """ @callback create!(Ash.Changeset.t(), params :: Keyword.t()) :: Ash.Resource.record() | no_return @doc """ Create a record. #{Ash.OptionsHelpers.docs(@create_opts_schema)} """ @callback create(Ash.Changeset.t(), params :: Keyword.t()) :: {:ok, Ash.Resource.record()} | {:error, term} @doc """ Update a record. See `c:update/2` for more information. """ @callback update!(Ash.Changeset.t(), params :: Keyword.t()) :: Ash.Resource.record() | no_return @doc """ Update a record. #{Ash.OptionsHelpers.docs(@update_opts_schema)} """ @callback update(Ash.Changeset.t(), params :: Keyword.t()) :: {:ok, Ash.Resource.record()} | {:error, term} @doc """ Destroy a record. See `c:destroy/2` for more information. """ @callback destroy!(Ash.Changeset.t() | Ash.Resource.record(), params :: Keyword.t()) :: :ok | no_return @doc """ Destroy a record. #{Ash.OptionsHelpers.docs(@destroy_opts_schema)} """ @callback destroy(Ash.Changeset.t() | Ash.Resource.record(), params :: Keyword.t()) :: :ok | {:error, term} @doc """ Refetches a record by primary key. See `c:reload/1` for more. """ @callback reload!(record :: Ash.Resource.record(), params :: Keyword.t()) :: Ash.Resource.record() | no_return @doc """ Refetches a record by primary key. 
""" @callback reload(record :: Ash.Resource.record()) :: {:ok, Ash.Resource.record()} | {:error, term} def handle_opts(_) do quote do @behaviour Ash.Api end end def handle_before_compile(_) do quote do use Ash.Api.Interface end end def resource(api, resource) do api |> resource_references() |> Enum.find(&(&1.resource == resource || &1.as == resource)) |> case do nil -> {:error, NoSuchResource.exception(resource: resource)} reference -> {:ok, reference.resource} end end @spec resources(Ash.Api.t()) :: [Ash.Resource.t()] def resources(api) do api |> Extension.get_entities([:resources]) |> Enum.map(& &1.resource) end @spec resource_references(Ash.Api.t()) :: [Ash.Api.ResourceReference.t()] def resource_references(api) do Extension.get_entities(api, [:resources]) end @doc false @spec get!(Ash.Api.t(), Ash.Resource.t(), term(), Keyword.t()) :: Ash.Resource.record() | no_return def get!(api, resource, id, opts \\ []) do opts = Ash.OptionsHelpers.validate!(opts, @get_opts_schema) api |> get(resource, id, opts) |> unwrap_or_raise!(opts[:stacktraces?]) end @doc false @spec get(Ash.Api.t(), Ash.Resource.t(), term(), Keyword.t()) :: {:ok, Ash.Resource.record()} | {:error, term} def get(api, resource, id, opts) do with {:ok, opts} <- Ash.OptionsHelpers.validate(opts, @get_opts_schema), {:ok, resource} <- Ash.Api.resource(api, resource), {:ok, filter} <- Ash.Filter.get_filter(resource, id) do query = resource |> Ash.Query.new(api) |> Ash.Query.set_tenant(opts[:tenant]) |> Ash.Query.filter(^filter) |> Ash.Query.load(opts[:load] || []) |> Ash.Query.set_context(opts[:context] || %{}) query = if Ash.DataLayer.data_layer_can?(query.resource, :limit) do Ash.Query.limit(query, 2) else query end query |> api.read(Keyword.take(opts, Keyword.keys(@read_opts_schema))) |> case do {:ok, %{results: [single_result]}} -> {:ok, single_result} {:ok, %{results: []}} -> {:error, NotFound.exception( primary_key: filter, resource: resource )} {:ok, %{results: results}} -> {:error, 
Ash.Error.Invalid.MultipleResults.exception( count: Enum.count(results), query: query, at_least?: true )} {:ok, [single_result]} -> {:ok, single_result} {:ok, []} -> {:error, NotFound.exception( primary_key: filter, resource: resource )} {:error, error} -> {:error, error} {:ok, results} when is_list(results) -> {:error, Ash.Error.Invalid.MultipleResults.exception(count: Enum.count(results), query: query)} end else {:error, error} -> {:error, error} end end def page!(api, keyset, request) do {_, opts} = keyset.rerun api |> page(keyset, request) |> unwrap_or_raise!(opts[:stacktraces?]) end def page(_, %Ash.Page.Keyset{results: []} = page, :next) do {:ok, page} end def page(_, %Ash.Page.Keyset{results: []} = page, :prev) do {:ok, page} end def page(_, %Ash.Page.Keyset{}, n) when is_integer(n) do {:error, "Cannot seek to a specific page with keyset based pagination"} end def page( api, %Ash.Page.Keyset{results: results, rerun: {query, opts}}, :next ) do last_keyset = results |> :lists.last() |> Map.get(:__metadata__) |> Map.get(:keyset) new_page_opts = opts[:page] |> Keyword.delete(:before) |> Keyword.put(:after, last_keyset) read(api, query, Keyword.put(opts, :page, new_page_opts)) end def page(api, %Ash.Page.Keyset{results: results, rerun: {query, opts}}, :prev) do first_keyset = results |> List.first() |> Map.get(:__metadata__) |> Map.get(:keyset) new_page_opts = opts[:page] |> Keyword.put(:before, first_keyset) |> Keyword.delete(:after) read(api, query, Keyword.put(opts, :page, new_page_opts)) end def page(api, %Ash.Page.Keyset{rerun: {query, opts}}, :first) do page_opts = if opts[:page][:count] do [count: true] else [] end read(api, query, Keyword.put(opts, :page, page_opts)) end def page( api, %Ash.Page.Offset{count: count, limit: limit, offset: offset, rerun: {query, opts}}, request ) do page_opts = case request do :next -> [offset: offset + limit, limit: limit] :prev -> [offset: max(offset - limit, 0), limit: limit] :first -> [offset: 0, limit: limit] :last -> 
if count do [offset: count - limit, limit: limit] else [offset: 0, limit: limit] end page_num when is_integer(page_num) -> [offset: (page_num - 1) * limit, limit: limit] end page_opts = if opts[:page][:count] do Keyword.put(page_opts, :count, true) else page_opts end if request == :last && !count do {:error, "Cannot fetch last page without counting"} else read(api, query, Keyword.put(opts, :page, page_opts)) end end @doc false def load!(api, data, query, opts \\ []) do opts = Ash.OptionsHelpers.validate!(opts, @load_opts_schema) api |> load(data, query, opts) |> unwrap_or_raise!(opts[:stacktraces?]) end @doc false def load(api, data, query, opts \\ []) def load(_, [], _, _), do: {:ok, []} def load(_, nil, _, _), do: {:ok, nil} def load(_, {:error, error}, _, _), do: {:error, error} def load(api, {:ok, values}, query, opts) do load(api, values, query, opts) end def load(api, %struct{results: results} = page, query, opts) when struct in [Ash.Page.Offset, Ash.Page.Keyset] do api |> load(results, query, opts) |> case do {:ok, results} -> %{page | results: results} {:error, error} -> {:error, error} end end def load(api, data, query, opts) when not is_list(data) do api |> load(List.wrap(data), query, opts) |> case do {:ok, data} -> {:ok, Enum.at(data, 0)} {:error, error} -> {:error, error} end end def load(api, [%resource{} = record | _] = data, query, opts) do query = case query do %Ash.Query{} = query -> Ash.Query.set_tenant(query, query.tenant || Map.get(record.__metadata__, :tenant)) keyword -> resource |> Ash.Query.new(api) |> Ash.Query.set_tenant(Map.get(record.__metadata__, :tenant)) |> Ash.Query.load(keyword) end with %{valid?: true} <- query, {:ok, action} <- get_action(query.resource, opts, :read, query.action), {:ok, opts} <- Ash.OptionsHelpers.validate(opts, @load_opts_schema) do Read.run(query, action, Keyword.put(opts, :initial_data, data)) else {:error, error} -> {:error, error} %{errors: errors} -> {:error, errors} end end @doc false @spec 
read!(Ash.Api.t(), Ash.Query.t() | Ash.Resource.t(), Keyword.t()) :: list(Ash.Resource.record()) | no_return def read!(api, query, opts \\ []) do opts = Ash.OptionsHelpers.validate!(opts, @read_opts_schema) api |> read(query, opts) |> unwrap_or_raise!(opts[:stacktraces?]) end @doc false @spec read(Ash.Api.t(), Ash.Query.t() | Ash.Resource.t(), Keyword.t()) :: {:ok, list(Ash.Resource.record()) | Ash.Page.page()} | {:error, term} def read(api, query, opts \\ []) def read(api, resource, opts) when is_atom(resource) do read(api, Ash.Query.new(resource, api), opts) end def read(api, query, opts) do query = Ash.Query.set_api(query, api) with {:ok, opts} <- Ash.OptionsHelpers.validate(opts, @read_opts_schema), {:ok, action} <- get_action(query.resource, opts, :read, query.action) do Read.run(query, action, opts) else {:error, error} -> {:error, error} end end @doc false def read_one!(api, query, opts) do api |> read_one(query, opts) |> unwrap_or_raise!(opts[:stacktraces?]) end @doc false def read_one(api, query, opts) do api |> read(query, opts) |> unwrap_one() end defp unwrap_one({:error, error}) do {:error, error} end defp unwrap_one({:ok, result, query}) do case unwrap_one({:ok, result}) do {:ok, result} -> {:ok, result, query} {:error, %Ash.Error.Invalid.MultipleResults{} = error} -> {:error, %{error | query: query}} {:error, error} -> {:error, error} end end defp unwrap_one({:ok, result}) do case unwrap_one(result) do {:ok, result} -> {:ok, result} {:error, error} -> {:error, error} end end defp unwrap_one(%{results: results}) do unwrap_one(results) end defp unwrap_one([]), do: {:ok, nil} defp unwrap_one([result]), do: {:ok, result} defp unwrap_one([_ | _] = results) do error = Ash.Error.Invalid.MultipleResults.exception( count: Enum.count(results), at_least?: true ) {:error, error} end @doc false @spec create!(Ash.Api.t(), Ash.Changeset.t(), Keyword.t()) :: Ash.Resource.record() | no_return def create!(api, changeset, opts) do opts = 
Ash.OptionsHelpers.validate!(opts, @create_opts_schema) api |> create(changeset, opts) |> unwrap_or_raise!(opts[:stacktraces?]) end @doc false @spec create(Ash.Api.t(), Ash.Changeset.t(), Keyword.t()) :: {:ok, Ash.Resource.record(), list(Ash.Notifier.Notification.t())} | {:ok, Ash.Resource.record()} | {:error, term} def create(api, changeset, opts) do with {:ok, opts} <- Ash.OptionsHelpers.validate(opts, @create_opts_schema), {:ok, resource} <- Ash.Api.resource(api, changeset.resource), {:ok, action} <- get_action(resource, opts, :create, changeset.action) do Create.run(api, changeset, action, opts) end end @doc false def update!(api, changeset, opts) do opts = Ash.OptionsHelpers.validate!(opts, @update_opts_schema) api |> update(changeset, opts) |> unwrap_or_raise!(opts[:stacktraces?]) end @doc false @spec update(Ash.Api.t(), Ash.Resource.record(), Keyword.t()) :: {:ok, Ash.Resource.record(), list(Ash.Notifier.Notification.t())} | {:ok, Ash.Resource.record()} | {:error, term} def update(api, changeset, opts) do with {:ok, opts} <- Ash.OptionsHelpers.validate(opts, @update_opts_schema), {:ok, resource} <- Ash.Api.resource(api, changeset.resource), {:ok, action} <- get_action(resource, opts, :update, changeset.action) do Update.run(api, changeset, action, opts) end end @doc false @spec destroy!(Ash.Api.t(), Ash.Changeset.t() | Ash.Resource.record(), Keyword.t()) :: :ok | no_return def destroy!(api, changeset, opts) do opts = Ash.OptionsHelpers.validate!(opts, @destroy_opts_schema) api |> destroy(changeset, opts) |> unwrap_or_raise!(opts[:stacktraces?]) end @doc false @spec destroy(Ash.Api.t(), Ash.Changeset.t() | Ash.Resource.record(), Keyword.t()) :: {:ok, list(Ash.Notifier.Notification.t())} | :ok | {:error, term} def destroy(api, %Ash.Changeset{resource: resource} = changeset, opts) do with {:ok, opts} <- Ash.OptionsHelpers.validate(opts, @destroy_opts_schema), {:ok, resource} <- Ash.Api.resource(api, resource), {:ok, action} <- get_action(resource, opts, 
:destroy, changeset.action) do Destroy.run(api, changeset, action, opts) end end def destroy(api, record, opts) do destroy(api, Ash.Changeset.new(record), opts) end defp get_action(resource, params, type, preset \\ nil) do case Keyword.fetch(params, :action) do {:ok, %_{} = action} -> {:ok, action} {:ok, nil} -> if preset do get_action(resource, Keyword.put(params, :action, preset), type) else get_action(resource, Keyword.delete(params, :action), type) end {:ok, action} -> case Ash.Resource.Info.action(resource, action, type) do nil -> {:error, NoSuchAction.exception(resource: resource, action: action, type: type)} action -> {:ok, action} end :error -> if preset do get_action(resource, Keyword.put(params, :action, preset), type) else case Ash.Resource.Info.primary_action(resource, type) do nil -> if Ash.Resource.Info.resource?(resource) do {:error, NoSuchResource.exception(resource: resource)} else {:error, NoPrimaryAction.exception(resource: resource, type: type)} end action -> {:ok, action} end end end end defp unwrap_or_raise!(:ok, _), do: :ok defp unwrap_or_raise!({:ok, result}, _), do: result defp unwrap_or_raise!({:ok, result, other}, _), do: {result, other} defp unwrap_or_raise!({:error, error}, stacktraces?) do exception = Ash.Error.to_ash_error(error) exception = if stacktraces? do exception else Ash.Error.clear_stacktraces(exception) end case exception do %{stacktraces?: _} -> if stacktraces? do reraise %{exception | stacktraces?: stacktraces?}, Map.get(exception.stacktrace || %{}, :stacktrace) else raise %{exception | stacktraces?: stacktraces?} end _ -> raise exception end end end
29.705074
173
0.569553
ffbbd002e8364dd267b87df9e4059d3c9e1c40fe
25,012
exs
Elixir
installer/test/phx_new_test.exs
aaronrenner/phoenix
9148d0d3e248403a5a319d306826961db426fc32
[ "MIT" ]
1
2022-02-09T00:31:50.000Z
2022-02-09T00:31:50.000Z
installer/test/phx_new_test.exs
aaronrenner/phoenix
9148d0d3e248403a5a319d306826961db426fc32
[ "MIT" ]
null
null
null
installer/test/phx_new_test.exs
aaronrenner/phoenix
9148d0d3e248403a5a319d306826961db426fc32
[ "MIT" ]
null
null
null
Code.require_file "mix_helper.exs", __DIR__ defmodule Mix.Tasks.Phx.NewTest do use ExUnit.Case, async: false import MixHelper import ExUnit.CaptureIO @app_name "phx_blog" setup do # The shell asks to install deps. # We will politely say not. send self(), {:mix_shell_input, :yes?, false} :ok end test "assets are in sync with installer" do for file <- ~w(favicon.ico phoenix.png) do assert File.read!("../priv/static/#{file}") == File.read!("templates/phx_static/#{file}") end end test "returns the version" do Mix.Tasks.Phx.New.run(["-v"]) assert_received {:mix_shell, :info, ["Phoenix installer v" <> _]} end test "new with defaults" do in_tmp "new with defaults", fn -> Mix.Tasks.Phx.New.run([@app_name]) assert_file "phx_blog/README.md" assert_file "phx_blog/.formatter.exs", fn file -> assert file =~ "import_deps: [:ecto, :phoenix]" assert file =~ "inputs: [\"*.{ex,exs}\", \"priv/*/seeds.exs\", \"{config,lib,test}/**/*.{ex,exs}\"]" assert file =~ "subdirectories: [\"priv/*/migrations\"]" end assert_file "phx_blog/mix.exs", fn file -> assert file =~ "app: :phx_blog" refute file =~ "deps_path: \"../../deps\"" refute file =~ "lockfile: \"../../mix.lock\"" end assert_file "phx_blog/config/config.exs", fn file -> assert file =~ "ecto_repos: [PhxBlog.Repo]" assert file =~ "config :phoenix, :json_library, Jason" refute file =~ "namespace: PhxBlog" refute file =~ "config :phx_blog, :generators" end assert_file "phx_blog/config/prod.exs", fn file -> assert file =~ "port: 443" end assert_file "phx_blog/config/runtime.exs", ~r/ip: {0, 0, 0, 0, 0, 0, 0, 0}/ assert_file "phx_blog/lib/phx_blog/application.ex", ~r/defmodule PhxBlog.Application do/ assert_file "phx_blog/lib/phx_blog.ex", ~r/defmodule PhxBlog do/ assert_file "phx_blog/mix.exs", fn file -> assert file =~ "mod: {PhxBlog.Application, []}" assert file =~ "{:jason," assert file =~ "{:phoenix_live_dashboard," end assert_file "phx_blog/lib/phx_blog_web.ex", fn file -> assert file =~ "defmodule PhxBlogWeb do" assert file =~ "use 
Phoenix.View,\n root: \"lib/phx_blog_web/templates\"" assert file =~ "use Phoenix.HTML" assert file =~ "Phoenix.LiveView" end assert_file "phx_blog/test/phx_blog_web/controllers/page_controller_test.exs" assert_file "phx_blog/test/phx_blog_web/views/page_view_test.exs" assert_file "phx_blog/test/phx_blog_web/views/error_view_test.exs" assert_file "phx_blog/test/phx_blog_web/views/layout_view_test.exs" assert_file "phx_blog/test/support/conn_case.ex" assert_file "phx_blog/test/test_helper.exs" assert_file "phx_blog/lib/phx_blog_web/controllers/page_controller.ex", ~r/defmodule PhxBlogWeb.PageController/ assert_file "phx_blog/lib/phx_blog_web/views/page_view.ex", ~r/defmodule PhxBlogWeb.PageView/ assert_file "phx_blog/lib/phx_blog_web/router.ex", fn file -> assert file =~ "defmodule PhxBlogWeb.Router" assert file =~ "live_dashboard" assert file =~ "import Phoenix.LiveDashboard.Router" end assert_file "phx_blog/lib/phx_blog_web/endpoint.ex", fn file -> assert file =~ ~s|defmodule PhxBlogWeb.Endpoint| assert file =~ ~s|socket "/live"| assert file =~ ~s|plug Phoenix.LiveDashboard.RequestLogger| end assert_file "phx_blog/lib/phx_blog_web/templates/layout/root.html.heex" assert_file "phx_blog/lib/phx_blog_web/templates/layout/app.html.heex" assert_file "phx_blog/lib/phx_blog_web/templates/page/index.html.heex", fn file -> version = Application.spec(:phx_new, :vsn) |> to_string() |> Version.parse!() changelog_vsn = "v#{version.major}.#{version.minor}" assert file =~ "https://github.com/phoenixframework/phoenix/blob/#{changelog_vsn}/CHANGELOG.md" end # assets assert_file "phx_blog/.gitignore", fn file -> assert file =~ "/priv/static/assets/" assert file =~ "phx_blog-*.tar" assert file =~ ~r/\n$/ end assert_file "phx_blog/config/dev.exs", fn file -> assert file =~ "esbuild: {Esbuild," assert file =~ "lib/phx_blog_web/(live|views)/.*(ex)" assert file =~ "lib/phx_blog_web/templates/.*(eex)" end assert_file "phx_blog/assets/css/app.css" assert_file 
"phx_blog/assets/css/phoenix.css" refute File.exists? "phx_blog/priv/static/assets/app.css" refute File.exists? "phx_blog/priv/static/assets/phoenix.css" refute File.exists? "phx_blog/priv/static/assets/app.js" assert File.exists? "phx_blog/assets/vendor" assert_file "phx_blog/config/config.exs", fn file -> assert file =~ "cd: Path.expand(\"../assets\", __DIR__)" assert file =~ "config :esbuild" end # Ecto config = ~r/config :phx_blog, PhxBlog.Repo,/ assert_file "phx_blog/mix.exs", fn file -> assert file =~ "{:phoenix_ecto," assert file =~ "aliases: aliases()" assert file =~ "ecto.setup" assert file =~ "ecto.reset" end assert_file "phx_blog/config/dev.exs", config assert_file "phx_blog/config/test.exs", config assert_file "phx_blog/config/runtime.exs", fn file -> assert file =~ config assert file =~ ~S|maybe_ipv6 = if System.get_env("ECTO_IPV6"), do: [:inet6], else: []| assert file =~ ~S|socket_options: maybe_ipv6| assert file =~ """ if System.get_env("PHX_SERVER") && System.get_env("RELEASE_NAME") do config :phx_blog, PhxBlogWeb.Endpoint, server: true end """ assert file =~ ~S[host = System.get_env("PHX_HOST") || "example.com"] assert file =~ ~S|url: [host: host, port: 443],| end assert_file "phx_blog/config/test.exs", ~R/database: "phx_blog_test#\{System.get_env\("MIX_TEST_PARTITION"\)\}"/ assert_file "phx_blog/lib/phx_blog/repo.ex", ~r"defmodule PhxBlog.Repo" assert_file "phx_blog/lib/phx_blog_web.ex", ~r"defmodule PhxBlogWeb" assert_file "phx_blog/lib/phx_blog_web/endpoint.ex", ~r"plug Phoenix.Ecto.CheckRepoStatus, otp_app: :phx_blog" assert_file "phx_blog/priv/repo/seeds.exs", ~r"PhxBlog.Repo.insert!" 
assert_file "phx_blog/test/support/data_case.ex", ~r"defmodule PhxBlog.DataCase" assert_file "phx_blog/priv/repo/migrations/.formatter.exs", ~r"import_deps: \[:ecto_sql\]" # LiveView refute_file "phx_blog/lib/phx_blog_web/live/page_live_view.ex" assert_file "phx_blog/assets/js/app.js", fn file -> assert file =~ ~s|import {LiveSocket} from "phoenix_live_view"| assert file =~ ~s|liveSocket.connect()| end assert_file "phx_blog/mix.exs", fn file -> assert file =~ ~r":phoenix_live_view" assert file =~ ~r":floki" end assert_file "phx_blog/lib/phx_blog_web/router.ex", &assert(&1 =~ ~s[plug :fetch_live_flash]) assert_file "phx_blog/lib/phx_blog_web/router.ex", &assert(&1 =~ ~s[plug :put_root_layout]) assert_file "phx_blog/lib/phx_blog_web/router.ex", &assert(&1 =~ ~s[PageController]) # Telemetry assert_file "phx_blog/mix.exs", fn file -> assert file =~ "{:telemetry_metrics," assert file =~ "{:telemetry_poller," end assert_file "phx_blog/lib/phx_blog_web/telemetry.ex", fn file -> assert file =~ "defmodule PhxBlogWeb.Telemetry do" assert file =~ "{:telemetry_poller, measurements: periodic_measurements()" assert file =~ "defp periodic_measurements do" assert file =~ "# {PhxBlogWeb, :count_users, []}" assert file =~ "def metrics do" assert file =~ "summary(\"phoenix.endpoint.stop.duration\"," assert file =~ "summary(\"phoenix.router_dispatch.stop.duration\"," assert file =~ "# Database Metrics" assert file =~ "summary(\"phx_blog.repo.query.total_time\"," end # Mailer assert_file "phx_blog/mix.exs", fn file -> assert file =~ "{:swoosh, \"~> 1.3\"}" end assert_file "phx_blog/lib/phx_blog/mailer.ex", fn file -> assert file =~ "defmodule PhxBlog.Mailer do" assert file =~ "use Swoosh.Mailer, otp_app: :phx_blog" end assert_file "phx_blog/config/config.exs", fn file -> assert file =~ "config :swoosh" assert file =~ "config :phx_blog, PhxBlog.Mailer, adapter: Swoosh.Adapters.Local" end assert_file "phx_blog/config/test.exs", fn file -> assert file =~ "config :phx_blog, PhxBlog.Mailer, 
adapter: Swoosh.Adapters.Test" end # Install dependencies? assert_received {:mix_shell, :yes?, ["\nFetch and install dependencies?"]} # Instructions assert_received {:mix_shell, :info, ["\nWe are almost there" <> _ = msg]} assert msg =~ "$ cd phx_blog" assert msg =~ "$ mix deps.get" assert_received {:mix_shell, :info, ["Then configure your database in config/dev.exs" <> _]} assert_received {:mix_shell, :info, ["Start your Phoenix app" <> _]} # Gettext assert_file "phx_blog/lib/phx_blog_web/gettext.ex", ~r"defmodule PhxBlogWeb.Gettext" assert File.exists?("phx_blog/priv/gettext/errors.pot") assert File.exists?("phx_blog/priv/gettext/en/LC_MESSAGES/errors.po") end end test "new without defaults" do in_tmp "new without defaults", fn -> Mix.Tasks.Phx.New.run([@app_name, "--no-html", "--no-assets", "--no-ecto", "--no-gettext", "--no-dashboard", "--no-mailer"]) # No assets assert_file "phx_blog/.gitignore", fn file -> refute file =~ "/priv/static/assets/" assert file =~ ~r/\n$/ end assert_file "phx_blog/config/dev.exs", ~r/watchers: \[\]/ # No assets & No HTML refute_file "phx_blog/priv/static/assets/app.css" refute_file "phx_blog/priv/static/assets/phoenix.css" refute_file "phx_blog/priv/static/favicon.ico" refute_file "phx_blog/priv/static/images/phoenix.png" refute_file "phx_blog/priv/static/assets/app.js" # No Ecto config = ~r/config :phx_blog, PhxBlog.Repo,/ refute File.exists?("phx_blog/lib/phx_blog/repo.ex") assert_file "phx_blog/lib/phx_blog_web/endpoint.ex", fn file -> refute file =~ "plug Phoenix.Ecto.CheckRepoStatus, otp_app: :phx_blog" end assert_file "phx_blog/lib/phx_blog_web/telemetry.ex", fn file -> refute file =~ "# Database Metrics" refute file =~ "summary(\"phx_blog.repo.query.total_time\"," end assert_file "phx_blog/.formatter.exs", fn file -> assert file =~ "import_deps: [:phoenix]" assert file =~ "inputs: [\"*.{ex,exs}\", \"{config,lib,test}/**/*.{ex,exs}\"]" refute file =~ "subdirectories:" end assert_file "phx_blog/mix.exs", &refute(&1 =~ 
~r":phoenix_ecto") assert_file "phx_blog/config/config.exs", fn file -> refute file =~ "config :esbuild" refute file =~ "config :phx_blog, :generators" refute file =~ "ecto_repos:" end assert_file "phx_blog/config/dev.exs", fn file -> refute file =~ config assert file =~ "config :phoenix, :plug_init_mode, :runtime" end assert_file "phx_blog/config/test.exs", &refute(&1 =~ config) assert_file "phx_blog/config/runtime.exs", &refute(&1 =~ config) assert_file "phx_blog/lib/phx_blog_web.ex", &refute(&1 =~ ~r"alias PhxBlog.Repo") # No gettext refute_file "phx_blog/lib/phx_blog_web/gettext.ex" refute_file "phx_blog/priv/gettext/en/LC_MESSAGES/errors.po" refute_file "phx_blog/priv/gettext/errors.pot" assert_file "phx_blog/mix.exs", &refute(&1 =~ ~r":gettext") assert_file "phx_blog/lib/phx_blog_web.ex", &refute(&1 =~ ~r"import AmsMockWeb.Gettext") assert_file "phx_blog/lib/phx_blog_web/views/error_helpers.ex", &refute(&1 =~ ~r"gettext") assert_file "phx_blog/config/dev.exs", &refute(&1 =~ ~r"gettext") # No HTML assert File.exists?("phx_blog/test/phx_blog_web/controllers") assert File.exists?("phx_blog/lib/phx_blog_web/controllers") assert File.exists?("phx_blog/lib/phx_blog_web/views") refute File.exists? "phx_blog/test/web/controllers/pager_controller_test.exs" refute File.exists? "phx_blog/test/views/layout_view_test.exs" refute File.exists? "phx_blog/test/views/page_view_test.exs" refute File.exists? "phx_blog/lib/phx_blog_web/controllers/page_controller.ex" refute File.exists? "phx_blog/lib/phx_blog_web/templates/layout/app.html.heex" refute File.exists? "phx_blog/lib/phx_blog_web/templates/page/index.html.heex" refute File.exists? "phx_blog/lib/phx_blog_web/views/layout_view.ex" refute File.exists? 
"phx_blog/lib/phx_blog_web/views/page_view.ex" assert_file "phx_blog/mix.exs", &refute(&1 =~ ~r":phoenix_html") assert_file "phx_blog/mix.exs", &refute(&1 =~ ~r":phoenix_live_reload") assert_file "phx_blog/lib/phx_blog_web.ex", fn file -> assert file =~ "defp view_helpers do" refute file =~ "Phoenix.HTML" refute file =~ "Phoenix.LiveView" end assert_file "phx_blog/lib/phx_blog_web/endpoint.ex", fn file -> refute file =~ ~r"Phoenix.LiveReloader" refute file =~ ~r"Phoenix.LiveReloader.Socket" end assert_file "phx_blog/lib/phx_blog_web/views/error_view.ex", ~r".json" assert_file "phx_blog/lib/phx_blog_web/router.ex", &refute(&1 =~ ~r"pipeline :browser") # No Dashboard assert_file "phx_blog/lib/phx_blog_web/endpoint.ex", fn file -> refute file =~ ~s|plug Phoenix.LiveDashboard.RequestLogger| end assert_file "phx_blog/lib/phx_blog_web/router.ex", fn file -> refute file =~ "live_dashboard" refute file =~ "import Phoenix.LiveDashboard.Router" end # No mailer or emails refute File.exists? "phx_blog/lib/phx_blog/mailer.ex" assert_file "phx_blog/config/config.exs", fn file -> refute file =~ "config :swoosh" refute file =~ "config :phx_blog, PhxBlog.Mailer, adapter: Swoosh.Adapters.Local" end end end test "new with --no-dashboard" do in_tmp "new with no_dashboard", fn -> Mix.Tasks.Phx.New.run([@app_name, "--no-dashboard"]) assert_file "phx_blog/mix.exs", &refute(&1 =~ ~r":phoenix_live_dashboard") assert_file "phx_blog/lib/phx_blog_web/templates/layout/app.html.heex", fn file -> refute file =~ ~s|<%= link "LiveDashboard", to: Routes.live_dashboard_path(@conn, :home)| end assert_file "phx_blog/lib/phx_blog_web/endpoint.ex", fn file -> assert file =~ ~s|defmodule PhxBlogWeb.Endpoint| assert file =~ ~s| socket "/live"| refute file =~ ~s|plug Phoenix.LiveDashboard.RequestLogger| end end end test "new with --no-dashboard and --no-live" do in_tmp "new with no_dashboard and no_live", fn -> Mix.Tasks.Phx.New.run([@app_name, "--no-dashboard", "--no-live"]) assert_file 
"phx_blog/lib/phx_blog_web/endpoint.ex", fn file -> assert file =~ ~s|defmodule PhxBlogWeb.Endpoint| assert file =~ ~s|# socket "/live"| refute file =~ ~s|plug Phoenix.LiveDashboard.RequestLogger| end end end test "new with --no-html" do in_tmp "new with no_html", fn -> Mix.Tasks.Phx.New.run([@app_name, "--no-html"]) assert_file "phx_blog/mix.exs", fn file -> refute file =~ ~s|:phoenix_live_view| refute file =~ ~s|:phoenix_html| assert file =~ ~s|:phoenix_live_dashboard| end assert_file "phx_blog/lib/phx_blog_web/endpoint.ex", fn file -> assert file =~ ~s|defmodule PhxBlogWeb.Endpoint| assert file =~ ~s|socket "/live"| assert file =~ ~s|plug Phoenix.LiveDashboard.RequestLogger| end assert_file "phx_blog/lib/phx_blog_web.ex", fn file -> refute file =~ ~s|Phoenix.HTML| refute file =~ ~s|Phoenix.LiveView| end assert_file "phx_blog/lib/phx_blog_web/router.ex", fn file -> refute file =~ ~s|pipeline :browser| assert file =~ ~s|pipe_through [:fetch_session, :protect_from_forgery]| end end end test "new with --no-assets" do in_tmp "new no_assets", fn -> Mix.Tasks.Phx.New.run([@app_name, "--no-assets"]) assert_file "phx_blog/.gitignore", fn file -> refute file =~ "/priv/static/assets/" end assert_file "phx_blog/.gitignore" assert_file "phx_blog/.gitignore", ~r/\n$/ assert_file "phx_blog/priv/static/assets/app.css" assert_file "phx_blog/priv/static/assets/phoenix.css" assert_file "phx_blog/priv/static/assets/app.js" assert_file "phx_blog/priv/static/favicon.ico" assert_file "phx_blog/priv/static/images/phoenix.png" assert_file "phx_blog/config/config.exs", fn file -> refute file =~ "config :esbuild" end end end test "new with binary_id" do in_tmp "new with binary_id", fn -> Mix.Tasks.Phx.New.run([@app_name, "--binary-id"]) assert_file "phx_blog/config/config.exs", ~r/generators: \[binary_id: true\]/ end end test "new with uppercase" do in_tmp "new with uppercase", fn -> Mix.Tasks.Phx.New.run(["phxBlog"]) assert_file "phxBlog/README.md" assert_file "phxBlog/mix.exs", fn file 
-> assert file =~ "app: :phxBlog" end assert_file "phxBlog/config/dev.exs", fn file -> assert file =~ ~r/config :phxBlog, PhxBlog.Repo,/ assert file =~ "database: \"phxblog_dev\"" end end end test "new with path, app and module" do in_tmp "new with path, app and module", fn -> project_path = Path.join(File.cwd!(), "custom_path") Mix.Tasks.Phx.New.run([project_path, "--app", @app_name, "--module", "PhoteuxBlog"]) assert_file "custom_path/.gitignore" assert_file "custom_path/.gitignore", ~r/\n$/ assert_file "custom_path/mix.exs", ~r/app: :phx_blog/ assert_file "custom_path/lib/phx_blog_web/endpoint.ex", ~r/app: :phx_blog/ assert_file "custom_path/config/config.exs", ~r/namespace: PhoteuxBlog/ assert_file "custom_path/lib/phx_blog_web.ex", ~r/use Phoenix.Controller, namespace: PhoteuxBlogWeb/ end end test "new inside umbrella" do in_tmp "new inside umbrella", fn -> File.write! "mix.exs", MixHelper.umbrella_mixfile_contents() File.mkdir! "apps" File.cd! "apps", fn -> Mix.Tasks.Phx.New.run([@app_name]) assert_file "phx_blog/mix.exs", fn file -> assert file =~ "deps_path: \"../../deps\"" assert file =~ "lockfile: \"../../mix.lock\"" end end end end test "new with --no-install" do in_tmp "new with no install", fn -> Mix.Tasks.Phx.New.run([@app_name, "--no-install"]) # Does not prompt to install dependencies refute_received {:mix_shell, :yes?, ["\nFetch and install dependencies?"]} # Instructions assert_received {:mix_shell, :info, ["\nWe are almost there" <> _ = msg]} assert msg =~ "$ cd phx_blog" assert msg =~ "$ mix deps.get" assert_received {:mix_shell, :info, ["Then configure your database in config/dev.exs" <> _]} assert_received {:mix_shell, :info, ["Start your Phoenix app" <> _]} end end test "new defaults to pg adapter" do in_tmp "new defaults to pg adapter", fn -> project_path = Path.join(File.cwd!(), "custom_path") Mix.Tasks.Phx.New.run([project_path]) assert_file "custom_path/mix.exs", ":postgrex" assert_file "custom_path/config/dev.exs", [~r/username: 
"postgres"/, ~r/password: "postgres"/, ~r/hostname: "localhost"/] assert_file "custom_path/config/test.exs", [~r/username: "postgres"/, ~r/password: "postgres"/, ~r/hostname: "localhost"/] assert_file "custom_path/config/runtime.exs", [~r/url: database_url/] assert_file "custom_path/lib/custom_path/repo.ex", "Ecto.Adapters.Postgres" assert_file "custom_path/test/support/conn_case.ex", "Ecto.Adapters.SQL.Sandbox.start_owner" assert_file "custom_path/test/support/channel_case.ex", "Ecto.Adapters.SQL.Sandbox.start_owner" assert_file "custom_path/test/support/data_case.ex", "Ecto.Adapters.SQL.Sandbox.start_owner" end end test "new with mysql adapter" do in_tmp "new with mysql adapter", fn -> project_path = Path.join(File.cwd!(), "custom_path") Mix.Tasks.Phx.New.run([project_path, "--database", "mysql"]) assert_file "custom_path/mix.exs", ":myxql" assert_file "custom_path/config/dev.exs", [~r/username: "root"/, ~r/password: ""/] assert_file "custom_path/config/test.exs", [~r/username: "root"/, ~r/password: ""/] assert_file "custom_path/config/runtime.exs", [~r/url: database_url/] assert_file "custom_path/lib/custom_path/repo.ex", "Ecto.Adapters.MyXQL" assert_file "custom_path/test/support/conn_case.ex", "Ecto.Adapters.SQL.Sandbox.start_owner" assert_file "custom_path/test/support/channel_case.ex", "Ecto.Adapters.SQL.Sandbox.start_owner" assert_file "custom_path/test/support/data_case.ex", "Ecto.Adapters.SQL.Sandbox.start_owner" end end test "new with sqlite3 adapter" do in_tmp "new with sqlite3 adapter", fn -> project_path = Path.join(File.cwd!(), "custom_path") Mix.Tasks.Phx.New.run([project_path, "--database", "sqlite3"]) assert_file "custom_path/mix.exs", ":ecto_sqlite3" assert_file "custom_path/config/dev.exs", [~r/database: .*_dev.db/] assert_file "custom_path/config/test.exs", [~r/database: .*_test.db/] assert_file "custom_path/config/runtime.exs", [~r/database: database_path/] assert_file "custom_path/lib/custom_path/repo.ex", "Ecto.Adapters.SQLite3" assert_file 
"custom_path/test/support/conn_case.ex", "Ecto.Adapters.SQL.Sandbox.start_owner" assert_file "custom_path/test/support/channel_case.ex", "Ecto.Adapters.SQL.Sandbox.start_owner" assert_file "custom_path/test/support/data_case.ex", "Ecto.Adapters.SQL.Sandbox.start_owner" assert_file "custom_path/.gitignore", "*.db" assert_file "custom_path/.gitignore", "*.db-*" end end test "new with mssql adapter" do in_tmp "new with mssql adapter", fn -> project_path = Path.join(File.cwd!(), "custom_path") Mix.Tasks.Phx.New.run([project_path, "--database", "mssql"]) assert_file "custom_path/mix.exs", ":tds" assert_file "custom_path/config/dev.exs", [~r/username: "sa"/, ~r/password: "some!Password"/] assert_file "custom_path/config/test.exs", [~r/username: "sa"/, ~r/password: "some!Password"/] assert_file "custom_path/config/runtime.exs", [~r/url: database_url/] assert_file "custom_path/lib/custom_path/repo.ex", "Ecto.Adapters.Tds" assert_file "custom_path/test/support/conn_case.ex", "Ecto.Adapters.SQL.Sandbox.start_owner" assert_file "custom_path/test/support/channel_case.ex", "Ecto.Adapters.SQL.Sandbox.start_owner" assert_file "custom_path/test/support/data_case.ex", "Ecto.Adapters.SQL.Sandbox.start_owner" end end test "new with invalid database adapter" do in_tmp "new with invalid database adapter", fn -> project_path = Path.join(File.cwd!(), "custom_path") assert_raise Mix.Error, ~s(Unknown database "invalid"), fn -> Mix.Tasks.Phx.New.run([project_path, "--database", "invalid"]) end end end test "new with invalid args" do assert_raise Mix.Error, ~r"Application name must start with a letter and ", fn -> Mix.Tasks.Phx.New.run ["007invalid"] end assert_raise Mix.Error, ~r"Application name must start with a letter and ", fn -> Mix.Tasks.Phx.New.run ["valid", "--app", "007invalid"] end assert_raise Mix.Error, ~r"Module name must be a valid Elixir alias", fn -> Mix.Tasks.Phx.New.run ["valid", "--module", "not.valid"] end assert_raise Mix.Error, ~r"Module name \w+ is already taken", fn 
-> Mix.Tasks.Phx.New.run ["string"] end assert_raise Mix.Error, ~r"Module name \w+ is already taken", fn -> Mix.Tasks.Phx.New.run ["valid", "--app", "mix"] end assert_raise Mix.Error, ~r"Module name \w+ is already taken", fn -> Mix.Tasks.Phx.New.run ["valid", "--module", "String"] end end test "invalid options" do assert_raise Mix.Error, ~r/Invalid option: -d/, fn -> Mix.Tasks.Phx.New.run(["valid", "-database", "mysql"]) end end test "new without args" do in_tmp "new without args", fn -> assert capture_io(fn -> Mix.Tasks.Phx.New.run([]) end) =~ "Creates a new Phoenix project." end end end
40.212219
130
0.649568
ffbbded9f0d0563362e989838ceb3501b24837e3
415
ex
Elixir
lib/casino/players/player_supervisor.ex
vellov/SQTests
3bf20bb15245c11547efd885532be15044493eaf
[ "MIT" ]
null
null
null
lib/casino/players/player_supervisor.ex
vellov/SQTests
3bf20bb15245c11547efd885532be15044493eaf
[ "MIT" ]
null
null
null
lib/casino/players/player_supervisor.ex
vellov/SQTests
3bf20bb15245c11547efd885532be15044493eaf
[ "MIT" ]
null
null
null
defmodule Casino.Players.PlayerSupervisor do use Supervisor def start_link do Supervisor.start_link(__MODULE__, :ok, name: __MODULE__) end def init(:ok) do children = [worker(Casino.Players.Player, [], restart: :temporary)] supervise(children, strategy: :simple_one_for_one) end def new_player(balance) do Supervisor.start_child(Casino.Players.PlayerSupervisor, [balance]) end end
23.055556
71
0.742169
ffbc5439775ff349024b985b777bec2e5e5fd2ca
1,262
ex
Elixir
lib/towwwer_web/endpoint.ex
juhalehtonen/towwwer
552fe57e93adc9c521a4c6a8ef550b84efec32ca
[ "MIT" ]
1
2019-05-03T13:39:40.000Z
2019-05-03T13:39:40.000Z
lib/towwwer_web/endpoint.ex
juhalehtonen/towwwer
552fe57e93adc9c521a4c6a8ef550b84efec32ca
[ "MIT" ]
13
2019-03-27T05:40:41.000Z
2019-05-24T10:43:31.000Z
lib/towwwer_web/endpoint.ex
juhalehtonen/towwwer
552fe57e93adc9c521a4c6a8ef550b84efec32ca
[ "MIT" ]
null
null
null
defmodule TowwwerWeb.Endpoint do use Phoenix.Endpoint, otp_app: :towwwer socket "/socket", TowwwerWeb.UserSocket, websocket: true, longpoll: false # Serve at "/" the static files from "priv/static" directory. # # You should set gzip to true if you are running phx.digest # when deploying your static files in production. plug Plug.Static, at: "/", from: :towwwer, gzip: false, only: ~w(css fonts images js favicon.ico robots.txt) # Code reloading can be explicitly enabled under the # :code_reloader configuration of your endpoint. if code_reloading? do socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket plug Phoenix.LiveReloader plug Phoenix.CodeReloader end plug Plug.RequestId plug Plug.Logger plug Plug.Parsers, parsers: [:urlencoded, :multipart, :json], pass: ["*/*"], json_decoder: Phoenix.json_library() plug Plug.MethodOverride plug Plug.Head # The session will be stored in the cookie and signed, # this means its contents can be read but not tampered with. # Set :encryption_salt if you would also like to encrypt it. plug Plug.Session, store: :cookie, key: "_towwwer_key", signing_salt: "Qm/mAiJ6" plug TowwwerWeb.Router end
26.851064
69
0.708399
ffbc64c132075fea07690dc963b9d43231d6059a
3,330
ex
Elixir
samples/client/petstore/elixir/lib/swagger_petstore/api/pet.ex
dangthaison91/swagger-codegen-moya
d0f3a119f2af176cfad10a80c77dc9c77c9ed22f
[ "Apache-2.0" ]
3
2017-09-07T16:18:04.000Z
2020-11-05T02:20:50.000Z
samples/client/petstore/elixir/lib/swagger_petstore/api/pet.ex
dangthaison91/swagger-codegen-moya
d0f3a119f2af176cfad10a80c77dc9c77c9ed22f
[ "Apache-2.0" ]
null
null
null
samples/client/petstore/elixir/lib/swagger_petstore/api/pet.ex
dangthaison91/swagger-codegen-moya
d0f3a119f2af176cfad10a80c77dc9c77c9ed22f
[ "Apache-2.0" ]
5
2017-05-01T15:47:40.000Z
2021-12-29T03:23:06.000Z
defmodule SwaggerPetstore.Api.Pet do @moduledoc """ Documentation for SwaggerPetstore.Api.Pet. """ use Tesla plug Tesla.Middleware.BaseUrl, "http://petstore.swagger.io/v2" plug Tesla.Middleware.JSON def add_pet(body) do method = [method: :post] url = [url: "/pet"] query_params = [] header_params = [] body_params = [body: body] form_params = [] params = query_params ++ header_params ++ body_params ++ form_params opts = [] options = method ++ url ++ params ++ opts request(options) end def delete_pet(pet_id, api_key) do method = [method: :delete] url = [url: "/pet/#{pet_id}"] query_params = [] header_params = [header: [{:"api_key", api_key}]] body_params = [] form_params = [] params = query_params ++ header_params ++ body_params ++ form_params opts = [] options = method ++ url ++ params ++ opts request(options) end def find_pets_by_status(status) do method = [method: :get] url = [url: "/pet/findByStatus"] query_params = [query: [{:"status", status}]] header_params = [] body_params = [] form_params = [] params = query_params ++ header_params ++ body_params ++ form_params opts = [] options = method ++ url ++ params ++ opts request(options) end def find_pets_by_tags(tags) do method = [method: :get] url = [url: "/pet/findByTags"] query_params = [query: [{:"tags", tags}]] header_params = [] body_params = [] form_params = [] params = query_params ++ header_params ++ body_params ++ form_params opts = [] options = method ++ url ++ params ++ opts request(options) end def get_pet_by_id(pet_id) do method = [method: :get] url = [url: "/pet/#{pet_id}"] query_params = [] header_params = [] body_params = [] form_params = [] params = query_params ++ header_params ++ body_params ++ form_params opts = [] options = method ++ url ++ params ++ opts request(options) end def update_pet(body) do method = [method: :put] url = [url: "/pet"] query_params = [] header_params = [] body_params = [body: body] form_params = [] params = query_params ++ header_params ++ body_params ++ form_params opts 
= [] options = method ++ url ++ params ++ opts request(options) end def update_pet_with_form(pet_id, name, status) do method = [method: :post] url = [url: "/pet/#{pet_id}"] query_params = [] header_params = [] body_params = [] form_params = [body: Enum.map_join([{:"name", name}, {:"status", status}], "&", &("#{elem(&1, 0)}=#{elem(&1, 1)}"))] params = query_params ++ header_params ++ body_params ++ form_params opts = [] options = method ++ url ++ params ++ opts request(options) end def upload_file(pet_id, additional_metadata, file) do method = [method: :post] url = [url: "/pet/#{pet_id}/uploadImage"] query_params = [] header_params = [] body_params = [] form_params = [body: Enum.map_join([{:"additionalMetadata", additional_metadata}, {:"file", file}], "&", &("#{elem(&1, 0)}=#{elem(&1, 1)}"))] params = query_params ++ header_params ++ body_params ++ form_params opts = [] options = method ++ url ++ params ++ opts request(options) end end
27.073171
145
0.603003
ffbc690934e031c3e9131b238e534e4fc51c7316
7,834
ex
Elixir
lib/elsa/supervisor.ex
mhanberg/elsa
53eb831ca4abcc0f2de6a8a9686bd13fc4767ca6
[ "Apache-2.0" ]
76
2019-05-31T20:35:19.000Z
2022-02-26T10:15:27.000Z
lib/elsa/supervisor.ex
mhanberg/elsa
53eb831ca4abcc0f2de6a8a9686bd13fc4767ca6
[ "Apache-2.0" ]
44
2019-06-13T14:43:30.000Z
2022-03-31T12:16:19.000Z
lib/elsa/supervisor.ex
mhanberg/elsa
53eb831ca4abcc0f2de6a8a9686bd13fc4767ca6
[ "Apache-2.0" ]
11
2019-06-05T02:23:06.000Z
2022-02-10T16:14:33.000Z
defmodule Elsa.Supervisor do
  @moduledoc """
  Top-level supervisor that orchestrates all other components of the Elsa
  library, giving applications a single point of integration and
  configuration via nested keyword lists.

  Components that are not configured (for example producers, when an
  application only consumes) are simply not started.

  `start_link/1` options:

    * `:endpoints` - Required. Keyword list of kafka brokers, e.g. `[localhost: 9092]`.
    * `:connection` - Required. Atom used to track the kafka connection.
    * `:config` - Optional. Client configuration options passed to brod.
    * `:producer` - Optional. A single producer configuration or a list of them
      (`:topic` required; `:poll` and `:config` optional).
    * `:group_consumer` - Optional. Group consumer configuration (`:group`,
      `:topics` and `:handler` required; handler init args, assignment
      callbacks and `:config` optional).
    * `:consumer` - Optional. Simple topic consumer configuration (`:topic`,
      `:begin_offset` and `:handler` required; `:partition`,
      `:handler_init_args` and `:poll` optional).
  """

  use Supervisor

  @doc """
  Name of the `Elsa.Registry` process for the given connection.
  """
  @spec registry(String.t() | atom()) :: atom()
  def registry(connection), do: :"elsa_registry_#{connection}"

  @doc "Builds a `:via` tuple resolving `name` through `registry`."
  def via_name(registry, name), do: {:via, Elsa.Registry, {registry, name}}

  @doc "Via-name of the `DynamicSupervisor` owned by `registry`."
  def dynamic_supervisor(registry), do: via_name(registry, DynamicSupervisor)

  @doc """
  Starts the top-level Elsa supervisor and links it to the current process.

  Always starts a brod client and a custom process registry, then
  conditionally starts and supervises any configured producers and
  consumers. See the moduledoc for the accepted options.
  """
  @spec start_link(keyword()) :: GenServer.on_start()
  def start_link(args) do
    Supervisor.start_link(__MODULE__, args, Keyword.take(args, [:name]))
  end

  @doc """
  Starts producer processes under Elsa's `DynamicSupervisor` for the given
  connection.

  Polling cannot be configured for producers at runtime; it is fixed by the
  `Elsa.Supervisor` configuration for the connection. Supported keys:

    * `:topic` - Required. Producer will be started for the configured topic.
    * `:config` - Optional. Producer configuration options passed to `brod_producer`.
  """
  @spec start_producer(String.t() | atom, keyword) :: [DynamicSupervisor.on_start_child()]
  def start_producer(connection, args) do
    registry = registry(connection)
    manager = via_name(registry, :producer_process_manager)

    registry
    |> Elsa.Producer.Initializer.init(args)
    |> Enum.map(fn spec -> Elsa.DynamicProcessManager.start_child(manager, spec) end)
  end

  def init(args) do
    connection = Keyword.fetch!(args, :connection)
    registry = registry(connection)

    [
      {Elsa.Registry, name: registry},
      {DynamicSupervisor, strategy: :one_for_one, name: dynamic_supervisor(registry)},
      start_client(args),
      producer_spec(registry, Keyword.get(args, :producer)),
      start_group_consumer(connection, registry, Keyword.get(args, :group_consumer)),
      start_consumer(connection, registry, Keyword.get(args, :consumer))
    ]
    |> List.flatten()
    |> Supervisor.init(strategy: :rest_for_one)
  end

  # Child spec for the Elsa.Wrapper that owns the brod client connection.
  defp start_client(args) do
    connection = Keyword.fetch!(args, :connection)
    endpoints = Keyword.fetch!(args, :endpoints)
    config = Keyword.get(args, :config, [])

    {Elsa.Wrapper,
     mfa: {:brod_client, :start_link, [endpoints, connection, config]},
     register: {registry(connection), :brod_client}}
  end

  # No group consumer configured -> contributes no children.
  defp start_group_consumer(_connection, _registry, nil), do: []

  defp start_group_consumer(connection, registry, args) do
    group_args =
      args
      |> Keyword.put(:registry, registry)
      |> Keyword.put(:connection, connection)
      |> Keyword.put(:name, via_name(registry, Elsa.Group.Supervisor))

    {Elsa.Group.Supervisor, group_args}
  end

  # No simple consumer configured -> contributes no children.
  defp start_consumer(_connection, _registry, nil), do: []

  defp start_consumer(connection, registry, args) do
    # A configured partition narrows the subscription; otherwise all
    # partitions of the topic are consumed.
    topics =
      if Keyword.has_key?(args, :partition) do
        [{Keyword.fetch!(args, :topic), Keyword.fetch!(args, :partition)}]
      else
        [Keyword.fetch!(args, :topic)]
      end

    consumer_args =
      args
      |> Keyword.put(:registry, registry)
      |> Keyword.put(:connection, connection)
      |> Keyword.put(:topics, topics)
      |> Keyword.put_new(:config, [])

    {Elsa.DynamicProcessManager,
     id: :worker_process_manager,
     dynamic_supervisor: dynamic_supervisor(registry),
     poll: Keyword.get(args, :poll, false),
     initializer: {Elsa.Consumer.Worker.Initializer, :init, [consumer_args]}}
  end

  # Even without producer config, an idle producer process manager is started
  # so start_producer/2 has a manager to hand child specs to later.
  defp producer_spec(registry, nil) do
    [
      {Elsa.DynamicProcessManager,
       id: :producer_process_manager,
       dynamic_supervisor: dynamic_supervisor(registry),
       initializer: nil,
       poll: false,
       name: via_name(registry, :producer_process_manager)}
    ]
  end

  defp producer_spec(registry, args) do
    [
      {Elsa.DynamicProcessManager,
       id: :producer_process_manager,
       dynamic_supervisor: dynamic_supervisor(registry),
       initializer: {Elsa.Producer.Initializer, :init, [registry, args]},
       poll: Keyword.get(args, :poll, false),
       name: via_name(registry, :producer_process_manager)}
    ]
  end
end
33.054852
128
0.693771
ffbc79e2965a8970b257ec5f411ff5c6305e3bea
412
ex
Elixir
test/support/conn_case.ex
treebee/supabase-surface
5a184ca92323c085dd81e2fc8aa8c10367f2382e
[ "Apache-2.0" ]
5
2021-06-08T08:02:43.000Z
2022-02-09T23:13:46.000Z
test/support/conn_case.ex
treebee/supabase-surface
5a184ca92323c085dd81e2fc8aa8c10367f2382e
[ "Apache-2.0" ]
null
null
null
test/support/conn_case.ex
treebee/supabase-surface
5a184ca92323c085dd81e2fc8aa8c10367f2382e
[ "Apache-2.0" ]
1
2021-07-14T05:20:31.000Z
2021-07-14T05:20:31.000Z
defmodule SupabaseSurface.ConnCase do
  @moduledoc """
  This module defines the test case to be used by
  tests that require setting up a connection.

  `use SupabaseSurface.ConnCase` pulls in `Surface.LiveViewTest` and sets
  `@endpoint`; the `setup` block provides a fresh `conn` built with
  `Phoenix.ConnTest.build_conn/0`.
  """

  use ExUnit.CaseTemplate

  using do
    quote do
      # Import conveniences for testing with connections
      use Surface.LiveViewTest

      # NOTE(review): `Endpoint` is referenced unqualified — it resolves in
      # the caller's alias scope; confirm the intended endpoint module.
      @endpoint Endpoint
    end
  end

  setup _tags do
    # Each test receives a fresh, unauthenticated test connection.
    {:ok, conn: Phoenix.ConnTest.build_conn()}
  end
end
18.727273
56
0.699029
ffbcc49382e8bbe90e7bc8b01690aec8229175c6
399
ex
Elixir
lib/ecto_tablestore/hashids.ex
xosdy/ecto_tablestore
bea08b70fcf7a12932e677b63882cfb936956b1a
[ "MIT" ]
null
null
null
lib/ecto_tablestore/hashids.ex
xosdy/ecto_tablestore
bea08b70fcf7a12932e677b63882cfb936956b1a
[ "MIT" ]
null
null
null
lib/ecto_tablestore/hashids.ex
xosdy/ecto_tablestore
bea08b70fcf7a12932e677b63882cfb936956b1a
[ "MIT" ]
null
null
null
if Code.ensure_loaded?(Hashids) do
  # Ecto type for Hashids-encoded string identifiers. Only compiled when the
  # optional Hashids dependency is available.
  defmodule EctoTablestore.Hashids do
    @moduledoc false

    use Ecto.Type

    # Exposed to Ecto as an opaque binary id.
    def type, do: :binary_id

    # Accept any string-like binary from user input; reject everything else.
    def cast(value) when is_bitstring(value), do: {:ok, value}
    def cast(_other), do: :error

    # Values loaded from storage pass through unchanged.
    def load(value) when is_bitstring(value), do: {:ok, value}
    def load(_other), do: :error

    # Persisted verbatim.
    def dump(value), do: {:ok, value}
  end
end
15.96
45
0.598997
ffbcdb9865fed310a80ef1e7c7cbbad61b46a313
2,996
ex
Elixir
lib/chroxy/proxy_listener.ex
heydtn/chroxy
68f2188ba835239bcb9e40856e117ef604c2248e
[ "MIT" ]
4
2019-05-20T12:54:00.000Z
2019-05-23T02:08:12.000Z
lib/chroxy/proxy_listener.ex
zacksiri/chroxy
99b882d01271ea4353762961bc8030d70212bd42
[ "MIT" ]
null
null
null
lib/chroxy/proxy_listener.ex
zacksiri/chroxy
99b882d01271ea4353762961bc8030d70212bd42
[ "MIT" ]
null
null
null
defmodule Chroxy.ProxyListener do
  @moduledoc """
  Accepts upstream TCP connections from clients and delegates ownership of
  each accepted socket to a newly spawned `Chroxy.ProxyServer`, which in
  turn creates the transparent forwarding channel to the downstream host.
  """

  use GenServer

  require Logger

  # Options for the listening socket: binary mode, raw packets, active
  # delivery, large accept backlog, and address reuse.
  @upstream_tcp_opts [
    :binary,
    packet: 0,
    active: true,
    backlog: 10_000,
    reuseaddr: true
  ]

  @default_port 1331

  def child_spec(opts) do
    %{
      id: __MODULE__,
      start: {__MODULE__, :start_link, [opts]},
      restart: :transient,
      shutdown: 5000,
      type: :worker
    }
  end

  @doc """
  Spawns the listener process.

  Keyword `args`:
  - `:port` - port on which the TCP listener accepts connections.
  """
  def start_link(args) do
    GenServer.start_link(__MODULE__, args, name: __MODULE__)
  end

  @doc """
  Instructs the listener to accept an incoming TCP connection and spawn a
  dynamic transparent proxy (`Chroxy.ProxyServer`) to handle it.
  """
  def accept(proxy_opts) do
    GenServer.cast(__MODULE__, {:accept, proxy_opts})
  end

  ##
  # Callbacks

  @doc false
  def init(args) do
    port = args |> Keyword.get(:port, "") |> normalize_port()
    # Defer binding the socket until after init returns.
    send(self(), {:listen, port})
    {:ok, %{listen_socket: nil}}
  end

  @doc false
  def handle_info({:listen, port}, state = %{listen_socket: nil}) do
    case :gen_tcp.listen(port, @upstream_tcp_opts) do
      {:ok, socket} ->
        Logger.debug("Listening on port: #{port}")
        {:noreply, %{listen_socket: socket}}

      {:error, reason} ->
        Logger.error("TCP Listen failed due to: #{inspect(reason)}")
        {:stop, :normal, state}
    end
  end

  def handle_info(msg, state) do
    Logger.warn("Unexpected message: #{inspect(msg)}}")
    {:noreply, state}
  end

  @doc false
  def handle_cast({:accept, proxy_opts}, state = %{listen_socket: socket}) do
    case :gen_tcp.accept(socket) do
      {:ok, client_socket} ->
        Logger.info("Connection accepted, spawning proxy server to manage connection")

        {:ok, proxy} =
          Chroxy.ProxyServer.start_link(
            upstream_socket: client_socket,
            proxy_opts: proxy_opts
          )

        # Hand the accepted socket over to the proxy process.
        :gen_tcp.controlling_process(client_socket, proxy)
        {:noreply, state}

      {:error, :closed} ->
        Logger.warn("Upstream listener socket closed")
        {:stop, :normal, state}

      {:error, :timeout} ->
        Logger.error("Upstream listener timed out waiting to accept")
        {:stop, :normal, state}

      {:error, :system_limit} ->
        Logger.error(
          "Upstream listen hit system limit all available ports in the Erlang emulator are in use"
        )

        {:stop, :normal, state}
    end
  end

  # Port may arrive as "", a decimal string, or an integer; "" selects the
  # default port.
  defp normalize_port(""), do: @default_port
  defp normalize_port(port) when is_binary(port), do: String.to_integer(port)
  defp normalize_port(port) when is_integer(port), do: port
end
25.827586
98
0.63251
ffbd05af3366bf364a4fd0e271f77a361b4715ca
8,012
ex
Elixir
lib/ory/hydra.ex
churcho/ory-hydra-elixir
6610df7bfc6cb4ae569a23cf7f7840d9873a28c2
[ "MIT" ]
null
null
null
lib/ory/hydra.ex
churcho/ory-hydra-elixir
6610df7bfc6cb4ae569a23cf7f7840d9873a28c2
[ "MIT" ]
null
null
null
lib/ory/hydra.ex
churcho/ory-hydra-elixir
6610df7bfc6cb4ae569a23cf7f7840d9873a28c2
[ "MIT" ]
null
null
null
defmodule ORY.Hydra do
  @moduledoc """
  ORY Hydra is an open source OAuth 2.0 and OpenID Connect 1.0 provider.
  This project provides an Elixir client for interacting with ORY Hydra's
  administrative REST API.

  For additional information about ORY Hydra please refer to their
  [official documentation](https://www.ory.sh/docs/hydra/).
  """

  alias ORY.Hydra.{
    Config,
    Operation,
    Request,
    Response
  }

  @type http_headers_t :: [{ String.t(), String.t() }]

  @type http_method_t :: :delete | :get | :post | :put

  @type response_t :: { :ok, Response.t() } | { :error, Response.t() | any }

  @doc "Accept a consent request."
  @spec accept_consent_request(map) :: Operation.t()
  def accept_consent_request(params) do
    build(:put, "/oauth2/auth/requests/consent/accept",
      params: params,
      params_in_query: [:consent_challenge]
    )
  end

  @doc "Accept a login request."
  @spec accept_login_request(map) :: Operation.t()
  def accept_login_request(params) do
    build(:put, "/oauth2/auth/requests/login/accept",
      params: params,
      params_in_query: [:login_challenge]
    )
  end

  @doc "Accepts a logout request."
  @spec accept_logout_request(map) :: Operation.t()
  def accept_logout_request(params) do
    build(:put, "/oauth2/auth/requests/logout/accept",
      params: params,
      params_in_query: [:logout_challenge]
    )
  end

  @doc "Check whether an ORY Hydra service is alive."
  @spec check_alive :: Operation.t()
  def check_alive do
    build(:get, "/health/alive")
  end

  @doc "Create an OAuth 2.0 client."
  @spec create_client(map) :: Operation.t()
  def create_client(params \\ %{}) do
    build(:post, "/clients", params: params)
  end

  @doc "Delete an OAuth 2.0 client."
  @spec delete_client(String.t()) :: Operation.t()
  def delete_client(id) do
    build(:delete, "/clients/#{id}")
  end

  @doc "Delete a JSON Web Key."
  @spec delete_jwk(String.t(), String.t()) :: Operation.t()
  def delete_jwk(set, kid) do
    build(:delete, "/keys/#{set}/#{kid}")
  end

  @doc "Delete a JSON Web Key set."
  @spec delete_jwk_set(String.t()) :: Operation.t()
  def delete_jwk_set(set) do
    build(:delete, "/keys/#{set}")
  end

  @doc "Delete expired OAuth2 access tokens."
  @spec flush_expired_access_tokens(map) :: Operation.t()
  def flush_expired_access_tokens(params \\ %{}) do
    build(:post, "/oauth2/flush", params: params)
  end

  @doc "Generate a JSON Web Key set."
  @spec generate_jwk_set(String.t(), map) :: Operation.t()
  def generate_jwk_set(set, params) do
    build(:post, "/keys/#{set}", params: params)
  end

  @doc "Retrieve an OAuth 2.0 client."
  @spec get_client(String.t()) :: Operation.t()
  def get_client(id) do
    build(:get, "/clients/#{id}")
  end

  @doc "Retrieve consent request."
  @spec get_consent_request(map) :: Operation.t()
  def get_consent_request(params) do
    build(:get, "/oauth2/auth/requests/consent",
      params: params,
      params_in_query: [:consent_challenge]
    )
  end

  @doc "Retrieve a JSON Web Key."
  @spec get_jwk(String.t(), String.t()) :: Operation.t()
  def get_jwk(set, kid) do
    build(:get, "/keys/#{set}/#{kid}")
  end

  @doc "Retrieve a JSON Web Key set."
  @spec get_jwk_set(String.t()) :: Operation.t()
  def get_jwk_set(set) do
    build(:get, "/keys/#{set}")
  end

  @doc "Retrieve login request."
  @spec get_login_request(map) :: Operation.t()
  def get_login_request(params) do
    build(:get, "/oauth2/auth/requests/login", params: params)
  end

  @doc "Retrieve logout request."
  @spec get_logout_request(map) :: Operation.t()
  def get_logout_request(params) do
    build(:get, "/oauth2/auth/requests/logout", params: params)
  end

  @doc "Retrieve a metric snapshot from an ORY Hydra service."
  @spec get_metrics :: Operation.t()
  def get_metrics do
    build(:get, "/metrics/prometheus")
  end

  @doc "Retrieve the version of the ORY Hydra service."
  @spec get_version :: Operation.t()
  def get_version do
    build(:get, "/version")
  end

  @doc "Check whether a token is active."
  @spec introspect(map) :: Operation.t()
  def introspect(params) do
    build(:post, "/oauth2/introspect",
      content_type: :form_urlencoded,
      params: params
    )
  end

  @doc "List all OAuth 2.0 clients."
  @spec list_clients(map) :: Operation.t()
  def list_clients(params \\ %{}) do
    build(:get, "/clients", params: params)
  end

  @doc "List all consent sessions."
  @spec list_consent_sessions(map) :: Operation.t()
  def list_consent_sessions(params) do
    build(:get, "/oauth2/auth/sessions/consent", params: params)
  end

  @doc "Reject a consent request."
  @spec reject_consent_request(map) :: Operation.t()
  def reject_consent_request(params) do
    build(:put, "/oauth2/auth/requests/consent/reject",
      params: params,
      params_in_query: [:consent_challenge]
    )
  end

  @doc "Reject a login request."
  @spec reject_login_request(map) :: Operation.t()
  def reject_login_request(params) do
    build(:put, "/oauth2/auth/requests/login/reject",
      params: params,
      params_in_query: [:login_challenge]
    )
  end

  @doc "Reject a logout request."
  @spec reject_logout_request(map) :: Operation.t()
  def reject_logout_request(params) do
    build(:put, "/oauth2/auth/requests/logout/reject",
      params: params,
      params_in_query: [:logout_challenge]
    )
  end

  @doc "Issue a request to the ORY Hydra administrative API."
  @spec request(Operation.t(), map) :: response_t
  def request(operation, config) do
    Request.send(operation, Config.new(config))
  end

  @doc "Revoke consent sessions."
  @spec revoke_consent_sessions(map) :: Operation.t()
  def revoke_consent_sessions(params) do
    build(:delete, "/oauth2/auth/sessions/consent",
      params: params,
      params_in_query: [:subject]
    )
  end

  @doc "Revoke login sessions."
  @spec revoke_login_sessions(map) :: Operation.t()
  def revoke_login_sessions(params) do
    build(:delete, "/oauth2/auth/sessions/login",
      params: params,
      params_in_query: [:subject]
    )
  end

  @doc "Update an OAuth 2.0 client."
  @spec update_client(String.t(), map) :: Operation.t()
  def update_client(id, params) do
    build(:put, "/clients/#{id}", params: params)
  end

  @doc "Update a JSON Web Key."
  @spec update_jwk(String.t(), String.t(), map) :: Operation.t()
  def update_jwk(set, kid, params) do
    build(:put, "/keys/#{set}/#{kid}", params: params)
  end

  @doc "Update a JSON Web Key set."
  @spec update_jwk_set(String.t(), map) :: Operation.t()
  def update_jwk_set(set, params) do
    build(:put, "/keys/#{set}", params: params)
  end

  # Assembles an `Operation` struct for the given HTTP method, path and any
  # extra fields (`:params`, `:params_in_query`, `:content_type`).
  defp build(method, path, fields \\ []) do
    struct!(Operation, [{:method, method}, {:path, path} | fields])
  end
end
21.308511
77
0.604718
ffbd2e7de0c2e87966a42f493d0207ce81b9aeed
2,699
ex
Elixir
lib/game/gossip.ex
NatTuck/ex_venture
7a74d33025a580f1e3e93d3755f22258eb3e9127
[ "MIT" ]
null
null
null
lib/game/gossip.ex
NatTuck/ex_venture
7a74d33025a580f1e3e93d3755f22258eb3e9127
[ "MIT" ]
null
null
null
lib/game/gossip.ex
NatTuck/ex_venture
7a74d33025a580f1e3e93d3755f22258eb3e9127
[ "MIT" ]
null
null
null
defmodule Game.Gossip do
  @moduledoc """
  Callback module for Gossip
  """

  require Logger

  alias Game.Channel
  alias Game.Channels
  alias Game.Character
  alias Game.Message
  alias Game.Session

  @behaviour Gossip.Client.Core
  @behaviour Gossip.Client.Players
  @behaviour Gossip.Client.Tells
  @behaviour Gossip.Client.Games

  @impl true
  def user_agent(), do: ExVenture.version()

  @impl true
  def channels() do
    Enum.map(Channels.gossip_channels(), fn channel -> channel.gossip_channel end)
  end

  @impl true
  def players() do
    Enum.map(Session.Registry.connected_players(), fn state -> state.player.name end)
  end

  @impl true
  def message_broadcast(message) do
    # Only the cluster leader relays remote broadcasts, so each message is
    # delivered to the channel exactly once.
    with {:ok, channel} <- Channels.gossip_channel(message.channel),
         true <- Squabble.node_is_leader?() do
      Channel.broadcast(channel.name, Message.gossip_broadcast(channel, message))

      :ok
    else
      _ ->
        :ok
    end
  end

  @impl true
  def player_sign_in(game_name, player_name) do
    Logger.info(fn -> "Gossip - new player sign in #{player_name}@#{game_name}" end)

    notify_all({"gossip/player-online", game_name, player_name})
  end

  @impl true
  def player_sign_out(game_name, player_name) do
    Logger.info(fn -> "Gossip - new player sign out #{player_name}@#{game_name}" end)

    notify_all({"gossip/player-offline", game_name, player_name})
  end

  @impl true
  def player_update(game_name, player_names) do
    Logger.debug(fn -> "Received update for game #{game_name} - #{inspect(player_names)}" end)
  end

  @impl true
  def tell_receive(from_game, from_player, to_player, message) do
    Logger.info(fn -> "Received a new tell from #{from_player}@#{from_game} to #{to_player}" end)

    with true <- Squabble.node_is_leader?(),
         {:ok, player} <- Session.Registry.find_player(to_player) do
      sender = "#{from_player}@#{from_game}"
      Channel.tell({:player, player}, {:gossip, sender}, Message.tell(%{name: sender}, message))

      :ok
    else
      _ ->
        :ok
    end
  end

  @impl true
  def game_update(_game), do: :ok

  @impl true
  def game_connect(_game), do: :ok

  @impl true
  def game_disconnect(_game), do: :ok

  # Fans an event out to every locally connected player, but only on the
  # cluster leader so remote sign-in/out events are announced exactly once.
  defp notify_all(event) do
    if Squabble.node_is_leader?() do
      Enum.each(Session.Registry.connected_players(), fn %{player: player} ->
        Character.notify({:player, player}, event)
      end)
    else
      :ok
    end
  end
end
22.680672
106
0.639126
ffbd329b93290f30af89891a3e1738c980ba50a6
1,351
ex
Elixir
lib/elixir/lib/kernel/error_handler.ex
Sae497/elixir
666a0bca8daac6a9dffb80db837c883805a6a22e
[ "Apache-2.0" ]
1
2018-02-24T19:48:35.000Z
2018-02-24T19:48:35.000Z
lib/elixir/lib/kernel/error_handler.ex
Sae497/elixir
666a0bca8daac6a9dffb80db837c883805a6a22e
[ "Apache-2.0" ]
null
null
null
lib/elixir/lib/kernel/error_handler.ex
Sae497/elixir
666a0bca8daac6a9dffb80db837c883805a6a22e
[ "Apache-2.0" ]
null
null
null
# Implement error_handler pattern for Erlang
# which is integrated with Kernel.ParallelCompiler
defmodule Kernel.ErrorHandler do
  @moduledoc false

  # Invoked by the VM when a call reaches a function in a module that is not
  # loaded; tries to load it (or wait for the parallel compiler to produce
  # it), then defers to the default Erlang error handler to retry or raise.
  @spec undefined_function(module, atom, list) :: term
  def undefined_function(module, fun, args) do
    ensure_loaded(module) or ensure_compiled(module, :module, true)
    :error_handler.undefined_function(module, fun, args)
  end

  # Same as undefined_function/3, but for anonymous-function application.
  @spec undefined_lambda(module, fun, list) :: term
  def undefined_lambda(module, fun, args) do
    ensure_loaded(module) or ensure_compiled(module, :module, true)
    :error_handler.undefined_lambda(module, fun, args)
  end

  # True when the code server can load the module.
  @spec ensure_loaded(module) :: boolean
  def ensure_loaded(module) do
    match?({:module, _}, :code.ensure_loaded(module))
  end

  @spec ensure_compiled(module, atom, boolean) :: :found | :not_found | :deadlock
  # `nil` can never name a module, so don't block waiting on it.
  def ensure_compiled(nil, _kind, _deadlock?) do
    :not_found
  end

  # Asks the parallel compiler coordinator to tell us when the module is
  # available, then blocks until it answers with our unique ref.
  def ensure_compiled(module, kind, deadlock?) do
    ref = :erlang.make_ref()
    parent = :erlang.get(:elixir_compiler_pid)
    modules = :elixir_module.compiler_modules()
    send(parent, {:waiting, kind, self(), ref, module, modules, deadlock?})
    # Drop garbage while this process may be parked for a long while.
    :erlang.garbage_collect(self())

    receive do
      {^ref, value} -> value
    end
  end
end
30.704545
81
0.709104
ffbd6b9b789c11abba170b70ee6ec4bc2e203fd9
267
ex
Elixir
lib/geo/h3index.ex
tierralibre/geo
c02606e1a524bcf20e6f49160c01a79a7eb5e6d8
[ "MIT" ]
null
null
null
lib/geo/h3index.ex
tierralibre/geo
c02606e1a524bcf20e6f49160c01a79a7eb5e6d8
[ "MIT" ]
null
null
null
lib/geo/h3index.ex
tierralibre/geo
c02606e1a524bcf20e6f49160c01a79a7eb5e6d8
[ "MIT" ]
null
null
null
defmodule Geo.H3Index do
  @moduledoc """
  Defines the H3Index struct.

  Holds a single `h3index` value, defaulting to `nil`.
  """

  # NOTE(review): these commented-out types reference coordinate fields that
  # do not exist on this struct — presumably leftovers from a coordinate-based
  # Geo struct used as a template; confirm and remove.
  #@type latitude :: number
  #@type longitude :: number
  #@type t :: %Geo.H3Index{coordinates: {longitude, latitude}, srid: integer | nil, properties: map}

  defstruct h3index: nil
end
22.25
100
0.681648
ffbd82c0be32f7b479428d48116781758cc88753
7,066
exs
Elixir
test/mongo/change_stream_test.exs
aenglisc/elixir-mongodb-driver
a8a72fbb8690f44ac349e0449616ac2cfbf50640
[ "Apache-2.0" ]
null
null
null
test/mongo/change_stream_test.exs
aenglisc/elixir-mongodb-driver
a8a72fbb8690f44ac349e0449616ac2cfbf50640
[ "Apache-2.0" ]
null
null
null
test/mongo/change_stream_test.exs
aenglisc/elixir-mongodb-driver
a8a72fbb8690f44ac349e0449616ac2cfbf50640
[ "Apache-2.0" ]
null
null
null
defmodule Mongo.ChangeStreamTest do
  # Exercises change-stream resumption against a live MongoDB deployment
  # using server fail points. The tests share one database/collection, so
  # they must run sequentially.
  use ExUnit.Case, async: false # DO NOT MAKE ASYNCHRONOUS

  setup_all do
    assert {:ok, top} = Mongo.TestConnection.connect
    Mongo.drop_database(top)
    assert {:ok, %Mongo.InsertOneResult{}} = Mongo.insert_one(top, "users", %{name: "Waldo"})
    %{pid: top}
  end

  setup do
    {:ok, catcher} = EventCatcher.start_link()

    on_exit(fn -> EventCatcher.stop(catcher) end)

    [catcher: catcher]
  end

  # Watches "users", forwarding each resume token and the first change
  # document to `monitor`. Sends :go once the cursor is set up so the caller
  # knows when it is safe to insert.
  def consumer(top, monitor) do
    cursor =
      Mongo.watch_collection(top, "users", [],
        fn doc -> send(monitor, {:token, doc}) end,
        max_time: 1_000
      )

    send(monitor, :go)

    result =
      cursor
      |> Enum.take(1)
      |> Enum.at(0)

    send(monitor, {:insert, result})
  end

  # Like consumer/2, but waits a second before watching and does not signal
  # readiness.
  def consumer_1(top, monitor) do
    Process.sleep(1000)

    cursor =
      Mongo.watch_collection(top, "users", [],
        fn doc -> send(monitor, {:token, doc}) end,
        max_time: 1_000
      )

    result =
      cursor
      |> Enum.take(1)
      |> Enum.at(0)

    send(monitor, {:insert, result})
  end

  # Resumes watching from `token` and forwards the first change document.
  def consumer_2(top, monitor, token) do
    Process.sleep(1000)

    cursor =
      Mongo.watch_collection(top, "users", [],
        fn doc -> send(monitor, {:token, doc}) end,
        resume_after: token,
        max_time: 1_000
      )

    result =
      cursor
      |> Enum.take(1)
      |> Enum.at(0)

    send(monitor, {:insert, result})
  end

  # Resumes watching from `token` and forwards the names of the next four
  # changed documents.
  def consumer_3(top, monitor, token) do
    Process.sleep(1000)

    cursor =
      Mongo.watch_collection(top, "users", [],
        fn doc -> send(monitor, {:token, doc}) end,
        resume_after: token,
        max_time: 1_000
      )

    result =
      cursor
      |> Enum.take(4)
      |> Enum.map(fn %{"fullDocument" => %{"name" => name}} -> name end)

    send(monitor, {:insert, result})
  end

  # Inserts three users after giving the consumer time to start watching.
  def producer(top) do
    Process.sleep(2000)

    assert {:ok, %Mongo.InsertOneResult{}} = Mongo.insert_one(top, "users", %{name: "Greta"})
    assert {:ok, %Mongo.InsertOneResult{}} = Mongo.insert_one(top, "users", %{name: "Gustav"})
    assert {:ok, %Mongo.InsertOneResult{}} = Mongo.insert_one(top, "users", %{name: "Tom"})
  end

  @tag :mongo_3_6
  test "change stream resumes after HostUnreachable", c do
    top = c.pid
    catcher = c.catcher

    # Fail the next getMore with HostUnreachable (code 6).
    cmd = [
      configureFailPoint: "failGetMoreAfterCursorCheckout",
      mode: [times: 1],
      data: [errorCode: 6, closeConnection: false]
    ]

    me = self()

    Mongo.admin_command(top, cmd)
    spawn(fn -> consumer(top, me) end)

    assert_receive :go
    assert {:ok, %Mongo.InsertOneResult{}} = Mongo.insert_one(top, "users", %{name: "Greta"})
    assert_receive {:token, _}, 5_000
    assert_receive {:token, _token}, 5_000
    assert_receive {:insert, %{"fullDocument" => %{"name" => "Greta"}}}, 5_000

    assert [{:getMore, ["ResumableChangeStreamError"]}] ==
             EventCatcher.failed_events(catcher)
             |> Enum.map(fn event -> {event.command_name, event.failure.error_labels} end)
  end

  @tag :mongo_3_6
  test "change stream resumes after HostNotFound", c do
    top = c.pid
    catcher = c.catcher

    # Fail the next getMore with HostNotFound (code 7).
    cmd = [
      configureFailPoint: "failGetMoreAfterCursorCheckout",
      mode: [times: 1],
      data: [errorCode: 7, closeConnection: false]
    ]

    me = self()

    Mongo.admin_command(top, cmd)
    spawn(fn -> consumer(top, me) end)

    assert_receive :go
    assert {:ok, %Mongo.InsertOneResult{}} = Mongo.insert_one(top, "users", %{name: "Greta"})
    assert_receive {:token, _}, 5_000
    assert_receive {:token, _token}, 5_000
    assert_receive {:insert, %{"fullDocument" => %{"name" => "Greta"}}}, 5_000

    assert [{:getMore, ["ResumableChangeStreamError"]}] ==
             EventCatcher.failed_events(catcher)
             |> Enum.map(fn event -> {event.command_name, event.failure.error_labels} end)
  end

  @tag :mongo_3_6
  test "change stream resumes after NetworkTimeout", c do
    top = c.pid
    catcher = c.catcher

    # Fail the next getMore with NetworkTimeout (code 89).
    cmd = [
      configureFailPoint: "failGetMoreAfterCursorCheckout",
      mode: [times: 1],
      data: [errorCode: 89, closeConnection: false]
    ]

    me = self()

    Mongo.admin_command(top, cmd)
    spawn(fn -> consumer(top, me) end)

    assert_receive :go
    assert {:ok, %Mongo.InsertOneResult{}} = Mongo.insert_one(top, "users", %{name: "Greta"})
    assert_receive {:token, _}, 5_000
    assert_receive {:token, _token}, 5_000
    assert_receive {:insert, %{"fullDocument" => %{"name" => "Greta"}}}, 5_000

    assert [{:getMore, ["ResumableChangeStreamError"]}] ==
             EventCatcher.failed_events(catcher)
             |> Enum.map(fn event -> {event.command_name, event.failure.error_labels} end)
  end

  @tag :mongo_3_6
  test "change stream resumes after ShutdownInProgress", c do
    top = c.pid
    catcher = c.catcher

    # Fail the next getMore with ShutdownInProgress (code 91).
    cmd = [
      configureFailPoint: "failGetMoreAfterCursorCheckout",
      mode: [times: 1],
      data: [errorCode: 91, closeConnection: false]
    ]

    me = self()

    Mongo.admin_command(top, cmd)
    spawn(fn -> consumer(top, me) end)

    assert_receive :go
    assert {:ok, %Mongo.InsertOneResult{}} = Mongo.insert_one(top, "users", %{name: "Greta"})
    assert_receive {:token, _}, 5_000
    assert_receive {:token, _token}, 5_000
    assert_receive {:insert, %{"fullDocument" => %{"name" => "Greta"}}}, 5_000

    assert [{:getMore, ["ResumableChangeStreamError"]}] ==
             EventCatcher.failed_events(catcher)
             |> Enum.map(fn event -> {event.command_name, event.failure.error_labels} end)
  end

  @tag :mongo_4_3
  test "change stream resumes if error contains ResumableChangeStreamError", c do
    top = c.pid
    catcher = c.catcher

    # From 4.3 on, resumability is driven by the error label rather than a
    # whitelisted error code.
    cmd = [
      configureFailPoint: "failCommand",
      mode: [times: 1],
      data: [errorCode: 50, failCommands: ["getMore"], closeConnection: false, errorLabels: ["ResumableChangeStreamError"]]
    ]

    me = self()

    Mongo.admin_command(top, cmd)
    spawn(fn -> consumer(top, me) end)

    assert_receive :go
    assert {:ok, %Mongo.InsertOneResult{}} = Mongo.insert_one(top, "users", %{name: "Greta"})
    assert_receive {:token, _}, 5_000
    # Fixed: `token` was bound but never used (compiler warning).
    assert_receive {:token, _token}, 5_000
    assert_receive {:insert, %{"fullDocument" => %{"name" => "Greta"}}}, 5_000

    assert [{:getMore, ["ResumableChangeStreamError"]}] ==
             EventCatcher.failed_events(catcher)
             |> Enum.map(fn event -> {event.command_name, event.failure.error_labels} end)
  end

  @tag :mongo_3_6
  test "change stream: watch and resume_after", c do
    top = c.pid

    me = self()

    spawn(fn -> consumer_1(top, me) end)
    spawn(fn -> producer(top) end)

    assert_receive {:token, _}, 5_000
    assert_receive {:token, token}, 5_000
    assert_receive {:insert, %{"fullDocument" => %{"name" => "Greta"}}}, 5_000

    Process.sleep(500)
    assert {:ok, %Mongo.InsertOneResult{}} = Mongo.insert_one(top, "users", %{name: "Liese"})

    # Resuming after `token` replays changes made while no consumer watched.
    spawn(fn -> consumer_2(top, me, token) end)
    spawn(fn -> producer(top) end)

    assert_receive {:token, _}, 5_000
    assert_receive {:insert, %{"fullDocument" => %{"name" => "Gustav"}}}, 5_000

    #Process.sleep(500)
    spawn(fn -> consumer_3(top, me, token) end)
    spawn(fn -> producer(top) end)

    assert_receive {:token, _}, 5_000
    assert_receive {:insert, ["Gustav", "Tom", "Liese", "Greta"]}, 5_000
  end
end
32.562212
172
0.645202
ffbd93d27fdff37b00fd053a58c98f18f994b35f
1,923
exs
Elixir
mix.exs
zekus/miss-elixir
675107573b30b9f1843930be980c72f755503678
[ "Apache-2.0" ]
null
null
null
mix.exs
zekus/miss-elixir
675107573b30b9f1843930be980c72f755503678
[ "Apache-2.0" ]
null
null
null
mix.exs
zekus/miss-elixir
675107573b30b9f1843930be980c72f755503678
[ "Apache-2.0" ]
null
null
null
defmodule Miss.MixProject do
  use Mix.Project

  # Package identity, shared by project/0, docs/0 and package/0.
  @app :miss
  @name "Miss Elixir"
  @repo "https://github.com/prodis/miss-elixir"
  @version "0.1.4"

  def project do
    [
      app: @app,
      name: @name,
      version: @version,
      elixir: "~> 1.10",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      dialyzer: dialyzer(),
      description: description(),
      docs: docs(),
      package: package(),
      preferred_cli_env: preferred_cli_env(),
      test_coverage: [tool: ExCoveralls]
    ]
  end

  # No application callback module and no extra applications to start.
  def application, do: []

  defp deps do
    [
      # Development
      {:credo, "~> 1.5", only: :dev, runtime: false},
      {:dialyxir, "~> 1.1.0", only: :dev, runtime: false},
      {:ex_doc, "~> 0.25", only: :dev, runtime: false},

      # Test
      {:decimal, "~> 2.0", only: :test},
      {:excoveralls, "~> 0.14", only: :test}
    ]
  end

  # Hex package description.
  defp description do
    """
    Some functions that I miss in Elixir standard library (and maybe you too).

    Miss Elixir brings in a non-intrusive way some extra functions that, for different reasons, are not part of the Elixir standard library.
    """
  end

  defp dialyzer do
    [plt_file: {:no_warn, "priv/plts/dialyzer.plt"}]
  end

  defp docs do
    [
      main: "readme",
      extras: ~w(README.md CHANGELOG.md),
      formatters: ~w(html),
      logo: "assets/miss-elixir-logo.png",
      source_ref: @version,
      source_url: @repo,
      canonical: "http://hexdocs.pm/miss"
    ]
  end

  defp package do
    [
      files: ~w(lib mix.exs README.md CHANGELOG.md LICENSE),
      maintainers: ["Fernando Hamasaki de Amorim"],
      licenses: ["Apache 2.0"],
      links: %{"GitHub" => @repo}
    ]
  end

  # Every coveralls task runs in the :test environment.
  defp preferred_cli_env do
    for task <- ~w(coveralls coveralls.detail coveralls.post coveralls.html coveralls.travis)a,
        do: {task, :test}
  end
end
22.103448
95
0.573583
ffbdd55afd92db790ae91e732c5999df95a7c3a7
328
ex
Elixir
lib/trento/domain/cluster/events/cluster_discovered_health_changed.ex
trento-project/web
3260b30c781bffbbb0e5205cd650966c4026b9ac
[ "Apache-2.0" ]
1
2022-03-22T16:59:34.000Z
2022-03-22T16:59:34.000Z
lib/trento/domain/cluster/events/cluster_discovered_health_changed.ex
trento-project/web
3260b30c781bffbbb0e5205cd650966c4026b9ac
[ "Apache-2.0" ]
24
2022-03-22T16:45:25.000Z
2022-03-31T13:00:02.000Z
lib/trento/domain/cluster/events/cluster_discovered_health_changed.ex
trento-project/web
3260b30c781bffbbb0e5205cd650966c4026b9ac
[ "Apache-2.0" ]
1
2022-03-30T14:16:16.000Z
2022-03-30T14:16:16.000Z
defmodule Trento.Domain.Events.ClusterDiscoveredHealthChanged do
  @moduledoc """
  This event is emitted when the discovered health of a cluster changes.
  """

  use Trento.Event

  # `defevent` (provided by Trento.Event) generates the event struct/schema
  # from the field declarations below.
  defevent do
    field :cluster_id, :string
    # Health state as reported by discovery, constrained to the known values.
    field :discovered_health, Ecto.Enum, values: [:passing, :warning, :critical, :unknown]
  end
end
25.230769
90
0.737805
ffbdda6f3671d442ffbc8c83b08b3bd3f190086c
15,947
ex
Elixir
lib/core/asset.ex
bahanni/custom_rpi4
ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5
[ "MIT" ]
null
null
null
lib/core/asset.ex
bahanni/custom_rpi4
ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5
[ "MIT" ]
null
null
null
lib/core/asset.ex
bahanni/custom_rpi4
ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5
[ "MIT" ]
null
null
null
defmodule FarmbotOS.Asset do @moduledoc """ Top level module, with some helpers. Persists application resources to disk. Submodules of this module usually (but not always) correspond to a resource in the REST API. See official REST API docs for details. """ alias FarmbotOS.Asset.{ CriteriaRetriever, Device, FarmEvent, FarmwareEnv, FbosConfig, FirmwareConfig, Peripheral, Point, PointGroup, Regimen, RegimenInstance, Repo, Sensor, SensorReading, Sequence, Tool } alias FarmbotOS.ChangeSupervisor import Ecto.Query require Logger ## Begin Device def device() do Repo.one(Device) || %Device{} end def device(field) do Map.fetch!(device(), field) end def update_device!(params) do device() |> Device.changeset(params) |> Repo.insert_or_update!() end def delete_device!(id) do if device = Repo.get_by(Device, id: id) do Repo.delete!(device) end :ok end ## End Device ## Begin FarmEvent def new_farm_event!(params) do %FarmEvent{} |> FarmEvent.changeset(params) |> Repo.insert!() end @doc "Returns a FarmEvent by its API id." 
def get_farm_event(id) do Repo.get_by(FarmEvent, id: id) end def update_farm_event!(farm_event, params) do farm_event = farm_event |> FarmEvent.changeset(params) |> Repo.update!() if farm_event.executable_type == "Regimen" do regimen_instance = get_regimen_instance(farm_event) if regimen_instance do regimen_instance |> Repo.preload([:farm_event, :regimen]) |> RegimenInstance.changeset(%{updated_at: DateTime.utc_now()}) |> Repo.update!() end end farm_event end def delete_farm_event!(farm_event) do ri = get_regimen_instance(farm_event) ri && Repo.delete!(ri) Repo.delete!(farm_event) end def add_execution_to_farm_event!(%FarmEvent{} = farm_event, params \\ %{}) do %FarmEvent.Execution{} |> FarmEvent.Execution.changeset(params) |> Ecto.Changeset.put_assoc(:farm_event, farm_event) |> Repo.insert!() end def get_farm_event_execution(%FarmEvent{} = farm_event, scheduled_at) do Repo.all( from(e in FarmEvent.Execution, where: e.farm_event_local_id == ^farm_event.local_id and e.scheduled_at == ^scheduled_at, limit: 1 ) ) |> Enum.at(0) end ## End FarmEvent ## Begin FbosConfig @doc "Gets the local config" def fbos_config() do Repo.one(FbosConfig) || %FbosConfig{} end @doc "Gets a field on the local config." def fbos_config(field) do Map.fetch!(fbos_config(), field) end @doc """ This function updates Farmbot OS's local database. It will **NOT** send any HTTP requests to the API. To do this, `FarmbotOS.Asset.Private.mark_dirty!/2` is almost certainly what you want. 
""" def update_fbos_config!(fbos_config \\ nil, params) do new_data = FbosConfig.changeset(fbos_config || fbos_config(), params) |> Repo.insert_or_update!() ChangeSupervisor.cast_child(new_data, {:new_data, new_data}) new_data end def delete_fbos_config!(id) do if fbos_config = Repo.get_by(FbosConfig, id: id) do Repo.delete!(fbos_config) end :ok end ## End FbosConfig ## Begin FirmwareConfig def firmware_config() do Repo.one(FirmwareConfig) || %FirmwareConfig{} end def firmware_config(field) do Map.fetch!(firmware_config(), field) end def update_firmware_config!(firmware_config \\ nil, params) do new_data = FirmwareConfig.changeset(firmware_config || firmware_config(), params) |> Repo.insert_or_update!() ChangeSupervisor.cast_child(new_data, {:new_data, new_data}) new_data end def delete_firmware_config!(id) do if firmware_config = Repo.get_by(FirmwareConfig, id: id) do Repo.delete!(firmware_config) end :ok end ## End FirmwareConfig ## Begin RegimenInstance @doc "returns every regimen instance" def list_regimen_instances() do RegimenInstance |> Repo.all() |> Repo.preload([:regimen, :farm_event]) end def get_regimen_instance(%FarmEvent{} = farm_event) do regimen = Repo.one(from(r in Regimen, where: r.id == ^farm_event.executable_id)) regimen && Repo.one( from(ri in RegimenInstance, where: ri.regimen_id == ^regimen.local_id and ri.farm_event_id == ^farm_event.local_id ) ) end def new_regimen_instance!(%FarmEvent{} = farm_event, params \\ %{}) do regimen = Repo.one!(from(r in Regimen, where: r.id == ^farm_event.executable_id)) RegimenInstance.changeset(%RegimenInstance{}, params) |> Ecto.Changeset.put_assoc(:regimen, regimen) |> Ecto.Changeset.put_assoc(:farm_event, farm_event) |> Repo.insert!() end def delete_regimen_instance!(%RegimenInstance{} = ri) do Repo.delete!(ri) end def add_execution_to_regimen_instance!(%RegimenInstance{} = ri, params \\ %{}) do %RegimenInstance.Execution{} |> RegimenInstance.Execution.changeset(params) |> 
Ecto.Changeset.put_assoc(:regimen_instance, ri) |> Repo.insert!() end def get_regimen_instance_execution(%RegimenInstance{} = ri, scheduled_at) do Repo.all( from(e in RegimenInstance.Execution, where: e.regimen_instance_local_id == ^ri.local_id and e.scheduled_at == ^scheduled_at, limit: 1 ) ) |> Enum.at(0) end ## End RegimenInstance ## Begin Point def get_point(params) do Repo.get_by(Point, params) end def update_point(point, params) do # TODO: RC 8 MAY 2020 - We need to hard refresh the point. # The CSVM appears to be caching resources. This leads # to problems when a user runs a sequence that has two # MARK AS steps. # NOTE: Updating the `meta` attribute is a _replace_ action # by default, not a merge action. # MORE NOTES: Mixed keys (symbol vs. string) will crash this FN. # Let's just stringify everything... new_meta = params[:meta] || params["meta"] || %{} old_meta = point.meta || %{} updated_meta = Map.merge(old_meta, new_meta) clean_params = params |> Map.merge(%{meta: updated_meta}) |> Enum.map(fn {k, v} -> {"#{k}", v} end) |> Map.new() Repo.get_by(Point, id: point.id) |> Point.changeset(clean_params) |> Repo.update() end @doc "Returns all points matching Point.pointer_type" def get_all_points_by_type(type) do from(p in Point, where: p.pointer_type == ^type and is_nil(p.discarded_at)) |> Repo.all() |> sort_points("random") end def sort_points(points, order_by) do points |> Enum.group_by(&group_points_by(&1, order_by)) |> Enum.sort(&group_sort(&1, &2, order_by)) |> Enum.map(fn {_group_index, group} -> Enum.sort(group, &sort_points(&1, &2, order_by)) end) |> List.flatten() end def group_points_by(%{x: x}, algo) when algo in ~w(xy_ascending xy_descending), do: x def group_points_by(%{y: y}, algo) when algo in ~w(yx_ascending yx_descending), do: y def group_points_by(%{x: x, y: y}, "random"), do: Enum.random([x, y]) def group_sort({lgroup, _}, {rgroup, _}, "xy_ascending"), do: lgroup <= rgroup def group_sort({lgroup, _}, {rgroup, _}, "yx_ascending"), do: lgroup 
<= rgroup def group_sort({lgroup, _}, {rgroup, _}, "xy_descending"), do: lgroup >= rgroup def group_sort({lgroup, _}, {rgroup, _}, "yx_descending"), do: lgroup >= rgroup def group_sort(_, _, "random"), do: Enum.random([true, false]) def sort_points(%{y: ly}, %{y: ry}, "xy_ascending"), do: ly <= ry def sort_points(%{y: ly}, %{y: ry}, "xy_descending"), do: ly >= ry def sort_points(%{x: lx}, %{x: rx}, "yx_ascending"), do: lx <= rx def sort_points(%{x: lx}, %{x: rx}, "yx_descending"), do: lx >= rx def sort_points(_, _, "random"), do: Enum.random([true, false]) ## End Point ## Begin PointGroup def get_point_group(params) do case Repo.get_by(PointGroup, params) do nil -> nil %{sort_type: nil} = group -> group %{point_ids: unsorted, sort_type: sort_by} = point_group -> sorted = Repo.all(from(p in Point, where: p.id in ^unsorted)) |> sort_points(sort_by) |> Enum.map(&Map.fetch!(&1, :id)) %{point_group | point_ids: sorted} end end def find_points_via_group(id) do case Repo.get_by(PointGroup, id: id) do %{id: _id, sort_type: sort_by} = point_group -> # I don't like this because it makes the code # harder to understand. # We are essentially patching the value of # point_group.point_ids with additional IDs. # Keep this in mind when debugging sequences # that deal with point groups- the point_ids # value is not a reflection of what is in # the DB / API. 
sorted = CriteriaRetriever.run(point_group) |> sort_points(sort_by || "xy_ascending") |> Enum.map(fn point -> point.id end) %{point_group | point_ids: sorted} other -> # Swallow all other errors a = inspect(id) b = inspect(other) Logger.debug("Unexpected point group #{a} #{b}") nil end end def new_point_group!(params) do %PointGroup{} |> PointGroup.changeset(params) |> Repo.insert!() end def update_point_group!(point_group, params) do updated = point_group |> PointGroup.changeset(params) |> Repo.update!() regimen_instances = list_regimen_instances() farm_events = Repo.all(FarmEvent) # check for any matching asset using this point group. # This is pretty recursive and probably isn't super great # for performance, but SQL can't check this stuff unfortunately. for asset <- farm_events ++ regimen_instances do # TODO(Connor) this might be worth creating a behaviour for if uses_point_group?(asset, point_group) do Logger.debug( "#{inspect(asset)} uses PointGroup: #{inspect(point_group)}. Reindexing it." 
) FarmbotOS.ChangeSupervisor.update_child(asset) end end updated end def delete_point_group!(%PointGroup{} = point_group) do Repo.delete!(point_group) end def uses_point_group?(%FarmEvent{body: body}, %PointGroup{id: point_group_id}) do any_body_node_uses_point_group?(body, point_group_id) end def uses_point_group?( %Regimen{body: body, regimen_items: regimen_items}, %PointGroup{ id: point_group_id } ) do any_body_node_uses_point_group?(body, point_group_id) || Enum.find(regimen_items, fn %{sequence_id: sequence_id} -> any_body_node_uses_point_group?( get_sequence(sequence_id).body, point_group_id ) end) end def uses_point_group?( %RegimenInstance{farm_event: farm_event, regimen: regimen}, point_group ) do uses_point_group?(farm_event, point_group) || uses_point_group?(regimen, point_group) end def any_body_node_uses_point_group?(body, point_group_id) do Enum.find(body, fn %{ kind: "execute", body: execute_body } -> any_body_node_uses_point_group?(execute_body, point_group_id) %{ args: %{ "data_value" => %{ "args" => %{"resource_id" => ^point_group_id}, "kind" => "point_group" }, "label" => "parent" }, kind: "parameter_application" } -> true %{ args: %{ "data_value" => %{ "args" => %{"point_group_id" => ^point_group_id}, "kind" => "point_group" }, "label" => "parent" }, kind: "parameter_application" } -> true _ -> false end) end ## End PointGroup ## Begin Regimen @doc "Get a regimen by it's API id" def get_regimen(id) do Repo.get_by(Regimen, id: id) end @doc "Enter a new regimen into the DB" def new_regimen!(params) do %Regimen{} |> Regimen.changeset(params) |> Repo.insert!() end def delete_regimen!(regimen) do regimen_instances = Repo.all( from(ri in RegimenInstance, where: ri.regimen_id == ^regimen.local_id) ) for ri <- regimen_instances do delete_regimen_instance!(ri) end Repo.delete!(regimen) end @doc "Update an existing regimen" def update_regimen!(regimen, params) do regimen_instances = Repo.all( from(ri in RegimenInstance, where: ri.regimen_id == 
^regimen.local_id) ) |> Repo.preload([:farm_event, :regimen]) for ri <- regimen_instances do ri |> RegimenInstance.changeset(%{updated_at: DateTime.utc_now()}) |> Repo.update!() end regimen |> Regimen.changeset(params) |> Repo.update!() end ## End Regimen ## Begin Sequence @doc "Get a sequence by it's API id" def get_sequence(id) do Repo.get_by(Sequence, id: id) end def update_sequence!(%Sequence{} = sequence, params \\ %{}) do sequence_id = sequence.id farm_events = Repo.all( from(f in FarmEvent, where: f.executable_type == "Sequence" and f.executable_id == ^sequence_id ) ) regimen_instances = RegimenInstance |> Repo.all() |> Repo.preload([:regimen, :farm_event]) |> Enum.filter(fn %{regimen: %{regimen_items: items}} -> Enum.find(items, fn %{sequence_id: ^sequence_id} -> true %{sequence_id: _} -> true end) %{regimen: nil} -> false end) for asset <- farm_events ++ regimen_instances do FarmbotOS.ChangeSupervisor.update_child(asset) end Sequence.changeset(sequence, params) |> Repo.update!() end def new_sequence!(params \\ %{}) do Sequence.changeset(%Sequence{}, params) |> Repo.insert!() end ## End Sequence ## Begin FarmwareEnv def list_farmware_env() do Repo.all(FarmwareEnv) end def upsert_farmware_env_by_id(id, params) do fwe = Repo.get_by(FarmwareEnv, id: id) || %FarmwareEnv{} FarmwareEnv.changeset(fwe, params) |> Repo.insert_or_update() end def new_farmware_env(params) do key = params["key"] || params[:key] fwe = with key when is_binary(key) <- key, [fwe | _] <- Repo.all(from(fwe in FarmwareEnv, where: fwe.key == ^key)) do fwe else _ -> %FarmwareEnv{} end FarmwareEnv.changeset(fwe, params) |> Repo.insert_or_update() end ## End FarmwareEnv ## Begin Peripheral def get_peripheral(args) do Repo.get_by(Peripheral, args) end def get_peripheral_by_pin(pin) do Repo.get_by(Peripheral, pin: pin) end ## End Peripheral ## Begin Sensor def get_sensor(id) do Repo.get_by(Sensor, id: id) end def get_sensor_by_pin(pin) do Repo.get_by(Sensor, pin: pin) end def new_sensor!(params) do 
Sensor.changeset(%Sensor{}, params) |> Repo.insert!() end def update_sensor!(sensor, params) do sensor |> Sensor.changeset(params) |> Repo.update!() end ## End Sensor ## Begin SensorReading def get_sensor_reading(id) do Repo.get_by(SensorReading, id: id) end def new_sensor_reading!(params) do SensorReading.changeset(%SensorReading{}, params) |> Repo.insert!() end def update_sensor_reading!(sensor_reading, params) do sensor_reading |> SensorReading.changeset(params) |> Repo.update!() end ## End SensorReading ## Begin Tool def get_tool(args) do Repo.get_by(Tool, args) end ## End Tool end
24.309451
86
0.625384
ffbdf4793a62ef57dc9f6155d988a063558cffa7
1,367
ex
Elixir
apps/utils/lib/helpers/pid_helper.ex
AndonMitev/EWallet
898cde38933d6f134734528b3e594eedf5fa50f3
[ "Apache-2.0" ]
322
2018-02-28T07:38:44.000Z
2020-05-27T23:09:55.000Z
apps/utils/lib/helpers/pid_helper.ex
AndonMitev/EWallet
898cde38933d6f134734528b3e594eedf5fa50f3
[ "Apache-2.0" ]
643
2018-02-28T12:05:20.000Z
2020-05-22T08:34:38.000Z
apps/utils/lib/helpers/pid_helper.ex
AndonMitev/EWallet
898cde38933d6f134734528b3e594eedf5fa50f3
[ "Apache-2.0" ]
63
2018-02-28T10:57:06.000Z
2020-05-27T23:10:38.000Z
# Copyright 2018-2019 OmiseGO Pte Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. defmodule Utils.Helper.PidHelper do @moduledoc """ Module helping out with PID <> String conversions. """ def pid_to_binary(pid) when is_pid(pid) do "#PID" <> (pid |> :erlang.pid_to_list() |> :erlang.list_to_binary()) end def pid_to_binary(port) when is_port(port) do port |> :erlang.port_to_list() |> :erlang.list_to_binary() end # the msg tracer seems to give us back the registered name def pid_to_binary(atom) when is_atom(atom) do atom |> :erlang.whereis() |> pid_to_binary end def pid_from_string("#PID" <> string) do string |> :erlang.binary_to_list() |> :erlang.list_to_pid() end def pid_from_string(string) do string |> :erlang.binary_to_list() |> :erlang.list_to_atom() |> :erlang.whereis() end end
29.717391
74
0.70812
ffbe03caf9af9b8afd8c41ce3376e70b95000c79
1,187
ex
Elixir
web/channels/user_socket.ex
seanreed1111/phoenix-first_app
240d4d586a365392aa8c1fdf1a77986b7e7bd5f6
[ "Apache-2.0" ]
null
null
null
web/channels/user_socket.ex
seanreed1111/phoenix-first_app
240d4d586a365392aa8c1fdf1a77986b7e7bd5f6
[ "Apache-2.0" ]
null
null
null
web/channels/user_socket.ex
seanreed1111/phoenix-first_app
240d4d586a365392aa8c1fdf1a77986b7e7bd5f6
[ "Apache-2.0" ]
null
null
null
defmodule FirstApp.UserSocket do use Phoenix.Socket ## Channels # channel "rooms:*", FirstApp.RoomChannel ## Transports transport :websocket, Phoenix.Transports.WebSocket, timeout: 45_000 # transport :longpoll, Phoenix.Transports.LongPoll # Socket params are passed from the client and can # be used to verify and authenticate a user. After # verification, you can put default assigns into # the socket that will be set for all channels, ie # # {:ok, assign(socket, :user_id, verified_user_id)} # # To deny connection, return `:error`. # # See `Phoenix.Token` documentation for examples in # performing token verification on connect. def connect(_params, socket) do {:ok, socket} end # Socket id's are topics that allow you to identify all sockets for a given user: # # def id(socket), do: "users_socket:#{socket.assigns.user_id}" # # Would allow you to broadcast a "disconnect" event and terminate # all active sockets and channels for a given user: # # FirstApp.Endpoint.broadcast("users_socket:#{user.id}", "disconnect", %{}) # # Returning `nil` makes this socket anonymous. def id(_socket), do: nil end
31.236842
83
0.703454
ffbe1f934c3006daa6f3dd129d82da255e693b0d
1,053
ex
Elixir
elixir/camp/image_snatcher/image_snatcher.ex
gilmoreg/learn
0c4f34387f0d2235ecd88ac62fb86a51f87eb5c2
[ "MIT" ]
null
null
null
elixir/camp/image_snatcher/image_snatcher.ex
gilmoreg/learn
0c4f34387f0d2235ecd88ac62fb86a51f87eb5c2
[ "MIT" ]
null
null
null
elixir/camp/image_snatcher/image_snatcher.ex
gilmoreg/learn
0c4f34387f0d2235ecd88ac62fb86a51f87eb5c2
[ "MIT" ]
null
null
null
defmodule ImageSnatcher do def start() do get_cwd_list() |> sort_files end def get_cwd_list() do files = File.ls(".") case files do {:ok, files} -> files {:error, reason} -> IO.puts ~s{#{:file.format_error(reason)}} end end def sort_files(files) do Enum.each(files, fn file -> extension = String.split(file, ".") |> List.last case extension do "bmp" -> move_file(file) "jpg" -> move_file(file) "png" -> move_file(file) _ -> nil end end) end def move_file(file) do ensure_target_dir() case File.cp(file, "images/#{file}") do :ok -> case File.rm(file) do :ok -> :ok {:error, reason } -> IO.puts ~s{#{:file.format_error(reason)}} end {:error, reason } -> IO.puts ~s{#{:file.format_error(reason)}} end end def ensure_target_dir() do case File.mkdir("images") do :ok -> :ok {:error, :eexists } -> :ok {:error, reason } -> IO.puts ~s{#{:file.format_error(reason)}} end end end
23.4
70
0.555556
ffbe2fe9f92dd09506b83def3a8a25d95aa9839e
2,143
ex
Elixir
lib/time.ex
supernintendo/exmorph
aea3d65bd188003937478443827ab8df32bc8a01
[ "RSA-MD", "Apache-1.1" ]
null
null
null
lib/time.ex
supernintendo/exmorph
aea3d65bd188003937478443827ab8df32bc8a01
[ "RSA-MD", "Apache-1.1" ]
null
null
null
lib/time.ex
supernintendo/exmorph
aea3d65bd188003937478443827ab8df32bc8a01
[ "RSA-MD", "Apache-1.1" ]
null
null
null
defmodule Exmorph.Time do @doc """ Takes a string representing time and returns the integer value for that time in nanoseconds. ## Examples iex> Exmorph.Time.from_string("100000ms") 1.0e11 iex> Exmorph.Time.from_string("10s") 10000000000 iex> Exmorph.Time.from_string("3min") 180000000000 iex> Exmorph.Time.from_string("1hr") 3600000000000 """ def from_string("infinity"), do: :infinity def from_string(value) when is_bitstring(value) do if Regex.match?(~r/((?:\d*\.)?\d+)(ms|s|m|h)/, value) do parse_time(value) |> to_nano else raise "Cannot parse duration #{value}." end end @doc """ Returns the current system time in nanoseconds. """ def now() do :os.system_time(:nano_seconds) end @doc """ Takes an atom with a duration as the first element and unit of time as the second. Returns the duration converted to nanoseconds. ## Examples iex> Exmorph.Time.to_nano({8_888, :milli_seconds}) 8.888e9 iex> Exmorph.Time.to_nano({88, :seconds}) 88000000000 iex> Exmorph.Time.to_nano({64, :minutes}) 3840000000000 iex> Exmorph.Time.to_nano({4, :hours}) 14400000000000 """ def to_nano({time, :milli_seconds}) do (time / 1_000) * 1_000_000_000 end def to_nano({time, :seconds}) do time * 1_000_000_000 end def to_nano({time, :minutes}) do time * 60 * 1_000_000_000 end def to_nano({time, :hours}) do time * 3600 * 1_000_000_000 end def to_nano({time, _}), do: time defp parse_time(value) when is_bitstring(value) do cond do String.contains?(value, ".") -> {result, unit} = Float.parse(value) {result, parse_unit(unit)} true -> {result, unit} = Integer.parse(value) {result, parse_unit(unit)} end end defp parse_unit(unit) do case unit do "ms" -> :milli_seconds "msec" -> :milli_seconds "s" -> :seconds "sec" -> :seconds "m" -> :minutes "min" -> :minutes "h" -> :hours "hr" -> :hours _ -> :unknown end end end
22.092784
61
0.607093
ffbe32998b7ff64031705e4e0fb247d77814e19f
963
exs
Elixir
test/rocketpay_web/views/users_view_test.exs
tairone-livinalli/rocketpay
f58be9330b678a486d717613790e9e3d9f0fe9d1
[ "MIT" ]
null
null
null
test/rocketpay_web/views/users_view_test.exs
tairone-livinalli/rocketpay
f58be9330b678a486d717613790e9e3d9f0fe9d1
[ "MIT" ]
null
null
null
test/rocketpay_web/views/users_view_test.exs
tairone-livinalli/rocketpay
f58be9330b678a486d717613790e9e3d9f0fe9d1
[ "MIT" ]
null
null
null
defmodule RocketpayWeb.UsersViewTest do use RocketpayWeb.ConnCase import Phoenix.View alias Rocketpay.{Account, User} alias RocketpayWeb.UsersView test "renders create.json" do name = "Tairone" password = "123456" nickname = "Tai" email = "taironelivinalli@hotmail.com" age = 25 {:ok, balance} = Decimal.cast("0.00") params = %{ name: name, password: password, nickname: nickname, email: email, age: age } {:ok, %User{id: user_id, account: %Account{id: account_id}} = user} = Rocketpay.create_user(params) response = render(UsersView, "create.json", user: user) assert %{ message: "User created", user: %{ account: %{balance: ^balance, id: ^account_id}, email: ^email, id: ^user_id, name: ^name, nickname: ^nickname } } = response end end
22.928571
73
0.557632
ffbe3aff00ee9592c6d670c4fa60adbbc94506bf
2,139
ex
Elixir
clients/data_fusion/lib/google_api/data_fusion/v1/model/version.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
1
2021-12-20T03:40:53.000Z
2021-12-20T03:40:53.000Z
clients/data_fusion/lib/google_api/data_fusion/v1/model/version.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
1
2020-08-18T00:11:23.000Z
2020-08-18T00:44:16.000Z
clients/data_fusion/lib/google_api/data_fusion/v1/model/version.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.DataFusion.V1.Model.Version do @moduledoc """ The Data Fusion version. This proto message stores information about certain Data Fusion version, which is used for Data Fusion version upgrade. ## Attributes * `availableFeatures` (*type:* `list(String.t)`, *default:* `nil`) - Represents a list of available feature names for a given version. * `defaultVersion` (*type:* `boolean()`, *default:* `nil`) - Whether this is currently the default version for Cloud Data Fusion * `type` (*type:* `String.t`, *default:* `nil`) - Type represents the release availability of the version * `versionNumber` (*type:* `String.t`, *default:* `nil`) - The version number of the Data Fusion instance, such as '6.0.1.0'. 
""" use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :availableFeatures => list(String.t()) | nil, :defaultVersion => boolean() | nil, :type => String.t() | nil, :versionNumber => String.t() | nil } field(:availableFeatures, type: :list) field(:defaultVersion) field(:type) field(:versionNumber) end defimpl Poison.Decoder, for: GoogleApi.DataFusion.V1.Model.Version do def decode(value, options) do GoogleApi.DataFusion.V1.Model.Version.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.DataFusion.V1.Model.Version do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
38.196429
146
0.711547
ffbe6e815da3362a5e06418821ec9cd0e29c4e6f
249
ex
Elixir
priv/example-projects/foo_bar_umbrella/apps/foo_bar/lib/foo_bar.ex
c4710n/phx_custom
19080ed8896be9ae846d12b2f631a1bc4aef5c78
[ "MIT" ]
null
null
null
priv/example-projects/foo_bar_umbrella/apps/foo_bar/lib/foo_bar.ex
c4710n/phx_custom
19080ed8896be9ae846d12b2f631a1bc4aef5c78
[ "MIT" ]
null
null
null
priv/example-projects/foo_bar_umbrella/apps/foo_bar/lib/foo_bar.ex
c4710n/phx_custom
19080ed8896be9ae846d12b2f631a1bc4aef5c78
[ "MIT" ]
null
null
null
defmodule FooBar do @moduledoc """ FooBar keeps the contexts that define your domain and business logic. Contexts are also responsible for managing your data, regardless if it comes from the database, an external API or others. """ end
24.9
66
0.751004
ffbeae46644b10bba94105bbedd736e82f49c1eb
250
exs
Elixir
priv/repo/migrations/20190419162425_rename_table_auctions_userid_to_sellerid.exs
rstein66/Renaissance
947880e1b492adc2ce1f4a0883ddb41e597de011
[ "Unlicense" ]
2
2021-02-01T12:09:07.000Z
2021-03-14T12:20:43.000Z
priv/repo/migrations/20190419162425_rename_table_auctions_userid_to_sellerid.exs
rstein66/Renaissance
947880e1b492adc2ce1f4a0883ddb41e597de011
[ "Unlicense" ]
11
2019-04-09T18:01:16.000Z
2019-05-16T15:02:24.000Z
priv/repo/migrations/20190419162425_rename_table_auctions_userid_to_sellerid.exs
rstein66/Renaissance
947880e1b492adc2ce1f4a0883ddb41e597de011
[ "Unlicense" ]
2
2019-04-30T22:43:18.000Z
2019-05-15T20:34:49.000Z
defmodule Renaissance.Repo.Migrations.RenameTableAuctionsUseridToSellerid do use Ecto.Migration def up do rename table(:auctions), :user_id, to: :seller_id end def down do rename table(:auctions), :seller_id, to: :user_id end end
20.833333
76
0.744
ffbebac75e1fd568084cdf415fef8919e07f06a3
8,213
ex
Elixir
lib/chicken_race.ex
xeejp/chicken-race
e6a3cd0663fe90550f79ae6df3b5d385aa65f1a7
[ "MIT" ]
null
null
null
lib/chicken_race.ex
xeejp/chicken-race
e6a3cd0663fe90550f79ae6df3b5d385aa65f1a7
[ "MIT" ]
null
null
null
lib/chicken_race.ex
xeejp/chicken-race
e6a3cd0663fe90550f79ae6df3b5d385aa65f1a7
[ "MIT" ]
null
null
null
defmodule ChickenRace do use XeeThemeScript @experiment_types [ "interaction", "interaction_with_no_information" ] # Callbacks def script_type do :message end def install, do: nil def init do {:ok, %{"data" => %{ started: false, experiment_type: "interaction", participants: %{}, exited_users: 0, prize: 0, host_log: [], participant_log: [], punished: false, log: [] }}} end def join(%{participants: participants} = data, id) do if not Map.has_key?(participants, id) and not data.started do participants = Map.put(participants, id, nil) data = %{data | participants: participants} action = %{ type: "ADD_USER", id: id, user: nil } {:ok, %{"data" => data, "host" => %{action: action}}} else {:ok, %{"data" => data}} end end def handle_received(data, %{"action" => "fetchContents"}) do action = %{ type: "UPDATE_CONTENTS", started: data.started, experiment_type: data.experiment_type, users: data.participants, prize: data.prize, exited_users: data.exited_users, log: data.log } {:ok, %{"data" => data, "host" => %{action: action}}} end def handle_received(data, %{"action" => "changeType", "params" => type}) do data = %{data | experiment_type: type, started: false} action = %{ type: "CHANGE_TYPE", experiment_type: data.experiment_type, } participant = dispatch_to_all(data.participants, action) {:ok, %{"data" => data, "host" => %{action: action}, "participant" => participant}} end def handle_received(data, %{"action" => "start"}) do participants = Enum.map(data.participants, fn {id, _} -> {id, nil} end) |> Enum.into(%{}) data = %{data | started: true, exited_users: 0, participants: participants, punished: false } action = %{ type: "START", users: data.participants } participant = dispatch_to_all(data.participants, action |> Map.put(:users, Map.size(data.participants)) |> Map.put(:exited_users, data.exited_users) ) {:ok, %{"data" => data, "host" => %{action: action}, "participant" => participant}} end def handle_received(data, %{"action" => "stop"}) do data = %{data | started: false} 
action = %{ type: "STOP" } participant = dispatch_to_all(data.participants, action) {:ok, %{"data" => data, "host" => %{action: action}, "participant" => participant}} end def handle_received(data, %{"action" => "updatePrize", "params" => %{"prize" => prize}}) do data = %{data | prize: prize} action = %{ type: "UPDATE_PRIZE", prize: data.prize } participant = dispatch_to_all(data.participants, action) {:ok, %{"data" => data, "host" => %{action: action}, "participant" => participant}} end def handle_received(data, %{"action" => "exit"}) do if data.started and not data.punished do data = data |> Map.update!(:exited_users, fn x -> x + 1 end) participant = dispatch_to_all(data.participants, %{ type: "UPDATE_USERS", users: Map.size(data.participants), exited_users: data.exited_users }) host_action = %{ type: "UPDATE_EXITED_USER", exited_users: data.exited_users } if Map.size(data.participants) == data.exited_users + 1 do participants = Enum.map(data.participants, fn {id, value} -> if value == nil do {id, :punished} else {id, value} end end) |> Enum.into(%{}) participant = Enum.map(participants, fn {id, value} -> value = if value == :punished do %{action: %{ type: "PUNISHED" }} else %{action: %{ type: "UPDATE_USERS", users: Map.size(data.participants), exited_users: data.exited_users }} end {id, value} end) |> Enum.into(%{}) data = %{data | participants: participants, punished: true} host_action = %{ type: "UPDATE_USER", users: data.participants } end {:ok, %{"data" => data, "host" => %{action: host_action}, "participant" => participant}} else {:ok, %{"data" => data}} end end def handle_received(data, %{"action" => "exitAll"}) do if data.started and not data.punished do data = data |> Map.put(:exited_users, Map.size(data.participants) - 1) participant = dispatch_to_all(data.participants, %{ type: "UPDATE_USERS", users: Map.size(data.participants), exited_users: data.exited_users }) host_action = %{ type: "UPDATE_EXITED_USER", exited_users: data.exited_users } if 
Map.size(data.participants) == data.exited_users + 1 do participants = Enum.map(data.participants, fn {id, value} -> if value == nil do {id, :punished} else {id, value} end end) |> Enum.into(%{}) participant = Enum.map(participants, fn {id, value} -> value = if value == :punished do %{action: %{ type: "PUNISHED" }} else %{action: %{ type: "UPDATE_USERS", users: Map.size(data.participants), exited_users: data.exited_users }} end {id, value} end) |> Enum.into(%{}) data = %{data | participants: participants, punished: true} host_action = %{ type: "UPDATE_USER", users: data.participants } end {:ok, %{"data" => data, "host" => %{action: host_action}, "participant" => participant}} else {:ok, %{"data" => data}} end end def handle_received(data, %{"action" => "log"}) do users = data.participants data = Map.update!(data, :log, fn logs -> [users | logs] end) action = %{ type: "UPDATE_LOG", log: data.log } {:ok, %{"data" => data, "host" => %{action: action}}} end def handle_received(data, %{"action" => "fetchContents"}, id) do action = %{ type: "UPDATE_CONTENTS", started: data.started, answered: data.participants[id] != nil, punished: data.participants[id] == :punished, experiment_type: data.experiment_type, users: Map.size(data.participants), exited_users: data.exited_users, prize: data.prize, received_prize: is_map(data.participants[id]) and Map.get(data.participants[id], "prize") } {:ok, %{"data" => data, "participant" => %{id => %{action: action}}}} end def handle_received(data, %{"action" => "exit", "params" => params}, id) do # if the user haven't exited yet if Map.get(data.participants, id, {}) == nil and not data.punished do data = data |> put_in([:participants, id], Map.put(params, :prize, data.prize)) |> Map.update!(:exited_users, &(&1 + 1)) host_action = %{ type: "UPDATE_USER", id: id, user: data.participants[id] } participant = dispatch_to_all(data.participants, %{ type: "UPDATE_USERS", users: Map.size(data.participants), exited_users: data.exited_users }) if 
Map.size(data.participants) == data.exited_users + 1 do {id, nil} = Enum.find(data.participants, fn {id, value} -> value == nil end) data = data |> put_in([:participants, id], :punished) |> Map.put(:punished, true) host_action = Map.put(host_action, :users, data.participants) participant = %{participant | id => %{ action: %{ type: "PUNISHED" } }} end {:ok, %{"data" => data, "host" => %{action: host_action}, "participant" => participant}} else {:ok, %{"data" => data}} end end def dispatch_to_all(participants, action) , do: Enum.map(participants, fn {id, _} -> {id, %{action: action}} end) |> Enum.into(%{}) end
31.467433
95
0.551686
ffbed87a9b9650778871a4835fdbd31420505536
1,091
ex
Elixir
lib/medic/checks/homebrew.ex
geometerio/medic
ce5245b468f443174cfd45953db74172ea23e1e4
[ "MIT" ]
3
2021-06-18T18:42:35.000Z
2022-02-09T01:54:58.000Z
lib/medic/checks/homebrew.ex
geometerio/medic
ce5245b468f443174cfd45953db74172ea23e1e4
[ "MIT" ]
3
2021-06-17T19:02:32.000Z
2021-06-17T19:44:35.000Z
lib/medic/checks/homebrew.ex
geometerio/medic
ce5245b468f443174cfd45953db74172ea23e1e4
[ "MIT" ]
1
2022-03-10T19:16:14.000Z
2022-03-10T19:16:14.000Z
defmodule Medic.Checks.Homebrew do @moduledoc """ Expects a Brewfile to be present, and homebrew packages declared in the Brewfile to be up-to-date. ## Examples {Check.Homebrew, :bundled?} """ @doc """ Expects there to be a Brewfile, and for all the dependencies in that Brewfile to be up to date. """ @spec bundled?() :: Medic.Check.check_return_t() def bundled? do with :ok <- homebrew_installed?(), :ok <- brewfile_exists?() do case System.cmd("brew", ["bundle", "check"], env: [{"HOMEBREW_NO_AUTO_UPDATE", "1"}]) do {_output, 0} -> :ok {output, _} -> {:error, output, "brew bundle"} end end end @spec brewfile_exists?() :: Medic.Check.check_return_t() def brewfile_exists? do if File.exists?("Brewfile"), do: :ok, else: {:error, "Brewfile does not exist", "touch Brewfile"} end def homebrew_installed? do case System.cmd("which", ["brew"]) do {_output, 0} -> :ok {_output, _exit_status} -> {:error, "Homebrew not installed", "open https://brew.sh"} end end end
27.275
94
0.618698
ffbeeb85c4638470d73a814dbc6a5be2a62caa89
261
ex
Elixir
test/support/store.ex
xfumihiro/elasticsearch-elixir
bd71da03f54dd0c1b892d60472d05a4fe2be441e
[ "MIT" ]
null
null
null
test/support/store.ex
xfumihiro/elasticsearch-elixir
bd71da03f54dd0c1b892d60472d05a4fe2be441e
[ "MIT" ]
null
null
null
test/support/store.ex
xfumihiro/elasticsearch-elixir
bd71da03f54dd0c1b892d60472d05a4fe2be441e
[ "MIT" ]
null
null
null
defmodule Elasticsearch.Test.Store do @moduledoc false @behaviour Elasticsearch.Store import Ecto.Query alias Elasticsearch.Test.Repo def load(Post, offset, limit) do Post |> offset(^offset) |> limit(^limit) |> Repo.all() end end
16.3125
37
0.681992
ffbef22de57b7334d4db15505e3b8529893a5a6a
498
exs
Elixir
projects/api/priv/repo/migrations/20180613013723_add_memberships_table.exs
strattadb/margaret
dde5d7b42f6d9b4d320069a0117136dae03b13b5
[ "MIT" ]
82
2017-11-06T01:00:55.000Z
2020-12-09T10:35:29.000Z
projects/api/priv/repo/migrations/20180613013723_add_memberships_table.exs
dbstratta/margaret
dde5d7b42f6d9b4d320069a0117136dae03b13b5
[ "MIT" ]
98
2017-11-06T22:57:32.000Z
2020-07-03T04:46:39.000Z
projects/api/priv/repo/migrations/20180613013723_add_memberships_table.exs
strattadb/margaret
dde5d7b42f6d9b4d320069a0117136dae03b13b5
[ "MIT" ]
10
2017-11-16T05:31:58.000Z
2020-10-29T18:02:35.000Z
defmodule Margaret.Repo.Migrations.AddMembershipsTable do @moduledoc false use Ecto.Migration @doc false def change do create table(:memberships) do add( :customer_id, references(:customers, on_delete: :delete_all), null: false ) add(:stripe_subscription_id, :string, null: false) timestamps() end create(unique_index(:memberships, [:customer_id])) create(unique_index(:memberships, [:stripe_subscription_id])) end end
20.75
65
0.674699
ffbf4916470ed2abb5560d61b2527e421393055a
2,415
exs
Elixir
test/decoding/parsing/exceptions_test.exs
shoeheart/csv
676d5a07c4e3abf74fc56e1086a9386951dab7e8
[ "MIT" ]
null
null
null
test/decoding/parsing/exceptions_test.exs
shoeheart/csv
676d5a07c4e3abf74fc56e1086a9386951dab7e8
[ "MIT" ]
null
null
null
test/decoding/parsing/exceptions_test.exs
shoeheart/csv
676d5a07c4e3abf74fc56e1086a9386951dab7e8
[ "MIT" ]
null
null
null
defmodule DecodingTests.ParsingTests.ExceptionsTest do use ExUnit.Case alias CSV.Decoding.Parser alias CSV.EscapeSequenceError test "raises a escape sequence error when given an invalid sequence of tokens" do parsed = Enum.map( [ {[ {:double_quote, "\""}, {:delimiter, "\r\n"}, {:content, "c"}, {:separator, ","}, {:content, "d"} ], 1}, {[ {:content, "a"}, {:separator, ","}, {:double_quote, "\""}, {:content, "b"}, {:double_quote, "\""}, {:double_quote, "\""}, {:content, "c"}, {:separator, ","}, {:double_quote, "\""} ], 2} ], &Parser.parse/1 ) assert parsed == [ {:error, EscapeSequenceError, "\r\nc,d", 1}, {:ok, ["a", "b\"c,"], 2} ] end test "raises an escape sequence error when halted in an escape sequence" do parsed = Enum.map( [ {[ {:content, "a"}, {:separator, ","}, {:double_quote, "\""}, {:content, "b"}, {:double_quote, "\""}, {:double_quote, "\""}, {:content, "c"}, {:separator, ","}, {:double_quote, "\""} ], 1}, {[ {:double_quote, "\""}, {:delimiter, "\r\n"}, {:content, "c"}, {:separator, ","}, {:content, "d"} ], 2} ], &Parser.parse/1 ) assert parsed == [ {:ok, ["a", "b\"c,"], 1}, {:error, EscapeSequenceError, "\r\nc,d", 2} ] end test "the parser propagates errors" do parsed = Enum.map( [ {[ {:content, "a"}, {:separator, ","}, {:double_quote, "\""}, {:content, "b"}, {:double_quote, "\""}, {:double_quote, "\""}, {:content, "c"}, {:separator, ","}, {:double_quote, "\""} ], 1}, {:error, RuntimeError, "MESSAGE", 2} ], &Parser.parse/1 ) assert parsed == [ {:ok, ["a", "b\"c,"], 1}, {:error, RuntimeError, "MESSAGE", 2} ] end end
24.896907
83
0.36853
ffbf62df1b03951e175758119306dc408cab454b
1,831
ex
Elixir
lib/qwirkl_ex_web/telemetry.ex
ksherman/QwirkleEx
be4aa971767de916b9fc3a9a52124f17ce58b196
[ "MIT" ]
1
2021-02-01T12:09:47.000Z
2021-02-01T12:09:47.000Z
lib/qwirkl_ex_web/telemetry.ex
ksherman/QwirkleEx
be4aa971767de916b9fc3a9a52124f17ce58b196
[ "MIT" ]
null
null
null
lib/qwirkl_ex_web/telemetry.ex
ksherman/QwirkleEx
be4aa971767de916b9fc3a9a52124f17ce58b196
[ "MIT" ]
null
null
null
defmodule QwirklExWeb.Telemetry do use Supervisor import Telemetry.Metrics def start_link(arg) do Supervisor.start_link(__MODULE__, arg, name: __MODULE__) end @impl true def init(_arg) do children = [ # Telemetry poller will execute the given period measurements # every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics {:telemetry_poller, measurements: periodic_measurements(), period: 10_000} # Add reporters as children of your supervision tree. # {Telemetry.Metrics.ConsoleReporter, metrics: metrics()} ] Supervisor.init(children, strategy: :one_for_one) end def metrics do [ # Phoenix Metrics summary("phoenix.endpoint.stop.duration", unit: {:native, :millisecond} ), summary("phoenix.router_dispatch.stop.duration", tags: [:route], unit: {:native, :millisecond} ), # Database Metrics summary("qwirkl_ex.repo.query.total_time", unit: {:native, :millisecond}), summary("qwirkl_ex.repo.query.decode_time", unit: {:native, :millisecond}), summary("qwirkl_ex.repo.query.query_time", unit: {:native, :millisecond}), summary("qwirkl_ex.repo.query.queue_time", unit: {:native, :millisecond}), summary("qwirkl_ex.repo.query.idle_time", unit: {:native, :millisecond}), # VM Metrics summary("vm.memory.total", unit: {:byte, :kilobyte}), summary("vm.total_run_queue_lengths.total"), summary("vm.total_run_queue_lengths.cpu"), summary("vm.total_run_queue_lengths.io") ] end defp periodic_measurements do [ # A module, function and arguments to be invoked periodically. # This function must call :telemetry.execute/3 and a metric must be added above. # {QwirklExWeb, :count_users, []} ] end end
32.696429
86
0.673949
ffbf7f685c3553e0f52e9aa310e34b170ccb2dce
1,941
ex
Elixir
clients/container/lib/google_api/container/v1/model/usable_subnetwork_secondary_range.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
1
2021-12-20T03:40:53.000Z
2021-12-20T03:40:53.000Z
clients/container/lib/google_api/container/v1/model/usable_subnetwork_secondary_range.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
1
2020-08-18T00:11:23.000Z
2020-08-18T00:44:16.000Z
clients/container/lib/google_api/container/v1/model/usable_subnetwork_secondary_range.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.Container.V1.Model.UsableSubnetworkSecondaryRange do @moduledoc """ Secondary IP range of a usable subnetwork. ## Attributes * `ipCidrRange` (*type:* `String.t`, *default:* `nil`) - The range of IP addresses belonging to this subnetwork secondary range. * `rangeName` (*type:* `String.t`, *default:* `nil`) - The name associated with this subnetwork secondary range, used when adding an alias IP range to a VM instance. * `status` (*type:* `String.t`, *default:* `nil`) - This field is to determine the status of the secondary range programmably. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :ipCidrRange => String.t() | nil, :rangeName => String.t() | nil, :status => String.t() | nil } field(:ipCidrRange) field(:rangeName) field(:status) end defimpl Poison.Decoder, for: GoogleApi.Container.V1.Model.UsableSubnetworkSecondaryRange do def decode(value, options) do GoogleApi.Container.V1.Model.UsableSubnetworkSecondaryRange.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.Container.V1.Model.UsableSubnetworkSecondaryRange do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
36.622642
169
0.726945
ffbf9feba7ba7a538760dde2de61e5923b0000b8
370
exs
Elixir
test/other/other_test.exs
benyblack/Taex
a98bb1523497c455ce626cadd345185ecc03cea8
[ "MIT" ]
null
null
null
test/other/other_test.exs
benyblack/Taex
a98bb1523497c455ce626cadd345185ecc03cea8
[ "MIT" ]
null
null
null
test/other/other_test.exs
benyblack/Taex
a98bb1523497c455ce626cadd345185ecc03cea8
[ "MIT" ]
null
null
null
defmodule Taex.Other do use ExUnit.Case alias Taex.Helpers test "high helper" do assert Helpers.high([1,2,3,4,5]) == 5 assert Helpers.high(5)== 5 assert Helpers.high([1,1,4,2,1,5]) == 5 end test "low helper" do assert Helpers.low([1,2,3,4,5]) == 1 assert Helpers.low(1) == 1 assert Helpers.low([1,4,2,6,8,234,1,2,4323]) == 1 end end
23.125
53
0.610811
ffbfb13a4fbad4784c189c174df6b3ec7ed50a07
3,368
ex
Elixir
2021/day11/lib/day11.ex
faried/advent
1d3817a3c305963ff22e844630bbafd3af242d3e
[ "MIT" ]
null
null
null
2021/day11/lib/day11.ex
faried/advent
1d3817a3c305963ff22e844630bbafd3af242d3e
[ "MIT" ]
null
null
null
2021/day11/lib/day11.ex
faried/advent
1d3817a3c305963ff22e844630bbafd3af242d3e
[ "MIT" ]
null
null
null
defmodule Day11 do @moduledoc """ Documentation for `Day11`. """ def part1(input, steps \\ 100) do {map, maxrows, maxcols} = to_map(input) step({map, 0}, maxrows, maxcols, steps) |> elem(1) end def part2(input) do {map, maxrows, maxcols} = to_map(input) until_all_flash(map, maxrows, maxcols) end defp until_all_flash(map, maxrows, maxcols, step_count \\ 1) defp until_all_flash(map, maxrows, maxcols, step_count) do {newmap, _} = step({map, 0}, maxrows, maxcols, 1) if all_flash?(newmap), do: step_count, else: until_all_flash(newmap, maxrows, maxcols, step_count + 1) end defp all_flash?(map), do: Enum.all?(map, fn {_, octopus} -> octopus == 0 end) defp step({map, flashes}, _, _, 0), do: {map, flashes} defp step({map, flashes}, mr, mc, count) do map |> energy(mr, mc) |> flash(flashes, mr, mc, MapSet.new()) |> step(mr, mc, count - 1) end defp energy(map, mr, mc) do for(r <- 0..(mr - 1), c <- 0..(mc - 1), do: {{r, c}, map[{r, c}] + 1}) |> Map.new() end # this is complicated # 1. it loops through the map, and finds any > 9 octopuses that have not already # flashed in the current step # 2. it puts each such octopus's position in the flashed set # 3. it builds a list of {{pos}, adj(pos)} pairs for each octopus # 4. it calls do_update with the map and the updatelist, which returns an updated map # 5. 
if the updated map is not the same as the map we're given, run it again (it might # have new > 9 octopuses) defp flash(map, flashes, mr, mc, flashed) do {adjlist, newflash} = Enum.reduce(map, {[], flashed}, fn {{r, c}, octopus}, {acc, flashed} -> if octopus > 9 and {r, c} not in flashed do {[{{r, c}, adj(map, r, c)} | acc], MapSet.put(flashed, {r, c})} else {acc, flashed} end end) {newmap, flashes} = do_update(map, flashes, adjlist, newflash) if map != newmap do flash(newmap, flashes, mr, mc, newflash) else {newmap, flashes} end end defp do_update(map, flashes, [], _flashed), do: {map, flashes} # for each adjacent position to the flashing octopus, # increment their energy level (unless they've already flashed) defp do_update(map, flashes, [{pos, adjpos} | octopuses], flashed) do map = Map.put(map, pos, 0) newmap = Enum.reduce(adjpos, map, fn pos, acc -> if pos in flashed do acc else Map.put(acc, pos, 1 + Map.get(acc, pos)) end end) do_update(newmap, flashes + 1, octopuses, flashed) end defp adj(m, r, c) do [ {r, c + 1}, {r, c - 1}, {r - 1, c}, {r + 1, c}, {r + 1, c + 1}, {r + 1, c - 1}, {r - 1, c + 1}, {r - 1, c - 1} ] |> Enum.filter(fn key -> Map.has_key?(m, key) end) end defp to_map(input) do rows = String.split(input, "\n", trim: true) maxrows = Enum.count(rows) maxcols = String.length(Enum.at(rows, 0)) map = rows |> Enum.with_index() |> Enum.flat_map(fn {row, rownum} -> row |> String.graphemes() |> Enum.with_index() |> Enum.map(fn {octopus, colnum} -> {{rownum, colnum}, String.to_integer(octopus)} end) end) |> Map.new() {map, maxrows, maxcols} end end
26.730159
88
0.570665
ffbfdb174a60f8ab938907f750f2eea342d9a8bc
1,093
ex
Elixir
web/router.ex
tedstrauss/sonar
65de7a9a78eae9fd4e02f5abdf00f9fdec19b081
[ "MIT" ]
1
2020-07-17T14:02:31.000Z
2020-07-17T14:02:31.000Z
web/router.ex
tedstrauss/sonar
65de7a9a78eae9fd4e02f5abdf00f9fdec19b081
[ "MIT" ]
null
null
null
web/router.ex
tedstrauss/sonar
65de7a9a78eae9fd4e02f5abdf00f9fdec19b081
[ "MIT" ]
1
2019-12-02T17:51:36.000Z
2019-12-02T17:51:36.000Z
defmodule Sonar.Router do use Sonar.Web, :router pipeline :browser do plug :accepts, ["html"] plug :fetch_session plug :fetch_flash plug :protect_from_forgery plug :put_secure_browser_headers end pipeline :api do plug :accepts, ["json"] end scope "/api", Sonar do pipe_through :api get "/events/:id", ShowApiController, :index post "/shows_prox", ShowApiController, :shows_prox get "/scrape", ScraperController, :index end scope "/auth", Sonar do pipe_through :browser get "/:provider", AuthController, :request get "/:provider/callback", AuthController, :callback post "/:provider/callback", AuthController, :callback end scope "/", Sonar do pipe_through :browser # Use the default browser stack get "/*path", PageController, :index resources "/users", UserController resources "/shows", ShowController resources "/venues", VenueController get "/login", SessionController, :new post "/login", SessionController, :create delete "/logout", SessionController, :delete end end
24.840909
57
0.6871
ffbfe184df2ed8969314ffb80f655fe0c2bb7ff8
628
exs
Elixir
test/rand_nif_test.exs
gyson/rand_nif
2f087b9081766fdffd7bc6a383017930c412dc05
[ "MIT" ]
3
2019-02-24T21:33:45.000Z
2021-09-03T16:21:21.000Z
test/rand_nif_test.exs
gyson/rand_nif
2f087b9081766fdffd7bc6a383017930c412dc05
[ "MIT" ]
null
null
null
test/rand_nif_test.exs
gyson/rand_nif
2f087b9081766fdffd7bc6a383017930c412dc05
[ "MIT" ]
null
null
null
defmodule RandNifTest do use ExUnit.Case doctest RandNif test "RandNif.uniform/0" do result = RandNif.uniform() assert result >= 0.0 assert result < 1.0 end test "RandNif.uniform/1" do assert_raise ArgumentError, fn -> RandNif.uniform("hello") end assert_raise ArgumentError, fn -> RandNif.uniform(0) end assert_raise ArgumentError, fn -> RandNif.uniform(1.0) end assert_raise ArgumentError, fn -> RandNif.uniform(-1) end assert RandNif.uniform(1) == 1 assert RandNif.uniform(10) <= 10 assert RandNif.uniform(100) <= 100 end end
18.470588
38
0.644904
ffbfe559c0b49f6df908fbc2ad1382ece5e383ae
1,853
exs
Elixir
config/config.exs
SeppPenner/webbkoll
4e1b8f6dab8c996996824fa83506114fb68e826e
[ "MIT" ]
1
2019-11-28T09:43:24.000Z
2019-11-28T09:43:24.000Z
config/config.exs
LinkBaseORG/webbkoll
4e1b8f6dab8c996996824fa83506114fb68e826e
[ "MIT" ]
null
null
null
config/config.exs
LinkBaseORG/webbkoll
4e1b8f6dab8c996996824fa83506114fb68e826e
[ "MIT" ]
null
null
null
# This file is responsible for configuring your application # and its dependencies with the aid of the Mix.Config module. # # This configuration file is loaded before any dependency and # is restricted to this project. use Mix.Config # General application configuration config :webbkoll, locales: %{ "en" => "English", "sv" => "Svenska", "de" => "Deutsch", "no" => "Norsk" # "fr" => "Français" }, default_locale: "en", version: System.cmd("git", ["log", "-1", "--format=%h %ci"]) |> elem(0) |> String.trim() # Configures the endpoint config :webbkoll, WebbkollWeb.Endpoint, url: [host: "localhost"], # secret_key_base is not actually used for anything at the moment, as Webbkoll doesn't # use cookies at all. secret_key_base: "Yk9QpNTp3jg15sA4KFDjBq4hgfp0eYV0o1bYO6Hxf0BUV5deh4HkwMks/Z541bCR", render_errors: [accepts: ~w(html json)], pubsub: [name: Webbkoll.PubSub, adapter: Phoenix.PubSub.PG2], server: true # Configures Elixir's Logger config :logger, :console, format: "$time $metadata[$level] $message\n", metadata: [:request_id], level: :error # Use Jason for JSON parsing in Phoenix config :phoenix, :json_library, Jason # Configure phoenix generators config :phoenix, :generators, migration: true, binary_id: false config :webbkoll, Webbkoll.Scheduler, jobs: [ {"@reboot", {Webbkoll.CronJobs, :download_geoip_if_necessary, []}}, {"@weekly", {Webbkoll.CronJobs, :update_geoip, []}}, {"* * * * *", {Webbkoll.CronJobs, :find_and_remove_stuck_records, []}} ] config :geolix, databases: [ %{ id: :country, adapter: Geolix.Adapter.MMDB2, source: "priv/GeoLite2-Country.mmdb" } ] # Import environment specific config. This must remain at the bottom # of this file so it overrides the configuration defined above. import_config "#{Mix.env()}.exs"
29.412698
90
0.689692
ffbfff4134883647dc45bbb5d3a24893ec719861
1,301
exs
Elixir
data/web/deps/csv/mix.exs
lydiadwyer/trains_elixir
16da18d4582307f4967b6cce7320e9aa08a849c3
[ "Apache-2.0" ]
null
null
null
data/web/deps/csv/mix.exs
lydiadwyer/trains_elixir
16da18d4582307f4967b6cce7320e9aa08a849c3
[ "Apache-2.0" ]
null
null
null
data/web/deps/csv/mix.exs
lydiadwyer/trains_elixir
16da18d4582307f4967b6cce7320e9aa08a849c3
[ "Apache-2.0" ]
null
null
null
defmodule CSV.Mixfile do use Mix.Project def project do [ app: :csv, version: "1.4.4", elixir: "~> 1.1", deps: deps, package: package, docs: &docs/0, name: "CSV", consolidate_protocols: true, source_url: "https://github.com/beatrichartz/csv", description: "CSV Decoding and Encoding for Elixir", test_coverage: [tool: ExCoveralls], preferred_cli_env: ["coveralls": :test, "coveralls.detail": :test, "coveralls.post": :test] ] end defp package do [ maintainers: ["Beat Richartz"], licenses: ["MIT"], links: %{github: "https://github.com/beatrichartz/csv" } ] end def application do [applications: [:parallel_stream]] end defp deps do [ {:parallel_stream, "~> 1.0.4"}, {:excoveralls, "~> 0.5", only: :test}, {:benchfella, only: :bench}, {:ex_csv, only: :bench}, {:csvlixir, only: :bench}, {:cesso, only: :bench}, {:ex_doc, "0.9.0", only: :docs}, {:inch_ex, only: :docs}, {:earmark, "0.1.19", only: :docs} ] end defp docs do {ref, 0} = System.cmd("git", ["rev-parse", "--verify", "--quiet", "HEAD"]) [ source_ref: ref, main: "overview" ] end end
23.232143
99
0.534204
ffc01acc56b818628ab010d5e98ab620d0ffeb5d
34
ex
Elixir
testData/org/elixir_lang/parser_definition/matched_and_operation_parsing_test_case/EmptyBlock.ex
ArtemGordinsky/intellij-elixir
e2d9b4dfc65651b293d499043edeaad606cf5652
[ "Apache-2.0" ]
null
null
null
testData/org/elixir_lang/parser_definition/matched_and_operation_parsing_test_case/EmptyBlock.ex
ArtemGordinsky/intellij-elixir
e2d9b4dfc65651b293d499043edeaad606cf5652
[ "Apache-2.0" ]
null
null
null
testData/org/elixir_lang/parser_definition/matched_and_operation_parsing_test_case/EmptyBlock.ex
ArtemGordinsky/intellij-elixir
e2d9b4dfc65651b293d499043edeaad606cf5652
[ "Apache-2.0" ]
null
null
null
(;) && (;) (;) &&& (;) (;) and (;)
11.333333
11
0.088235
ffc02fb907b80958fc8b85d9c9159f53e3222ae8
1,172
exs
Elixir
test/elixir_challenge_test.exs
jofan/elixir-challenge
8090a0d6fbd87d95d193b1c0e036f7387c99815a
[ "MIT" ]
1
2016-10-19T20:49:17.000Z
2016-10-19T20:49:17.000Z
test/elixir_challenge_test.exs
jofan/elixir-challenge
8090a0d6fbd87d95d193b1c0e036f7387c99815a
[ "MIT" ]
null
null
null
test/elixir_challenge_test.exs
jofan/elixir-challenge
8090a0d6fbd87d95d193b1c0e036f7387c99815a
[ "MIT" ]
null
null
null
defmodule ElixirChallengeTest do use ExUnit.Case doctest ElixirChallenge test "zero: math power" do expected = 274877906944 actual = ElixirChallenge.zero 2, 38 IO.puts "Challenge 0: " <> to_string(actual) assert actual == expected end test "one: decode string where m -> o" do expected = "ocr" actual = ElixirChallenge.one "map", 2 IO.puts "Challenge 1: " <> actual assert actual == expected end test "two_cheat: find rare letters" do expected = "equality" actual = ElixirChallenge.two_cheat() assert actual == expected end test "two: find rare characters without assumptions" do expected = "equality" actual = ElixirChallenge.two() IO.puts "Challenge 2: " <> actual assert actual == expected end test "three: find guarded characters" do expected = "linkedlist" actual = ElixirChallenge.three() IO.puts "Challenge 3: " <> actual assert actual == expected end # Not a great test :) test "five: print the word" do expected = :ok IO.puts "Challenge 5:" actual = ElixirChallenge.five() assert actual == expected end test "six: ?" do end end
22.538462
57
0.658703
ffc0411b49209589c655d6ef7af779de9fc18975
10,219
ex
Elixir
lib/teiserver_web/router.ex
marseel/teiserver
7e085ae7853205d217183737d3eb69a4941bbe7e
[ "MIT" ]
null
null
null
lib/teiserver_web/router.ex
marseel/teiserver
7e085ae7853205d217183737d3eb69a4941bbe7e
[ "MIT" ]
null
null
null
lib/teiserver_web/router.ex
marseel/teiserver
7e085ae7853205d217183737d3eb69a4941bbe7e
[ "MIT" ]
null
null
null
defmodule TeiserverWeb.Router do defmacro __using__(_opts \\ []) do quote do import unquote(__MODULE__) end end defmacro teiserver_routes() do quote do scope "/", TeiserverWeb.General, as: :ts_general do pipe_through([:browser, :nomenu_layout]) get("/code_of_conduct", GeneralController, :code_of_conduct) get("/privacy_policy", GeneralController, :gdpr) get("/gdpr", GeneralController, :gdpr) end scope "/teiserver", TeiserverWeb.General, as: :ts_general do pipe_through([:browser, :admin_layout, :protected]) get("/", GeneralController, :index) end # ts_account_X_path scope "/teiserver/account", TeiserverWeb.Account, as: :ts_account do pipe_through([:browser, :admin_layout, :protected]) get("/relationships", RelationshipsController, :index) post("/relationships/find/", RelationshipsController, :find) post("/relationships/create/:action/:target", RelationshipsController, :create) put("/relationships/update/:action/:target", RelationshipsController, :update) delete("/relationships/delete/:action/:target", RelationshipsController, :delete) resources("/preferences", PreferencesController, only: [:index, :edit, :update, :new, :create] ) get("/", GeneralController, :index) end # ts_clans_X_path scope "/teiserver/clans", TeiserverWeb.Clans, as: :ts_clans do pipe_through([:browser, :admin_layout, :protected]) get("/", ClanController, :index) get("/:name", ClanController, :show) put("/update/:clan_id", ClanController, :update) get("/set_default/:id", ClanController, :set_default) post("/create_invite", ClanController, :create_invite) delete("/delete_invite/:clan_id/:user_id", ClanController, :delete_invite) put("/respond_to_invite/:clan_id/:response", ClanController, :respond_to_invite) delete("/delete_membership/:clan_id/:user_id", ClanController, :delete_membership) put("/promote/:clan_id/:user_id", ClanController, :promote) put("/demote/:clan_id/:user_id", ClanController, :demote) end scope "/teiserver/games", TeiserverWeb.Game, as: :ts_game do pipe_through([:browser, 
:admin_layout, :protected]) resources("/queues", QueueController) end scope "/teiserver/battle", TeiserverWeb.Battle, as: :ts_battle do pipe_through([:browser, :admin_layout, :protected]) get("/", GeneralController, :index) end scope "/teiserver/battle", TeiserverWeb.Battle, as: :ts_battle do pipe_through([:browser, :admin_layout, :protected]) resources("/matches", MatchController, only: [:index, :show, :delete]) end scope "/teiserver/battle", TeiserverWeb.Battle.LobbyLive, as: :ts_battle do pipe_through([:browser, :admin_layout, :protected]) live("/lobbies", Index, :index) live("/lobbies/:id", Show, :show) end scope "/teiserver/game_live", TeiserverWeb.Matchmaking.QueueLive, as: :ts_game do pipe_through([:browser, :admin_layout, :protected]) live("/queues", Index, :index) live("/queues/:id", Show, :show) end # REPORTING scope "/teiserver/reports", TeiserverWeb.Report, as: :ts_reports do pipe_through([:browser, :admin_layout, :protected]) get("/", GeneralController, :index) # Server metrics get("/server/day_metrics/today", ServerMetricController, :day_metrics_today) get("/server/day_metrics/show/:date", ServerMetricController, :day_metrics_show) get("/server/day_metrics/export_form", ServerMetricController, :day_metrics_export_form) post("/server/day_metrics/export_post", ServerMetricController, :day_metrics_export_post) get("/server/day_metrics/graph", ServerMetricController, :day_metrics_graph) post("/server/day_metrics/graph", ServerMetricController, :day_metrics_graph) get("/server/day_metrics", ServerMetricController, :day_metrics_list) post("/server/day_metrics", ServerMetricController, :day_metrics_list) get("/server/month_metrics/today", ServerMetricController, :month_metrics_today) get("/server/month_metrics/show/:year/:month", ServerMetricController, :month_metrics_show) get("/server/month_metrics/graph", ServerMetricController, :month_metrics_graph) post("/server/month_metrics/graph", ServerMetricController, :month_metrics_graph) 
get("/server/month_metrics", ServerMetricController, :month_metrics_list) post("/server/month_metrics", ServerMetricController, :month_metrics_list) # Match metrics get("/match/day_metrics/today", MatchMetricController, :day_metrics_today) get("/match/day_metrics/show/:date", MatchMetricController, :day_metrics_show) get("/match/day_metrics/graph", MatchMetricController, :day_metrics_graph) post("/match/day_metrics/graph", MatchMetricController, :day_metrics_graph) get("/match/day_metrics", MatchMetricController, :day_metrics_list) post("/match/day_metrics", MatchMetricController, :day_metrics_list) get("/match/export_form", MatchMetricController, :export_form) post("/match/export_post", MatchMetricController, :export_post) get("/match/month_metrics/today", MatchMetricController, :month_metrics_today) get("/match/month_metrics/show/:year/:month", MatchMetricController, :month_metrics_show) get("/match/month_metrics/graph", MatchMetricController, :month_metrics_graph) post("/match/month_metrics/graph", MatchMetricController, :month_metrics_graph) get("/match/month_metrics", MatchMetricController, :month_metrics_list) post("/match/month_metrics", MatchMetricController, :month_metrics_list) # Client events get("/client_events/export/form", ClientEventController, :export_form) post("/client_events/export/post", ClientEventController, :export_post) get("/client_events/summary", ClientEventController, :summary) get("/client_events/property/:property_name/detail", ClientEventController, :property_detail) get("/client_events/event/:event_name/detail", ClientEventController, :event_detail) get("/infolog/download/:id", InfologController, :download) resources("/infolog", InfologController, only: [:index, :show, :delete]) get("/show/:name", ReportController, :show) post("/show/:name", ReportController, :show) end # ts_engine_X_path scope "/teiserver/engine", TeiserverWeb.Engine, as: :ts_engine do pipe_through([:browser, :admin_layout, :protected]) resources("/unit", 
UnitController) end # API scope "/teiserver/api", TeiserverWeb.API do pipe_through :api post "/login", SessionController, :login end scope "/teiserver/api", TeiserverWeb.API do pipe_through([:token_api]) post "/battle/create", BattleController, :create end # ADMIN scope "/teiserver/admin", TeiserverWeb.AdminDashLive, as: :ts do pipe_through([:browser, :admin_layout, :protected]) live("/dashboard", Index, :index) end scope "/teiserver/admin", TeiserverWeb.ClientLive, as: :ts_admin do pipe_through([:browser, :admin_layout, :protected]) live("/client", Index, :index) live("/client/:id", Show, :show) end scope "/teiserver/admin", TeiserverWeb.AgentLive, as: :ts_admin do pipe_through([:browser, :admin_layout, :protected]) live("/agent", Index, :index) # live("/agent/:id", Show, :show) end scope "/teiserver/admin", TeiserverWeb.Admin, as: :ts_admin do pipe_through([:browser, :admin_layout, :protected]) get("/", GeneralController, :index) get("/metrics", GeneralController, :metrics) get("/tools", ToolController, :index) get("/tools/convert", ToolController, :convert_form) post("/tools/convert_post", ToolController, :convert_post) post("/clans/create_membership", ClanController, :create_membership) delete("/clans/delete_membership/:clan_id/:user_id", ClanController, :delete_membership) delete("/clans/delete_invite/:clan_id/:user_id", ClanController, :delete_invite) put("/clans/promote/:clan_id/:user_id", ClanController, :promote) put("/clans/demote/:clan_id/:user_id", ClanController, :demote) resources("/clans", ClanController) get("/users/reset_password/:id", UserController, :reset_password) get("/users/action/:id/:action", UserController, :perform_action) put("/users/action/:id/:action", UserController, :perform_action) get("/users/reports/:id/respond", UserController, :respond_form) put("/users/reports/:id/respond", UserController, :respond_post) get("/users/smurf_search/:id", UserController, :smurf_search) get("/users/banhash_form/:id", UserController, :banhash_form) 
get("/users/full_chat/:id", UserController, :full_chat) get("/users/search", UserController, :index) post("/users/set_stat", UserController, :set_stat) get("/users/data_search", UserController, :data_search) post("/users/data_search", UserController, :data_search) post("/users/search", UserController, :search) resources("/user", UserController) resources("/banhash", BanHashController, only: [:index, :show, :delete, :create]) resources("/badge_types", BadgeTypeController) resources("/accolades", AccoladeController, only: [:index, :show, :delete]) get("/accolades/user/:user_id", AccoladeController, :user_show) resources("/matches", MatchController, only: [:index, :show, :delete]) get("/matches/user/:user_id", MatchController, :user_show) resources("/chat", ChatController, only: [:index]) post("/chat", ChatController, :index) get("/lobbies/:id/chat", LobbyController, :chat) end end end end
43.67094
101
0.675311
ffc051ff58db1ffba8e8dad9e5bd5e0bc0cf8b81
84
exs
Elixir
test/railway_ui_web/views/page_view_test.exs
SophieDeBenedetto/railway-ui
ceb253bccf63f278e93502e4dc6b113b31f6d8b2
[ "MIT" ]
2
2019-11-12T21:04:11.000Z
2020-02-09T18:07:27.000Z
test/railway_ui_web/views/page_view_test.exs
SophieDeBenedetto/railway-ui
ceb253bccf63f278e93502e4dc6b113b31f6d8b2
[ "MIT" ]
4
2019-12-02T17:31:57.000Z
2021-03-09T22:59:48.000Z
test/railway_ui_web/views/page_view_test.exs
SophieDeBenedetto/railway-ui
ceb253bccf63f278e93502e4dc6b113b31f6d8b2
[ "MIT" ]
2
2020-02-05T18:23:26.000Z
2020-04-26T13:50:10.000Z
defmodule RailwayUiWeb.PageViewTest do use RailwayUiWeb.ConnCase, async: true end
21
40
0.833333
ffc058f29a61d13928938ded201fdda2106823c7
1,772
ex
Elixir
clients/document_ai/lib/google_api/document_ai/v1beta2/model/google_cloud_documentai_v1beta2_document_page_dimension.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
null
null
null
clients/document_ai/lib/google_api/document_ai/v1beta2/model/google_cloud_documentai_v1beta2_document_page_dimension.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
1
2020-12-18T09:25:12.000Z
2020-12-18T09:25:12.000Z
clients/document_ai/lib/google_api/document_ai/v1beta2/model/google_cloud_documentai_v1beta2_document_page_dimension.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
1
2020-10-04T10:12:44.000Z
2020-10-04T10:12:44.000Z
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta2DocumentPageDimension do @moduledoc """ Dimension for the page. ## Attributes * `height` (*type:* `number()`, *default:* `nil`) - Page height. * `unit` (*type:* `String.t`, *default:* `nil`) - Dimension unit. * `width` (*type:* `number()`, *default:* `nil`) - Page width. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :height => number(), :unit => String.t(), :width => number() } field(:height) field(:unit) field(:width) end defimpl Poison.Decoder, for: GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta2DocumentPageDimension do def decode(value, options) do GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta2DocumentPageDimension.decode( value, options ) end end defimpl Poison.Encoder, for: GoogleApi.DocumentAI.V1beta2.Model.GoogleCloudDocumentaiV1beta2DocumentPageDimension do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
30.551724
97
0.71614
ffc0a9e1e06aea40c14edf3a803895b9aabfe983
673
ex
Elixir
web/models/user.ex
dogweather/ketobit
cd757648e5fee357b82716a2fb2d4542131312e7
[ "MIT" ]
6
2016-07-05T22:57:05.000Z
2019-04-05T01:47:28.000Z
web/models/user.ex
dogweather/ketobit
cd757648e5fee357b82716a2fb2d4542131312e7
[ "MIT" ]
null
null
null
web/models/user.ex
dogweather/ketobit
cd757648e5fee357b82716a2fb2d4542131312e7
[ "MIT" ]
null
null
null
defmodule Ketobit.User do use Ketobit.Web, :model schema "users" do field :name, :string field :email, :string field :user_id, :string field :access_token, :binary field :refresh_token, :binary field :expires_at, Ecto.DateTime timestamps end @required_fields ~w() @optional_fields ~w(name email user_id access_token refresh_token expires_at) @doc """ Creates a changeset based on the `model` and `params`. If no params are provided, an invalid changeset is returned with no validation performed. """ def changeset(model, params \\ :empty) do model |> cast(params, @required_fields, @optional_fields) end end
23.206897
79
0.699851
ffc0eb3e7fb87c633f4d5c11ef4d436044fe59ad
1,159
ex
Elixir
clients/ad_sense/lib/google_api/ad_sense/v2/connection.ex
renovate-bot/elixir-google-api
1da34cd39b670c99f067011e05ab90af93fef1f6
[ "Apache-2.0" ]
1
2021-12-20T03:40:53.000Z
2021-12-20T03:40:53.000Z
clients/ad_sense/lib/google_api/ad_sense/v2/connection.ex
swansoffiee/elixir-google-api
9ea6d39f273fb430634788c258b3189d3613dde0
[ "Apache-2.0" ]
1
2020-08-18T00:11:23.000Z
2020-08-18T00:44:16.000Z
clients/ad_sense/lib/google_api/ad_sense/v2/connection.ex
dazuma/elixir-google-api
6a9897168008efe07a6081d2326735fe332e522c
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.AdSense.V2.Connection do @moduledoc """ Handle Tesla connections for GoogleApi.AdSense.V2. """ @type t :: Tesla.Env.client() use GoogleApi.Gax.Connection, scopes: [ # View and manage your AdSense data "https://www.googleapis.com/auth/adsense", # View your AdSense data "https://www.googleapis.com/auth/adsense.readonly" ], otp_app: :google_api_ad_sense, base_url: "https://adsense.googleapis.com/" end
32.194444
74
0.727351
ffc1096dc7aea1bc04a02a7103b9d5faddb5560b
1,497
ex
Elixir
clients/container/lib/google_api/container/v1/model/gcp_filestore_csi_driver_config.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
1
2021-12-20T03:40:53.000Z
2021-12-20T03:40:53.000Z
clients/container/lib/google_api/container/v1/model/gcp_filestore_csi_driver_config.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
1
2020-08-18T00:11:23.000Z
2020-08-18T00:44:16.000Z
clients/container/lib/google_api/container/v1/model/gcp_filestore_csi_driver_config.ex
pojiro/elixir-google-api
928496a017d3875a1929c6809d9221d79404b910
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.Container.V1.Model.GcpFilestoreCsiDriverConfig do @moduledoc """ Configuration for the GCP Filestore CSI driver. ## Attributes * `enabled` (*type:* `boolean()`, *default:* `nil`) - Whether the GCP Filestore CSI driver is enabled for this cluster. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :enabled => boolean() | nil } field(:enabled) end defimpl Poison.Decoder, for: GoogleApi.Container.V1.Model.GcpFilestoreCsiDriverConfig do def decode(value, options) do GoogleApi.Container.V1.Model.GcpFilestoreCsiDriverConfig.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.Container.V1.Model.GcpFilestoreCsiDriverConfig do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
31.851064
123
0.746159
ffc10d9b6d8bfae2bf1fd45942099ac5bb5fd1e3
75
ex
Elixir
lib/elixir_awesome_web/views/layout_view.ex
ddidwyll/awesome-elixir
55b72305884cfd10510e25dd5f17f1a32765a17f
[ "MIT" ]
null
null
null
lib/elixir_awesome_web/views/layout_view.ex
ddidwyll/awesome-elixir
55b72305884cfd10510e25dd5f17f1a32765a17f
[ "MIT" ]
null
null
null
lib/elixir_awesome_web/views/layout_view.ex
ddidwyll/awesome-elixir
55b72305884cfd10510e25dd5f17f1a32765a17f
[ "MIT" ]
null
null
null
defmodule ElixirAwesomeWeb.LayoutView do use ElixirAwesomeWeb, :view end
18.75
40
0.84
ffc11c87e0ca6e0f353791b17470748435c17920
768
ex
Elixir
lib/remote_retro_web/channels/retro_management_handlers.ex
dplain90/remote_retro
c7063c42c893215a54d336da2d7b068f2c120a7d
[ "MIT" ]
null
null
null
lib/remote_retro_web/channels/retro_management_handlers.ex
dplain90/remote_retro
c7063c42c893215a54d336da2d7b068f2c120a7d
[ "MIT" ]
null
null
null
lib/remote_retro_web/channels/retro_management_handlers.ex
dplain90/remote_retro
c7063c42c893215a54d336da2d7b068f2c120a7d
[ "MIT" ]
null
null
null
defmodule RemoteRetroWeb.RetroManagementHandlers do import Phoenix.Channel import ShorterMaps alias RemoteRetro.{Emails, Mailer, Repo, Retro} def handle_in("retro_edited", %{"stage" => "closed"}, socket) do retro_id = socket.assigns.retro_id update_retro!(retro_id, "closed") Emails.action_items_email(retro_id) |> Mailer.deliver_now broadcast! socket, "retro_edited", %{"stage" => "closed"} {:noreply, socket} end def handle_in("retro_edited", ~m{stage}, socket) do update_retro!(socket.assigns.retro_id, stage) broadcast! socket, "retro_edited", ~m{stage} {:noreply, socket} end defp update_retro!(retro_id, stage) do Repo.get(Retro, retro_id) |> Retro.changeset(~m{stage}) |> Repo.update! end end
26.482759
66
0.696615
ffc14da54f8bb77d87b673e86b7a8768e12cfd04
5,230
ex
Elixir
lib/monocle/plugin.ex
brianbroderick/monocle
eeabecea658468479c04a02352271f6304447736
[ "Apache-2.0" ]
2
2018-02-11T01:18:24.000Z
2020-01-12T17:19:22.000Z
lib/monocle/plugin.ex
brianbroderick/monocle
eeabecea658468479c04a02352271f6304447736
[ "Apache-2.0" ]
null
null
null
lib/monocle/plugin.ex
brianbroderick/monocle
eeabecea658468479c04a02352271f6304447736
[ "Apache-2.0" ]
null
null
null
defmodule Monocle.Plugin do alias Monocle.Error alias Monocle.Options @moduledoc """ Plugins are modules that implement a render function. Right now that is `as_html`. ### API #### Plugin Registration When invoking `Monocle.as_html(some_md, options)` we can register plugins inside the `plugins` map, where each plugin is a value pointed to by the prefix triggering it. Prefixes are appended to `"$$"` and lines starting by that string will be rendered by the registered plugin. `%Monocle.Options{plugins: %{"" => CommentPlugin}}` would trigger the `CommentPlugin` for each block of lines prefixed by `$$`, while `%Monocle.Options{plugins: %{"cp" => CommentPlugin}}` would do the same for blocks of lines prefixed by `$$cp`. Please see the documentation of `Plugin.define` for a convenience function that helps creating the necessary `Monocle.Options` structs for the usage of plugins. #### Plugin Invocation `as_html` (or other render functions in the future) is invoked with a list of pairs containing the text and line number of the lines in the block. As an example, if our plugin was registered with the default prefix of `""` and the markdown to be converted was: # Plugin output ahead $$ line one $$ $$ line two `as_html` would be invoked as follows: as_html([{"line one", 2}, {"", 3}, {"line two", 4}) #### Plugin Output Monocle's render function will invoke the plugin's render function as explained above. It can then integrate the return value of the function into the generated rendering output if it complies to the following criteria. 1. It returns a string 1. It returns a list of strings 1. It returns a pair of lists containing a list of strings and a list of error/warning tuples. 
Where the tuples are of the form `{:error | :warning, line_number, descriptive_text}` #### A complete example iex> defmodule MyPlug do ...> def as_html(lines) do ...> # to demonstrate the three possible return values ...> case render(lines) do ...> {[line], []} -> line ...> {lines, []} -> lines ...> tuple -> tuple ...> end ...> end ...> ...> defp render(lines) do ...> Enum.map(lines, &render_line/1) |> Enum.split_with(&ok?/1) ...> end ...> ...> defp render_line({"", _}), do: "<hr/>" ...> defp render_line({"line one", _}), do: "<p>first line</p>\\n" ...> defp render_line({line, lnb}), do: {:error, lnb, line} ...> ...> defp ok?({_, _, _}), do: false ...> defp ok?(_), do: true ...> end ...> ...> lines = [ ...> "# Plugin Ahead", ...> "$$ line one", ...> "$$", ...> "$$ line two", ...> ] ...> Monocle.as_html(lines, Monocle.Plugin.define(MyPlug)) {:error, "<h1>Plugin Ahead</h1>\\n<p>first line</p>\\n<hr/>", [{ :error, 4, "line two"}]} #### Plugins, reusing Monocle As long as you avoid endless recursion there is absolutely no problem to call `Monocle.as_html` in your plugin, consider the following example in which the plugin will parse markdown and render html verbatim (which is stupid, that is what Monocle already does for you, but just to demonstrate the possibilities): iex> defmodule Again do ...> def as_html(lines) do ...> text_lines = Enum.map(lines, fn {str, _} -> str end) ...> {_, html, errors} = Monocle.as_html(text_lines) ...> { Enum.join([html | text_lines]), errors } ...> end ...> end ...> lines = [ ...> "$$a * one", ...> "$$a * two", ...> ] ...> Monocle.as_html(lines, Monocle.Plugin.define({Again, "a"})) {:ok, "<ul>\\n<li>one\\n</li>\\n<li>two\\n</li>\\n</ul>\\n* one* two", []} """ @doc """ adds the definition of one or more plugins to `Monocle.Options`. 
If the plugin is defined with the default prefix and no other options are needed one can use the one parameter form: iex> Monocle.Plugin.define(Monocle) # not a legal plugin of course %Monocle.Options{plugins: %{"" => Monocle}} More then one plugin can be defined, as long as all prefixes differ iex> defmodule P1, do: nil ...> defmodule P2, do: nil ...> Monocle.Plugin.define([ Monocle, {P1, "p1"}, {P2, "p2"} ]) %Monocle.Options{plugins: %{"" => Monocle, "p1" => Unit.PluginTest.P1, "p2" => Unit.PluginTest.P2}} """ def define(plugin_defs) def define(plugin_defs), do: define(%Options{}, plugin_defs) def define(options, plugin_defs) def define(options, plugins) when is_list(plugins) do Enum.reduce(plugins, options, fn plugin, acc -> define(acc, plugin) end) end def define(options=%Options{plugins: plugins}, {plugin, prefix}) do if Map.get(plugins, prefix) do raise Error, "must not define more than one plugin for prefix #{inspect prefix}" else %{options | plugins: Map.put(plugins, prefix, plugin)} end end def define(options, plugin), do: define(options, {plugin, ""}) end
37.898551
136
0.602486
ffc15821bff5743f417f95b27b3c0a639a4033af
200
ex
Elixir
lib/gardenhose/api/api.ex
taybin/gardenhose
cd82a7d1838d443810a13b75ac609aaac92a2b0b
[ "Apache-2.0" ]
null
null
null
lib/gardenhose/api/api.ex
taybin/gardenhose
cd82a7d1838d443810a13b75ac609aaac92a2b0b
[ "Apache-2.0" ]
null
null
null
lib/gardenhose/api/api.ex
taybin/gardenhose
cd82a7d1838d443810a13b75ac609aaac92a2b0b
[ "Apache-2.0" ]
null
null
null
defmodule Gardenhose.API do use Lazymaru.Router plug Plug.Static, at: "/static", from: "/my/static/path/" mount Router.Group def error(conn, _e) do "Server Error" |> text(500) end end
18.181818
59
0.675
ffc1779e5c6bd99ff012feac43b918bfdd65afd0
596
exs
Elixir
machine_translation/MorpHIN/Learned/Resources/TrainingInstances/67.exs
AdityaPrasadMishra/NLP--Project-Group-16
fb62cc6a1db4a494058171f11c14a2be3933a9a1
[ "MIT" ]
null
null
null
machine_translation/MorpHIN/Learned/Resources/TrainingInstances/67.exs
AdityaPrasadMishra/NLP--Project-Group-16
fb62cc6a1db4a494058171f11c14a2be3933a9a1
[ "MIT" ]
null
null
null
machine_translation/MorpHIN/Learned/Resources/TrainingInstances/67.exs
AdityaPrasadMishra/NLP--Project-Group-16
fb62cc6a1db4a494058171f11c14a2be3933a9a1
[ "MIT" ]
null
null
null
**EXAMPLE FILE** SYM * ordinal * * noun * pn; SYM * demonstrative verb_aux a82 noun * pn; verb * demonstrative pnoun * noun * pn; verb_aux a82 pn verb_aux a75 noun * pn; verb * adjective cm * noun * pn; verb_aux a19 adjective verb * pnoun * pn; verb * demonstrative noun * noun * pn; pn * verb pn * noun * pn; cm * quantifier pnoun * noun * pn; verb * demonstrative noun * noun * pn; SYM * pn verb_aux * cardinal * pn; SYM * cm SYM * pn * pn; SYM * cm * * demonstrative * pn; verb * verb noun * pn * pn; verb_aux a82 pn verb * noun * pn; verb_aux a57 adjective verb * noun * pn;
31.368421
44
0.645973
ffc1898e5483992f734244f43941c1e47e90e1e0
134
ex
Elixir
sample_server/lib/sample_server_web/channels/presence.ex
mjaric/finix
fb0dedfdfdd46927d3df239c7c45d7fe92c441c4
[ "Apache-2.0" ]
31
2019-03-26T15:26:21.000Z
2022-02-16T14:33:13.000Z
sample_server/lib/sample_server_web/channels/presence.ex
mjaric/finix
fb0dedfdfdd46927d3df239c7c45d7fe92c441c4
[ "Apache-2.0" ]
3
2019-04-05T19:45:09.000Z
2019-10-25T01:48:57.000Z
sample_server/lib/sample_server_web/channels/presence.ex
mjaric/finix
fb0dedfdfdd46927d3df239c7c45d7fe92c441c4
[ "Apache-2.0" ]
5
2019-03-27T14:16:28.000Z
2022-02-18T12:01:46.000Z
defmodule SampleServerWeb.Presence do use Phoenix.Presence, otp_app: :sample_server, pubsub_server: SampleServer.PubSub end
22.333333
38
0.791045
ffc21ccbe49e3c4a1ac14609cea4d581c9ef3464
267
ex
Elixir
lib/phoenix_checkers.ex
kowaliklukasz/phoenix-checkers
30021838cc1b9fa2dac83e698d0844e7e281dc26
[ "MIT" ]
null
null
null
lib/phoenix_checkers.ex
kowaliklukasz/phoenix-checkers
30021838cc1b9fa2dac83e698d0844e7e281dc26
[ "MIT" ]
null
null
null
lib/phoenix_checkers.ex
kowaliklukasz/phoenix-checkers
30021838cc1b9fa2dac83e698d0844e7e281dc26
[ "MIT" ]
null
null
null
defmodule PhoenixCheckers do @moduledoc """ PhoenixCheckers keeps the contexts that define your domain and business logic. Contexts are also responsible for managing your data, regardless if it comes from the database, an external API or others. """ end
26.7
66
0.76779
ffc2642e5049edac39bc20b4bba9f1e8ad048eec
437
exs
Elixir
test/live_view_demo_web/views/error_view_test.exs
manojsamanta/codebreaker-prototype
14d521db45784dee692de9e7252dd6a54bb793bb
[ "MIT" ]
null
null
null
test/live_view_demo_web/views/error_view_test.exs
manojsamanta/codebreaker-prototype
14d521db45784dee692de9e7252dd6a54bb793bb
[ "MIT" ]
null
null
null
test/live_view_demo_web/views/error_view_test.exs
manojsamanta/codebreaker-prototype
14d521db45784dee692de9e7252dd6a54bb793bb
[ "MIT" ]
null
null
null
defmodule LiveViewDemoWeb.ErrorViewTest do use LiveViewDemoWeb.ConnCase, async: true # Bring render/3 and render_to_string/3 for testing custom views import Phoenix.View test "renders 404.html" do assert render_to_string(LiveViewDemoWeb.ErrorView, "404.html", []) == "Not Found" end test "renders 500.html" do assert render_to_string(LiveViewDemoWeb.ErrorView, "500.html", []) == "Internal Server Error" end end
29.133333
97
0.745995
ffc2915d22ec1eddf415de69456164b3f6254078
16,504
ex
Elixir
lib/xgit/object.ex
scouten/xgit
0e2f849c83cdf39a9249b319d63ff3682c482c2f
[ "Apache-2.0" ]
94
2019-05-28T05:29:54.000Z
2022-02-18T20:03:20.000Z
lib/xgit/object.ex
scouten/xgit
0e2f849c83cdf39a9249b319d63ff3682c482c2f
[ "Apache-2.0" ]
156
2019-05-26T03:27:24.000Z
2020-10-08T05:44:26.000Z
lib/xgit/object.ex
scouten/redo
0e2f849c83cdf39a9249b319d63ff3682c482c2f
[ "Apache-2.0" ]
5
2019-05-28T16:35:55.000Z
2021-06-16T14:25:17.000Z
# Copyright (C) 2008-2010, Google Inc. # Copyright (C) 2008, Shawn O. Pearce <spearce@spearce.org> # and other copyright owners as documented in the project's IP log. # # Elixir adaptation from jgit file: # org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectChecker.java # # Copyright (C) 2019, Eric Scouten <eric+xgit@scouten.com> # # This program and the accompanying materials are made available # under the terms of the Eclipse Distribution License v1.0 which # accompanies this distribution, is reproduced below, and is # available at http://www.eclipse.org/org/documents/edl-v10.php # # All rights reserved. # # Redistribution and use in source and binary forms, with or # without modification, are permitted provided that the following # conditions are met: # # - Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # - Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # # - Neither the name of the Eclipse Foundation, Inc. nor the # names of its contributors may be used to endorse or promote # products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND # CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES # OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT # NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF # ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. defmodule Xgit.Object do @moduledoc ~S""" Describes a single object stored (or about to be stored) in a git repository. This struct is constructed, modified, and shared as a working description of how to find and describe an object before it gets written to a repository. """ use Xgit.ObjectType alias Xgit.ContentSource alias Xgit.FileMode alias Xgit.FilePath alias Xgit.ObjectId alias Xgit.PersonIdent alias Xgit.Util.ParseCharlist alias Xgit.Util.ParseDecimal import Xgit.Util.ForceCoverage import Xgit.Util.ParseHeader, only: [next_header: 1] @typedoc ~S""" This struct describes a single object stored or about to be stored in a git repository. ## Struct Members * `:type`: the object's type (`:blob`, `:tree`, `:commit`, or `:tag`) * `:content`: how to obtain the content (see `Xgit.ContentSource`) * `:size`: size (in bytes) of the object or `:unknown` * `:id`: object ID (40 chars hex) of the object or `:unknown` """ @type t :: %__MODULE__{ type: ObjectType.t(), content: ContentSource.t(), size: non_neg_integer() | :unknown, id: ObjectId.t() | :unknown } @enforce_keys [:type, :content] defstruct [:type, :content, size: :unknown, id: :unknown] @doc ~S""" Return `true` if the struct describes a valid object. _IMPORTANT:_ This validation _only_ verifies that the struct itself is valid. It does not inspect the content of the object. That check can be performed by `check/2`. 
""" @spec valid?(object :: any) :: boolean def valid?(object) def valid?(%__MODULE__{type: type, content: content, size: size, id: id}) when is_object_type(type) and is_integer(size) and size >= 0, do: ObjectId.valid?(id) && content != nil && ContentSource.impl_for(content) != nil def valid?(_), do: cover(false) @typedoc ~S""" Error codes which can be returned by `check/2`. """ @type check_reason :: :invalid_type | :no_tree_header | :invalid_tree | :invalid_parent | :no_author | :no_committer | :no_object_header | :invalid_object | :no_type_header | :invalid_tagger | :bad_date | :bad_email | :missing_email | :missing_space_before_date | :bad_time_zone | :invalid_file_mode | :truncated_in_name | :duplicate_entry_names | :incorrectly_sorted | :truncated_in_object_id | :null_sha1 | :invalid_mode @doc ~S""" Verify that a proposed object is valid. This function performs a detailed check on the _content_ of the object. For a simpler verification that the `Object` struct is _itself_ valid, see `valid?/1`. Verifications made by this function only check that the fields of an object are formatted correctly. The object ID checksum of the object is not verified, and connectivity links between objects are also not verified. It's assumed that the caller can provide both of these validations on its own. ## Options By default, this function will only enforce Posix file name restrictions. * `:macosx?`: `true` to also enforce Mac OS X path name restrictions * `:windows?`: `true` to also enforce Windows path name restrictions ## Return Value `:ok` if the object is successfully validated. `{:error, :invalid_type}` if the object's type is unknown. `{:error, :no_tree_header}` if the object is a commit but does not contain a valid tree header. `{:error, :invalid_tree}` if the object is a commit but the tree object ID is invalid. `{:error, :invalid_parent}` if the object is a commit but one of the `parent` headers is invalid. 
`{:error, :no_author}` if the object is a commit but there is no `author` header. `{:error, :no_committer}` if the object is a commit but there is no `committer` header. `{:error, :no_object_header}` if the object is a tag but there is no `object` header. `{:error, :invalid_object}` if the object is a tag but the object ID is invalid. `{:error, :no_type_header}` if the object is a tag but there is no `type` header. `{:error, :invalid_tagger}` if the object is a tag but one of the `tagger` headers is invalid. `{:error, :bad_date}` if the object is a tag or a commit but has a malformed date entry. `{:error, :bad_email}` if the object is a tag or a commit but has a malformed e-mail address. `{:error, :missing_email}` if the object is a tag or a commit but has a missing e-mail address where one is expected. `{:error, :missing_space_before_date}` if the object is a tag or a commit but has no space preceding the place where a date is expected. `{:error, :bad_time_zone}` if the object is a tag or a commit but has a malformed time zone entry. `{:error, :invalid_file_mode}` if the object is a tree but one of the file modes is invalid. `{:error, :truncated_in_name}` if the object is a tree but one of the file names is incomplete. `{:error, :duplicate_entry_names}` if the object is a tree and contains duplicate entry names. `{:error, :incorrectly_sorted}` if the object is a tree and the entries are not in alphabetical order. `{:error, :truncated_in_object_id}` if the object is a tree and one of the object IDs is invalid. `{:error, :null_sha1}` if the object is a tree and one of the object IDs is all zeros. `{:error, :invalid_mode}` if the object is a tree and one of the file modes is incomplete. See also error responses from `Xgit.FilePath.check_path/2` and `Xgit.FilePath.check_path_segment/2`. 
""" @spec check(object :: t(), windows?: boolean, macosx?: boolean) :: :ok | {:error, reason :: check_reason} | {:error, reason :: FilePath.check_path_reason()} | {:error, reason :: FilePath.check_path_segment_reason()} def check(object, opts \\ []) def check(%__MODULE__{type: :blob}, _opts), do: cover(:ok) def check(%__MODULE__{type: :commit} = object, _opts), do: check_commit(object) def check(%__MODULE__{type: :tag} = object, _opts), do: check_tag(object) def check(%__MODULE__{type: :tree} = object, opts), do: check_tree(object, opts) def check(%__MODULE__{type: _type}, _opts), do: cover({:error, :invalid_type}) # -- commit specifics -- defp check_commit(%__MODULE__{content: data}) when is_list(data) do with {:tree, {'tree', tree_id, data}} <- {:tree, next_header(data)}, {:tree_id, {_tree_id_str, []}} <- {:tree_id, ObjectId.from_hex_charlist(tree_id)}, {:parents, data} when is_list(data) <- {:parents, check_commit_parents(data)}, {:author, {'author', author, data}} <- {:author, next_header(data)}, {:author_id, :ok} <- {:author_id, check_person_ident(author)}, {:committer, {'committer', committer, _data}} <- {:committer, next_header(data)}, {:committer_id, :ok} <- {:committer_id, check_person_ident(committer)} do cover :ok else {:tree, _} -> cover {:error, :no_tree_header} {:tree_id, _} -> cover {:error, :invalid_tree} {:parents, _} -> cover {:error, :invalid_parent} {:author, _} -> cover {:error, :no_author} {:author_id, why} when is_atom(why) -> cover {:error, why} {:committer, _} -> cover {:error, :no_committer} {:committer_id, why} when is_atom(why) -> cover {:error, why} end end defp check_commit_parents(data) do with {'parent', parent_id, next_data} <- next_header(data), {:parent_id, {_parent_id, []}} <- {:parent_id, ObjectId.from_hex_charlist(parent_id)} do check_commit_parents(next_data) else {:parent_id, _} -> cover nil _ -> cover data end end # -- tag specifics -- defp check_tag(%__MODULE__{content: data}) when is_list(data) do with {:object, {'object', 
object_id, data}} <- {:object, next_header(data)}, {:object_id, {object_id, []}} when is_binary(object_id) <- {:object_id, ObjectId.from_hex_charlist(object_id)}, {:type, {'type', _type, data}} <- {:type, next_header(data)}, {:tag, {'tag', _tag, data}} <- {:tag, next_header(data)}, {:tagger, data} when is_list(data) <- {:tagger, maybe_match_tagger(data)} do cover :ok else {:object, _} -> cover {:error, :no_object_header} {:object_id, _} -> cover {:error, :invalid_object} {:type, _} -> cover {:error, :no_type_header} {:tag, _} -> cover {:error, :no_tag_header} {:tagger, _} -> cover {:error, :invalid_tagger} end end defp maybe_match_tagger(data) do with {'tagger', tagger, next} when next != data <- next_header(data), {:valid_person_ident, %PersonIdent{}} <- {:valid_person_ident, PersonIdent.from_byte_list(tagger)} do cover next else {:valid_person_ident, _} -> cover nil _ -> cover data end end # -- tree specifics -- defp check_tree(%__MODULE__{content: data}, opts) when is_list(data) and is_list(opts) do maybe_normalized_paths = if Keyword.get(opts, :windows?) || Keyword.get(opts, :macosx?) do MapSet.new() else cover nil end check_next_tree_entry(data, maybe_normalized_paths, [], FileMode.regular_file(), opts) end defp check_next_tree_entry([], _maybe_normalized_paths, _previous_name, _previous_mode, _opts), do: cover(:ok) defp check_next_tree_entry(data, maybe_normalized_paths, previous_name, previous_mode, opts) do # Scan one entry then recurse to scan remaining entries. 
with {:file_mode, {:ok, file_mode, data}} <- {:file_mode, check_file_mode(data, 0)}, {:file_mode, true} <- {:file_mode, FileMode.valid?(file_mode)}, {:path_split, {path_segment, [0 | data]}} <- {:path_split, path_and_object_id(data)}, {:path_valid, :ok} <- {:path_valid, FilePath.check_path_segment(path_segment, opts)}, {:duplicate, false} <- {:duplicate, maybe_mapset_member?(maybe_normalized_paths, path_segment, opts)}, {:duplicate, false} <- {:duplicate, duplicate_name?(path_segment, data)}, {:sorted, true} <- {:sorted, correctly_sorted?(previous_name, previous_mode, path_segment, file_mode)}, {raw_object_id, data} <- Enum.split(data, 20), {:object_id_length, 20} <- {:object_id_length, Enum.count(raw_object_id)}, {:object_id_null, false} <- {:object_id_null, Enum.all?(raw_object_id, &(&1 == 0))} do check_next_tree_entry( data, maybe_put_path(maybe_normalized_paths, path_segment, opts), path_segment, file_mode, opts ) else {:file_mode, {:error, reason}} -> cover {:error, reason} {:file_mode, _} -> cover {:error, :invalid_file_mode} {:path_split, _} -> cover {:error, :truncated_in_name} {:path_valid, {:error, reason}} -> cover {:error, reason} {:duplicate, _} -> cover {:error, :duplicate_entry_names} {:sorted, _} -> cover {:error, :incorrectly_sorted} {:object_id_length, _} -> cover {:error, :truncated_in_object_id} {:object_id_null, _} -> cover {:error, :null_sha1} end end defp check_file_mode([], _mode), do: cover({:error, :invalid_mode}) defp check_file_mode([?\s | data], mode), do: cover({:ok, mode, data}) defp check_file_mode([?0 | _data], 0), do: cover({:error, :invalid_mode}) defp check_file_mode([c | data], mode) when c >= ?0 and c <= ?7, do: check_file_mode(data, mode * 8 + (c - ?0)) defp check_file_mode([_c | _data], _mode), do: cover({:error, :invalid_mode}) defp path_and_object_id(data), do: Enum.split_while(data, &(&1 != 0)) defp maybe_mapset_member?(nil, _path_segment, _opts), do: cover(false) defp maybe_mapset_member?(mapset, path_segment, opts), do: 
MapSet.member?(mapset, normalize(path_segment, Keyword.get(opts, :macosx?, false))) defp duplicate_name?(this_name, data) do data = Enum.drop(data, 20) {mode_str, data} = Enum.split_while(data, &(&1 != ?\s)) mode = parse_octal(mode_str) data = Enum.drop(data, 1) {next_name, data} = Enum.split_while(data, &(&1 != 0)) data = Enum.drop(data, 1) compare = FilePath.compare_same_name(this_name, next_name, mode) cond do Enum.empty?(mode_str) or Enum.empty?(next_name) -> cover false compare == :lt -> cover false compare == :eq -> cover true compare == :gt -> duplicate_name?(this_name, data) end end defp parse_octal(data) do case Integer.parse(to_string(data), 8) do {n, _} when is_integer(n) -> cover n :error -> cover 0 end end defp correctly_sorted?([], _previous_mode, _this_name, _this_mode), do: cover(true) defp correctly_sorted?(previous_name, previous_mode, this_name, this_mode), do: FilePath.compare(previous_name, previous_mode, this_name, this_mode) != :gt defp maybe_put_path(nil, _path_segment, _opts), do: cover(nil) defp maybe_put_path(mapset, path_segment, opts), do: MapSet.put(mapset, normalize(path_segment, Keyword.get(opts, :macosx?, false))) # -- generic matching utilities -- defp check_person_ident(data) do with {:missing_email, [?< | email_start]} <- {:missing_email, Enum.drop_while(data, &(&1 != ?<))}, {:bad_email, [?> | after_email]} <- {:bad_email, Enum.drop_while(email_start, &(&1 != ?>))}, {:missing_space_before_date, [?\s | date]} <- {:missing_space_before_date, after_email}, {:bad_date, {_date, [?\s | tz]}} <- {:bad_date, ParseDecimal.from_decimal_charlist(date)}, {:bad_timezone, {_tz, []}} <- {:bad_timezone, ParseDecimal.from_decimal_charlist(tz)} do cover :ok else {:missing_email, _} -> cover :missing_email {:bad_email, _} -> cover :bad_email {:missing_space_before_date, _} -> cover :missing_space_before_date {:bad_date, _} -> cover :bad_date {:bad_timezone, _} -> cover :bad_time_zone end end defp normalize(name, true = _mac?) 
when is_list(name) do name |> ParseCharlist.decode_ambiguous_charlist() |> String.downcase() |> :unicode.characters_to_nfc_binary() end defp normalize(name, _) when is_list(name), do: Enum.map(name, &to_lower/1) defp to_lower(b) when b >= ?A and b <= ?Z, do: cover(b + 32) defp to_lower(b), do: cover(b) end
38.651054
97
0.667475
ffc2b44fa154033687bf63043535698e05dcc6c6
2,188
ex
Elixir
clients/language/lib/google_api/language/v1/model/analyze_syntax_response.ex
matehat/elixir-google-api
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
[ "Apache-2.0" ]
1
2018-12-03T23:43:10.000Z
2018-12-03T23:43:10.000Z
clients/language/lib/google_api/language/v1/model/analyze_syntax_response.ex
matehat/elixir-google-api
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
[ "Apache-2.0" ]
null
null
null
clients/language/lib/google_api/language/v1/model/analyze_syntax_response.ex
matehat/elixir-google-api
c1b2523c2c4cdc9e6ca4653ac078c94796b393c3
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the &quot;License&quot;); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an &quot;AS IS&quot; BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This class is auto generated by the elixir code generator program. # Do not edit the class manually. defmodule GoogleApi.Language.V1.Model.AnalyzeSyntaxResponse do @moduledoc """ The syntax analysis response message. ## Attributes * `language` (*type:* `String.t`, *default:* `nil`) - The language of the text, which will be the same as the language specified in the request or, if not specified, the automatically-detected language. See Document.language field for more details. * `sentences` (*type:* `list(GoogleApi.Language.V1.Model.Sentence.t)`, *default:* `nil`) - Sentences in the input document. * `tokens` (*type:* `list(GoogleApi.Language.V1.Model.Token.t)`, *default:* `nil`) - Tokens, along with their syntactic information, in the input document. 
""" use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :language => String.t(), :sentences => list(GoogleApi.Language.V1.Model.Sentence.t()), :tokens => list(GoogleApi.Language.V1.Model.Token.t()) } field(:language) field(:sentences, as: GoogleApi.Language.V1.Model.Sentence, type: :list) field(:tokens, as: GoogleApi.Language.V1.Model.Token, type: :list) end defimpl Poison.Decoder, for: GoogleApi.Language.V1.Model.AnalyzeSyntaxResponse do def decode(value, options) do GoogleApi.Language.V1.Model.AnalyzeSyntaxResponse.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.Language.V1.Model.AnalyzeSyntaxResponse do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
39.781818
159
0.728062
ffc2b4c921381ed828daaebd15e8f67a89551a64
9,090
exs
Elixir
test/struct/elements_test.exs
h-ikeda/structex-web
16768fe901d7adef3f3d81c26c3f5b5197b06088
[ "MIT" ]
null
null
null
test/struct/elements_test.exs
h-ikeda/structex-web
16768fe901d7adef3f3d81c26c3f5b5197b06088
[ "MIT" ]
null
null
null
test/struct/elements_test.exs
h-ikeda/structex-web
16768fe901d7adef3f3d81c26c3f5b5197b06088
[ "MIT" ]
null
null
null
defmodule Struct.ElementsTest do use Struct.DataCase alias Struct.Elements describe "inserted_wooden_siding_walls" do alias Struct.Elements.InsertedWoodenSidingWall @valid_attrs %{beam_height: 120.5, beam_width: 120.5, column_depth: 120.5, column_width: 120.5, fiber_direction_compressive_strength_of_sidings: 120.5, fiber_direction_elasticity_of_sidings: 120.5, fiber_orthogonal_direction_compressive_strength_of_sidings: 120.5, fiber_orthogonal_direction_elasticity_of_beams: 120.5, fiber_orthogonal_direction_elasticity_of_columns: 120.5, frame_inner_height: 120.5, frame_inner_width: 120.5, friction_coefficient_between_sidings: 120.5, name: "some name", number_of_shear_connecters_on_line: 42, shear_modulus_of_sidings: 120.5, siding_elasticity_ratio: 120.5, siding_thickness: 120.5, siding_width: 120.5, single_shear_connecter_rigidity: 120.5, single_shear_connecter_yield_resistance: 120.5, substitution_coefficient_of_beams: 120.5, substitution_coefficient_of_columns: 120.5, yield_judgement_ratio: 120.5} @update_attrs %{beam_height: 456.7, beam_width: 456.7, column_depth: 456.7, column_width: 456.7, fiber_direction_compressive_strength_of_sidings: 456.7, fiber_direction_elasticity_of_sidings: 456.7, fiber_orthogonal_direction_compressive_strength_of_sidings: 456.7, fiber_orthogonal_direction_elasticity_of_beams: 456.7, fiber_orthogonal_direction_elasticity_of_columns: 456.7, frame_inner_height: 456.7, frame_inner_width: 456.7, friction_coefficient_between_sidings: 456.7, name: "some updated name", number_of_shear_connecters_on_line: 43, shear_modulus_of_sidings: 456.7, siding_elasticity_ratio: 456.7, siding_thickness: 456.7, siding_width: 456.7, single_shear_connecter_rigidity: 456.7, single_shear_connecter_yield_resistance: 456.7, substitution_coefficient_of_beams: 456.7, substitution_coefficient_of_columns: 456.7, yield_judgement_ratio: 456.7} @invalid_attrs %{beam_height: nil, beam_width: nil, column_depth: nil, column_width: nil, 
fiber_direction_compressive_strength_of_sidings: nil, fiber_direction_elasticity_of_sidings: nil, fiber_orthogonal_direction_compressive_strength_of_sidings: nil, fiber_orthogonal_direction_elasticity_of_beams: nil, fiber_orthogonal_direction_elasticity_of_columns: nil, frame_inner_height: nil, frame_inner_width: nil, friction_coefficient_between_sidings: nil, name: nil, number_of_shear_connecters_on_line: nil, shear_modulus_of_sidings: nil, siding_elasticity_ratio: nil, siding_thickness: nil, siding_width: nil, single_shear_connecter_rigidity: nil, single_shear_connecter_yield_resistance: nil, substitution_coefficient_of_beams: nil, substitution_coefficient_of_columns: nil, yield_judgement_ratio: nil} def inserted_wooden_siding_wall_fixture(attrs \\ %{}) do {:ok, inserted_wooden_siding_wall} = attrs |> Enum.into(@valid_attrs) |> Elements.create_inserted_wooden_siding_wall() inserted_wooden_siding_wall end test "list_inserted_wooden_siding_walls/0 returns all inserted_wooden_siding_walls" do inserted_wooden_siding_wall = inserted_wooden_siding_wall_fixture() assert Elements.list_inserted_wooden_siding_walls() == [inserted_wooden_siding_wall] end test "get_inserted_wooden_siding_wall!/1 returns the inserted_wooden_siding_wall with given id" do inserted_wooden_siding_wall = inserted_wooden_siding_wall_fixture() assert Elements.get_inserted_wooden_siding_wall!(inserted_wooden_siding_wall.id) == inserted_wooden_siding_wall end test "create_inserted_wooden_siding_wall/1 with valid data creates a inserted_wooden_siding_wall" do assert {:ok, %InsertedWoodenSidingWall{} = inserted_wooden_siding_wall} = Elements.create_inserted_wooden_siding_wall(@valid_attrs) assert inserted_wooden_siding_wall.beam_height == 120.5 assert inserted_wooden_siding_wall.beam_width == 120.5 assert inserted_wooden_siding_wall.column_depth == 120.5 assert inserted_wooden_siding_wall.column_width == 120.5 assert inserted_wooden_siding_wall.fiber_direction_compressive_strength_of_sidings == 120.5 
assert inserted_wooden_siding_wall.fiber_direction_elasticity_of_sidings == 120.5 assert inserted_wooden_siding_wall.fiber_orthogonal_direction_compressive_strength_of_sidings == 120.5 assert inserted_wooden_siding_wall.fiber_orthogonal_direction_elasticity_of_beams == 120.5 assert inserted_wooden_siding_wall.fiber_orthogonal_direction_elasticity_of_columns == 120.5 assert inserted_wooden_siding_wall.frame_inner_height == 120.5 assert inserted_wooden_siding_wall.frame_inner_width == 120.5 assert inserted_wooden_siding_wall.friction_coefficient_between_sidings == 120.5 assert inserted_wooden_siding_wall.name == "some name" assert inserted_wooden_siding_wall.number_of_shear_connecters_on_line == 42 assert inserted_wooden_siding_wall.shear_modulus_of_sidings == 120.5 assert inserted_wooden_siding_wall.siding_elasticity_ratio == 120.5 assert inserted_wooden_siding_wall.siding_thickness == 120.5 assert inserted_wooden_siding_wall.siding_width == 120.5 assert inserted_wooden_siding_wall.single_shear_connecter_rigidity == 120.5 assert inserted_wooden_siding_wall.single_shear_connecter_yield_resistance == 120.5 assert inserted_wooden_siding_wall.substitution_coefficient_of_beams == 120.5 assert inserted_wooden_siding_wall.substitution_coefficient_of_columns == 120.5 assert inserted_wooden_siding_wall.yield_judgement_ratio == 120.5 end test "create_inserted_wooden_siding_wall/1 with invalid data returns error changeset" do assert {:error, %Ecto.Changeset{}} = Elements.create_inserted_wooden_siding_wall(@invalid_attrs) end test "update_inserted_wooden_siding_wall/2 with valid data updates the inserted_wooden_siding_wall" do inserted_wooden_siding_wall = inserted_wooden_siding_wall_fixture() assert {:ok, %InsertedWoodenSidingWall{} = inserted_wooden_siding_wall} = Elements.update_inserted_wooden_siding_wall(inserted_wooden_siding_wall, @update_attrs) assert inserted_wooden_siding_wall.beam_height == 456.7 assert inserted_wooden_siding_wall.beam_width == 456.7 assert 
inserted_wooden_siding_wall.column_depth == 456.7 assert inserted_wooden_siding_wall.column_width == 456.7 assert inserted_wooden_siding_wall.fiber_direction_compressive_strength_of_sidings == 456.7 assert inserted_wooden_siding_wall.fiber_direction_elasticity_of_sidings == 456.7 assert inserted_wooden_siding_wall.fiber_orthogonal_direction_compressive_strength_of_sidings == 456.7 assert inserted_wooden_siding_wall.fiber_orthogonal_direction_elasticity_of_beams == 456.7 assert inserted_wooden_siding_wall.fiber_orthogonal_direction_elasticity_of_columns == 456.7 assert inserted_wooden_siding_wall.frame_inner_height == 456.7 assert inserted_wooden_siding_wall.frame_inner_width == 456.7 assert inserted_wooden_siding_wall.friction_coefficient_between_sidings == 456.7 assert inserted_wooden_siding_wall.name == "some updated name" assert inserted_wooden_siding_wall.number_of_shear_connecters_on_line == 43 assert inserted_wooden_siding_wall.shear_modulus_of_sidings == 456.7 assert inserted_wooden_siding_wall.siding_elasticity_ratio == 456.7 assert inserted_wooden_siding_wall.siding_thickness == 456.7 assert inserted_wooden_siding_wall.siding_width == 456.7 assert inserted_wooden_siding_wall.single_shear_connecter_rigidity == 456.7 assert inserted_wooden_siding_wall.single_shear_connecter_yield_resistance == 456.7 assert inserted_wooden_siding_wall.substitution_coefficient_of_beams == 456.7 assert inserted_wooden_siding_wall.substitution_coefficient_of_columns == 456.7 assert inserted_wooden_siding_wall.yield_judgement_ratio == 456.7 end test "update_inserted_wooden_siding_wall/2 with invalid data returns error changeset" do inserted_wooden_siding_wall = inserted_wooden_siding_wall_fixture() assert {:error, %Ecto.Changeset{}} = Elements.update_inserted_wooden_siding_wall(inserted_wooden_siding_wall, @invalid_attrs) assert inserted_wooden_siding_wall == Elements.get_inserted_wooden_siding_wall!(inserted_wooden_siding_wall.id) end test "delete_inserted_wooden_siding_wall/1 
deletes the inserted_wooden_siding_wall" do inserted_wooden_siding_wall = inserted_wooden_siding_wall_fixture() assert {:ok, %InsertedWoodenSidingWall{}} = Elements.delete_inserted_wooden_siding_wall(inserted_wooden_siding_wall) assert_raise Ecto.NoResultsError, fn -> Elements.get_inserted_wooden_siding_wall!(inserted_wooden_siding_wall.id) end end test "change_inserted_wooden_siding_wall/1 returns a inserted_wooden_siding_wall changeset" do inserted_wooden_siding_wall = inserted_wooden_siding_wall_fixture() assert %Ecto.Changeset{} = Elements.change_inserted_wooden_siding_wall(inserted_wooden_siding_wall) end end end
83.394495
861
0.826953
ffc2bd51b2afd7492dfcfcd6f2f46b9f62c8019b
700
exs
Elixir
mix.exs
ElaWorkshop/ex_loglite
eed96e4c5f4e0fc160fdc742677531b339ba075b
[ "MIT" ]
7
2016-09-24T07:06:09.000Z
2017-05-08T08:47:53.000Z
mix.exs
ElaWorkshop/ex_loglite
eed96e4c5f4e0fc160fdc742677531b339ba075b
[ "MIT" ]
1
2018-03-12T14:53:00.000Z
2018-03-12T14:53:00.000Z
mix.exs
ElaWorkshop/ex_loglite
eed96e4c5f4e0fc160fdc742677531b339ba075b
[ "MIT" ]
null
null
null
defmodule ExLogLite.Mixfile do use Mix.Project def project do [app: :ex_loglite, version: "0.1.1", elixir: "~> 1.3", build_embedded: Mix.env == :prod, start_permanent: Mix.env == :prod, deps: deps(), package: package(), description: description()] end def application, do: [] defp deps do [ {:ex_doc, "~> 0.13", only: :dev}, {:eqc_ex, "~> 1.4", only: :test}, ] end defp description, do: "An Elixir Logger Backend for EVE LogLite." defp package do [ licenses: ["MIT"], maintainers: ["Lou Xun <aquarhead@ela.build>"], links: %{"GitHub" => "https://github.com/ElaWorkshop/ex_loglite"} ] end end
20.588235
71
0.572857
ffc2c6578614986ff0c07a0bff0b91e60acbe416
2,467
ex
Elixir
examples/bulk_job_controller.ex
timbuckley/forcex
e3b4cac234d1f17d1be6d8c49a0081568a0b711e
[ "MIT" ]
59
2015-06-10T13:12:16.000Z
2021-04-27T07:09:33.000Z
examples/bulk_job_controller.ex
timbuckley/forcex
e3b4cac234d1f17d1be6d8c49a0081568a0b711e
[ "MIT" ]
38
2016-04-06T01:12:43.000Z
2019-10-17T17:16:06.000Z
examples/bulk_job_controller.ex
timbuckley/forcex
e3b4cac234d1f17d1be6d8c49a0081568a0b711e
[ "MIT" ]
38
2015-10-07T03:35:51.000Z
2020-10-29T17:59:38.000Z
defmodule BulkJobController do use GenServer use Forcex.Bulk.BatchHandler use Forcex.Bulk.JobHandler def start_link(params) do GenServer.start_link(__MODULE__, params) end def init({:query, sobject, queries, client}) do send(self(), :after_init) {:ok, [sobject: sobject, queries: queries, client: client]} end def handle_info(:after_init, state) do sobject = Keyword.fetch!(state, :sobject) client = Keyword.fetch!(state, :client) {:ok, pid} = Forcex.Bulk.JobWorker.start_link({:query, sobject: sobject, client: client, handlers: [self()]}) {:noreply, Keyword.put(state, :job_worker, pid)} end def handle_info(msg, state) do IO.puts "Got message: #{inspect msg}" {:noreply, state} end ############################# # # Job Handler callbacks # ############################# def handle_job_created(job, state) do client = Keyword.fetch!(state, :client) queries = Keyword.fetch!(state, :queries) for query <- queries do {:ok, _pid} = Forcex.Bulk.BatchWorker.start_link({:query, client: client, job: job, query: query, handlers: [self()]}) end IO.puts "Job #{job["id"]} created" {:noreply, state} end def handle_job_closed(job, state) do IO.puts "Job #{job["id"]} closed" {:stop, :normal, Keyword.put(state, :job, job)} end def handle_job_all_batches_complete(_job, state) do job_worker = Keyword.fetch!(state, :job_worker) send(job_worker, :close_job) {:noreply, state} end def handle_job_status(job, state) do IO.puts "Job #{job["id"]} poll" {:noreply, state} end ############################# # # Batch Handler callbacks # ############################# def handle_batch_completed(batch, state) do IO.puts "Batch #{batch["id"]} complete" {:noreply, state} end def handle_batch_failed(batch, state) do IO.puts "Batch #{batch["id"]} failed" {:noreply, state} end def handle_batch_status(batch, state) do IO.puts "Batch #{batch["id"]} poll" {:noreply, state} end def handle_batch_created(batch, state) do IO.puts "Batch #{batch["id"]} created" {:noreply, state} end def 
handle_batch_partial_result_ready(batch, results, state) do client = Keyword.fetch!(state, :client) partial_results = Forcex.Bulk.fetch_results(results, batch, client) IO.puts("Batch #{batch["id"]} partial results: #{inspect partial_results}") {:noreply, state} end end
27.719101
124
0.632347
ffc2cf180a906f4b80141ca706c881bdb6c75648
128
exs
Elixir
test/pomodoro_slack_test.exs
startupbuilders/pomodoro_slack
4414d13246fd29454175d940fac916f36deb65a4
[ "MIT" ]
null
null
null
test/pomodoro_slack_test.exs
startupbuilders/pomodoro_slack
4414d13246fd29454175d940fac916f36deb65a4
[ "MIT" ]
null
null
null
test/pomodoro_slack_test.exs
startupbuilders/pomodoro_slack
4414d13246fd29454175d940fac916f36deb65a4
[ "MIT" ]
null
null
null
defmodule PomodoroSlackTest do use ExUnit.Case doctest PomodoroSlack test "the truth" do assert 1 + 1 == 2 end end
14.222222
30
0.703125
ffc3208f2ad5eebe3b3ff650883ef40a9d9fb823
1,399
ex
Elixir
lib/step_flow/workflows/workflow.ex
mathiaHT/ex_step_flow
6496e9511239de64f00119428476338dfcde9dea
[ "MIT" ]
4
2019-12-07T05:18:26.000Z
2020-11-06T23:28:43.000Z
lib/step_flow/workflows/workflow.ex
mathiaHT/ex_step_flow
6496e9511239de64f00119428476338dfcde9dea
[ "MIT" ]
53
2020-01-06T11:23:09.000Z
2021-06-25T15:30:07.000Z
lib/step_flow/workflows/workflow.ex
mathiaHT/ex_step_flow
6496e9511239de64f00119428476338dfcde9dea
[ "MIT" ]
3
2020-01-30T15:37:40.000Z
2020-10-27T14:10:02.000Z
defmodule StepFlow.Workflows.Workflow do use Ecto.Schema import Ecto.Changeset alias StepFlow.Artifacts.Artifact alias StepFlow.Jobs.Job alias StepFlow.Rights.Right alias StepFlow.Workflows.Workflow @moduledoc false schema "step_flow_workflow" do field(:schema_version, :string) field(:identifier, :string) field(:version_major, :integer) field(:version_minor, :integer) field(:version_micro, :integer) field(:tags, {:array, :string}, default: []) field(:reference, :string) field(:steps, {:array, :map}, default: []) field(:parameters, {:array, :map}, default: []) has_many(:jobs, Job, on_delete: :delete_all) has_many(:artifacts, Artifact, on_delete: :delete_all) many_to_many(:rights, Right, join_through: "step_flow_workflow_right", on_delete: :delete_all, on_replace: :delete ) timestamps() end @doc false def changeset(%Workflow{} = workflow, attrs) do workflow |> cast(attrs, [ :schema_version, :identifier, :version_major, :version_minor, :version_micro, :tags, :parameters, :reference, :steps ]) |> cast_assoc(:rights, required: true) |> validate_required([ :schema_version, :identifier, :version_major, :version_minor, :version_micro, :reference, :steps ]) end end
23.316667
58
0.642602
ffc3244a92d0b4371b0f1964b401b3079f20930e
14,496
ex
Elixir
clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/subaccounts.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/subaccounts.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/subaccounts.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.DFAReporting.V33.Api.Subaccounts do @moduledoc """ API calls for all endpoints tagged `Subaccounts`. """ alias GoogleApi.DFAReporting.V33.Connection alias GoogleApi.Gax.{Request, Response} @library_version Mix.Project.config() |> Keyword.get(:version, "") @doc """ Gets one subaccount by ID. ## Parameters * `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server * `profile_id` (*type:* `String.t`) - User profile ID associated with this request. * `id` (*type:* `String.t`) - Subaccount ID. * `optional_params` (*type:* `keyword()`) - Optional parameters * `:alt` (*type:* `String.t`) - Data format for the response. * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response. * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user. * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks. * `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters. * `:userIp` (*type:* `String.t`) - Deprecated. 
Please use quotaUser instead. * `opts` (*type:* `keyword()`) - Call options ## Returns * `{:ok, %GoogleApi.DFAReporting.V33.Model.Subaccount{}}` on success * `{:error, info}` on failure """ @spec dfareporting_subaccounts_get( Tesla.Env.client(), String.t(), String.t(), keyword(), keyword() ) :: {:ok, GoogleApi.DFAReporting.V33.Model.Subaccount.t()} | {:ok, Tesla.Env.t()} | {:error, Tesla.Env.t()} def dfareporting_subaccounts_get(connection, profile_id, id, optional_params \\ [], opts \\ []) do optional_params_config = %{ :alt => :query, :fields => :query, :key => :query, :oauth_token => :query, :prettyPrint => :query, :quotaUser => :query, :userIp => :query } request = Request.new() |> Request.method(:get) |> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/subaccounts/{id}", %{ "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1), "id" => URI.encode(id, &URI.char_unreserved?/1) }) |> Request.add_optional_params(optional_params_config, optional_params) |> Request.library_version(@library_version) connection |> Connection.execute(request) |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.Subaccount{}]) end @doc """ Inserts a new subaccount. ## Parameters * `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server * `profile_id` (*type:* `String.t`) - User profile ID associated with this request. * `optional_params` (*type:* `keyword()`) - Optional parameters * `:alt` (*type:* `String.t`) - Data format for the response. * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response. * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user. * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks. 
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters. * `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead. * `:body` (*type:* `GoogleApi.DFAReporting.V33.Model.Subaccount.t`) - * `opts` (*type:* `keyword()`) - Call options ## Returns * `{:ok, %GoogleApi.DFAReporting.V33.Model.Subaccount{}}` on success * `{:error, info}` on failure """ @spec dfareporting_subaccounts_insert(Tesla.Env.client(), String.t(), keyword(), keyword()) :: {:ok, GoogleApi.DFAReporting.V33.Model.Subaccount.t()} | {:ok, Tesla.Env.t()} | {:error, Tesla.Env.t()} def dfareporting_subaccounts_insert(connection, profile_id, optional_params \\ [], opts \\ []) do optional_params_config = %{ :alt => :query, :fields => :query, :key => :query, :oauth_token => :query, :prettyPrint => :query, :quotaUser => :query, :userIp => :query, :body => :body } request = Request.new() |> Request.method(:post) |> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/subaccounts", %{ "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1) }) |> Request.add_optional_params(optional_params_config, optional_params) |> Request.library_version(@library_version) connection |> Connection.execute(request) |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.Subaccount{}]) end @doc """ Gets a list of subaccounts, possibly filtered. This method supports paging. ## Parameters * `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server * `profile_id` (*type:* `String.t`) - User profile ID associated with this request. * `optional_params` (*type:* `keyword()`) - Optional parameters * `:alt` (*type:* `String.t`) - Data format for the response. * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response. * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. 
Required unless you provide an OAuth 2.0 token. * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user. * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks. * `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters. * `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead. * `:ids` (*type:* `list(String.t)`) - Select only subaccounts with these IDs. * `:maxResults` (*type:* `integer()`) - Maximum number of results to return. * `:pageToken` (*type:* `String.t`) - Value of the nextPageToken from the previous result page. * `:searchString` (*type:* `String.t`) - Allows searching for objects by name or ID. Wildcards (*) are allowed. For example, "subaccount*2015" will return objects with names like "subaccount June 2015", "subaccount April 2015", or simply "subaccount 2015". Most of the searches also add wildcards implicitly at the start and the end of the search string. For example, a search string of "subaccount" will match objects with name "my subaccount", "subaccount 2015", or simply "subaccount". * `:sortField` (*type:* `String.t`) - Field by which to sort the list. * `:sortOrder` (*type:* `String.t`) - Order of sorted results. 
* `opts` (*type:* `keyword()`) - Call options ## Returns * `{:ok, %GoogleApi.DFAReporting.V33.Model.SubaccountsListResponse{}}` on success * `{:error, info}` on failure """ @spec dfareporting_subaccounts_list(Tesla.Env.client(), String.t(), keyword(), keyword()) :: {:ok, GoogleApi.DFAReporting.V33.Model.SubaccountsListResponse.t()} | {:ok, Tesla.Env.t()} | {:error, Tesla.Env.t()} def dfareporting_subaccounts_list(connection, profile_id, optional_params \\ [], opts \\ []) do optional_params_config = %{ :alt => :query, :fields => :query, :key => :query, :oauth_token => :query, :prettyPrint => :query, :quotaUser => :query, :userIp => :query, :ids => :query, :maxResults => :query, :pageToken => :query, :searchString => :query, :sortField => :query, :sortOrder => :query } request = Request.new() |> Request.method(:get) |> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/subaccounts", %{ "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1) }) |> Request.add_optional_params(optional_params_config, optional_params) |> Request.library_version(@library_version) connection |> Connection.execute(request) |> Response.decode( opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.SubaccountsListResponse{}] ) end @doc """ Updates an existing subaccount. This method supports patch semantics. ## Parameters * `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server * `profile_id` (*type:* `String.t`) - User profile ID associated with this request. * `id` (*type:* `String.t`) - Subaccount ID. * `optional_params` (*type:* `keyword()`) - Optional parameters * `:alt` (*type:* `String.t`) - Data format for the response. * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response. * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. 
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user. * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks. * `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters. * `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead. * `:body` (*type:* `GoogleApi.DFAReporting.V33.Model.Subaccount.t`) - * `opts` (*type:* `keyword()`) - Call options ## Returns * `{:ok, %GoogleApi.DFAReporting.V33.Model.Subaccount{}}` on success * `{:error, info}` on failure """ @spec dfareporting_subaccounts_patch( Tesla.Env.client(), String.t(), String.t(), keyword(), keyword() ) :: {:ok, GoogleApi.DFAReporting.V33.Model.Subaccount.t()} | {:ok, Tesla.Env.t()} | {:error, Tesla.Env.t()} def dfareporting_subaccounts_patch( connection, profile_id, id, optional_params \\ [], opts \\ [] ) do optional_params_config = %{ :alt => :query, :fields => :query, :key => :query, :oauth_token => :query, :prettyPrint => :query, :quotaUser => :query, :userIp => :query, :body => :body } request = Request.new() |> Request.method(:patch) |> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/subaccounts", %{ "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1) }) |> Request.add_param(:query, :id, id) |> Request.add_optional_params(optional_params_config, optional_params) |> Request.library_version(@library_version) connection |> Connection.execute(request) |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.Subaccount{}]) end @doc """ Updates an existing subaccount. ## Parameters * `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server * `profile_id` (*type:* `String.t`) - User profile ID associated with this request. * `optional_params` (*type:* `keyword()`) - Optional parameters * `:alt` (*type:* `String.t`) - Data format for the response. 
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response. * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user. * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks. * `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters. * `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead. * `:body` (*type:* `GoogleApi.DFAReporting.V33.Model.Subaccount.t`) - * `opts` (*type:* `keyword()`) - Call options ## Returns * `{:ok, %GoogleApi.DFAReporting.V33.Model.Subaccount{}}` on success * `{:error, info}` on failure """ @spec dfareporting_subaccounts_update(Tesla.Env.client(), String.t(), keyword(), keyword()) :: {:ok, GoogleApi.DFAReporting.V33.Model.Subaccount.t()} | {:ok, Tesla.Env.t()} | {:error, Tesla.Env.t()} def dfareporting_subaccounts_update(connection, profile_id, optional_params \\ [], opts \\ []) do optional_params_config = %{ :alt => :query, :fields => :query, :key => :query, :oauth_token => :query, :prettyPrint => :query, :quotaUser => :query, :userIp => :query, :body => :body } request = Request.new() |> Request.method(:put) |> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/subaccounts", %{ "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1) }) |> Request.add_optional_params(optional_params_config, optional_params) |> Request.library_version(@library_version) connection |> Connection.execute(request) |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.Subaccount{}]) end end
44.603077
496
0.633623
ffc332fd7fa047be6be8afe5927cf95d28f90c61
1,370
ex
Elixir
lib/cat_show_web/controllers/session_controller.ex
kemm/cat_show
42f47f93ecec48700a4d5373be27138cf907d6cb
[ "BSD-3-Clause" ]
null
null
null
lib/cat_show_web/controllers/session_controller.ex
kemm/cat_show
42f47f93ecec48700a4d5373be27138cf907d6cb
[ "BSD-3-Clause" ]
1
2021-05-11T15:49:58.000Z
2021-05-11T15:49:58.000Z
lib/cat_show_web/controllers/session_controller.ex
kemm/cat_show
42f47f93ecec48700a4d5373be27138cf907d6cb
[ "BSD-3-Clause" ]
null
null
null
defmodule CatShowWeb.SessionController do use CatShowWeb, :controller alias CatShow.Accounts alias CatShow.Auth.Guardian def create(conn, params) do case authenticate(params) do {:ok, user} -> new_conn = Guardian.Plug.sign_in(conn, user) token = Guardian.Plug.current_token(new_conn) new_conn |> put_status(:created) |> render("show.json", user: user, token: token) {:error, _reason} -> conn |> put_status(:unauthorized) |> render("error.json", error: "Email not found or invalid password") end end def delete(conn, _params) do conn |> Guardian.Plug.sign_out() |> render("delete.json") end def refresh(conn, _params) do user = Guardian.Plug.current_resource(conn) token = Guardian.Plug.current_token(conn) case Guardian.refresh(token, ttl: {30, :days}) do {:ok, _, {new_token, _new_claims}} -> conn |> put_status(:ok) |> render("show.json", user: user, token: new_token) {:error, _reason} -> conn |> put_status(:unauthorized) |> render("error.json", error: "Not authenticated") end end defp authenticate(%{"email" => email, "password" => password}) do Accounts.authenticate(email, password) end defp authenticate(_) do {:error, :invalid_params} end end
24.464286
77
0.621898
ffc34272db2a2c15413dd3a2d5dd9422c057ce9a
32,184
exs
Elixir
test/row_test.exs
kianmeng/ecto_tablestore
d07b34a10e4476f15524d37e7a22210cdb9744b3
[ "MIT" ]
null
null
null
test/row_test.exs
kianmeng/ecto_tablestore
d07b34a10e4476f15524d37e7a22210cdb9744b3
[ "MIT" ]
null
null
null
test/row_test.exs
kianmeng/ecto_tablestore
d07b34a10e4476f15524d37e7a22210cdb9744b3
[ "MIT" ]
null
null
null
defmodule EctoTablestore.RowTest do use ExUnit.Case alias EctoTablestore.TestSchema.{Order, User, User2, User3} alias EctoTablestore.TestRepo alias Ecto.Changeset import EctoTablestore.Query, only: [condition: 2, condition: 1, filter: 1] setup_all do TestHelper.setup_all() EctoTablestore.Support.Table.create_order() EctoTablestore.Support.Table.create_user() EctoTablestore.Support.Table.create_user2() EctoTablestore.Support.Table.create_user3() on_exit(fn -> EctoTablestore.Support.Table.delete_order() EctoTablestore.Support.Table.delete_user() EctoTablestore.Support.Table.delete_user2() EctoTablestore.Support.Table.delete_user3() end) Process.sleep(3_000) end test "repo - insert/get/delete" do input_id = "1" input_desc = "order_desc" input_num = 1 input_order_name = "order_name" input_success = true input_price = 100.09 order = %Order{ id: input_id, name: input_order_name, desc: input_desc, num: input_num, success?: input_success, price: input_price } assert_raise Ecto.ConstraintError, fn -> TestRepo.insert(order, condition: condition(:expect_not_exist), return_type: :pk) end {status, saved_order} = TestRepo.insert(order, condition: condition(:ignore), return_type: :pk) assert status == :ok saved_order_internal_id = saved_order.internal_id assert saved_order.id == input_id # autogenerate primary key return in schema assert saved_order_internal_id != nil and is_integer(saved_order_internal_id) == true order_with_non_exist_num = %Order{id: input_id, internal_id: saved_order_internal_id, num: 2} query_result = TestRepo.one(order_with_non_exist_num, entity_full_match: true) assert query_result == nil query_result = TestRepo.one(order_with_non_exist_num) assert query_result != nil query_result = TestRepo.one(%Order{id: "fake", internal_id: 0}) assert query_result == nil # `num` attribute field will be used in condition filter order_with_matched_num = %Order{ id: input_id, internal_id: saved_order_internal_id, num: input_num } query_result_by_one = 
TestRepo.one(order_with_matched_num, entity_full_match: true) assert query_result_by_one != nil assert query_result_by_one.desc == input_desc assert query_result_by_one.num == input_num assert query_result_by_one.name == input_order_name assert query_result_by_one.success? == input_success assert query_result_by_one.price == input_price # query and return fields in `columns_to_get` query_result2_by_one = TestRepo.one(order_with_matched_num, columns_to_get: ["num", "desc"], entity_full_match: true ) assert query_result2_by_one.desc == input_desc assert query_result2_by_one.num == input_num assert query_result2_by_one.name == nil assert query_result2_by_one.success? == nil assert query_result2_by_one.price == nil # Optional use case `get/3` query_result_by_get = TestRepo.get(Order, id: input_id, internal_id: saved_order_internal_id) assert query_result_by_get == query_result_by_one query_result2_by_get = TestRepo.get(Order, [internal_id: saved_order_internal_id, id: input_id], columns_to_get: ["num", "desc"] ) assert query_result2_by_get == query_result2_by_one query_result = TestRepo.get(Order, [id: input_id, internal_id: saved_order_internal_id], filter: filter("num" == 2) ) assert query_result == nil query_result = TestRepo.get(Order, [id: input_id, internal_id: saved_order_internal_id], filter: filter("num" == input_num) ) assert query_result == query_result_by_get assert_raise Ecto.ChangeError, fn -> TestRepo.delete!(%Order{id: input_id, internal_id: "invalid"}) end assert_raise Ecto.NoPrimaryKeyValueError, fn -> TestRepo.delete(%Order{id: input_id}) end order = %Order{internal_id: saved_order_internal_id, id: input_id} assert_raise Ecto.StaleEntryError, fn -> TestRepo.delete(order, condition: condition(:expect_exist, "num" == "invalid_num")) end {result, _} = TestRepo.delete(order, condition: condition(:expect_exist, "num" == input_num)) assert result == :ok end test "repo - update" do input_id = "1001" input_desc = "order_desc" input_num = 10 increment = 1 
input_price = 99.9 order = %Order{id: input_id, desc: input_desc, num: input_num, price: input_price} {:ok, saved_order} = TestRepo.insert(order, condition: condition(:ignore), return_type: :pk) assert saved_order.price == input_price new_input_num = 100 changeset = Order.test_changeset(saved_order, %{name: "new_name1", num: new_input_num, price: 88.8}) {:ok, updated_order0} = TestRepo.update(changeset, condition: condition(:expect_exist), return_type: :pk) # since the above `test_changeset` don't update price assert updated_order0.price == input_price assert updated_order0.name == "new_name1" assert updated_order0.num == 100 # 1, atom increment `num` field # 2, delete `desc` field # 3, update `name` field as "new_order" updated_order_name = "new_order_name" changeset = saved_order |> Ecto.Changeset.change(num: {:increment, increment}, desc: nil) |> Ecto.Changeset.change(name: updated_order_name) {:ok, updated_order} = TestRepo.update(changeset, condition: condition(:expect_exist)) assert updated_order.desc == nil assert updated_order.num == new_input_num + increment assert updated_order.name == updated_order_name order = TestRepo.get(Order, internal_id: saved_order.internal_id, id: input_id) assert order.desc == nil assert order.num == new_input_num + increment assert order.name == updated_order_name TestRepo.delete(%Order{id: input_id, internal_id: saved_order.internal_id}, condition: condition(:expect_exist) ) end test "repo - update with timestamps" do user = %User{name: "username", level: 10, id: 1} {:ok, saved_user} = TestRepo.insert(user, condition: condition(:ignore)) assert saved_user.updated_at == saved_user.inserted_at assert is_integer(saved_user.updated_at) user = TestRepo.get(User, id: 1) new_name = "username2" changeset = Ecto.Changeset.change(user, name: new_name, level: {:increment, 1}) Process.sleep(1000) {:ok, updated_user} = TestRepo.update(changeset, condition: condition(:expect_exist)) assert updated_user.level == 11 assert updated_user.name 
== new_name assert updated_user.updated_at > updated_user.inserted_at TestRepo.delete(user, condition: condition(:expect_exist)) end test "repo - insert/update with ecto types" do assert_raise Ecto.ConstraintError, ~r/OTSConditionCheckFail/, fn -> user = %User{ name: "username2", profile: %{"level" => 1, "age" => 20}, tags: ["tag_a", "tag_b"], id: 2 } {:ok, _saved_user} = TestRepo.insert(user, condition: condition(:expect_exist)) end naive_dt = NaiveDateTime.utc_now() |> NaiveDateTime.truncate(:second) dt = DateTime.utc_now() |> DateTime.truncate(:second) user = %User{ id: 1, name: "username", naive_dt: naive_dt, dt: dt, profile: %{"level" => 1, "age" => 20}, tags: ["tag_a", "tag_b"] } {:ok, _saved_user} = TestRepo.insert(user, condition: condition(:ignore)) get_user = TestRepo.get(User, id: 1) profile = get_user.profile assert get_user.naive_dt == naive_dt assert get_user.dt == dt assert Map.get(profile, "level") == 1 assert Map.get(profile, "age") == 20 assert get_user.tags == ["tag_a", "tag_b"] naive_dt = NaiveDateTime.utc_now() |> NaiveDateTime.truncate(:second) dt = DateTime.utc_now() |> DateTime.truncate(:second) changeset = Ecto.Changeset.change(get_user, name: "username2", profile: %{name: 1}, naive_dt: naive_dt, dt: dt ) {:ok, updated_user} = TestRepo.update(changeset, condition: condition(:expect_exist)) assert updated_user.naive_dt == naive_dt assert updated_user.dt == dt assert updated_user.profile == %{name: 1} get_user = TestRepo.get(User, id: 1) # Please notice that Jason.decode use :key option as :strings by default, we don't # provide a way to modify this option so far. 
assert get_user.profile == %{"name" => 1} TestRepo.delete(user, condition: condition(:expect_exist)) end test "repo - get_range" do saved_orders = Enum.map(1..9, fn var -> order = %Order{id: "#{var}", desc: "desc#{var}", num: var, price: 20.5 * var} {:ok, saved_order} = TestRepo.insert(order, condition: condition(:ignore), return_type: :pk) saved_order end) start_pks = [{"id", "0a"}, {"internal_id", :inf_min}] end_pks = [{"id", "0b"}, {"internal_id", :inf_max}] {orders, next_start_primary_key} = TestRepo.get_range(Order, start_pks, end_pks) assert orders == nil and next_start_primary_key == nil start_pks = [{"id", "1"}, {"internal_id", :inf_min}, {"id", "1"}] end_pks = [{"id", "3"}, {"internal_id", :inf_max}, {"id", "3"}] {orders, next_start_primary_key} = TestRepo.get_range(Order, start_pks, end_pks) assert next_start_primary_key == nil assert length(orders) == 3 start_pks = [{"id", "3"}, {"internal_id", :inf_max}] end_pks = [{"id", "1"}, {"internal_id", :inf_min}] {backward_orders, _next_start_primary_key} = TestRepo.get_range(Order, start_pks, end_pks, direction: :backward) assert orders == Enum.reverse(backward_orders) start_pks = [{"id", "7"}, {"internal_id", :inf_max}] end_pks = [{"id", "4"}, {"internal_id", :inf_min}] {orders, next_start_primary_key} = TestRepo.get_range(Order, start_pks, end_pks, limit: 3, direction: :backward) assert next_start_primary_key != nil assert length(orders) == 3 {orders2, next_start_primary_key} = TestRepo.get_range(Order, next_start_primary_key, end_pks, limit: 3, direction: :backward) assert next_start_primary_key == nil assert length(orders2) == 1 for order <- saved_orders do TestRepo.delete(%Order{id: order.id, internal_id: order.internal_id}, condition: condition(:expect_exist) ) end end test "repo - stream_range" do saved_orders = Enum.map(1..9, fn var -> order = %Order{id: "#{var}", desc: "desc#{var}", num: var, price: 20.5 * var} {:ok, saved_order} = TestRepo.insert(order, condition: condition(:ignore), return_type: :pk) 
saved_order end) start_pks = [{"id", "0a"}, {"internal_id", :inf_min}] end_pks = [{"id", "0b"}, {"internal_id", :inf_max}] # since it is enumerable, no matched data will return `[]`. orders = Order |> TestRepo.stream_range(start_pks, end_pks, direction: :forward) |> Enum.to_list() assert orders == [] start_pks = [{"id", "1"}, {"internal_id", :inf_min}] end_pks = [{"id", "3"}, {"internal_id", :inf_max}] orders = Order |> TestRepo.stream_range(start_pks, end_pks, direction: :forward, limit: 1) |> Enum.to_list() assert length(orders) == 3 # start/end pks with an invalid `direction` [{:error, error}] = Order |> TestRepo.stream_range(start_pks, end_pks, direction: :backward) |> Enum.to_list() assert error.code == "OTSParameterInvalid" and error.message == "Begin key must more than end key in BACKWARD" start_pks = [{"id", "3"}, {"internal_id", :inf_max}] end_pks = [{"id", "1"}, {"internal_id", :inf_min}] backward_orders = Order |> TestRepo.stream_range(start_pks, end_pks, direction: :backward) |> Enum.to_list() assert orders == Enum.reverse(backward_orders) start_pks = [{"id", "1"}, {"internal_id", :inf_min}] end_pks = [{"id", "9"}, {"internal_id", :inf_max}] all_orders = Order |> TestRepo.stream_range(start_pks, end_pks, limit: 3) |> Enum.to_list() assert length(all_orders) == 9 take_orders = Order |> TestRepo.stream_range(start_pks, end_pks, limit: 3) |> Enum.take(5) assert length(take_orders) == 5 for order <- saved_orders do TestRepo.delete(%Order{id: order.id, internal_id: order.internal_id}, condition: condition(:expect_exist) ) end end test "repo - batch_get" do {saved_orders, saved_users} = Enum.reduce(1..3, {[], []}, fn var, {cur_orders, cur_users} -> order = %Order{id: "#{var}", desc: "desc#{var}", num: var, price: 1.8 * var} {:ok, saved_order} = TestRepo.insert(order, condition: condition(:ignore), return_type: :pk) user = %User{id: var, name: "name#{var}", level: var} {:ok, saved_user} = TestRepo.insert(user, condition: condition(:expect_not_exist), 
return_type: :pk) {cur_orders ++ [saved_order], cur_users ++ [saved_user]} end) requests1 = [ {Order, [[{"id", "1"}, {"internal_id", List.first(saved_orders).internal_id}]], columns_to_get: ["num"]}, [%User{id: 1, name: "name1"}, %User{id: 2, name: "name2"}] ] {:ok, result} = TestRepo.batch_get(requests1) [{Order, query_orders}, {User, query_users}] = result assert length(query_orders) == 1 assert length(query_users) == 2 for query_user <- query_users do assert query_user.level != nil end # provide attribute column `name` in schema, and set `entity_full_match: true` will use these attribute field(s) in the filter and add `name` into columns_to_get if specially set columns_to_get. requests2 = [ {User, [ [{"id", 1}], [{"id", 2}] ]} ] {:ok, result2} = TestRepo.batch_get(requests2) query_users2 = Keyword.get(result2, User) assert length(query_users2) == 2 requests2 = [ {User, [ [{"id", 1}], [{"id", 2}] ], columns_to_get: ["level"]} ] {:ok, result2} = TestRepo.batch_get(requests2) query_users2 = Keyword.get(result2, User) assert length(query_users2) == 2 for query_user <- query_users2 do assert query_user.level != nil end requests2 = [ {[%User{id: 1, name: "name1"}, %User{id: 2, name: "name2"}], columns_to_get: ["level"], entity_full_match: true} ] {:ok, result2} = TestRepo.batch_get(requests2) query_users2 = Keyword.get(result2, User) assert length(query_users2) == 2 for query_user <- query_users2 do assert query_user.name != nil assert query_user.level != nil end requests_with_fake = [ {[%User{id: 1, name: "name_fake"}, %User{id: 2, name: "name2"}], columns_to_get: ["level"], entity_full_match: true} ] {:ok, [{_, result_users}]} = TestRepo.batch_get(requests_with_fake) assert length(result_users) == 1 assert_raise RuntimeError, fn -> # When use `entity_full_match: true`, one schema provides `name` attribute, # another schema only has primary key, # in this case, there exist conflict will raise a RuntimeError for requests_invalid = [ {[%User{id: 1, name: "name1"}, 
%User{id: 2}], entity_full_match: true} ] TestRepo.batch_get(requests_invalid) end # The filter of following case is ((name == "name1" and level == 2) or (name == "name2" and level == 2) requests3 = [ {[%User{id: 1, name: "name1"}, %User{id: 2, name: "name2"}], filter: filter("level" == 2)} ] {:ok, result3} = TestRepo.batch_get(requests3) query_users3 = Keyword.get(result3, User) assert length(query_users3) == 1 query3_user = List.first(query_users3) assert query3_user.id == 2 assert query3_user.name == "name2" changeset = Ecto.Changeset.change(%User{id: 1}, name: "new_name1") TestRepo.update(changeset, condition: condition(:expect_exist)) # After update User(id: 1)'s name as `new_name1`, in the next batch get, attribute columns will be used in filter. # # The following case will only return User(id: 2) in batch get. requests3_1 = [ {[%User{id: 1, name: "name1"}, %User{id: 2, name: "name2"}], entity_full_match: true} ] {:ok, result3_1} = TestRepo.batch_get(requests3_1) query_result3_1 = Keyword.get(result3_1, User) assert length(query_result3_1) == 1 query3_1_user = List.first(query_result3_1) assert query3_1_user.id == 2 assert query3_1_user.name == "name2" # Although User(id: 1)'s name is changed, but by default `batch_get` only use the primary keys of User entity to fetch rows. 
requests4 = [ [%User{id: 1, name: "not_existed_name1"}] ] {:ok, result4} = TestRepo.batch_get(requests4) assert Keyword.get(result4, User) != nil for order <- saved_orders do TestRepo.delete(%Order{id: order.id, internal_id: order.internal_id}, condition: condition(:expect_exist) ) end for user <- saved_users do TestRepo.delete(%User{id: user.id}, condition: condition(:expect_exist)) end end test "repo - batch_write" do order0 = %Order{id: "order0", desc: "desc0"} {:ok, saved_order0} = TestRepo.insert(order0, condition: condition(:ignore), return_type: :pk) order1_num = 10 order1 = %Order{id: "order1", desc: "desc1", num: order1_num, price: 89.1} {:ok, saved_order1} = TestRepo.insert(order1, condition: condition(:ignore), return_type: :pk) order2 = %Order{id: "order2", desc: "desc2", num: 5, price: 76.6} {:ok, saved_order2} = TestRepo.insert(order2, condition: condition(:ignore), return_type: :pk) order3 = %Order{id: "order3", desc: "desc3", num: 10, price: 55.67} order4_changeset = Changeset.cast(%Order{id: "order4_1", desc: "desc3"}, %{num: 40}, [:num]) user1_lv = 8 user1 = %User{id: 100, name: "u1", level: user1_lv} {:ok, _} = TestRepo.insert(user1, condition: condition(:expect_not_exist)) user2 = %User{id: 101, name: "u2", level: 11} {:ok, _} = TestRepo.insert(user2, condition: condition(:expect_not_exist)) user3 = %User{id: 102, name: "u3", level: 12} changeset_order1 = Order |> TestRepo.get(id: "order1", internal_id: saved_order1.internal_id) |> Changeset.change(num: {:increment, 1}, price: nil) changeset_user1 = User |> TestRepo.get(id: 100) |> Changeset.change(level: {:increment, 1}, name: "new_user_2") writes = [ delete: [ saved_order2, {Order, [id: "order0", internal_id: saved_order0.internal_id], condition: condition(:ignore)}, {user2, return_type: :pk} ], update: [ {changeset_user1, return_type: :pk}, {changeset_order1, return_type: :pk} ], put: [ {order3, condition: condition(:ignore), return_type: :pk}, {order4_changeset, condition: 
condition(:ignore), return_type: :pk}, {user3, condition: condition(:expect_not_exist), return_type: :pk} ] ] {:ok, result} = TestRepo.batch_write(writes) order_batch_write_result = Keyword.get(result, Order) {:ok, batch_write_update_order} = Keyword.get(order_batch_write_result, :update) |> List.first() assert batch_write_update_order.num == order1_num + 1 assert batch_write_update_order.id == order1.id assert batch_write_update_order.internal_id == saved_order1.internal_id assert batch_write_update_order.price == nil [{:ok, batch_write_delete_order2}, {:ok, batch_write_delete_order0}] = Keyword.get(order_batch_write_result, :delete) assert batch_write_delete_order2.id == saved_order2.id assert batch_write_delete_order2.internal_id == saved_order2.internal_id assert batch_write_delete_order0.id == saved_order0.id assert batch_write_delete_order0.internal_id == saved_order0.internal_id {:ok, batch_write_put_order} = Keyword.get(order_batch_write_result, :put) |> List.first() assert batch_write_put_order.id == order3.id assert batch_write_put_order.desc == "desc3" assert batch_write_put_order.num == 10 assert batch_write_put_order.price == 55.67 {:ok, batch_write_put_order4} = Keyword.get(order_batch_write_result, :put) |> List.last() assert batch_write_put_order4.id == order4_changeset.data.id assert is_integer(batch_write_put_order4.internal_id) == true assert batch_write_put_order4.num == order4_changeset.changes.num user_batch_write_result = Keyword.get(result, User) {:ok, batch_write_update_user} = Keyword.get(user_batch_write_result, :update) |> List.first() assert batch_write_update_user.level == user1_lv + 1 assert batch_write_update_user.name == "new_user_2" assert batch_write_update_user.id == 100 {:ok, batch_write_delete_user} = Keyword.get(user_batch_write_result, :delete) |> List.first() assert batch_write_delete_user.level == 11 assert batch_write_delete_user.id == 101 assert batch_write_delete_user.name == "u2" {:ok, batch_write_put_user} = 
Keyword.get(user_batch_write_result, :put) |> List.first() assert batch_write_put_user.level == 12 assert batch_write_put_user.id == 102 assert batch_write_put_user.name == "u3" changeset_user3 = Changeset.change(batch_write_put_user, level: {:increment, 2}) # failed case writes2 = [ delete: [ batch_write_put_user ], update: [ changeset_user3 ] ] {:ok, result2} = TestRepo.batch_write(writes2) fail_batch_write_result = Keyword.get(result2, User) {:error, batch_write_update_response} = Keyword.get(fail_batch_write_result, :update) |> List.first() assert batch_write_update_response.is_ok == false {:error, batch_write_delete_response} = Keyword.get(fail_batch_write_result, :delete) |> List.first() assert batch_write_delete_response.is_ok == false {:ok, _} = TestRepo.delete(batch_write_put_user, condition: condition(:expect_exist)) {:ok, _} = TestRepo.delete(batch_write_update_user, condition: condition(:expect_exist)) {:ok, _} = TestRepo.delete(batch_write_put_order, condition: condition(:ignore)) {:ok, _} = TestRepo.delete(batch_write_update_order, condition: condition(:ignore)) end test "repo - batch_update with timestamps" do inserted_users = for index <- 1..3 do u = %User{id: index, name: "u#{index}", level: index} {:ok, inserted_user} = TestRepo.insert(u, condition: condition(:expect_not_exist)) assert inserted_user.inserted_at == inserted_user.updated_at inserted_user end user1 = List.first(inserted_users) Process.sleep(1000) new_user1_name = "new_u1" changeset1 = Changeset.change(user1, name: new_user1_name, level: nil) changeset2 = Changeset.change(%User{id: 2}, level: {:increment, 1}) writes = [ update: [ {changeset1, condition: condition(:expect_exist)}, {changeset2, condition: condition(:expect_exist)} ], put: [ {%User{id: 10, name: "new10", level: 10}, condition: condition(:expect_not_exist)}, {%User{id: 11, name: "new11", level: 11}, condition: condition(:expect_not_exist)} ] ] {:ok, result} = TestRepo.batch_write(writes) user_writes = Keyword.get(result, 
User) put_opers = Keyword.get(user_writes, :put) for {:ok, put_user} <- put_opers do assert put_user.inserted_at == put_user.updated_at end update_opers = Keyword.get(user_writes, :update) for {:ok, update_user} <- update_opers do case update_user.id do 1 -> update_updated_at = update_user.updated_at update_inserted_at = update_user.inserted_at user1_inserted_at = user1.inserted_at assert update_updated_at > user1_inserted_at and update_updated_at > update_inserted_at and update_inserted_at == user1_inserted_at assert update_user.level == nil assert update_user.name == new_user1_name 2 -> updated_at = update_user.updated_at assert updated_at != nil and is_integer(updated_at) assert update_user.level == 3 assert update_user.name == nil end end TestRepo.delete(%User{id: 1}, condition: condition(:expect_exist)) TestRepo.delete(%User{id: 2}, condition: condition(:expect_exist)) TestRepo.delete(%User{id: 3}, condition: condition(:expect_exist)) TestRepo.delete(%User{id: 10}, condition: condition(:expect_exist)) TestRepo.delete(%User{id: 11}, condition: condition(:expect_exist)) end test "repo - at least one attribute column" do u = %User3{id: "1"} assert_raise Ecto.ConstraintError, fn -> TestRepo.insert(u, condition: condition(:expect_not_exist)) end end test "repo - get/one/get_range/batch_get with not matched filter" do input_id = "10001" input_desc = "order_desc" input_num = 1 input_order_name = "order_name" input_success = true input_price = 100.09 order = %Order{ id: input_id, name: input_order_name, desc: input_desc, num: input_num, success?: input_success, price: input_price } {:ok, saved_order} = TestRepo.insert(order, condition: condition(:ignore), return_type: :pk) start_pks = [{"id", input_id}, {"internal_id", :inf_min}] end_pks = [{"id", "100010"}, {"internal_id", :inf_max}] get_result = TestRepo.get(Order, [id: input_id, internal_id: saved_order.internal_id], filter: filter("num" == 1000) ) assert get_result == nil one_result = TestRepo.one(%Order{id: 
input_id, internal_id: saved_order.internal_id}, filter: filter("num" > 10) ) assert one_result == nil {records, next} = TestRepo.get_range(Order, start_pks, end_pks, filter: filter("num" == 100)) assert records == nil and next == nil requests = [ {Order, [{"id", input_id}, {"internal_id", saved_order.internal_id}], filter: filter("num" == 100)} ] {:ok, [{Order, batch_get_result}]} = TestRepo.batch_get(requests) assert batch_get_result == nil TestRepo.delete(%Order{id: input_id, internal_id: saved_order.internal_id}, condition: condition(:expect_exist) ) end test "repo - insert/batch_write:put with changeset" do id = "1001" changeset = Order.test_changeset(%Order{}, %{id: id, name: "test_name", num: 100}) {:ok, new_order} = TestRepo.insert(changeset, condition: condition(:ignore)) assert new_order.id == id and new_order.name == "test_name" and new_order.num == 100 id2 = "1002" changeset2 = Order.test_changeset(%Order{}, %{id: id2, name: "test_name2", num: 102}) id3 = "1003" changeset3 = Order.test_changeset(%Order{}, %{id: id3, name: "test_name2", num: 102}) writes = [ put: [ {changeset2, condition: condition(:ignore), return_type: :pk}, {changeset3, condition: condition(:ignore)} ] ] {:ok, batch_writes_result} = TestRepo.batch_write(writes) [{Order, [put: result_items]}] = batch_writes_result [{:ok, result1}, {:ok, result2}] = result_items assert result1.internal_id != nil and result1.id == "1002" # since the second item does not require return pk assert result2.internal_id == nil and result2.id == "1003" id4 = "1004" changeset4 = Order.test_changeset(%Order{}, %{id: id4, name: "test_name2"}) {:error, changeset} = TestRepo.insert(changeset4, condition: condition(:ignore)) assert changeset.valid? 
== false writes = [ put: [ {changeset4, condition: condition(:ignore), return_type: :pk} ] ] assert_raise RuntimeError, ~r/Using invalid changeset/, fn -> TestRepo.batch_write(writes) end start_pks = [{"id", "1001"}, {"internal_id", :inf_min}] end_pks = [{"id", "1003"}, {"internal_id", :inf_max}] {orders, _next} = TestRepo.get_range(Order, start_pks, end_pks) Enum.map(orders, fn order -> TestRepo.delete(order, condition: condition(:expect_exist)) end) end test "repo - check stale_error" do input_id = "10001" input_desc = "order_desc" input_num = 10 increment = 1 input_price = 99.9 order = %Order{id: input_id, desc: input_desc, num: input_num, price: input_price} {:ok, saved_order} = TestRepo.insert(order, condition: condition(:ignore), return_type: :pk) changeset = %Order{internal_id: saved_order.internal_id, id: saved_order.id} |> Ecto.Changeset.change(num: {:increment, increment}, desc: nil) # `stale_error_field` can be any value of atom. stale_error_field = :num stale_error_message = "check num condition failed" {:error, invalid_changeset} = TestRepo.update(changeset, condition: condition(:expect_exist, "num" > 1000), stale_error_field: stale_error_field, stale_error_message: stale_error_message ) {^stale_error_message, error} = Keyword.get(invalid_changeset.errors, stale_error_field) assert error == [stale: true] {:ok, _} = TestRepo.delete(saved_order, condition: condition(:expect_exist)) end test "repo - check_constraint" do check_constraint_field = :condition check_constraint_name = "OTSConditionCheckFail" check_constraint_message = "ots condition check fail" user = %User2{id: "100"} |> Ecto.Changeset.change(name: "name2") |> Ecto.Changeset.check_constraint(check_constraint_field, name: check_constraint_name, message: check_constraint_message ) {:error, invalid_changeset} = TestRepo.insert(user, condition: condition(:expect_exist), return_type: :pk) {^check_constraint_message, error_constraint} = Keyword.get(invalid_changeset.errors, check_constraint_field) 
error_constraint_name = Keyword.get(error_constraint, :constraint_name) # Use ots's error code as check_constraint_name. assert error_constraint_name == check_constraint_name end test "repo - naive_datetime timestamp" do id = Ecto.UUID.generate() {:ok, user} = %User2{id: id} |> TestRepo.insert(condition: condition(:ignore)) assert NaiveDateTime.compare(NaiveDateTime.utc_now(), user.inserted_at) == :gt end test "repo batch write to delete with an array field" do user1 = %User{id: 1, tags: ["1", "2"], name: "name1"} user2 = %User{id: 2, tags: ["a", "b", "c"], name: "name2"} {:ok, _} = TestRepo.batch_write( put: [ {user1, condition: condition(:ignore)}, {user2, condition: condition(:ignore)} ] ) {:ok, _} = TestRepo.batch_write( delete: [ user1, user2 ] ) {:ok, [{User, users}]} = TestRepo.batch_get([ [ %User{id: 1}, %User{id: 2} ] ]) assert users == nil end test "repo batch write to delete with a default value" do user = %User2{id: "1", name: "username0", age: 30} user2 = %User2{id: "2", name: "username2", age: 25} {:ok, _} = TestRepo.batch_write( put: [ {user, condition: condition(:ignore)}, {user2, condition: condition(:ignore)} ] ) {:ok, [{User2, results}]} = TestRepo.batch_write( delete: [ {%User2{id: "1", age: 30}, condition: condition(:expect_exist, "name" == "username0")} ] ) [{:ok, deleted_user2}] = results[:delete] assert deleted_user2.id == "1" {:ok, [{User2, results}]} = TestRepo.batch_write( delete: [ {%User2{id: "2", age: 0}, condition: condition(:expect_exist, "name" == "username2" and "age" == 25)} ] ) [{:ok, deleted_user2}] = results[:delete] assert deleted_user2.id == "2" end end
32.345729
198
0.652063
ffc3485aade4fc719d8cfe8d21ef410d32e1166a
525
ex
Elixir
lib/triplex/plugs/ensure_plug_config.ex
keithpitty/triplex
3ae09fcb165de6cf027fe33d537cfb10facd4d7e
[ "MIT" ]
null
null
null
lib/triplex/plugs/ensure_plug_config.ex
keithpitty/triplex
3ae09fcb165de6cf027fe33d537cfb10facd4d7e
[ "MIT" ]
null
null
null
lib/triplex/plugs/ensure_plug_config.ex
keithpitty/triplex
3ae09fcb165de6cf027fe33d537cfb10facd4d7e
[ "MIT" ]
null
null
null
defmodule Triplex.EnsurePlugConfig do @moduledoc """ This is a struct that holds the configuration for `Triplex.EnsurePlug`. Here are the config keys allowed: - `assign`: the name of the assign where we must save the tenant. - `callback`: function that might be called when the plug succeeded. It must return a connection. - `failure_callback`: function that might be called when the plug failed. It must return a connection. """ defstruct [:callback, :failure_callback, assign: :current_tenant] end
29.166667
75
0.740952
ffc36ef0c6a66e160bfc122d9c73dc887a0921f6
1,596
exs
Elixir
mix.exs
exit9/temple
eab538c1ca584f8d4b0ebe97111251bba28979e1
[ "MIT" ]
null
null
null
mix.exs
exit9/temple
eab538c1ca584f8d4b0ebe97111251bba28979e1
[ "MIT" ]
null
null
null
mix.exs
exit9/temple
eab538c1ca584f8d4b0ebe97111251bba28979e1
[ "MIT" ]
null
null
null
defmodule Temple.MixProject do use Mix.Project def project do [ app: :temple, name: "Temple", description: "An HTML DSL for Elixir and Phoenix", version: "0.3.1", package: package(), elixirc_paths: elixirc_paths(Mix.env()), elixir: "~> 1.7", start_permanent: Mix.env() == :prod, deps: deps(), aliases: aliases(), source_url: "https://github.com/mhanberg/temple", docs: [ main: "Temple", extras: ["README.md"], deps: [ phoenix_html: "https://hexdocs.pm/phoenix_html/" ] ] ] end # Specifies which paths to compile per environment. defp elixirc_paths(:test), do: ["lib", "test/support"] defp elixirc_paths(_), do: ["lib"] # Run "mix help compile.app" to learn about applications. def application do [ extra_applications: [:logger] ] end defp package do [ maintainers: ["Mitchell Hanberg"], licenses: ["MIT"], links: %{github: "https://github.com/mhanberg/temple"}, files: ~w(lib priv CHANGELOG.md LICENSE mix.exs README.md .formatter.exs) ] end defp aliases do [ docs: ["update_mdn_docs", "docs"] ] end defp deps do [ {:phoenix_html, "~> 2.13"}, {:ecto, "~> 3.0", optional: true}, {:phoenix_ecto, "~> 4.0", optional: true}, {:ex_doc, "~> 0.0", only: [:dev], runtime: false}, {:html_sanitize_ex, "~> 1.3", only: [:dev, :test], runtime: false}, {:phoenix, "~> 1.4", optional: true}, {:plug, "~> 1.8", optional: true} ] end end
24.553846
79
0.558271
ffc3717b0feb1e89562ceee9ebb44d918d480dcf
1,826
ex
Elixir
clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_batch_delete_intents_request.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_batch_delete_intents_request.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_batch_delete_intents_request.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2BatchDeleteIntentsRequest do @moduledoc """ The request message for Intents.BatchDeleteIntents. ## Attributes * `intents` (*type:* `list(GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2Intent.t)`, *default:* `nil`) - Required. The collection of intents to delete. Only intent `name` must be filled in. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :intents => list(GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2Intent.t()) } field(:intents, as: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2Intent, type: :list) end defimpl Poison.Decoder, for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2BatchDeleteIntentsRequest do def decode(value, options) do GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2BatchDeleteIntentsRequest.decode( value, options ) end end defimpl Poison.Encoder, for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2BatchDeleteIntentsRequest do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
34.45283
189
0.761227
ffc38cb4b0b6e9d316c44ea01e1ae2e892443566
342
exs
Elixir
priv/repo/migrations/20170101204412_create_deliveryservice_server.exs
rob05c/tox
f54847ca058ad24b909341ad65d595a4069d2471
[ "Apache-2.0" ]
2
2016-11-16T17:24:21.000Z
2019-02-15T05:38:27.000Z
priv/repo/migrations/20170101204412_create_deliveryservice_server.exs
rob05c/tox
f54847ca058ad24b909341ad65d595a4069d2471
[ "Apache-2.0" ]
null
null
null
priv/repo/migrations/20170101204412_create_deliveryservice_server.exs
rob05c/tox
f54847ca058ad24b909341ad65d595a4069d2471
[ "Apache-2.0" ]
null
null
null
defmodule Tox.Repo.Migrations.CreateDeliveryserviceServer do use Ecto.Migration def change do create table(:deliveryserviceservers) do add :deliveryservice, references(:deliveryservices, column: :xml_id, type: :text) add :server_host, references(:servers, column: :host, type: :text) timestamps() end end end
28.5
87
0.72807