hexsha
stringlengths
40
40
size
int64
2
991k
ext
stringclasses
2 values
lang
stringclasses
1 value
max_stars_repo_path
stringlengths
4
208
max_stars_repo_name
stringlengths
6
106
max_stars_repo_head_hexsha
stringlengths
40
40
max_stars_repo_licenses
list
max_stars_count
int64
1
33.5k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
4
208
max_issues_repo_name
stringlengths
6
106
max_issues_repo_head_hexsha
stringlengths
40
40
max_issues_repo_licenses
list
max_issues_count
int64
1
16.3k
max_issues_repo_issues_event_min_datetime
stringlengths
24
24
max_issues_repo_issues_event_max_datetime
stringlengths
24
24
max_forks_repo_path
stringlengths
4
208
max_forks_repo_name
stringlengths
6
106
max_forks_repo_head_hexsha
stringlengths
40
40
max_forks_repo_licenses
list
max_forks_count
int64
1
6.91k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
content
stringlengths
2
991k
avg_line_length
float64
1
36k
max_line_length
int64
1
977k
alphanum_fraction
float64
0
1
ffc3bab987444120271668b6129ce6c66c3a7c8b
293
exs
Elixir
test/message_check/rng_test.exs
Celeo/gandalf_bot
f8e48b9a0b403fac5e29e514ce6c684d1bebeecc
[ "MIT" ]
null
null
null
test/message_check/rng_test.exs
Celeo/gandalf_bot
f8e48b9a0b403fac5e29e514ce6c684d1bebeecc
[ "MIT" ]
null
null
null
test/message_check/rng_test.exs
Celeo/gandalf_bot
f8e48b9a0b403fac5e29e514ce6c684d1bebeecc
[ "MIT" ]
null
null
null
defmodule Bot.Util.Rng.Test do
  use ExUnit.Case

  alias Bot.Util.Rng

  # Without a queued static value, get_number/0 yields something in 1..10.
  test "get random number" do
    number = Rng.get_number()

    assert number >= 1
    assert number <= 10
  end

  # A value queued via enqueue_static/1 is returned by the next get_number/0.
  test "store static number" do
    Rng.enqueue_static(11)

    assert Rng.get_number() == 11
  end
end
17.235294
31
0.645051
ffc3ca23b7ca1f72d90840ff09e4baad2b098834
1,480
ex
Elixir
lib/vim_snake_web/views/error_helpers.ex
theanht1/vim_snake
a7650acf579721917a27d2f6c65f3e0d03e2d893
[ "MIT" ]
19
2020-01-15T05:34:44.000Z
2022-03-01T14:11:00.000Z
lib/vim_snake_web/views/error_helpers.ex
theanht1/vim_snake
a7650acf579721917a27d2f6c65f3e0d03e2d893
[ "MIT" ]
2
2021-09-14T03:06:58.000Z
2022-02-12T04:31:34.000Z
lib/vim_snake_web/views/error_helpers.ex
theanht1/vim_snake
a7650acf579721917a27d2f6c65f3e0d03e2d893
[ "MIT" ]
null
null
null
defmodule VimSnakeWeb.ErrorHelpers do
  @moduledoc """
  Conveniences for translating and building error messages.
  """

  use Phoenix.HTML

  @doc """
  Generates one inline error tag per error on the given form field.
  """
  def error_tag(form, field) do
    for error <- Keyword.get_values(form.errors, field) do
      content_tag(:span, translate_error(error), class: "help-block")
    end
  end

  @doc """
  Translates an Ecto error tuple `{msg, opts}` using gettext.

  Ecto builds its error messages at runtime, so they cannot be passed to
  gettext as static strings; instead they are translated dynamically
  through the `VimSnakeWeb.Gettext` backend under the "errors" domain
  (translations belong in the `errors.po` file). When Ecto supplies a
  `:count` option, plural rules are applied via `dngettext/6`.
  """
  def translate_error({msg, opts}) do
    count = opts[:count]

    if count do
      Gettext.dngettext(VimSnakeWeb.Gettext, "errors", msg, msg, count, opts)
    else
      Gettext.dgettext(VimSnakeWeb.Gettext, "errors", msg, opts)
    end
  end
end
32.888889
77
0.670946
ffc3d635f34e36b6f68bf017e96947582d3e0ed8
1,360
exs
Elixir
apps/admin_api/test/admin_api/v1/views/user_auth_view_test.exs
AndonMitev/EWallet
898cde38933d6f134734528b3e594eedf5fa50f3
[ "Apache-2.0" ]
322
2018-02-28T07:38:44.000Z
2020-05-27T23:09:55.000Z
apps/admin_api/test/admin_api/v1/views/user_auth_view_test.exs
AndonMitev/EWallet
898cde38933d6f134734528b3e594eedf5fa50f3
[ "Apache-2.0" ]
643
2018-02-28T12:05:20.000Z
2020-05-22T08:34:38.000Z
apps/admin_api/test/admin_api/v1/views/user_auth_view_test.exs
AndonMitev/EWallet
898cde38933d6f134734528b3e594eedf5fa50f3
[ "Apache-2.0" ]
63
2018-02-28T10:57:06.000Z
2020-05-27T23:10:38.000Z
# Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

defmodule AdminAPI.V1.UserAuthViewTest do
  use AdminAPI.ViewCase, :v1

  alias AdminAPI.V1.UserAuthView
  alias EWallet.Web.V1.UserAuthTokenSerializer

  describe "render/2" do
    # Test name fixed: this view renders an auth token, not a mint — the
    # original "with the given mint" was a copy-paste from the mint view tests.
    test "renders auth_token.json with the given auth token" do
      auth_token = insert(:auth_token)

      expected = %{
        version: @expected_version,
        success: true,
        data: UserAuthTokenSerializer.serialize(auth_token)
      }

      assert UserAuthView.render("auth_token.json", %{auth_token: auth_token}) == expected
    end

    test "renders empty_response.json" do
      expected = %{
        version: @expected_version,
        success: true,
        data: %{}
      }

      assert UserAuthView.render("empty_response.json", %{success: true}) == expected
    end
  end
end
30.909091
90
0.706618
ffc3dfdb3c7de34cbcb4171ca2bdf21c0312eab5
1,858
ex
Elixir
clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/operating_systems_list_response.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
null
null
null
clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/operating_systems_list_response.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
1
2020-12-18T09:25:12.000Z
2020-12-18T09:25:12.000Z
clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/operating_systems_list_response.ex
MasashiYokota/elixir-google-api
975dccbff395c16afcb62e7a8e411fbb58e9ab01
[ "Apache-2.0" ]
1
2020-10-04T10:12:44.000Z
2020-10-04T10:12:44.000Z
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.

defmodule GoogleApi.DFAReporting.V33.Model.OperatingSystemsListResponse do
  @moduledoc """
  Operating System List Response

  ## Attributes

  *   `kind` (*type:* `String.t`, *default:* `nil`) - Identifies what kind of resource this is. Value: the fixed string "dfareporting#operatingSystemsListResponse".
  *   `operatingSystems` (*type:* `list(GoogleApi.DFAReporting.V33.Model.OperatingSystem.t)`, *default:* `nil`) - Operating system collection.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :kind => String.t(),
          :operatingSystems => list(GoogleApi.DFAReporting.V33.Model.OperatingSystem.t())
        }

  # field/1,2 macros come from GoogleApi.Gax.ModelBase and register each
  # attribute for JSON (de)serialization.
  field(:kind)
  field(:operatingSystems, as: GoogleApi.DFAReporting.V33.Model.OperatingSystem, type: :list)
end

# Poison protocol implementations route decoding/encoding through the shared
# Gax model base.
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V33.Model.OperatingSystemsListResponse do
  def decode(value, options) do
    GoogleApi.DFAReporting.V33.Model.OperatingSystemsListResponse.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V33.Model.OperatingSystemsListResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
37.16
164
0.750807
ffc3f579847b6274eb35b56562207bd788e7b193
3,127
ex
Elixir
lib/blue_jet/app/storefront/order_line_item/proxy.ex
freshcom/freshcom-api
4f2083277943cf4e4e8fd4c4d443c7309f285ad7
[ "BSD-3-Clause" ]
44
2018-05-09T01:08:57.000Z
2021-01-19T07:25:26.000Z
lib/blue_jet/app/storefront/order_line_item/proxy.ex
freshcom/freshcom-api
4f2083277943cf4e4e8fd4c4d443c7309f285ad7
[ "BSD-3-Clause" ]
36
2018-05-08T23:59:54.000Z
2018-09-28T13:50:30.000Z
lib/blue_jet/app/storefront/order_line_item/proxy.ex
freshcom/freshcom-api
4f2083277943cf4e4e8fd4c4d443c7309f285ad7
[ "BSD-3-Clause" ]
9
2018-05-09T14:09:19.000Z
2021-03-21T21:04:04.000Z
defmodule BlueJet.Storefront.OrderLineItem.Proxy do
  use BlueJet, :proxy

  alias BlueJet.{Repo, Translation}
  alias BlueJet.Storefront.{CRMService, CatalogueService, GoodsService, FulfillmentService}

  # Resolves the goods record behind a line item's product, dispatching on the
  # product's goods_type tag ("Stockable" | "Unlockable" | "Depositable").
  # Lookups are scoped to the line item's account — get_account/1 presumably
  # comes from `use BlueJet, :proxy`; confirm in the BlueJet macro.
  def get_goods(oli = %{ product: %{ goods_type: "Stockable", goods_id: id }}) do
    account = get_account(oli)
    GoodsService.get_stockable(%{ id: id }, %{ account: account })
  end

  def get_goods(oli = %{ product: %{ goods_type: "Unlockable", goods_id: id }}) do
    account = get_account(oli)
    GoodsService.get_unlockable(%{ id: id }, %{ account: account })
  end

  def get_goods(oli = %{ product: %{ goods_type: "Depositable", goods_id: id }}) do
    account = get_account(oli)
    GoodsService.get_depositable(%{ id: id }, %{ account: account })
  end

  # Depositable lookup: the first clause handles line items that reference a
  # depositable directly via source_type/source_id, the second goes through
  # the associated product.
  def get_depositable(oli = %{ source_type: "Depositable", source_id: id }) do
    account = get_account(oli)
    GoodsService.get_depositable(%{ id: id }, %{ account: account })
  end

  def get_depositable(oli = %{ product: %{ goods_type: "Depositable", goods_id: id }}) do
    account = get_account(oli)
    GoodsService.get_depositable(%{ id: id }, %{ account: account })
  end

  # Fetches the point account of the customer who placed the order.
  # NOTE(review): get_sopts/1 is called BEFORE the order is preloaded —
  # keep this ordering in mind if get_sopts/1 ever starts reading the order.
  def get_point_account(oli) do
    opts = get_sopts(oli)
    oli = Repo.preload(oli, :order)

    CRMService.get_point_account(%{ customer_id: oli.order.customer_id }, opts)
  end

  # Creates a point transaction tied to this line item. The bare {:ok, pt}
  # match is intentional let-it-crash: a service error raises a MatchError.
  def create_point_transaction(fields, oli) do
    point_account = get_point_account(oli)
    opts = get_sopts(oli)

    {:ok, pt} =
      fields
      |> Map.put(:point_account_id, point_account.id)
      |> Map.put(:source_type, "OrderLineItem")
      |> Map.put(:source_id, oli.id)
      |> CRMService.create_point_transaction(opts)

    pt
  end

  # Marks a previously created point transaction as committed (crashes on error).
  def commit_point_transaction(id, oli) do
    opts = get_sopts(oli)
    {:ok, pt} = CRMService.update_point_transaction(id, %{ status: "committed" }, opts)

    pt
  end

  # Creates a fulfillment item for this line item inside the given package,
  # copying over the line item's "name" translations and marking the item
  # "fulfilled" immediately.
  def create_fulfillment_item(oli, package) do
    opts =
      get_sopts(oli)
      |> Map.put(:package, package)

    translations = Translation.merge_translations(%{}, oli.translations, ["name"])

    FulfillmentService.create_fulfillment_item(%{
      package_id: package.id,
      order_id: package.order_id,
      order_line_item_id: oli.id,
      target_id: oli.target_id,
      target_type: oli.target_type,
      status: "fulfilled",
      name: oli.name,
      quantity: oli.order_quantity,
      translations: translations
    }, opts)
  end

  # put/3 preloads an association onto the line item. A nil foreign key is a
  # no-op; otherwise the catalogue service is queried with the preload path.
  def put(oli = %{ price_id: nil }, {:price, _}, _), do: oli

  def put(oli, {:price, price_path}, opts) do
    preloads = %{ path: price_path, opts: opts }
    # Only account scoping is forwarded to the service call.
    opts = Map.take(opts, [:account, :account_id])

    price = CatalogueService.get_price(%{ id: oli.price_id, preloads: preloads }, opts)
    %{ oli | price: price }
  end

  def put(oli = %{ product_id: nil }, {:product, _}, _), do: oli

  def put(oli, {:product, product_path}, opts) do
    preloads = %{ path: product_path, opts: opts }
    opts = Map.take(opts, [:account, :account_id])

    product = CatalogueService.get_product(%{ id: oli.product_id, preloads: preloads }, opts)
    %{ oli | product: product }
  end

  # Any other preload key is ignored.
  def put(oli, _, _), do: oli
end
31.908163
93
0.661337
ffc3f58c5c3f0c320e46beb8e1565973f2132c40
552
exs
Elixir
config/test.exs
olafura/accent
b081bd96a816f5ffd79a1b28cd5ba022d5eda803
[ "BSD-3-Clause" ]
1
2020-07-01T16:08:34.000Z
2020-07-01T16:08:34.000Z
config/test.exs
queer/accent
43a0ab8b43c99a8f8b3bde7020eb615c39037ad5
[ "BSD-3-Clause" ]
null
null
null
config/test.exs
queer/accent
43a0ab8b43c99a8f8b3bde7020eb615c39037ad5
[ "BSD-3-Clause" ]
null
null
null
# Test-environment configuration for the Accent application.
import Config

# Shares the release/runtime configuration with the test environment.
import_config "releases.exs"

# Dedicated port for the endpoint; the HTTP server itself is never started
# during tests.
config :accent, Accent.Endpoint,
  http: [port: 4001],
  server: false

# SQL sandbox pool gives each test an isolated, rolled-back transaction.
config :accent, Accent.Repo, pool: Ecto.Adapters.SQL.Sandbox

# Bamboo's TestAdapter captures mails in-process instead of delivering them.
config :accent, Accent.Mailer,
  webapp_url: "http://example.com",
  mailer_from: "accent-test@example.com",
  x_smtpapi_header: ~s({"category": ["test", "accent-api-test"]}),
  adapter: Bamboo.TestAdapter

# Hook collaborators are swapped for mock modules — presumably Mox-based;
# confirm against the test helper setup.
config :accent,
  hook_broadcaster: Accent.Hook.BroadcasterMock,
  hook_github_file_server: Accent.Hook.Consumers.GitHub.FileServerMock

# Keep test output quiet.
config :logger, level: :warn
25.090909
70
0.75
ffc41f37059e96e92baee084ca4546ec4c8e58c7
960
ex
Elixir
deps/phoenix_live_reload/lib/phoenix_live_reload/application.ex
rpillar/Top5_Elixir
9c450d2e9b291108ff1465dc066dfe442dbca822
[ "MIT" ]
null
null
null
deps/phoenix_live_reload/lib/phoenix_live_reload/application.ex
rpillar/Top5_Elixir
9c450d2e9b291108ff1465dc066dfe442dbca822
[ "MIT" ]
null
null
null
deps/phoenix_live_reload/lib/phoenix_live_reload/application.ex
rpillar/Top5_Elixir
9c450d2e9b291108ff1465dc066dfe442dbca822
[ "MIT" ]
null
null
null
defmodule Phoenix.LiveReloader.Application do
  @moduledoc false

  use Application

  require Logger

  # Application entry point: supervises a single file-system watcher.
  def start(_type, _args) do
    # Child-spec map replaces the deprecated `Supervisor.Spec.worker/2`
    # helper; `restart` defaults to :permanent, matching the old behaviour.
    children = [
      %{id: __MODULE__, start: {__MODULE__, :start_link, []}}
    ]

    Supervisor.start_link(children, strategy: :one_for_one)
  end

  # Starts the FileSystem watcher on the current working directory, honouring
  # an optional :backend from the :phoenix_live_reload app environment.
  def start_link do
    opts = [dirs: [Path.absname("")], name: :phoenix_live_reload_file_monitor]

    opts =
      if backend = Application.get_env(:phoenix_live_reload, :backend) do
        [backend: backend] ++ opts
      else
        opts
      end

    case FileSystem.start_link(opts) do
      {:ok, pid} ->
        {:ok, pid}

      other ->
        # Live reload is an optional dev-time feature: log a friendly notice
        # and propagate the error rather than crashing the host app.
        Logger.warn """
        Could not start Phoenix live-reload because we cannot listen to the file system.
        You don't need to worry! This is an optional feature used during development to
        refresh your browser when you save files and it does not affect production.
        """

        other
    end
  end
end
26.666667
89
0.626042
ffc432c331ef97cb44582d00b081a810d54803dd
8,300
ex
Elixir
lib/pow/plug.ex
rjurado01/pow
d97c013cafe06f51d30a56f4258d7ccc0952edf2
[ "MIT" ]
1
2021-05-27T21:33:29.000Z
2021-05-27T21:33:29.000Z
lib/pow/plug.ex
rjurado01/pow
d97c013cafe06f51d30a56f4258d7ccc0952edf2
[ "MIT" ]
null
null
null
lib/pow/plug.ex
rjurado01/pow
d97c013cafe06f51d30a56f4258d7ccc0952edf2
[ "MIT" ]
null
null
null
defmodule Pow.Plug do
  @moduledoc """
  Plug helper methods.
  """
  alias Plug.Conn
  alias Pow.{Config, Operations, Plug.MessageVerifier}

  # Key under which the Pow configuration is stored in `conn.private`.
  @private_config_key :pow_config

  @doc """
  Get the current user assigned to the conn.

  The config is fetched from the conn. See `current_user/2` for more.
  """
  @spec current_user(Conn.t()) :: map() | nil
  def current_user(conn) do
    current_user(conn, fetch_config(conn))
  end

  @doc """
  Get the current user assigned to the conn.

  This will fetch the user from the assigns map in the conn. The key is by
  default `:current_user`, but it can be overridden with
  `:current_user_assigns_key` configuration option.
  """
  @spec current_user(Conn.t(), Config.t()) :: map() | nil
  def current_user(%{assigns: assigns}, config) do
    key = current_user_assigns_key(config)

    Map.get(assigns, key)
  end

  @doc """
  Assign an authenticated user to the connection.

  This will assign the user to the conn. The key is by default
  `:current_user`, but it can be overridden with `:current_user_assigns_key`
  configuration option.
  """
  @spec assign_current_user(Conn.t(), any(), Config.t()) :: Conn.t()
  def assign_current_user(conn, user, config) do
    key = current_user_assigns_key(config)

    Conn.assign(conn, key, user)
  end

  defp current_user_assigns_key(config) do
    Config.get(config, :current_user_assigns_key, :current_user)
  end

  @doc """
  Put the provided config as a private key in the connection.
  """
  @spec put_config(Conn.t(), Config.t()) :: Conn.t()
  def put_config(conn, config) do
    Conn.put_private(conn, @private_config_key, config)
  end

  @doc """
  Fetch configuration from the private key in the connection.

  It'll raise an error if configuration hasn't been set as a private key.
  """
  @spec fetch_config(Conn.t()) :: Config.t()
  def fetch_config(%{private: private}) do
    private[@private_config_key] || no_config_error()
  end

  @doc """
  Prepend namespace found in Plug Pow configuration to binary.

  Will prepend `:otp_app` if exists in configuration.
  """
  @spec prepend_with_namespace(Config.t(), binary()) :: binary()
  def prepend_with_namespace(config, string) do
    case fetch_namespace(config) do
      nil -> string
      namespace -> "#{namespace}_#{string}"
    end
  end

  defp fetch_namespace(config), do: Config.get(config, :otp_app)

  @doc """
  Authenticates a user.

  If successful, a new session will be created.
  """
  @spec authenticate_user(Conn.t(), map()) :: {:ok | :error, Conn.t()}
  def authenticate_user(conn, params) do
    config = fetch_config(conn)

    params
    |> Operations.authenticate(config)
    |> case do
      nil -> {:error, conn}
      user -> {:ok, create(conn, user, config)}
    end
  end

  # TODO: Remove by 1.1.0
  @doc false
  @deprecated "Use `delete/1` instead"
  @spec clear_authenticated_user(Conn.t()) :: {:ok, Conn.t()}
  def clear_authenticated_user(conn), do: {:ok, delete(conn)}

  @doc """
  Creates a changeset from the current authenticated user.
  """
  @spec change_user(Conn.t(), map()) :: map()
  def change_user(conn, params \\ %{}) do
    config = fetch_config(conn)

    # No current user means a changeset for a brand new record.
    case current_user(conn, config) do
      nil -> Operations.changeset(params, config)
      user -> Operations.changeset(user, params, config)
    end
  end

  @doc """
  Creates a new user.

  If successful, a new session will be created.
  """
  @spec create_user(Conn.t(), map()) :: {:ok, map(), Conn.t()} | {:error, map(), Conn.t()}
  def create_user(conn, params) do
    config = fetch_config(conn)

    params
    |> Operations.create(config)
    |> maybe_create_auth(conn, config)
  end

  @doc """
  Updates the current authenticated user.

  If successful, a new session will be created.
  """
  @spec update_user(Conn.t(), map()) :: {:ok, map(), Conn.t()} | {:error, map(), Conn.t()}
  def update_user(conn, params) do
    config = fetch_config(conn)

    conn
    |> current_user(config)
    |> Operations.update(params, config)
    |> maybe_create_auth(conn, config)
  end

  @doc """
  Deletes the current authenticated user.

  If successful, the user authentication will be cleared from the session.
  """
  @spec delete_user(Conn.t()) :: {:ok, map(), Conn.t()} | {:error, map(), Conn.t()}
  def delete_user(conn) do
    config = fetch_config(conn)

    conn
    |> current_user(config)
    |> Operations.delete(config)
    |> case do
      {:ok, user} -> {:ok, user, delete(conn, config)}
      {:error, changeset} -> {:error, changeset, conn}
    end
  end

  # Session is only created when the operation succeeded.
  defp maybe_create_auth({:ok, user}, conn, config) do
    {:ok, user, create(conn, user, config)}
  end

  defp maybe_create_auth({:error, changeset}, conn, _config) do
    {:error, changeset, conn}
  end

  # TODO: Remove by 1.1.0
  @doc false
  @deprecated "Use `get_plug/1` instead"
  @spec get_mod(Config.t()) :: atom()
  def get_mod(config), do: get_plug(config)

  @spec get_plug(Config.t()) :: atom()
  def get_plug(config) do
    config[:plug] || no_plug_error()
  end

  @doc """
  Call `create/3` for the Pow plug set for the `conn`.
  """
  @spec create(Conn.t(), map()) :: Conn.t()
  def create(conn, user), do: create(conn, user, fetch_config(conn))

  @spec create(Conn.t(), map(), Config.t()) :: Conn.t()
  def create(conn, user, config), do: get_plug(config).do_create(conn, user, config)

  @doc """
  Call `delete/2` for the Pow plug set for the `conn`.
  """
  @spec delete(Conn.t()) :: Conn.t()
  def delete(conn), do: delete(conn, fetch_config(conn))

  @spec delete(Conn.t(), Config.t()) :: Conn.t()
  def delete(conn, config), do: get_plug(config).do_delete(conn, config)

  @spec no_config_error :: no_return
  defp no_config_error do
    Config.raise_error("Pow configuration not found in connection. Please use a Pow plug that puts the Pow configuration in the plug connection.")
  end

  @spec no_plug_error :: no_return
  defp no_plug_error do
    Config.raise_error("Pow plug was not found in config. Please use a Pow plug that puts the `:plug` in the Pow configuration.")
  end

  # Returns false only when user enumeration prevention was explicitly
  # disabled, or when the changeset error is not a unique email constraint.
  @doc false
  @spec __prevent_user_enumeration__(Conn.t(), any()) :: boolean()
  def __prevent_user_enumeration__(%{private: %{pow_prevent_user_enumeration: false}}, _changeset), do: false
  def __prevent_user_enumeration__(_conn, %{errors: errors}), do: unique_constraint_error?(errors, :email)
  def __prevent_user_enumeration__(_conn, _any), do: true

  defp unique_constraint_error?(errors, field) do
    Enum.find_value(errors, false, fn
      {^field, {_msg, [constraint: :unique, constraint_name: _name]}} -> true
      _any -> false
    end)
  end

  @doc """
  Signs a token for public consumption.

  Used to prevent timing attacks with token lookup.

  This uses `Pow.Plug.MessageVerifier` by default, but can be changed if the
  Pow configuration is set with `:message_verifier`.
  `Pow.Plug.MessageVerifier` can also be configured in this way if
  `:message_verifier` is set to
  `{Pow.Plug.MessageVerifier, key_generator_opts: [length: 64]}`
  """
  @spec sign_token(Conn.t(), binary(), binary(), Config.t() | nil) :: binary()
  def sign_token(conn, salt, token, config \\ nil) do
    config = config || fetch_config(conn)
    {module, config} = message_verifier_module(config)

    module.sign(conn, salt, token, config)
  end

  @doc """
  Decodes and verifies a token.

  Used to prevent timing attacks with token lookup.

  This uses `Pow.Plug.MessageVerifier` by default, but can be changed if the
  Pow configuration is set with `:message_verifier`.
  `Pow.Plug.MessageVerifier` can also be configured in this way if
  `:message_verifier` is set to
  `{Pow.Plug.MessageVerifier, key_generator_opts: [length: 64]}`
  """
  @spec verify_token(Conn.t(), binary(), binary(), Config.t() | nil) :: {:ok, binary()} | :error
  def verify_token(conn, salt, token, config \\ nil) do
    config = config || fetch_config(conn)
    {module, config} = message_verifier_module(config)

    module.verify(conn, salt, token, config)
  end

  # Accepts either a `{module, opts}` tuple or a bare module for the
  # :message_verifier config value.
  defp message_verifier_module(config) do
    case Config.get(config, :message_verifier, MessageVerifier) do
      {module, config} -> {module, config}
      module -> {module, []}
    end
  end
end
30.627306
146
0.666145
ffc44eca76505e967c36240d21076c5ea5d9a5c2
3,816
ex
Elixir
clients/content/lib/google_api/content/v21/model/datafeed_status.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/content/lib/google_api/content/v21/model/datafeed_status.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/content/lib/google_api/content/v21/model/datafeed_status.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.

defmodule GoogleApi.Content.V21.Model.DatafeedStatus do
  @moduledoc """
  The status of a datafeed, i.e., the result of the last retrieval of the datafeed computed asynchronously when the feed processing is finished.

  ## Attributes

  *   `country` (*type:* `String.t`, *default:* `nil`) - The country for which the status is reported, represented as a CLDR territory code.
  *   `datafeedId` (*type:* `String.t`, *default:* `nil`) - The ID of the feed for which the status is reported.
  *   `errors` (*type:* `list(GoogleApi.Content.V21.Model.DatafeedStatusError.t)`, *default:* `nil`) - The list of errors occurring in the feed.
  *   `itemsTotal` (*type:* `String.t`, *default:* `nil`) - The number of items in the feed that were processed.
  *   `itemsValid` (*type:* `String.t`, *default:* `nil`) - The number of items in the feed that were valid.
  *   `kind` (*type:* `String.t`, *default:* `content#datafeedStatus`) - Identifies what kind of resource this is. Value: the fixed string "content#datafeedStatus".
  *   `language` (*type:* `String.t`, *default:* `nil`) - The two-letter ISO 639-1 language for which the status is reported.
  *   `lastUploadDate` (*type:* `String.t`, *default:* `nil`) - The last date at which the feed was uploaded.
  *   `processingStatus` (*type:* `String.t`, *default:* `nil`) - The processing status of the feed. Acceptable values are: - ""failure": The feed could not be processed or all items had errors." - "in progress": The feed is being processed. - "none": The feed has not yet been processed. For example, a feed that has never been uploaded will have this processing status. - "success": The feed was processed successfully, though some items might have had errors.
  *   `warnings` (*type:* `list(GoogleApi.Content.V21.Model.DatafeedStatusError.t)`, *default:* `nil`) - The list of errors occurring in the feed.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :country => String.t(),
          :datafeedId => String.t(),
          :errors => list(GoogleApi.Content.V21.Model.DatafeedStatusError.t()),
          :itemsTotal => String.t(),
          :itemsValid => String.t(),
          :kind => String.t(),
          :language => String.t(),
          :lastUploadDate => String.t(),
          :processingStatus => String.t(),
          :warnings => list(GoogleApi.Content.V21.Model.DatafeedStatusError.t())
        }

  # field/1,2 macros come from GoogleApi.Gax.ModelBase and register each
  # attribute for JSON (de)serialization.
  field(:country)
  field(:datafeedId)
  field(:errors, as: GoogleApi.Content.V21.Model.DatafeedStatusError, type: :list)
  field(:itemsTotal)
  field(:itemsValid)
  field(:kind)
  field(:language)
  field(:lastUploadDate)
  field(:processingStatus)
  field(:warnings, as: GoogleApi.Content.V21.Model.DatafeedStatusError, type: :list)
end

# Poison protocol implementations route decoding/encoding through the shared
# Gax model base.
defimpl Poison.Decoder, for: GoogleApi.Content.V21.Model.DatafeedStatus do
  def decode(value, options) do
    GoogleApi.Content.V21.Model.DatafeedStatus.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.Content.V21.Model.DatafeedStatus do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
47.7
164
0.692872
ffc45bd42fd85bb4f585a6ea35d10b64b9c1b0cd
2,357
ex
Elixir
lib/mix/phoenix/context.ex
G3z/phoenix
f13fe2c7f7ec25e6a59204266cb8cbbe7ffbbded
[ "MIT" ]
2
2016-11-01T15:01:48.000Z
2016-11-01T15:07:20.000Z
lib/mix/phoenix/context.ex
G3z/phoenix
f13fe2c7f7ec25e6a59204266cb8cbbe7ffbbded
[ "MIT" ]
null
null
null
lib/mix/phoenix/context.ex
G3z/phoenix
f13fe2c7f7ec25e6a59204266cb8cbbe7ffbbded
[ "MIT" ]
null
null
null
defmodule Mix.Phoenix.Context do
  @moduledoc false

  alias Mix.Phoenix.{Context, Schema}

  defstruct name: nil,
            module: nil,
            schema: nil,
            alias: nil,
            base_module: nil,
            web_module: nil,
            basename: nil,
            file: nil,
            test_file: nil,
            dir: nil,
            generate?: true,
            context_app: nil,
            opts: []

  # A context name is a dot-separated chain of CamelCase segments.
  def valid?(context), do: context =~ ~r/^[A-Z]\w*(\.[A-Z]\w*)*$/

  # Builds the full Context struct (modules, file paths, flags) for the
  # generators from a context name, its schema, and the CLI options.
  def new(context_name, %Schema{} = schema, opts) do
    context_app = opts[:context_app] || Mix.Phoenix.context_app()
    base_module = Module.concat([Mix.Phoenix.context_base(context_app)])
    module = Module.concat(base_module, context_name)
    context_alias = Module.concat([module |> Module.split() |> List.last()])

    base_dir = Phoenix.Naming.underscore(context_name)
    basename = Path.basename(base_dir)
    dir = Mix.Phoenix.context_lib_path(context_app, base_dir)
    test_dir = Mix.Phoenix.context_test_path(context_app, base_dir)

    %Context{
      name: context_name,
      module: module,
      schema: schema,
      alias: context_alias,
      base_module: base_module,
      web_module: web_module(),
      basename: basename,
      file: dir <> ".ex",
      test_file: Path.join([test_dir, basename <> "_test.exs"]),
      dir: dir,
      generate?: Keyword.get(opts, :context, true),
      context_app: context_app,
      opts: opts
    }
  end

  def pre_existing?(%Context{file: file}), do: File.exists?(file)

  def pre_existing_tests?(%Context{test_file: file}), do: File.exists?(file)

  # Counts public `def`s in the context file by walking its AST.
  def function_count(%Context{file: file}) do
    ast =
      file
      |> File.read!()
      |> Code.string_to_quoted!()

    {_ast, count} =
      Macro.postwalk(ast, 0, fn
        {:def, _, _} = node, acc -> {node, acc + 1}
        node, acc -> {node, acc}
      end)

    count
  end

  # Number of .ex files anywhere under the context directory.
  def file_count(%Context{dir: dir}) do
    dir
    |> Path.join("**/*.ex")
    |> Path.wildcard()
    |> Enum.count()
  end

  # Derives the web namespace from the base module, appending "Web" only
  # for umbrella-free apps whose base doesn't already end in it.
  defp web_module do
    base = Mix.Phoenix.base()

    cond do
      Mix.Phoenix.context_app() != Mix.Phoenix.otp_app() ->
        Module.concat([base])

      String.ends_with?(base, "Web") ->
        Module.concat([base])

      true ->
        Module.concat(["#{base}Web"])
    end
  end
end
25.901099
76
0.570216
ffc4707ad5c7cd88528e6d979dd1ac2d41cbdeb0
596
exs
Elixir
test/vendor/rsa_test.exs
bahanni/custom_rpi4
ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5
[ "MIT" ]
null
null
null
test/vendor/rsa_test.exs
bahanni/custom_rpi4
ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5
[ "MIT" ]
null
null
null
test/vendor/rsa_test.exs
bahanni/custom_rpi4
ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5
[ "MIT" ]
null
null
null
defmodule RSATest do
  use ExUnit.Case

  require Helpers

  alias FarmbotOS.Bootstrap.Authorization

  # Fixture keys decoded through the code under test.
  def priv_key(), do: Authorization.rsa_decode_key(Helpers.priv_key())
  def pub_key(), do: Authorization.rsa_decode_key(Helpers.pub_key())

  test "decode_key" do
    assert {:RSAPublicKey, modulus, exponent} = pub_key()

    # Sanity-check the decoded record: a real modulus is large, and the
    # public exponent is the conventional 65537.
    assert modulus > 999_999_999_999
    assert exponent == 65537
  end

  test "encryption" do
    # Round-trip: encrypt with the public key, decrypt with the private one.
    ciphertext = Authorization.rsa_encrypt("TOP SECRET", {:public, pub_key()})
    plaintext = :public_key.decrypt_private(ciphertext, priv_key())

    assert plaintext == "TOP SECRET"
  end
end
27.090909
70
0.72651
ffc47a5d1a52edc3f41a3ab9cfb88edfc8581a85
698
exs
Elixir
test/changelog_web/views/podcast_view_test.exs
gustavoarmoa/changelog.com
e898a9979a237ae66962714821ed8633a4966f37
[ "MIT" ]
2,599
2016-10-25T15:02:53.000Z
2022-03-26T02:34:42.000Z
test/changelog_web/views/podcast_view_test.exs
sdrees/changelog.com
955cdcf93d74991062f19a03e34c9f083ade1705
[ "MIT" ]
253
2016-10-25T20:29:24.000Z
2022-03-29T21:52:36.000Z
test/changelog_web/views/podcast_view_test.exs
sdrees/changelog.com
955cdcf93d74991062f19a03e34c9f083ade1705
[ "MIT" ]
298
2016-10-25T15:18:31.000Z
2022-01-18T21:25:52.000Z
defmodule ChangelogWeb.PodcastViewTest do
  use ChangelogWeb.ConnCase, async: true

  alias ChangelogWeb.PodcastView

  describe "subscribe_on_overcast_url" do
    test "reformats the Apple URL as necessary" do
      podcast =
        build(:podcast,
          apple_url: "https://itunes.apple.com/us/podcast/the-changelog/id341623264"
        )

      overcast_url = PodcastView.subscribe_on_overcast_url(podcast)

      assert overcast_url == "https://overcast.fm/itunes341623264/the-changelog"
    end
  end

  describe "subscribe_on_android_url" do
    test "reformats the feed URL as necessary" do
      podcast = build(:podcast, slug: "ohai")

      android_url = PodcastView.subscribe_on_android_url(podcast)

      assert String.match?(android_url, ~r/\/ohai\/feed\Z/)
    end
  end
end
30.347826
99
0.712034
ffc48008a2cd9d57b6ee377650330c299e47c2ea
1,338
ex
Elixir
lib/salty/generichash_blake2b.ex
japhib/libsalty2
b11e5441b81e3591f73b342ef81b26a1345ee670
[ "Apache-2.0" ]
1
2021-07-18T08:50:32.000Z
2021-07-18T08:50:32.000Z
lib/salty/generichash_blake2b.ex
japhib/libsalty2
b11e5441b81e3591f73b342ef81b26a1345ee670
[ "Apache-2.0" ]
1
2021-12-04T22:06:35.000Z
2021-12-04T22:06:35.000Z
lib/salty/generichash_blake2b.ex
japhib/libsalty2
b11e5441b81e3591f73b342ef81b26a1345ee670
[ "Apache-2.0" ]
6
2020-09-14T09:19:14.000Z
2022-01-25T20:28:05.000Z
defmodule Salty.Generichash.Blake2b do
  @moduledoc """
  BLAKE2b flavour of the libsodium generic hash, delegating every call to
  the native bindings exposed as `C` (brought into scope by
  `Salty.Generichash` — confirm in that module).
  """

  use Salty.Generichash

  # Digest and key size limits reported by libsodium.
  defdelegate bytes_min, to: C, as: :generichash_blake2b_BYTES_MIN
  defdelegate bytes_max, to: C, as: :generichash_blake2b_BYTES_MAX
  defdelegate bytes, to: C, as: :generichash_blake2b_BYTES
  defdelegate keybytes_min, to: C, as: :generichash_blake2b_KEYBYTES_MIN
  defdelegate keybytes_max, to: C, as: :generichash_blake2b_KEYBYTES_MAX
  defdelegate keybytes, to: C, as: :generichash_blake2b_KEYBYTES
  defdelegate saltbytes, to: C, as: :generichash_blake2b_SALTBYTES
  defdelegate personalbytes, to: C, as: :generichash_blake2b_PERSONALBYTES

  # One-shot hashing: plain, keyed, and keyed with salt/personalization.
  defdelegate hash(outlen, data), to: C, as: :generichash_blake2b
  defdelegate hash(outlen, data, key), to: C, as: :generichash_blake2b_key

  defdelegate hash(outlen, data, key, salt, personal),
    to: C,
    as: :generichash_blake2b_salt_personal

  # Streaming interface: init/update/final.
  defdelegate init(key, outlen), to: C, as: :generichash_blake2b_init

  defdelegate init(key, outlen, salt, personal),
    to: C,
    as: :generichash_blake2b_init_salt_personal

  defdelegate update(state, input), to: C, as: :generichash_blake2b_update
  defdelegate final(state, outlen), to: C, as: :generichash_blake2b_final

  # Not provided by the underlying binding — exits deliberately.
  def final_verify(_state, _expected) do
    :erlang.exit(:not_implemented)
  end
end
19.391304
74
0.733931
ffc483c7c9f91d2b519927fa189bbdfe61591916
511
exs
Elixir
early-chapters/guess.exs
nespera/progr-elixir-1.6
d8b5751d5106ce81e440e2ad0a28abb0d00b18a2
[ "Apache-2.0" ]
null
null
null
early-chapters/guess.exs
nespera/progr-elixir-1.6
d8b5751d5106ce81e440e2ad0a28abb0d00b18a2
[ "Apache-2.0" ]
null
null
null
early-chapters/guess.exs
nespera/progr-elixir-1.6
d8b5751d5106ce81e440e2ad0a28abb0d00b18a2
[ "Apache-2.0" ]
null
null
null
defmodule Chop do def guess(target, low..high, goes \\ 1) do try = div(low + high, 2) IO.puts("Guessing #{try}") guess(target, low..high, try, goes) end defp guess(target, _, target, goes), do: IO.puts "Yes. Answer is #{target}! Took #{goes} goes." defp guess(target, low..high, _, goes) when target < div(low + high, 2) do guess(target, low..div(low + high, 2), goes+1) end defp guess(target, low..high, _, goes) do guess(target, div(low + high, 2)..high, goes+1) end end
26.894737
97
0.614481
ffc49fa02ce5f0c12a548f17362fe91ea90ee1bd
671
ex
Elixir
lib/builder/car_director.ex
Fulnir/Elixir_Design_Pattern
77e77541ac604968bfbfe9ebbd6b51f1c3442c1a
[ "MIT" ]
3
2018-03-06T13:45:42.000Z
2019-07-22T00:16:54.000Z
lib/builder/car_director.ex
Fulnir/Elixir_Design_Pattern
77e77541ac604968bfbfe9ebbd6b51f1c3442c1a
[ "MIT" ]
null
null
null
lib/builder/car_director.ex
Fulnir/Elixir_Design_Pattern
77e77541ac604968bfbfe9ebbd6b51f1c3442c1a
[ "MIT" ]
null
null
null
defmodule CarDirector do @moduledoc """ Copyright © 2018 Edwin Buehler. All rights reserved. """ import CarBuilder def construct, do: construct("") def construct(name) do case name do "Blues Mobil" -> CarBuilder.build() |> name(name) |> color_doors(:black) "Bumble Bee" -> TransformerCarBuilder.build() |> name(name) |> color(:yellow) |> color_doors(:black) _ -> CarBuilder.build() |> name("Average Joe") end end end
25.807692
56
0.444113
ffc50c8bfc7363cb7a4055356fae356eaf5eb3f2
836
ex
Elixir
channel-sender/lib/channel_sender_ex/core/pubsub/socket_event_bus.ex
santitigaga/async-dataflow
66132c7353ae0b3b9d5a3704699b5ba130b4da9d
[ "MIT" ]
2
2022-01-11T21:03:44.000Z
2022-03-15T15:13:11.000Z
channel-sender/lib/channel_sender_ex/core/pubsub/socket_event_bus.ex
santitigaga/async-dataflow
66132c7353ae0b3b9d5a3704699b5ba130b4da9d
[ "MIT" ]
3
2021-12-21T21:04:20.000Z
2022-03-15T16:16:45.000Z
channel-sender/lib/channel_sender_ex/core/pubsub/socket_event_bus.ex
santitigaga/async-dataflow
66132c7353ae0b3b9d5a3704699b5ba130b4da9d
[ "MIT" ]
2
2022-02-08T22:33:36.000Z
2022-03-25T19:55:18.000Z
defmodule ChannelSenderEx.Core.PubSub.SocketEventBus do @moduledoc """ Handles different socket events, as connected and disconnected, and abstracts in some way the socket/channel discovery and association. """ alias ChannelSenderEx.Core.ChannelRegistry alias ChannelSenderEx.Core.Channel def notify_event({:connected, channel}, socket_pid) do connect_channel(channel, socket_pid) end def connect_channel(_, _, count \\ 0) def connect_channel(_, _, 7), do: raise("No channel found") def connect_channel(channel, socket_pid, count) do case ChannelRegistry.lookup_channel_addr(channel) do pid when is_pid(pid) -> :ok = Channel.socket_connected(pid, socket_pid) pid :noproc -> Process.sleep(350) connect_channel(channel, socket_pid, count + 1) end end end
30.962963
124
0.721292
ffc516bad15bf3a09e9b033ce68c51c07aa22011
13,597
ex
Elixir
lib/ecto/repo/preloader.ex
zachahn/ecto
8119ad877f7caa837912647a014f4a63a951dba0
[ "Apache-2.0" ]
null
null
null
lib/ecto/repo/preloader.ex
zachahn/ecto
8119ad877f7caa837912647a014f4a63a951dba0
[ "Apache-2.0" ]
null
null
null
lib/ecto/repo/preloader.ex
zachahn/ecto
8119ad877f7caa837912647a014f4a63a951dba0
[ "Apache-2.0" ]
null
null
null
defmodule Ecto.Repo.Preloader do # The module invoked by user defined repos # for preload related functionality. @moduledoc false require Ecto.Query @doc """ Transforms a result set based on query preloads, loading the associations onto their parent schema. """ @spec query([list], Ecto.Repo.t, list, Access.t, fun, Keyword.t) :: [list] def query([], _repo, _preloads, _take, _fun, _opts), do: [] def query(rows, _repo, [], _take, fun, _opts), do: Enum.map(rows, fun) def query(rows, repo, preloads, take, fun, opts) do rows |> extract |> normalize_and_preload_each(repo, preloads, take, opts) |> unextract(rows, fun) end defp extract([[nil|_]|t2]), do: extract(t2) defp extract([[h|_]|t2]), do: [h|extract(t2)] defp extract([]), do: [] defp unextract(structs, [[nil|_] = h2|t2], fun), do: [fun.(h2)|unextract(structs, t2, fun)] defp unextract([h1|structs], [[_|t1]|t2], fun), do: [fun.([h1|t1])|unextract(structs, t2, fun)] defp unextract([], [], _fun), do: [] @doc """ Implementation for `Ecto.Repo.preload/2`. 
""" @spec preload(structs, atom, atom | list, Keyword.t) :: structs when structs: [Ecto.Schema.t] | Ecto.Schema.t | nil def preload(nil, _repo, _preloads, _opts) do nil end def preload(structs, repo, preloads, opts) when is_list(structs) do normalize_and_preload_each(structs, repo, preloads, opts[:take], opts) end def preload(struct, repo, preloads, opts) when is_map(struct) do normalize_and_preload_each([struct], repo, preloads, opts[:take], opts) |> hd() end defp normalize_and_preload_each(structs, repo, preloads, take, opts) do preloads = normalize(preloads, take, preloads) preload_each(structs, repo, preloads, opts) rescue e -> # Reraise errors so we ignore the preload inner stacktrace reraise e end ## Preloading defp preload_each(structs, _repo, [], _opts), do: structs defp preload_each([], _repo, _preloads, _opts), do: [] defp preload_each(structs, repo, preloads, opts) do if sample = Enum.find(structs, & &1) do module = sample.__struct__ prefix = preload_prefix(opts, sample) {assocs, throughs} = expand(module, preloads, {%{}, %{}}) assocs = maybe_pmap Map.values(assocs), repo, opts, fn {{:assoc, assoc, related_key}, take, query, sub_preloads}, opts -> preload_assoc(structs, module, repo, prefix, assoc, related_key, query, sub_preloads, take, opts) end throughs = Map.values(throughs) for struct <- structs do struct = Enum.reduce assocs, struct, &load_assoc/2 struct = Enum.reduce throughs, struct, &load_through/2 struct end else structs end end defp preload_prefix(opts, sample) do case Keyword.fetch(opts, :prefix) do {:ok, prefix} -> prefix :error -> %{__meta__: %{prefix: prefix}} = sample prefix end end ## Association preloading defp maybe_pmap(assocs, repo, opts, fun) do if match?([_,_|_], assocs) and not repo.in_transaction? and Keyword.get(opts, :in_parallel, true) do # We pass caller: self() so pools like the ownership # pool knows where to fetch the connection from and # set the proper timeouts. 
opts = Keyword.put_new(opts, :caller, self()) assocs |> Task.async_stream(&fun.(&1, opts), timeout: :infinity) |> Enum.map(fn {:ok, assoc} -> assoc end) else Enum.map(assocs, &fun.(&1, opts)) end end defp preload_assoc(structs, module, repo, prefix, %{cardinality: card} = assoc, related_key, query, preloads, take, opts) do {fetch_ids, loaded_ids, loaded_structs} = fetch_ids(structs, module, assoc, opts) {fetch_ids, fetch_structs} = fetch_query(fetch_ids, assoc, repo, query, prefix, related_key, take, opts) all = preload_each(Enum.reverse(loaded_structs, fetch_structs), repo, preloads, opts) {:assoc, assoc, assoc_map(card, Enum.reverse(loaded_ids, fetch_ids), all)} end defp fetch_ids(structs, module, assoc, opts) do %{field: field, owner_key: owner_key, cardinality: card} = assoc force? = Keyword.get(opts, :force, false) Enum.reduce structs, {[], [], []}, fn nil, acc -> acc struct, {fetch_ids, loaded_ids, loaded_structs} -> assert_struct!(module, struct) %{^owner_key => id, ^field => value} = struct cond do card == :one and Ecto.assoc_loaded?(value) and not force? -> {fetch_ids, [id|loaded_ids], [value|loaded_structs]} card == :many and Ecto.assoc_loaded?(value) and not force? 
-> {fetch_ids, List.duplicate(id, length(value)) ++ loaded_ids, value ++ loaded_structs} is_nil(id) -> {fetch_ids, loaded_ids, loaded_structs} true -> {[id|fetch_ids], loaded_ids, loaded_structs} end end end defp fetch_query([], _assoc, _repo, _query, _prefix, _related_key, _take, _opts) do {[], []} end defp fetch_query(ids, _assoc, _repo, query, _prefix, {_, key}, _take, _opts) when is_function(query, 1) do data = ids |> Enum.uniq |> query.() |> Enum.map(&{Map.fetch!(&1, key), &1}) |> Enum.sort unzip_ids data, [], [] end defp fetch_query(ids, %{cardinality: card} = assoc, repo, query, prefix, related_key, take, opts) do query = assoc.__struct__.assoc_query(assoc, query, Enum.uniq(ids)) field = related_key_to_field(query, related_key) # Normalize query query = %{Ecto.Query.Planner.ensure_select(query, take || true) | prefix: prefix} # Add the related key to the query results query = update_in query.select.expr, &{:{}, [], [field, &1]} # If we are returning many results, we must sort by the key too query = case card do :many -> update_in query.order_bys, fn order_bys -> [%Ecto.Query.QueryExpr{expr: [asc: field], params: [], file: __ENV__.file, line: __ENV__.line}|order_bys] end :one -> query end unzip_ids repo.all(query, opts), [], [] end defp related_key_to_field(query, {pos, key}) do {{:., [], [{:&, [], [related_key_pos(query, pos)]}, key]}, [], []} end defp related_key_pos(_query, pos) when pos >= 0, do: pos defp related_key_pos(query, pos), do: Ecto.Query.Builder.count_binds(query) + pos defp unzip_ids([{k, v}|t], acc1, acc2), do: unzip_ids(t, [k|acc1], [v|acc2]) defp unzip_ids([], acc1, acc2), do: {acc1, acc2} defp assert_struct!(mod, %{__struct__: mod}), do: true defp assert_struct!(mod, %{__struct__: struct}) do raise ArgumentError, "expected a homogeneous list containing the same struct, " <> "got: #{inspect mod} and #{inspect struct}" end defp assoc_map(:one, ids, structs) do one_assoc_map(ids, structs, %{}) end defp assoc_map(:many, ids, structs) do 
many_assoc_map(ids, structs, %{}) end defp one_assoc_map([id|ids], [struct|structs], map) do one_assoc_map(ids, structs, Map.put(map, id, struct)) end defp one_assoc_map([], [], map) do map end defp many_assoc_map([id|ids], [struct|structs], map) do {ids, structs, acc} = split_while(ids, structs, id, [struct]) many_assoc_map(ids, structs, Map.put(map, id, acc)) end defp many_assoc_map([], [], map) do map end defp split_while([id|ids], [struct|structs], id, acc), do: split_while(ids, structs, id, [struct|acc]) defp split_while(ids, structs, _id, acc), do: {ids, structs, acc} ## Load preloaded data defp load_assoc({:assoc, _assoc, _ids}, nil) do nil end defp load_assoc({:assoc, assoc, ids}, struct) do %{field: field, owner_key: owner_key, cardinality: cardinality} = assoc key = Map.fetch!(struct, owner_key) loaded = case ids do %{^key => value} -> value _ when cardinality == :many -> [] _ -> nil end Map.put(struct, field, loaded) end defp load_through({:through, assoc, throughs}, struct) do %{cardinality: cardinality, field: field, owner: owner} = assoc {loaded, _} = Enum.reduce(throughs, {[struct], owner}, &recur_through/2) Map.put(struct, field, maybe_first(loaded, cardinality)) end defp maybe_first(list, :one), do: List.first(list) defp maybe_first(list, _), do: list defp recur_through(field, {structs, owner}) do assoc = owner.__schema__(:association, field) case assoc.__struct__.preload_info(assoc) do {:assoc, %{related: related}, _} -> pks = related.__schema__(:primary_key) {children, _} = Enum.reduce(structs, {[], %{}}, fn struct, acc -> children = struct |> Map.fetch!(field) |> List.wrap Enum.reduce children, acc, fn child, {fresh, set} -> keys = through_pks(child, pks, assoc) case set do %{^keys => true} -> {fresh, set} _ -> {[child|fresh], Map.put(set, keys, true)} end end end) {Enum.reverse(children), related} {:through, _, through} -> Enum.reduce(through, {structs, owner}, &recur_through/2) end end defp through_pks(map, pks, assoc) do Enum.map pks, fn pk 
-> case map do %{^pk => value} -> value _ -> raise ArgumentError, "cannot preload through association `#{assoc.field}` on `#{inspect assoc.owner}`. " <> "Ecto expected a map/struct with the key `#{pk}` but got: #{inspect map}" end end end ## Normalizer def normalize(preload, take, original) do normalize_each(wrap(preload, original), [], take, original) end defp normalize_each({atom, {query, list}}, acc, take, original) when is_atom(atom) and (is_map(query) or is_function(query, 1)) do fields = take(take, atom) [{atom, {fields, query!(query), normalize_each(wrap(list, original), [], fields, original)}}|acc] end defp normalize_each({atom, query}, acc, take, _original) when is_atom(atom) and (is_map(query) or is_function(query, 1)) do [{atom, {take(take, atom), query!(query), []}}|acc] end defp normalize_each({atom, list}, acc, take, original) when is_atom(atom) do fields = take(take, atom) [{atom, {fields, nil, normalize_each(wrap(list, original), [], fields, original)}}|acc] end defp normalize_each(atom, acc, take, _original) when is_atom(atom) do [{atom, {take(take, atom), nil, []}}|acc] end defp normalize_each(other, acc, take, original) do Enum.reduce(wrap(other, original), acc, &normalize_each(&1, &2, take, original)) end defp query!(query) when is_function(query, 1), do: query defp query!(%Ecto.Query{} = query), do: query defp take(take, field) do case Access.fetch(take, field) do {:ok, fields} -> List.wrap(fields) :error -> nil end end defp wrap(list, _original) when is_list(list), do: list defp wrap(atom, _original) when is_atom(atom), do: atom defp wrap(other, original) do raise ArgumentError, "invalid preload `#{inspect other}` in `#{inspect original}`. 
" <> "preload expects an atom, a (nested) keyword or a (nested) list of atoms" end ## Expand def expand(schema, preloads, acc) do Enum.reduce(preloads, acc, fn {preload, {fields, query, sub_preloads}}, {assocs, throughs} -> assoc = association_from_schema!(schema, preload) info = assoc.__struct__.preload_info(assoc) case info do {:assoc, _, _} -> value = {info, fields, query, sub_preloads} assocs = Map.update(assocs, preload, value, &merge_preloads(preload, value, &1)) {assocs, throughs} {:through, _, through} -> through = through |> Enum.reverse() |> Enum.reduce({fields, query, sub_preloads}, &{nil, nil, [{&1, &2}]}) |> elem(2) expand(schema, through, {assocs, Map.put(throughs, preload, info)}) end end) end defp merge_preloads(_preload, {info, _, nil, left}, {info, take, query, right}), do: {info, take, query, left ++ right} defp merge_preloads(_preload, {info, take, query, left}, {info, _, nil, right}), do: {info, take, query, left ++ right} defp merge_preloads(preload, {info, _, left, _}, {info, _, right, _}) do raise ArgumentError, "cannot preload `#{preload}` as it has been supplied more than once " <> "with different queries: #{inspect left} and #{inspect right}" end # Since there is some ambiguity between assoc and queries. # We reimplement this function here for nice error messages. defp association_from_schema!(schema, assoc) do schema.__schema__(:association, assoc) || raise ArgumentError, "schema #{inspect schema} does not have association #{inspect assoc}#{maybe_module(assoc)}" end defp maybe_module(assoc) do case Atom.to_string(assoc) do "Elixir." <> _ -> " (if you were trying to pass a schema as a query to preload, " <> "you have to explicitly convert it to a query by doing `from x in #{inspect assoc}` " <> "or by calling Ecto.Queryable.to_query/1)" _ -> "" end end defp reraise(exception) do reraise exception, Enum.reject(System.stacktrace, &match?({__MODULE__, _, _, _}, &1)) end end
33.907731
108
0.616239
ffc51e33a9be89a1c3ae5a9a992834e25a739257
1,191
exs
Elixir
mix.exs
jimmybot/reverse_proxy_plug
489188bc16206b502410276b485e2721131efa16
[ "MIT" ]
null
null
null
mix.exs
jimmybot/reverse_proxy_plug
489188bc16206b502410276b485e2721131efa16
[ "MIT" ]
null
null
null
mix.exs
jimmybot/reverse_proxy_plug
489188bc16206b502410276b485e2721131efa16
[ "MIT" ]
null
null
null
defmodule ReverseProxyPlug.MixProject do use Mix.Project def project do [ app: :reverse_proxy_plug, version: "1.3.1", elixir: "~> 1.7", start_permanent: Mix.env() == :prod, deps: deps(), elixirc_paths: elixirc_paths(Mix.env()), description: description(), package: package() ] end defp description do """ An Elixir reverse proxy Plug with HTTP/2, chunked transfer and path proxying support. """ end defp package do %{ maintainers: ["Michał Szewczak"], licenses: ["MIT"], links: %{"GitHub" => "https://github.com/tallarium/reverse_proxy_plug"} } end defp elixirc_paths(:test), do: ["test/support", "lib"] defp elixirc_paths(_), do: ["lib"] # Run "mix help compile.app" to learn about applications. def application do [ extra_applications: [:logger] ] end # Run "mix help deps" to learn about dependencies. defp deps do [ {:plug, "~> 1.6"}, {:cowboy, "~> 2.4"}, {:httpoison, "~> 1.2"}, {:credo, "~> 1.0", only: [:dev, :test]}, {:mox, "~> 0.4", only: :test}, {:ex_doc, "~> 0.19", only: :dev} ] end end
22.055556
77
0.564232
ffc5a86450284e705ae8fdc1cebbcca622b5314e
3,183
exs
Elixir
test/html_test.exs
fhunleth/excoveralls
6332f4073a4a209d061382780ab702f5c7092ce2
[ "MIT" ]
null
null
null
test/html_test.exs
fhunleth/excoveralls
6332f4073a4a209d061382780ab702f5c7092ce2
[ "MIT" ]
null
null
null
test/html_test.exs
fhunleth/excoveralls
6332f4073a4a209d061382780ab702f5c7092ce2
[ "MIT" ]
1
2019-03-22T07:30:42.000Z
2019-03-22T07:30:42.000Z
defmodule ExCoveralls.HtmlTest do use ExUnit.Case import Mock import ExUnit.CaptureIO alias ExCoveralls.Html @file_name "excoveralls.html" @file_size 20212 @test_output_dir "cover_test/" @test_template_path "lib/templates/html/htmlcov/" @content "defmodule Test do\n def test do\n end\nend\n" @counts [0, 1, nil, nil] @source_info [%{name: "test/fixtures/test.ex", source: @content, coverage: @counts }] @stats_result "" <> "----------------\n" <> "COV FILE LINES RELEVANT MISSED\n" <> " 50.0% test/fixtures/test.ex 4 2 1\n" <> "[TOTAL] 50.0%\n" <> "----------------\n" setup do path = Path.expand(@file_name, @test_output_dir) # Assert does not exist prior to write assert(File.exists?(path) == false) on_exit fn -> if File.exists?(path) do # Ensure removed after test File.rm!(path) File.rmdir!(@test_output_dir) end end {:ok, report: path} end test "generate stats information with output_dir parameter", %{report: report} do assert capture_io(fn -> Html.execute(@source_info, [output_dir: @test_output_dir]) end) =~ @stats_result assert(File.read!(report) =~ "id='test/fixtures/test.ex'") %{size: size} = File.stat! report assert(size == @file_size) end test_with_mock "generate stats information", %{report: report}, ExCoveralls.Settings, [], [ get_coverage_options: fn -> %{"output_dir" => @test_output_dir, "template_path" => @test_template_path} end, get_file_col_width: fn -> 40 end, get_print_summary: fn -> true end, get_print_files: fn -> true end ] do assert capture_io(fn -> Html.execute(@source_info) end) =~ @stats_result assert(File.read!(report) =~ "id='test/fixtures/test.ex'") %{size: size} = File.stat! 
report assert(size == @file_size) end test_with_mock "Exit status code is 1 when actual coverage does not reach the minimum", ExCoveralls.Settings, [ get_coverage_options: fn -> coverage_options(100) end, get_file_col_width: fn -> 40 end, get_print_summary: fn -> true end, get_print_files: fn -> true end ] do output = capture_io(fn -> assert catch_exit(Html.execute(@source_info)) == {:shutdown, 1} end) assert String.ends_with?(output, "\e[31m\e[1mFAILED: Expected minimum coverage of 100%, got 50%.\e[0m\n") end test_with_mock "Exit status code is 0 when actual coverage reaches the minimum", ExCoveralls.Settings, [ get_coverage_options: fn -> coverage_options(49.9) end, get_file_col_width: fn -> 40 end, get_print_summary: fn -> true end, get_print_files: fn -> true end ] do assert capture_io(fn -> Html.execute(@source_info) end) =~ @stats_result end defp coverage_options(minimum_coverage) do %{ "minimum_coverage" => minimum_coverage, "output_dir" => @test_output_dir, "template_path" => @test_template_path } end end
31.205882
116
0.610116
ffc5f51d706a055b09ee9fe325cb2e7b4fb0866e
4,964
ex
Elixir
clients/cloud_error_reporting/lib/google_api/cloud_error_reporting/v1beta1/model/error_group_stats.ex
nuxlli/elixir-google-api
ecb8679ac7282b7dd314c3e20c250710ec6a7870
[ "Apache-2.0" ]
null
null
null
clients/cloud_error_reporting/lib/google_api/cloud_error_reporting/v1beta1/model/error_group_stats.ex
nuxlli/elixir-google-api
ecb8679ac7282b7dd314c3e20c250710ec6a7870
[ "Apache-2.0" ]
null
null
null
clients/cloud_error_reporting/lib/google_api/cloud_error_reporting/v1beta1/model/error_group_stats.ex
nuxlli/elixir-google-api
ecb8679ac7282b7dd314c3e20c250710ec6a7870
[ "Apache-2.0" ]
1
2020-11-10T16:58:27.000Z
2020-11-10T16:58:27.000Z
# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the &quot;License&quot;); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an &quot;AS IS&quot; BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This class is auto generated by the swagger code generator program. # https://github.com/swagger-api/swagger-codegen.git # Do not edit the class manually. defmodule GoogleApi.CloudErrorReporting.V1beta1.Model.ErrorGroupStats do @moduledoc """ Data extracted for a specific group based on certain filter criteria, such as a given time period and/or service filter. ## Attributes - affectedServices ([ServiceContext]): Service contexts with a non-zero error count for the given filter criteria. This list can be truncated if multiple services are affected. Refer to &#x60;num_affected_services&#x60; for the total count. Defaults to: `null`. - affectedUsersCount (String.t): Approximate number of affected users in the given group that match the filter criteria. Users are distinguished by data in the &#x60;ErrorContext&#x60; of the individual error events, such as their login name or their remote IP address in case of HTTP requests. The number of affected users can be zero even if the number of errors is non-zero if no data was provided from which the affected user could be deduced. Users are counted based on data in the request context that was provided in the error report. If more users are implicitly affected, such as due to a crash of the whole service, this is not reflected here. Defaults to: `null`. 
- count (String.t): Approximate total number of events in the given group that match the filter criteria. Defaults to: `null`. - firstSeenTime (DateTime.t): Approximate first occurrence that was ever seen for this group and which matches the given filter criteria, ignoring the time_range that was specified in the request. Defaults to: `null`. - group (ErrorGroup): Group data that is independent of the filter criteria. Defaults to: `null`. - lastSeenTime (DateTime.t): Approximate last occurrence that was ever seen for this group and which matches the given filter criteria, ignoring the time_range that was specified in the request. Defaults to: `null`. - numAffectedServices (integer()): The total number of services with a non-zero error count for the given filter criteria. Defaults to: `null`. - representative (ErrorEvent): An arbitrary event that is chosen as representative for the whole group. The representative event is intended to be used as a quick preview for the whole group. Events in the group are usually sufficiently similar to each other such that showing an arbitrary representative provides insight into the characteristics of the group as a whole. Defaults to: `null`. - timedCounts ([TimedCount]): Approximate number of occurrences over time. Timed counts returned by ListGroups are guaranteed to be: - Inside the requested time interval - Non-overlapping, and - Ordered by ascending time. Defaults to: `null`. 
""" use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :affectedServices => list(GoogleApi.CloudErrorReporting.V1beta1.Model.ServiceContext.t()), :affectedUsersCount => any(), :count => any(), :firstSeenTime => DateTime.t(), :group => GoogleApi.CloudErrorReporting.V1beta1.Model.ErrorGroup.t(), :lastSeenTime => DateTime.t(), :numAffectedServices => any(), :representative => GoogleApi.CloudErrorReporting.V1beta1.Model.ErrorEvent.t(), :timedCounts => list(GoogleApi.CloudErrorReporting.V1beta1.Model.TimedCount.t()) } field( :affectedServices, as: GoogleApi.CloudErrorReporting.V1beta1.Model.ServiceContext, type: :list ) field(:affectedUsersCount) field(:count) field(:firstSeenTime, as: DateTime) field(:group, as: GoogleApi.CloudErrorReporting.V1beta1.Model.ErrorGroup) field(:lastSeenTime, as: DateTime) field(:numAffectedServices) field(:representative, as: GoogleApi.CloudErrorReporting.V1beta1.Model.ErrorEvent) field(:timedCounts, as: GoogleApi.CloudErrorReporting.V1beta1.Model.TimedCount, type: :list) end defimpl Poison.Decoder, for: GoogleApi.CloudErrorReporting.V1beta1.Model.ErrorGroupStats do def decode(value, options) do GoogleApi.CloudErrorReporting.V1beta1.Model.ErrorGroupStats.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.CloudErrorReporting.V1beta1.Model.ErrorGroupStats do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
63.641026
676
0.761483
ffc6078dbb0dc0d0f3d36d5b4c3868c20ec6421c
800
exs
Elixir
test/orders/report_test.exs
Sup3r-Us3r/exlivery
7d8c5c0011f53cdca1fc8345e1b267355995ac35
[ "MIT" ]
null
null
null
test/orders/report_test.exs
Sup3r-Us3r/exlivery
7d8c5c0011f53cdca1fc8345e1b267355995ac35
[ "MIT" ]
null
null
null
test/orders/report_test.exs
Sup3r-Us3r/exlivery
7d8c5c0011f53cdca1fc8345e1b267355995ac35
[ "MIT" ]
null
null
null
defmodule ExliveryTest.Orders.ReportTest do use ExUnit.Case import ExliveryTest.Support.Factory alias Exlivery.Orders.Agent, as: OrderAgent alias Exlivery.Orders.Report describe "create/1" do setup do OrderAgent.start_link(%{}) :ok end test "creates the report file" do :order |> build() |> OrderAgent.save() :order |> build() |> OrderAgent.save() Report.create("report_test.csv") response = File.read!("report_test.csv") expected_response = "12345678910,pizza,1,35.3,76.30\n" <> "12345678910,hamburguer,2,20.50,76.30\n" <> "12345678910,pizza,1,35.3,76.30\n" <> "12345678910,hamburguer,2,20.50,76.30\n" assert response == expected_response end end end
20.512821
53
0.61625
ffc630dca66df42a47750587808a9b9189393da5
487
ex
Elixir
lib/rumbl_web/views/error_view.ex
brunorafa/rumbl
910e6ecfaae8da8e54da9e67871a02885c2f383f
[ "MIT" ]
1
2021-05-30T20:57:51.000Z
2021-05-30T20:57:51.000Z
lib/rumbl_web/views/error_view.ex
brunorafa/rumbl
910e6ecfaae8da8e54da9e67871a02885c2f383f
[ "MIT" ]
2
2021-03-09T19:04:16.000Z
2021-05-10T16:20:10.000Z
lib/rumbl_web/views/error_view.ex
brunorafa/rumbl
910e6ecfaae8da8e54da9e67871a02885c2f383f
[ "MIT" ]
1
2020-07-17T14:48:52.000Z
2020-07-17T14:48:52.000Z
defmodule RumblWeb.ErrorView do use RumblWeb, :view # If you want to customize a particular status code # for a certain format, you may uncomment below. # def render("500.html", _assigns) do # "Internal Server Error" # end # By default, Phoenix returns the status message from # the template name. For example, "404.html" becomes # "Not Found". def template_not_found(template, _assigns) do Phoenix.Controller.status_message_from_template(template) end end
28.647059
61
0.73306
ffc63a29892b383b28de23ad0783df7f58931ee9
201
ex
Elixir
lib/playground/northwind/model.ex
evadne/ets-playground
de7f2e4d512be4de6510c8978c1f0b25d7720df2
[ "MIT" ]
27
2019-04-10T15:25:21.000Z
2021-11-22T14:28:56.000Z
lib/playground/northwind/model.ex
evadne/ets-playground
de7f2e4d512be4de6510c8978c1f0b25d7720df2
[ "MIT" ]
null
null
null
lib/playground/northwind/model.ex
evadne/ets-playground
de7f2e4d512be4de6510c8978c1f0b25d7720df2
[ "MIT" ]
2
2019-05-07T21:45:14.000Z
2020-07-15T11:55:17.000Z
defmodule Playground.Northwind.Model do defmacro __using__(_) do parent = __MODULE__ quote do use Ecto.Schema import Ecto.Changeset alias unquote(parent) end end end
16.75
39
0.681592
ffc6b45176cefd2f0df6e81314768b81b967cf18
794
ex
Elixir
apps/blunt/test/support/custom_dispatch_strategy/custom_command_handler.ex
blunt-elixir/blunt
a88b88984022db7ba2110204248fdb541121e3a0
[ "MIT" ]
1
2022-03-07T11:54:47.000Z
2022-03-07T11:54:47.000Z
apps/blunt/test/support/custom_dispatch_strategy/custom_command_handler.ex
elixir-cqrs/cqrs_tools
afbf82da522a10d2413547a46f316ed3aadebba5
[ "MIT" ]
null
null
null
apps/blunt/test/support/custom_dispatch_strategy/custom_command_handler.ex
elixir-cqrs/cqrs_tools
afbf82da522a10d2413547a46f316ed3aadebba5
[ "MIT" ]
null
null
null
defmodule Blunt.CustomDispatchStrategy.CustomCommandHandler do @type user :: map() @type command :: struct() @type context :: Blunt.DispatchContext.command_context() @callback before_dispatch(command, context) :: {:ok, context()} | {:error, any()} @callback handle_authorize(user, command, context) :: {:ok, context()} | {:error, any()} | :error @callback handle_dispatch(command, context) :: any() defmacro __using__(_opts) do quote do @behaviour Blunt.CustomDispatchStrategy.CustomCommandHandler @impl true def handle_authorize(_user, _command, context), do: {:ok, context} @impl true def before_dispatch(_command, context), do: {:ok, context} defoverridable handle_authorize: 3, before_dispatch: 2 end end end
30.538462
99
0.683879
ffc6c878cc716eaf3771cd81d3223496c0f0b409
9,737
exs
Elixir
test/client_test.exs
sekiyama58/sentry-elixir
1c1d31178eaffcedb1ccb3d9c48695f29962735a
[ "MIT" ]
502
2016-09-03T14:23:53.000Z
2022-03-23T17:36:37.000Z
test/client_test.exs
sekiyama58/sentry-elixir
1c1d31178eaffcedb1ccb3d9c48695f29962735a
[ "MIT" ]
350
2016-08-29T18:53:26.000Z
2022-03-27T15:45:38.000Z
test/client_test.exs
sekiyama58/sentry-elixir
1c1d31178eaffcedb1ccb3d9c48695f29962735a
[ "MIT" ]
147
2016-09-22T13:30:57.000Z
2022-03-14T13:24:14.000Z
defmodule Sentry.ClientTest do use ExUnit.Case import ExUnit.CaptureLog import Sentry.TestEnvironmentHelper require Logger alias Sentry.{Client, Envelope} test "authorization" do modify_env(:sentry, dsn: "https://public:secret@app.getsentry.com/1") {_endpoint, public_key, private_key} = Client.get_dsn() assert Client.authorization_header(public_key, private_key) =~ ~r/^Sentry sentry_version=5, sentry_client=sentry-elixir\/#{ Application.spec(:sentry, :vsn) }, sentry_timestamp=\d{10}, sentry_key=public, sentry_secret=secret$/ end test "authorization without secret" do modify_env(:sentry, dsn: "https://public@app.getsentry.com/1") {_endpoint, public_key, private_key} = Client.get_dsn() assert Client.authorization_header(public_key, private_key) =~ ~r/^Sentry sentry_version=5, sentry_client=sentry-elixir\/#{ Application.spec(:sentry, :vsn) }, sentry_timestamp=\d{10}, sentry_key=public$/ end test "get dsn with default config" do modify_env(:sentry, dsn: "https://public:secret@app.getsentry.com/1") assert {"https://app.getsentry.com:443/api/1/envelope/", "public", "secret"} = Sentry.Client.get_dsn() end test "get dsn with system config" do modify_env(:sentry, dsn: {:system, "SYSTEM_KEY"}) modify_system_env(%{"SYSTEM_KEY" => "https://public:secret@app.getsentry.com/1"}) assert {"https://app.getsentry.com:443/api/1/envelope/", "public", "secret"} = Sentry.Client.get_dsn() end test "errors on bad public keys" do modify_env(:sentry, dsn: "https://app.getsentry.com/1") assert {:error, :invalid_dsn} = Sentry.Client.get_dsn() end test "errors on non-integer project_id" do modify_env(:sentry, dsn: "https://public:secret@app.getsentry.com/Mitchell") assert {:error, :invalid_dsn} = Sentry.Client.get_dsn() end test "errors on no project_id" do modify_env(:sentry, dsn: "https://public:secret@app.getsentry.com") assert {:error, :invalid_dsn} = Sentry.Client.get_dsn() end test "errors on nil dsn" do modify_env(:sentry, dsn: nil) assert {:error, :invalid_dsn} = Sentry.Client.get_dsn() 
end test "errors on atom dsn" do modify_env(:sentry, dsn: :error) assert {:error, :invalid_dsn} = Sentry.Client.get_dsn() end test "logs api errors" do bypass = Bypass.open() Bypass.expect(bypass, fn conn -> {:ok, _body, conn} = Plug.Conn.read_body(conn) assert conn.request_path == "/api/1/envelope/" assert conn.method == "POST" conn |> Plug.Conn.put_resp_header( "X-Sentry-Error", "Creation of this event was denied due to rate limiting." ) |> Plug.Conn.resp(400, "Something bad happened") end) modify_env(:sentry, dsn: "http://public:secret@localhost:#{bypass.port}/1") try do apply(Event, :not_a_function, []) rescue e -> assert capture_log(fn -> Sentry.capture_exception(e) end) =~ ~r/400.*Creation of this event was denied due to rate limiting/ end end test "errors when attempting to report invalid JSON" do modify_env(:sentry, dsn: "http://public:secret@localhost:3000/1") unencodable_event = %Sentry.Event{message: "error", level: {:a, :b}} capture_log(fn -> assert {:error, {:invalid_json, _}} = Sentry.Client.send_event(unencodable_event) end) end test "calls anonymous before_send_event" do bypass = Bypass.open() Bypass.expect(bypass, fn conn -> {:ok, body, conn} = Plug.Conn.read_body(conn) event = body |> Envelope.from_binary!() |> Envelope.event() assert event.extra == %{"key" => "value"} assert event.user["id"] == 1 assert event.stacktrace.frames == [] Plug.Conn.resp(conn, 200, ~s<{"id": "340"}>) end) modify_env( :sentry, dsn: "http://public:secret@localhost:#{bypass.port}/1", before_send_event: fn e -> metadata = Map.new(Logger.metadata()) {user_id, rest_metadata} = Map.pop(metadata, :user_id) %{e | extra: Map.merge(e.extra, rest_metadata), user: Map.put(e.user, :id, user_id)} end ) Logger.metadata(key: "value", user_id: 1) try do apply(Event, :not_a_function, []) rescue e -> assert capture_log(fn -> Sentry.capture_exception(e, result: :sync) end) end end test "calls MFA before_send_event" do bypass = Bypass.open() Bypass.expect(bypass, fn conn -> {:ok, body, conn} 
= Plug.Conn.read_body(conn) event = body |> Envelope.from_binary!() |> Envelope.event() assert event.extra == %{"key" => "value", "user_id" => 1} Plug.Conn.resp(conn, 200, ~s<{"id": "340"}>) end) modify_env( :sentry, dsn: "http://public:secret@localhost:#{bypass.port}/1", before_send_event: {Sentry.BeforeSendEventTest, :before_send_event} ) Logger.metadata(key: "value", user_id: 1) try do apply(Event, :not_a_function, []) rescue e -> assert capture_log(fn -> Sentry.capture_exception(e, result: :sync) end) end end test "falsey before_send_event does not send event" do modify_env( :sentry, before_send_event: {Sentry.BeforeSendEventTest, :before_send_event_ignore_arithmetic} ) try do :rand.uniform() + "1" rescue e -> capture_log(fn -> assert Sentry.capture_exception(e, result: :sync) == :excluded end) end end test "calls anonymous after_send_event synchronously" do bypass = Bypass.open() Bypass.expect(bypass, fn conn -> {:ok, _body, conn} = Plug.Conn.read_body(conn) Plug.Conn.resp(conn, 200, ~s<{"id": "340"}>) end) modify_env( :sentry, dsn: "http://public:secret@localhost:#{bypass.port}/1", after_send_event: fn _e, _r -> Logger.error("AFTER_SEND_EVENT") end ) try do apply(Event, :not_a_function, []) rescue e -> assert capture_log(fn -> Sentry.capture_exception(e, result: :sync) end) =~ "AFTER_SEND_EVENT" end end test "calls anonymous after_send_event asynchronously" do bypass = Bypass.open() Bypass.expect(bypass, fn conn -> {:ok, _body, conn} = Plug.Conn.read_body(conn) Plug.Conn.resp(conn, 200, ~s<{"id": "340"}>) end) modify_env( :sentry, dsn: "http://public:secret@localhost:#{bypass.port}/1", after_send_event: fn _e, _r -> Logger.error("AFTER_SEND_EVENT") end ) try do apply(Event, :not_a_function, []) rescue e -> assert capture_log(fn -> {:ok, task} = Sentry.capture_exception(e, result: :async) Task.await(task) end) =~ "AFTER_SEND_EVENT" end end test "sends event with sample_rate of 1" do bypass = Bypass.open() Bypass.expect(bypass, fn conn -> {:ok, body, conn} = 
Plug.Conn.read_body(conn) event = body |> Envelope.from_binary!() |> Envelope.event() assert Enum.count(event.stacktrace.frames) > 0 Plug.Conn.resp(conn, 200, ~s<{"id": "340"}>) end) modify_env( :sentry, dsn: "http://public:secret@localhost:#{bypass.port}/1" ) try do apply(Event, :not_a_function, []) rescue e -> {:ok, _} = Sentry.capture_exception( e, stacktrace: __STACKTRACE__, result: :sync, sample_rate: 1 ) end end test "does not send event with sample_rate of 0" do bypass = Bypass.open() Bypass.expect(bypass, fn conn -> {:ok, _body, conn} = Plug.Conn.read_body(conn) Plug.Conn.resp(conn, 200, ~s<{"id": "340"}>) end) modify_env( :sentry, dsn: "http://public:secret@localhost:#{bypass.port}/1" ) try do apply(Event, :not_a_function, []) rescue e -> {:ok, _} = Sentry.capture_exception(e, result: :sync, sample_rate: 1) Bypass.down(bypass) :unsampled = Sentry.capture_exception(e, result: :sync, sample_rate: 0.0) end end test "logs errors at configured log_level" do bypass = Bypass.open() pid = self() Bypass.expect(bypass, fn conn -> {:ok, _body, conn} = Plug.Conn.read_body(conn) assert conn.request_path == "/api/1/envelope/" assert conn.method == "POST" conn = conn |> Plug.Conn.put_resp_header( "X-Sentry-Error", "Creation of this event was denied due to various reasons." ) |> Plug.Conn.resp(400, "Something bad happened") send(pid, "API called") conn end) modify_env( :sentry, dsn: "http://public:secret@localhost:#{bypass.port}/1", log_level: :error ) capture_log(fn -> try do apply(Event, :not_a_function, []) rescue e -> {:ok, task} = Sentry.capture_exception( e, stacktrace: __STACKTRACE__, result: :async ) assert_receive "API called" Task.shutdown(task) end end) =~ "[error] Failed to send Sentry event" end test "logs JSON parsing errors at configured log_level" do assert capture_log(fn -> Sentry.capture_message("something happened", extra: %{metadata: [keyword: "list"]}) end) =~ "Failed to send Sentry event. Unable to encode JSON" end end
26.972299
96
0.595871
ffc6cd27f8bcf0561ac52f07ecc9c7f52e1bfd2f
1,898
exs
Elixir
clients/android_publisher/mix.exs
richiboi1977/elixir-google-api
c495bb3548090eb7a63d12f6fb145ec48aecdc0b
[ "Apache-2.0" ]
1
2021-10-01T09:20:41.000Z
2021-10-01T09:20:41.000Z
clients/android_publisher/mix.exs
richiboi1977/elixir-google-api
c495bb3548090eb7a63d12f6fb145ec48aecdc0b
[ "Apache-2.0" ]
null
null
null
clients/android_publisher/mix.exs
richiboi1977/elixir-google-api
c495bb3548090eb7a63d12f6fb145ec48aecdc0b
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.AndroidPublisher.Mixfile do use Mix.Project @version "0.28.1" def project() do [ app: :google_api_android_publisher, version: @version, elixir: "~> 1.6", build_embedded: Mix.env == :prod, start_permanent: Mix.env == :prod, description: description(), package: package(), deps: deps(), source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/android_publisher" ] end def application() do [extra_applications: [:logger]] end defp deps() do [ {:google_gax, "~> 0.4"}, {:ex_doc, "~> 0.16", only: :dev} ] end defp description() do """ Google Play Android Developer API client library. Lets Android application developers access their Google Play accounts. """ end defp package() do [ files: ["lib", "mix.exs", "README*", "LICENSE"], maintainers: ["Jeff Ching", "Daniel Azuma"], licenses: ["Apache 2.0"], links: %{ "GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/android_publisher", "Homepage" => "https://developers.google.com/android-publisher" } ] end end
28.328358
124
0.665964
ffc77c9f5f28c746ff6bdafd38fa901973abe813
650
exs
Elixir
installer/mix.exs
fmterrorf/still
fd316bcc38cdf618444dbd36ec9d259fe5256a3e
[ "0BSD" ]
2
2021-02-15T07:55:38.000Z
2021-03-05T18:04:53.000Z
installer/mix.exs
mrmicahcooper/still
ba785b0b068d998d0343f73a1fd1795edbe9831c
[ "0BSD" ]
null
null
null
installer/mix.exs
mrmicahcooper/still
ba785b0b068d998d0343f73a1fd1795edbe9831c
[ "0BSD" ]
null
null
null
defmodule Still.New.MixProject do use Mix.Project @version "0.3.0" def project do [ app: :still_new, version: @version, elixir: "~> 1.10", start_permanent: Mix.env() == :prod, deps: deps(), package: [ licenses: ["ISC"], links: %{"GitHub" => "https://github.com/still-ex/still"}, files: ~w(lib priv mix.exs README.md) ], description: """ Still project generator. Provides a `mix still.new` task to bootstrap a new Still project. """ ] end def application do [extra_applications: [:logger, :eex]] end defp deps do [] end end
19.117647
71
0.556923
ffc789e03cc3eaee7474cfe23a180289bd14ab11
1,044
ex
Elixir
lib/excommerce/application.ex
roger120981/planet
a662006551d11427c08cf6cdbacd37d377bcd9c5
[ "MIT" ]
1
2019-04-01T19:14:16.000Z
2019-04-01T19:14:16.000Z
lib/excommerce/application.ex
roger120981/planet
a662006551d11427c08cf6cdbacd37d377bcd9c5
[ "MIT" ]
null
null
null
lib/excommerce/application.ex
roger120981/planet
a662006551d11427c08cf6cdbacd37d377bcd9c5
[ "MIT" ]
1
2019-03-24T01:50:48.000Z
2019-03-24T01:50:48.000Z
defmodule Excommerce.Application do use Application # See https://hexdocs.pm/elixir/Application.html # for more information on OTP Applications def start(_type, _args) do import Supervisor.Spec # Define workers and child supervisors to be supervised children = [ # Start the Ecto repository supervisor(Excommerce.Repo, []), # Start the endpoint when the application starts supervisor(ExcommerceWeb.Endpoint, []), # Start your own worker by calling: Excommerce.Worker.start_link(arg1, arg2, arg3) # worker(Excommerce.Worker, [arg1, arg2, arg3]), ] # See https://hexdocs.pm/elixir/Supervisor.html # for other strategies and supported options opts = [strategy: :one_for_one, name: Excommerce.Supervisor] Supervisor.start_link(children, opts) end # Tell Phoenix to update the endpoint configuration # whenever the application is updated. def config_change(changed, _new, removed) do ExcommerceWeb.Endpoint.config_change(changed, removed) :ok end end
32.625
88
0.719349
ffc7bc8373a8bad7828407514e73434cbf3235da
3,040
exs
Elixir
test/controllers/inbox_controller_test.exs
rubencaro/pedro
b550b3af700962283fa9e3985e1dcc2da2e14d0d
[ "MIT" ]
null
null
null
test/controllers/inbox_controller_test.exs
rubencaro/pedro
b550b3af700962283fa9e3985e1dcc2da2e14d0d
[ "MIT" ]
null
null
null
test/controllers/inbox_controller_test.exs
rubencaro/pedro
b550b3af700962283fa9e3985e1dcc2da2e14d0d
[ "MIT" ]
null
null
null
alias Pedro.Db.Messages require Pedro.Helpers, as: H defmodule Pedro.InboxControllerTest do use Pedro.ConnCase, async: true import Pedro.TestTools test "POST /inbox/put validates signature" do conn = build_conn() |> put_req_header("content-type", "application/json") |> post("/inbox/put", Poison.encode!(%{any: "thing"})) assert response(conn, 401) == "unauthorized" end test "GET /inbox/put validates signature" do conn = get build_conn(), "/inbox/put?#{URI.encode_query(%{any: "thing"})}" assert response(conn, 401) == "unauthorized" end test "GET /inbox validates signature" do conn = get build_conn(), "/inbox?#{URI.encode_query(%{any: "thing"})}" assert response(conn, 401) == "unauthorized" end test "POST /inbox/put" do in_test_transaction do deserialized_input = post_message %{WIP: true, from: "me", to: "you", message: "work in progress"} assert [row] = Messages.all assert ^deserialized_input = Poison.decode!(row.json_payload) end end test "GET /inbox/put" do in_test_transaction do input = %{WIP: "true", from: "me", to: "you", message: "work in progress"} # booleans on query string come as text! deserialized_input = input |> Poison.encode! |> Poison.decode! data = build_conn() |> signed_get("/inbox/put", input) |> assert_valid_json assert %{"valid" => true, "request" => ^deserialized_input, "response" => "OK"} = data assert [row] = Messages.all assert ^deserialized_input = Poison.decode!(row.json_payload) end end test "GET /inbox" do in_test_transaction do # empty inbox req = %{to: "you"} deserialized_req = req |> Poison.encode! |> Poison.decode! 
data = build_conn() |> signed_get("/inbox", req) |> assert_valid_json assert %{"valid" => true, "request" => ^deserialized_req, "response" => []} = data # put some Messages into the inbox assert [] = Messages.all post_message %{WIP: true, from: "me", to: "you", message: "work in progress"} post_message %{WIP: true, from: "me", to: "you", message: "work in progress"} post_message %{WIP: true, from: "me2", to: "you", message: "work in progress"} post_message %{WIP: true, from: "me", to: "you2", message: "work in progress"} assert [_,_,_,_] = Messages.all # gets only messages to: "you" data = build_conn() |> signed_get("/inbox", req) |> assert_valid_json assert %{"valid" => true, "request" => ^deserialized_req, "response" => rows} = data assert Enum.count(rows) == 3 assert Enum.all?(rows, fn(r)-> match?(%{"to" => "you"}, r) end) end end defp post_message(input) do deserialized_input = input |> Poison.encode! |> Poison.decode! data = build_conn() |> signed_post("/inbox/put", input) |> assert_valid_json assert %{"valid" => true, "request" => ^deserialized_input, "response" => "OK"} = data deserialized_input end end
34.157303
122
0.623355
ffc7d573f449119ae9f31c0c4f38afe7fbb8e585
1,576
ex
Elixir
apps/hello_web/lib/hello_web/endpoint.ex
hui-ad/institute
28242d9d324d710a0e70678ec2d79099f1d3a98d
[ "MIT" ]
4
2019-06-12T19:05:34.000Z
2019-08-18T15:02:56.000Z
apps/hello_web/lib/hello_web/endpoint.ex
hui-ad/institute
28242d9d324d710a0e70678ec2d79099f1d3a98d
[ "MIT" ]
33
2019-06-12T18:59:21.000Z
2021-03-31T15:45:22.000Z
apps/hello_web/lib/hello_web/endpoint.ex
hui-ad/institute
28242d9d324d710a0e70678ec2d79099f1d3a98d
[ "MIT" ]
1
2019-06-16T09:38:08.000Z
2019-06-16T09:38:08.000Z
defmodule HelloWeb.Endpoint do use Phoenix.Endpoint, otp_app: :hello_web # The session will be stored in the cookie and signed, # this means its contents can be read but not tampered with. # Set :encryption_salt if you would also like to encrypt it. @session_options [ store: :cookie, key: "_hello_web_key", signing_salt: "TjPrBKSP" ] socket "/hello/socket", HelloWeb.UserSocket, websocket: true, longpoll: false socket "/hello/live", Phoenix.LiveView.Socket, websocket: [connect_info: [session: @session_options]] # Serve at "/" the static files from "priv/static" directory. # # You should set gzip to true if you are running phx.digest # when deploying your static files in production. plug Plug.Static, at: "/hello", from: :hello_web, gzip: false, only: ~w(css fonts images js favicon.ico robots.txt) # Code reloading can be explicitly enabled under the # :code_reloader configuration of your endpoint. if code_reloading? do socket "/hello/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket plug Phoenix.LiveReloader plug Phoenix.CodeReloader end plug Phoenix.LiveDashboard.RequestLogger, param_key: "request_logger", cookie_key: "request_logger" plug Plug.RequestId plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint] plug Plug.Parsers, parsers: [:urlencoded, :multipart, :json], pass: ["*/*"], json_decoder: Phoenix.json_library() plug Plug.MethodOverride plug Plug.Head plug Plug.Session, @session_options plug HelloWeb.Router end
29.185185
103
0.718909
ffc814c3186e6e74a106b2244c9869e71316f9b5
1,505
ex
Elixir
web/controllers/tag_controller.ex
slurmulon/hone
9cc817fef06fbec564b18a11faa472ecf902aa62
[ "MIT" ]
null
null
null
web/controllers/tag_controller.ex
slurmulon/hone
9cc817fef06fbec564b18a11faa472ecf902aa62
[ "MIT" ]
null
null
null
web/controllers/tag_controller.ex
slurmulon/hone
9cc817fef06fbec564b18a11faa472ecf902aa62
[ "MIT" ]
null
null
null
defmodule Hone.TagController do use Hone.Web, :controller alias Hone.Tag plug :scrub_params, "tag" when action in [:create, :update] def index(conn, _params) do tags = Repo.all(Tag) render(conn, "index.json", tags: tags) end def create(conn, %{"tag" => tag_params}) do changeset = Tag.changeset(%Tag{}, tag_params) case Repo.insert(changeset) do {:ok, tag} -> conn |> put_status(:created) |> put_resp_header("location", tag_path(conn, :show, tag)) |> render("show.json", tag: tag) {:error, changeset} -> conn |> put_status(:unprocessable_entity) |> render(Hone.ChangesetView, "error.json", changeset: changeset) end end def show(conn, %{"id" => id}) do tag = Repo.get!(Tag, id) render(conn, "show.json", tag: tag) end def update(conn, %{"id" => id, "tag" => tag_params}) do tag = Repo.get!(Tag, id) changeset = Tag.changeset(tag, tag_params) case Repo.update(changeset) do {:ok, tag} -> render(conn, "show.json", tag: tag) {:error, changeset} -> conn |> put_status(:unprocessable_entity) |> render(Hone.ChangesetView, "error.json", changeset: changeset) end end def delete(conn, %{"id" => id}) do tag = Repo.get!(Tag, id) # Here we use delete! (with a bang) because we expect # it to always work (and if it does not, it will raise). Repo.delete!(tag) send_resp(conn, :no_content, "") end end
25.948276
73
0.597342
ffc82c644b92e0f44568caefe47165a541e059da
625
ex
Elixir
lib/sanbase/insight/post_comment.ex
sitedata/sanbase2
8da5e44a343288fbc41b68668c6c80ae8547d557
[ "MIT" ]
null
null
null
lib/sanbase/insight/post_comment.ex
sitedata/sanbase2
8da5e44a343288fbc41b68668c6c80ae8547d557
[ "MIT" ]
1
2021-07-24T16:26:03.000Z
2021-07-24T16:26:03.000Z
lib/sanbase/insight/post_comment.ex
sitedata/sanbase2
8da5e44a343288fbc41b68668c6c80ae8547d557
[ "MIT" ]
null
null
null
defmodule Sanbase.Insight.PostComment do @moduledoc ~s""" A mapping table connecting comments and posts. This module is used to create, update, delete and fetch insight comments. """ use Ecto.Schema import Ecto.Changeset alias Sanbase.Comment alias Sanbase.Insight.Post schema "post_comments_mapping" do belongs_to(:comment, Comment) belongs_to(:post, Post) timestamps() end def changeset(%__MODULE__{} = mapping, attrs \\ %{}) do mapping |> cast(attrs, [:post_id, :comment_id]) |> validate_required([:post_id, :comment_id]) |> unique_constraint(:comment_id) end end
22.321429
75
0.7008
ffc881c635def9010cf2d8cdc68e57cf41bbfe2b
250
ex
Elixir
lib/umbra/behaviour/default.ex
scorsi/Umbra
16036742db98e39fdc5bf50bc63f6db7cb0e7e92
[ "MIT" ]
null
null
null
lib/umbra/behaviour/default.ex
scorsi/Umbra
16036742db98e39fdc5bf50bc63f6db7cb0e7e92
[ "MIT" ]
7
2020-06-07T08:26:09.000Z
2020-06-07T08:38:36.000Z
lib/umbra/behaviour/default.ex
scorsi/Umbra
16036742db98e39fdc5bf50bc63f6db7cb0e7e92
[ "MIT" ]
null
null
null
defmodule Umbra.Behaviour.Default do @moduledoc """ This is the default behaviour of `GenServer`. It only does `use GenServer` behind the scene. """ @doc false defmacro __using__(_) do quote do use GenServer end end end
16.666667
48
0.676
ffc8999d67d522026eedb2e60920ad40bd5a5688
1,965
ex
Elixir
lib/still/compiler/template_helpers/link.ex
hgg/still
85477022d4c1e3c36d5f8da7a5a2ac0bc1bd09c9
[ "0BSD" ]
null
null
null
lib/still/compiler/template_helpers/link.ex
hgg/still
85477022d4c1e3c36d5f8da7a5a2ac0bc1bd09c9
[ "0BSD" ]
null
null
null
lib/still/compiler/template_helpers/link.ex
hgg/still
85477022d4c1e3c36d5f8da7a5a2ac0bc1bd09c9
[ "0BSD" ]
null
null
null
defmodule Still.Compiler.TemplateHelpers.Link do @moduledoc """ Renders an anchor HTML tag. """ import Still.Utils alias Still.Compiler.TemplateHelpers.ContentTag alias Still.SourceFile @doc """ Uses `Still.Compiler.TemplateHelpers.ContentTag` to render an anchor tag. Requires a `:to` option, the target URL. If this is a relative path, the website's base URL is prepended. If it is an absolute path the `target: "_blank"` and `rel: "noopener noreferrer"` options are added to be passed to `Still.Compiler.TemplateHelpers.ContentTag` `render` function. If there is a `do` block, it uses the current file preprocessor to render `markup`. Note that this is on demand, outside `Still.Compiler.CompilationStage`. """ def render(opts, metadata, do: markup) do preprocessor = metadata[:preprocessor] %{content: content} = preprocessor.render(%SourceFile{ content: markup, input_file: metadata[:input_file], metadata: metadata |> Enum.into(%{}) }) render(content, metadata, opts) end def render(text, _metadata, opts) do {url, opts} = pop_url(opts) ContentTag.render("a", text, [{:href, url} | opts]) end defp pop_url(opts) do {to, opts} = Keyword.pop!(opts, :to) case URI.parse(to) do %URI{host: nil, scheme: nil, path: path} when not is_nil(path) -> to = to |> add_base_url() |> modernize() {to, opts} %URI{scheme: scheme} when scheme in ["http", "https"] -> opts = add_absolute_path_opts(opts) {to, opts} _ -> {to, opts} end end defp add_base_url("/" <> path), do: add_base_url(path) defp add_base_url(path), do: get_base_url() <> "/" <> path defp add_absolute_path_opts(opts) do opts |> Keyword.put_new(:target, "_blank") |> Keyword.put_new(:rel, "noopener noreferrer") end defp modernize(path) do path |> String.replace_suffix("index.html", "") end end
27.676056
78
0.652926
ffc8a623a9b3752325dc3dcd14961fa81e9c038f
2,640
ex
Elixir
clients/service_user/lib/google_api/service_user/v1/model/authentication_rule.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
1
2021-12-20T03:40:53.000Z
2021-12-20T03:40:53.000Z
clients/service_user/lib/google_api/service_user/v1/model/authentication_rule.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
1
2020-08-18T00:11:23.000Z
2020-08-18T00:44:16.000Z
clients/service_user/lib/google_api/service_user/v1/model/authentication_rule.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
1
2020-10-04T10:12:44.000Z
2020-10-04T10:12:44.000Z
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.ServiceUser.V1.Model.AuthenticationRule do @moduledoc """ Authentication rules for the service. By default, if a method has any authentication requirements, every request must include a valid credential matching one of the requirements. It's an error to include more than one kind of credential in a single request. If a method doesn't have any auth requirements, request credentials will be ignored. ## Attributes * `allowWithoutCredential` (*type:* `boolean()`, *default:* `nil`) - If true, the service accepts API keys without any other credential. * `oauth` (*type:* `GoogleApi.ServiceUser.V1.Model.OAuthRequirements.t`, *default:* `nil`) - The requirements for OAuth credentials. * `requirements` (*type:* `list(GoogleApi.ServiceUser.V1.Model.AuthRequirement.t)`, *default:* `nil`) - Requirements for additional authentication providers. * `selector` (*type:* `String.t`, *default:* `nil`) - Selects the methods to which this rule applies. Refer to selector for syntax details. 
""" use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :allowWithoutCredential => boolean(), :oauth => GoogleApi.ServiceUser.V1.Model.OAuthRequirements.t(), :requirements => list(GoogleApi.ServiceUser.V1.Model.AuthRequirement.t()), :selector => String.t() } field(:allowWithoutCredential) field(:oauth, as: GoogleApi.ServiceUser.V1.Model.OAuthRequirements) field(:requirements, as: GoogleApi.ServiceUser.V1.Model.AuthRequirement, type: :list) field(:selector) end defimpl Poison.Decoder, for: GoogleApi.ServiceUser.V1.Model.AuthenticationRule do def decode(value, options) do GoogleApi.ServiceUser.V1.Model.AuthenticationRule.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.ServiceUser.V1.Model.AuthenticationRule do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
40
161
0.739394
ffc8b172b68140e4df433040b814aa4c2820e3a8
59
ex
Elixir
app/lib/noodl_web/views/ticket_view.ex
nathanjohnson320/noodl
2e449aab15b54fc5a1dc45ebf4b79e7b64b7c967
[ "MIT" ]
1
2021-01-20T20:00:50.000Z
2021-01-20T20:00:50.000Z
app/lib/noodl_web/views/ticket_view.ex
nathanjohnson320/noodl
2e449aab15b54fc5a1dc45ebf4b79e7b64b7c967
[ "MIT" ]
null
null
null
app/lib/noodl_web/views/ticket_view.ex
nathanjohnson320/noodl
2e449aab15b54fc5a1dc45ebf4b79e7b64b7c967
[ "MIT" ]
null
null
null
defmodule NoodlWeb.TicketView do use NoodlWeb, :view end
14.75
32
0.79661
ffc8b58564fd4ceee06b71e673a451a909be72ae
1,532
ex
Elixir
example/test/support/data_case.ex
shufo/phoenix_sail
ad15f404ca3665efe3d345e5e0e7f9390fb972f1
[ "MIT" ]
1
2021-02-01T12:01:15.000Z
2021-02-01T12:01:15.000Z
example/test/support/data_case.ex
shufo/phoenix_sail
ad15f404ca3665efe3d345e5e0e7f9390fb972f1
[ "MIT" ]
null
null
null
example/test/support/data_case.ex
shufo/phoenix_sail
ad15f404ca3665efe3d345e5e0e7f9390fb972f1
[ "MIT" ]
1
2021-02-01T12:01:46.000Z
2021-02-01T12:01:46.000Z
defmodule Example.DataCase do @moduledoc """ This module defines the setup for tests requiring access to the application's data layer. You may define functions here to be used as helpers in your tests. Finally, if the test case interacts with the database, we enable the SQL sandbox, so changes done to the database are reverted at the end of every test. If you are using PostgreSQL, you can even run database tests asynchronously by setting `use Example.DataCase, async: true`, although this option is not recommended for other databases. """ use ExUnit.CaseTemplate using do quote do alias Example.Repo import Ecto import Ecto.Changeset import Ecto.Query import Example.DataCase end end setup tags do :ok = Ecto.Adapters.SQL.Sandbox.checkout(Example.Repo) unless tags[:async] do Ecto.Adapters.SQL.Sandbox.mode(Example.Repo, {:shared, self()}) end :ok end @doc """ A helper that transforms changeset errors into a map of messages. assert {:error, changeset} = Accounts.create_user(%{password: "short"}) assert "password is too short" in errors_on(changeset).password assert %{password: ["password is too short"]} = errors_on(changeset) """ def errors_on(changeset) do Ecto.Changeset.traverse_errors(changeset, fn {message, opts} -> Regex.replace(~r"%{(\w+)}", message, fn _, key -> opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string() end) end) end end
27.357143
77
0.688642
ffc8c0ac0127e707be06e0e31741b57907cdb554
1,586
ex
Elixir
lib/bamboo/api_error.ex
clairton/bamboo
f2e4ddf177bbcf2c90d010c53c2fa9251fdf1b01
[ "MIT" ]
null
null
null
lib/bamboo/api_error.ex
clairton/bamboo
f2e4ddf177bbcf2c90d010c53c2fa9251fdf1b01
[ "MIT" ]
null
null
null
lib/bamboo/api_error.ex
clairton/bamboo
f2e4ddf177bbcf2c90d010c53c2fa9251fdf1b01
[ "MIT" ]
1
2020-04-21T12:36:34.000Z
2020-04-21T12:36:34.000Z
defmodule Bamboo.ApiError do @moduledoc """ Error used to represent a problem when sending emails through an external email service API. """ defexception [:message] @doc """ Raises an `ApiError` with the given `message` or `service_name`, `response` and `params`. An extra error message can be added using a fourth parameter `extra_message`. ## Examples iex> raise_api_error("Error message") ** (Bamboo.ApiError) Error Message iex> raise_api_error(service_name, response, params) ** (Bamboo.ApiError) There was a problem sending the email through the <service_name> API. Here is the response: "<response>" Here are the params we sent: "<params>" iex> raise_api_error(service_name, response, params, extra_message) ** (Bamboo.ApiError) There was a problem sending the email through the <service_name> API. Here is the response: "<response>" Here are the params we sent: "<params>" <extra_message> """ def raise_api_error(message), do: raise(__MODULE__, message: message) def raise_api_error(service_name, response, params, extra_message \\ "") do message = """ There was a problem sending the email through the #{service_name} API. Here is the response: #{inspect(response, limit: :infinity)} Here are the params we sent: #{inspect(params, limit: :infinity)} """ message = case extra_message do "" -> message em -> message <> "\n#{em}\n" end raise(__MODULE__, message: message) end end
24.030303
96
0.656368
ffc8e156437f63ad576c07acab47ab1e0899fc86
1,760
ex
Elixir
clients/tag_manager/lib/google_api/tag_manager/v2/model/list_workspaces_response.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/tag_manager/lib/google_api/tag_manager/v2/model/list_workspaces_response.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
1
2020-12-18T09:25:12.000Z
2020-12-18T09:25:12.000Z
clients/tag_manager/lib/google_api/tag_manager/v2/model/list_workspaces_response.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
1
2020-10-04T10:12:44.000Z
2020-10-04T10:12:44.000Z
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.TagManager.V2.Model.ListWorkspacesResponse do @moduledoc """ A list of workspaces in a container. ## Attributes * `nextPageToken` (*type:* `String.t`, *default:* `nil`) - Continuation token for fetching the next page of results. * `workspace` (*type:* `list(GoogleApi.TagManager.V2.Model.Workspace.t)`, *default:* `nil`) - All Workspaces of a GTM Container. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :nextPageToken => String.t(), :workspace => list(GoogleApi.TagManager.V2.Model.Workspace.t()) } field(:nextPageToken) field(:workspace, as: GoogleApi.TagManager.V2.Model.Workspace, type: :list) end defimpl Poison.Decoder, for: GoogleApi.TagManager.V2.Model.ListWorkspacesResponse do def decode(value, options) do GoogleApi.TagManager.V2.Model.ListWorkspacesResponse.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.TagManager.V2.Model.ListWorkspacesResponse do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
35.2
132
0.7375
ffc8f5b0b12369f91365e8f74334e3baee4ff13f
289
exs
Elixir
priv/repo/migrations/20200222223105_create_npcs.exs
Cadiac/titeenit-backend
51db7a1f93dc78a769bb309b94b1b893cefdcdc9
[ "MIT" ]
null
null
null
priv/repo/migrations/20200222223105_create_npcs.exs
Cadiac/titeenit-backend
51db7a1f93dc78a769bb309b94b1b893cefdcdc9
[ "MIT" ]
null
null
null
priv/repo/migrations/20200222223105_create_npcs.exs
Cadiac/titeenit-backend
51db7a1f93dc78a769bb309b94b1b893cefdcdc9
[ "MIT" ]
null
null
null
defmodule Titeenipeli.Repo.Migrations.CreateNpcs do use Ecto.Migration def change do create table(:npcs) do add :name, :string add :image_url, :string add :max_hp, :integer add :hp, :integer add :level, :integer timestamps() end end end
18.0625
51
0.633218
ffc8fdb30f115511ca54e30a71852378531d8c1a
8,510
ex
Elixir
lib/scenic/component/input/slider.ex
fhunleth/scenic
02fc61916ebe0cb01bf436832409226cad2d1f8b
[ "Apache-2.0" ]
null
null
null
lib/scenic/component/input/slider.ex
fhunleth/scenic
02fc61916ebe0cb01bf436832409226cad2d1f8b
[ "Apache-2.0" ]
null
null
null
lib/scenic/component/input/slider.ex
fhunleth/scenic
02fc61916ebe0cb01bf436832409226cad2d1f8b
[ "Apache-2.0" ]
null
null
null
defmodule Scenic.Component.Input.Slider do @moduledoc """ Add a slider to a graph ## Data `{ extents, initial_value}` * `extents` gives the range of values. It can take several forms... * `{min, max}` If `min` and `max` are integers, then the slider value will be an integer. * `{min, max}` If `min` and `max` are floats, then the slider value will be an float. * `[a, b, c]` A list of terms. The value will be one of the terms * `initial_value` Sets the initial value (and position) of the slider. It must make sense with the extents you passed in. ## Messages When the state of the slider changes, it sends an event message to the host scene in the form of: `{:value_changed, id, value}` ### Options Sliders honor the following list of options. ## Styles Sliders honor the following styles * `:hidden` - If `false` the component is rendered. If `true`, it is skipped. The default is `false`. * `:theme` - The color set used to draw. See below. The default is `:dark` ## Theme Sliders work well with the following predefined themes: `:light`, `:dark` To pass in a custom theme, supply a map with at least the following entries: * `:border` - the color of the slider line * `:thumb` - the color of slider thumb ## Usage You should add/modify components via the helper functions in [`Scenic.Components`](Scenic.Components.html#slider/3) ## Examples The following example creates a numeric slider and positions it on the screen. graph |> slider({{0,100}, 0}, id: :num_slider, translate: {20,20}) The following example creates a list slider and positions it on the screen. 
graph |> slider({[ :white, :cornflower_blue, :green, :chartreuse ], :cornflower_blue}, id: :slider_id, translate: {20,20}) """ use Scenic.Component, has_children: false alias Scenic.Graph alias Scenic.ViewPort alias Scenic.Primitive.Style.Theme import Scenic.Primitives, only: [{:rect, 3}, {:line, 3}, {:rrect, 3}, {:update_opts, 2}] # import IEx @height 18 @mid_height trunc(@height / 2) @radius 5 @btn_size 16 @line_width 4 @default_width 300 # ============================================================================ # setup # -------------------------------------------------------- @doc false def info(data) do """ #{IO.ANSI.red()}Slider data must be: {extents, initial_value} #{IO.ANSI.yellow()}Received: #{inspect(data)} The initial_value must make sense with the extents Examples: {{0,100}, 0} {{0.0, 1.57}, 0.3} {[:red, :green, :blue, :orange], :green} #{IO.ANSI.default_color()} """ end # -------------------------------------------------------- @doc false def verify({ext, initial} = data) do verify_initial(ext, initial) |> case do true -> {:ok, data} _ -> :invalid_data end end def verify(_), do: :invalid_data # -------------------------------------------------------- defp verify_initial({min, max}, init) when is_integer(min) and is_integer(max) and is_integer(init) and init >= min and init <= max, do: true defp verify_initial({min, max}, init) when is_float(min) and is_float(max) and is_number(init) and init >= min and init <= max, do: true defp verify_initial(list_ext, init) when is_list(list_ext), do: Enum.member?(list_ext, init) defp verify_initial(_, _), do: false # -------------------------------------------------------- @doc false def init({extents, value}, opts) do id = opts[:id] styles = opts[:styles] # theme is passed in as an inherited style theme = (styles[:theme] || Theme.preset(:primary)) |> Theme.normalize() # get button specific styles width = styles[:width] || @default_width graph = Graph.build() |> rect({width, @height}, fill: :clear, t: {0, -1}) 
|> line({{0, @mid_height}, {width, @mid_height}}, stroke: {@line_width, theme.border}) |> rrect({@btn_size, @btn_size, @radius}, fill: theme.thumb, id: :thumb, t: {0, 1}) |> update_slider_position(value, extents, width) state = %{ graph: graph, value: value, extents: extents, width: width, id: id, tracking: false } push_graph(graph) {:ok, state} end # ============================================================================ # -------------------------------------------------------- @doc false def handle_input({:cursor_button, {:left, :press, _, {x, _}}}, context, state) do state = state |> Map.put(:tracking, true) state = update_slider(x, state) ViewPort.capture_input(context, [:cursor_button, :cursor_pos]) # %{state | graph: graph}} {:noreply, state} end # -------------------------------------------------------- def handle_input({:cursor_button, {:left, :release, _, _}}, context, state) do state = Map.put(state, :tracking, false) ViewPort.release_input(context, [:cursor_button, :cursor_pos]) # %{state | graph: graph}} {:noreply, state} end # -------------------------------------------------------- def handle_input({:cursor_pos, {x, _}}, _context, %{tracking: true} = state) do state = update_slider(x, state) {:noreply, state} end # -------------------------------------------------------- def handle_input(_event, _context, state) do {:noreply, state} end # ============================================================================ # internal utilities # {text_color, box_background, border_color, pressed_color, checkmark_color} defp update_slider( x, %{ graph: graph, value: old_value, extents: extents, width: width, id: id, tracking: true } = state ) do # pin x to be inside the width x = cond do x < 0 -> 0 x > width -> width true -> x end # calc the new value based on its position across the slider new_value = calc_value_by_percent(extents, x / width) # update the slider position graph = update_slider_position(graph, new_value, extents, width) if new_value != 
old_value do send_event({:value_changed, id, new_value}) end %{state | graph: graph, value: new_value} end # -------------------------------------------------------- defp update_slider_position(graph, new_value, extents, width) do # calculate the slider position new_x = calc_slider_position(width, extents, new_value) # apply the x position Graph.modify(graph, :thumb, fn p -> update_opts(p, translate: {new_x, 0}) end) |> push_graph() end # -------------------------------------------------------- # calculate the position if the extents are numeric defp calc_slider_position(width, extents, value) defp calc_slider_position(width, {min, max}, value) when value < min do calc_slider_position(width, {min, max}, min) end defp calc_slider_position(width, {min, max}, value) when value > max do calc_slider_position(width, {min, max}, max) end defp calc_slider_position(width, {min, max}, value) do width = width - @btn_size percent = (value - min) / (max - min) trunc(width * percent) end # -------------------------------------------------------- # calculate the position if the extents is a list of arbitrary values defp calc_slider_position(width, extents, value) defp calc_slider_position(width, ext, value) when is_list(ext) do max_index = Enum.count(ext) - 1 index = case Enum.find_index(ext, fn v -> v == value end) do nil -> raise "Slider value not in extents list" index -> index end # calc position of slider width = width - @btn_size percent = index / max_index round(width * percent) end # -------------------------------------------------------- defp calc_value_by_percent({min, max}, percent) when is_integer(min) and is_integer(max) do round((max - min) * percent) + min end defp calc_value_by_percent({min, max}, percent) when is_float(min) and is_float(max) do (max - min) * percent + min end defp calc_value_by_percent(extents, percent) when is_list(extents) do max_index = Enum.count(extents) - 1 index = round(max_index * percent) Enum.at(extents, index) end end
27.993421
96
0.562162
ffc906c741f19bbb48ace261f4850c72401f3aea
15,900
ex
Elixir
lib/oli_web/controllers/delivery_controller.ex
ctipperCMU/oli-torus
231e8afc9ec6179e6081f327f4ebeb497703bfb8
[ "MIT" ]
null
null
null
lib/oli_web/controllers/delivery_controller.ex
ctipperCMU/oli-torus
231e8afc9ec6179e6081f327f4ebeb497703bfb8
[ "MIT" ]
null
null
null
lib/oli_web/controllers/delivery_controller.ex
ctipperCMU/oli-torus
231e8afc9ec6179e6081f327f4ebeb497703bfb8
[ "MIT" ]
null
null
null
defmodule OliWeb.DeliveryController do use OliWeb, :controller alias Oli.Delivery.Sections alias Oli.Delivery.Sections.{Section, SectionInvites} alias Oli.Publishing alias Oli.Institutions alias Lti_1p3.Tool.{PlatformRoles, ContextRoles} alias Oli.Accounts alias Oli.Accounts.Author alias Oli.Repo import Oli.Utils require Logger @allow_configure_section_roles [ PlatformRoles.get_role(:system_administrator), PlatformRoles.get_role(:institution_administrator), PlatformRoles.get_role(:institution_instructor), ContextRoles.get_role(:context_administrator), ContextRoles.get_role(:context_instructor) ] plug(Oli.Plugs.RegistrationCaptcha when action in [:process_create_and_link_account_user]) def index(conn, _params) do user = conn.assigns.current_user lti_params = conn.assigns.lti_params lti_roles = lti_params["https://purl.imsglobal.org/spec/lti/claim/roles"] context_roles = ContextRoles.get_roles_by_uris(lti_roles) platform_roles = PlatformRoles.get_roles_by_uris(lti_roles) roles = MapSet.new(context_roles ++ platform_roles) allow_configure_section_roles = MapSet.new(@allow_configure_section_roles) # allow section configuration if user has any of the allowed roles allow_configure_section = MapSet.intersection(roles, allow_configure_section_roles) |> MapSet.size() > 0 section = Sections.get_section_from_lti_params(lti_params) case section do # author account has not been linked nil when allow_configure_section -> render_getting_started(conn) nil -> render_course_not_configured(conn) # section has been configured section -> if user.research_opt_out === nil do render_research_consent(conn) else redirect_to_page_delivery(conn, section) end end end def open_and_free_index(conn, _params) do user = conn.assigns.current_user sections = Sections.list_user_open_and_free_sections(user) render(conn, "open_and_free_index.html", sections: sections, user: user) end defp render_course_not_configured(conn) do render(conn, "course_not_configured.html") end defp 
render_getting_started(conn) do render(conn, "getting_started.html") end defp render_research_consent(conn) do conn |> assign(:opt_out, nil) |> render("research_consent.html") end def select_project(conn, params) do user = conn.assigns.current_user lti_params = conn.assigns.lti_params issuer = lti_params["iss"] client_id = lti_params["aud"] deployment_id = lti_params["https://purl.imsglobal.org/spec/lti/claim/deployment_id"] {institution, _registration, _deployment} = Institutions.get_institution_registration_deployment(issuer, client_id, deployment_id) render(conn, "select_project.html", author: user.author, sources: Publishing.retrieve_visible_sources(user, institution), remix: Map.get(params, "remix", "false") ) end defp redirect_to_page_delivery(conn, section) do redirect(conn, to: Routes.page_delivery_path(conn, :index, section.slug)) end def research_consent(conn, %{"consent" => consent}) do user = conn.assigns.current_user lti_params = conn.assigns.lti_params section = Sections.get_section_from_lti_params(lti_params) case Accounts.update_user(user, %{research_opt_out: consent !== "true"}) do {:ok, _} -> redirect_to_page_delivery(conn, section) {:error, _} -> conn |> put_flash(:error, "Unable to persist research consent option") |> redirect_to_page_delivery(section) end end def link_account(conn, _params) do # sign out current author account conn = conn |> use_pow_config(:author) |> Pow.Plug.delete() conn |> render_link_account_form() end def render_link_account_form(conn, opts \\ []) do title = Keyword.get(opts, :title, "Link Existing Account") changeset = Keyword.get(opts, :changeset, Author.noauth_changeset(%Author{})) action = Keyword.get(opts, :action, Routes.delivery_path(conn, :process_link_account_user)) create_account_path = Keyword.get( opts, :create_account_path, Routes.delivery_path(conn, :create_and_link_account) ) cancel_path = Keyword.get(opts, :cancel_path, Routes.delivery_path(conn, :index)) conn |> assign(:title, title) |> assign(:changeset, 
changeset) |> assign(:action, action) |> assign(:create_account_path, create_account_path) |> assign(:cancel_path, cancel_path) |> assign(:link_account, true) |> put_view(OliWeb.Pow.SessionView) |> render("new.html") end def process_link_account_provider(conn, %{"provider" => provider}) do conn = conn |> merge_assigns( callback_url: Routes.authoring_delivery_url(conn, :link_account_callback, provider) ) PowAssent.Plug.authorize_url(conn, provider, conn.assigns.callback_url) |> case do {:ok, url, conn} -> conn |> redirect(external: url) end end def process_link_account_user(conn, %{"user" => author_params}) do conn |> use_pow_config(:author) |> Pow.Plug.authenticate_user(author_params) |> case do {:ok, conn} -> conn |> put_flash( :info, Pow.Phoenix.Controller.messages(conn, Pow.Phoenix.Messages).signed_in(conn) ) |> redirect( to: Pow.Phoenix.Controller.routes(conn, Pow.Phoenix.Routes).after_sign_in_path(conn) ) {:error, conn} -> conn |> put_flash( :error, Pow.Phoenix.Controller.messages(conn, Pow.Phoenix.Messages).invalid_credentials(conn) ) |> render_link_account_form( changeset: PowAssent.Plug.change_user(conn, %{}, author_params) ) end end def link_account_callback(conn, %{"provider" => provider} = params) do conn = conn |> merge_assigns( callback_url: Routes.authoring_delivery_url(conn, :link_account_callback, provider) ) PowAssent.Plug.callback_upsert(conn, provider, params, conn.assigns.callback_url) |> (fn {:ok, conn} -> %{current_user: current_user, current_author: current_author} = conn.assigns conn = case Accounts.link_user_author_account(current_user, current_author) do {:ok, _user} -> conn |> put_flash(:info, "Account '#{current_author.email}' is now linked") _ -> conn |> put_flash( :error, "Failed to link user and author accounts for '#{current_author.email}'" ) end {:ok, conn} end).() |> PowAssent.Phoenix.AuthorizationController.respond_callback() end def create_and_link_account(conn, _params) do # sign out current author account conn = conn |> 
use_pow_config(:author) |> Pow.Plug.delete() conn |> render_create_and_link_form() end def process_create_and_link_account_user(conn, %{"user" => user_params}) do conn |> use_pow_config(:author) |> Pow.Plug.create_user(user_params) |> case do {:ok, _user, conn} -> conn |> put_flash( :info, Pow.Phoenix.Controller.messages(conn, Pow.Phoenix.Messages).user_has_been_created(conn) ) |> redirect( to: Pow.Phoenix.Controller.routes(conn, Pow.Phoenix.Routes).after_registration_path(conn) ) {:error, changeset, conn} -> conn |> render_create_and_link_form(changeset: changeset) end end def render_create_and_link_form(conn, opts \\ []) do title = Keyword.get(opts, :title, "Create and Link Account") changeset = Keyword.get(opts, :changeset, Author.noauth_changeset(%Author{})) action = Keyword.get( opts, :action, Routes.delivery_path(conn, :process_create_and_link_account_user) ) sign_in_path = Keyword.get(opts, :sign_in_path, Routes.delivery_path(conn, :link_account)) cancel_path = Keyword.get(opts, :cancel_path, Routes.delivery_path(conn, :index)) conn |> assign(:title, title) |> assign(:changeset, changeset) |> assign(:action, action) |> assign(:sign_in_path, sign_in_path) |> assign(:cancel_path, cancel_path) |> assign(:link_account, true) |> put_view(OliWeb.Pow.RegistrationView) |> render("new.html") end def create_section(conn, %{"source_id" => source_id} = params) do lti_params = conn.assigns.lti_params user = conn.assigns.current_user # guard against creating a new section if one already exists Repo.transaction(fn -> case Sections.get_section_from_lti_params(lti_params) do nil -> issuer = lti_params["iss"] client_id = lti_params["aud"] deployment_id = lti_params["https://purl.imsglobal.org/spec/lti/claim/deployment_id"] {institution, _registration, deployment} = Institutions.get_institution_registration_deployment(issuer, client_id, deployment_id) # create section, section resources and enroll instructor {:ok, section} = case source_id do "publication:" <> publication_id 
-> create_from_publication( String.to_integer(publication_id), user, institution, lti_params, deployment ) "product:" <> product_id -> create_from_product( String.to_integer(product_id), user, institution, lti_params, deployment ) end if is_remix?(params) do conn |> redirect(to: Routes.live_path(conn, OliWeb.Delivery.RemixSection, section.slug)) else conn |> redirect(to: Routes.delivery_path(conn, :index)) end section -> # a section already exists, redirect to index conn |> put_flash(:error, "Unable to create new section. This section already exists.") |> redirect_to_page_delivery(section) end end) |> case do {:ok, conn} -> conn {:error, error} -> {_error_id, error_msg} = log_error("Failed to create new section", error) conn |> put_flash(:error, error_msg) |> redirect(to: Routes.delivery_path(conn, :index)) end end defp create_from_product(product_id, user, institution, lti_params, deployment) do blueprint = Oli.Delivery.Sections.get_section!(product_id) Repo.transaction(fn -> # calculate a cost, if an error, fallback to the amount in the blueprint # TODO: we may need to move this to AFTER a remix if the cost calculation factors # in the percentage project usage amount = case Oli.Delivery.Paywall.calculate_product_cost(blueprint, institution) do {:ok, amount} -> amount _ -> blueprint.amount end {:ok, section} = Oli.Delivery.Sections.Blueprint.duplicate(blueprint, %{ type: :enrollable, timezone: institution.timezone, title: lti_params["https://purl.imsglobal.org/spec/lti/claim/context"]["title"], context_id: lti_params["https://purl.imsglobal.org/spec/lti/claim/context"]["id"], institution_id: institution.id, lti_1p3_deployment_id: deployment.id, blueprint_id: blueprint.id, amount: amount }) # Enroll this user with their proper roles (instructor) lti_roles = lti_params["https://purl.imsglobal.org/spec/lti/claim/roles"] context_roles = ContextRoles.get_roles_by_uris(lti_roles) Sections.enroll(user.id, section.id, context_roles) section end) end defp 
create_from_publication(publication_id, user, institution, lti_params, deployment) do publication = Publishing.get_publication!(publication_id) Repo.transaction(fn -> {:ok, section} = Sections.create_section(%{ type: :enrollable, timezone: institution.timezone, title: lti_params["https://purl.imsglobal.org/spec/lti/claim/context"]["title"], context_id: lti_params["https://purl.imsglobal.org/spec/lti/claim/context"]["id"], institution_id: institution.id, base_project_id: publication.project_id, lti_1p3_deployment_id: deployment.id }) {:ok, %Section{id: section_id}} = Sections.create_section_resources(section, publication) # Enroll this user with their proper roles (instructor) lti_roles = lti_params["https://purl.imsglobal.org/spec/lti/claim/roles"] context_roles = ContextRoles.get_roles_by_uris(lti_roles) Sections.enroll(user.id, section_id, context_roles) section end) end def signout(conn, _params) do conn |> use_pow_config(:user) |> Pow.Plug.delete() |> redirect(to: Routes.static_page_path(conn, :index)) end def signin(conn, %{"section" => section}) do conn |> use_pow_config(:user) |> Pow.Plug.delete() |> redirect(to: Routes.pow_session_path(conn, :new, section: section)) end def create_account(conn, %{"section" => section}) do conn |> use_pow_config(:user) |> Pow.Plug.delete() |> redirect(to: Routes.pow_registration_path(conn, :new, section: section)) end def enroll(conn, _params) do section = conn.assigns.section |> Oli.Repo.preload([:base_project]) # redirect to course index if user is already signed in and enrolled with {:ok, user} <- conn.assigns.current_user |> trap_nil, true <- Sections.is_enrolled?(user.id, section.slug) do redirect(conn, to: Routes.page_delivery_path(conn, :index, section.slug)) else _ -> render(conn, "enroll.html", section: section) end end def enroll_independent(conn, %{"section_invite_slug" => invite_slug}) do section_invite = SectionInvites.get_section_invite(invite_slug) if !SectionInvites.link_expired?(section_invite) do conn |> 
assign(:section, SectionInvites.get_section_by_invite_slug(invite_slug)) |> enroll(%{}) else conn |> redirect(to: Routes.live_path(OliWeb.Endpoint, OliWeb.Sections.InvalidSectionInviteView)) end end defp recaptcha_verified?(g_recaptcha_response) do Oli.Utils.Recaptcha.verify(g_recaptcha_response) == {:success, true} end def create_user(conn, params) do g_recaptcha_response = Map.get(params, "g-recaptcha-response", "") if Oli.Utils.LoadTesting.enabled?() or recaptcha_verified?(g_recaptcha_response) do section = conn.assigns.section case current_or_guest_user(conn) do {:ok, user} -> Accounts.update_user_platform_roles(user, [ PlatformRoles.get_role(:institution_learner) ]) conn |> OliWeb.Pow.PowHelpers.use_pow_config(:user) |> Pow.Plug.create(user) |> redirect(to: Routes.page_delivery_path(conn, :index, section.slug)) {:error, _} -> render(conn, "enroll.html", error: "Something went wrong, please try again") end else render(conn, "enroll.html", error: "ReCaptcha failed, please try again") end end defp current_or_guest_user(conn) do case conn.assigns.current_user do nil -> Accounts.create_guest_user() user -> {:ok, user} end end defp is_remix?(params) do case Map.get(params, "remix") do "true" -> true _ -> false end end end
31.237721
98
0.649057
ffc929051ac80b062a8b79143e720b0f0a447499
1,623
ex
Elixir
clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_zone_audience_enabled.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_zone_audience_enabled.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/apigee/lib/google_api/apigee/v1/model/google_cloud_apigee_v1_zone_audience_enabled.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1ZoneAudienceEnabled do @moduledoc """ ## Attributes * `audiencesEnabled` (*type:* `boolean()`, *default:* `nil`) - Boolean flag that specifies whether the audience feature is enabled. * `zmsId` (*type:* `String.t`, *default:* `nil`) - ID of the zone. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :audiencesEnabled => boolean(), :zmsId => String.t() } field(:audiencesEnabled) field(:zmsId) end defimpl Poison.Decoder, for: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1ZoneAudienceEnabled do def decode(value, options) do GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1ZoneAudienceEnabled.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.Apigee.V1.Model.GoogleCloudApigeeV1ZoneAudienceEnabled do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
32.46
135
0.736907
ffc92ba7cdead30946ed1267eb819bae73c0053a
724
ex
Elixir
web/gettext.ex
mule/phoenix-elm-auth
6b7b3019c53673bf3b4b2b41fa3eec8f75d08947
[ "MIT" ]
1
2016-07-06T02:06:31.000Z
2016-07-06T02:06:31.000Z
web/gettext.ex
mule/phoenix-elm-auth
6b7b3019c53673bf3b4b2b41fa3eec8f75d08947
[ "MIT" ]
1
2017-04-07T12:58:38.000Z
2017-04-07T12:58:38.000Z
web/gettext.ex
mule/phoenix-elm-auth
6b7b3019c53673bf3b4b2b41fa3eec8f75d08947
[ "MIT" ]
null
null
null
defmodule PhoenixAuthKata.Gettext do @moduledoc """ A module providing Internationalization with a gettext-based API. By using [Gettext](http://hexdocs.pm/gettext), your module gains a set of macros for translations, for example: import PhoenixAuthKata.Gettext # Simple translation gettext "Here is the string to translate" # Plural translation ngettext "Here is the string to translate", "Here are the strings to translate", 3 # Domain-based translation dgettext "errors", "Here is the error message to translate" See the [Gettext Docs](http://hexdocs.pm/gettext) for detailed usage. """ use Gettext, otp_app: :phoenixAuthKata end
28.96
71
0.687845
ffc93db8f69849bc1b79fa438d0f79fa49ef3164
355
ex
Elixir
lib/step_flow/controllers/live_workers_controller.ex
nipierre/ex_step_flow
4345ee57bd4e5eb79138df68d10579ba1b9ec6a1
[ "MIT" ]
null
null
null
lib/step_flow/controllers/live_workers_controller.ex
nipierre/ex_step_flow
4345ee57bd4e5eb79138df68d10579ba1b9ec6a1
[ "MIT" ]
null
null
null
lib/step_flow/controllers/live_workers_controller.ex
nipierre/ex_step_flow
4345ee57bd4e5eb79138df68d10579ba1b9ec6a1
[ "MIT" ]
null
null
null
defmodule StepFlow.LiveWorkersController do use StepFlow, :controller alias StepFlow.LiveWorkers action_fallback(StepFlow.FallbackController) def index(conn, params) do live_workers = LiveWorkers.list_live_workers(params) conn |> put_view(StepFlow.LiveWorkersView) |> render("index.json", live_workers: live_workers) end end
22.1875
56
0.766197
ffc94487fb66872e8821f9b7e8f2cc79270eef09
1,532
ex
Elixir
lib/attachments_client.ex
Mohammad-Haseeb/ex_microsoftbot
72c417ce1e6dd42cf982bf856f3b402b67cf820e
[ "MIT" ]
null
null
null
lib/attachments_client.ex
Mohammad-Haseeb/ex_microsoftbot
72c417ce1e6dd42cf982bf856f3b402b67cf820e
[ "MIT" ]
null
null
null
lib/attachments_client.ex
Mohammad-Haseeb/ex_microsoftbot
72c417ce1e6dd42cf982bf856f3b402b67cf820e
[ "MIT" ]
1
2019-11-08T13:20:11.000Z
2019-11-08T13:20:11.000Z
defmodule ExMicrosoftBot.Client.Attachments do @moduledoc """ This module provides the functions to get information related to attachments. """ import ExMicrosoftBot.Client, only: [authed_req_options: 1, deserialize_response: 2] alias ExMicrosoftBot.Models, as: Models alias ExMicrosoftBot.Client @doc """ Get AttachmentInfo structure describing the attachment views. [API Reference](https://docs.botframework.com/en-us/restapi/connector/#!/Attachments/Attachments_GetAttachmentInfo) """ @spec get_attachment(String.t, String.t) :: {:ok, Models.AttachmentInfo.t} | Client.error_type def get_attachment(service_url, attachment_id) do api_endpoint = "#{attachments_endpoint(service_url)}/#{attachment_id}" HTTPotion.get(api_endpoint, authed_req_options(api_endpoint)) |> deserialize_response(&Models.AttachmentInfo.parse/1) end @doc """ Get the named view as binary content. [API Reference](https://docs.botframework.com/en-us/restapi/connector/#!/Attachments/Attachments_GetAttachment) """ @spec get_attachment_view(String.t, String.t, String.t) :: {:ok, binary} | Client.error_type def get_attachment_view(service_url, attachment_id, view_id) do api_endpoint = "#{attachments_endpoint(service_url)}/#{attachment_id}/views/#{view_id}" HTTPotion.get(api_endpoint, authed_req_options(api_endpoint)) |> deserialize_response(&(&1)) # Return the body as is because it is binary end defp attachments_endpoint(service_url) do "#{service_url}/v3/attachments" end end
42.555556
179
0.761097
ffc952ca23519e04eb5567f35f9a6421326ee37f
251
ex
Elixir
lib/mango_web/controllers/page_controller.ex
jacruzca/mango
9978dc609ed1fc86aa40386e35fae06cb95f80fc
[ "MIT" ]
null
null
null
lib/mango_web/controllers/page_controller.ex
jacruzca/mango
9978dc609ed1fc86aa40386e35fae06cb95f80fc
[ "MIT" ]
null
null
null
lib/mango_web/controllers/page_controller.ex
jacruzca/mango
9978dc609ed1fc86aa40386e35fae06cb95f80fc
[ "MIT" ]
null
null
null
defmodule MangoWeb.PageController do use MangoWeb, :controller alias Mango.Catalog def index(conn, _params) do seasonal_products = Catalog.list_seasonal_products render conn, "index.html", seasonal_products: seasonal_products end end
25.1
67
0.784861
ffc966e8ad22e9fba6237308a3aec6fbd32803f6
16,538
ex
Elixir
lib/phoenix_live_view/helpers.ex
sbacarob/phoenix_live_view
dce42778671fa85801e7a537092b860edbb9759d
[ "MIT" ]
null
null
null
lib/phoenix_live_view/helpers.ex
sbacarob/phoenix_live_view
dce42778671fa85801e7a537092b860edbb9759d
[ "MIT" ]
null
null
null
lib/phoenix_live_view/helpers.ex
sbacarob/phoenix_live_view
dce42778671fa85801e7a537092b860edbb9759d
[ "MIT" ]
null
null
null
defmodule Phoenix.LiveView.Helpers do @moduledoc """ A collection of helpers to be imported into your views. """ alias Phoenix.LiveView.{Component, Socket, Static} @doc false def live_patch(opts) when is_list(opts) do live_link("patch", Keyword.fetch!(opts, :do), Keyword.delete(opts, :do)) end @doc """ Generates a link that will patch the current LiveView. When navigating to the current LiveView, `c:Phoenix.LiveView.handle_params/3` is immediately invoked to handle the change of params and URL state. Then the new state is pushed to the client, without reloading the whole page while also maintaining the current scroll position. For live redirects to another LiveView, use `live_redirect/2`. ## Options * `:to` - the required path to link to. * `:replace` - the flag to replace the current history or push a new state. Defaults `false`. All other options are forwarded to the anchor tag. ## Examples <%= live_patch "home", to: Routes.page_path(@socket, :index) %> <%= live_patch "next", to: Routes.live_path(@socket, MyLive, @page + 1) %> <%= live_patch to: Routes.live_path(@socket, MyLive, dir: :asc), replace: false do %> Sort By Price <% end %> """ def live_patch(%Socket{}, _) do raise """ you are invoking live_patch/2 with a socket but a socket is not expected. If you want to live_patch/2 inside a LiveView, use push_patch/2 instead. If you are inside a template, make the sure the first argument is a string. """ end def live_patch(opts, do: block) when is_list(opts) do live_link("patch", block, opts) end def live_patch(text, opts) when is_list(opts) do live_link("patch", text, opts) end @doc false def live_redirect(opts) when is_list(opts) do live_link("redirect", Keyword.fetch!(opts, :do), Keyword.delete(opts, :do)) end @doc """ Generates a link that will redirect to a new LiveView. The current LiveView will be shut down and a new one will be mounted in its place, without reloading the whole page. 
This can also be used to remount the same LiveView, in case you want to start fresh. If you want to navigate to the same LiveView without remounting it, use `live_patch/2` instead. ## Options * `:to` - the required path to link to. * `:replace` - the flag to replace the current history or push a new state. Defaults `false`. All other options are forwarded to the anchor tag. ## Examples <%= live_redirect "home", to: Routes.page_path(@socket, :index) %> <%= live_redirect "next", to: Routes.live_path(@socket, MyLive, @page + 1) %> <%= live_redirect to: Routes.live_path(@socket, MyLive, dir: :asc), replace: false do %> Sort By Price <% end %> """ def live_redirect(%Socket{}, _) do raise """ you are invoking live_redirect/2 with a socket but a socket is not expected. If you want to live_redirect/2 inside a LiveView, use push_redirect/2 instead. If you are inside a template, make the sure the first argument is a string. """ end def live_redirect(opts, do: block) when is_list(opts) do live_link("redirect", block, opts) end def live_redirect(text, opts) when is_list(opts) do live_link("redirect", text, opts) end defp live_link(type, block_or_text, opts) do uri = Keyword.fetch!(opts, :to) replace = Keyword.get(opts, :replace, false) kind = if replace, do: "replace", else: "push" data = [phx_link: type, phx_link_state: kind] opts = opts |> Keyword.update(:data, data, &Keyword.merge(&1, data)) |> Keyword.put(:href, uri) Phoenix.HTML.Tag.content_tag(:a, Keyword.delete(opts, :to), do: block_or_text) end @doc """ Renders a LiveView within an originating plug request or within a parent LiveView. ## Options * `:session` - the map of extra session data to be serialized and sent to the client. Note that all session data currently in the connection is automatically available in LiveViews. You can use this option to provide extra data. Also note that the keys in the session are strings keys, as a reminder that data has to be serialized first. 
* `:container` - an optional tuple for the HTML tag and DOM attributes to be used for the LiveView container. For example: `{:li, style: "color: blue;"}`. By default it uses the module definition container. See the "Containers" section below for more information. * `:id` - both the DOM ID and the ID to uniquely identify a LiveView. An `:id` is automatically generated when rendering root LiveViews but it is a required option when rendering a child LiveView. * `:router` - an optional router that enables this LiveView to perform live navigation. Only a single LiveView in a page may have the `:router` set. LiveViews defined at the router with the `live` macro automatically have the `:router` option set. ## Examples # within eex template <%= live_render(@conn, MyApp.ThermostatLive) %> # within leex template <%= live_render(@socket, MyApp.ThermostatLive, id: "thermostat") %> ## Containers When a `LiveView` is rendered, its contents are wrapped in a container. By default, said container is a `div` tag with a handful of `LiveView` specific attributes. The container can be customized in different ways: * You can change the default `container` on `use Phoenix.LiveView`: use Phoenix.LiveView, container: {:tr, id: "foo-bar"} * You can override the container tag and pass extra attributes when calling `live_render` (as well as on your `live` call in your router): live_render socket, MyLiveView, container: {:tr, class: "highlight"} """ def live_render(conn_or_socket, view, opts \\ []) def live_render(%Plug.Conn{} = conn, view, opts) do case Static.render(conn, view, opts) do {:ok, content, _assigns} -> content {:stop, _} -> raise RuntimeError, "cannot redirect from a child LiveView" end end def live_render(%Socket{} = parent, view, opts) do Static.nested_render(parent, view, opts) end @doc """ Renders a `Phoenix.LiveComponent` within a parent LiveView. While `LiveView`s can be nested, each LiveView starts its own process. 
A LiveComponent provides similar functionality to LiveView, except they run in the same process as the `LiveView`, with its own encapsulated state. LiveComponent comes in two shapes, stateful and stateless. See `Phoenix.LiveComponent` for more information. ## Examples All of the `assigns` given are forwarded directly to the `live_component`: <%= live_component(@socket, MyApp.WeatherComponent, id: "thermostat", city: "Kraków") %> Note the `:id` won't necessarily be used as the DOM ID. That's up to the component. However, note that the `:id` has a special meaning: whenever an `:id` is given, the component becomes stateful. Otherwise, `:id` is always set to `nil`. """ # TODO: Deprecate the socket as argument defmacro live_component(_socket, component, assigns \\ [], do_block \\ []) do {do_block, assigns} = case {do_block, assigns} do {[do: do_block], _} -> {do_block, assigns} {_, [do: do_block]} -> {do_block, []} {_, _} -> {nil, assigns} end {assigns, inner_block} = rewrite_do(do_block, assigns, __CALLER__) quote do Phoenix.LiveView.Helpers.__live_component__( unquote(component).__live__(), unquote(assigns), unquote(inner_block) ) end end defp rewrite_do(nil, opts, _caller), do: {opts, nil} defp rewrite_do([{:->, meta, _} | _] = do_block, opts, _caller) do inner_fun = {:fn, meta, do_block} quoted = quote do fn parent_changed, arg -> var!(assigns) = unquote(__MODULE__).__render_inner_fun__(var!(assigns), parent_changed) unquote(inner_fun).(arg) end end {opts, quoted} end defp rewrite_do(do_block, opts, caller) do unless Macro.Env.has_var?(caller, {:assigns, nil}) and Macro.Env.has_var?(caller, {:changed, Phoenix.LiveView.Engine}) do raise ArgumentError, """ cannot use live_component do/end blocks because we could not find existing assigns. Please pass a `->` clause to do/end instead, for example: live_component @socket, GridComponent, entries: @entries do new_assigns -> "New entry: " <> new_assigns[:entry] end """ end # TODO: deprecate implicit assigns (i.e. 
do/end without -> should not get any assign) quoted = quote do fn changed, extra_assigns -> var!(assigns) = unquote(__MODULE__).__render_inner_do__(var!(assigns), changed, extra_assigns) unquote(do_block) end end {opts, quoted} end @doc false def __render_inner_fun__(assigns, parent_changed) do if is_nil(parent_changed) or parent_changed[:inner_block] == true do assigns else Map.put(assigns, :__changed__, %{}) end end @doc false def __render_inner_do__(assigns, parent_changed, extra_assigns) do # If the parent is tracking changes or the inner content changed, # we will keep the current __changed__ values changed = if is_nil(parent_changed) or parent_changed[:inner_block] == true do Map.get(assigns, :__changed__) else %{} end assigns = Enum.into(extra_assigns, assigns) changed = changed && for {key, _} <- extra_assigns, key != :socket, into: changed, do: {key, true} Map.put(assigns, :__changed__, changed) end @doc false def __live_component__(%{kind: :component, module: component}, assigns, inner) when is_list(assigns) or is_map(assigns) do assigns = assigns |> Map.new() |> Map.put_new(:id, nil) assigns = if inner, do: Map.put(assigns, :inner_block, inner), else: assigns id = assigns[:id] if is_nil(id) and (function_exported?(component, :handle_event, 3) or function_exported?(component, :preload, 1)) do raise "a component #{inspect(component)} that has implemented handle_event/3 or preload/1 " <> "requires an :id assign to be given" end %Component{id: id, assigns: assigns, component: component} end def __live_component__(%{kind: kind, module: module}, assigns) when is_list(assigns) or is_map(assigns) do raise "expected #{inspect(module)} to be a component, but it is a #{kind}" end @doc """ Renders the `@inner_block` assign of a component with the given `argument`. 
<%= render_block(@inner_block, value: @value) """ defmacro render_block(inner_block, argument \\ []) do quote do unquote(inner_block).(var!(changed, Phoenix.LiveView.Engine), unquote(argument)) end end @doc """ Returns the flash message from the LiveView flash assign. ## Examples <p class="alert alert-info"><%= live_flash(@flash, :info) %></p> <p class="alert alert-danger"><%= live_flash(@flash, :error) %></p> """ def live_flash(%_struct{} = other, _key) do raise ArgumentError, "live_flash/2 expects a @flash assign, got: #{inspect(other)}" end def live_flash(%{} = flash, key), do: Map.get(flash, to_string(key)) @doc """ Provides `~L` sigil with HTML safe Live EEx syntax inside source files. iex> ~L"\"" ...> Hello <%= "world" %> ...> "\"" {:safe, ["Hello ", "world", "\\n"]} """ defmacro sigil_L({:<<>>, meta, [expr]}, []) do options = [ engine: Phoenix.LiveView.Engine, file: __CALLER__.file, line: __CALLER__.line + 1, indentation: meta[:indentation] || 0 ] EEx.compile_string(expr, options) end @doc """ Returns the entry errors for an upload. The following errors may be returned: * `:too_large` - The entry exceeds the `:max_file_size` constraint * `:too_many_files` - The number of selected files exceeds the `:max_entries` constraint * `:not_accepted` - The entry does not match the `:accept` MIME types ## Examples def error_to_string(:too_large), do: "Too large" def error_to_string(:too_many_files), do: "You have selected too many files" def error_to_string(:not_accepted), do: "You have selected an unacceptable file type" <%= for entry <- @uploads.avatar.entries do %> <%= for err <- upload_errors(@uploads.avatar, entry) do %> <div class="alert alert-danger"> <%= error_to_string(err) %> </div> <% end %> <% end %> """ def upload_errors( %Phoenix.LiveView.UploadConfig{} = conf, %Phoenix.LiveView.UploadEntry{} = entry ) do for {ref, error} <- conf.errors, ref == entry.ref, do: error end @doc """ Generates an image preview on the client for a selected file. 
## Examples <%= for entry <- @uploads.avatar.entries do %> <%= live_img_preview entry, width: 75 %> <% end %> """ def live_img_preview(%Phoenix.LiveView.UploadEntry{ref: ref} = entry, opts \\ []) do opts = Keyword.merge(opts, id: "phx-preview-#{ref}", data_phx_upload_ref: entry.upload_ref, data_phx_entry_ref: ref, data_phx_hook: "Phoenix.LiveImgPreview", data_phx_update: "ignore" ) Phoenix.HTML.Tag.content_tag(:img, "", opts) end @doc """ Builds a file input tag for a LiveView upload. Options may be passed through to the tag builder for custom attributes. ## Drag and Drop Drag and drop is supported by annotating the droppable container with a `phx-drop-target` attribute pointing to the DOM ID of the file input. By default, the file input ID is the upload `ref`, so the following markup is all that is required for drag and drop support: <div class="container" phx-drop-target="<%= @uploads.avatar.ref %>"> ... <%= live_file_input @uploads.avatar %> </div> ## Examples <%= live_file_input @uploads.avatar %> """ def live_file_input(%Phoenix.LiveView.UploadConfig{} = conf, opts \\ []) do if opts[:id], do: raise(ArgumentError, "the :id cannot be overridden on a live_file_input") opts = if conf.max_entries > 1 do Keyword.put(opts, :multiple, true) else opts end preflighted_entries = for entry <- conf.entries, entry.preflighted?, do: entry done_entries = for entry <- conf.entries, entry.done?, do: entry valid? = Enum.any?(conf.entries) && Enum.empty?(conf.errors) Phoenix.HTML.Tag.content_tag( :input, "", Keyword.merge(opts, type: "file", id: conf.ref, name: conf.name, accept: if(conf.accept != :any, do: conf.accept), phx_hook: "Phoenix.LiveFileUpload", data_phx_update: "ignore", data_phx_upload_ref: conf.ref, data_phx_active_refs: Enum.map_join(conf.entries, ",", & &1.ref), data_phx_done_refs: Enum.map_join(done_entries, ",", & &1.ref), data_phx_preflighted_refs: Enum.map_join(preflighted_entries, ",", & &1.ref), data_phx_auto_upload: valid? && conf.auto_upload? 
) ) end @doc """ Renders a title tag with automatic prefix/suffix on `@page_title` updates. ## Examples <%= live_title_tag assigns[:page_title] || "Welcome", prefix: "MyApp – " %> <%= live_title_tag assigns[:page_title] || "Welcome", suffix: " – MyApp" %> """ def live_title_tag(title, opts \\ []) do title_tag(title, opts[:prefix], opts[:suffix], opts) end defp title_tag(title, nil = _prefix, "" <> suffix, _opts) do Phoenix.HTML.Tag.content_tag(:title, title <> suffix, data: [suffix: suffix]) end defp title_tag(title, "" <> prefix, nil = _suffix, _opts) do Phoenix.HTML.Tag.content_tag(:title, prefix <> title, data: [prefix: prefix]) end defp title_tag(title, "" <> pre, "" <> post, _opts) do Phoenix.HTML.Tag.content_tag(:title, pre <> title <> post, data: [prefix: pre, suffix: post]) end defp title_tag(title, _prefix = nil, _postfix = nil, []) do Phoenix.HTML.Tag.content_tag(:title, title) end defp title_tag(_title, _prefix = nil, _suffix = nil, opts) do raise ArgumentError, "live_title_tag/2 expects a :prefix and/or :suffix option, got: #{inspect(opts)}" end end
32.237817
100
0.650925
ffc96dfb294cc921d742f668c6571188a39f22c0
2,457
exs
Elixir
mix.exs
fhunleth/kiosk_system_x86_64
8bde5080f7c1bf769d830aff504ed49425692574
[ "Apache-2.0" ]
10
2017-06-22T16:06:15.000Z
2020-02-15T12:54:15.000Z
mix.exs
fhunleth/kiosk_system_x86_64
8bde5080f7c1bf769d830aff504ed49425692574
[ "Apache-2.0" ]
10
2017-09-15T05:13:21.000Z
2020-02-09T08:24:48.000Z
mix.exs
fhunleth/kiosk_system_x86_64
8bde5080f7c1bf769d830aff504ed49425692574
[ "Apache-2.0" ]
5
2017-09-14T19:59:45.000Z
2018-10-23T01:51:55.000Z
defmodule KioskSystemx8664.Mixfile do
  use Mix.Project

  @app :kiosk_system_x86_64

  # The version lives in a separate VERSION file and is read at compile time.
  @version __DIR__
           |> Path.join("VERSION")
           |> File.read!()
           |> String.trim()

  def project do
    [
      app: @app,
      version: @version,
      elixir: "~> 1.8",
      compilers: Mix.compilers() ++ [:nerves_package],
      nerves_package: nerves_package(),
      description: description(),
      package: package(),
      deps: deps(),
      aliases: [loadconfig: [&bootstrap/1]],
      docs: [extras: ["README.md"], main: "readme"]
    ]
  end

  def application, do: []

  # Starts Nerves bootstrap before delegating to the regular `loadconfig` task.
  defp bootstrap(args) do
    set_target()
    Application.start(:nerves_bootstrap)
    Mix.Task.run("loadconfig", args)
  end

  # Nerves system package definition (artifact locations, platform, checksum).
  defp nerves_package do
    [
      type: :system,
      artifact_sites: [
        {:github_releases, "letoteteam/#{@app}"}
      ],
      build_runner_opts: build_runner_opts(),
      platform: Nerves.System.BR,
      platform_config: [
        defconfig: "nerves_defconfig"
      ],
      checksum: package_files()
    ]
  end

  defp deps do
    [
      {:nerves, "~> 1.5.0", runtime: false},
      {:nerves_system_br, "1.9.2", runtime: false},
      {:nerves_toolchain_x86_64_unknown_linux_gnu, "1.2.0", runtime: false},
      {:nerves_system_linter, "~> 0.3.0", runtime: false},
      {:ex_doc, "~> 0.18", only: :dev}
    ]
  end

  defp description do
    """
    Nerves System - x86_64 Kiosk
    """
  end

  defp package do
    [
      files: package_files(),
      licenses: ["Apache 2.0"],
      links: %{"GitHub" => "https://github.com/letoteteam/#{@app}"}
    ]
  end

  # Files shipped in the Hex package; also used as the artifact checksum input.
  defp package_files do
    [
      "fwup_include",
      "lib",
      "patches",
      "priv",
      "rootfs_overlay",
      "CHANGELOG.md",
      "Config.in",
      "fwup-revert.conf",
      "fwup.conf",
      "grub.cfg",
      "LICENSE",
      "linux-4.19.defconfig",
      "logo_custom_clut224.ppm",
      "mix.exs",
      "nerves_defconfig",
      "post-build.sh",
      "post-createfs.sh",
      "README.md",
      "users_table.txt",
      "VERSION"
    ]
  end

  # Buildroot make arguments; a BR2_PRIMARY_SITE mirror is passed through
  # when set in the environment.
  defp build_runner_opts do
    case System.get_env("BR2_PRIMARY_SITE") do
      nil -> [make_args: ["PARALLEL_JOBS=8"]]
      primary_site -> [make_args: ["BR2_PRIMARY_SITE=#{primary_site}", "PARALLEL_JOBS=8"]]
    end
  end

  # Supports both newer Mix (Mix.target/1) and older Mix (MIX_TARGET env var).
  defp set_target do
    if function_exported?(Mix, :target, 1) do
      apply(Mix, :target, [:target])
    else
      System.put_env("MIX_TARGET", "target")
    end
  end
end
21.552632
77
0.576313
ffc97ef4fa5a30711c7952f2baec7434c01b723e
942
ex
Elixir
web/gettext.ex
superdev999/Phoenix-project
ab13ac9366cdd0aa9581da7faf993b11aaa5344c
[ "MIT" ]
null
null
null
web/gettext.ex
superdev999/Phoenix-project
ab13ac9366cdd0aa9581da7faf993b11aaa5344c
[ "MIT" ]
null
null
null
web/gettext.ex
superdev999/Phoenix-project
ab13ac9366cdd0aa9581da7faf993b11aaa5344c
[ "MIT" ]
null
null
null
defmodule CodeCorps.Gettext do
  @moduledoc """
  Internationalization support for the application via a gettext-based API.

  `use`-ing [Gettext](https://hexdocs.pm/gettext) injects translation macros
  into any module that imports this one:

      import CodeCorps.Gettext

      # Simple translation
      gettext "Here is the string to translate"

      # Plural translation
      ngettext "Here is the string to translate",
               "Here are the strings to translate",
               3

      # Domain-based translation
      dgettext "errors", "Here is the error message to translate"

  See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
  """

  use Gettext, otp_app: :code_corps

  # Silence Dialyzer warnings for the macros generated by `use Gettext`.
  @dialyzer [
    {:nowarn_function, 'MACRO-dgettext': 3},
    {:nowarn_function, 'MACRO-dgettext': 4},
    {:nowarn_function, 'MACRO-dngettext': 5},
    {:nowarn_function, 'MACRO-dngettext': 6}
  ]
end
30.387097
72
0.646497
ffc9cdea78fd2587fca42fd39ae4cf83c4d50116
2,445
exs
Elixir
test/dsl/signal_obs_test.exs
DriesDeBackker/bquarp-reactivity
28c28fba7c60dcf0a4bee6cbcb2c67545dc5122c
[ "MIT" ]
1
2019-07-18T12:44:08.000Z
2019-07-18T12:44:08.000Z
test/dsl/signal_obs_test.exs
DriesDeBackker/bquarp-reactivity
28c28fba7c60dcf0a4bee6cbcb2c67545dc5122c
[ "MIT" ]
null
null
null
test/dsl/signal_obs_test.exs
DriesDeBackker/bquarp-reactivity
28c28fba7c60dcf0a4bee6cbcb2c67545dc5122c
[ "MIT" ]
null
null
null
defmodule Test.DSL.SignalObs do
  use ExUnit.Case

  alias Reactivity.DSL.SignalObs, as: Sobs
  alias Observables.Obs
  alias Observables.Subject

  # Creates a subject, pipes it through `transform` and forwards every value
  # emitted by the resulting observable back to the test process as a message.
  # Returns the subject so the test can push values into it.
  defp observed(transform) do
    test_pid = self()
    subject = Subject.create()

    subject
    |> transform.()
    |> Obs.map(fn value -> send(test_pid, value) end)

    subject
  end

  test "From observable" do
    subject = observed(&Sobs.from_plain_obs/1)
    Subject.next(subject, :v)
    assert_receive({:v, []}, 1000, "did not get this message!")
  end

  test "To plain observable" do
    subject = observed(&Sobs.to_plain_obs/1)
    Subject.next(subject, {:v, :c})
    assert_receive(:v, 1000, "did not get this message!")
  end

  test "Add a context to a signal observable (1)" do
    subject = observed(&Sobs.add_context(&1, {:g, 0}))
    Subject.next(subject, {:v, []})
    assert_receive({:v, [[{_s, 0}]]}, 1000, "did not get this message!")
    Subject.next(subject, {:v, []})
    assert_receive({:v, [[{_s, 1}]]}, 1000, "did not get this message!")
  end

  test "Remove a context from a signal observable" do
    subject = observed(&Sobs.remove_context(&1, 2))
    Subject.next(subject, {:v, [:c1, :c2, :c3, :c4]})
    assert_receive({:v, [:c1, :c2, :c4]}, 1000, "did not get this message!")
  end

  test "Keep a context of a signal observable" do
    subject = observed(&Sobs.keep_context(&1, 2))
    Subject.next(subject, {:v, [:c1, :c2, :c3, :c4]})
    assert_receive({:v, [:c3]}, 1000, "did not get this message!")
  end

  test "Set the context of a signal observable" do
    subject = observed(&Sobs.set_context(&1, {:t, 0}))
    Subject.next(subject, {:v, [:c1, :c2, :c3, :c4]})
    assert_receive({:v, [0]}, 1000, "did not get this message!")
    Subject.next(subject, {:v, [:c1, :c2, :c3, :c4]})
    assert_receive({:v, [1]}, 1000, "did not get this message!")
  end

  test "Clear the context of a signal observable" do
    subject = observed(&Sobs.clear_context/1)
    Subject.next(subject, {:v, [:c1, :c2, :c3, :c4]})
    assert_receive({:v, []}, 1000, "did not get this message!")
  end
end
23.970588
76
0.584458
ffc9ce2d78b6f4754d5eb68521b640aba84023ba
4,115
ex
Elixir
lib/absinthe/phase/document/missing_literals.ex
TheRealReal/absinthe
6eae5bc36283e58f42d032b8afd90de3ad64f97b
[ "MIT" ]
4,101
2016-03-02T03:49:20.000Z
2022-03-31T05:46:01.000Z
lib/absinthe/phase/document/missing_literals.ex
TheRealReal/absinthe
6eae5bc36283e58f42d032b8afd90de3ad64f97b
[ "MIT" ]
889
2016-03-02T16:06:59.000Z
2022-03-31T20:24:12.000Z
lib/absinthe/phase/document/missing_literals.ex
TheRealReal/absinthe
6eae5bc36283e58f42d032b8afd90de3ad64f97b
[ "MIT" ]
564
2016-03-02T07:49:59.000Z
2022-03-06T14:40:59.000Z
defmodule Absinthe.Phase.Document.MissingLiterals do
  @moduledoc false

  # Fills out missing arguments and input object fields.
  #
  # Filling out means inserting a stubbed `Input.Argument` or `Input.Field` struct.
  #
  # Only those arguments which are non null and / or have a default value are filled
  # out.
  #
  # If an argument or input object field is non null and missing, it is marked invalid

  use Absinthe.Phase
  alias Absinthe.{Blueprint, Type}

  # Phase entry point: walks the whole blueprint and populates every node that
  # carries a schema node with args/fields.
  @spec run(Blueprint.t(), Keyword.t()) :: {:ok, Blueprint.t()}
  def run(input, _options \\ []) do
    node = Blueprint.prewalk(input, &populate_node(&1, input.adapter, input.schema))
    {:ok, node}
  end

  # No schema node: nothing to compare against, leave the node untouched.
  defp populate_node(%{schema_node: nil} = node, _adapter, _schema), do: node

  # Node with arguments (e.g. a field or directive): fill in any argument the
  # document did not supply but the schema requires or defaults.
  defp populate_node(
         %{arguments: arguments, schema_node: %{args: schema_args}} = node,
         adapter,
         schema
       ) do
    arguments =
      fill_missing_nodes(
        Blueprint.Input.Argument,
        arguments,
        schema_args,
        node.source_location,
        adapter,
        schema
      )

    %{node | arguments: arguments}
  end

  # Input object literal: same treatment, but for its fields.
  defp populate_node(
         %Blueprint.Input.Object{fields: fields, schema_node: %{fields: schema_fields}} = node,
         adapter,
         schema
       ) do
    fields =
      fill_missing_nodes(
        Blueprint.Input.Field,
        fields,
        schema_fields,
        node.source_location,
        adapter,
        schema
      )

    %{node | fields: fields}
  end

  # Input object whose schema node is a wrapper (e.g. NonNull): unwrap one
  # level and retry.
  defp populate_node(
         %Blueprint.Input.Object{schema_node: %{of_type: type}} = node,
         adapter,
         schema
       ) do
    %{node | schema_node: type}
    |> populate_node(adapter, schema)
  end

  # Anything else is left as-is.
  defp populate_node(node, _adapter, _schema), do: node

  # For every schema arg/field not present in the document, decide whether to
  # stub it in (default value / mandatory) or ignore it (nullable / deprecated).
  defp fill_missing_nodes(type, arguments, schema_args, source_location, adapter, schema) do
    missing_schema_args = find_missing_schema_nodes(arguments, schema_args)

    missing_schema_args
    |> Map.values()
    |> Enum.reduce(arguments, fn
      # If it's deprecated without a default, ignore it
      %{deprecation: %{}, default_value: nil}, arguments ->
        arguments

      # If it has a default value, we want it.
      %{default_value: val} = schema_node, arguments when not is_nil(val) ->
        arg = build_node(type, schema_node, val, source_location, adapter, schema)
        [arg | arguments]

      # It isn't deprecated, it is null, and there's no default value. It's missing
      %{type: %Type.NonNull{}} = missing_mandatory_arg_schema_node, arguments ->
        arg =
          type
          |> build_node(
            missing_mandatory_arg_schema_node,
            missing_mandatory_arg_schema_node.default_value,
            source_location,
            adapter,
            schema
          )
          |> flag_invalid(:missing)

        [arg | arguments]

      # No default value, and it's allowed to be null. Ignore it.
      _, arguments ->
        arguments
    end)
  end

  # Given the set of possible schema args, return only those not supplied in
  # the document argument / fields
  defp find_missing_schema_nodes(nodes, schema_nodes) do
    nodes
    |> Enum.filter(& &1.schema_node)
    |> Enum.reduce(schema_nodes, fn
      %{schema_node: %{identifier: id}}, acc ->
        Map.delete(acc, id)

      _, acc ->
        acc
    end)
  end

  # Builds the stubbed Input.Argument / Input.Field struct. A generated
  # (non-nil) default is tagged with `Input.Generated` so later phases know it
  # did not come from the document.
  defp build_node(type, schema_node_arg, default, source_location, adapter, schema) do
    struct!(type, %{
      name: schema_node_arg.name |> build_name(adapter, type),
      input_value: %Blueprint.Input.Value{
        data: default,
        normalized:
          if(is_nil(default), do: nil, else: %Blueprint.Input.Generated{by: __MODULE__}),
        raw: nil,
        schema_node: Type.expand(schema_node_arg.type, schema)
      },
      schema_node: schema_node_arg,
      source_location: source_location
    })
  end

  # External (client-facing) name depends on whether this is an argument or an
  # input object field; the adapter handles naming-convention conversion.
  defp build_name(name, adapter, Blueprint.Input.Argument) do
    adapter.to_external_name(name, :argument)
  end

  defp build_name(name, adapter, Blueprint.Input.Field) do
    adapter.to_external_name(name, :field)
  end
end
28.576389
95
0.63791
ffca35367bc339332b6135a85b28759733fd1fcd
432
exs
Elixir
apps/api/test/api/views/base_view_test.exs
omgnetwork/omg-childchain-v2
31cc9cf9e42718fc3b9bd6668f24a627cac80b4f
[ "Apache-2.0" ]
4
2020-11-30T17:38:57.000Z
2021-01-23T21:29:41.000Z
apps/api/test/api/views/base_view_test.exs
omgnetwork/omg-childchain-v2
31cc9cf9e42718fc3b9bd6668f24a627cac80b4f
[ "Apache-2.0" ]
24
2020-11-30T17:32:48.000Z
2021-02-22T06:25:22.000Z
apps/api/test/api/views/base_view_test.exs
omgnetwork/omg-childchain-v2
31cc9cf9e42718fc3b9bd6668f24a627cac80b4f
[ "Apache-2.0" ]
null
null
null
defmodule API.View.BaseTest do
  @moduledoc """
  Tests for `API.View.Base.serialize/3`.
  """
  use ExUnit.Case, async: true

  alias API.View.Base

  describe "serialize/3" do
    test "serializes data, success and version" do
      expected = %{
        data: %{some: "data"},
        service_name: "child_chain",
        success: true,
        version: "1.2.3"
      }

      assert Base.serialize(%{some: "data"}, true, "1.2.3") == expected
    end
  end
end
21.6
65
0.530093
ffca435abc8c22880a5dd2b4aacca9a561be1e2c
342
ex
Elixir
apps/authenticator/lib/application.ex
dcdourado/watcher_ex
ce80df81610a6e9b77612911aac2a6d6cf4de8d5
[ "Apache-2.0" ]
9
2020-10-13T14:11:37.000Z
2021-08-12T18:40:08.000Z
apps/authenticator/lib/application.ex
dcdourado/watcher_ex
ce80df81610a6e9b77612911aac2a6d6cf4de8d5
[ "Apache-2.0" ]
28
2020-10-04T14:43:48.000Z
2021-12-07T16:54:22.000Z
apps/authenticator/lib/application.ex
dcdourado/watcher_ex
ce80df81610a6e9b77612911aac2a6d6cf4de8d5
[ "Apache-2.0" ]
3
2020-11-25T20:59:47.000Z
2021-08-30T10:36:58.000Z
defmodule Authenticator.Application do
  @moduledoc false

  use Application

  @doc false
  @impl true
  def start(_type, _args) do
    Supervisor.start_link(children(), strategy: :one_for_one, name: Authenticator.Supervisor)
  end

  # Reads the child specs from the application environment, e.g.
  #
  #     config :authenticator, Authenticator.Application, children: [...]
  #
  # Defaults to `[]` when the `:children` key is absent so the supervisor can
  # still boot. (Previously `Keyword.get(:children)` returned `nil` for a
  # missing key, which crashed `Supervisor.start_link/2`.)
  defp children do
    :authenticator
    |> Application.get_env(__MODULE__, [])
    |> Keyword.get(:children, [])
  end
end
20.117647
93
0.722222
ffca46a57bb9f63a31440a8647c4a2a0c3d593c5
1,695
ex
Elixir
clients/domains/lib/google_api/domains/v1beta1/model/test_iam_permissions_request.ex
yoshi-code-bot/elixir-google-api
cdb6032f01fac5ab704803113c39f2207e9e019d
[ "Apache-2.0" ]
null
null
null
clients/domains/lib/google_api/domains/v1beta1/model/test_iam_permissions_request.ex
yoshi-code-bot/elixir-google-api
cdb6032f01fac5ab704803113c39f2207e9e019d
[ "Apache-2.0" ]
null
null
null
clients/domains/lib/google_api/domains/v1beta1/model/test_iam_permissions_request.ex
yoshi-code-bot/elixir-google-api
cdb6032f01fac5ab704803113c39f2207e9e019d
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.

defmodule GoogleApi.Domains.V1beta1.Model.TestIamPermissionsRequest do
  @moduledoc """
  Request message for `TestIamPermissions` method.

  ## Attributes

  *   `permissions` (*type:* `list(String.t)`, *default:* `nil`) - The set of permissions to check for the `resource`. Permissions with wildcards (such as `*` or `storage.*`) are not allowed. For more information see [IAM Overview](https://cloud.google.com/iam/docs/overview#permissions).
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :permissions => list(String.t()) | nil
        }

  field(:permissions, type: :list)
end

# JSON decoding is delegated to the generated `decode/2` on the model, which
# maps fields declared via `field/2` above.
defimpl Poison.Decoder, for: GoogleApi.Domains.V1beta1.Model.TestIamPermissionsRequest do
  def decode(value, options) do
    GoogleApi.Domains.V1beta1.Model.TestIamPermissionsRequest.decode(value, options)
  end
end

# JSON encoding uses the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.Domains.V1beta1.Model.TestIamPermissionsRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
36.06383
288
0.746313
ffca4edb60958a804e1caa97379b3ab62c2281f4
331
ex
Elixir
lib/cryppo/initialization_worker.ex
Meeco/cryppo_ex
fd9b6f4f84c6668797b1e31f6e59bb5f42630a2a
[ "Apache-2.0" ]
null
null
null
lib/cryppo/initialization_worker.ex
Meeco/cryppo_ex
fd9b6f4f84c6668797b1e31f6e59bb5f42630a2a
[ "Apache-2.0" ]
null
null
null
lib/cryppo/initialization_worker.ex
Meeco/cryppo_ex
fd9b6f4f84c6668797b1e31f6e59bb5f42630a2a
[ "Apache-2.0" ]
1
2021-06-01T07:46:14.000Z
2021-06-01T07:46:14.000Z
defmodule Cryppo.InitializationWorker do
  @moduledoc false

  # The sole purpose of this "worker" is to seed the Erlang random number
  # generator once at startup. `start_link/0` does the seeding synchronously
  # and then returns `:ignore`, so no process is actually kept under the
  # supervisor.

  use GenServer

  @spec start_link :: :ignore
  def start_link do
    :crypto.rand_seed()
    :ignore
  end

  @impl true
  def init(state), do: {:ok, state}
end
18.388889
55
0.703927
ffca652c03ad60eba2008893266ec802c2bd14a5
972
ex
Elixir
lib/bot/command_handler.ex
iGalaxyYT/lanyard
ff9beca4cae2030bea97b9268fa0598465dbb10c
[ "MIT" ]
null
null
null
lib/bot/command_handler.ex
iGalaxyYT/lanyard
ff9beca4cae2030bea97b9268fa0598465dbb10c
[ "MIT" ]
null
null
null
lib/bot/command_handler.ex
iGalaxyYT/lanyard
ff9beca4cae2030bea97b9268fa0598465dbb10c
[ "MIT" ]
null
null
null
defmodule Lanyard.DiscordBot.CommandHandler do
  @moduledoc """
  Dispatches incoming Discord message payloads to command handler modules.

  A message whose content starts with `"."` is treated as a command; the word
  after the dot selects the handler module, and the remaining words are passed
  to it as arguments.
  """

  # Maps the command name (the word typed after ".") to its handler module.
  @command_map %{
    "get" => Lanyard.DiscordBot.Commands.Get,
    "set" => Lanyard.DiscordBot.Commands.Set,
    "del" => Lanyard.DiscordBot.Commands.Del,
    "apikey" => Lanyard.DiscordBot.Commands.ApiKey,
    "kv" => Lanyard.DiscordBot.Commands.KV
  }

  @doc """
  Handles one message payload (a map with a `:data` key holding the raw
  Discord message map). Messages from bots, messages without a `"."` prefix,
  and unknown commands are ignored; `:ok` is returned in those cases.
  """
  def handle_message(payload) do
    case payload.data do
      # Don't handle messages from other bots
      %{"author" => %{"bot" => true}} ->
        :ok

      # Binary prefix match replaces the previous
      # `String.to_charlist |> tl |> to_string` round-trip.
      %{"content" => "." <> command_text} ->
        dispatch(command_text, payload.data)

      _ ->
        :ok
    end
  end

  def handle_command(_unknown_command, _args), do: :ok

  # Splits "<command> <args...>" and invokes the matching handler module.
  # Unknown commands are ignored (returns :ok, consistently with the other
  # ignored cases).
  defp dispatch(command_text, data) do
    [command | args] = String.split(command_text, " ")

    case Map.fetch(@command_map, command) do
      {:ok, module} -> module.handle(args, data)
      :error -> :ok
    end
  end
end
26.27027
70
0.574074
ffca7cbb0cff46c4cff53f6cf232f2afc94a0e82
203
exs
Elixir
priv/repo/migrations/20171029204629_add_thread_details.exs
making3/summoner-alerts-service
b560d53cb39048049f52dd99d796eab52544da9d
[ "MIT" ]
null
null
null
priv/repo/migrations/20171029204629_add_thread_details.exs
making3/summoner-alerts-service
b560d53cb39048049f52dd99d796eab52544da9d
[ "MIT" ]
null
null
null
priv/repo/migrations/20171029204629_add_thread_details.exs
making3/summoner-alerts-service
b560d53cb39048049f52dd99d796eab52544da9d
[ "MIT" ]
null
null
null
defmodule SAS.Repo.Migrations.AddThreadDetails do
  use Ecto.Migration

  # Adds detail columns to the existing `threads` table.
  # NOTE(review): field names (`permalink`, `created_utc`) match Reddit's API
  # naming — presumably these store thread data fetched from there; confirm.
  def change do
    alter table(:threads) do
      # Link to the thread.
      add :permalink, :string
      # Creation timestamp (UTC) as reported by the source.
      add :created_utc, :utc_datetime
    end
  end
end
18.454545
49
0.704433
ffca958459185c8d12967944082283f1422c7426
526
ex
Elixir
Microsoft.Azure.Management.Containers/lib/microsoft/azure/management/containers/model/empty_dir_volume.ex
chgeuer/ex_microsoft_azure_management
99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603
[ "Apache-2.0" ]
4
2018-09-29T03:43:15.000Z
2021-04-01T18:30:46.000Z
Microsoft.Azure.Management.Containers/lib/microsoft/azure/management/containers/model/empty_dir_volume.ex
chgeuer/ex_microsoft_azure_management
99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603
[ "Apache-2.0" ]
null
null
null
Microsoft.Azure.Management.Containers/lib/microsoft/azure/management/containers/model/empty_dir_volume.ex
chgeuer/ex_microsoft_azure_management
99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603
[ "Apache-2.0" ]
null
null
null
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.

defmodule Microsoft.Azure.Management.Containers.Model.EmptyDirVolume do
  @moduledoc """
  The empty directory volume.
  """

  # The swagger definition declares no properties for this model, hence the
  # empty struct.
  @derive [Poison.Encoder]
  defstruct [
  ]

  @type t :: %__MODULE__{
  }
end

# Nothing to decode for a property-less model; return the value unchanged.
defimpl Poison.Decoder, for: Microsoft.Azure.Management.Containers.Model.EmptyDirVolume do
  def decode(value, _options) do
    value
  end
end
20.230769
90
0.724335
ffca9c16f563229e49e6e07ea55b5f56e512f007
3,479
ex
Elixir
web/ex_admin/errors_helper.ex
chaince/ex_admin
dee0b0fcf6c1c95d71290a8375a75b7da35c7c25
[ "MIT" ]
null
null
null
web/ex_admin/errors_helper.ex
chaince/ex_admin
dee0b0fcf6c1c95d71290a8375a75b7da35c7c25
[ "MIT" ]
null
null
null
web/ex_admin/errors_helper.ex
chaince/ex_admin
dee0b0fcf6c1c95d71290a8375a75b7da35c7c25
[ "MIT" ]
2
2018-07-12T07:44:50.000Z
2018-07-19T11:45:09.000Z
defmodule ExAdmin.ErrorsHelper do
  @moduledoc """
  The primary purpose of this module is to take nested changeset errors
  created by many_to_many and has many through relationships and change
  them into a format that the forms can use to get the error message from
  the field name.

  Changesets such as:

  #Ecto.Changeset<action: nil, changes: %{phone_numbers:
    [#Ecto.Changeset<action: :update, changes: %{}, errors: [],
      data: #ContactDemo.PhoneNumber<>, valid?: true>,
     #Ecto.Changeset<action: :update, changes: %{},
      errors: [number: {"can't be blank", []}], data: #ContactDemo.PhoneNumber<>,
      valid?: false>,
     #Ecto.Changeset<action: :insert, changes: %{label: "Primary Phone"},
      errors: [number: {"can't be blank", []}], data: #ContactDemo.PhoneNumber<>,
      valid?: false>]},
    errors: [], data: #ContactDemo.Contact<>, valid?: false>

  need to be walked and each of the error messages needs to be flattened
  into its appropriately namespaced version. To do this we need both the
  changeset and the schema used to generate the changeset. This is
  required because we need to look at the schema to properly create the
  necessary form field names. For example, many_to_many associations have
  attributes appended to the field name so that we know it is a many to
  many field.
  """

  # Walks the (possibly nested) changeset and returns a flat list of
  # `{namespaced_field_atom, error}` tuples suitable for form lookup.
  def create_errors(changeset, schema) do
    assoc_prefixes = create_prefix_map(schema)
    flatten_errors(changeset, assoc_prefixes)
    |> List.flatten
    |> Enum.filter(fn(x) -> x != nil end)
  end

  defp flatten_errors(errors_array, assoc_prefixes, prefix \\ nil)

  # Top-level changeset (no prefix yet) or one without errors: just recurse
  # into its changes.
  defp flatten_errors(%Ecto.Changeset{changes: changes, errors: errors}, assoc_prefixes, prefix)
    when errors == [] or is_nil(prefix) do
    errors ++ flatten_errors(changes, assoc_prefixes, prefix)
  end

  # Nested changeset with errors: namespace each error key with the current
  # prefix, then recurse into its changes.
  defp flatten_errors(%Ecto.Changeset{changes: changes, errors: errors}, assoc_prefixes, prefix) do
    Enum.map(errors, fn({k, v}) -> {concat_atoms(prefix, k), v} end)
      ++ flatten_errors(changes, assoc_prefixes, prefix)
  end

  # A list of nested changesets (has_many/many_to_many): append each element's
  # index to the prefix so errors map to the right form row.
  defp flatten_errors(errors_array, assoc_prefixes, prefix) when is_list(errors_array) do
    Enum.with_index(errors_array)
    |> Enum.map(fn({x, i}) ->
      prefix = concat_atoms(prefix, String.to_atom(Integer.to_string(i)))
      flatten_errors(x, assoc_prefixes, prefix)
    end)
  end

  # Plain structs (e.g. already-loaded association data) carry no errors.
  defp flatten_errors(%{__struct__: _struct}, _, _), do: nil

  # A plain changes map: namespace each key (adding the association suffix,
  # e.g. `_attributes`, when the schema says so) and recurse into the value.
  defp flatten_errors(%{} = errors_map, assoc_prefixes, prefix) do
    Enum.map(errors_map, fn({k, x}) ->
      with k <- if(not is_atom(k), do: String.to_atom(k), else: k),
        k <- if(Keyword.has_key?(assoc_prefixes, k), do: concat_atoms(k, assoc_prefixes[k]), else: k),
        k <- if(prefix != nil, do: concat_atoms(prefix, k), else: k),
        do: flatten_errors(x, assoc_prefixes, k)
    end)
  end

  # Scalar leaf values: nothing to flatten.
  defp flatten_errors(_, _, _), do: nil

  # Joins two atoms with "_" into a single namespaced atom, e.g.
  # :phone_numbers and :"0" -> :phone_numbers_0.
  defp concat_atoms(first, second) do
    "#{first}_#{second}" |> String.to_atom
  end

  # Builds `{association_field, :attributes}` pairs for every has/through/
  # many_to_many association on the schema; unsupported association types map
  # to nil (filtered out later via Keyword.* skipping non-tuples).
  defp create_prefix_map(schema) do
    schema.__schema__(:associations)
    |> Enum.map(&(schema.__schema__(:association, &1)))
    |> Enum.map(fn(a) ->
      case a do
        %Ecto.Association.HasThrough{field: field} -> { field, :attributes }
        %Ecto.Association.Has{field: field} -> { field, :attributes }
        %Ecto.Association.ManyToMany{field: field} -> { field, :attributes }
        _ -> nil
      end
    end)
  end
end
135
0.663122
ffcaaae774bca035c283744b3de85a3df93699f3
6,443
ex
Elixir
lib/credo/config_file.ex
jlgeering/credo
b952190ed758c262aa0d9bbee01227f9b1f0c63b
[ "MIT" ]
null
null
null
lib/credo/config_file.ex
jlgeering/credo
b952190ed758c262aa0d9bbee01227f9b1f0c63b
[ "MIT" ]
null
null
null
lib/credo/config_file.ex
jlgeering/credo
b952190ed758c262aa0d9bbee01227f9b1f0c63b
[ "MIT" ]
null
null
null
defmodule Credo.ConfigFile do
  # FIX: this used to be `@doc`, which cannot document a module; the dangling
  # `@doc` was silently overridden by the `@doc` on `read_or_default/3`.
  @moduledoc """
  `ConfigFile` structs represent all loaded and merged config files in a run.
  """

  @config_filename ".credo.exs"
  @default_config_name "default"
  # NOTE: read at compile time; the bundled default `.credo.exs` must exist in
  # the project root when this module compiles.
  @default_config_file File.read!(@config_filename)

  @default_glob "**/*.{ex,exs}"
  @default_files_included [@default_glob]
  @default_files_excluded []

  defstruct files: nil,
            color: true,
            checks: nil,
            requires: [],
            strict: false,
            # checks if there is a new version of Credo
            check_for_updates: true

  @doc """
  Returns a `Credo.ConfigFile` struct representing a consolidated configuration
  for all `.credo.exs` files in `relevant_directories/1` merged into the
  default configuration.

  - `config_name`: name of the configuration to load
  - `safe`: if +true+, the config files are loaded using static analysis rather
    than `Code.eval_string/1`
  """
  def read_or_default(dir, config_name \\ nil, safe \\ false) do
    dir
    |> relevant_config_files
    |> combine_configs(dir, config_name, safe)
  end

  @doc """
  Returns a `Credo.ConfigFile` struct representing a consolidated configuration
  for the provided config_file merged into the default configuration.

  - `config_file`: full path to the custom configuration file
  - `config_name`: name of the configuration to load
  - `safe`: if +true+, the config files are loaded using static analysis rather
    than `Code.eval_string/1`
  """
  def read_from_file_path(dir, config_file, config_name \\ nil, safe \\ false) do
    combine_configs([config_file], dir, config_name, safe)
  end

  # Reads every existing config file, prepends the default config, parses each
  # and merges them left to right (later files win).
  defp combine_configs(files, dir, config_name, safe) do
    files
    |> Enum.filter(&File.exists?/1)
    |> Enum.map(&File.read!/1)
    |> List.insert_at(0, @default_config_file)
    |> Enum.map(&from_exs(dir, config_name || @default_config_name, &1, safe))
    |> merge
    |> add_given_directory_to_files(dir)
  end

  defp relevant_config_files(dir) do
    dir
    |> relevant_directories
    |> add_config_files
  end

  @doc """
  Returns all parent directories of the given `dir` as well as each `./config`
  sub-directory.
  """
  def relevant_directories(dir) do
    dir
    |> Path.expand()
    |> Path.split()
    |> Enum.reverse()
    |> get_dir_paths
    |> add_config_dirs
  end

  defp get_dir_paths(dirs), do: do_get_dir_paths(dirs, [])

  defp do_get_dir_paths(dirs, acc) when length(dirs) < 2, do: acc

  defp do_get_dir_paths([dir | tail], acc) do
    expanded_path =
      tail
      |> Enum.reverse()
      |> Path.join()
      |> Path.join(dir)

    do_get_dir_paths(tail, [expanded_path | acc])
  end

  defp add_config_dirs(paths) do
    Enum.flat_map(paths, fn path -> [path, Path.join(path, "config")] end)
  end

  defp add_config_files(paths) do
    for path <- paths, do: Path.join(path, @config_filename)
  end

  # Parses one `.credo.exs` source string into a ConfigFile struct.
  defp from_exs(dir, config_name, exs_string, safe) do
    exs_string
    |> Credo.ExsLoader.parse(safe)
    |> from_data(dir, config_name)
  end

  defp from_data(data, dir, config_name) do
    data =
      data[:configs]
      |> List.wrap()
      |> Enum.find(&(&1[:name] == config_name))

    %__MODULE__{
      check_for_updates: data[:check_for_updates] || false,
      requires: data[:requires] || [],
      files: files_from_data(data, dir),
      checks: checks_from_data(data),
      strict: data[:strict] || false,
      color: data[:color] || false
    }
  end

  defp files_from_data(data, dir) do
    files = data[:files] || %{}
    included_files = files[:included] || dir

    included_dir =
      included_files
      |> List.wrap()
      |> Enum.map(&join_default_files_if_directory/1)

    %{
      included: included_dir,
      excluded: files[:excluded] || @default_files_excluded
    }
  end

  defp checks_from_data(data) do
    case data[:checks] do
      checks when is_list(checks) ->
        checks

      _ ->
        []
    end
  end

  @doc """
  Merges the given structs from left to right, meaning that later entries
  overwrites earlier ones.

      merge(base, other)

  Any options in `other` will overwrite those in `base`.

  The `files:` field is merged, meaning that you can define `included` and/or
  `excluded` and only override the given one.

  The `checks:` field is merged.
  """
  def merge(list) when is_list(list) do
    base = List.first(list)
    tail = List.delete_at(list, 0)
    merge(tail, base)
  end

  def merge([], config), do: config

  def merge([other | tail], base) do
    new_base = merge(base, other)
    merge(tail, new_base)
  end

  def merge(base, other) do
    %__MODULE__{
      check_for_updates: other.check_for_updates,
      requires: base.requires ++ other.requires,
      files: merge_files(base, other),
      checks: merge_checks(base, other),
      strict: other.strict,
      color: other.color
    }
  end

  def merge_checks(%__MODULE__{checks: checks_base}, %__MODULE__{
        checks: checks_other
      }) do
    base = normalize_check_tuples(checks_base)
    other = normalize_check_tuples(checks_other)
    Keyword.merge(base, other)
  end

  def merge_files(%__MODULE__{files: files_base}, %__MODULE__{
        files: files_other
      }) do
    %{
      included: files_other[:included] || files_base[:included],
      excluded: files_other[:excluded] || files_base[:excluded]
    }
  end

  # Ensures every check is a `{Module, opts}` tuple so Keyword.merge works.
  defp normalize_check_tuples(nil), do: []

  defp normalize_check_tuples(list) when is_list(list) do
    Enum.map(list, &normalize_check_tuple/1)
  end

  defp normalize_check_tuple({name}), do: {name, []}
  defp normalize_check_tuple(tuple), do: tuple

  defp join_default_files_if_directory(dir) do
    if File.dir?(dir) do
      Path.join(dir, @default_files_included)
    else
      dir
    end
  end

  defp add_given_directory_to_files(%__MODULE__{files: files} = config, dir) do
    files = %{
      included:
        files[:included]
        |> Enum.map(&add_directory_to_file(&1, dir))
        |> Enum.uniq(),
      excluded:
        files[:excluded]
        |> Enum.map(&add_directory_to_file(&1, dir))
        |> Enum.uniq()
    }

    %__MODULE__{config | files: files}
  end

  # NOTE(review): when `dir` is not a directory, this returns `dir` itself
  # (presumably a single-file run) and discards `file_or_glob` — confirm.
  defp add_directory_to_file(file_or_glob, dir) when is_binary(file_or_glob) do
    if File.dir?(dir) do
      if dir == "." || file_or_glob =~ ~r/^\// do
        file_or_glob
      else
        Path.join(dir, file_or_glob)
      end
    else
      dir
    end
  end

  defp add_directory_to_file(regex, _), do: regex
end
26.08502
85
0.651094
ffcaafa80dc6611daf51fbb9b2eaf57b797b193e
1,174
exs
Elixir
config/config.exs
GabrielMalakias/ex_air
2f892d4c2e1313fc5d794de2e50769e4165ee5fc
[ "MIT" ]
null
null
null
config/config.exs
GabrielMalakias/ex_air
2f892d4c2e1313fc5d794de2e50769e4165ee5fc
[ "MIT" ]
null
null
null
config/config.exs
GabrielMalakias/ex_air
2f892d4c2e1313fc5d794de2e50769e4165ee5fc
[ "MIT" ]
null
null
null
# This file is responsible for configuring your application # and its dependencies with the aid of the Mix.Config module. # # This configuration file is loaded before any dependency and # is restricted to this project. # General application configuration use Mix.Config config :ex_air, ecto_repos: [ExAir.Repo] config :ex_air, threads: 10 # Configures the endpoint config :ex_air, ExAirWeb.Endpoint, url: [host: "localhost"], secret_key_base: "Nmj+667P/SPmqgmFDysPXUZhXI8qckI9svq9XwApLjXG4BxL06upY4q01g0poxiG", render_errors: [view: ExAirWeb.ErrorView, accepts: ~w(json), layout: false], pubsub_server: ExAir.PubSub, live_view: [signing_salt: "ZX+zAiLZ"] # Configures Elixir's Logger config :logger, :console, format: "$time $metadata[$level] $message\n", metadata: [:request_id] config :ex_air, Friends.Repo, database: "ex_air", username: "postgres", password: "postgres", hostname: "localhost" # Use Jason for JSON parsing in Phoenix config :phoenix, :json_library, Jason # Import environment specific config. This must remain at the bottom # of this file so it overrides the configuration defined above. import_config "#{Mix.env()}.exs"
28.634146
86
0.755537
ffcab8352c92b3525c017bbfee22f8a5a95f49a7
5,125
ex
Elixir
lib/oauther.ex
drozdoff/oauther
7989f04d4b2e91390f4a8cb99380948d7614e7d1
[ "ISC" ]
null
null
null
lib/oauther.ex
drozdoff/oauther
7989f04d4b2e91390f4a8cb99380948d7614e7d1
[ "ISC" ]
null
null
null
lib/oauther.ex
drozdoff/oauther
7989f04d4b2e91390f4a8cb99380948d7614e7d1
[ "ISC" ]
null
null
null
defmodule OAuther do defmodule Credentials do defstruct [ :consumer_key, :consumer_secret, :token, :token_secret, method: :hmac_sha1 ] @type t :: %__MODULE__{ consumer_key: String.t(), consumer_secret: String.t(), token: nil | String.t(), token_secret: nil | String.t(), method: :hmac_sha1 | :rsa_sha1 | :plaintext } end @type params :: [{String.t(), String.Chars.t()}] @type header :: {String.t(), String.t()} @spec credentials(Enumerable.t()) :: Credentials.t() | no_return def credentials(args) do Enum.reduce(args, %Credentials{}, fn {key, val}, acc -> :maps.update(key, val, acc) end) end @spec sign(String.t(), URI.t() | String.t(), params, Credentials.t()) :: params def sign(verb, url, params, %Credentials{} = creds) do params = protocol_params(params, creds) signature = signature(verb, url, params, creds) [{"oauth_signature", signature} | params] end @spec header(params) :: {header, params} def header(params) do {oauth_params, req_params} = split_with(params, &protocol_param?/1) {{"Authorization", "OAuth " <> compose_header(oauth_params)}, req_params} end @spec header(params, String.t()) :: {header, params} def header(params, realm) do {oauth_params, req_params} = split_with(params, &protocol_param?/1) {{"Authorization", "OAuth " <> compose_header([{"realm", realm} | oauth_params])}, req_params} end @spec protocol_params(params, Credentials.t()) :: params def protocol_params(params, %Credentials{} = creds) do [ {"oauth_consumer_key", creds.consumer_key}, {"oauth_nonce", nonce()}, {"oauth_signature_method", signature_method(creds.method)}, {"oauth_timestamp", timestamp()}, {"oauth_version", "1.0"} | maybe_put_token(params, creds.token) ] end @spec signature(String.t(), URI.t() | String.t(), params, Credentials.t()) :: binary def signature(_, _, _, %Credentials{method: :plaintext} = creds) do compose_key(creds) end def signature(verb, url, params, %Credentials{method: :hmac_sha1} = creds) do :sha |> :crypto.hmac(compose_key(creds), base_string(verb, url, params)) |> 
Base.encode64() end def signature(verb, url, params, %Credentials{method: :rsa_sha1} = creds) do base_string(verb, url, params) |> :public_key.sign(:sha, decode_private_key(creds.consumer_secret)) |> Base.encode64() end defp protocol_param?({key, _value}) do String.starts_with?(key, "oauth_") end defp compose_header([_ | _] = params) do params |> Stream.map(&percent_encode/1) |> Enum.map_join(", ", &compose_header/1) end defp compose_header({key, value}) do key <> "=\"" <> value <> "\"" end defp compose_key(creds) do [creds.consumer_secret, creds.token_secret] |> Enum.map_join("&", &percent_encode/1) end defp read_private_key("-----BEGIN RSA PRIVATE KEY-----" <> _ = private_key) do private_key end defp read_private_key(path) do File.read!(path) end defp decode_private_key(private_key_or_path) do [entry] = private_key_or_path |> read_private_key() |> :public_key.pem_decode() :public_key.pem_entry_decode(entry) end defp base_string(verb, url, params) do {uri, query_params} = parse_url(url) [verb, uri, params ++ query_params] |> Stream.map(&normalize/1) |> Enum.map_join("&", &percent_encode/1) end defp normalize(verb) when is_binary(verb) do String.upcase(verb) end defp normalize(%URI{host: host} = uri) do %{uri | host: String.downcase(host)} end defp normalize([_ | _] = params) do Enum.map(params, &percent_encode/1) |> Enum.sort() |> Enum.map_join("&", &normalize_pair/1) end defp normalize_pair({key, value}) do key <> "=" <> value end defp parse_url(url) do uri = URI.parse(url) {%{uri | query: nil}, parse_query_params(uri.query)} end defp parse_query_params(params) do if is_nil(params) do [] else URI.query_decoder(params) |> Enum.to_list() end end defp nonce() do :crypto.strong_rand_bytes(24) |> Base.encode64() end defp timestamp() do {megasec, sec, _microsec} = :os.timestamp() megasec * 1_000_000 + sec end defp maybe_put_token(params, value) do if is_nil(value) do params else [{"oauth_token", value} | params] end end defp signature_method(:plaintext), do: 
"PLAINTEXT" defp signature_method(:hmac_sha1), do: "HMAC-SHA1" defp signature_method(:rsa_sha1), do: "RSA-SHA1" defp percent_encode({key, value}) do {percent_encode(key), percent_encode(value)} end defp percent_encode(other) do other |> to_string() |> URI.encode(&URI.char_unreserved?/1) end # TODO: Remove once we depend on Elixir 1.4 and higher. Code.ensure_loaded(Enum) split_with = if function_exported?(Enum, :split_with, 2), do: :split_with, else: :partition defp split_with(enum, fun), do: Enum.unquote(split_with)(enum, fun) end
26.417526
98
0.641171
ffcad1f5b6fb50345693e42c2747e0714e7ef7e1
126
exs
Elixir
main.exs
zachschickler/advent-of-code-2021-elixir
51a8a7bc6bd0ead7f833261ac8671e12ef0cece8
[ "MIT" ]
null
null
null
main.exs
zachschickler/advent-of-code-2021-elixir
51a8a7bc6bd0ead7f833261ac8671e12ef0cece8
[ "MIT" ]
null
null
null
main.exs
zachschickler/advent-of-code-2021-elixir
51a8a7bc6bd0ead7f833261ac8671e12ef0cece8
[ "MIT" ]
null
null
null
IO.puts "Advent of Code 2021\n===================\n" Code.eval_file "days/day1/day1.exs" Code.eval_file "days/day2/day2.exs"
25.2
52
0.634921
ffcaf21957bbb8deeb64a52cbd0a868202035116
2,229
exs
Elixir
apps/content/test/content_web/controllers/comment_controller_test.exs
votiakov/petal
ec03551da6dadc0c3482b25a5f5dcd400c36db43
[ "MIT" ]
null
null
null
apps/content/test/content_web/controllers/comment_controller_test.exs
votiakov/petal
ec03551da6dadc0c3482b25a5f5dcd400c36db43
[ "MIT" ]
null
null
null
apps/content/test/content_web/controllers/comment_controller_test.exs
votiakov/petal
ec03551da6dadc0c3482b25a5f5dcd400c36db43
[ "MIT" ]
null
null
null
defmodule Legendary.Content.CommentControllerTest do use Legendary.Content.ConnCase alias Legendary.Content.Comments alias Legendary.Content.Posts @post_attrs %{id: 456, name: "blergh", status: "publish"} @create_attrs %{id: 123, content: "Hello world", post_id: 456} @update_attrs %{id: 123, content: "Goodbye", post_id: 456} @invalid_attrs %{id: 123, content: "", post_id: 456} def fixture(:post) do {:ok, post} = Posts.create_posts(@post_attrs) post end def fixture(:comment) do {:ok, comment} = Comments.create_comment(@create_attrs) comment end describe "create comment" do test "redirects to show when data is valid", %{conn: conn} do post = fixture(:post) conn = post conn, Routes.comment_path(conn, :create), comment: @create_attrs assert %{id: _} = redirected_params(conn) assert redirected_to(conn) == Routes.posts_path(conn, :show, post) end test "renders errors when data is invalid", %{conn: conn} do post = fixture(:post) conn = post conn, Routes.comment_path(conn, :create), comment: @invalid_attrs assert redirected_to(conn) == Routes.posts_path(conn, :show, post) end end describe "update comment" do setup [:create_comment] test "redirects when data is valid", %{conn: conn, comment: comment, post: post} do conn = put conn, Routes.comment_path(conn, :update, comment), comment: @update_attrs assert redirected_to(conn) == Routes.posts_path(conn, :show, post) end test "renders errors when data is invalid", %{conn: conn, comment: comment, post: post} do conn = put conn, Routes.comment_path(conn, :update, comment), comment: @invalid_attrs assert redirected_to(conn) == Routes.posts_path(conn, :show, post) end end describe "delete comment" do setup [:create_comment] test "deletes chosen comment", %{conn: conn, comment: comment, post: post} do conn = delete conn, Routes.comment_path(conn, :delete, comment) assert redirected_to(conn) == Routes.posts_path(conn, :show, post) end end defp create_comment(_) do comment = fixture(:comment) post = fixture(:post) {:ok, comment: 
comment, post: post} end end
33.268657
94
0.681023
ffcb1a474fd8ed0aa8da9a545020c95914f906d7
2,076
ex
Elixir
lib/new_relic/transaction/trace.ex
binaryseed/elixir_agent
25f1242c10516618d9ea3a9b18712e5bc41efad6
[ "Apache-2.0" ]
null
null
null
lib/new_relic/transaction/trace.ex
binaryseed/elixir_agent
25f1242c10516618d9ea3a9b18712e5bc41efad6
[ "Apache-2.0" ]
1
2019-02-08T01:08:31.000Z
2019-02-20T00:24:51.000Z
lib/new_relic/transaction/trace.ex
binaryseed/elixir_agent
25f1242c10516618d9ea3a9b18712e5bc41efad6
[ "Apache-2.0" ]
null
null
null
defmodule NewRelic.Transaction.Trace do defstruct start_time: nil, metric_name: nil, request_url: nil, attributes: %{}, segments: [], duration: nil, cat_guid: "", reserved_for_future_use: nil, force_persist_flag: false, xray_session_id: nil, synthetics_resource_id: "" @moduledoc false defmodule Segment do defstruct relative_start_time: nil, relative_end_time: nil, metric_name: nil, attributes: %{}, children: [], class_name: nil, method_name: nil, parent_id: nil, id: nil, pid: nil @moduledoc false end @unused_map %{} def format_traces(traces) do Enum.map(traces, &format_trace/1) end def format_trace(%__MODULE__{} = trace) do trace_segments = format_segments(trace) trace_details = [trace.start_time, @unused_map, @unused_map, trace_segments, trace.attributes] [ trace.start_time, trace.duration, trace.metric_name, trace.request_url, trace_details, trace.cat_guid, trace.reserved_for_future_use, trace.force_persist_flag, trace.xray_session_id, trace.synthetics_resource_id ] end def format_segments(%{ segments: [first_segment | _] = segments, duration: duration, metric_name: metric_name }) do [ 0, duration, "ROOT", first_segment.attributes, [ [ 0, duration, metric_name, first_segment.attributes, Enum.map(segments, &format_child_segments/1) ] ] ] end def format_child_segments(%Segment{} = segment) do [ segment.relative_start_time, segment.relative_end_time, segment.metric_name, segment.attributes, Enum.map(segment.children, &format_child_segments/1), segment.class_name, segment.method_name ] end end
23.325843
98
0.57948
ffcb1abd3fdac44cc7cfd96728bf933f2764a579
729
ex
Elixir
web/gettext.ex
TobiG77/phoenix_api_sample
5d2e69537562ce921b8aa6c2b0d8cd3710490907
[ "MIT" ]
null
null
null
web/gettext.ex
TobiG77/phoenix_api_sample
5d2e69537562ce921b8aa6c2b0d8cd3710490907
[ "MIT" ]
null
null
null
web/gettext.ex
TobiG77/phoenix_api_sample
5d2e69537562ce921b8aa6c2b0d8cd3710490907
[ "MIT" ]
null
null
null
defmodule PhoenixApiSample.Gettext do @moduledoc """ A module providing Internationalization with a gettext-based API. By using [Gettext](http://hexdocs.pm/gettext), your module gains a set of macros for translations, for example: import PhoenixApiSample.Gettext # Simple translation gettext "Here is the string to translate" # Plural translation ngettext "Here is the string to translate", "Here are the strings to translate", 3 # Domain-based translation dgettext "errors", "Here is the error message to translate" See the [Gettext Docs](http://hexdocs.pm/gettext) for detailed usage. """ use Gettext, otp_app: :phoenix_api_sample end
29.16
71
0.689986
ffcb4b7809054f46ef927d57094d1c8c326c00d0
667
exs
Elixir
mix.exs
jpcaruana/enigma
50f7c42dd58cdc95322fb715b7d169717a13575d
[ "MIT" ]
null
null
null
mix.exs
jpcaruana/enigma
50f7c42dd58cdc95322fb715b7d169717a13575d
[ "MIT" ]
1
2016-08-28T20:30:33.000Z
2016-08-28T20:30:33.000Z
mix.exs
jpcaruana/enigma
50f7c42dd58cdc95322fb715b7d169717a13575d
[ "MIT" ]
1
2021-04-28T15:30:09.000Z
2021-04-28T15:30:09.000Z
defmodule Enigma.Mixfile do use Mix.Project def project do [app: :enigma, version: "0.1.0", elixir: "~> 1.3", build_embedded: Mix.env == :prod, start_permanent: Mix.env == :prod, deps: deps()] end # Configuration for the OTP application # # Type "mix help compile.app" for more information def application do [applications: [:logger]] end # Dependencies can be Hex packages: # # {:mydep, "~> 0.3.0"} # # Or git/path repositories: # # {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"} # # Type "mix help deps" for more examples and options defp deps do [] end end
20.212121
77
0.605697
ffcb6e3e050b65d9c43d6fdd828263c91fb8d95b
1,186
exs
Elixir
clients/big_query_data_transfer/mix.exs
nuxlli/elixir-google-api
ecb8679ac7282b7dd314c3e20c250710ec6a7870
[ "Apache-2.0" ]
null
null
null
clients/big_query_data_transfer/mix.exs
nuxlli/elixir-google-api
ecb8679ac7282b7dd314c3e20c250710ec6a7870
[ "Apache-2.0" ]
null
null
null
clients/big_query_data_transfer/mix.exs
nuxlli/elixir-google-api
ecb8679ac7282b7dd314c3e20c250710ec6a7870
[ "Apache-2.0" ]
1
2020-11-10T16:58:27.000Z
2020-11-10T16:58:27.000Z
defmodule GoogleApi.BigQueryDataTransfer.V1.Mixfile do use Mix.Project @version "0.1.0" def project do [app: :google_api_big_query_data_transfer, version: @version, elixir: "~> 1.4", build_embedded: Mix.env == :prod, start_permanent: Mix.env == :prod, description: description(), package: package(), deps: deps(), source_url: "https://github.com/GoogleCloudPlatform/elixir-google-api/tree/master/clients/big_query_data_transfer" ] end def application() do [extra_applications: [:logger]] end defp deps() do [ {:google_gax, "~> 0.1.0"}, {:ex_doc, "~> 0.16", only: :dev} ] end defp description() do """ Transfers data from partner SaaS applications to Google BigQuery on a scheduled, managed basis. """ end defp package() do [ files: ["lib", "mix.exs", "README*", "LICENSE"], maintainers: ["Jeff Ching"], licenses: ["Apache 2.0"], links: %{ "GitHub" => "https://github.com/GoogleCloudPlatform/elixir-google-api/tree/master/clients/big_query_data_transfer", "Homepage" => "https://cloud.google.com/bigquery/" } ] end end
24.708333
123
0.62226
ffcb8b156e7d31e85ee5f764de9cf39f95dffc7d
791
exs
Elixir
elixir/bracket-push/bracket_push.exs
macborowy/exercism
c5d45e074e81b946a82a340b2730e0d2732b7e0a
[ "MIT" ]
null
null
null
elixir/bracket-push/bracket_push.exs
macborowy/exercism
c5d45e074e81b946a82a340b2730e0d2732b7e0a
[ "MIT" ]
null
null
null
elixir/bracket-push/bracket_push.exs
macborowy/exercism
c5d45e074e81b946a82a340b2730e0d2732b7e0a
[ "MIT" ]
null
null
null
defmodule BracketPush do @non_bracket_characters_pattern ~r/[^\{\}\[\]\(\)]/ @bracket_pairs_pattern ~r/(\{\}|\[\]|\(\))/ @doc """ Checks that all the brackets and braces in the string are matched correctly, and nested correctly """ @spec check_brackets(String.t) :: boolean def check_brackets(str) do trimmed = str |> remove_non_bracket_characters |> remove_all_bracket_pairs byte_size(trimmed) == 0 end def remove_non_bracket_characters(str) do String.replace(str, @non_bracket_characters_pattern, "") end def remove_all_bracket_pairs(str) do result = String.replace(str, @bracket_pairs_pattern, "") if String.contains?(result, ["{}", "[]", "()"]) do remove_all_bracket_pairs(result) else result end end end
24.71875
99
0.670038
ffcb8e719fe7dcba29194ed7edae41c3fc1bdacd
2,009
ex
Elixir
clients/you_tube/lib/google_api/you_tube/v3/model/video_abuse_report_reason_list_response.ex
GoNZooo/elixir-google-api
cf3ad7392921177f68091f3d9001f1b01b92f1cc
[ "Apache-2.0" ]
null
null
null
clients/you_tube/lib/google_api/you_tube/v3/model/video_abuse_report_reason_list_response.ex
GoNZooo/elixir-google-api
cf3ad7392921177f68091f3d9001f1b01b92f1cc
[ "Apache-2.0" ]
null
null
null
clients/you_tube/lib/google_api/you_tube/v3/model/video_abuse_report_reason_list_response.ex
GoNZooo/elixir-google-api
cf3ad7392921177f68091f3d9001f1b01b92f1cc
[ "Apache-2.0" ]
1
2018-07-28T20:50:50.000Z
2018-07-28T20:50:50.000Z
# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the &quot;License&quot;); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an &quot;AS IS&quot; BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This class is auto generated by the swagger code generator program. # https://github.com/swagger-api/swagger-codegen.git # Do not edit the class manually. defmodule GoogleApi.YouTube.V3.Model.VideoAbuseReportReasonListResponse do @moduledoc """ ## Attributes - etag (String.t): Etag of this resource. Defaults to: `null`. - eventId (String.t): Serialized EventId of the request which produced this response. Defaults to: `null`. - items ([VideoAbuseReportReason]): A list of valid abuse reasons that are used with video.ReportAbuse. Defaults to: `null`. - kind (String.t): Identifies what kind of resource this is. Value: the fixed string \&quot;youtube#videoAbuseReportReasonListResponse\&quot;. Defaults to: `null`. - visitorId (String.t): The visitorId identifies the visitor. Defaults to: `null`. """ defstruct [ :etag, :eventId, :items, :kind, :visitorId ] end defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.VideoAbuseReportReasonListResponse do import GoogleApi.YouTube.V3.Deserializer def decode(value, options) do value |> deserialize(:items, :list, GoogleApi.YouTube.V3.Model.VideoAbuseReportReason, options) end end defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.VideoAbuseReportReasonListResponse do def encode(value, options) do GoogleApi.YouTube.V3.Deserializer.serialize_non_nil(value, options) end end
36.527273
165
0.752613
ffcb976cf607f84d6d7a77ed39d8738314b75a6b
1,833
exs
Elixir
clients/cloud_build/mix.exs
kolorahl/elixir-google-api
46bec1e092eb84c6a79d06c72016cb1a13777fa6
[ "Apache-2.0" ]
null
null
null
clients/cloud_build/mix.exs
kolorahl/elixir-google-api
46bec1e092eb84c6a79d06c72016cb1a13777fa6
[ "Apache-2.0" ]
null
null
null
clients/cloud_build/mix.exs
kolorahl/elixir-google-api
46bec1e092eb84c6a79d06c72016cb1a13777fa6
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.CloudBuild.Mixfile do use Mix.Project @version "0.22.0" def project() do [ app: :google_api_cloud_build, version: @version, elixir: "~> 1.6", build_embedded: Mix.env == :prod, start_permanent: Mix.env == :prod, description: description(), package: package(), deps: deps(), source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/cloud_build" ] end def application() do [extra_applications: [:logger]] end defp deps() do [ {:google_gax, "~> 0.2"}, {:ex_doc, "~> 0.16", only: :dev} ] end defp description() do """ Cloud Build API client library. Creates and manages builds on Google Cloud Platform. """ end defp package() do [ files: ["lib", "mix.exs", "README*", "LICENSE"], maintainers: ["Jeff Ching", "Daniel Azuma"], licenses: ["Apache 2.0"], links: %{ "GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/cloud_build", "Homepage" => "https://cloud.google.com/cloud-build/docs/" } ] end end
27.358209
102
0.654664
ffcbbc74efd9784e8f6f2f18b9b6ae95c672dd6a
1,033
exs
Elixir
example/config/config.exs
anschwa/phoenix-tailwind
01ebf4cb6a733b84cb777088468c78f4aa0b735a
[ "MIT" ]
1
2021-02-01T12:01:15.000Z
2021-02-01T12:01:15.000Z
example/config/config.exs
anschwa/phoenix-tailwind
01ebf4cb6a733b84cb777088468c78f4aa0b735a
[ "MIT" ]
null
null
null
example/config/config.exs
anschwa/phoenix-tailwind
01ebf4cb6a733b84cb777088468c78f4aa0b735a
[ "MIT" ]
1
2021-02-01T12:01:46.000Z
2021-02-01T12:01:46.000Z
# This file is responsible for configuring your application # and its dependencies with the aid of the Mix.Config module. # # This configuration file is loaded before any dependency and # is restricted to this project. # General application configuration use Mix.Config config :example, ecto_repos: [Example.Repo] # Configures the endpoint config :example, ExampleWeb.Endpoint, url: [host: "localhost"], secret_key_base: "cSXKVglIa0SCdpfDjEc2CGVQkKTHWtSwl24062nt48ENIOVM8I4nvrlIxPLVWiGK", render_errors: [view: ExampleWeb.ErrorView, accepts: ~w(html json), layout: false], pubsub_server: Example.PubSub, live_view: [signing_salt: "17YRzNqh"] # Configures Elixir's Logger config :logger, :console, format: "$time $metadata[$level] $message\n", metadata: [:request_id] # Use Jason for JSON parsing in Phoenix config :phoenix, :json_library, Jason # Import environment specific config. This must remain at the bottom # of this file so it overrides the configuration defined above. import_config "#{Mix.env()}.exs"
32.28125
86
0.771539
ffcbd1c3fdbe83c35fb082913f604d6676943be0
925
ex
Elixir
test/support/channel_case.ex
jchristopherinc/mars
a109958cb549ede8d983c3af8183d52528a5eaea
[ "MIT" ]
2
2020-08-28T19:17:33.000Z
2020-09-13T18:49:20.000Z
test/support/channel_case.ex
jchristopherinc/mars
a109958cb549ede8d983c3af8183d52528a5eaea
[ "MIT" ]
5
2018-10-28T14:39:26.000Z
2019-01-31T17:23:36.000Z
test/support/channel_case.ex
jchristopherinc/mars
a109958cb549ede8d983c3af8183d52528a5eaea
[ "MIT" ]
null
null
null
defmodule MarsWeb.ChannelCase do @moduledoc """ This module defines the test case to be used by channel tests. Such tests rely on `Phoenix.ChannelTest` and also import other functionality to make it easier to build common data structures and query the data layer. Finally, if the test case interacts with the database, it cannot be async. For this reason, every test runs inside a transaction which is reset at the beginning of the test unless the test case is marked as async. """ use ExUnit.CaseTemplate alias Ecto.Adapters.SQL using do quote do # Import conveniences for testing with channels use Phoenix.ChannelTest # The default endpoint for testing @endpoint MarsWeb.Endpoint end end setup tags do :ok = SQL.Sandbox.checkout(Mars.Repo) unless tags[:async] do SQL.Sandbox.mode(Mars.Repo, {:shared, self()}) end :ok end end
23.125
59
0.708108
ffcc178a61bd06fc9b619ca700166c7038e9bb81
3,971
ex
Elixir
clients/display_video/lib/google_api/display_video/v1/model/campaign.ex
kyleVsteger/elixir-google-api
3a0dd498af066a4361b5b0fd66ffc04a57539488
[ "Apache-2.0" ]
null
null
null
clients/display_video/lib/google_api/display_video/v1/model/campaign.ex
kyleVsteger/elixir-google-api
3a0dd498af066a4361b5b0fd66ffc04a57539488
[ "Apache-2.0" ]
null
null
null
clients/display_video/lib/google_api/display_video/v1/model/campaign.ex
kyleVsteger/elixir-google-api
3a0dd498af066a4361b5b0fd66ffc04a57539488
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.DisplayVideo.V1.Model.Campaign do @moduledoc """ A single campaign. ## Attributes * `advertiserId` (*type:* `String.t`, *default:* `nil`) - Output only. The unique ID of the advertiser the campaign belongs to. * `campaignBudgets` (*type:* `list(GoogleApi.DisplayVideo.V1.Model.CampaignBudget.t)`, *default:* `nil`) - The list of budgets available to this campaign. Setting no budget gives an unlimited campaign budget. * `campaignFlight` (*type:* `GoogleApi.DisplayVideo.V1.Model.CampaignFlight.t`, *default:* `nil`) - Required. The planned spend and duration of the campaign. * `campaignGoal` (*type:* `GoogleApi.DisplayVideo.V1.Model.CampaignGoal.t`, *default:* `nil`) - Required. The goal of the campaign. * `campaignId` (*type:* `String.t`, *default:* `nil`) - Output only. The unique ID of the campaign. Assigned by the system. * `displayName` (*type:* `String.t`, *default:* `nil`) - Required. The display name of the campaign. Must be UTF-8 encoded with a maximum size of 240 bytes. * `entityStatus` (*type:* `String.t`, *default:* `nil`) - Required. Controls whether or not the insertion orders under this campaign can spend their budgets and bid on inventory. * Accepted values are `ENTITY_STATUS_ACTIVE`, `ENTITY_STATUS_ARCHIVED`, and `ENTITY_STATUS_PAUSED`. 
* For CreateCampaign method, `ENTITY_STATUS_ARCHIVED` is not allowed. * `frequencyCap` (*type:* `GoogleApi.DisplayVideo.V1.Model.FrequencyCap.t`, *default:* `nil`) - Required. The frequency cap setting of the campaign. * `name` (*type:* `String.t`, *default:* `nil`) - Output only. The resource name of the campaign. * `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. The timestamp when the campaign was last updated. Assigned by the system. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :advertiserId => String.t() | nil, :campaignBudgets => list(GoogleApi.DisplayVideo.V1.Model.CampaignBudget.t()) | nil, :campaignFlight => GoogleApi.DisplayVideo.V1.Model.CampaignFlight.t() | nil, :campaignGoal => GoogleApi.DisplayVideo.V1.Model.CampaignGoal.t() | nil, :campaignId => String.t() | nil, :displayName => String.t() | nil, :entityStatus => String.t() | nil, :frequencyCap => GoogleApi.DisplayVideo.V1.Model.FrequencyCap.t() | nil, :name => String.t() | nil, :updateTime => DateTime.t() | nil } field(:advertiserId) field(:campaignBudgets, as: GoogleApi.DisplayVideo.V1.Model.CampaignBudget, type: :list) field(:campaignFlight, as: GoogleApi.DisplayVideo.V1.Model.CampaignFlight) field(:campaignGoal, as: GoogleApi.DisplayVideo.V1.Model.CampaignGoal) field(:campaignId) field(:displayName) field(:entityStatus) field(:frequencyCap, as: GoogleApi.DisplayVideo.V1.Model.FrequencyCap) field(:name) field(:updateTime, as: DateTime) end defimpl Poison.Decoder, for: GoogleApi.DisplayVideo.V1.Model.Campaign do def decode(value, options) do GoogleApi.DisplayVideo.V1.Model.Campaign.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.DisplayVideo.V1.Model.Campaign do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
53.662162
352
0.712919
ffcc3e8f07191b8e237fbecbc0214c928517e264
2,585
exs
Elixir
apps/blunt/test/blunt/message_test.exs
blunt-elixir/blunt
a88b88984022db7ba2110204248fdb541121e3a0
[ "MIT" ]
1
2022-03-07T11:54:47.000Z
2022-03-07T11:54:47.000Z
apps/blunt/test/blunt/message_test.exs
elixir-cqrs/cqrs_tools
afbf82da522a10d2413547a46f316ed3aadebba5
[ "MIT" ]
null
null
null
apps/blunt/test/blunt/message_test.exs
elixir-cqrs/cqrs_tools
afbf82da522a10d2413547a46f316ed3aadebba5
[ "MIT" ]
null
null
null
defmodule Blunt.MessageTest do use ExUnit.Case, async: true alias Blunt.Message.Metadata alias Blunt.MessageTest.Protocol describe "simple message" do alias Protocol.Simple test "is struct" do %Simple{name: nil} = struct!(Simple) end test "is ecto schema" do assert [:name] == Simple.__schema__(:fields) end test "has changeset function" do assert [1, 2] == Simple.__info__(:functions) |> Keyword.get_values(:changeset) end test "has constructor function" do assert [0, 1, 2, 3] == Simple.__info__(:functions) |> Keyword.get_values(:new) end test "has message_type" do assert :message == Metadata.message_type(Simple) end end test "internal fields are never required" do alias Protocol.MessageWithInternalField, as: Msg assert [:id] == Metadata.field_names(Msg) required_fields = Metadata.field_names(Msg, :required) refute Enum.member?(required_fields, :id) end describe "field options" do alias Protocol.FieldOptions test "autogenerate field" do today = Date.utc_today() assert {:ok, %FieldOptions{today: ^today}} = FieldOptions.new(name: "chris", weed: :yes) end test "name is required" do assert {:error, %{name: ["can't be blank"]}} = FieldOptions.new(%{}) assert {:ok, %FieldOptions{gender: nil, name: "chris"}} = FieldOptions.new(%{name: "chris"}) end test "can accept values from different data structures" do assert {:ok, %FieldOptions{gender: nil, name: "chris"}} = FieldOptions.new(%{name: "chris"}) assert {:ok, %FieldOptions{gender: nil, name: "chris"}} = FieldOptions.new(name: "chris") end test "dog defaults to the default option" do assert {:ok, %FieldOptions{gender: :m, name: "chris", dog: "maize"}} = FieldOptions.new(name: "chris", gender: :m) end end describe "required fields with defaults" do defmodule ReqFieldWithDefaultMessage do use Blunt.Message field :validate, :boolean, default: false, required: true end test "will be set to default value if value passed is nil" do assert {:ok, %{validate: false}} = ReqFieldWithDefaultMessage.new(validate: nil) end end describe 
"static fields" do defmodule MessageWithStaticField do use Blunt.Message static_field(:name, :string, default: "chris") end test "are unsettable" do assert {:ok, %{name: "chris"}} = MessageWithStaticField.new(name: "flkajds") assert {:ok, %{name: "chris"}} = MessageWithStaticField.new() end end end
29.712644
120
0.662669
ffcc542f3de7ad631426ddaa771f7319e4910f71
459
ex
Elixir
lib/appsignal/diagnose/library.ex
sparta-science/appsignal-elixir
4710b4e7ec514a54b82d5c2561f72369ff48e2e2
[ "MIT" ]
null
null
null
lib/appsignal/diagnose/library.ex
sparta-science/appsignal-elixir
4710b4e7ec514a54b82d5c2561f72369ff48e2e2
[ "MIT" ]
null
null
null
lib/appsignal/diagnose/library.ex
sparta-science/appsignal-elixir
4710b4e7ec514a54b82d5c2561f72369ff48e2e2
[ "MIT" ]
null
null
null
defmodule Appsignal.Diagnose.Library do @appsignal_version Mix.Project.config[:version] @agent_version Appsignal.Nif.agent_version @nif Application.get_env(:appsignal, :appsignal_nif, Appsignal.Nif) def info do %{ language: "elixir", agent_version: @agent_version, agent_architecture: Appsignal.System.installed_agent_architecture, package_version: @appsignal_version, extension_loaded: @nif.loaded? } end end
28.6875
72
0.742919
ffcc6965b03654e20955db0f560701a30be9d29b
473
ex
Elixir
lib/image.ex
mauricius/elixir-identicon
ebf967f18a5bb6a39d03969e9dd045d318e95d30
[ "MIT" ]
null
null
null
lib/image.ex
mauricius/elixir-identicon
ebf967f18a5bb6a39d03969e9dd045d318e95d30
[ "MIT" ]
null
null
null
lib/image.ex
mauricius/elixir-identicon
ebf967f18a5bb6a39d03969e9dd045d318e95d30
[ "MIT" ]
null
null
null
defmodule Identicon.Image do @typedoc """ Type that represents Image struct. """ defstruct hex: nil, color: nil, grid: nil, pixel_map: nil @type t(hex, color, grid, pixel_map) :: %Identicon.Image{ hex: hex, color: color, grid: grid, pixel_map: pixel_map } @type t :: %Identicon.Image{ hex: list(integer()), color: tuple(), grid: list(), pixel_map: list() } end
22.52381
59
0.53277
ffcca92f5901473c1708517b92e7daf9a02c7d4e
1,066
ex
Elixir
lib/vnu/message_filter.ex
angelikatyborska/vnu-elixir
c12676e41b7fba3b8acf98812f0e3a054c298458
[ "MIT" ]
51
2020-04-11T22:30:43.000Z
2022-01-14T13:24:56.000Z
lib/vnu/message_filter.ex
angelikatyborska/vnu-elixir
c12676e41b7fba3b8acf98812f0e3a054c298458
[ "MIT" ]
20
2020-04-13T12:20:49.000Z
2022-03-29T18:32:41.000Z
lib/vnu/message_filter.ex
angelikatyborska/vnu-elixir
c12676e41b7fba3b8acf98812f0e3a054c298458
[ "MIT" ]
null
null
null
defmodule Vnu.MessageFilter do @moduledoc """ A behavior for excluding certain messages from the validation result. Modules implementing this behavior can be passed as a `:filter` option to: - `Vnu.validate_html/2`, `Vnu.validate_css/2`, `Vnu.validate_svg/2` - `Vnu.Assertions.assert_valid_html/2`, `Vnu.Assertions.assert_valid_css/2`. `Vnu.Assertions.assert_valid_svg/2` ## Example ``` defmodule MyApp.VnuMessageFilter do @behaviour Vnu.MessageFilter @impl Vnu.MessageFilter def exclude_message?(%Vnu.Message{message: message}) do # those errors are caused by the CSRF meta tag (`csrf_meta_tag()`) # present in the layout of a newly-generated Phoenix app patterns_to_ignore = [ ~r/A document must not include more than one “meta” element with a “charset” attribute./, ~r/Attribute “(.)*” not allowed on element “meta” at this point./ ] Enum.any?(patterns_to_ignore, &Regex.match?(&1, message)) end end ``` """ @callback exclude_message?(Vnu.Message.t()) :: true | false end
34.387097
114
0.694184
ffccc66b138eaa93c888fdec0b9a000d7daa6240
3,699
ex
Elixir
apps/proxy/test/support/ex_chain/fake_ex_chain.ex
b-pmcg/staxx
6cff299620ab4835ceca772357d1925737f2e528
[ "Apache-2.0" ]
null
null
null
apps/proxy/test/support/ex_chain/fake_ex_chain.ex
b-pmcg/staxx
6cff299620ab4835ceca772357d1925737f2e528
[ "Apache-2.0" ]
null
null
null
apps/proxy/test/support/ex_chain/fake_ex_chain.ex
b-pmcg/staxx
6cff299620ab4835ceca772357d1925737f2e528
[ "Apache-2.0" ]
null
null
null
defmodule Staxx.Proxy.ExChain.FakeExChain do @behaviour Staxx.Proxy.ExChain use GenServer def start_link(_), do: GenServer.start_link(__MODULE__, %{chains: %{}, snapshots: %{}}, name: __MODULE__) @impl GenServer def init(state), do: {:ok, state} @impl GenServer def handle_call(:chains, _, %{chains: chains} = state), do: {:reply, chains, state} @impl GenServer def handle_call(:snapshots, _, %{snapshots: snapshots} = state), do: {:reply, snapshots, state} @impl GenServer def handle_call({:start, node, %{id: id} = config}, _, %{chains: chains} = state) do case Map.get(chains, id) do nil -> {:reply, {:ok, id}, %{state | chains: Map.put(chains, id, %{status: :started, node: node, config: config})}} _res -> {:reply, {:error, :already_started}, state} end end @impl GenServer def handle_call({:start_existring, id, pid}, _, %{chains: chains} = state) do case Map.get(chains, id) do nil -> {:reply, {:error, :not_exist}, state} %{status: :stopped, config: config} = chain -> {:reply, {:ok, id}, %{ state | chains: Map.put(chains, id, %{ chain | status: :started, config: %{config | notify_pid: pid} }) }} end end @impl GenServer def handle_call({:stop, id}, _, %{chains: chains} = state) do case Map.get(chains, id) do nil -> {:reply, {:error, :not_exist}, state} %{config: config} = chain -> if pid = Map.get(config, :notify_pid) do send(pid, %Staxx.ExChain.EVM.Notification{id: id, event: :stopped}) end {:reply, :ok, %{state | chains: Map.put(chains, id, %{chain | status: :stopped})}} end end @impl GenServer def handle_call({:clean, id}, _, %{chains: chains} = state), do: {:reply, :ok, %{state | chains: Map.delete(chains, id)}} @impl true def child_spec() do [ Staxx.Proxy.ExChain.FakeExChain ] end @impl true def unique_id(_node) do <<new_unique_id::big-integer-size(8)-unit(8)>> = :crypto.strong_rand_bytes(8) to_string(new_unique_id) end @impl true def chain_list(_node) do __MODULE__ |> GenServer.call(:chains) |> Enum.map(fn {_, v} -> v end) |> Enum.to_list() end @impl true def 
start_existing(_node, id, pid), do: GenServer.call(__MODULE__, {:start_existring, id, pid}) @impl true def start(node, config), do: GenServer.call(__MODULE__, {:start, node, config}) @impl true def new_notify_pid(_node, _id, _pid), do: :ok @impl true def stop(_node, id), do: GenServer.call(__MODULE__, {:stop, id}) @impl true def clean(_node, id), do: GenServer.call(__MODULE__, {:clean, id}) @impl true def details(_node, id), do: GenServer.call(__MODULE__, {:details, id}) @impl true def take_snapshot(_node, _id, _description \\ ""), do: :ok @impl true def revert_snapshot(_node, _id, _snapshot), do: :ok @impl true def load_snapshot(_node, _snapshot_id), do: :ok @impl true def snapshot_list(_node, _chain) do __MODULE__ |> GenServer.call(:snapshots) |> Enum.map(fn {_, v} -> v end) |> Enum.to_list() end @impl true def get_snapshot(_node, _snapshot_id), do: :ok @impl true def upload_snapshot(_node, _snapshot_id, _chain_type, _description \\ ""), do: :ok @impl true def remove_snapshot(_node, _snapshot_id), do: :ok @impl true def write_external_data(_node, _id, _data), do: :ok @impl true def read_external_data(_node, _id), do: :ok @impl true def version(_node), do: "v 1.0.0" end
24.496689
97
0.601244
ffccd9f9ae25afe1a94ba2549b2d52edab78cdbd
237
exs
Elixir
Chapter08/vocial-demo-chapter-8/priv/repo/migrations/20171125044437_add_vote_records_table.exs
PacktPublishing/Phoenix-Web-Development
a071392abe2a459be1896580446b006126c393bf
[ "MIT" ]
10
2018-05-13T14:53:05.000Z
2021-11-08T13:10:44.000Z
Chapter09/vocial-demo-chapter-9/priv/repo/migrations/20171125044437_add_vote_records_table.exs
PacktPublishing/Phoenix-Web-Development
a071392abe2a459be1896580446b006126c393bf
[ "MIT" ]
null
null
null
Chapter09/vocial-demo-chapter-9/priv/repo/migrations/20171125044437_add_vote_records_table.exs
PacktPublishing/Phoenix-Web-Development
a071392abe2a459be1896580446b006126c393bf
[ "MIT" ]
2
2019-04-23T10:54:33.000Z
2019-04-27T15:47:07.000Z
defmodule Vocial.Repo.Migrations.AddVoteRecordsTable do use Ecto.Migration def change do create table(:vote_records) do add :ip_address, :string add :poll_id, references(:polls) timestamps() end end end
18.230769
55
0.696203
ffccfaed49551c7b8e647c3eb749f7bd5659aae9
1,717
ex
Elixir
apps/tai/lib/tai/venue_adapters/bitmex/cancel_order.ex
ccamateur/tai
41c4b3e09dafc77987fa3f6b300c15461d981e16
[ "MIT" ]
276
2018-01-16T06:36:06.000Z
2021-03-20T21:48:01.000Z
apps/tai/lib/tai/venue_adapters/bitmex/cancel_order.ex
ccamateur/tai
41c4b3e09dafc77987fa3f6b300c15461d981e16
[ "MIT" ]
73
2018-10-05T18:45:06.000Z
2021-02-08T05:46:33.000Z
apps/tai/lib/tai/venue_adapters/bitmex/cancel_order.ex
ccamateur/tai
41c4b3e09dafc77987fa3f6b300c15461d981e16
[ "MIT" ]
43
2018-06-09T09:54:51.000Z
2021-03-07T07:35:17.000Z
defmodule Tai.VenueAdapters.Bitmex.CancelOrder do alias Tai.Orders.Responses @type order :: Tai.Orders.Order.t() @type credentials :: map @type response :: Responses.CancelAccepted.t() @type reason :: :timeout | :overloaded | {:nonce_not_increasing, msg :: String.t()} | {:unhandled, term} @spec cancel_order(order, credentials) :: {:ok, response} | {:error, reason} def cancel_order(order, credentials) do credentials |> to_venue_credentials |> send_to_venue(%{orderID: order.venue_order_id}) |> parse_response() end defdelegate to_venue_credentials(credentials), to: Tai.VenueAdapters.Bitmex.Credentials, as: :from defdelegate send_to_venue(credentials, params), to: ExBitmex.Rest.Orders, as: :cancel defp parse_response({:ok, [venue_order | _], %ExBitmex.RateLimit{}}) do received_at = Tai.Time.monotonic_time() {:ok, venue_timestamp, 0} = DateTime.from_iso8601(venue_order.timestamp) response = %Responses.CancelAccepted{ id: venue_order.order_id, received_at: received_at, venue_timestamp: venue_timestamp } {:ok, response} end defp parse_response({:error, :timeout, nil}) do {:error, :timeout} end defp parse_response({:error, :connect_timeout, nil}) do {:error, :connect_timeout} end defp parse_response({:error, :overloaded, _}) do {:error, :overloaded} end defp parse_response({:error, :rate_limited, _}) do {:error, :rate_limited} end defp parse_response({:error, {:nonce_not_increasing, _} = reason, _}) do {:error, reason} end defp parse_response({:error, reason, _}) do {:error, {:unhandled, reason}} end end
26.015152
78
0.670355
ffcd0783b65799759180f506d5d9c7369dae390d
1,434
ex
Elixir
lib/airtable_config.ex
justicedemocrats/airtable_config
8342a93956a515ddbe169a8163b1acc25962f9d2
[ "MIT" ]
null
null
null
lib/airtable_config.ex
justicedemocrats/airtable_config
8342a93956a515ddbe169a8163b1acc25962f9d2
[ "MIT" ]
null
null
null
lib/airtable_config.ex
justicedemocrats/airtable_config
8342a93956a515ddbe169a8163b1acc25962f9d2
[ "MIT" ]
null
null
null
defmodule AirtableConfig do defmacro __using__(_opts \\ []) do quote do def start_link do Agent.start_link( fn -> fetch_all([], 0) end, name: __MODULE__ ) end def update() do try do config = fetch_all([], 0) Agent.update(__MODULE__, fn _ -> config end) IO.puts("#{table()}: updated at #{inspect(DateTime.utc_now())}") rescue error -> IO.puts( "Could not updated config at #{inspect(DateTime.utc_now())}: #{inspect(error)}. Will try again next cycle." ) end end def get_all do Agent.get(__MODULE__, & &1) end defp fetch_all(prev_records, offset) do %{body: body} = HTTPotion.get( "https://api.airtable.com/v0/#{base}/#{table}", headers: [ Authorization: "Bearer #{key}" ], query: [offset: offset, view: view()] ) decoded = Poison.decode!(body) new_records = decoded["records"] |> Enum.filter(&filter_record/1) |> Enum.map(&process_record/1) |> Enum.concat(prev_records) if Map.has_key?(decoded, "offset") do fetch_all(new_records, decoded["offset"]) else Enum.into(new_records, into_what()) end end end end end
25.157895
121
0.502092
ffcd21e4362bf7261f1e6da3971bac22cf71467d
285
exs
Elixir
bench/vector/uniq.exs
sabiwara/aja
cde91e4263e54a11a1685a777dbffd4912fe3864
[ "MIT" ]
95
2020-10-18T09:27:46.000Z
2022-03-29T20:03:16.000Z
bench/vector/uniq.exs
sabiwara/aja
cde91e4263e54a11a1685a777dbffd4912fe3864
[ "MIT" ]
1
2021-09-22T20:30:08.000Z
2021-10-13T23:55:34.000Z
bench/vector/uniq.exs
sabiwara/aja
cde91e4263e54a11a1685a777dbffd4912fe3864
[ "MIT" ]
1
2020-12-15T12:36:16.000Z
2020-12-15T12:36:16.000Z
list = Enum.to_list(1..50) ++ Enum.to_list(50..1) vector = Aja.Vector.new(list) Benchee.run(%{ "Aja.Vector.uniq/1 (vector)" => fn -> Aja.Vector.uniq(vector) end, "Aja.Enum.uniq/1 (vector)" => fn -> Aja.Enum.uniq(vector) end, "Enum.uniq/2 (list)" => fn -> Enum.uniq(list) end })
31.666667
68
0.624561
ffcd243ef43a0693a9a066563d7c09859b5b4b1e
3,739
ex
Elixir
lib/pdf_party/reader/xref/table.ex
luisgabrielroldan/pdf_party
f26fd69a05a9050a1a8faaa226c0c3ad5ba1c6db
[ "MIT" ]
4
2018-10-26T02:11:14.000Z
2019-04-25T20:59:52.000Z
lib/pdf_party/reader/xref/table.ex
luisgabrielroldan/pdf_party
f26fd69a05a9050a1a8faaa226c0c3ad5ba1c6db
[ "MIT" ]
1
2018-10-26T21:20:40.000Z
2018-10-26T21:20:40.000Z
lib/pdf_party/reader/xref/table.ex
luisgabrielroldan/pdf_party
f26fd69a05a9050a1a8faaa226c0c3ad5ba1c6db
[ "MIT" ]
1
2018-10-26T02:11:17.000Z
2018-10-26T02:11:17.000Z
defmodule PDFParty.Reader.XRef.TableParser do @moduledoc """ XRef Table parser """ alias PDFParty.Reader.{ IOEx, Numbers, Parser, XRef } def parse(io_device, start_offset) do with {:ok, entries, trailer} <- read_xref_table(start_offset, io_device) do document = %XRef{ entries: entries, size: Map.get(trailer, "Size"), root: Map.get(trailer, "Root"), info: Map.get(trailer, "Info") } if length(entries) == document.size do {:ok, document} else {:error, :xref_entries_mismatch} end end end defp read_xref_table(start_offset, io_device) do with {:ok, _} <- :file.position(io_device, start_offset), {:skip_token, "xref" <> _} <- {:skip_token, IOEx.read_line(io_device)}, {:ok, entries} <- read_sections(io_device), {:ok, trailer} <- read_trailer(start_offset, io_device) do case Map.get(trailer, "Prev") do nil -> {:ok, entries, trailer} prev_xref when is_integer(prev_xref) -> with {:ok, prev_entries, _} <- read_xref_table(prev_xref, io_device) do {:ok, merge_entries(prev_entries ++ entries), trailer} end _ -> {:error, :xref_invalid_format} end else {:skip_token, _} -> {:error, :xref_invalid_format} error -> error end end defp merge_entries(list, acc \\ %{}) defp merge_entries([], acc), do: Map.values(acc) defp merge_entries([{id, _offset, _gen, _state} = entry | rest], acc), do: merge_entries(rest, Map.put(acc, id, entry)) defp read_sections(io_device), do: read_sections(io_device, []) defp read_sections(io_device, acc) do with {:ok, id, length} <- read_section_header(io_device), {:ok, entries} <- read_xref_entries(io_device, id, length) do read_sections(io_device, acc ++ entries) else :error -> if length(acc) == 0 do {:error, :xref_invalid_format} else {:ok, acc} end {:error, :xref_invalid_format} -> {:error, :xref_invalid_format} end end defp read_section_header(io_device) do with line when is_binary(line) <- IOEx.read_line(io_device), [[id], [length]] <- Regex.scan(~r/[0-9]+/, line) do {:ok, Numbers.parse_int!(id), Numbers.parse_int!(length)} else _ -> :error end end defp 
read_xref_entries(io, id, left, acc \\ []) defp read_xref_entries(_io, _id, 0, acc), do: {:ok, acc} defp read_xref_entries(io_device, id, left, acc) do line = IOEx.read_line(io_device) case Regex.scan(~r/[0-9fn]+/, line) do [[offset], [gen], [state]] -> offset = Numbers.parse_int!(offset) gen = Numbers.parse_int!(gen) state = case state do "n" -> :n _ -> :f end entry = {id, gen, offset, state} read_xref_entries(io_device, id + 1, left - 1, acc ++ [entry]) _ -> {:error, :xref_invalid_format} end end defp read_trailer(start_offset, io_device) do :file.position(io_device, start_offset) with :ok <- lookup_trailer(io_device), {:ok, [trailer]} when is_map(trailer) <- Parser.parse(io_device) do {:ok, trailer} else {:ok, _} -> {:error, :trailer_not_found} error -> error end end defp lookup_trailer(io_device) do {:ok, pos} = :file.position(io_device, :cur) case IOEx.read_line(io_device) do "trailer" -> :file.position(io_device, pos) :ok :eof -> {:error, :trailer_not_found} _ -> lookup_trailer(io_device) end end end
24.437908
81
0.577695
ffcd3b4b5545801b5ef74aabaf1ef125b69d6757
3,841
ex
Elixir
lib/zaryn/shared_secrets/mem_tables_loader.ex
ambareesha7/node-zaryn
136e542801bf9b6fa4a015d3464609fdf3dacee8
[ "Apache-2.0" ]
1
2021-07-06T19:47:14.000Z
2021-07-06T19:47:14.000Z
lib/zaryn/shared_secrets/mem_tables_loader.ex
ambareesha7/node-zaryn
136e542801bf9b6fa4a015d3464609fdf3dacee8
[ "Apache-2.0" ]
null
null
null
lib/zaryn/shared_secrets/mem_tables_loader.ex
ambareesha7/node-zaryn
136e542801bf9b6fa4a015d3464609fdf3dacee8
[ "Apache-2.0" ]
null
null
null
defmodule Zaryn.SharedSecrets.MemTablesLoader do @moduledoc false use GenServer alias Zaryn.Crypto alias Zaryn.SharedSecrets.MemTables.NetworkLookup alias Zaryn.SharedSecrets.MemTables.OriginKeyLookup alias Zaryn.SharedSecrets.NodeRenewal alias Zaryn.SharedSecrets.NodeRenewalScheduler alias Zaryn.TransactionChain alias Zaryn.TransactionChain.Transaction alias Zaryn.TransactionChain.Transaction.ValidationStamp alias Zaryn.TransactionChain.TransactionData require Logger def start_link(args \\ []) do GenServer.start_link(__MODULE__, args, name: __MODULE__) end def init(_args) do [ fn -> TransactionChain.list_transactions_by_type(:origin_shared_secrets, [ :type, data: [:content] ]) end, fn -> TransactionChain.list_transactions_by_type(:node, [:type, :previous_public_key]) end, fn -> TransactionChain.list_transactions_by_type(:node_shared_secrets, [ :type, data: [:content], validation_stamp: [:timestamp] ]) end ] |> Task.async_stream(&load_transactions(&1.())) |> Stream.run() {:ok, []} end defp load_transactions(transactions) do transactions |> Stream.each(&load_transaction/1) |> Stream.run() end @doc """ Load the transaction into the memory table """ @spec load_transaction(Transaction.t()) :: :ok def load_transaction(%Transaction{type: :node, previous_public_key: previous_public_key}) do first_public_key = TransactionChain.get_first_public_key(previous_public_key) unless OriginKeyLookup.has_public_key?(first_public_key) do <<_::8, origin_id::8, _::binary>> = previous_public_key family = case Crypto.key_origin(origin_id) do :software -> :software :tpm -> :hardware end :ok = OriginKeyLookup.add_public_key(family, previous_public_key) Logger.info("Load origin public key #{Base.encode16(previous_public_key)} - #{family}") end :ok end def load_transaction(%Transaction{ type: :origin_shared_secrets, data: %TransactionData{content: content} }) do content |> get_origin_public_keys(%{software: [], hardware: []}) |> Enum.each(fn {family, keys} -> Enum.each(keys, fn key -> :ok = 
OriginKeyLookup.add_public_key(family, key) Logger.info("Load origin public key #{Base.encode16(key)} - #{family}") end) end) end def load_transaction(%Transaction{ type: :node_shared_secrets, data: %TransactionData{content: content}, validation_stamp: %ValidationStamp{ timestamp: timestamp } }) do {:ok, daily_nonce_public_key, network_pool_address} = NodeRenewal.decode_transaction_content(content) NetworkLookup.set_network_pool_address(network_pool_address) NetworkLookup.set_daily_nonce_public_key( daily_nonce_public_key, NodeRenewalScheduler.next_application_date(timestamp) ) Logger.info("Load daily nonce public key: #{Base.encode16(daily_nonce_public_key)}") end def load_transaction(%Transaction{type: :node_rewards, address: address}) do NetworkLookup.set_network_pool_address(address) end def load_transaction(_), do: :ok defp get_origin_public_keys(<<>>, acc), do: acc defp get_origin_public_keys(<<curve_id::8, origin_id::8, rest::binary>>, acc) do key_size = Crypto.key_size(curve_id) <<key::binary-size(key_size), rest::binary>> = rest family = case Crypto.key_origin(origin_id) do :software -> :software :tpm -> :hardware end get_origin_public_keys( rest, Map.update!(acc, family, &[<<curve_id::8, origin_id::8, key::binary>> | &1]) ) end end
27.241135
97
0.672481
ffcd71cf9e0ac44e16244547da1c58b32a26f85e
716
ex
Elixir
web/gettext.ex
zcdunn/elixirstatus-web
9df758dce01c676403effdeb3231db0529285e6c
[ "MIT" ]
299
2015-06-24T09:14:27.000Z
2022-03-03T13:31:59.000Z
web/gettext.ex
zcdunn/elixirstatus-web
9df758dce01c676403effdeb3231db0529285e6c
[ "MIT" ]
63
2015-07-04T19:42:12.000Z
2021-12-10T14:27:28.000Z
web/gettext.ex
zcdunn/elixirstatus-web
9df758dce01c676403effdeb3231db0529285e6c
[ "MIT" ]
49
2015-07-06T13:42:43.000Z
2021-12-22T21:38:04.000Z
defmodule ElixirStatus.Gettext do @moduledoc """ A module providing Internationalization with a gettext-based API. By using [Gettext](http://hexdocs.pm/gettext), your module gains a set of macros for translations, for example: import ElixirStatus.Gettext # Simple translation gettext "Here is the string to translate" # Plural translation ngettext "Here is the string to translate", "Here are the strings to translate", 3 # Domain-based translation dgettext "errors", "Here is the error message to translate" See the [Gettext Docs](http://hexdocs.pm/gettext) for detailed usage. """ use Gettext, otp_app: :elixir_status end
28.64
71
0.684358
ffcd7de2d7d063e6d1f7d339f25f20d67913e121
1,681
ex
Elixir
clients/you_tube/lib/google_api/you_tube/v3/model/activity_content_details_comment.ex
nuxlli/elixir-google-api
ecb8679ac7282b7dd314c3e20c250710ec6a7870
[ "Apache-2.0" ]
null
null
null
clients/you_tube/lib/google_api/you_tube/v3/model/activity_content_details_comment.ex
nuxlli/elixir-google-api
ecb8679ac7282b7dd314c3e20c250710ec6a7870
[ "Apache-2.0" ]
null
null
null
clients/you_tube/lib/google_api/you_tube/v3/model/activity_content_details_comment.ex
nuxlli/elixir-google-api
ecb8679ac7282b7dd314c3e20c250710ec6a7870
[ "Apache-2.0" ]
1
2020-11-10T16:58:27.000Z
2020-11-10T16:58:27.000Z
# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the &quot;License&quot;); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an &quot;AS IS&quot; BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This class is auto generated by the swagger code generator program. # https://github.com/swagger-api/swagger-codegen.git # Do not edit the class manually. defmodule GoogleApi.YouTube.V3.Model.ActivityContentDetailsComment do @moduledoc """ Information about a resource that received a comment. ## Attributes - resourceId (ResourceId): The resourceId object contains information that identifies the resource associated with the comment. Defaults to: `null`. """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :resourceId => GoogleApi.YouTube.V3.Model.ResourceId.t() } field(:resourceId, as: GoogleApi.YouTube.V3.Model.ResourceId) end defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.ActivityContentDetailsComment do def decode(value, options) do GoogleApi.YouTube.V3.Model.ActivityContentDetailsComment.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.ActivityContentDetailsComment do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
35.020833
150
0.764426
ffcdc0eb29a36750426a60e23579cfe3ac7ca078
791
exs
Elixir
config/jsonb.exs
scudelletti/eventstore
9f795d45f1a8ca4c72d0614df24586dcb2ecfa12
[ "MIT" ]
null
null
null
config/jsonb.exs
scudelletti/eventstore
9f795d45f1a8ca4c72d0614df24586dcb2ecfa12
[ "MIT" ]
null
null
null
config/jsonb.exs
scudelletti/eventstore
9f795d45f1a8ca4c72d0614df24586dcb2ecfa12
[ "MIT" ]
null
null
null
use Mix.Config config :logger, backends: [] config :ex_unit, capture_log: true, assert_receive_timeout: 2_000, refute_receive_timeout: 100 default_config = [ column_data_type: "jsonb", username: "postgres", password: "postgres", database: "eventstore_jsonb_test", hostname: "localhost", pool_size: 1, pool_overflow: 0, registry: :local, serializer: EventStore.JsonbSerializer, subscription_retry_interval: 1_000, types: EventStore.PostgresTypes ] config :eventstore, TestEventStore, default_config config :eventstore, SecondEventStore, Keyword.put(default_config, :database, "eventstore_jsonb_test_2") config :eventstore, SchemaEventStore, default_config config :eventstore, event_stores: [TestEventStore, SecondEventStore, SchemaEventStore]
23.969697
86
0.768647
ffcdedd3cec2e7f075790c1f55a604315f2e78cd
329
ex
Elixir
lib/smlr/compressor/zstd.ex
data-twister/smlr
6c5bbb5d45feb1426c643fef3d714dcee039ad00
[ "MIT" ]
5
2020-03-03T08:33:50.000Z
2021-02-22T01:19:01.000Z
lib/smlr/compressor/zstd.ex
data-twister/smlr
6c5bbb5d45feb1426c643fef3d714dcee039ad00
[ "MIT" ]
null
null
null
lib/smlr/compressor/zstd.ex
data-twister/smlr
6c5bbb5d45feb1426c643fef3d714dcee039ad00
[ "MIT" ]
1
2021-09-27T11:11:09.000Z
2021-09-27T11:11:09.000Z
defmodule Smlr.Compressor.Zstd do @moduledoc false @behaviour Smlr.Compressor alias Smlr.Config def name do "zstd" end def default_level do 4 end def level(opts) do Config.get_compressor_level(__MODULE__, opts) end def compress(data, opts) do :zstd.compress(data, level(opts)) end end
13.708333
49
0.693009
ffce0281f02e74a55fd30c2469dd36c1e7fd9618
139
ex
Elixir
pulsar/lib/pulsar_web/controllers/page_controller.ex
Dermah/pulsar.wtf
fed5734578eb4c8b93bd1cdcb6e2f6d894a4af9e
[ "MIT" ]
null
null
null
pulsar/lib/pulsar_web/controllers/page_controller.ex
Dermah/pulsar.wtf
fed5734578eb4c8b93bd1cdcb6e2f6d894a4af9e
[ "MIT" ]
1
2021-03-09T21:33:42.000Z
2021-03-09T21:33:42.000Z
pulsar/lib/pulsar_web/controllers/page_controller.ex
Dermah/pulsar.wtf
fed5734578eb4c8b93bd1cdcb6e2f6d894a4af9e
[ "MIT" ]
null
null
null
defmodule PulsarWeb.PageController do use PulsarWeb, :controller def index(conn, _params) do render(conn, "index.html") end end
17.375
37
0.733813
ffce0f3a600aa93c69290d6f5530b8e368d61287
1,736
ex
Elixir
web/web.ex
rustedgrail/where_its_due
2140006ecfbe29e9d4ebc1895b147a76b5376fc3
[ "MIT" ]
null
null
null
web/web.ex
rustedgrail/where_its_due
2140006ecfbe29e9d4ebc1895b147a76b5376fc3
[ "MIT" ]
null
null
null
web/web.ex
rustedgrail/where_its_due
2140006ecfbe29e9d4ebc1895b147a76b5376fc3
[ "MIT" ]
null
null
null
defmodule WhereItsDue.Web do @moduledoc """ A module that keeps using definitions for controllers, views and so on. This can be used in your application as: use WhereItsDue.Web, :controller use WhereItsDue.Web, :view The definitions below will be executed for every view, controller, etc, so keep them short and clean, focused on imports, uses and aliases. Do NOT define functions inside the quoted expressions below. """ def model do quote do use Ecto.Schema import Ecto import Ecto.Changeset import Ecto.Query, only: [from: 1, from: 2] end end def controller do quote do use Phoenix.Controller alias WhereItsDue.Repo import Ecto import Ecto.Query, only: [from: 1, from: 2] import WhereItsDue.Router.Helpers import WhereItsDue.Gettext end end def view do quote do use Phoenix.View, root: "web/templates" # Import convenience functions from controllers import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1] # Use all HTML functionality (forms, tags, etc) use Phoenix.HTML import WhereItsDue.Router.Helpers import WhereItsDue.ErrorHelpers import WhereItsDue.Gettext end end def router do quote do use Phoenix.Router end end def channel do quote do use Phoenix.Channel alias WhereItsDue.Repo import Ecto import Ecto.Query, only: [from: 1, from: 2] import WhereItsDue.Gettext end end @doc """ When used, dispatch to the appropriate controller/view/etc. """ defmacro __using__(which) when is_atom(which) do apply(__MODULE__, which, []) end end
21.170732
88
0.668779
ffce4e30eb0d41fccba67d7528880bd1200541f6
1,115
ex
Elixir
server/lib/idai_field_server_web/channels/user_socket.ex
felixwolter/idai-field
146ab8dbdedb23035a4ba19eac95f02a1fa2329f
[ "Apache-2.0" ]
null
null
null
server/lib/idai_field_server_web/channels/user_socket.ex
felixwolter/idai-field
146ab8dbdedb23035a4ba19eac95f02a1fa2329f
[ "Apache-2.0" ]
null
null
null
server/lib/idai_field_server_web/channels/user_socket.ex
felixwolter/idai-field
146ab8dbdedb23035a4ba19eac95f02a1fa2329f
[ "Apache-2.0" ]
null
null
null
defmodule IdaiFieldServerWeb.UserSocket do use Phoenix.Socket ## Channels # channel "room:*", IdaiFieldServerWeb.RoomChannel # Socket params are passed from the client and can # be used to verify and authenticate a user. After # verification, you can put default assigns into # the socket that will be set for all channels, ie # # {:ok, assign(socket, :user_id, verified_user_id)} # # To deny connection, return `:error`. # # See `Phoenix.Token` documentation for examples in # performing token verification on connect. @impl true def connect(_params, socket, _connect_info) do {:ok, socket} end # Socket id's are topics that allow you to identify all sockets for a given user: # # def id(socket), do: "user_socket:#{socket.assigns.user_id}" # # Would allow you to broadcast a "disconnect" event and terminate # all active sockets and channels for a given user: # # IdaiFieldServerWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{}) # # Returning `nil` makes this socket anonymous. @impl true def id(_socket), do: nil end
30.972222
90
0.701345
ffce5b95c3be55db64e1f44cad7cb69811a53c17
9,246
ex
Elixir
lib/coophub/repos_warmer.ex
camplight/coophub
317b65be4f608f251d0e38b5385ac66c468f8850
[ "MIT" ]
1
2020-03-11T02:43:24.000Z
2020-03-11T02:43:24.000Z
lib/coophub/repos_warmer.ex
camplight/coophub
317b65be4f608f251d0e38b5385ac66c468f8850
[ "MIT" ]
null
null
null
lib/coophub/repos_warmer.ex
camplight/coophub
317b65be4f608f251d0e38b5385ac66c468f8850
[ "MIT" ]
null
null
null
defmodule Coophub.Repos.Warmer do use Cachex.Warmer alias Coophub.Repos alias Coophub.Schemas.{Organization, Repository} require Logger @repos_max_fetch Application.get_env(:coophub, :fetch_max_repos) @repos_cache_name Application.get_env(:coophub, :main_cache_name) @repos_cache_interval Application.get_env(:coophub, :cache_interval) @repos_cache_dump_file Application.get_env(:coophub, :main_cache_dump_file) @doc """ Returns the interval for this warmer. """ def interval, do: :timer.minutes(@repos_cache_interval) @doc """ Executes this cache warmer. """ def execute(_state) do ## Delay the execution a bit to ensure Cachex is available Process.sleep(2000) prev_size = Cachex.size(@repos_cache_name) |> elem(1) curr_size = maybe_load_dump(prev_size) maybe_warm_cache(Coophub.Application.env(), prev_size, curr_size) end ## Just load dump on the first warm cycle defp maybe_load_dump(0), do: load_cache_dump() defp maybe_load_dump(prev_size), do: prev_size ## Ignore the first warm cycle if we are at :dev and if dump has entries defp maybe_warm_cache(:dev, 0, curr_size) when curr_size > 0, do: :ignore defp maybe_warm_cache(_, _, _), do: warm_cache() defp warm_cache() do Logger.info("Warming repos into cache from github..", ansi_color: :yellow) repos = read_yml() |> Enum.reduce([], fn {name, yml_data}, acc -> case get_org(name, yml_data) do :error -> acc org -> [get_repos(name, org) | acc] end end) spawn(save_cache_dump(repos)) ## Set a very high TTL to ensure that memory and dump data don't expire ## in the case we aren't able to refresh data from github API, but.. ## we will try to refresh it anyways every ":cache_interval" minutes! 
{:ok, repos, ttl: :timer.hours(24 * 365)} end defp save_cache_dump(repos) do fn -> ## Delay the execution a bit to ensure cache data is available Process.sleep(2000) case Cachex.dump(@repos_cache_name, @repos_cache_dump_file) do {:ok, true} -> Logger.info( "Saved repos cache dump with #{length(repos)} orgs to local file '#{ @repos_cache_dump_file }'", ansi_color: :green ) err -> Logger.error("Error saving repos cache dump: #{inspect(err)}") end end end defp load_cache_dump() do Logger.info("Warming repos into cache from dump..", ansi_color: :yellow) with {:ok, dump} <- read_cache_dump(@repos_cache_dump_file), {:ok, true} <- Cachex.import(@repos_cache_name, dump), {:ok, size} <- Cachex.size(@repos_cache_name) do Logger.info("The dump was loaded with #{size} orgs!", ansi_color: :yellow) size else _ -> Logger.info("Dump not found '#{@repos_cache_dump_file}'", ansi_color: :yellow) 0 end end defp read_cache_dump(path) do dump = path |> File.read!() # Since our dump has atoms (because of the structs) we can't use Cachex.load() # because it does :erlang.binary_to_term([:safe]) at Cachex.Disk.read() |> :erlang.binary_to_term() {:ok, dump} rescue _ -> {:error, :unreachable_file} end defp read_yml() do path = Path.join(File.cwd!(), "cooperatives.yml") {:ok, coops} = YamlElixir.read_from_file(path, maps_as_keywords: false) coops end ## ## Github API calls and handling functions ## defp get_repos(org_name, org) do org_repos = case call_api_get( "orgs/#{org_name}/repos?per_page=#{@repos_max_fetch}&type=public&sort=pushed&direction=desc" ) do {:ok, body} -> repos = Repos.to_struct(Repository, body) |> put_key(org_name) |> put_popularities() |> put_topics(org_name) |> put_languages(org_name) |> put_repo_data(org_name) Logger.info("Fetched #{length(repos)} repos for #{org_name}", ansi_color: :yellow) repos {:error, reason} -> Logger.error( "Error getting the repos for '#{org_name}' from github: #{inspect(reason)}" ) [] end org = org |> Map.put(:repos, org_repos) |> 
Map.put(:repo_count, Enum.count(org_repos)) |> put_org_languages_stats() |> put_org_popularity() |> put_org_last_activity() {org_name, org} end defp get_members(%Organization{:key => key} = org) do members = case call_api_get("orgs/#{key}/members") do {:ok, body} -> body {:error, reason} -> Logger.error("Error getting members for '#{key}' from github: #{inspect(reason)}") [] end Map.put(org, :members, members) end defp get_org(name, yml_data) do case call_api_get("orgs/#{name}") do {:ok, org} -> msg = "Fetched '#{name}' organization! Getting members and repos (max=#{@repos_max_fetch}).." Logger.info(msg, ansi_color: :yellow) Repos.to_struct(Organization, org) |> Map.put(:key, name) |> Map.put(:yml_data, yml_data) |> get_members() {:error, reason} -> Logger.error("Error getting the organization '#{name}' from github: #{inspect(reason)}") :error end end defp put_key(repos, key) do Enum.map(repos, &Map.put(&1, :key, key)) end defp put_popularities(repos) do Enum.map(repos, &Map.put(&1, :popularity, Repos.get_repo_popularity(&1))) end defp put_topics(repos, org_name) do Enum.map(repos, fn repo -> repo_name = repo.name topics = case call_api_get("repos/#{org_name}/#{repo_name}/topics") do {:ok, body} -> body {:error, reason} -> Logger.error( "Error getting the topics for '#{org_name}/#{repo_name}' from github: #{ inspect(reason) }" ) %{} end Map.put(repo, :topics, Map.get(topics, "names", [])) end) end defp put_languages(repos, org_name) do Enum.map(repos, fn repo -> repo_name = repo.name languages = case call_api_get("repos/#{org_name}/#{repo_name}/languages") do {:ok, body} -> body {:error, reason} -> Logger.error( "Error getting the languages for '#{org_name}/#{repo_name}' from github: #{ inspect(reason) }" ) %{} end put_repo_languages_stats(repo, languages) end) end defp put_repo_data(repos, org_name) do Enum.map(repos, fn repo -> repo_name = repo.name repo_data = case call_api_get("repos/#{org_name}/#{repo_name}") do {:ok, body} -> body {:error, reason} -> 
Logger.error( "Error getting repo data for '#{org_name}/#{repo_name}' from github: #{ inspect(reason) }" ) %{} end parent = Map.get(repo_data, "parent") case parent do %{"full_name" => name, "html_url" => url} -> Map.put(repo, :parent, %{"name" => name, "url" => url}) _ -> repo end end) end defp put_repo_languages_stats(repo, languages) do stats = Repos.get_percentages_by_language(languages) Map.put(repo, :languages, stats) end defp put_org_languages_stats(org) do stats = Repos.get_org_languages_stats(org) org |> Map.put(:languages, stats) |> convert_languages_to_list_and_sort() end defp put_org_popularity(org) do popularity = Repos.get_org_popularity(org) Map.put(org, :popularity, popularity) end defp put_org_last_activity(org) do last_activity = Repos.get_org_last_activity(org) Map.put(org, :last_activity, last_activity) end defp convert_languages_to_list_and_sort(org) do repos = org |> Map.get(:repos, []) |> Enum.map(&languages_map_to_list_and_sort/1) org |> Map.put(:repos, repos) |> languages_map_to_list_and_sort() end defp languages_map_to_list_and_sort(datamap) do languages = datamap |> Map.get(:languages, []) |> Enum.map(fn {lang, stats} -> Map.put(stats, "lang", lang) end) |> Enum.sort(&(&1["bytes"] > &2["bytes"])) Map.put(datamap, :languages, languages) end defp headers() do headers = [ {"Accept", "application/vnd.github.mercy-preview+json"} ] token = System.get_env("GITHUB_OAUTH_TOKEN") if is_binary(token) do [{"Authorization", "token #{token}"} | headers] else headers end end @spec call_api_get(String.t()) :: {:ok, map | [map]} | {:error, any} defp call_api_get(path) do url = "https://api.github.com/#{path}" case HTTPoison.get(url, headers()) do {:ok, %HTTPoison.Response{status_code: 200, body: body}} -> {:ok, Jason.decode!(body)} {:ok, %HTTPoison.Response{status_code: 404}} -> {:error, "Not found: #{url}"} {:error, %HTTPoison.Error{reason: reason}} -> {:error, reason} end end end
26.645533
105
0.594852
ffce6748f755b71daf381af4235148942d5b214d
886
exs
Elixir
test/presence_channel_test.exs
pkdcryptos/edgurgel-poxa
d99fbd3ac54317a180349ee91d8e20cdc8270a63
[ "MIT" ]
null
null
null
test/presence_channel_test.exs
pkdcryptos/edgurgel-poxa
d99fbd3ac54317a180349ee91d8e20cdc8270a63
[ "MIT" ]
null
null
null
test/presence_channel_test.exs
pkdcryptos/edgurgel-poxa
d99fbd3ac54317a180349ee91d8e20cdc8270a63
[ "MIT" ]
null
null
null
defmodule Poxa.PresenceChannelTest do use ExUnit.Case import :meck import Poxa.PresenceChannel setup do new Poxa.registry on_exit fn -> unload() end :ok end test "return unique user ids currently subscribed" do expect(Poxa.registry, :unique_subscriptions, ["presence-channel"], [{ :user_id, :user_info }, { :user_id2, :user_info2 }]) assert users("presence-channel") == [:user_id, :user_id2] assert validate Poxa.registry end test "return number of unique subscribed users" do expect(Poxa.registry, :unique_subscriptions, ["presence-channel"], [{ :user_id, :user_info }, { :user_id2, :user_info2 }]) assert user_count("presence-channel") == 2 assert validate Poxa.registry end end
29.533333
100
0.586907
ffce73edb253e4b0de68d23bf7a5a278a4cb26f4
1,216
ex
Elixir
lib/challenge_web/views/error_helpers.ex
gissandrogama/challenge_elixir
bf0c2dd3b82857cfc7f39fefbd2be75a5b5d52dd
[ "MIT" ]
null
null
null
lib/challenge_web/views/error_helpers.ex
gissandrogama/challenge_elixir
bf0c2dd3b82857cfc7f39fefbd2be75a5b5d52dd
[ "MIT" ]
4
2021-01-18T22:25:19.000Z
2021-01-20T17:36:23.000Z
lib/challenge_web/views/error_helpers.ex
gissandrogama/challenge_elixir
bf0c2dd3b82857cfc7f39fefbd2be75a5b5d52dd
[ "MIT" ]
null
null
null
defmodule ChallengeWeb.ErrorHelpers do @moduledoc """ Conveniences for translating and building error messages. """ @doc """ Translates an error message using gettext. """ def translate_error({msg, opts}) do # When using gettext, we typically pass the strings we want # to translate as a static argument: # # # Translate "is invalid" in the "errors" domain # dgettext("errors", "is invalid") # # # Translate the number of files with plural rules # dngettext("errors", "1 file", "%{count} files", count) # # Because the error messages we show in our forms and APIs # are defined inside Ecto, we need to translate them dynamically. # This requires us to call the Gettext module passing our gettext # backend as first argument. # # Note we use the "errors" domain, which means translations # should be written to the errors.po file. The :count option is # set by Ecto and indicates we should also apply plural rules. if count = opts[:count] do Gettext.dngettext(ChallengeWeb.Gettext, "errors", msg, msg, count, opts) else Gettext.dgettext(ChallengeWeb.Gettext, "errors", msg, opts) end end end
35.764706
78
0.672697
ffce74eba5c15c9ed11bc8ae3748365200e609b9
13,460
ex
Elixir
lib/chat_api_web/controllers/slack_controller.ex
hakerspeak/hakerspeak.com
efd9e75f4854fdd19fc1873300deae0b160fb629
[ "MIT" ]
null
null
null
lib/chat_api_web/controllers/slack_controller.ex
hakerspeak/hakerspeak.com
efd9e75f4854fdd19fc1873300deae0b160fb629
[ "MIT" ]
null
null
null
lib/chat_api_web/controllers/slack_controller.ex
hakerspeak/hakerspeak.com
efd9e75f4854fdd19fc1873300deae0b160fb629
[ "MIT" ]
null
null
null
defmodule ChatApiWeb.SlackController do use ChatApiWeb, :controller require Logger alias ChatApiWeb.SlackAuthorizationView alias ChatApi.{Conversations, Slack, SlackAuthorizations} alias ChatApi.SlackAuthorizations.SlackAuthorization action_fallback(ChatApiWeb.FallbackController) @spec notify(Plug.Conn.t(), map()) :: Plug.Conn.t() def notify(conn, %{"text" => text} = params) do with %{account_id: account_id} <- conn.assigns.current_user, %SlackAuthorization{access_token: access_token, channel: channel} <- SlackAuthorizations.get_authorization_by_account(account_id, %{ type: Map.get(params, "type", "reply"), inbox_id: ChatApi.Inboxes.get_account_primary_inbox_id(account_id) }), {:ok, %{body: data}} <- Slack.Client.send_message( %{ "channel" => Map.get(params, "channel", channel), "text" => text }, access_token ) do json(conn, %{data: data}) else _ -> json(conn, %{data: nil}) end end @spec oauth(Plug.Conn.t(), map()) :: Plug.Conn.t() def oauth(conn, %{"code" => code} = params) do Logger.info("Code from Slack OAuth: #{inspect(code)}") # TODO: improve error handling? 
with %{account_id: account_id, email: email} <- conn.assigns.current_user, redirect_uri <- Map.get(params, "redirect_url"), {:ok, response} <- Slack.Client.get_access_token(code, redirect_uri), :ok <- Logger.info("Slack OAuth response: #{inspect(response)}"), %{body: body} <- response, %{ "access_token" => access_token, "app_id" => app_id, "bot_user_id" => bot_user_id, "scope" => scope, "token_type" => token_type, "authed_user" => authed_user, "team" => team, "incoming_webhook" => incoming_webhook } <- body, %{"id" => authed_user_id} <- authed_user, %{"id" => team_id, "name" => team_name} <- team, # TODO: validate that `channel_id` doesn't match account integration with different `type` %{ "channel" => channel, "channel_id" => channel_id, "configuration_url" => configuration_url, "url" => webhook_url } <- incoming_webhook, integration_type <- Map.get(params, "type", "reply"), inbox_id <- Map.get(params, "inbox_id"), :ok <- Slack.Validation.validate_authorization_channel_id( channel_id, account_id, integration_type ) do filters = case params do %{"type" => "support", "inbox_id" => inbox_id} -> %{type: "support", team_id: team_id, inbox_id: inbox_id} %{"inbox_id" => inbox_id} -> %{type: integration_type, inbox_id: inbox_id} _ -> %{type: integration_type} end # TODO: after creating, check if connected channel is private; # If yes, use webhook_url to send notification that Hakerspeak app needs # to be added manually, along with instructions for how to do so {:ok, _} = SlackAuthorizations.create_or_update(account_id, filters, %{ account_id: account_id, inbox_id: inbox_id, access_token: access_token, app_id: app_id, authed_user_id: authed_user_id, bot_user_id: bot_user_id, scope: scope, token_type: token_type, channel: channel, channel_id: channel_id, configuration_url: configuration_url, team_id: team_id, team_name: team_name, webhook_url: webhook_url, type: integration_type }) cond do integration_type == "reply" -> send_private_channel_instructions(:reply, webhook_url) 
integration_type == "support" && Slack.Helpers.is_private_slack_channel?(channel_id) -> send_private_channel_instructions(:support, webhook_url) integration_type == "support" -> send_support_channel_instructions(webhook_url) true -> nil end Slack.Helpers.send_internal_notification( "#{email} successfully linked Slack `#{inspect(integration_type)}` integration to channel `#{ channel }`" ) json(conn, %{data: %{ok: true}}) else {:error, :duplicate_channel_id} -> conn |> put_status(400) |> json(%{ error: %{ status: 400, message: """ This Slack channel has already been connected with another integration. Please select another channel, or disconnect the other integration and try again. """ } }) error -> Logger.error(inspect(error)) conn |> put_status(401) |> json(%{error: %{status: 401, message: "OAuth access denied: #{inspect(error)}"}}) end end @spec authorization(Plug.Conn.t(), map()) :: Plug.Conn.t() def authorization(conn, payload) do filters = payload |> Map.new(fn {key, value} -> {String.to_atom(key), value} end) |> Map.merge(%{ type: Map.get(payload, "type", "reply") }) conn |> Pow.Plug.current_user() |> Map.get(:account_id) |> SlackAuthorizations.get_authorization_by_account(filters) |> case do nil -> json(conn, %{data: nil}) auth -> conn |> put_view(SlackAuthorizationView) |> render("show.json", slack_authorization: auth) end end @spec authorizations(Plug.Conn.t(), map()) :: Plug.Conn.t() def authorizations(conn, payload) do filters = payload |> Map.new(fn {key, value} -> {String.to_atom(key), value} end) |> Map.merge(%{ type: Map.get(payload, "type", "support") }) account_id = conn.assigns.current_user.account_id authorizations = SlackAuthorizations.list_slack_authorizations_by_account(account_id, filters) conn |> put_view(SlackAuthorizationView) |> render("index.json", slack_authorizations: authorizations) end @spec update_settings(Plug.Conn.t(), map()) :: Plug.Conn.t() def update_settings(conn, %{"id" => id, "settings" => settings}) do with %{account_id: 
_account_id} <- conn.assigns.current_user, %SlackAuthorization{} = auth <- SlackAuthorizations.get_slack_authorization!(id), {:ok, %SlackAuthorization{} = authorization} <- SlackAuthorizations.update_slack_authorization(auth, %{settings: settings}) do conn |> put_view(SlackAuthorizationView) |> render("show.json", slack_authorization: authorization) end end @spec delete(Plug.Conn.t(), map()) :: Plug.Conn.t() def delete(conn, %{"id" => id}) do with %{account_id: _account_id} <- conn.assigns.current_user, %SlackAuthorization{} = auth <- SlackAuthorizations.get_slack_authorization!(id), {:ok, %SlackAuthorization{}} <- SlackAuthorizations.delete_slack_authorization(auth) do send_resp(conn, :no_content, "") end end @spec webhook(Plug.Conn.t(), map()) :: Plug.Conn.t() def webhook(conn, payload) do Logger.debug("Payload from Slack webhook: #{inspect(payload)}") case payload do %{"event" => _event} -> handle_webhook_payload(payload) send_resp(conn, 200, "") %{"challenge" => challenge} -> send_resp(conn, 200, challenge) _ -> send_resp(conn, 200, "") end end @spec actions(Plug.Conn.t(), map()) :: Plug.Conn.t() def actions(conn, %{"payload" => json}) do Logger.debug("Payload from Slack action: #{inspect(json)}") with {:ok, %{"actions" => actions}} <- Jason.decode(json) do Enum.each(actions, &handle_action/1) end send_resp(conn, 200, "") end def actions(conn, params) do Logger.debug("Payload from unhandled Slack action: #{inspect(params)}") send_resp(conn, 200, "") end @spec channels(Plug.Conn.t(), map()) :: Plug.Conn.t() def channels(conn, payload) do account_id = conn.assigns.current_user.account_id filters = payload |> Map.new(fn {key, value} -> {String.to_atom(key), value} end) |> Map.merge(%{ type: Map.get(payload, "type", "support") }) auth = case payload do %{"authorization_id" => id} -> SlackAuthorizations.get_slack_authorization!(id) %{"slack_authorization_id" => id} -> SlackAuthorizations.get_slack_authorization!(id) _ -> 
SlackAuthorizations.get_authorization_by_account(account_id, filters) end # TODO: figure out the best way to handle errors here... should we just return # an empty list of channels if the call fails, or indicate that an error occurred? with %SlackAuthorization{access_token: access_token} <- auth, {:ok, result} <- Slack.Client.list_channels(access_token), %{body: %{"ok" => true, "channels" => channels}} <- result do json(conn, %{data: channels}) end end @spec handle_action(map()) :: any() def handle_action(%{ "action_id" => "close_conversation", "type" => "button", "action_ts" => _action_ts, "value" => conversation_id }) do conversation_id |> Conversations.get_conversation!() |> Conversations.update_conversation(%{"status" => "closed"}) |> case do {:ok, conversation} -> conversation |> Conversations.Notification.notify(:slack) |> Conversations.Notification.notify(:webhooks, event: "conversation:updated") _ -> nil end end def handle_action(%{ "action_id" => "open_conversation", "type" => "button", "action_ts" => _action_ts, "value" => conversation_id }) do conversation_id |> Conversations.get_conversation!() |> Conversations.update_conversation(%{"status" => "open"}) |> case do {:ok, conversation} -> conversation |> Conversations.Notification.notify(:slack) |> Conversations.Notification.notify(:webhooks, event: "conversation:updated") _ -> nil end end @spec handle_webhook_payload(map()) :: any() defp handle_webhook_payload(payload) do # TODO: figure out a better way to handle this in tests case Application.get_env(:chat_api, :environment) do :test -> Slack.Event.handle_payload(payload) _ -> Task.start(fn -> Slack.Event.handle_payload(payload) end) end end # TODO: maybe it would make more sense to put these in the Slack.Notification module @spec send_private_channel_instructions(:reply | :support, binary()) :: any() defp send_private_channel_instructions(:reply, webhook_url) do message = """ Hi there! :wave: looks like you've connected Hakerspeak to this channel. 
In order to complete your setup, you'll need to manually add the *Hakerspeak* app this channel. You can do this by typing `/app` in the message box below, clicking on "*Add apps to this channel*", and selecting the *Hakerspeak* app. (If that doesn't work, try following these instructions: https://slack.com/help/articles/202035138-Add-apps-to-your-Slack-workspace) Thanks for trying us out! :rocket: """ Logger.info(message) # Putting in an async Task for now, since we don't care if this succeeds # or fails (and we also don't want it to block anything) Task.start(fn -> Slack.Notification.log(message, webhook_url) end) end defp send_private_channel_instructions(:support, webhook_url) do message = """ Hi there! :wave: looks like you've connected Hakerspeak to a private channel. In order to complete your setup, you'll need to manually add the *Hakerspeak* app to this channel, as well as any other channels in which you'd like it to be active. You can do this by typing `/app` in the message box below, click on "*Add apps to this channel*", and selecting the *Hakerspeak* app. (If that doesn't work, try following these instructions: https://slack.com/help/articles/202035138-Add-apps-to-your-Slack-workspace) Thanks for trying us out! :rocket: """ Logger.info(message) # Putting in an async Task for now, since we don't care if this succeeds # or fails (and we also don't want it to block anything) Task.start(fn -> Slack.Notification.log(message, webhook_url) end) end @spec send_support_channel_instructions(binary()) :: any() defp send_support_channel_instructions(webhook_url) do message = """ Hi there! :wave: If you'd like to sync messages with Hakerspeak in other channels, you'll need to manually add the *Hakerspeak* app to them. You can do this by going to the channels you want to sync, typing `/app` in the message box, clicking on "*Add apps to this channel*", and selecting the *Hakerspeak* app. 
(If that doesn't work, try following these instructions: https://slack.com/help/articles/202035138-Add-apps-to-your-Slack-workspace) Thanks for trying us out! :rocket: """ Logger.info(message) # Putting in an async Task for now, since we don't care if this succeeds # or fails (and we also don't want it to block anything) Task.start(fn -> Slack.Notification.log(message, webhook_url) end) end end
34.690722
174
0.625854
ffce7de0f1c1c54f2001f0a8380dfaf06ceb9521
1,300
exs
Elixir
config/dev.exs
bmbferreira/slack-quiet
3bd08f102e1236189fd81580ded8b6548ddfc2cd
[ "MIT" ]
5
2018-01-15T19:51:37.000Z
2018-01-31T18:30:01.000Z
config/dev.exs
bmbferreira/slack-quiet
3bd08f102e1236189fd81580ded8b6548ddfc2cd
[ "MIT" ]
null
null
null
config/dev.exs
bmbferreira/slack-quiet
3bd08f102e1236189fd81580ded8b6548ddfc2cd
[ "MIT" ]
1
2018-06-25T19:16:39.000Z
2018-06-25T19:16:39.000Z
use Mix.Config # For development, we disable any cache and enable # debugging and code reloading. # # The watchers configuration can be used to run external # watchers to your application. For example, we use it # with brunch.io to recompile .js and .css sources. config :slack_quiet, SlackQuietWeb.Endpoint, http: [port: 4000], debug_errors: true, code_reloader: true, check_origin: false, watchers: [] # ## SSL Support # # In order to use HTTPS in development, a self-signed # certificate can be generated by running the following # command from your terminal: # # openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -keyout priv/server.key -out priv/server.pem # # The `http:` config above can be replaced with: # # https: [port: 4000, keyfile: "priv/server.key", certfile: "priv/server.pem"], # # If desired, both `http:` and `https:` keys can be # configured to run both http and https servers on # different ports. # Do not include metadata nor timestamps in development logs config :logger, :console, format: "[$level] $message\n" # Set a higher stacktrace during development. Avoid configuring such # in production as building large stacktraces may be expensive. config :phoenix, :stacktrace_depth, 20
34.210526
170
0.737692
ffceb23459e62ed058eec90c5d39fbca258c1ea2
1,419
ex
Elixir
clients/licensing/lib/google_api/licensing/v1/model/license_assignment_insert.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
null
null
null
clients/licensing/lib/google_api/licensing/v1/model/license_assignment_insert.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
1
2020-12-18T09:25:12.000Z
2020-12-18T09:25:12.000Z
clients/licensing/lib/google_api/licensing/v1/model/license_assignment_insert.ex
medikent/elixir-google-api
98a83d4f7bfaeac15b67b04548711bb7e49f9490
[ "Apache-2.0" ]
1
2020-10-04T10:12:44.000Z
2020-10-04T10:12:44.000Z
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.Licensing.V1.Model.LicenseAssignmentInsert do @moduledoc """ Representation of a license assignment. ## Attributes * `userId` (*type:* `String.t`, *default:* `nil`) - Email id of the user """ use GoogleApi.Gax.ModelBase @type t :: %__MODULE__{ :userId => String.t() } field(:userId) end defimpl Poison.Decoder, for: GoogleApi.Licensing.V1.Model.LicenseAssignmentInsert do def decode(value, options) do GoogleApi.Licensing.V1.Model.LicenseAssignmentInsert.decode(value, options) end end defimpl Poison.Encoder, for: GoogleApi.Licensing.V1.Model.LicenseAssignmentInsert do def encode(value, options) do GoogleApi.Gax.ModelBase.encode(value, options) end end
30.191489
84
0.740662
ffceb73db985d8666ab6bb5c72b81653fa82829b
1,117
exs
Elixir
config/config.exs
nicolafiorillo/clooney
588f503966ef5511a5d336c8ed2e04d3aa9728ae
[ "MIT" ]
null
null
null
config/config.exs
nicolafiorillo/clooney
588f503966ef5511a5d336c8ed2e04d3aa9728ae
[ "MIT" ]
null
null
null
config/config.exs
nicolafiorillo/clooney
588f503966ef5511a5d336c8ed2e04d3aa9728ae
[ "MIT" ]
null
null
null
# This file is responsible for configuring your application # and its dependencies with the aid of the Mix.Config module. use Mix.Config # This configuration is loaded before any dependency and is restricted # to this project. If another project depends on this project, this # file won't be loaded nor affect the parent project. For this reason, # if you want to provide default values for your application for # 3rd-party users, it should be done in your "mix.exs" file. # You can configure for your application as: # # config :clooney, key: :value # # And access this configuration in your application as: # # Application.get_env(:clooney, :key) # # Or configure a 3rd-party app: # # config :logger, level: :info # # It is also possible to import configuration files, relative to this # directory. For example, you can emulate configuration per environment # by uncommenting the line below and defining dev.exs, test.exs and such. # Configuration from the imported file will override the ones defined # here (which is why it is important to import them last). # # import_config "#{Mix.env}.exs"
36.032258
73
0.751119