hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
08cacd8df4c7fef4edc7df794764703a5763f177 | 1,105 | ex | Elixir | test/support/channel_case.ex | Nilomiranda/Danpay | 1b6f05a8a5ce6b636241c369fca85f16b229bc82 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | Nilomiranda/Danpay | 1b6f05a8a5ce6b636241c369fca85f16b229bc82 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | Nilomiranda/Danpay | 1b6f05a8a5ce6b636241c369fca85f16b229bc82 | [
"MIT"
] | null | null | null | defmodule DanpayWeb.ChannelCase do
  @moduledoc """
  This module defines the test case to be used by
  channel tests.
  Such tests rely on `Phoenix.ChannelTest` and also
  import other functionality to make it easier
  to build common data structures and query the data layer.
  Finally, if the test case interacts with the database,
  we enable the SQL sandbox, so changes done to the database
  are reverted at the end of every test. If you are using
  PostgreSQL, you can even run database tests asynchronously
  by setting `use DanpayWeb.ChannelCase, async: true`, although
  this option is not recommended for other databases.
  """
  use ExUnit.CaseTemplate
  # Code injected into every test module that does `use DanpayWeb.ChannelCase`.
  using do
    quote do
      # Import conveniences for testing with channels
      import Phoenix.ChannelTest
      import DanpayWeb.ChannelCase
      # The default endpoint for testing
      @endpoint DanpayWeb.Endpoint
    end
  end
  # Runs before each test: checks out a sandboxed DB connection so every change
  # is rolled back afterwards. For non-async tests the connection is put in
  # shared mode so processes spawned by the test can reuse it.
  setup tags do
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Danpay.Repo)
    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(Danpay.Repo, {:shared, self()})
    end
    :ok
  end
end
| 26.95122 | 68 | 0.725792 |
08cafa94d5e88d7486018146a83fd71cbe1c06be | 1,081 | ex | Elixir | apps/alerts/lib/stop.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 42 | 2019-05-29T16:05:30.000Z | 2021-08-09T16:03:37.000Z | apps/alerts/lib/stop.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 872 | 2019-05-29T17:55:50.000Z | 2022-03-30T09:28:43.000Z | apps/alerts/lib/stop.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
defmodule Alerts.Stop do
  @moduledoc """
  Given a stop_id, returns the list of alerts which apply to that stop.
  Options include:
  * route: the route we're interested in (ID string)
  * route_type: the route_type of the interested route (GTFS integer)
  * direction_id: the direction we're travelling (GTFS integer)
  * time: for a particular datetime (DateTime)
  """
  alias Alerts.InformedEntity, as: IE
  alias Alerts.Match

  @doc """
  Filters `alerts` down to those that apply to `stop_id`.

  An alert is kept only when it matches the stop together with any of the
  given options AND explicitly references the stop itself.
  """
  def match(alerts, stop_id, options \\ []) do
    # Entity describing just the stop, and one refined by the caller's options.
    bare_entity = entity_for(stop_id, [])
    refined_entity = entity_for(stop_id, options)
    time = options[:time]

    Enum.filter(alerts, fn alert ->
      Match.match([alert], bare_entity, time) != [] and
        Match.match([alert], refined_entity) != []
    end)
  end

  # Builds an informed entity from the stop id plus any extra option filters.
  defp entity_for(stop_id, options) do
    IE.from_keywords(Keyword.put(options, :stop, stop_id))
  end
end
| 29.216216 | 72 | 0.682701 |
08cafccff1a8067ed9adc14e35959f180d4b8d77 | 3,596 | exs | Elixir | config/prod.exs | runhyve/webapp | 434b074f98c1ebac657b56062c1c1a54e683dea1 | [
"BSD-2-Clause"
] | 12 | 2019-07-02T14:30:06.000Z | 2022-03-12T08:22:18.000Z | config/prod.exs | runhyve/webapp | 434b074f98c1ebac657b56062c1c1a54e683dea1 | [
"BSD-2-Clause"
] | 9 | 2020-03-16T20:10:50.000Z | 2021-06-17T17:45:44.000Z | config/prod.exs | runhyve/webapp | 434b074f98c1ebac657b56062c1c1a54e683dea1 | [
"BSD-2-Clause"
] | null | null | null | use Mix.Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
# Endpoint configuration. Environment variables always arrive as strings, so
# values that must be integers/booleans are converted explicitly:
#   * PORT  — Cowboy requires an integer port; the previous
#             `System.get_env("PORT") || 4000` yielded a *string* whenever PORT
#             was set, mixing types and crashing the HTTP listener on boot.
#   * DEBUG — the previous `System.get_env("DEBUG") || false` made ANY
#             non-empty value truthy (even the string "false"); now only
#             "true"/"1" enable debug error pages.
# SECRET_KEY_BASE is mandatory: Map.fetch!/2 raises at load time when missing.
config :webapp, WebappWeb.Endpoint,
  http: [:inet6, port: String.to_integer(System.get_env("PORT") || "4000")],
  url: [host: System.get_env("WEBAPP_DOMAIN") || "demo.runhyve.app", scheme: "https", port: 443],
  cache_static_manifest: "priv/static/cache_manifest.json",
  secret_key_base: Map.fetch!(System.get_env(), "SECRET_KEY_BASE"),
  debug_errors: System.get_env("DEBUG") in ["true", "1"],
  server: true
# Do not print debug messages in production
# Logger levels are atoms (:debug/:info/:warn/:error), but env vars are
# strings; the original passed the raw string straight through, which Logger
# rejects. `String.to_existing_atom/1` converts without minting new atoms
# (the level atoms already exist), so unknown values fail loudly at boot.
webapp_log_level =
  case System.get_env("WEBAPP_LOGLEVEL") do
    nil -> :info
    level -> String.to_existing_atom(level)
  end

config :logger, level: webapp_log_level
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :webapp, WebappWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [
# :inet6,
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :webapp, WebappWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases (distillery)
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :webapp, WebappWeb.Endpoint, server: true
#
# Note you can't rely on `System.get_env/1` when using releases.
# See the releases documentation accordingly.
# Finally import the config/prod.secret.exs which should be versioned
# separately.
config :webapp, Webapp.Repo,
  adapter: Ecto.Adapters.Postgres,
  # TLS to the database; DATABASE_URL carries host/user/password/db name.
  ssl: true,
  url: System.get_env("DATABASE_URL"),
  # Free tier db only allows 4 connections. Rolling deploys need pool_size*(n+1) connections.
  # NOTE(review): pool_size 10 contradicts the 4-connection limit described in
  # the comment above — confirm the current plan's limit or lower the pool size.
  pool_size: 10
# Outgoing mail via SMTP. gen_smtp (used by Bamboo.SMTPAdapter) expects an
# integer port, but env vars are strings — convert when SMTP_PORT is set.
# When unset we keep the original behavior of passing nil through.
smtp_port =
  case System.get_env("SMTP_PORT") do
    nil -> nil
    port -> String.to_integer(port)
  end

config :webapp, Webapp.Mailer,
  adapter: Bamboo.SMTPAdapter,
  server: System.get_env("SMTP_SERVER"),
  port: smtp_port,
  username: System.get_env("SMTP_USERNAME"),
  password: System.get_env("SMTP_PASSWORD"),
  ssl: true,
  retries: 1
# Pluggable notification backends; only the Slack notifier is enabled here.
config :webapp, Webapp.Notifications,
  enabled_modules: [Webapp.Notifications.NotifySlack],
  slack_webhook_url: System.get_env("SLACK_WEBHOOK_URL"),
  slack_channel: System.get_env("SLACK_CHANNEL"),
  slack_username: System.get_env("SLACK_USERNAME")
config :sentry,
  dsn: System.get_env("SENTRY_DSN"),
  environment_name: :prod,
  # Upload surrounding source lines with events so stack traces show context.
  enable_source_code_context: true,
  root_source_code_path: File.cwd!,
  tags: %{
    env: "production"
  },
  # Only events originating from the :prod environment are reported.
  included_environments: [:prod]
| 33.924528 | 97 | 0.720523 |
08cb0d81c7adc6c7c3861f8c814918dc68ba121c | 996 | ex | Elixir | lib/verk/event_producer.ex | Psli/verk | a538ceed1217d7aee89e92cf7740a96543ecc94c | [
"MIT"
] | null | null | null | lib/verk/event_producer.ex | Psli/verk | a538ceed1217d7aee89e92cf7740a96543ecc94c | [
"MIT"
] | null | null | null | lib/verk/event_producer.ex | Psli/verk | a538ceed1217d7aee89e92cf7740a96543ecc94c | [
"MIT"
defmodule Verk.EventProducer do
  @moduledoc """
  A GenStage producer that broadcasts events to subscribed consumers.
  """
  use GenStage

  # -- Client API --

  def start_link(_args \\ []) do
    GenStage.start_link(__MODULE__, :ok, name: __MODULE__)
  end

  def stop, do: GenStage.stop(__MODULE__)

  # Fire-and-forget: the event is queued and pushed out as demand allows.
  def async_notify(event), do: GenStage.cast(__MODULE__, {:notify, event})

  # -- Server callbacks --

  # State is {event_queue, pending_demand}; every subscriber sees every event.
  def init(:ok) do
    {:producer, {:queue.new(), 0}, dispatcher: GenStage.BroadcastDispatcher}
  end

  def handle_cast({:notify, event}, {queue, pending_demand}) do
    dispatch(:queue.in(event, queue), pending_demand)
  end

  def handle_demand(incoming, {queue, pending_demand}) do
    dispatch(queue, pending_demand + incoming)
  end

  # Pops queued events while demand remains, then emits them (oldest first)
  # and stores whatever queue/demand is left over.
  defp dispatch(queue, demand, acc \\ [])

  defp dispatch(queue, demand, acc) when demand > 0 do
    case :queue.out(queue) do
      {{:value, event}, rest} -> dispatch(rest, demand - 1, [event | acc])
      {:empty, _} -> {:noreply, Enum.reverse(acc), {queue, demand}}
    end
  end

  defp dispatch(queue, demand, acc) do
    {:noreply, Enum.reverse(acc), {queue, demand}}
  end
end
| 26.210526 | 76 | 0.671687 |
08cb207e83a10566aca31fdebc3f70358f08f3af | 11,678 | ex | Elixir | lib/membrane/element/base.ex | eboskma/membrane_core | e216994fe1ba99c5d228a4b0959faa5fabb13b1c | [
"Apache-2.0"
] | null | null | null | lib/membrane/element/base.ex | eboskma/membrane_core | e216994fe1ba99c5d228a4b0959faa5fabb13b1c | [
"Apache-2.0"
] | null | null | null | lib/membrane/element/base.ex | eboskma/membrane_core | e216994fe1ba99c5d228a4b0959faa5fabb13b1c | [
"Apache-2.0"
] | null | null | null | defmodule Membrane.Element.Base do
  @moduledoc """
  Module defining behaviour common to all elements.
  When used declares behaviour implementation, provides default callback definitions
  and imports macros.
  # Elements
  Elements are units that produce, process or consume data. They can be linked
  with `Membrane.Pipeline`, and thus form a pipeline able to perform complex data
  processing. Each element defines a set of pads, through which it can be linked
  with other elements. During playback, pads can either send (output pads) or
  receive (input pads) data. For more information on pads, see
  `Membrane.Pad`.
  To implement an element, one of base modules (`Membrane.Source`,
  `Membrane.Filter`, `Membrane.Sink`)
  has to be `use`d, depending on the element type:
  - source, producing buffers (contain only output pads),
  - filter, processing buffers (contain both input and output pads),
  - sink, consuming buffers (contain only input pads).
  For more information on each element type, check documentation for appropriate
  base module.
  ## Behaviours
  Element-specific behaviours are specified in modules:
  - `Membrane.Element.Base` - this module, behaviour common to all
  elements,
  - `Membrane.Element.WithOutputPads` - behaviour common to sources
  and filters,
  - `Membrane.Element.WithInputPads` - behaviour common to sinks and
  filters,
  - Base modules (`Membrane.Source`, `Membrane.Filter`,
  `Membrane.Sink`) - behaviours specific to each element type.
  ## Callbacks
  Modules listed above provide specifications of callbacks that define elements
  lifecycle. All of these callbacks have names with the `handle_` prefix.
  They are used to define reaction to certain events that happen during runtime,
  and indicate what actions framework should undertake as a result, besides
  executing element-specific code.
  For actions that can be returned by each callback, see `Membrane.Element.Action`
  module.
  """
  use Bunch
  alias Membrane.{Element, Event, Pad}
  alias Membrane.Core.OptionsSpecs
  alias Membrane.Element.{Action, CallbackContext}
  @typedoc """
  Type that defines all valid return values from most callbacks.
  In case of error, a callback is supposed to return `{:error, any}` if it is not
  passed state, and `{{:error, any}, state}` otherwise.
  """
  @type callback_return_t ::
          {:ok | {:ok, [Action.t()]} | {:error, any}, Element.state_t()} | {:error, any}
  @doc """
  Automatically implemented callback returning specification of pads exported
  by the element.
  Generated by `Membrane.Element.WithInputPads.def_input_pad/2`
  and `Membrane.Element.WithOutputPads.def_output_pad/2` macros.
  """
  @callback membrane_pads() :: [{Pad.name_t(), Pad.description_t()}]
  @doc """
  Automatically implemented callback used to determine if module is a membrane element.
  """
  @callback membrane_element? :: true
  @doc """
  Automatically implemented callback used to determine whether element exports clock.
  """
  @callback membrane_clock? :: true
  @doc """
  Automatically implemented callback determining whether element is a source,
  a filter or a sink.
  """
  @callback membrane_element_type :: Element.type_t()
  @doc """
  Callback invoked on initialization of element process. It should parse options
  and initialize element internal state. Internally it is invoked inside
  `c:GenServer.init/1` callback.
  """
  @callback handle_init(options :: Element.options_t()) ::
              {:ok, Element.state_t()}
              | {:error, any}
  @doc """
  Callback invoked when element goes to `:prepared` state from state `:stopped` and should get
  ready to enter `:playing` state.
  Usually most resources used by the element are allocated here.
  For example, if element opens a file, this is the place to try to actually open it
  and return error if that has failed. Such resources should be released in `c:handle_prepared_to_stopped/2`.
  """
  @callback handle_stopped_to_prepared(
              context :: CallbackContext.PlaybackChange.t(),
              state :: Element.state_t()
            ) :: callback_return_t
  @doc """
  Callback invoked when element goes to `:prepared` state from state `:playing` and should get
  ready to enter `:stopped` state.
  All resources allocated in `c:handle_prepared_to_playing/2` callback should be released here, and no more buffers or
  demands should be sent.
  """
  @callback handle_playing_to_prepared(
              context :: CallbackContext.PlaybackChange.t(),
              state :: Element.state_t()
            ) :: callback_return_t
  @doc """
  Callback invoked when element is supposed to start playing (goes from state `:prepared` to `:playing`).
  This is moment when initial demands are sent and first buffers are generated
  if there are any pads in the push mode.
  """
  @callback handle_prepared_to_playing(
              context :: CallbackContext.PlaybackChange.t(),
              state :: Element.state_t()
            ) :: callback_return_t
  @doc """
  Callback invoked when element is supposed to stop (goes from state `:prepared` to `:stopped`).
  Usually this is the place for releasing all remaining resources
  used by the element. For example, if element opens a file in `c:handle_stopped_to_prepared/2`,
  this is the place to close it.
  """
  @callback handle_prepared_to_stopped(
              context :: CallbackContext.PlaybackChange.t(),
              state :: Element.state_t()
            ) :: callback_return_t
  @callback handle_stopped_to_terminating(
              context :: CallbackContext.PlaybackChange.t(),
              state :: Element.state_t()
            ) :: callback_return_t
  @doc """
  Callback invoked when element receives a message that is not recognized
  as an internal membrane message.
  Useful for receiving ticks from timer, data sent from NIFs or other stuff.
  """
  @callback handle_other(
              message :: any(),
              context :: CallbackContext.Other.t(),
              state :: Element.state_t()
            ) :: callback_return_t
  @doc """
  Callback that is called when new pad has been added to element. Executed
  ONLY for dynamic pads.
  """
  @callback handle_pad_added(
              pad :: Pad.ref_t(),
              context :: CallbackContext.PadAdded.t(),
              state :: Element.state_t()
            ) :: callback_return_t
  @doc """
  Callback that is called when some pad of the element has been removed. Executed
  ONLY for dynamic pads.
  """
  @callback handle_pad_removed(
              pad :: Pad.ref_t(),
              context :: CallbackContext.PadRemoved.t(),
              state :: Element.state_t()
            ) :: callback_return_t
  @doc """
  Callback that is called when event arrives.
  Events may arrive from both sinks and sources. In filters by default event is
  forwarded to all sources or sinks, respectively.
  """
  @callback handle_event(
              pad :: Pad.ref_t(),
              event :: Event.t(),
              context :: CallbackContext.Event.t(),
              state :: Element.state_t()
            ) :: callback_return_t
  @doc """
  Callback invoked upon each timer tick. A timer can be started with `Membrane.Element.Action.start_timer_t`
  action.
  """
  @callback handle_tick(
              timer_id :: any,
              context :: CallbackContext.Tick.t(),
              state :: Element.state_t()
            ) :: callback_return_t
  @doc """
  Callback invoked when element is shutting down just before process is exiting.
  Internally called in `c:GenServer.terminate/2` callback.
  """
  @callback handle_shutdown(reason, state :: Element.state_t()) :: :ok
            when reason: :normal | :shutdown | {:shutdown, any} | term()
  @optional_callbacks membrane_clock?: 0,
                      handle_init: 1,
                      handle_stopped_to_prepared: 2,
                      handle_prepared_to_playing: 2,
                      handle_playing_to_prepared: 2,
                      handle_prepared_to_stopped: 2,
                      handle_other: 3,
                      handle_pad_added: 3,
                      handle_pad_removed: 3,
                      handle_event: 4,
                      handle_tick: 3,
                      handle_shutdown: 2
  @doc """
  Macro defining options that parametrize element.
  It automatically generates appropriate struct and documentation.
  #{OptionsSpecs.options_doc()}
  """
  defmacro def_options(options) do
    OptionsSpecs.def_options(__CALLER__.module, options, :element)
  end
  @doc """
  Defines that element exports a clock to pipeline.
  Exporting clock allows pipeline to choose it as the pipeline clock, enabling other
  elements to synchronize with it. Element's clock is accessible via `clock` field,
  while pipeline's one - via `parent_clock` field in callback contexts. Both of
  them can be used for starting timers.
  """
  defmacro def_clock(doc \\ "") do
    quote do
      @membrane_element_has_clock true
      # The stored moduledoc fragment is merged into the element's docs by
      # __before_compile__ (see Membrane.Core.Child.generate_moduledoc/2).
      Module.put_attribute(__MODULE__, :membrane_clock_moduledoc, """
      ## Clock
      This element provides a clock to its parent.
      #{unquote(doc)}
      """)
      @impl true
      def membrane_clock?, do: true
    end
  end
  @doc false
  defmacro __before_compile__(env) do
    Membrane.Core.Child.generate_moduledoc(env.module, :element)
  end
  @doc """
  Brings common stuff needed to implement an element. Used by
  `Membrane.Source.__using__/1`, `Membrane.Filter.__using__/1`
  and `Membrane.Sink.__using__/1`.
  Options:
  - `:bring_pad?` - if true (default) requires and aliases `Membrane.Pad`
  """
  defmacro __using__(options) do
    # Optionally inject `require`/`alias` of Membrane.Pad into the using module.
    bring_pad =
      if options |> Keyword.get(:bring_pad?, true) do
        quote do
          require Membrane.Pad
          alias Membrane.Pad
        end
      end
    quote location: :keep do
      @behaviour unquote(__MODULE__)
      @before_compile unquote(__MODULE__)
      alias Membrane.Element.CallbackContext, as: Ctx
      import unquote(__MODULE__), only: [def_clock: 0, def_clock: 1, def_options: 1]
      unquote(bring_pad)
      @impl true
      def membrane_element?, do: true
      # Default callback implementations below are no-ops that keep the state
      # unchanged; they are all overridable by the element implementation.
      @impl true
      def handle_init(%opt_struct{} = options), do: {:ok, options |> Map.from_struct()}
      def handle_init(options), do: {:ok, options}
      @impl true
      def handle_stopped_to_prepared(_context, state), do: {:ok, state}
      @impl true
      def handle_prepared_to_playing(_context, state), do: {:ok, state}
      @impl true
      def handle_playing_to_prepared(_context, state), do: {:ok, state}
      @impl true
      def handle_prepared_to_stopped(_context, state), do: {:ok, state}
      @impl true
      def handle_stopped_to_terminating(_context, state), do: {:ok, state}
      @impl true
      def handle_other(_message, _context, state), do: {:ok, state}
      @impl true
      def handle_pad_added(_pad, _context, state), do: {:ok, state}
      @impl true
      def handle_pad_removed(_pad, _context, state), do: {:ok, state}
      @impl true
      def handle_event(_pad, _event, _context, state), do: {:ok, state}
      @impl true
      def handle_shutdown(_reason, _state), do: :ok
      defoverridable handle_init: 1,
                     handle_stopped_to_prepared: 2,
                     handle_playing_to_prepared: 2,
                     handle_prepared_to_playing: 2,
                     handle_prepared_to_stopped: 2,
                     handle_other: 3,
                     handle_pad_added: 3,
                     handle_pad_removed: 3,
                     handle_event: 4,
                     handle_shutdown: 2
    end
  end
end
| 33.947674 | 118 | 0.661329 |
08cb4e3dd6d70f78c6980266ec5e02455abfc157 | 419 | exs | Elixir | priv/repo/migrations/20210729101154_craete_users_table.exs | nimblehq/liveman-demo-api | e184349983f949c8434b8651f9223db597ef1025 | [
"MIT"
] | null | null | null | priv/repo/migrations/20210729101154_craete_users_table.exs | nimblehq/liveman-demo-api | e184349983f949c8434b8651f9223db597ef1025 | [
"MIT"
] | 19 | 2021-07-02T08:14:52.000Z | 2021-07-30T09:33:12.000Z | priv/repo/migrations/20210729101154_craete_users_table.exs | nimblehq/liveman | e184349983f949c8434b8651f9223db597ef1025 | [
"MIT"
] | null | null | null | defmodule Liveman.Repo.Migrations.CraeteUsersTable do
  # NOTE(review): "Craete" is a typo for "Create", but the module name must stay
  # in sync with the migration filename, so fixing it requires renaming the file.
  use Ecto.Migration
  def change do
    # citext provides case-insensitive text comparison for the email column.
    # The second argument is the "down" SQL — intentionally a no-op, since
    # other objects may still depend on the extension.
    execute "CREATE EXTENSION IF NOT EXISTS citext", ""
    create table(:users, primary_key: false) do
      # UUID primary key; no DB-side default is declared here, so the id is
      # presumably supplied by the application — TODO confirm.
      add :id, :uuid, primary_key: true, null: false
      add :email, :citext, null: false
      add :hashed_password, :string, null: false
      timestamps()
    end
    create unique_index(:users, [:email])
  end
end
| 23.277778 | 55 | 0.677804 |
08cb5831ba6c63c9ea75cfadbb1f379f08fdab92 | 3,098 | ex | Elixir | clients/admin/lib/google_api/admin/directory_v1/model/customer_postal_address.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/admin/lib/google_api/admin/directory_v1/model/customer_postal_address.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/admin/lib/google_api/admin/directory_v1/model/customer_postal_address.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Admin.Directory_v1.Model.CustomerPostalAddress do
  @moduledoc """
  ## Attributes
  * `addressLine1` (*type:* `String.t`, *default:* `nil`) - A customer's physical address. The address can be composed of one to three lines.
  * `addressLine2` (*type:* `String.t`, *default:* `nil`) - Address line 2 of the address.
  * `addressLine3` (*type:* `String.t`, *default:* `nil`) - Address line 3 of the address.
  * `contactName` (*type:* `String.t`, *default:* `nil`) - The customer contact's name.
  * `countryCode` (*type:* `String.t`, *default:* `nil`) - This is a required property. For `countryCode` information see the [ISO 3166 country code elements](https://www.iso.org/iso/country_codes.htm).
  * `locality` (*type:* `String.t`, *default:* `nil`) - Name of the locality. An example of a locality value is the city of `San Francisco`.
  * `organizationName` (*type:* `String.t`, *default:* `nil`) - The company or company division name.
  * `postalCode` (*type:* `String.t`, *default:* `nil`) - The postal code. A postalCode example is a postal zip code such as `10009`. This is in accordance with - http: //portablecontacts.net/draft-spec.html#address_element.
  * `region` (*type:* `String.t`, *default:* `nil`) - Name of the region. An example of a region value is `NY` for the state of New York.
  """
  # Generated model file — do not hand-edit; regenerate via the code generator.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :addressLine1 => String.t() | nil,
          :addressLine2 => String.t() | nil,
          :addressLine3 => String.t() | nil,
          :contactName => String.t() | nil,
          :countryCode => String.t() | nil,
          :locality => String.t() | nil,
          :organizationName => String.t() | nil,
          :postalCode => String.t() | nil,
          :region => String.t() | nil
        }
  # One field/1 declaration per JSON attribute; field/1 is provided by
  # GoogleApi.Gax.ModelBase and drives (de)serialization.
  field(:addressLine1)
  field(:addressLine2)
  field(:addressLine3)
  field(:contactName)
  field(:countryCode)
  field(:locality)
  field(:organizationName)
  field(:postalCode)
  field(:region)
end
# Generated Poison protocol impl — decoding is delegated to the model module.
defimpl Poison.Decoder, for: GoogleApi.Admin.Directory_v1.Model.CustomerPostalAddress do
  def decode(value, options) do
    GoogleApi.Admin.Directory_v1.Model.CustomerPostalAddress.decode(value, options)
  end
end
# Generated Poison protocol impl — encoding is delegated to the shared base.
defimpl Poison.Encoder, for: GoogleApi.Admin.Directory_v1.Model.CustomerPostalAddress do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 43.633803 | 226 | 0.684958 |
08cbb15ac2f90437538fff290c9ef7a134d1a7b3 | 674 | ex | Elixir | lib/ex_pwned/api.ex | Grantimus9/ex_pwned | e26035372edc35c0b5167446ee852f133800aa03 | [
"Apache-2.0"
] | null | null | null | lib/ex_pwned/api.ex | Grantimus9/ex_pwned | e26035372edc35c0b5167446ee852f133800aa03 | [
"Apache-2.0"
] | null | null | null | lib/ex_pwned/api.ex | Grantimus9/ex_pwned | e26035372edc35c0b5167446ee852f133800aa03 | [
"Apache-2.0"
] | null | null | null | defmodule ExPwned.Api do
  @moduledoc """
  Base definition for Api client modules.
  `use ExPwned.Api` injects URL-building and HTTP GET helpers into the caller.
  """
  defmacro __using__(_opts) do
    quote do
      alias ExPwned.Parser
      import ExPwned.Utils
      # Builds "<base_url>/<path>?<query>"; base_url/0 comes from ExPwned.Utils.
      def build_url(path_arg, query_params \\ %{}) do
        "#{base_url()}/#{path_arg}?#{URI.encode_query(query_params)}"
      end
      def do_get(path_arg), do: do_get(path_arg, %{})
      # Keyword-list params are normalized to a map before building the query.
      def do_get(path_arg, query_params) when is_list(query_params), do: do_get(path_arg, Enum.into(query_params, %{}))
      # Performs the GET (headers/0 from ExPwned.Utils) and parses the
      # HTTPoison response tuple via ExPwned.Parser.
      def do_get(path_arg, query_params) do
        path_arg
        |> build_url(query_params)
        |> HTTPoison.get(headers())
        |> Parser.parse
      end
    end
  end
end
| 25.923077 | 119 | 0.632047 |
08cbc90b4a4a7e082aaae15d166b3b85e0836eaf | 24,196 | ex | Elixir | lib/aws/generated/personalize.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/personalize.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/personalize.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.Personalize do
@moduledoc """
Amazon Personalize is a machine learning service that makes it easy to add
individualized recommendations to customers.
"""
@doc """
Creates a batch inference job.
The operation can handle up to 50 million records and the input file must be in
JSON format. For more information, see `recommendations-batch`.
"""
def create_batch_inference_job(client, input, options \\ []) do
request(client, "CreateBatchInferenceJob", input, options)
end
@doc """
Creates a campaign by deploying a solution version.
When a client calls the
[GetRecommendations](https://docs.aws.amazon.com/personalize/latest/dg/API_RS_GetRecommendations.html) and
[GetPersonalizedRanking](https://docs.aws.amazon.com/personalize/latest/dg/API_RS_GetPersonalizedRanking.html)
APIs, a campaign is specified in the request.
## Minimum Provisioned TPS and Auto-Scaling
A transaction is a single `GetRecommendations` or `GetPersonalizedRanking` call.
Transactions per second (TPS) is the throughput and unit of billing for Amazon
Personalize. The minimum provisioned TPS (`minProvisionedTPS`) specifies the
baseline throughput provisioned by Amazon Personalize, and thus, the minimum
billing charge. If your TPS increases beyond `minProvisionedTPS`, Amazon
Personalize auto-scales the provisioned capacity up and down, but never below
`minProvisionedTPS`, to maintain a 70% utilization. There's a short time delay
while the capacity is increased that might cause loss of transactions. It's
recommended to start with a low `minProvisionedTPS`, track your usage using
Amazon CloudWatch metrics, and then increase the `minProvisionedTPS` as
necessary.
## Status
A campaign can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* DELETE PENDING > DELETE IN_PROGRESS
To get the campaign status, call `DescribeCampaign`.
Wait until the `status` of the campaign is `ACTIVE` before asking the campaign
for recommendations.
## Related APIs
* `ListCampaigns`
* `DescribeCampaign`
* `UpdateCampaign`
* `DeleteCampaign`
"""
def create_campaign(client, input, options \\ []) do
request(client, "CreateCampaign", input, options)
end
@doc """
Creates an empty dataset and adds it to the specified dataset group.
Use `CreateDatasetImportJob` to import your training data to a dataset.
There are three types of datasets:
* Interactions
* Items
* Users
Each dataset type has an associated schema with required field types. Only the
`Interactions` dataset is required in order to train a model (also referred to
as creating a solution).
A dataset can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* DELETE PENDING > DELETE IN_PROGRESS
To get the status of the dataset, call `DescribeDataset`.
## Related APIs
* `CreateDatasetGroup`
* `ListDatasets`
* `DescribeDataset`
* `DeleteDataset`
"""
def create_dataset(client, input, options \\ []) do
request(client, "CreateDataset", input, options)
end
@doc """
Creates an empty dataset group.
A dataset group contains related datasets that supply data for training a model.
A dataset group can contain at most three datasets, one for each type of
dataset:
* Interactions
* Items
* Users
To train a model (create a solution), a dataset group that contains an
`Interactions` dataset is required. Call `CreateDataset` to add a dataset to the
group.
A dataset group can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* DELETE PENDING
To get the status of the dataset group, call `DescribeDatasetGroup`. If the
status shows as CREATE FAILED, the response includes a `failureReason` key,
which describes why the creation failed.
You must wait until the `status` of the dataset group is `ACTIVE` before adding
a dataset to the group.
You can specify an AWS Key Management Service (KMS) key to encrypt the datasets
in the group. If you specify a KMS key, you must also include an AWS Identity
and Access Management (IAM) role that has permission to access the key.
## APIs that require a dataset group ARN in the request
* `CreateDataset`
* `CreateEventTracker`
* `CreateSolution`
## Related APIs
* `ListDatasetGroups`
* `DescribeDatasetGroup`
* `DeleteDatasetGroup`
"""
def create_dataset_group(client, input, options \\ []) do
request(client, "CreateDatasetGroup", input, options)
end
@doc """
Creates a job that imports training data from your data source (an Amazon S3
bucket) to an Amazon Personalize dataset.
To allow Amazon Personalize to import the training data, you must specify an AWS
Identity and Access Management (IAM) role that has permission to read from the
data source, as Amazon Personalize makes a copy of your data and processes it in
an internal AWS system.
The dataset import job replaces any previous data in the dataset.
## Status
A dataset import job can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
To get the status of the import job, call `DescribeDatasetImportJob`, providing
the Amazon Resource Name (ARN) of the dataset import job. The dataset import is
complete when the status shows as ACTIVE. If the status shows as CREATE FAILED,
the response includes a `failureReason` key, which describes why the job failed.
Importing takes time. You must wait until the status shows as ACTIVE before
training a model using the dataset.
## Related APIs
* `ListDatasetImportJobs`
* `DescribeDatasetImportJob`
"""
def create_dataset_import_job(client, input, options \\ []) do
request(client, "CreateDatasetImportJob", input, options)
end
@doc """
Creates an event tracker that you use when sending event data to the specified
dataset group using the
[PutEvents](https://docs.aws.amazon.com/personalize/latest/dg/API_UBS_PutEvents.html)
API.
When Amazon Personalize creates an event tracker, it also creates an
*event-interactions* dataset in the dataset group associated with the event
tracker. The event-interactions dataset stores the event data from the
`PutEvents` call. The contents of this dataset are not available to the user.
Only one event tracker can be associated with a dataset group. You will get an
error if you call `CreateEventTracker` using the same dataset group as an
existing event tracker.
When you send event data you include your tracking ID. The tracking ID
identifies the customer and authorizes the customer to send the data.
The event tracker can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* DELETE PENDING > DELETE IN_PROGRESS
To get the status of the event tracker, call `DescribeEventTracker`.
The event tracker must be in the ACTIVE state before using the tracking ID.
## Related APIs
* `ListEventTrackers`
* `DescribeEventTracker`
* `DeleteEventTracker`
"""
def create_event_tracker(client, input, options \\ []) do
request(client, "CreateEventTracker", input, options)
end
@doc """
Creates a recommendation filter.
For more information, see [Using Filters with Amazon Personalize](https://docs.aws.amazon.com/personalize/latest/dg/filters.html).
"""
def create_filter(client, input, options \\ []) do
request(client, "CreateFilter", input, options)
end
@doc """
Creates an Amazon Personalize schema from the specified schema string.
The schema you create must be in Avro JSON format.
Amazon Personalize recognizes three schema variants. Each schema is associated
with a dataset type and has a set of required field and keywords. You specify a
schema when you call `CreateDataset`.
## Related APIs
* `ListSchemas`
* `DescribeSchema`
* `DeleteSchema`
"""
def create_schema(client, input, options \\ []) do
request(client, "CreateSchema", input, options)
end
@doc """
Creates the configuration for training a model.
A trained model is known as a solution. After the configuration is created, you
train the model (create a solution) by calling the `CreateSolutionVersion`
operation. Every time you call `CreateSolutionVersion`, a new version of the
solution is created.
After creating a solution version, you check its accuracy by calling
`GetSolutionMetrics`. When you are satisfied with the version, you deploy it
using `CreateCampaign`. The campaign provides recommendations to a client
through the
[GetRecommendations](https://docs.aws.amazon.com/personalize/latest/dg/API_RS_GetRecommendations.html)
API.
To train a model, Amazon Personalize requires training data and a recipe. The
training data comes from the dataset group that you provide in the request. A
recipe specifies the training algorithm and a feature transformation. You can
specify one of the predefined recipes provided by Amazon Personalize.
Alternatively, you can specify `performAutoML` and Amazon Personalize will
analyze your data and select the optimum USER_PERSONALIZATION recipe for you.
## Status
A solution can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* DELETE PENDING > DELETE IN_PROGRESS
To get the status of the solution, call `DescribeSolution`. Wait until the
status shows as ACTIVE before calling `CreateSolutionVersion`.
## Related APIs
* `ListSolutions`
* `CreateSolutionVersion`
* `DescribeSolution`
* `DeleteSolution`
* `ListSolutionVersions`
* `DescribeSolutionVersion`
"""
def create_solution(client, input, options \\ []) do
request(client, "CreateSolution", input, options)
end
@doc """
Trains or retrains an active solution.
A solution is created using the `CreateSolution` operation and must be in the
ACTIVE state before calling `CreateSolutionVersion`. A new version of the
solution is created every time you call this operation.
## Status
A solution version can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
To get the status of the version, call `DescribeSolutionVersion`. Wait until the
status shows as ACTIVE before calling `CreateCampaign`.
If the status shows as CREATE FAILED, the response includes a `failureReason`
key, which describes why the job failed.
## Related APIs
* `ListSolutionVersions`
* `DescribeSolutionVersion`
* `ListSolutions`
* `CreateSolution`
* `DescribeSolution`
* `DeleteSolution`
"""
def create_solution_version(client, input, options \\ []) do
request(client, "CreateSolutionVersion", input, options)
end
@doc """
Removes a campaign by deleting the solution deployment.
The solution that the campaign is based on is not deleted and can be redeployed
when needed. A deleted campaign can no longer be specified in a
[GetRecommendations](https://docs.aws.amazon.com/personalize/latest/dg/API_RS_GetRecommendations.html)
request. For more information on campaigns, see `CreateCampaign`.
"""
def delete_campaign(client, input, options \\ []) do
request(client, "DeleteCampaign", input, options)
end
@doc """
Deletes a dataset.
You can't delete a dataset if an associated `DatasetImportJob` or
`SolutionVersion` is in the CREATE PENDING or IN PROGRESS state. For more
information on datasets, see `CreateDataset`.
"""
def delete_dataset(client, input, options \\ []) do
request(client, "DeleteDataset", input, options)
end
@doc """
Deletes a dataset group.
Before you delete a dataset group, you must delete the following:
* All associated event trackers.
* All associated solutions.
* All datasets in the dataset group.
"""
def delete_dataset_group(client, input, options \\ []) do
request(client, "DeleteDatasetGroup", input, options)
end
@doc """
Deletes the event tracker.
Does not delete the event-interactions dataset from the associated dataset
group. For more information on event trackers, see `CreateEventTracker`.
"""
def delete_event_tracker(client, input, options \\ []) do
request(client, "DeleteEventTracker", input, options)
end
@doc """
Deletes a filter.
"""
def delete_filter(client, input, options \\ []) do
request(client, "DeleteFilter", input, options)
end
@doc """
Deletes a schema.
Before deleting a schema, you must delete all datasets referencing the schema.
For more information on schemas, see `CreateSchema`.
"""
def delete_schema(client, input, options \\ []) do
request(client, "DeleteSchema", input, options)
end
@doc """
Deletes all versions of a solution and the `Solution` object itself.
Before deleting a solution, you must delete all campaigns based on the solution.
To determine what campaigns are using the solution, call `ListCampaigns` and
supply the Amazon Resource Name (ARN) of the solution. You can't delete a
solution if an associated `SolutionVersion` is in the CREATE PENDING or IN
PROGRESS state. For more information on solutions, see `CreateSolution`.
"""
def delete_solution(client, input, options \\ []) do
request(client, "DeleteSolution", input, options)
end
@doc """
Describes the given algorithm.
"""
def describe_algorithm(client, input, options \\ []) do
request(client, "DescribeAlgorithm", input, options)
end
@doc """
Gets the properties of a batch inference job including name, Amazon Resource
Name (ARN), status, input and output configurations, and the ARN of the solution
version used to generate the recommendations.
"""
def describe_batch_inference_job(client, input, options \\ []) do
request(client, "DescribeBatchInferenceJob", input, options)
end
@doc """
Describes the given campaign, including its status.
A campaign can be in one of the following states:
* CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or- CREATE FAILED
* DELETE PENDING > DELETE IN_PROGRESS
When the `status` is `CREATE FAILED`, the response includes the `failureReason`
key, which describes why.
For more information on campaigns, see `CreateCampaign`.
"""
def describe_campaign(client, input, options \\ []) do
request(client, "DescribeCampaign", input, options)
end
@doc """
Describes the given dataset.
For more information on datasets, see `CreateDataset`.
"""
def describe_dataset(client, input, options \\ []) do
request(client, "DescribeDataset", input, options)
end
@doc """
Describes the given dataset group.
For more information on dataset groups, see `CreateDatasetGroup`.
"""
def describe_dataset_group(client, input, options \\ []) do
request(client, "DescribeDatasetGroup", input, options)
end
@doc """
Describes the dataset import job created by `CreateDatasetImportJob`, including
the import job status.
"""
def describe_dataset_import_job(client, input, options \\ []) do
request(client, "DescribeDatasetImportJob", input, options)
end
@doc """
Describes an event tracker.
The response includes the `trackingId` and `status` of the event tracker. For
more information on event trackers, see `CreateEventTracker`.
"""
def describe_event_tracker(client, input, options \\ []) do
request(client, "DescribeEventTracker", input, options)
end
@doc """
Describes the given feature transformation.
"""
def describe_feature_transformation(client, input, options \\ []) do
request(client, "DescribeFeatureTransformation", input, options)
end
@doc """
Describes a filter's properties.
"""
def describe_filter(client, input, options \\ []) do
request(client, "DescribeFilter", input, options)
end
@doc """
Describes a recipe.
A recipe contains three items:
* An algorithm that trains a model.
* Hyperparameters that govern the training.
* Feature transformation information for modifying the input data
before training.
Amazon Personalize provides a set of predefined recipes. You specify a recipe
when you create a solution with the `CreateSolution` API. `CreateSolution`
trains a model by using the algorithm in the specified recipe and a training
dataset. The solution, when deployed as a campaign, can provide recommendations
using the
[GetRecommendations](https://docs.aws.amazon.com/personalize/latest/dg/API_RS_GetRecommendations.html)
API.
"""
def describe_recipe(client, input, options \\ []) do
request(client, "DescribeRecipe", input, options)
end
@doc """
Describes a schema.
For more information on schemas, see `CreateSchema`.
"""
def describe_schema(client, input, options \\ []) do
request(client, "DescribeSchema", input, options)
end
@doc """
Describes a solution.
For more information on solutions, see `CreateSolution`.
"""
def describe_solution(client, input, options \\ []) do
request(client, "DescribeSolution", input, options)
end
@doc """
Describes a specific version of a solution.
For more information on solutions, see `CreateSolution`.
"""
def describe_solution_version(client, input, options \\ []) do
request(client, "DescribeSolutionVersion", input, options)
end
@doc """
Gets the metrics for the specified solution version.
"""
def get_solution_metrics(client, input, options \\ []) do
request(client, "GetSolutionMetrics", input, options)
end
@doc """
Gets a list of the batch inference jobs that have been performed off of a
solution version.
"""
def list_batch_inference_jobs(client, input, options \\ []) do
request(client, "ListBatchInferenceJobs", input, options)
end
@doc """
Returns a list of campaigns that use the given solution.
When a solution is not specified, all the campaigns associated with the account
are listed. The response provides the properties for each campaign, including
the Amazon Resource Name (ARN). For more information on campaigns, see
`CreateCampaign`.
"""
def list_campaigns(client, input, options \\ []) do
request(client, "ListCampaigns", input, options)
end
@doc """
Returns a list of dataset groups.
The response provides the properties for each dataset group, including the
Amazon Resource Name (ARN). For more information on dataset groups, see
`CreateDatasetGroup`.
"""
def list_dataset_groups(client, input, options \\ []) do
request(client, "ListDatasetGroups", input, options)
end
@doc """
Returns a list of dataset import jobs that use the given dataset.
When a dataset is not specified, all the dataset import jobs associated with the
account are listed. The response provides the properties for each dataset import
job, including the Amazon Resource Name (ARN). For more information on dataset
import jobs, see `CreateDatasetImportJob`. For more information on datasets, see
`CreateDataset`.
"""
def list_dataset_import_jobs(client, input, options \\ []) do
request(client, "ListDatasetImportJobs", input, options)
end
@doc """
Returns the list of datasets contained in the given dataset group.
The response provides the properties for each dataset, including the Amazon
Resource Name (ARN). For more information on datasets, see `CreateDataset`.
"""
def list_datasets(client, input, options \\ []) do
request(client, "ListDatasets", input, options)
end
@doc """
Returns the list of event trackers associated with the account.
The response provides the properties for each event tracker, including the
Amazon Resource Name (ARN) and tracking ID. For more information on event
trackers, see `CreateEventTracker`.
"""
def list_event_trackers(client, input, options \\ []) do
request(client, "ListEventTrackers", input, options)
end
@doc """
Lists all filters that belong to a given dataset group.
"""
def list_filters(client, input, options \\ []) do
request(client, "ListFilters", input, options)
end
@doc """
Returns a list of available recipes.
The response provides the properties for each recipe, including the recipe's
Amazon Resource Name (ARN).
"""
def list_recipes(client, input, options \\ []) do
request(client, "ListRecipes", input, options)
end
@doc """
Returns the list of schemas associated with the account.
The response provides the properties for each schema, including the Amazon
Resource Name (ARN). For more information on schemas, see `CreateSchema`.
"""
def list_schemas(client, input, options \\ []) do
request(client, "ListSchemas", input, options)
end
@doc """
Returns a list of solution versions for the given solution.
When a solution is not specified, all the solution versions associated with the
account are listed. The response provides the properties for each solution
version, including the Amazon Resource Name (ARN). For more information on
solutions, see `CreateSolution`.
"""
def list_solution_versions(client, input, options \\ []) do
request(client, "ListSolutionVersions", input, options)
end
@doc """
Returns a list of solutions that use the given dataset group.
When a dataset group is not specified, all the solutions associated with the
account are listed. The response provides the properties for each solution,
including the Amazon Resource Name (ARN). For more information on solutions, see
`CreateSolution`.
"""
def list_solutions(client, input, options \\ []) do
request(client, "ListSolutions", input, options)
end
@doc """
Updates a campaign by either deploying a new solution or changing the value of
the campaign's `minProvisionedTPS` parameter.
To update a campaign, the campaign status must be ACTIVE or CREATE FAILED. Check
the campaign status using the `DescribeCampaign` API.
You must wait until the `status` of the updated campaign is `ACTIVE` before
asking the campaign for recommendations.
For more information on campaigns, see `CreateCampaign`.
"""
  # Thin JSON-RPC wrapper: dispatches the "UpdateCampaign" action with the
  # caller-supplied input map through the shared request/4 pipeline.
  def update_campaign(client, input, options \\ []) do
    request(client, "UpdateCampaign", input, options)
  end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "personalize"}
host = build_host("personalize", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "AmazonPersonalize.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
  # Serializes an outgoing payload as JSON via the client's configured codec.
  defp encode!(client, payload) do
    AWS.Client.encode!(client, payload, :json)
  end
  # Parses a JSON response body via the client's configured codec.
  defp decode!(client, payload) do
    AWS.Client.decode!(client, payload, :json)
  end
end
| 31.795007 | 132 | 0.722185 |
08cbcba69ff518e59855294e6f920e9926a16878 | 576 | ex | Elixir | lib/kerosene/html/foundation.ex | AlloyCI/kerosene | 9cba2a414d34ca0f5c5796f7e17d175374ca2b42 | [
"MIT"
] | null | null | null | lib/kerosene/html/foundation.ex | AlloyCI/kerosene | 9cba2a414d34ca0f5c5796f7e17d175374ca2b42 | [
"MIT"
] | null | null | null | lib/kerosene/html/foundation.ex | AlloyCI/kerosene | 9cba2a414d34ca0f5c5796f7e17d175374ca2b42 | [
"MIT"
] | null | null | null | defmodule Kerosene.HTML.Foundation do
use Phoenix.HTML
  @doc """
  Renders a Foundation-style pagination list.

  Builds a `<ul role="pagination">` whose class combines `"pagination"` with
  `additional_class`, containing one `<li>` link per `{label, page, url,
  current}` tuple; the current page's item gets the `"active"` class (see
  `build_html_class/1`).
  """
  def generate_links(page_list, additional_class) do
    content_tag :ul, class: build_html_class(additional_class), role: "pagination" do
      for {label, _page, url, current} <- page_list do
        content_tag :li, class: build_html_class(current) do
          link("#{label}", to: url)
        end
      end
    end
  end
defp build_html_class(true), do: "active"
defp build_html_class(false), do: nil
defp build_html_class(additional_class) do
String.trim("pagination #{additional_class}")
end
end
| 27.428571 | 85 | 0.697917 |
08cbde9c0e1ce7817ad903a44a7a64e027c996e3 | 758 | exs | Elixir | apps/performance_1/rel/config.exs | WhiteRookPL/elixir-fire-brigade-workshop | 1c6183339fc623842a09f4d10be75bcecf2c37e7 | [
"MIT"
] | 14 | 2017-08-09T14:21:47.000Z | 2022-03-11T04:10:49.000Z | apps/performance_1/rel/config.exs | nicholasjhenry/elixir-fire-brigade-workshop | 1c6183339fc623842a09f4d10be75bcecf2c37e7 | [
"MIT"
] | null | null | null | apps/performance_1/rel/config.exs | nicholasjhenry/elixir-fire-brigade-workshop | 1c6183339fc623842a09f4d10be75bcecf2c37e7 | [
"MIT"
] | 15 | 2017-09-05T15:43:53.000Z | 2020-04-13T16:20:18.000Z | use Mix.Releases.Config,
default_release: :default,
default_environment: Mix.env()
# Erlang distribution cookie for dev builds.
# NOTE(review): cookies are committed in plain text here; distribution
# cookies act as shared secrets between nodes - confirm these placeholders
# are replaced (e.g. via overlay vars / env) before a real deployment.
cookie_dev = :"DEV_COOKIE"
# Dev release: run from source (no bundled ERTS) with dev_mode symlinking.
environment :dev do
  set dev_mode: true
  set include_erts: false
  set cookie: cookie_dev
  set overlay_vars: [ cookie: cookie_dev ]
  set vm_args: "rel/vm.args"
end
# Erlang distribution cookie for prod builds (see NOTE above).
cookie_prod = :"PROD_COOKIE"
# Prod release: self-contained (bundled ERTS), no source included.
environment :prod do
  set include_erts: true
  set include_src: false
  set cookie: cookie_prod
  set overlay_vars: [ cookie: cookie_prod ]
  set vm_args: "rel/vm.args"
end
release :library_app do
set version: "1.0.0"
set applications: [
sasl: :permanent,
logger: :permanent,
library_app: :permanent,
runtime_tools: :permanent,
xprof: :permanent,
recon: :permanent,
eper: :permanent,
dbg: :permanent
]
end | 21.657143 | 43 | 0.705805 |
08cbe35869f269dfde41b7c72ef7baa1d29fe7f3 | 611 | ex | Elixir | lib/conduit/broker/topology/queue.ex | youalreadydid/conduit | d86b8da06dafe8cc665739b5c5397389f85d821c | [
"MIT"
] | 119 | 2016-11-21T13:19:22.000Z | 2021-11-07T17:29:05.000Z | lib/conduit/broker/topology/queue.ex | youalreadydid/conduit | d86b8da06dafe8cc665739b5c5397389f85d821c | [
"MIT"
] | 104 | 2018-02-02T20:42:46.000Z | 2021-08-03T05:36:09.000Z | lib/conduit/broker/topology/queue.ex | youalreadydid/conduit | d86b8da06dafe8cc665739b5c5397389f85d821c | [
"MIT"
] | 21 | 2018-08-03T02:38:21.000Z | 2022-03-16T18:26:58.000Z | defmodule Conduit.Broker.Topology.Queue do
@moduledoc false
@type t :: %__MODULE__{
name: String.t(),
opts: Keyword.t()
}
@type name :: String.t()
@type opts :: Keyword.t()
defstruct name: nil, opts: []
@spec new(name, opts) :: t()
def new(name, opts) do
%__MODULE__{
name: name,
opts: opts
}
end
  @doc false
  # Conduit.Topology.Queue.new(name, opts, config)
  # Escapes the queue into a quoted call to the 3-arity constructor above.
  # `quote(do: ...)` yields a `{call, meta, args}` AST tuple; `put_elem/3`
  # replaces element 2 (the empty args list) with the literal name/opts plus
  # a `config` variable resolved in the Conduit.Broker.Topology context.
  def escape(%__MODULE__{} = queue) do
    quote(do: Conduit.Topology.Queue.new())
    |> put_elem(2, [queue.name, queue.opts, Macro.var(:config, Conduit.Broker.Topology)])
  end
end
| 22.62963 | 89 | 0.605565 |
08cbea2348846b6044bac5fbca16e25b77b10e6c | 1,299 | ex | Elixir | lib/jmdict/entry_xml.ex | bchase/jmdict-elixir | 29fa95ef85d20e344ee62343df94754bd209a3f2 | [
"MIT"
] | null | null | null | lib/jmdict/entry_xml.ex | bchase/jmdict-elixir | 29fa95ef85d20e344ee62343df94754bd209a3f2 | [
"MIT"
] | null | null | null | lib/jmdict/entry_xml.ex | bchase/jmdict-elixir | 29fa95ef85d20e344ee62343df94754bd209a3f2 | [
"MIT"
] | null | null | null | defmodule JMDict.EntryXML do
import SweetXml
  @doc """
  Maps a parsed JMDict XML document onto a nested keyword structure, one
  map per `<entry>` element, covering kanji/kana readings and senses.

  SweetXml `~x` sigil modifiers used below: `e` returns the element itself,
  `l` a list, `s` a string (so `ls` is a list of strings).
  """
  def parse({_, doc}) do
    xpath doc, ~x"//entry"e,
      # EID
      eid: ~x"./ent_seq/text()"s,
      k_ele: [ # KANJI
        ~x"./k_ele"le,
        keb: ~x"./keb/text()"ls,
        ke_inf: ~x"./ke_inf/text()"ls,
        ke_pri: ~x"./ke_pri/text()"ls
      ],
      r_ele: [ # KANA
        ~x"./r_ele"le,
        reb: ~x"./reb/text()"ls,
        re_inf: ~x"./re_inf/text()"ls,
        re_pri: ~x"./re_pri/text()"ls,
        re_nokanji: ~x"./re_nokanji"e,
        re_restr: ~x"./re_restr/text()"ls,
      ],
      sense: [ # SENSES (GLOSSES)
        ~x{./sense}le,
        stagk: ~x{./stagk/text()}ls,
        stagr: ~x{./stagr/text()}ls,
        xref: ~x{./xref/text()}ls, # full ex: <xref>彼・あれ・1</xref>
        pos: ~x{./pos/text()}ls, # prior ./sense/pos apply, unless new added
        field: ~x{./field/text()}ls,
        misc: ~x{./misc/text()}ls, # "usually apply to several senses"
        dial: ~x{./dial/text()}ls,
        gloss: ~x{./gloss/text()}ls,
        s_inf: ~x{./s_inf/text()}ls,
        lsource: ~x{./lsource}le, # attr xml:lang="eng" (default) ISO 639-2
                                  # attr ls_wasei="y" means "yes" e.g. waseieigo
      ]
  end
end
| 34.184211 | 87 | 0.460354 |
08cbed563341472247d17d038a55e06a8242b4d5 | 1,186 | ex | Elixir | lib/plausible_web/controllers/tracker_controller.ex | wvffle/analytics | 2c0fd55bc67f74af1fe1e2641678d44e9fee61d5 | [
"MIT"
] | 1 | 2020-10-08T13:33:04.000Z | 2020-10-08T13:33:04.000Z | lib/plausible_web/controllers/tracker_controller.ex | wvffle/analytics | 2c0fd55bc67f74af1fe1e2641678d44e9fee61d5 | [
"MIT"
] | null | null | null | lib/plausible_web/controllers/tracker_controller.ex | wvffle/analytics | 2c0fd55bc67f74af1fe1e2641678d44e9fee61d5 | [
"MIT"
] | null | null | null | defmodule PlausibleWeb.TrackerController do
use PlausibleWeb, :controller
require EEx
  # Compile each bundled tracker script into a private render function at
  # build time; each takes the base URL and returns the rendered JS source.
  EEx.function_from_file(
    :defp,
    :render_plausible,
    Application.app_dir(:plausible, "priv/tracker/js/plausible.js"),
    [:base_url]
  )
  # Hash-based-routing variant of the tracker script.
  EEx.function_from_file(
    :defp,
    :render_plausible_hash,
    Application.app_dir(:plausible, "priv/tracker/js/plausible.hash.js"),
    [:base_url]
  )
  # Legacy "p.js" tracker script.
  EEx.function_from_file(
    :defp,
    :render_p,
    Application.app_dir(:plausible, "priv/tracker/js/p.js"),
    [:base_url]
  )
  # Cache-Control max-age (seconds) applied to every served script.
  # 1 hour
  @max_age 3600
  # Serves the standard tracker script.
  def plausible(conn, _params) do
    send_js(conn, render_plausible(base_url()))
  end
  # Serves the hash-based-routing tracker variant.
  def plausible_hash(conn, _params) do
    send_js(conn, render_plausible_hash(base_url()))
  end
  # Serves the same script as plausible/2 (alternate route name).
  def analytics(conn, _params) do
    send_js(conn, render_plausible(base_url()))
  end
  # Serves the legacy "p.js" script.
  def p(conn, _params) do
    send_js(conn, render_p(base_url()))
  end
defp send_js(conn, file) do
conn
|> put_resp_header("cache-control", "max-age=#{@max_age},public")
|> put_resp_header("content-type", "application/javascript")
|> send_resp(200, file)
end
  # Root URL of this Phoenix endpoint; passed into the tracker templates.
  defp base_url() do
    PlausibleWeb.Endpoint.url()
  end
end
| 21.178571 | 73 | 0.682968 |
08cc51b088fd2812ab1863aa9ea74b84a9eda74e | 1,943 | ex | Elixir | lib/ex_unit/lib/ex_unit/case_template.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 19,291 | 2015-01-01T02:42:49.000Z | 2022-03-31T21:01:40.000Z | lib/ex_unit/lib/ex_unit/case_template.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 8,082 | 2015-01-01T04:16:23.000Z | 2022-03-31T22:08:02.000Z | lib/ex_unit/lib/ex_unit/case_template.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 3,472 | 2015-01-03T04:11:56.000Z | 2022-03-29T02:07:30.000Z | defmodule ExUnit.CaseTemplate do
@moduledoc """
Defines a module template to be used throughout your test suite.
This is useful when there are a set of setup callbacks or a set
of functions that should be shared between test modules.
Once a case template is used, the regular functionality in
`ExUnit.Case` plus the functionality defined in the template
will become available.
## Example
defmodule MyCase do
use ExUnit.CaseTemplate
setup do
IO.puts("This will run before each test that uses this case")
end
end
defmodule MyTest do
use MyCase, async: true
test "truth" do
assert true
end
end
"""
  @doc false
  # Invoked by `use ExUnit.CaseTemplate` in a template module. Injects the
  # ExUnit callback/assertion machinery plus this module's helpers, and
  # defines an overridable `__using__/1` on the template itself so that
  # `use TheTemplate` in a test module expands through __proxy__/2 below.
  defmacro __using__(_) do
    quote do
      use ExUnit.Callbacks
      import ExUnit.Assertions
      import unquote(__MODULE__)
      # Default __using__ for modules that `use` the template; may be
      # replaced via the `using/2` macro (hence defoverridable).
      defmacro __using__(opts) do
        unquote(__MODULE__).__proxy__(__MODULE__, opts)
      end
      defoverridable __using__: 1
    end
  end
  @doc false
  # Expands, inside a test module, to `use ExUnit.Case` (forwarding opts
  # such as :async) plus setup/setup_all hooks that delegate to the case
  # template's compiled `__ex_unit__/2` callbacks.
  def __proxy__(module, opts) do
    quote do
      use ExUnit.Case, unquote(opts)
      setup_all context do
        unquote(module).__ex_unit__(:setup_all, context)
      end
      setup context do
        unquote(module).__ex_unit__(:setup, context)
      end
    end
  end
  @doc """
  Allows a developer to customize the using block
  when the case template is used.

  ## Example

      defmodule MyCase do
        use ExUnit.CaseTemplate

        using do
          quote do
            # This code is injected into every case that calls "use MyCase"
            alias MyApp.FunModule
          end
        end
      end

  """
  defmacro using(var \\ quote(do: _), do: block) do
    quote do
      # Redefines the template's __using__ (made overridable above) so it
      # still performs the standard proxy expansion (ExUnit.Case + setup
      # hooks) and then appends the user-supplied block's quoted result.
      defmacro __using__(unquote(var) = opts) do
        parent = unquote(__MODULE__).__proxy__(__MODULE__, opts)
        result = unquote(block)
        {:__block__, [], [parent, result]}
      end
    end
  end
end
| 21.351648 | 75 | 0.629439 |
08cc622e79767faf1d00d9290795ebbfbc60dc54 | 1,384 | exs | Elixir | .formatter.exs | maartenvanvliet/ash | c7fd1927169b45d9e1e5ad4ba2ee81703fcf27db | [
"MIT"
] | null | null | null | .formatter.exs | maartenvanvliet/ash | c7fd1927169b45d9e1e5ad4ba2ee81703fcf27db | [
"MIT"
] | null | null | null | .formatter.exs | maartenvanvliet/ash | c7fd1927169b45d9e1e5ad4ba2ee81703fcf27db | [
"MIT"
] | null | null | null | # THIS FILE IS AUTOGENERATED USING `mix ash.formatter`
# DONT MODIFY IT BY HAND
locals_without_parens = [
accept: 1,
allow_nil?: 1,
argument: 2,
argument: 3,
attribute: 2,
attribute: 3,
base_filter: 1,
belongs_to: 2,
belongs_to: 3,
calculate: 2,
calculate: 3,
change: 1,
change: 2,
constraints: 1,
count: 2,
count: 3,
create: 1,
create: 2,
create_timestamp: 1,
create_timestamp: 2,
default: 1,
define_field?: 1,
description: 1,
destination_field: 1,
destination_field_on_join_table: 1,
destroy: 1,
destroy: 2,
expensive?: 1,
field_type: 1,
filter: 1,
generated?: 1,
has_many: 2,
has_many: 3,
has_one: 2,
has_one: 3,
identity: 2,
identity: 3,
join_attributes: 1,
join_relationship: 1,
kind: 1,
many_to_many: 2,
many_to_many: 3,
on: 1,
pagination: 1,
primary?: 1,
primary_key?: 1,
private?: 1,
read: 1,
read: 2,
required?: 1,
resource: 1,
resource: 2,
soft?: 1,
source_field: 1,
source_field_on_join_table: 1,
table: 1,
through: 1,
type: 1,
update: 1,
update: 2,
update_default: 1,
update_timestamp: 1,
update_timestamp: 2,
validate: 1,
validate: 2,
writable?: 1
]
[
inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"],
locals_without_parens: locals_without_parens,
export: [
locals_without_parens: locals_without_parens
]
]
| 17.518987 | 70 | 0.645231 |
08ccf2918c419bad45355119ab2bb88d7a2948b8 | 309 | ex | Elixir | lib/exscript/stdlib/js.ex | craigspaeth/exscript | c3a24790dff0eac592060ba3ae349be7c567e0a7 | [
"MIT"
] | 1 | 2017-12-15T23:55:05.000Z | 2017-12-15T23:55:05.000Z | lib/exscript/stdlib/js.ex | craigspaeth/exscript | c3a24790dff0eac592060ba3ae349be7c567e0a7 | [
"MIT"
] | null | null | null | lib/exscript/stdlib/js.ex | craigspaeth/exscript | c3a24790dff0eac592060ba3ae349be7c567e0a7 | [
"MIT"
] | null | null | null | defmodule JS do
  @doc "No-op function that gets rewritten at compile-time to embed Javascript"
  def embed(_), do: nil
  # Placeholder returning nil at the Elixir level.
  # NOTE(review): presumably also rewritten by the compiler like embed/1
  # (ExScript.Stdlib.JS below provides the real implementation) - confirm.
  def global, do: nil
end
defmodule ExScript.Stdlib.JS do
  # Returns the host environment's global object via embedded JS:
  # `global` under Node, `window` in browsers, otherwise an empty object.
  def global do
    JS.embed "typeof global !== 'undefined' && global || typeof window !== 'undefined' && window || {}"
  end
end
| 25.75 | 103 | 0.682848 |
08ccf86353ff214332156ea3ee858b20511700df | 7,338 | exs | Elixir | test/lib/dwolla/customer_test.exs | ark7-inc/dwolla-elixir | a3ee9c149fbf5cd5d38dc2e854278ab321a94bf5 | [
"MIT"
] | 6 | 2018-07-23T07:43:56.000Z | 2021-07-09T01:24:21.000Z | test/lib/dwolla/customer_test.exs | ark7-inc/dwolla-elixir | a3ee9c149fbf5cd5d38dc2e854278ab321a94bf5 | [
"MIT"
] | 1 | 2019-11-05T05:31:35.000Z | 2019-11-06T21:26:38.000Z | test/lib/dwolla/customer_test.exs | ark7-inc/dwolla-elixir | a3ee9c149fbf5cd5d38dc2e854278ab321a94bf5 | [
"MIT"
] | 7 | 2019-02-12T07:48:09.000Z | 2021-11-08T15:58:06.000Z | defmodule Dwolla.CustomerTest do
use ExUnit.Case
import Dwolla.Factory
alias Dwolla.Customer
alias Plug.Conn
  # Start a fresh Bypass HTTP stub per test and point the Dwolla client's
  # root URI at it; the stub is exposed to each test as `context.bypass`.
  setup do
    bypass = Bypass.open()
    Application.put_env(:dwolla, :root_uri, "http://localhost:#{bypass.port}/")
    {:ok, bypass: bypass}
  end
describe "customer" do
test "create_unverified/2 requests POST and returns new id", %{bypass: bypass} do
Bypass.expect bypass, fn conn ->
assert "POST" == conn.method
{k, v} = http_response_header(:customer)
conn
|> Conn.put_resp_header(k, v)
|> Conn.resp(201, "")
end
params = %{
first_name: "Will",
last_name: "Gilman",
email: "will@example.com",
ip_address: "10.0.0.1"
}
assert {:ok, resp} = Customer.create_unverified("token", params)
assert resp.id == "b2cf497a-b315-497e-95b7-d1238288f8cb"
end
test "create_unverified/2 returns error on incorrect parameters" do
assert {:error, resp} = Customer.create_unverified("token", %{})
assert resp == :invalid_parameters
end
test "create_verified/2 requests POST and returns new id", %{bypass: bypass} do
Bypass.expect bypass, fn conn ->
assert "POST" == conn.method
{k, v} = http_response_header(:customer)
conn
|> Conn.put_resp_header(k, v)
|> Conn.resp(201, "")
end
params = %{
first_name: "Cary",
last_name: "Grant",
email: "tocatchathief@example.com",
ip_address: "10.0.0.1",
type: "personal",
address1: "19218 Hollywood Blvd",
city: "Los Angeles",
state: "CA",
postal_code: "90028",
date_of_birth: "1904-01-18",
ssn: "1234",
phone: "1234567890"
}
assert {:ok, resp} = Customer.create_verified("token", params)
assert resp.id == "b2cf497a-b315-497e-95b7-d1238288f8cb"
end
test "create_verified/2 returns error on incorrect parameters" do
assert {:error, resp} = Customer.create_verified("token", %{})
assert resp == :invalid_parameters
end
test "verify/3 requests POST and returns Dwolla.Customer", %{bypass: bypass} do
body = http_response_body(:customer, :update)
Bypass.expect bypass, fn conn ->
assert "POST" == conn.method
Conn.resp(conn, 200, body)
end
params = %{
first_name: "Will",
last_name: "Gilman",
email: "will@example.com",
type: "personal",
address1: "2340 Chicago St",
address2: "Apt 3",
city: "Bodega Bay",
state: "CA",
postal_code: "94923",
date_of_birth: "1990-03-14",
ssn: "4321",
phone: "0987654321"
}
assert {:ok, resp} = Customer.verify("token", "id", params)
assert resp.__struct__ == Dwolla.Customer
assert resp.status == "verified"
end
test "verify/3 returns error on incorrect parameters" do
assert {:error, resp} = Customer.verify("token", "id", %{})
assert resp == :invalid_parameters
end
test "suspend/2 requests POST and returns Dwolla.Customer", %{bypass: bypass} do
body = http_response_body(:customer, :suspend)
Bypass.expect bypass, fn conn ->
assert "POST" == conn.method
Conn.resp(conn, 200, body)
end
assert {:ok, resp} = Customer.suspend("token", "id")
assert resp.__struct__ == Dwolla.Customer
assert resp.status == "suspended"
end
test "get/2 requests GET and returns Dwolla.Customer", %{bypass: bypass} do
body = http_response_body(:customer, :get)
Bypass.expect bypass, fn conn ->
assert "GET" == conn.method
Conn.resp(conn, 200, body)
end
assert {:ok, resp} = Customer.get("token", "id")
assert resp.__struct__ == Dwolla.Customer
refute resp.id == nil
refute resp.first_name == nil
refute resp.last_name == nil
refute resp.email == nil
refute resp.type == nil
refute resp.status == nil
refute resp.created == nil
refute resp.address1 == nil
refute resp.city == nil
refute resp.phone == nil
refute resp.postal_code == nil
refute resp.state == nil
end
test "update/3 requests POST and returns Dwolla.Customer", %{bypass: bypass} do
body = http_response_body(:customer, :update)
Bypass.expect bypass, fn conn ->
assert "POST" == conn.method
Conn.resp(conn, 200, body)
end
assert {:ok, resp} = Customer.update("token", "id", %{})
assert resp.__struct__ == Dwolla.Customer
end
test "search/2 requests GET and returns list of Dwolla.Customer", %{bypass: bypass} do
body = http_response_body(:customer, :search)
Bypass.expect bypass, fn conn ->
assert "GET" == conn.method
Conn.resp(conn, 200, body)
end
assert {:ok, resp} = Customer.search("token", %{search: "some@email.com"})
assert Enum.count(resp) == 1
customer = Enum.at(resp, 0)
assert customer.__struct__ == Dwolla.Customer
end
test "create_funding_source/3 requests POST and returns id", %{bypass: bypass} do
Bypass.expect bypass, fn conn ->
assert "POST" == conn.method
{k, v} = http_response_header(:funding_source)
conn
|> Conn.put_resp_header(k, v)
|> Conn.resp(201, "")
end
params = %{
routing_number: "114923756",
account_number: "123456788",
type: "checking",
name: "Big Ben's Checking Account"
}
assert {:ok, resp} = Customer.create_funding_source("token", "id", params)
assert resp.id == "e8b4d511-805d-4e91-bfb4-670cd9583a18"
end
test "list_funding_sources/3 requests GET and returns list of Dwolla.FundingSource", %{bypass: bypass} do
body = http_response_body(:funding_source, :list)
Bypass.expect bypass, fn conn ->
assert "GET" == conn.method
Conn.resp(conn, 200, body)
end
assert {:ok, resp} = Customer.list_funding_sources("token", "id")
assert Enum.count(resp) == 4
funding_source = Enum.at(resp, 0)
assert funding_source.__struct__ == Dwolla.FundingSource
end
test "list_funding_sources/3 sets removed query string parameter", %{bypass: bypass} do
body = http_response_body(:funding_source, :list)
Bypass.expect bypass, fn conn ->
assert "removed=false" == conn.query_string
Conn.resp(conn, 200, body)
end
Customer.list_funding_sources("token", "id", false)
end
test "search_transfers/2 requests GET and return list of Dwolla.Transfer", %{bypass: bypass} do
body = http_response_body(:transfer, :search)
Bypass.expect bypass, fn conn ->
assert "GET" == conn.method
Conn.resp(conn, 200, body)
end
assert {:ok, resp} = Customer.search_transfers("token", "id", %{status: "pending"})
assert Enum.empty?(resp)
end
test "search_transfers/2 returns list when params are omitted", %{bypass: bypass} do
body = http_response_body(:transfer, :search)
Bypass.expect bypass, fn conn ->
Conn.resp(conn, 200, body)
end
assert {:ok, resp} = Customer.search_transfers("token", "id")
assert Enum.empty?(resp)
end
end
end
| 31.493562 | 109 | 0.610793 |
08cd26a7fcc56bfe777e51391b1408f3f99da8b9 | 304 | ex | Elixir | apps/ello_notifications/lib/ello_notifications.ex | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 16 | 2017-06-21T21:31:20.000Z | 2021-05-09T03:23:26.000Z | apps/ello_notifications/lib/ello_notifications.ex | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 25 | 2017-06-07T12:18:28.000Z | 2018-06-08T13:27:43.000Z | apps/ello_notifications/lib/ello_notifications.ex | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 3 | 2018-06-14T15:34:07.000Z | 2022-02-28T21:06:13.000Z | defmodule Ello.Notifications do
@moduledoc """
Wrapper module for the Elixir APIs responsible for interacting with notification-related services.
* Ello.Notifications.Stream - Read and write in app notifications.
* Ello.Notifications.Push - TODO
* Ello.Notifications.Email - TODO
"""
end
| 27.636364 | 96 | 0.743421 |
08cd491365eb310b8d440d78d08e130c2269509f | 4,141 | ex | Elixir | test/support/mix/tasks/gen/test_suite.ex | hrzndhrn/json_xema | 955eab7b0919d144b38364164d90275201c89474 | [
"MIT"
] | 54 | 2019-03-10T19:51:07.000Z | 2021-12-23T07:31:09.000Z | test/support/mix/tasks/gen/test_suite.ex | hrzndhrn/json_xema | 955eab7b0919d144b38364164d90275201c89474 | [
"MIT"
] | 36 | 2018-05-20T09:13:20.000Z | 2021-03-14T15:22:03.000Z | test/support/mix/tasks/gen/test_suite.ex | hrzndhrn/json_xema | 955eab7b0919d144b38364164d90275201c89474 | [
"MIT"
] | 3 | 2019-04-12T09:08:51.000Z | 2019-12-04T01:23:56.000Z | defmodule Mix.Tasks.Gen.TestSuite do
@moduledoc """
This mix task generates tests from the JSON Schema test suite.
"""

use Mix.Task

# Upstream repository the suite must be downloaded from (referenced by the
# error message in `run/1`).
@url "https://github.com/json-schema-org/JSON-Schema-Test-Suite"

# Local checkout of the suite (input) and destination for generated tests (output).
@test_suite_path "JSON-Schema-Test-Suite"
@test_path "test/json_schema_test_suite"

# EEx template used to render each generated ExUnit module (read at compile time).
@template File.read!("test/support/test.eex")

# Path suffixes (directories or JSON files) skipped while walking the suite.
@exclude [
  # Link to latest
  "latest",
  # Unsupported JSON Schema versions
  "draft3",
  "draft2019-09",
  # Unsupported optional features
  "content.json",
  "ecmascript-regex.json",
  "zeroTerminatedFloats.json",
  "non-bmp-regex.json",
  # Unsupported semantic formats
  "idn-email.json",
  "idn-hostname.json",
  "iri.json",
  "iri-reference.json"
]
# Test-case descriptions (the "description" key inside each suite file) to skip.
# Fix: this attribute was previously defined twice; module attributes are not
# accumulated by default, so the second definition silently overwrote the
# first and dropped the IDN-format exclusions. Merged into one definition.
@exclude_test_case [
  # Unsupported semantic formats
  "validation of IDN e-mail addresses",
  "validation of IDN hostnames",
  # will be fixed soon
  "Location-independent identifier with absolute URI",
  "Location-independent identifier with base URI change in subschema"
]
# Entry point for `mix gen.test_suite`: regenerates all suite tests, or prints
# download instructions when the suite checkout is missing.
def run(_) do
  IO.puts("Generate JSON Schema test suite.")

  if File.dir?(@test_suite_path) do
    gen_test_suite(@test_suite_path)
  else
    path = Path.join(File.cwd!(), @test_suite_path)

    IO.puts(
      "Error: Can't find JSON Schema Test Suite at #{path}, " <>
        "please download test suite from: #{@url} ."
    )
  end
end
# Wipes the previous output directory, then regenerates one test file per
# included suite JSON file.
defp gen_test_suite(path) do
  File.cwd!() |> Path.join(@test_path) |> File.rm_rf!()

  tests_dir = Path.join(path, "tests")

  tests_dir
  |> get_file_names()
  |> Enum.map(&read_json/1)
  |> Enum.map(&create_tests/1)
  |> Enum.map(&write_test_file/1)
end
# Recursively collects suite file paths, pruning excluded directories and
# files (see `include?/1`) at every level of the walk.
defp get_file_names(path) do
  cond do
    File.dir?(path) ->
      path
      |> File.ls!()
      |> Enum.map(&Path.join(path, &1))
      |> Enum.filter(&include?/1)
      |> Enum.flat_map(&get_file_names/1)

    include?(path) ->
      [path]

    true ->
      []
  end
end
# Writes one generated test file, creating its directory tree first.
defp write_test_file({file_name, code}) do
  destination = Path.join(File.cwd!(), file_name)
  destination |> Path.dirname() |> File.mkdir_p!()
  File.write!(destination, code)
end
defp include?(file), do: not Enum.any?(@exclude, &String.ends_with?(file, &1))
defp read_json(file_name), do: {file_name, file_name |> File.read!() |> Jason.decode!()}
# Turns a decoded suite file into `{destination_path, generated_code}`.
defp create_tests({file_name, json}) do
  {test_file_name(file_name), test_code(file_name, json)}
end
# Maps a suite JSON path to its generated test-file path: the suite root is
# swapped for @test_path, the name is snake_cased, and ".json" becomes
# "_test.exs".
# Fix: the trailing regex previously used `.json$` with an unescaped dot,
# which matches any character before "json"; it is now anchored as `\.json$`.
defp test_file_name(file_name) do
  file_name
  |> String.replace(Path.join(@test_suite_path, "tests"), @test_path)
  |> ConvCase.to_snake_case()
  |> String.replace(~r/\.json$/, "_test.exs")
end
# Renders the formatted source of one generated test module: excluded cases
# are dropped, duplicate descriptions are disambiguated, then the EEx
# template is evaluated and formatted.
defp test_code(file_name, test_cases) do
  included? = fn test_case -> test_case["description"] not in @exclude_test_case end

  test_cases =
    test_cases
    |> Enum.filter(included?)
    |> update_descriptions()

  assigns = [module: module_name(file_name), test_cases: test_cases]

  @template
  |> EEx.eval_string(assigns: assigns)
  |> Code.format_string!()
end
# Derives the generated module name ("JsonSchemaTestSuite.<CamelizedPath>")
# from the suite file path.
# Fix: `.json$` -> `\.json$` (the dot was previously unescaped and matched
# any character).
defp module_name(file_name) do
  regex = ~r/#{@test_suite_path}\/tests\/(.*)\.json$/
  [_, name] = Regex.run(regex, file_name)
  module = name |> ConvCase.to_camel_case() |> Macro.camelize()
  "JsonSchemaTestSuite.#{module}"
end
# Walks the test cases in order, renaming any case whose description collides
# with one already emitted (see `update_description/3`).
defp update_descriptions(test_cases) do
  {updated_cases, _seen} =
    Enum.map_reduce(test_cases, [], fn test_case, seen ->
      updated = update_description(test_case, seen, 0)
      {updated, [updated | seen]}
    end)

  updated_cases
end
# Returns `test_case` with a description that does not collide with any map
# in `test_cases`: the first collision appends " (1)", then " (2)", and so on
# until a free description is found.
defp update_description(test_case, test_cases, count) do
  description =
    if count == 0 do
      test_case["description"]
    else
      ~s|#{test_case["description"]} (#{count})|
    end

  taken? =
    Enum.any?(test_cases, fn
      %{"description" => ^description} -> true
      _ -> false
    end)

  if taken? do
    update_description(test_case, test_cases, count + 1)
  else
    Map.put(test_case, "description", description)
  end
end
end
| 26.375796 | 92 | 0.627626 |
08cd8417801fb11aef83cb7c69216a38e10264f5 | 841 | ex | Elixir | test/support/conn_case.ex | zan-kusterle/Liquio | 341556529633f9a99ad95b502f182d6644b9b1ed | [
"MIT"
] | 5 | 2016-09-12T08:31:14.000Z | 2018-03-12T23:40:08.000Z | test/support/conn_case.ex | zan-kusterle/Liquio | 341556529633f9a99ad95b502f182d6644b9b1ed | [
"MIT"
] | null | null | null | test/support/conn_case.ex | zan-kusterle/Liquio | 341556529633f9a99ad95b502f182d6644b9b1ed | [
"MIT"
] | null | null | null | defmodule LiquioWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.

Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build and query models.

Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""

# Makes this module a reusable case template: test modules pull in the
# `using` and `setup` blocks below via `use LiquioWeb.ConnCase`.
use ExUnit.CaseTemplate
# Code injected into every module that does `use LiquioWeb.ConnCase`.
using do
  quote do
    # Import conveniences for testing with connections
    use Phoenix.ConnTest
    import LiquioWeb.Router.Helpers

    # The default endpoint for testing
    @endpoint LiquioWeb.Endpoint
  end
end
# Builds a fresh test connection before each test.
# Fix: `tags` was bound but never used, producing an unused-variable compiler
# warning; it is now underscore-prefixed.
# NOTE(review): the moduledoc mentions transactional resets, but this setup
# does not check out a sandbox connection — confirm whether that is intended.
setup _tags do
  {:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 24.735294 | 56 | 0.724138 |
08cdb848772a881a94295dd3366b0a78d247a8fa | 379 | exs | Elixir | priv/repo/seeds.exs | egjimenezg/absinthe_subscriptions | 49d05d9ce141c9de5d1cfabe21452d2bde8c49d9 | [
"Apache-2.0"
] | null | null | null | priv/repo/seeds.exs | egjimenezg/absinthe_subscriptions | 49d05d9ce141c9de5d1cfabe21452d2bde8c49d9 | [
"Apache-2.0"
] | null | null | null | priv/repo/seeds.exs | egjimenezg/absinthe_subscriptions | 49d05d9ce141c9de5d1cfabe21452d2bde8c49d9 | [
"Apache-2.0"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# AbsintheSubscriptions.Repo.insert!(%AbsintheSubscriptions.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 31.583333 | 77 | 0.728232 |
08cdf93a4d67441ed0c8deeb10754757319b90c7 | 80 | ex | Elixir | priv/templates/coherence.install/views/coherence/session_view.ex | remigijusj/coherence | 36fe35b0bfe7ac63b44b4046f3ba62f2fe69603a | [
"MIT"
] | 1 | 2022-03-06T16:30:21.000Z | 2022-03-06T16:30:21.000Z | priv/templates/coherence.install/views/coherence/session_view.ex | ysbaddaden/coherence | 5c4f26d3c87f6a16638adf623d041e2723ccf2b8 | [
"MIT"
] | null | null | null | priv/templates/coherence.install/views/coherence/session_view.ex | ysbaddaden/coherence | 5c4f26d3c87f6a16638adf623d041e2723ccf2b8 | [
"MIT"
] | 2 | 2017-09-22T16:54:36.000Z | 2021-11-09T20:55:58.000Z | defmodule Coherence.SessionView do
use <%= base %>.Coherence.Web, :view
end
| 13.333333 | 38 | 0.7125 |
08cdffaeddf6327f76f0f872d1c5d39f6213e96c | 449 | ex | Elixir | test/fixtures/elixir/multiple_d_post.ex | csperando/curlconverter | 733f110e5621375701f4424299ccd72e669876f6 | [
"MIT"
] | 536 | 2021-10-06T17:21:25.000Z | 2022-03-31T13:05:48.000Z | test/fixtures/elixir/multiple_d_post.ex | csperando/curlconverter | 733f110e5621375701f4424299ccd72e669876f6 | [
"MIT"
] | 74 | 2021-10-08T13:57:14.000Z | 2022-03-31T06:55:39.000Z | test/fixtures/elixir/multiple_d_post.ex | csperando/curlconverter | 733f110e5621375701f4424299ccd72e669876f6 | [
"MIT"
] | 104 | 2021-10-06T19:36:15.000Z | 2022-03-31T07:34:04.000Z | request = %HTTPoison.Request{
method: :post,
url: "https://localhost:28139/webservices/rest.php",
options: [],
headers: [
{~s|Content-Type|, ~s|application/x-www-form-urlencoded|},
],
params: [],
body: [
{~s|version|, ~s|1.2|},
{~s|auth_user|, ~s|fdgxf|},
{~s|auth_pwd|, ~s|oxfdscds|},
{~s|json_data|, ~s|{ "operation": "core/get", "class": "Software", "key": "key" }|}
]
}
response = HTTPoison.request(request)
| 24.944444 | 87 | 0.581292 |
08ce093942f5fc503e866786de80593243144e97 | 19,401 | ex | Elixir | lib/ecto/adapters/sql/sandbox.ex | stevedomin/ecto | 31d1fc4a31bce4c159b0ffe33398be7b1f095f2e | [
"Apache-2.0"
] | null | null | null | lib/ecto/adapters/sql/sandbox.ex | stevedomin/ecto | 31d1fc4a31bce4c159b0ffe33398be7b1f095f2e | [
"Apache-2.0"
] | null | null | null | lib/ecto/adapters/sql/sandbox.ex | stevedomin/ecto | 31d1fc4a31bce4c159b0ffe33398be7b1f095f2e | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Adapters.SQL.Sandbox do
@moduledoc ~S"""
A pool for concurrent transactional tests.
The sandbox pool is implemented on top of an ownership mechanism.
When started, the pool is in automatic mode, which means the
repository will automatically check connections out as with any
other pool.
The `mode/2` function can be used to change the pool mode to
manual or shared. In both modes, the connection must be explicitly
checked out before use. When explicit checkouts are made, the
sandbox will wrap the connection in a transaction by default and
control who has access to it. This means developers have a safe
mechanism for running concurrent tests against the database.
## Database support
While both PostgreSQL and MySQL support SQL Sandbox, only PostgreSQL
supports concurrent tests while running the SQL Sandbox. Therefore, do
not run concurrent tests with MySQL as you may run into deadlocks due to
its transaction implementation.
## Example
The first step is to configure your database to use the
`Ecto.Adapters.SQL.Sandbox` pool. You set those options in your
`config/config.exs` (or preferably `config/test.exs`) if you
haven't yet:
config :my_app, Repo,
pool: Ecto.Adapters.SQL.Sandbox
Now with the test database properly configured, you can write
transactional tests:
# At the end of your test_helper.exs
# Set the pool mode to manual for explicit checkouts
Ecto.Adapters.SQL.Sandbox.mode(Repo, :manual)
defmodule PostTest do
# Once the mode is manual, tests can also be async
use ExUnit.Case, async: true
setup do
# Explicitly get a connection before each test
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Repo)
end
test "create post" do
# Use the repository as usual
assert %Post{} = Repo.insert!(%Post{})
end
end
## Collaborating processes
The example above is straight-forward because we have only
a single process using the database connection. However,
sometimes a test may need to interact with multiple processes,
all using the same connection so they all belong to the same
transaction.
Before we discuss solutions, let's see what happens if we try
to use a connection from a new process without explicitly
checking it out first:
setup do
# Explicitly get a connection before each test
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Repo)
end
test "create two posts, one sync, another async" do
task = Task.async(fn ->
Repo.insert!(%Post{title: "async"})
end)
assert %Post{} = Repo.insert!(%Post{title: "sync"})
assert %Post{} = Task.await(task)
end
The test above will fail with an error similar to:
** (RuntimeError) cannot find ownership process for #PID<0.35.0>
That's because the `setup` block is checking out the connection only
for the test process. Once we spawn a Task, there is no connection
assigned to it and it will fail.
The sandbox module provides two ways of doing so, via allowances or
by running in shared mode.
### Allowances
The idea behind allowances is that you can explicitly tell a process
which checked out connection it should use, allowing multiple processes
to collaborate over the same connection. Let's give it a try:
test "create two posts, one sync, another async" do
parent = self()
task = Task.async(fn ->
Ecto.Adapters.SQL.Sandbox.allow(Repo, parent, self())
Repo.insert!(%Post{title: "async"})
end)
assert %Post{} = Repo.insert!(%Post{title: "sync"})
assert %Post{} = Task.await(task)
end
And that's it, by calling `allow/3`, we are explicitly assigning
the parent's connection (i.e. the test process' connection) to
the task.
Because allowances use an explicit mechanism, their advantage
is that you can still run your tests in async mode. The downside
is that you need to explicitly control and allow every single
process. This is not always possible. In such cases, you will
want to use shared mode.
### Shared mode
Shared mode allows a process to share its connection with any other
process automatically, without relying on explicit allowances.
Let's change the example above to use shared mode:
setup do
# Explicitly get a connection before each test
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Repo)
# Setting the shared mode must be done only after checkout
Ecto.Adapters.SQL.Sandbox.mode(Repo, {:shared, self()})
end
test "create two posts, one sync, another async" do
task = Task.async(fn ->
Repo.insert!(%Post{title: "async"})
end)
assert %Post{} = Repo.insert!(%Post{title: "sync"})
assert %Post{} = Task.await(task)
end
By calling `mode({:shared, self()})`, any process that needs
to talk to the database will now use the same connection as the
one checked out by the test process during the `setup` block.
Make sure to always check a connection out before setting the mode
to `{:shared, self()}`.
The advantage of shared mode is that by calling a single function,
you will ensure all upcoming processes and operations will use that
shared connection, without a need to explicitly allow them. The
downside is that tests can no longer run concurrently in shared mode.
### Summing up
There are two mechanisms for explicit ownerships:
* Using allowances - requires explicit allowances via `allow/3`.
Tests may run concurrently.
* Using shared mode - does not require explicit allowances.
Tests cannot run concurrently.
## FAQ
When running the sandbox mode concurrently, developers may run into
issues we explore in the upcoming sections.
### "owner exited while client is still running"
In some situations, you may see error reports similar to the one below:
21:57:43.910 [error] Postgrex.Protocol (#PID<0.284.0>) disconnected:
** (DBConnection.Error) owner #PID<> exited while client #PID<> is still running
Such errors are usually followed by another error report from another
process that failed while executing a database query.
To understand the failure, we need to answer the question: who are the
owner and client processes? The owner process is the one that checks
out the connection, which, in the majority of cases, is the test process,
the one running your tests. In other words, the error happens because
the test process has finished, either because the test succeeded or
because it failed, while the client process was trying to get information
from the database. Since the owner process, the one that owns the
connection, no longer exists, Ecto will check the connection back in
and notify the client process using the connection that the connection
owner is no longer available.
This can happen in different situations. For example, imagine you query
a GenServer in your test that is using a database connection:
test "gets results from GenServer" do
{:ok, pid} = MyAppServer.start_link()
Ecto.Adapters.SQL.Sandbox.allow(Repo, self(), pid)
assert MyAppServer.get_my_data_fast(timeout: 1000) == [...]
end
In the test above, we spawn the server and allow it to perform database
queries using the connection owned by the test process. Since we gave
a timeout of 1 second, in case the database takes longer than one second
to reply, the test process will fail, due to the timeout, making the
"owner down" message to be printed because the server process is still
waiting on a connection reply.
In some situations, such failures may be intermittent. Imagine that you
allow a process that queries the database every half second:
test "queries periodically" do
{:ok, pid} = PeriodicServer.start_link()
Ecto.Adapters.SQL.Sandbox.allow(Repo, self(), pid)
# more tests
end
  Because the server is querying the database from time to time, there is
  a chance that, when the test exits, the periodic process may be querying
  the database, regardless of test success or failure.
### "owner timed out because it owned the connection for longer than Nms"
In some situations, you may see error reports similar to the one below:
09:56:43.081 [error] Postgrex.Protocol (#PID<>) disconnected:
** (DBConnection.ConnectionError) owner #PID<> timed out
because it owned the connection for longer than 15000ms
If you have a long running test (or you're debugging with IEx.pry), the timeout for the connection ownership may
be too short. You can increase the timeout by setting the
`:ownership_timeout` options for your repo config in `config/config.exs` (or preferably in `config/test.exs`):
config :my_app, MyApp.Repo,
ownership_timeout: NEW_TIMEOUT_IN_MILLISECONDS
The `:ownership_timeout` option is part of
[`DBConnection.Ownership`](https://hexdocs.pm/db_connection/DBConnection.Ownership.html)
and defaults to 15000ms. Timeouts are given as integers in milliseconds.
Alternately, if this is an issue for only a handful of long-running tests,
you can pass an `:ownership_timeout` option when calling
`Ecto.Adapters.SQL.Sandbox.checkout/2` instead of setting a longer timeout
globally in your config.
### Database locks and deadlocks
Since the sandbox relies on concurrent transactional tests, there is
a chance your tests may trigger deadlocks in your database. This is
specially true with MySQL, where the solutions presented here are not
enough to avoid deadlocks and therefore making the use of concurrent tests
with MySQL prohibited.
However, even on databases like PostgreSQL, performance degradations or
deadlocks may still occur. For example, imagine multiple tests are
trying to insert the same user to the database. They will attempt to
retrieve the same database lock, causing only one test to succeed and
run while all other tests wait for the lock.
In other situations, two different tests may proceed in a way that
each test retrieves locks desired by the other, leading to a situation
that cannot be resolved, a deadlock. For instance:
Transaction 1: Transaction 2:
begin
begin
update posts where id = 1
update posts where id = 2
update posts where id = 1
update posts where id = 2
**deadlock**
There are different ways to avoid such problems. One of them is
to make sure your tests work on distinct data. Regardless of
your choice between using fixtures or factories for test data,
make sure you get a new set of data per test. This is specially
important for data that is meant to be unique like user emails.
For example, instead of:
def insert_user do
Repo.insert! %User{email: "sample@example.com"}
end
prefer:
def insert_user do
Repo.insert! %User{email: "sample-#{counter()}@example.com"}
end
defp counter do
System.unique_integer [:positive]
end
Deadlocks may happen in other circumstances. If you believe you
are hitting a scenario that has not been described here, please
report an issue so we can improve our examples. As a last resort,
you can always disable the test triggering the deadlock from
running asynchronously by setting "async: false".
"""
defmodule Connection do
  @moduledoc false
  if Code.ensure_loaded?(DBConnection) do
    @behaviour DBConnection
  end

  # Proxy connection module used by the sandbox. State is carried as
  # `{conn_mod, state, in_transaction?}` where `conn_mod` is the real
  # adapter connection module; every callback is forwarded to it via
  # `proxy/3`, which re-wraps the returned state.

  # Connections reach this module already established (via the sandbox
  # pool), so connecting through it is a programming error.
  def connect(_opts) do
    raise "should never be invoked"
  end

  def disconnect(err, {conn_mod, state, _in_transaction?}) do
    conn_mod.disconnect(err, state)
  end

  def checkout(state), do: proxy(:checkout, state, [])
  def checkin(state), do: proxy(:checkin, state, [])
  def ping(state), do: proxy(:ping, state, [])

  # A user-level `begin` becomes a savepoint (the wrapping real transaction
  # is owned by the pool); on success the third tuple element flips to true
  # to record that we are inside a user transaction.
  def handle_begin(opts, {conn_mod, state, false}) do
    opts = [mode: :savepoint] ++ opts

    case conn_mod.handle_begin(opts, state) do
      {:ok, value, state} ->
        {:ok, value, {conn_mod, state, true}}

      {kind, err, state} ->
        {kind, err, {conn_mod, state, false}}
    end
  end

  # Commit/rollback release the savepoint and clear the in-transaction flag
  # before proxying to the adapter.
  def handle_commit(opts, {conn_mod, state, true}) do
    opts = [mode: :savepoint] ++ opts
    proxy(:handle_commit, {conn_mod, state, false}, [opts])
  end

  def handle_rollback(opts, {conn_mod, state, true}) do
    opts = [mode: :savepoint] ++ opts
    proxy(:handle_rollback, {conn_mod, state, false}, [opts])
  end

  # Query and cursor callbacks are forwarded unchanged, except that outside a
  # user transaction they run in savepoint mode (see maybe_savepoint/2) so a
  # failed query does not poison the wrapping sandbox transaction.
  def handle_prepare(query, opts, state),
    do: proxy(:handle_prepare, state, [query, maybe_savepoint(opts, state)])

  def handle_execute(query, params, opts, state),
    do: proxy(:handle_execute, state, [query, params, maybe_savepoint(opts, state)])

  def handle_close(query, opts, state),
    do: proxy(:handle_close, state, [query, maybe_savepoint(opts, state)])

  def handle_declare(query, params, opts, state),
    do: proxy(:handle_declare, state, [query, params, maybe_savepoint(opts, state)])

  def handle_first(query, cursor, opts, state),
    do: proxy(:handle_first, state, [query, cursor, maybe_savepoint(opts, state)])

  def handle_next(query, cursor, opts, state),
    do: proxy(:handle_next, state, [query, cursor, maybe_savepoint(opts, state)])

  def handle_deallocate(query, cursor, opts, state),
    do: proxy(:handle_deallocate, state, [query, cursor, maybe_savepoint(opts, state)])

  def handle_info(msg, state),
    do: proxy(:handle_info, state, [msg])

  # Prepends `mode: :savepoint` when not inside a user transaction, unless
  # the caller opted out via `sandbox_subtransaction: false`.
  defp maybe_savepoint(opts, {_, _, in_transaction?}) do
    if not in_transaction? and Keyword.get(opts, :sandbox_subtransaction, true) do
      [mode: :savepoint] ++ opts
    else
      opts
    end
  end

  # Applies `conn_mod.fun(args ++ [state])`, then re-wraps the adapter state
  # (always the last element of the result tuple) into the proxy tuple.
  defp proxy(fun, {conn_mod, state, in_transaction?}, args) do
    result = apply(conn_mod, fun, args ++ [state])
    pos = :erlang.tuple_size(result)
    :erlang.setelement(pos, result, {conn_mod, :erlang.element(pos, result), in_transaction?})
  end
end
defmodule Pool do
  @moduledoc false
  if Code.ensure_loaded?(DBConnection) do
    @behaviour DBConnection.Pool
  end

  # Proxy pool used by the sandbox: it checks connections out of the real
  # pool (`opts[:sandbox_pool]`), opens the wrapping transaction, and hands
  # back a `Connection`-proxied state. Lifecycle callbacks that only make
  # sense for a real pool raise.

  def ensure_all_started(_opts, _type) do
    raise "should never be invoked"
  end

  def start_link(_module, _opts) do
    raise "should never be invoked"
  end

  def child_spec(_module, _opts, _child_opts) do
    raise "should never be invoked"
  end

  # Checks out from the underlying pool and immediately begins the real
  # transaction that wraps the whole sandbox session; on begin failure the
  # connection is disconnected so it cannot leak back half-initialized.
  def checkout(pool, opts) do
    pool_mod = opts[:sandbox_pool]

    case pool_mod.checkout(pool, opts) do
      {:ok, pool_ref, conn_mod, conn_state} ->
        case conn_mod.handle_begin([mode: :transaction] ++ opts, conn_state) do
          {:ok, _, conn_state} ->
            {:ok, pool_ref, Connection, {conn_mod, conn_state, false}}

          {_error_or_disconnect, err, conn_state} ->
            pool_mod.disconnect(pool_ref, err, conn_state, opts)
        end

      error ->
        error
    end
  end

  # Rolls the wrapping transaction back (discarding everything done during
  # the session) before returning the connection to the underlying pool.
  def checkin(pool_ref, {conn_mod, conn_state, _in_transaction?}, opts) do
    pool_mod = opts[:sandbox_pool]

    case conn_mod.handle_rollback([mode: :transaction] ++ opts, conn_state) do
      {:ok, _, conn_state} ->
        pool_mod.checkin(pool_ref, conn_state, opts)

      {_error_or_disconnect, err, conn_state} ->
        pool_mod.disconnect(pool_ref, err, conn_state, opts)
    end
  end

  # Disconnect/stop simply unwrap the proxied state and delegate.
  def disconnect(owner, exception, {_conn_mod, conn_state, _in_transaction?}, opts) do
    opts[:sandbox_pool].disconnect(owner, exception, conn_state, opts)
  end

  def stop(owner, reason, {_conn_mod, conn_state, _in_transaction?}, opts) do
    opts[:sandbox_pool].stop(owner, reason, conn_state, opts)
  end
end
@doc """
Sets the mode for the `repo` pool.

The mode can be `:auto`, `:manual` or `{:shared, <pid>}`.
See the moduledoc for when to use each mode.
"""
def mode(repo, mode)
    when mode in [:auto, :manual]
    when elem(mode, 0) == :shared and is_pid(elem(mode, 1)) do
  {_repo_mod, name, opts} = proxy_pool(repo)

  # If the mode is set to anything but shared, let's
  # automatically checkin the current connection to
  # force it to act according to the chosen mode.
  # TODO: This is may no longer be necessary on latest DBConnection
  if mode in [:auto, :manual] do
    checkin(repo, [])
  end

  DBConnection.Ownership.ownership_mode(name, mode, opts)
end
@doc """
Checks a connection out for the given `repo`.

The process calling `checkout/2` will own the connection
until it calls `checkin/2` or until it crashes, at which point
the connection will be automatically reclaimed by the pool.

## Options

  * `:sandbox` - when true the connection is wrapped in
    a transaction. Defaults to true.

  * `:isolation` - sets the transaction to the given isolation level

  * `:ownership_timeout` - limits how long the connection can be
    owned. Defaults to the compiled value from your repo config in
    `config/config.exs` (or preferably in `config/test.exs`), or
    15000 ms if not set.
"""
def checkout(repo, opts \\ []) do
  # With sandboxing (the default) checkout goes through the proxy pool so
  # the connection is wrapped in a transaction; otherwise it is a plain
  # ownership checkout of a raw connection.
  {_repo_mod, name, pool_opts} =
    if Keyword.get(opts, :sandbox, true) do
      proxy_pool(repo)
    else
      Ecto.Registry.lookup(repo)
    end

  # Per-checkout overrides take precedence over the repo configuration.
  pool_opts_overrides = Keyword.take(opts, [:ownership_timeout])
  pool_opts = Keyword.merge(pool_opts, pool_opts_overrides)

  case DBConnection.Ownership.ownership_checkout(name, pool_opts) do
    :ok ->
      # Apply the requested isolation level on the fresh connection.
      if isolation = opts[:isolation] do
        set_transaction_isolation_level(repo, isolation)
      end

      :ok

    other ->
      other
  end
end
# Runs `SET TRANSACTION ISOLATION LEVEL <isolation>` on the freshly
# checked-out connection, outside of a savepoint (`sandbox_subtransaction:
# false`) so it applies to the wrapping transaction. On failure the
# connection is checked back in before the error is raised.
# NOTE(review): `isolation` is interpolated directly into the SQL string; it
# comes from the `:isolation` option supplied by test code, not external
# input — confirm callers never pass untrusted values here.
defp set_transaction_isolation_level(repo, isolation) do
  query = "SET TRANSACTION ISOLATION LEVEL #{isolation}"

  case Ecto.Adapters.SQL.query(repo, query, [], sandbox_subtransaction: false) do
    {:ok, _} ->
      :ok

    {:error, error} ->
      checkin(repo, [])
      raise error
  end
end
@doc """
Returns the connection owned by the calling process back
into the sandbox pool.
"""
def checkin(repo, _opts \\ []) do
  {_repo, pool_name, pool_opts} = Ecto.Registry.lookup(repo)
  DBConnection.Ownership.ownership_checkin(pool_name, pool_opts)
end
@doc """
Grants the `allow` process access to the connection
currently owned by the `parent` process.
"""
def allow(repo, parent, allow, _opts \\ []) do
  {_repo, pool_name, pool_opts} = Ecto.Registry.lookup(repo)
  DBConnection.Ownership.ownership_allow(pool_name, parent, allow, pool_opts)
end
@doc """
Runs a function outside of the sandbox.
"""
def unboxed_run(repo, fun) do
  # Release whatever sandboxed connection this process may hold, then grab
  # a raw (non-transactional) connection for the duration of `fun`.
  checkin(repo)
  checkout(repo, sandbox: false)

  try do
    fun.()
  after
    # Always return the raw connection, even if `fun` raises.
    checkin(repo)
  end
end
# Looks up the repo's registered pool configuration and rewrites it so
# checkouts go through the sandbox `Pool` proxy layered on top of the
# configured ownership pool. Raises with setup instructions if the repo is
# not configured with `pool: DBConnection.Ownership`.
defp proxy_pool(repo) do
  {repo_mod, name, opts} = Ecto.Registry.lookup(repo)

  if opts[:pool] != DBConnection.Ownership do
    raise """
    cannot configure sandbox with pool #{inspect opts[:pool]}.
    To use the SQL Sandbox, configure your repository pool as:

        pool: #{inspect __MODULE__}
    """
  end

  {pool, opts} = Keyword.pop(opts, :ownership_pool, DBConnection.Poolboy)
  {repo_mod, name, [repo: repo, sandbox_pool: pool, ownership_pool: Pool] ++ opts}
end
end
| 36.195896 | 114 | 0.685016 |
08ce1f489203f84a730c528948e07f01eae469dc | 1,185 | exs | Elixir | mix.exs | cschmatzler/datamatrix | ac8e0733f8535c1b276276acad9e90e90a53e7aa | [
"MIT"
] | 5 | 2020-02-24T22:31:19.000Z | 2022-02-11T08:57:43.000Z | mix.exs | cschmatzler/datamatrix | ac8e0733f8535c1b276276acad9e90e90a53e7aa | [
"MIT"
] | null | null | null | mix.exs | cschmatzler/datamatrix | ac8e0733f8535c1b276276acad9e90e90a53e7aa | [
"MIT"
] | 1 | 2021-08-05T12:01:43.000Z | 2021-08-05T12:01:43.000Z | defmodule DataMatrix.MixProject do
use Mix.Project
@version "0.1.3"
# Mix project definition; see `mix help compile.app` and `mix help deps`.
def project do
  base = [
    app: :datamatrix,
    version: @version,
    elixir: "~> 1.9",
    start_permanent: Mix.env() == :prod
  ]

  base ++ [deps: deps(), description: description(), package: package(), escript: escript()]
end
# Run "mix help compile.app" to learn about applications.
# Run "mix help compile.app" to learn about applications.
def application do
  [extra_applications: [:logger]]
end
# Run "mix help deps" to learn about dependencies.
# Run "mix help deps" to learn about dependencies.
# All entries are dev/test-only tooling (`runtime: false`); none ship
# with the library at runtime.
defp deps do
  [
    {:ex_doc, ">= 0.0.0", only: :dev, runtime: false},
    {:git_hooks, "~> 0.4.0", only: [:test, :dev], runtime: false},
    {:credo, "~> 1.1.0", only: [:dev, :test], runtime: false},
    {:doctor, "~> 0.11.0", only: [:dev, :test], runtime: false}
  ]
end
# One-line package description shown on Hex.
defp description,
  do: "Library that enables programs to write Data Matrix barcodes of the modern ECC200 variety."
# Hex package metadata: shipped files, license, and project links.
defp package do
  [
    files: ["lib", "mix.exs", "README.md", "LICENSE"],
    licenses: ["MIT"],
    links: %{"GitHub" => "https://github.com/0x8b/datamatrix"}
  ]
end
# Escript configuration: entry module for the command-line build.
defp escript, do: [main_module: DataMatrix.CLI]
end
| 21.944444 | 95 | 0.578903 |
08ce252688ff1314139a798ead76d4e63f10f7e3 | 2,179 | ex | Elixir | lib/erlef_web/html.ex | ferd/website | 400409d05ba91ff2a84179ed9a769196aee8f564 | [
"Apache-2.0"
] | null | null | null | lib/erlef_web/html.ex | ferd/website | 400409d05ba91ff2a84179ed9a769196aee8f564 | [
"Apache-2.0"
] | null | null | null | lib/erlef_web/html.ex | ferd/website | 400409d05ba91ff2a84179ed9a769196aee8f564 | [
"Apache-2.0"
] | null | null | null | defmodule ErlefWeb.HTML do
@moduledoc """
ErlefWeb.HTML - HTML helpers for ErlefWeb
"""
import Phoenix.HTML.Tag
# Renders the right-bleed SVG hero wrapper.
#
# Accepts either a keyword list of extra options (merged into `opts`) or
# content, which is wrapped in a `:do` option.
# Fix: the content clause previously called `right_svg_hero(text, ...)` —
# re-entering the same two-argument clause with a non-list first argument
# and recursing forever. It now delegates to `right_svg_hero/1`, mirroring
# `left_svg_hero/2` below.
def right_svg_hero(f_opts, opts) when is_list(f_opts) do
  right_svg_hero(f_opts ++ opts)
end

def right_svg_hero(text, opts) do
  right_svg_hero([{:do, text}] ++ opts)
end

def right_svg_hero(opts) do
  class = "clip-svg clip-svg-hero right-bleed bg-dark"
  div_tag([{:class, class}] ++ opts)
end
# Renders the left-bleed SVG hero wrapper. Extra keyword options are merged;
# bare content is wrapped in a `:do` option before delegating to arity 1.
def left_svg_hero(f_opts, opts) when is_list(f_opts), do: left_svg_hero(f_opts ++ opts)

def left_svg_hero(text, opts), do: left_svg_hero([{:do, text}] ++ opts)

def left_svg_hero(opts) do
  div_tag([{:class, "clip-svg clip-svg-hero left-bleed bg-dark"}] ++ opts)
end
# Thin wrapper over `Phoenix.HTML.Tag.content_tag/3` for a `<div>`.
def div_tag(text, opts), do: content_tag(:div, text, opts)
# Keyword-only variant: the div's contents must be supplied via the `:do`
# option, which is popped off before delegating to `div_tag/2`.
def div_tag(opts) when is_list(opts) do
  error = "div_tag/2 requires a text as first argument or contents in the :do block"
  {contents, remaining} = pop_required_option!(opts, :do, error)
  div_tag(contents, remaining)
end
# TODO: This doesn't belong here
# Returns the raw Mailchimp subscribe-form HTML as a heredoc string. The
# hidden `b_...` input is Mailchimp's bot honeypot and must keep its exact
# name. NOTE(review): this markup is returned unescaped — callers must
# render it as raw/safe HTML for it to display correctly; confirm at the
# call sites.
def subscribe_form do
  """
  <form action="https://erlef.us20.list-manage.com/subscribe/post?u=8d8ff4d9284d463c374e574bb&id=8cad7357f8"
    method="post" id="mc-embedded-subscribe-form"
    name="mc-embedded-subscribe-form"
    class="validate mc4wp-form mc4wp-form-116"
    target="_blank" novalidate>
    <div>
      <div style="position: absolute; left: -5000px;" aria-hidden="true">
        <input type="text" name="b_8d8ff4d9284d463c374e574bb_8cad7357f8" tabindex="-1" value="">
      </div>
      <div class="form-row subscribe">
        <input class="col-lg-6 form-control" type="email" name="EMAIL" placeholder="Your e-mail address" required="">
        <input class="col-lg-auto btn btn-primary ml-2" type="submit" value="Subscribe">
      </div>
    </div>
  </form>
  """
end
# Pops `key` from `opts`, raising `ArgumentError` with `error_message` when
# the popped value is missing or falsy; otherwise returns `{value, rest}`.
defp pop_required_option!(opts, key, error_message) do
  {value, remaining} = Keyword.pop(opts, key)

  if value do
    {value, remaining}
  else
    raise ArgumentError, error_message
  end
end
end
| 27.2375 | 124 | 0.64112 |
08ce2542998e8c360fec126fb30c24da718d2c3f | 533 | exs | Elixir | mix.exs | mazurka/mazurka_dsl | de4b24b5d6a2ffa94aab65af413a9e4771e8815d | [
"MIT"
] | null | null | null | mix.exs | mazurka/mazurka_dsl | de4b24b5d6a2ffa94aab65af413a9e4771e8815d | [
"MIT"
] | 1 | 2015-01-26T06:19:55.000Z | 2015-01-26T06:19:55.000Z | mix.exs | mazurka/mazurka_dsl | de4b24b5d6a2ffa94aab65af413a9e4771e8815d | [
"MIT"
] | null | null | null | defmodule MazurkaDsl.Mixfile do
use Mix.Project
# Mix project definition; package metadata and dependencies come from the
# private helpers below.
def project do
  [
    app: :mazurka_dsl,
    version: "0.1.1",
    elixir: "~> 1.0",
    description: "DSL for defining mazurka resources",
    package: package(),
    deps: deps()
  ]
end
# OTP application configuration: only the logger is required.
def application, do: [applications: [:logger]]
# No runtime or dev dependencies.
defp deps, do: []
# Hex package metadata. NOTE(review): `:contributors` is the legacy key
# (modern Hex uses `:maintainers`) — kept as-is to preserve behavior.
defp package do
  [
    files: ["src", "mix.exs", "README*"],
    contributors: ["Cameron Bytheway"],
    licenses: ["MIT"],
    links: %{"GitHub" => "https://github.com/mazurka/mazurka_dsl"}
  ]
end
end
| 19.035714 | 68 | 0.602251 |
08ce5d2b8480ca73b4ccc46127fd32946b5d617d | 599 | ex | Elixir | apps/auto_publish/lib/auto_publish/application.ex | isavita/diet-umbrella | 0eee4d0dc4d9567888e4b69ccc7993e95d95ed29 | [
"MIT"
] | 1 | 2020-06-01T21:25:54.000Z | 2020-06-01T21:25:54.000Z | apps/auto_publish/lib/auto_publish/application.ex | isavita/diet-umbrella | 0eee4d0dc4d9567888e4b69ccc7993e95d95ed29 | [
"MIT"
] | 221 | 2019-07-20T17:20:49.000Z | 2021-08-02T06:21:10.000Z | apps/auto_publish/lib/auto_publish/application.ex | isavita/diet-umbrella | 0eee4d0dc4d9567888e4b69ccc7993e95d95ed29 | [
"MIT"
] | null | null | null | defmodule AutoPublish.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
# Application callback: boots the supervision tree.
def start(_type, _args) do
  # Single supervised child: the YouTube publishing scheduler. Its
  # child_spec argument is one empty list (the scheduler's init arg).
  scheduler = {AutoPublish.Videos.YoutubeScheduler, [[]]}

  # See https://hexdocs.pm/elixir/Supervisor.html for other strategies
  # and supported options.
  Supervisor.start_link([scheduler], strategy: :one_for_one, name: AutoPublish.Supervisor)
end
end
| 28.52381 | 70 | 0.712855 |
08ce7c883dab0ef488d84f42a48ccea41c9eb1e0 | 428 | exs | Elixir | test/day_12_test.exs | robindaumann/aoc-2020 | 63e2dd4cd3062b15fc5be3ec8b3fe75716701f7a | [
"MIT"
] | 1 | 2020-12-19T18:40:00.000Z | 2020-12-19T18:40:00.000Z | test/day_12_test.exs | robindaumann/aoc-2020 | 63e2dd4cd3062b15fc5be3ec8b3fe75716701f7a | [
"MIT"
] | null | null | null | test/day_12_test.exs | robindaumann/aoc-2020 | 63e2dd4cd3062b15fc5be3ec8b3fe75716701f7a | [
"MIT"
] | null | null | null | defmodule Day12Test do
use ExUnit.Case, async: true
require Input
# Regression check for part 1 against the personal puzzle input.
test "part1 input" do
  assert Day12.part1(Input.path()) == 1106
end
# Part 1 against the worked example from the puzzle statement.
test "part1 example" do
  assert Day12.part1(Input.example()) == 25
end
# Regression check for part 2 against the personal puzzle input.
test "part2 input" do
  assert Day12.part2(Input.path()) == 107_281
end
# Part 2 against the worked example from the puzzle statement.
test "part2 example" do
  assert Day12.part2(Input.example()) == 286
end
end
| 17.12 | 36 | 0.623832 |
08ce83cf7c77f304877540a657e98494d80ed968 | 1,618 | exs | Elixir | test/easter_eggs_test.exs | KRISHITECH/farmbot_os | 4220baf8f3ef930ce3b861b178ca6a76e2fd7591 | [
"MIT"
] | null | null | null | test/easter_eggs_test.exs | KRISHITECH/farmbot_os | 4220baf8f3ef930ce3b861b178ca6a76e2fd7591 | [
"MIT"
] | null | null | null | test/easter_eggs_test.exs | KRISHITECH/farmbot_os | 4220baf8f3ef930ce3b861b178ca6a76e2fd7591 | [
"MIT"
] | null | null | null | defmodule Farmbot.EasterEggsTest do
@moduledoc false
use ExUnit.Case, async: true
# Write the JSON fixture that the path-based tests below read back.
setup_all do
  encoded = Poison.encode!(test_json())
  File.write!("/tmp/test.json", encoded)
  :ok
end
# Boots an EasterEggs server from the fixture file written in setup_all.
test "starts the server with a path to a file" do
  {:ok, pid} = Farmbot.EasterEggs.start_link({:name, :test_1}, {:path, "/tmp/test.json"})
  assert is_pid(pid)
end
# Boots an EasterEggs server directly from an in-memory JSON map.
test "starts the server with a json object" do
  {:ok, pid} =
    Farmbot.EasterEggs.start_link({:name, :test_2}, {:json, test_json_with_strings()})

  assert is_pid(pid)
end
# Loading extra JSON at runtime merges it into the server state.
test "adds a new json to the state" do
  {:ok, pid} = Farmbot.EasterEggs.start_link({:name, :test_3}, {:path, "/tmp/test.json"})
  assert is_pid(pid)

  # A fresh server starts with empty nouns/verbs.
  assert GenServer.call(pid, :state) == %{nouns: %{}, verbs: []}

  # String noun keys are atomized when merged into the state.
  Farmbot.EasterEggs.load_json(%{"nouns" => [%{"somehting" => "heyo"}], "verbs" => []}, pid)
  assert GenServer.call(pid, :state) == %{nouns: %{somehting: "heyo"}, verbs: []}
end
# Casting an arbitrary string should be accepted without crashing the server.
test "logs a thing" do
  {:ok, pid} = Farmbot.EasterEggs.start_link({:name, :test_4}, {:path, "/tmp/test.json"})
  assert is_pid(pid)
  assert GenServer.call(pid, :state) == %{nouns: %{}, verbs: []}

  # Fire-and-forget; the original author noted the resulting log output
  # itself cannot be asserted here (logger disabled in tests).
  GenServer.cast(pid, "hey this will get logged but logger is disabled hur dur dur")
end
# Fixture with atom keys (the shape the server stores internally).
def test_json, do: %{nouns: [], verbs: []}
# Fixture with string keys (the shape of freshly decoded JSON).
def test_json_with_strings, do: %{"nouns" => [], "verbs" => []}
end
| 29.962963 | 85 | 0.618047 |
08ce8ea031bb7b1ddedb54b17d79a46752cb4041 | 245 | exs | Elixir | config/test.exs | mbklein/ueberauth_openam | 159574506b4a8add8d0d19db79e9b2bd1a6aed00 | [
"MIT"
] | null | null | null | config/test.exs | mbklein/ueberauth_openam | 159574506b4a8add8d0d19db79e9b2bd1a6aed00 | [
"MIT"
] | 1 | 2022-02-03T06:45:39.000Z | 2022-02-23T05:30:04.000Z | config/test.exs | nulib/ueberauth_openam | 159574506b4a8add8d0d19db79e9b2bd1a6aed00 | [
"MIT"
] | null | null | null | import Config
# Test-only Ueberauth setup: point the OpenAM strategy at a fake SSO host
# and cookie name so no real identity provider is needed.
config :ueberauth, Ueberauth,
  providers: [
    openam:
      {Ueberauth.Strategy.OpenAM,
       [base_url: "https://openam.example.edu/", sso_cookie: "openAMssoToken"]}
  ]

# Swap the HTTP client for a mock so tests never hit the network.
config :ueberauth_openam, :http_client, MockHTTPoison
| 22.272727 | 79 | 0.706122 |
08ce91f0ad11f49c894f0dbc3bb755306d024bb4 | 203 | ex | Elixir | lib/phone/za.ex | davidkovsky/phone | 83108ab1042efe62778c7363f5d02ef888883408 | [
"Apache-2.0"
] | 97 | 2016-04-05T13:08:41.000Z | 2021-12-25T13:08:34.000Z | lib/phone/za.ex | davidkovsky/phone | 83108ab1042efe62778c7363f5d02ef888883408 | [
"Apache-2.0"
] | 70 | 2016-06-14T00:56:00.000Z | 2022-02-10T19:43:14.000Z | lib/phone/za.ex | davidkovsky/phone | 83108ab1042efe62778c7363f5d02ef888883408 | [
"Apache-2.0"
] | 31 | 2016-04-21T22:26:12.000Z | 2022-01-24T21:40:00.000Z | defmodule Phone.ZA do
@moduledoc false

# Helper.Country supplies the country DSL, including the matcher/2 macro
# used below.
use Helper.Country

# Capture groups: country code "27", an empty middle group, then the
# remaining 9 characters of the number.
def regex, do: ~r/^(27)()(.{9})/
def country, do: "South Africa"
def a2, do: "ZA"
def a3, do: "ZAF"

# Register the regex-based matcher for numbers with country code 27.
matcher(:regex, ["27"])
end
| 15.615385 | 34 | 0.596059 |
08ceb3d619bece42580b6eedf27bff85987054aa | 80 | exs | Elixir | apps/uniflow_web/test/uniflow_web/views/page_view_test.exs | andyl/uniflow | 049050d2e41b8242bb1e6c543d715caa0d0e7f02 | [
"MIT"
] | null | null | null | apps/uniflow_web/test/uniflow_web/views/page_view_test.exs | andyl/uniflow | 049050d2e41b8242bb1e6c543d715caa0d0e7f02 | [
"MIT"
] | 9 | 2021-05-10T18:31:22.000Z | 2021-05-27T12:57:20.000Z | apps/uniflow_web/test/uniflow_web/views/page_view_test.exs | andyl/uniflow | 049050d2e41b8242bb1e6c543d715caa0d0e7f02 | [
"MIT"
] | null | null | null | defmodule UniflowWeb.PageViewTest do
use UniflowWeb.ConnCase, async: true
end
| 20 | 38 | 0.825 |
08cf18a77ae8fd767a6c20cf548808afbb404316 | 511 | exs | Elixir | priv/repo/migrations/20210502100549_create_tips.exs | mcintyre94/CentralTipsBot | 626145478d3516493e72d271b045d1f19275fef5 | [
"MIT"
] | null | null | null | priv/repo/migrations/20210502100549_create_tips.exs | mcintyre94/CentralTipsBot | 626145478d3516493e72d271b045d1f19275fef5 | [
"MIT"
] | null | null | null | priv/repo/migrations/20210502100549_create_tips.exs | mcintyre94/CentralTipsBot | 626145478d3516493e72d271b045d1f19275fef5 | [
"MIT"
] | null | null | null | defmodule Centraltipsbot.Repo.Migrations.CreateTips do
use Ecto.Migration
def change do
  # Tips are keyed by a binary UUID rather than a serial integer id.
  create table(:tips, primary_key: false) do
    add :id, :binary_id, primary_key: true
    # Sender/recipient identity — presumably a platform name plus the
    # platform-specific id; confirm against the application code.
    add :from_source, :string
    add :from_source_id, :string
    add :to_source, :string
    add :to_source_id, :string
    add :memo, :string
    add :quantity, :decimal
    # Payout bookkeeping flag; every tip starts unpaid.
    add :paid, :boolean, default: false, null: false

    timestamps()
  end

  # Supports scanning tips by insertion time and paid state.
  create index(:tips, [:inserted_at, :paid])
end
end
| 24.333333 | 54 | 0.655577 |
08cf2db8afa0e9f61996d7b5806671b07fd51918 | 4,994 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/order.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/order.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/order.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V34.Model.Order do
  @moduledoc """
  Describes properties of a Planning order.

  ## Attributes

  * `accountId` (*type:* `String.t`, *default:* `nil`) - Account ID of this order.
  * `advertiserId` (*type:* `String.t`, *default:* `nil`) - Advertiser ID of this order.
  * `approverUserProfileIds` (*type:* `list(String.t)`, *default:* `nil`) - IDs for users that have to approve documents created for this order.
  * `buyerInvoiceId` (*type:* `String.t`, *default:* `nil`) - Buyer invoice ID associated with this order.
  * `buyerOrganizationName` (*type:* `String.t`, *default:* `nil`) - Name of the buyer organization.
  * `comments` (*type:* `String.t`, *default:* `nil`) - Comments in this order.
  * `contacts` (*type:* `list(GoogleApi.DFAReporting.V34.Model.OrderContact.t)`, *default:* `nil`) - Contacts for this order.
  * `id` (*type:* `String.t`, *default:* `nil`) - ID of this order. This is a read-only, auto-generated field.
  * `kind` (*type:* `String.t`, *default:* `nil`) - Identifies what kind of resource this is. Value: the fixed string "dfareporting#order".
  * `lastModifiedInfo` (*type:* `GoogleApi.DFAReporting.V34.Model.LastModifiedInfo.t`, *default:* `nil`) - Information about the most recent modification of this order.
  * `name` (*type:* `String.t`, *default:* `nil`) - Name of this order.
  * `notes` (*type:* `String.t`, *default:* `nil`) - Notes of this order.
  * `planningTermId` (*type:* `String.t`, *default:* `nil`) - ID of the terms and conditions template used in this order.
  * `projectId` (*type:* `String.t`, *default:* `nil`) - Project ID of this order.
  * `sellerOrderId` (*type:* `String.t`, *default:* `nil`) - Seller order ID associated with this order.
  * `sellerOrganizationName` (*type:* `String.t`, *default:* `nil`) - Name of the seller organization.
  * `siteId` (*type:* `list(String.t)`, *default:* `nil`) - Site IDs this order is associated with.
  * `siteNames` (*type:* `list(String.t)`, *default:* `nil`) - Free-form site names this order is associated with.
  * `subaccountId` (*type:* `String.t`, *default:* `nil`) - Subaccount ID of this order.
  * `termsAndConditions` (*type:* `String.t`, *default:* `nil`) - Terms and conditions of this order.
  """

  use GoogleApi.Gax.ModelBase

  # Struct type mirroring the JSON resource; every field defaults to nil.
  @type t :: %__MODULE__{
          :accountId => String.t() | nil,
          :advertiserId => String.t() | nil,
          :approverUserProfileIds => list(String.t()) | nil,
          :buyerInvoiceId => String.t() | nil,
          :buyerOrganizationName => String.t() | nil,
          :comments => String.t() | nil,
          :contacts => list(GoogleApi.DFAReporting.V34.Model.OrderContact.t()) | nil,
          :id => String.t() | nil,
          :kind => String.t() | nil,
          :lastModifiedInfo => GoogleApi.DFAReporting.V34.Model.LastModifiedInfo.t() | nil,
          :name => String.t() | nil,
          :notes => String.t() | nil,
          :planningTermId => String.t() | nil,
          :projectId => String.t() | nil,
          :sellerOrderId => String.t() | nil,
          :sellerOrganizationName => String.t() | nil,
          :siteId => list(String.t()) | nil,
          :siteNames => list(String.t()) | nil,
          :subaccountId => String.t() | nil,
          :termsAndConditions => String.t() | nil
        }

  # field/1,2 comes from GoogleApi.Gax.ModelBase and registers each
  # attribute's (de)serialization metadata, used by the Poison impls below.
  field(:accountId)
  field(:advertiserId)
  field(:approverUserProfileIds, type: :list)
  field(:buyerInvoiceId)
  field(:buyerOrganizationName)
  field(:comments)
  field(:contacts, as: GoogleApi.DFAReporting.V34.Model.OrderContact, type: :list)
  field(:id)
  field(:kind)
  field(:lastModifiedInfo, as: GoogleApi.DFAReporting.V34.Model.LastModifiedInfo)
  field(:name)
  field(:notes)
  field(:planningTermId)
  field(:projectId)
  field(:sellerOrderId)
  field(:sellerOrganizationName)
  field(:siteId, type: :list)
  field(:siteNames, type: :list)
  field(:subaccountId)
  field(:termsAndConditions)
end
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V34.Model.Order do
  # Decoding is implemented by the generated model module itself.
  defdelegate decode(value, options), to: GoogleApi.DFAReporting.V34.Model.Order
end
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V34.Model.Order do
  # Generic encoding is provided by the Gax model base.
  defdelegate encode(value, options), to: GoogleApi.Gax.ModelBase
end
| 48.019231 | 170 | 0.659191 |
08cf3e8e7fe1020af883fe1eb7c10af085b8e960 | 2,294 | exs | Elixir | test/lua_scripts/cache2_quick_search_test.exs | fangkunping/micro-server | 3307b605ed33a8c089283274d2713d8ee3798b1a | [
"Apache-2.0"
] | null | null | null | test/lua_scripts/cache2_quick_search_test.exs | fangkunping/micro-server | 3307b605ed33a8c089283274d2713d8ee3798b1a | [
"Apache-2.0"
] | null | null | null | test/lua_scripts/cache2_quick_search_test.exs | fangkunping/micro-server | 3307b605ed33a8c089283274d2713d8ee3798b1a | [
"Apache-2.0"
] | null | null | null | defmodule MicroServerTest.LuaScript.Cache2QuickSearchTest do
use MicroServerTest.MyCase, async: false
test "test gen ms" do
  # Lua DSL fixture: three cache1 table declarations plus one predefined
  # query over the "animal" table.
  lua_script = ~s|
cache1_table("man", id, name, age)
cache1_table("class", id, math, english)
cache1_table("animal", name, type, age)
cache1_def_query("animal", "q_animal", [[name ~= '1' and age > '2']] )
cache2.q_write("animal", {name=1, name=20, age=30})
cache2.q_select("q_animal", "ludde", 30)
|
  "=============================" |> IO.inspect()

  # Seed a throwaway ETS table with {name, type, age} rows to run the
  # generated match spec against.
  :ets.new(:test, [:set, :named_table])
  :ets.insert(:test, {:rufsen, :dog, 7})
  :ets.insert(:test, {:brunte, :horse, 5})
  :ets.insert(:test, {:ludde, :dog, 5})

  {:ok, parser} = MicroServer.LuaParserUtility.parse(lua_script) |> IO.inspect()
  cache1_table = MicroServer.LuaParserUtility.get_cache1_table(parser) |> IO.inspect()
  cache1_def_query = MicroServer.LuaParserUtility.get_cache1_def_query(parser) |> IO.inspect()
  # cache1_def_query |> Map.get("select1")

  # Build the ETS match spec for query "q_animal" with bindings [:ludde, 6]
  # and run it. NOTE(review): output is only inspected, never asserted —
  # the author's comment below admits this ("i cant actually test that?"
  # appears in the sibling project; here no assertion exists either).
  ms =
    MicroServer.LuaLib.Cache1.gen_match_spec(cache1_table, cache1_def_query, "q_animal", [
      :ludde,
      6
    ])
    |> IO.inspect()

  :ets.select(:test, ms) |> IO.inspect()
end
test "test run server" do
  # Lua script exercising q_write/q_read/q_select/q_delete through an
  # on_http handler; trace/1 output is inspected by eye, not asserted.
  lua_script = ~s|
require(cache1, cache2, web)
cache1_table("animal", id, name, type, age)
cache1_def_query("animal", "q_animal_1", [[name ~= "1" and age > "2"]] )
cache1_def_query("animal", "q_animal_2", [[name ~= "1" and age > "2", name == "3"]] )
function on_http(ticket, message)
trace(cache2.q_write("animal", {id=10, name="tiger", age=3}))
trace(cache2.q_write("animal", {id=11, name="cat", age=2}))
trace(cache2.q_read("animal", 10))
trace(cache2.q_select("q_animal_1", "dog", 1))
local rs = cache2.q_select("q_animal_2", "dog", 4, "cat")
for _,v in pairs(rs) do
trace("result is: " .. v.name)
end
cache2.q_delete("q_animal_2", "dog", 4, "cat")
trace(cache2.q_select("q_animal_1", "dog", 1))
end
|
  # Install the script as row id 1, boot the server, fire the HTTP event,
  # then sleep long enough for the traced output to flush.
  MicroServer.Repo.query!(~s/update `scripts` set `content` = '#{lua_script}' where id = 1/)
  start_server()
  MicroServer.ServerUtility.call(@server_id, {:lua_event, :on_http, %{}}) |> IO.inspect()
  Process.sleep(@waiting_print_finish)
end
| 38.233333 | 96 | 0.61857 |
08cf5a99f8345001b543a358c15d5b3150701183 | 598 | exs | Elixir | programming/elixir/conduit/priv/repo/migrations/20170721110435_create_blog_comment.exs | NomikOS/learning | 268f94605214f6861ef476ca7573e68c068ccbe5 | [
"Unlicense"
] | null | null | null | programming/elixir/conduit/priv/repo/migrations/20170721110435_create_blog_comment.exs | NomikOS/learning | 268f94605214f6861ef476ca7573e68c068ccbe5 | [
"Unlicense"
] | null | null | null | programming/elixir/conduit/priv/repo/migrations/20170721110435_create_blog_comment.exs | NomikOS/learning | 268f94605214f6861ef476ca7573e68c068ccbe5 | [
"Unlicense"
] | null | null | null | defmodule Conduit.Repo.Migrations.CreateBlogComment do
use Ecto.Migration
def change do
  # Comments are keyed by UUID rather than a serial integer id.
  create table(:blog_comments, primary_key: false) do
    add :uuid, :uuid, primary_key: true
    add :body, :text
    # Parent article plus denormalized author fields (no FK constraints
    # are declared here).
    add :article_uuid, :uuid
    add :author_uuid, :uuid
    add :author_username, :text
    add :author_bio, :text
    add :author_image, :text
    add :commented_at, :naive_datetime

    timestamps()
  end

  # Lookup indexes for article pages, author pages and time ordering.
  create index(:blog_comments, [:article_uuid])
  create index(:blog_comments, [:author_uuid])
  create index(:blog_comments, [:commented_at])
end
end
| 26 | 55 | 0.677258 |
08cf8010052c658ec9c8650141394a6ffd2f9ec3 | 1,616 | exs | Elixir | mix.exs | f0rest8/pwned | d2bbd279296c1be4eaf5b5be916321965e230e12 | [
"Apache-2.0"
] | 2 | 2021-10-10T09:58:23.000Z | 2022-02-09T02:09:04.000Z | mix.exs | f0rest8/pwned | d2bbd279296c1be4eaf5b5be916321965e230e12 | [
"Apache-2.0"
] | 1 | 2021-01-27T07:54:07.000Z | 2021-01-27T07:54:07.000Z | mix.exs | f0rest8/pwned | d2bbd279296c1be4eaf5b5be916321965e230e12 | [
"Apache-2.0"
] | null | null | null | defmodule Pwned.MixProject do
use Mix.Project
@version "1.5.2"
# Mix project definition for the :pwned_coretheory package.
def project do
  [
    app: :pwned_coretheory,
    version: @version,
    elixir: "~> 1.11",
    # Permanent start in prod: the VM exits if the application stops.
    start_permanent: Mix.env() == :prod,
    deps: deps(),
    description: description(),
    package: package(),
    docs: docs(),
    source_url: "https://github.com/coretheory/ct_pwned"
  ]
end
def application do
  [
    # NOTE(review): listing :applications disables Mix's automatic
    # inference of runtime apps from deps — confirm :httpoison alone
    # is intended; usually only :extra_applications is required.
    applications: [:httpoison],
    # Fixed: the trailing comma after this entry was invalid Elixir syntax.
    extra_applications: [:logger]
  ]
end
# HTTPoison is the only runtime dependency; the rest are dev/test tooling.
defp deps do
  [
    {:httpoison, "~> 1.8"},
    {:ex_doc, "~> 0.23.0", only: :dev, runtime: false},
    {:dialyxir, "~> 1.0", only: [:dev, :test], runtime: false},
    {:credo, "~> 1.5", only: [:dev, :test], runtime: false}
  ]
end
# One-paragraph summary shown on hex.pm.
defp description() do
  """
  A simple application to check if an email or password has been pwned
  using the HaveIBeenPwned? API. It requires a purchased hibp-api-key
  in order to use the email checking functions.
  """
end
# Hex package metadata.
defp package() do
  [
    maintainers: ["Core Theory"],
    licenses: ["Apache 2.0"],
    # Fixed: the trailing comma after this entry was invalid Elixir syntax.
    links: %{"GitHub" => "https://github.com/coretheory/ct_pwned"}
  ]
end
# ExDoc configuration for the published HexDocs pages.
defp docs() do
  [
    main: "readme",
    name: "Pwned by Core Theory",
    # Tag-based source links, e.g. "v1.5.2".
    source_ref: "v#{@version}",
    canonical: "https://hexdocs.pm/pwned_coretheory/",
    source_url: "https://github.com/coretheory/ct_pwned",
    logo: "assets/static/images/CT_Logo_Color.png",
    extras: [
      "README.md",
      "CHANGELOG.md",
      "CONTRIBUTING.md",
      "FURTHER_READING.md",
      "LICENSE.md"
    ]
  ]
end
end
| 23.085714 | 72 | 0.568069 |
08cf92c9a4e987d0ce62f85e34a25e05a7ace3f6 | 574 | ex | Elixir | priv/catalogue/button/example02.ex | RobertDober/surface_catalogue | 05495b00573b4138a167812e33e8d441590e4c89 | [
"MIT"
] | 132 | 2021-02-02T04:03:17.000Z | 2022-03-24T07:02:00.000Z | priv/catalogue/button/example02.ex | RobertDober/surface_catalogue | 05495b00573b4138a167812e33e8d441590e4c89 | [
"MIT"
] | 30 | 2021-02-16T13:18:43.000Z | 2022-03-20T20:25:47.000Z | priv/catalogue/button/example02.ex | RobertDober/surface_catalogue | 05495b00573b4138a167812e33e8d441590e4c89 | [
"MIT"
] | 17 | 2021-03-20T16:23:13.000Z | 2022-03-15T16:21:08.000Z | defmodule SurfaceCatalogue.SampleComponents.Button.Example02 do
@moduledoc """
An example with direction `horizontal` with the content larger than the code
area.
"""
use Surface.Catalogue.Example,
subject: SurfaceCatalogue.SampleComponents.Button,
catalogue: SurfaceCatalogue.SampleCatalogue,
title: "Horizontal with scroll",
height: "90px",
container: {:div, class: "buttons"}
# Two buttons deliberately emitted on one long line (no whitespace
# between them) so the example content overflows the code area.
def render(assigns) do
  ~F"""
  <Button size="normal" color="primary">Normal</Button><Button size="medium" color="warning">Medium</Button>
  """
end
end
| 28.7 | 110 | 0.712544 |
08cf93d63e5968291341a063a8f884d35b82d646 | 1,644 | ex | Elixir | clients/firebase_rules/lib/google_api/firebase_rules/v1/model/arg.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/firebase_rules/lib/google_api/firebase_rules/v1/model/arg.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/firebase_rules/lib/google_api/firebase_rules/v1/model/arg.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.FirebaseRules.V1.Model.Arg do
  @moduledoc """
  Arg matchers for the mock function.

  ## Attributes

  * `anyValue` (*type:* `GoogleApi.FirebaseRules.V1.Model.Empty.t`, *default:* `nil`) - Argument matches any value provided.
  * `exactValue` (*type:* `any()`, *default:* `nil`) - Argument exactly matches value provided.
  """

  use GoogleApi.Gax.ModelBase

  # Presumably only one of the two matchers is set at a time — confirm
  # against the Firebase Rules API documentation.
  @type t :: %__MODULE__{
          :anyValue => GoogleApi.FirebaseRules.V1.Model.Empty.t() | nil,
          :exactValue => any() | nil
        }

  field(:anyValue, as: GoogleApi.FirebaseRules.V1.Model.Empty)
  field(:exactValue)
end
defimpl Poison.Decoder, for: GoogleApi.FirebaseRules.V1.Model.Arg do
  # Decoding is implemented by the generated model module itself.
  defdelegate decode(value, options), to: GoogleApi.FirebaseRules.V1.Model.Arg
end
defimpl Poison.Encoder, for: GoogleApi.FirebaseRules.V1.Model.Arg do
  # Generic encoding is provided by the Gax model base.
  defdelegate encode(value, options), to: GoogleApi.Gax.ModelBase
end
| 32.88 | 126 | 0.724453 |
08cfa5cc610eda9c2953962d91803e440ea28050 | 880 | ex | Elixir | clients/content/lib/google_api/content/v2/metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/metadata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2 do
  @moduledoc """
  API client metadata for GoogleApi.Content.V2.
  """

  # Revision of the Google discovery document this client was generated from.
  @discovery_revision "20201008"

  def discovery_revision, do: @discovery_revision
end
| 32.592593 | 74 | 0.757955 |
08cfabb158107a9b7f99d429d1966db3a0b60839 | 357 | ex | Elixir | apps/panacea_beacon/lib/panacea_beacon_web/metrics/setup.ex | timjp87/panacea | 5edddfa12a8f18b040248b9b186479b9ec8aed51 | [
"MIT"
] | null | null | null | apps/panacea_beacon/lib/panacea_beacon_web/metrics/setup.ex | timjp87/panacea | 5edddfa12a8f18b040248b9b186479b9ec8aed51 | [
"MIT"
] | null | null | null | apps/panacea_beacon/lib/panacea_beacon_web/metrics/setup.ex | timjp87/panacea | 5edddfa12a8f18b040248b9b186479b9ec8aed51 | [
"MIT"
] | null | null | null | defmodule BeaconWeb.Metrics.Setup do
@moduledoc """
Common area to set up metrics
"""
require Logger
# Initializes every metrics instrumenter, then the Prometheus exporter.
def setup do
  Logger.info("Starting Prometheus Metrics on http://localhost:4000/metrics")

  # Instrumenters first; the exporter goes last so its result is returned.
  for instrumenter <- [BeaconWeb.Metrics.BeaconInstrumenter, BeaconWeb.Metrics.NetworkInstrumenter] do
    instrumenter.setup()
  end

  BeaconWeb.PrometheusExporter.setup()
end
end
| 22.3125 | 79 | 0.745098 |
08cfb8e6b2b9a39168302cf14eab5b1a95d5f289 | 886 | ex | Elixir | clients/cloud_shell/lib/google_api/cloud_shell/v1/metadata.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/cloud_shell/lib/google_api/cloud_shell/v1/metadata.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/cloud_shell/lib/google_api/cloud_shell/v1/metadata.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudShell.V1 do
  @moduledoc """
  API client metadata for GoogleApi.CloudShell.V1.
  """

  # Revision of the Google discovery document this client was generated from.
  @discovery_revision "20210119"

  def discovery_revision, do: @discovery_revision
end
| 32.814815 | 74 | 0.759594 |
08cfcabeca511a84af992dfda6e355c62976f609 | 1,066 | exs | Elixir | test/still/node/process_test.exs | still-ex/still_node | 7ac96b28b4b3c952f10660712c107d7a77f03124 | [
"MIT"
] | 2 | 2021-04-25T07:46:51.000Z | 2021-05-01T07:00:16.000Z | test/still/node/process_test.exs | still-ex/still_node | 7ac96b28b4b3c952f10660712c107d7a77f03124 | [
"MIT"
] | null | null | null | test/still/node/process_test.exs | still-ex/still_node | 7ac96b28b4b3c952f10660712c107d7a77f03124 | [
"MIT"
] | null | null | null | defmodule Still.Node.ProcessTest do
use ExUnit.Case, async: true
alias Still.Node.Process
# Absolute path to the JS entry point driven by these tests, resolved at
# compile time relative to this test file's directory.
@js_file Path.dirname(__DIR__)
         |> Path.join("../../assets/index.js")
         |> Path.expand()
describe "invoke/3" do
  # Each test boots a fresh Node process wired to the fixture JS file.
  test "calls a node function" do
    {:ok, _} = Process.start_link(file: @js_file)
    assert {:ok, "hello world"} = Process.invoke("echo", ["hello world"])
  end

  test "uses a node package" do
    {:ok, _} = Process.start_link(file: @js_file)
    assert {:ok, true} = Process.invoke("eq", ["hello", "hello"])
  end

  # "huge" returns a 100k-character payload — presumably guarding the
  # inter-process I/O chunking; confirm against the JS fixture.
  test "handles huge amounts of data" do
    {:ok, _} = Process.start_link(file: @js_file)
    {:ok, response} = Process.invoke("huge", [])
    assert String.length(response) == 100_000
  end
end
describe "invoke/4" do
  # The 4-arity form names the process explicitly ("index") and passes
  # per-call options such as :timeout.
  test "supports a custom process name" do
    {:ok, _} = Process.start_link(file: @js_file, name: "index")

    assert {:ok, "hello gabriel"} =
             Process.invoke("index", "hello", ["gabriel"], timeout: :infinity)
  end
end
end
| 26 | 80 | 0.593809 |
08cfdff827a39c732ba3d6d91f04f5d882a29ab8 | 1,365 | ex | Elixir | lib/console/organizations/organization_resolver.ex | maco2035/console | 2a9a65678b8c671c7d92cdb62dfcfc71b84957c5 | [
"Apache-2.0"
] | 83 | 2018-05-31T14:49:10.000Z | 2022-03-27T16:49:49.000Z | lib/console/organizations/organization_resolver.ex | maco2035/console | 2a9a65678b8c671c7d92cdb62dfcfc71b84957c5 | [
"Apache-2.0"
] | 267 | 2018-05-22T23:19:02.000Z | 2022-03-31T04:31:06.000Z | lib/console/organizations/organization_resolver.ex | maco2035/console | 2a9a65678b8c671c7d92cdb62dfcfc71b84957c5 | [
"Apache-2.0"
] | 18 | 2018-11-20T05:15:54.000Z | 2022-03-28T08:20:13.000Z | defmodule Console.Organizations.OrganizationResolver do
alias Console.Repo
alias Console.Organizations.Organization
alias Console.Organizations
# Resolves the paginated list of organizations for the current user.
# Super users page through all organizations; other users receive the
# organizations they belong to, shaped to look like a pagination result.
def paginate(%{page: page, page_size: page_size}, %{context: %{current_user: current_user, current_membership: current_membership}}) do
  organizations =
    case current_user.super do
      true ->
        Organization |> Repo.paginate(page: page, page_size: page_size)

      _ ->
        orgs = Organizations.get_organizations_with_devices(current_user)
        # Fixed: `%{entries: orgs,}` had a trailing comma, which is
        # invalid Elixir syntax.
        %{entries: orgs}
    end

  # Read-only members never see the organizations' webhook signing keys.
  entries =
    case current_membership.role do
      "read" ->
        Enum.map(organizations.entries, fn o -> Map.drop(o, [:webhook_key]) end)

      _ ->
        organizations.entries
    end

  {:ok, Map.put(organizations, :entries, entries)}
end
# Fetches one organization for the current user, with the flow serialized
# to JSON and the webhook signing key stripped.
def find(%{id: id}, %{context: %{current_user: current_user}}) do
  organization = Organizations.get_organization!(current_user, id)

  result =
    organization
    |> Map.put(:flow, Poison.encode!(organization.flow))
    |> Map.drop([:webhook_key])

  {:ok, result}
end
# Lists the current user's organizations without their webhook keys.
def all(_, %{context: %{current_user: current_user}}) do
  sanitized =
    for org <- Organizations.get_organizations_with_devices(current_user) do
      Map.drop(org, [:webhook_key])
    end

  {:ok, sanitized}
end
end
| 30.333333 | 137 | 0.648352 |
08d03fa21e212d914d6fa3b92c98d0bf156f1a02 | 6,337 | ex | Elixir | test/elixir/lib/couch.ex | frapa/couchdb | 6c28960f0fe2eec06aca7d58fd73f3c7cdbe1112 | [
"Apache-2.0"
] | 1 | 2022-01-14T20:52:55.000Z | 2022-01-14T20:52:55.000Z | test/elixir/lib/couch.ex | frapa/couchdb | 6c28960f0fe2eec06aca7d58fd73f3c7cdbe1112 | [
"Apache-2.0"
] | 1 | 2021-07-28T02:13:39.000Z | 2021-07-28T02:13:39.000Z | test/elixir/lib/couch.ex | frapa/couchdb | 6c28960f0fe2eec06aca7d58fd73f3c7cdbe1112 | [
"Apache-2.0"
] | null | null | null | defmodule Couch.Session do
@moduledoc """
CouchDB session helpers.
"""
# cookie: AuthSession token captured at login; error: server error string
# from a failed login; base_url: per-session CouchDB endpoint override.
defstruct [:cookie, :error, :base_url]
# Builds a session from a raw cookie, optionally carrying an error string.
def new(cookie, error \\ ""), do: %__MODULE__{cookie: cookie, error: error}
# Ends the session server-side via DELETE /_session (raises on failure).
def logout(sess) do
  Couch.delete!("/_session",
    headers: [
      "Content-Type": "application/x-www-form-urlencoded",
      "X-CouchDB-WWW-Authenticate": "Cookie",
      Cookie: sess.cookie
    ]
  )
end
# Fetches the current session document and returns only the decoded body.
def info(sess) do
  response =
    Couch.get("/_session",
      headers: [
        "Content-Type": "application/x-www-form-urlencoded",
        "X-CouchDB-WWW-Authenticate": "Cookie",
        Cookie: sess.cookie
      ]
    )

  response.body
end
# Thin verb helpers: each delegates to go/4, the bang forms to go!/4
# (which use the bang variants of the underlying request functions).
def get(sess, url, opts \\ []), do: go(sess, :get, url, opts)
def get!(sess, url, opts \\ []), do: go!(sess, :get, url, opts)
def put(sess, url, opts \\ []), do: go(sess, :put, url, opts)
def put!(sess, url, opts \\ []), do: go!(sess, :put, url, opts)
def post(sess, url, opts \\ []), do: go(sess, :post, url, opts)
def post!(sess, url, opts \\ []), do: go!(sess, :post, url, opts)
def delete(sess, url, opts \\ []), do: go(sess, :delete, url, opts)
def delete!(sess, url, opts \\ []), do: go!(sess, :delete, url, opts)
# Skipping head/patch/options for YAGNI. Feel free to add
# if the need arises.
# Performs a session-scoped request. The local :parse_response flag
# (default true) chooses between the parsed client and the raw one.
def go(%Couch.Session{} = sess, method, url, opts) do
  parse? = Keyword.get(opts, :parse_response, true)

  request_opts =
    opts
    |> Keyword.merge(cookie: sess.cookie, base_url: sess.base_url)
    |> Keyword.delete(:parse_response)

  if parse? do
    Couch.request(method, url, request_opts)
  else
    Rawresp.request(method, url, request_opts)
  end
end
# Same as go/4 but delegates to the raising request variants.
def go!(%Couch.Session{} = sess, method, url, opts) do
  parse? = Keyword.get(opts, :parse_response, true)

  request_opts =
    opts
    |> Keyword.merge(cookie: sess.cookie, base_url: sess.base_url)
    |> Keyword.delete(:parse_response)

  if parse? do
    Couch.request!(method, url, request_opts)
  else
    Rawresp.request!(method, url, request_opts)
  end
end
end
defmodule Couch do
  use HTTPotion.Base

  @moduledoc """
  CouchDB library to power test suite.
  """

  # These constants are supplied to the underlying HTTP client and control
  # how long we will wait before timing out a test. The inactivity timeout
  # specifically fires during an active HTTP response and defaults to 10_000
  # if not specified. We're defining it to a different value than the
  # request_timeout largely just so we know which timeout fired.
  @request_timeout 60_000
  @inactivity_timeout 55_000
  @max_sessions 1_000

  # Default server endpoint; the EX_COUCH_URL env var overrides it.
  def base_url() do
    System.get_env("EX_COUCH_URL") || "http://127.0.0.1:15984"
  end

  # Absolute URLs pass through untouched.
  def process_url("http://" <> _ = url) do
    url
  end

  # Relative URLs are resolved against the :base_url option.
  def process_url(url, options) do
    (Keyword.get(options, :base_url) <> url)
    |> prepend_protocol
    |> append_query_string(options)
  end

  # Sets the user agent, defaults Content-Type to JSON and attaches the
  # session cookie when one was provided in the options.
  def process_request_headers(headers, _body, options) do
    headers = Keyword.put(headers, :"User-Agent", "couch-potion")

    headers =
      if headers[:"Content-Type"] do
        headers
      else
        Keyword.put(headers, :"Content-Type", "application/json")
      end

    case Keyword.get(options, :cookie) do
      nil ->
        headers

      cookie ->
        Keyword.put(headers, :Cookie, cookie)
    end
  end

  # Fills in defaults: base URL, auth, timeouts and the session pool size.
  def process_options(options) do
    base_url = base_url()
    options = Keyword.put_new(options, :base_url, base_url)

    options
    |> set_auth_options()
    |> set_inactivity_timeout()
    |> set_request_timeout()
    |> set_max_sessions()
  end

  # Maps are JSON-encoded; any other body is sent as-is.
  def process_request_body(body) do
    if is_map(body) do
      :jiffy.encode(body)
    else
      body
    end
  end

  def process_response_body(_headers, body) when body == [] do
    ""
  end

  # JSON responses are decoded into maps; everything else falls back to
  # the arity-1 clause — presumably the default injected by HTTPotion.Base;
  # confirm against the HTTPotion documentation.
  def process_response_body(headers, body) do
    content_type = headers[:"Content-Type"]

    if !!content_type and String.match?(content_type, ~r/application\/json/) do
      body |> IO.iodata_to_binary() |> :jiffy.decode([:return_maps])
    else
      process_response_body(body)
    end
  end

  # Adds default basic-auth credentials unless the caller already supplied
  # some form of authentication (or explicitly opted out with no_auth: true).
  def set_auth_options(options) do
    no_auth? = Keyword.get(options, :no_auth) == true
    cookie? = Keyword.has_key?(options, :cookie)
    basic_auth? = Keyword.has_key?(options, :basic_auth)

    if cookie? or no_auth? or basic_auth? do
      Keyword.delete(options, :no_auth)
    else
      headers = Keyword.get(options, :headers, [])

      if headers[:basic_auth] != nil or headers[:authorization] != nil or
           List.keymember?(headers, :"X-Auth-CouchDB-UserName", 0) do
        options
      else
        username = System.get_env("EX_USERNAME") || "adm"
        password = System.get_env("EX_PASSWORD") || "pass"
        Keyword.put(options, :basic_auth, {username, password})
      end
    end
  end

  # Ensures an ibrowse inactivity timeout without clobbering a caller value.
  def set_inactivity_timeout(options) do
    Keyword.update(
      options,
      :ibrowse,
      [{:inactivity_timeout, @inactivity_timeout}],
      fn ibrowse ->
        Keyword.put_new(ibrowse, :inactivity_timeout, @inactivity_timeout)
      end
    )
  end

  # Overall request timeout; :httpotion app config takes precedence.
  def set_request_timeout(options) do
    timeout = Application.get_env(:httpotion, :default_timeout, @request_timeout)
    Keyword.put_new(options, :timeout, timeout)
  end

  # Ensures an ibrowse max_sessions value without clobbering a caller value.
  def set_max_sessions(options) do
    Keyword.update(
      options,
      :ibrowse,
      [{:max_sessions, @max_sessions}],
      fn ibrowse ->
        Keyword.put_new(ibrowse, :max_sessions, @max_sessions)
      end
    )
  end

  # Accepts "user:pass" and splits it into the two-argument form.
  def login(userinfo) do
    [user, pass] = String.split(userinfo, ":", parts: 2)
    login(user, pass)
  end

  # Creates a session via POST /_session. With expect: :success (the
  # default) the auth cookie token is captured into a Couch.Session;
  # otherwise the server's error string is recorded instead.
  def login(user, pass, options \\ []) do
    options = options |> Enum.into(%{})

    base_url =
      Map.get_lazy(options, :base_url, fn ->
        System.get_env("EX_COUCH_URL") || "http://127.0.0.1:15984"
      end)

    resp =
      Couch.post(
        "/_session",
        body: %{:username => user, :password => pass},
        base_url: base_url,
        no_auth: true
      )

    if Map.get(options, :expect, :success) == :success do
      true = resp.body["ok"]
      cookie = resp.headers[:"set-cookie"]
      [token | _] = String.split(cookie, ";")
      %Couch.Session{cookie: token, base_url: base_url}
    else
      true = Map.has_key?(resp.body, "error")
      %Couch.Session{error: resp.body["error"], base_url: base_url}
    end
  end
end
| 27.79386 | 81 | 0.633423 |
08d04fa934df705b9a12f21bb9414954bbd9d184 | 362 | ex | Elixir | lib/ex_hl7/composite/default/eip.ex | workpathco/ex_hl7 | 20f2fadb158e903cf1752f69cd0ecdeae377c2c3 | [
"Apache-2.0"
] | null | null | null | lib/ex_hl7/composite/default/eip.ex | workpathco/ex_hl7 | 20f2fadb158e903cf1752f69cd0ecdeae377c2c3 | [
"Apache-2.0"
] | null | null | null | lib/ex_hl7/composite/default/eip.ex | workpathco/ex_hl7 | 20f2fadb158e903cf1752f69cd0ecdeae377c2c3 | [
"Apache-2.0"
defmodule HL7.Composite.Default.EIP do
  @moduledoc """
  2.A.26 EIP - entity identifier pair

  Components:

    - `placer_assigned_id` (EI)
    - `filler_assigned_id` (EI)
  """
  use HL7.Composite.Spec

  require HL7.Composite.Default.EI, as: EI

  # `composite/1` is a macro from HL7.Composite.Spec; both components are
  # themselves EI (entity identifier) composites.
  composite do
    component :placer_assigned_id, type: EI
    component :filler_assigned_id, type: EI
  end
end
| 20.111111 | 43 | 0.698895 |
08d05e87650bf68da088fc5fcf6244e89af092de | 1,440 | ex | Elixir | clients/script/lib/google_api/script/v1/model/empty.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/script/lib/google_api/script/v1/model/empty.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/script/lib/google_api/script/v1/model/empty.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Script.V1.Model.Empty do
  @moduledoc """
  A generic empty message that you can re-use to avoid defining duplicated empty messages in your APIs. A typical example is to use it as the request or the response type of an API method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); }

  ## Attributes

  """

  use GoogleApi.Gax.ModelBase

  # This model intentionally has no fields.
  @type t :: %__MODULE__{}
end
defimpl Poison.Decoder, for: GoogleApi.Script.V1.Model.Empty do
  # Delegates JSON decoding to the decode/2 generated by GoogleApi.Gax.ModelBase.
  def decode(value, options) do
    GoogleApi.Script.V1.Model.Empty.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Script.V1.Model.Empty do
  # Delegates JSON encoding to the shared ModelBase encoder.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 34.285714 | 282 | 0.754861 |
08d06dc641505dc58056050968d6a7d40ea167ac | 16,933 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/subaccounts.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/subaccounts.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/subaccounts.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V33.Api.Subaccounts do
  @moduledoc """
  API calls for all endpoints tagged `Subaccounts`.
  """

  alias GoogleApi.DFAReporting.V33.Connection
  alias GoogleApi.Gax.{Request, Response}

  # Client library version reported with every request (read from mix.exs).
  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  @doc """
  Gets one subaccount by ID.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
  *   `id` (*type:* `String.t`) - Subaccount ID.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
      *   `:access_token` (*type:* `String.t`) - OAuth access token.
      *   `:alt` (*type:* `String.t`) - Data format for response.
      *   `:callback` (*type:* `String.t`) - JSONP
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
      *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
      *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DFAReporting.V33.Model.Subaccount{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_subaccounts_get(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.DFAReporting.V33.Model.Subaccount.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def dfareporting_subaccounts_get(connection, profile_id, id, optional_params \\ [], opts \\ []) do
    # Where each accepted optional parameter belongs in the request (:query or :body).
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/subaccounts/{id}", %{
        "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
        "id" => URI.encode(id, &(URI.char_unreserved?(&1) || &1 == ?/))
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.Subaccount{}])
  end

  @doc """
  Inserts a new subaccount.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
      *   `:access_token` (*type:* `String.t`) - OAuth access token.
      *   `:alt` (*type:* `String.t`) - Data format for response.
      *   `:callback` (*type:* `String.t`) - JSONP
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
      *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
      *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
      *   `:body` (*type:* `GoogleApi.DFAReporting.V33.Model.Subaccount.t`) -
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DFAReporting.V33.Model.Subaccount{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_subaccounts_insert(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.DFAReporting.V33.Model.Subaccount.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def dfareporting_subaccounts_insert(connection, profile_id, optional_params \\ [], opts \\ []) do
    # Where each accepted optional parameter belongs in the request (:query or :body).
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/subaccounts", %{
        "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.Subaccount{}])
  end

  @doc """
  Gets a list of subaccounts, possibly filtered. This method supports paging.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
      *   `:access_token` (*type:* `String.t`) - OAuth access token.
      *   `:alt` (*type:* `String.t`) - Data format for response.
      *   `:callback` (*type:* `String.t`) - JSONP
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
      *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
      *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
      *   `:ids` (*type:* `list(String.t)`) - Select only subaccounts with these IDs.
      *   `:maxResults` (*type:* `integer()`) - Maximum number of results to return.
      *   `:pageToken` (*type:* `String.t`) - Value of the nextPageToken from the previous result page.
      *   `:searchString` (*type:* `String.t`) - Allows searching for objects by name or ID. Wildcards (*) are allowed. For example, "subaccount*2015" will return objects with names like "subaccount June 2015", "subaccount April 2015", or simply "subaccount 2015". Most of the searches also add wildcards implicitly at the start and the end of the search string. For example, a search string of "subaccount" will match objects with name "my subaccount", "subaccount 2015", or simply "subaccount" .
      *   `:sortField` (*type:* `String.t`) - Field by which to sort the list.
      *   `:sortOrder` (*type:* `String.t`) - Order of sorted results.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DFAReporting.V33.Model.SubaccountsListResponse{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_subaccounts_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.DFAReporting.V33.Model.SubaccountsListResponse.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def dfareporting_subaccounts_list(connection, profile_id, optional_params \\ [], opts \\ []) do
    # Where each accepted optional parameter belongs in the request (:query or :body).
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :ids => :query,
      :maxResults => :query,
      :pageToken => :query,
      :searchString => :query,
      :sortField => :query,
      :sortOrder => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/subaccounts", %{
        "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.SubaccountsListResponse{}]
    )
  end

  @doc """
  Updates an existing subaccount. This method supports patch semantics.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
  *   `id` (*type:* `String.t`) - Subaccount ID.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
      *   `:access_token` (*type:* `String.t`) - OAuth access token.
      *   `:alt` (*type:* `String.t`) - Data format for response.
      *   `:callback` (*type:* `String.t`) - JSONP
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
      *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
      *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
      *   `:body` (*type:* `GoogleApi.DFAReporting.V33.Model.Subaccount.t`) -
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DFAReporting.V33.Model.Subaccount{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_subaccounts_patch(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.DFAReporting.V33.Model.Subaccount.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def dfareporting_subaccounts_patch(
        connection,
        profile_id,
        id,
        optional_params \\ [],
        opts \\ []
      ) do
    # Where each accepted optional parameter belongs in the request (:query or :body).
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:patch)
      |> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/subaccounts", %{
        "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
      })
      # Note: for patch the subaccount id travels as a query parameter, not in the path.
      |> Request.add_param(:query, :id, id)
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.Subaccount{}])
  end

  @doc """
  Updates an existing subaccount.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
      *   `:access_token` (*type:* `String.t`) - OAuth access token.
      *   `:alt` (*type:* `String.t`) - Data format for response.
      *   `:callback` (*type:* `String.t`) - JSONP
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
      *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
      *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
      *   `:body` (*type:* `GoogleApi.DFAReporting.V33.Model.Subaccount.t`) -
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DFAReporting.V33.Model.Subaccount{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_subaccounts_update(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.DFAReporting.V33.Model.Subaccount.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def dfareporting_subaccounts_update(connection, profile_id, optional_params \\ [], opts \\ []) do
    # Where each accepted optional parameter belongs in the request (:query or :body).
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:put)
      |> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/subaccounts", %{
        "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.Subaccount{}])
  end
end
| 46.391781 | 497 | 0.622512 |
08d09414f59fffe8806553232c581cb1d478c703 | 4,037 | ex | Elixir | apps/tai/lib/tai/markets/order_book.ex | ihorkatkov/tai | 09f9f15d2c385efe762ae138a8570f1e3fd41f26 | [
"MIT"
] | 1 | 2019-12-19T05:16:26.000Z | 2019-12-19T05:16:26.000Z | apps/tai/lib/tai/markets/order_book.ex | ihorkatkov/tai | 09f9f15d2c385efe762ae138a8570f1e3fd41f26 | [
"MIT"
] | null | null | null | apps/tai/lib/tai/markets/order_book.ex | ihorkatkov/tai | 09f9f15d2c385efe762ae138a8570f1e3fd41f26 | [
"MIT"
defmodule Tai.Markets.OrderBook do
  @moduledoc """
  Manage price points for a venue's order book
  """

  use GenServer

  alias __MODULE__

  defmodule ChangeSet do
    @moduledoc """
    A batch of price level changes (upserts & deletes) for one venue product.
    """

    @type venue_id :: Tai.Venues.Adapter.venue_id()
    @type product_symbol :: Tai.Venues.Product.symbol()
    @type side :: :bid | :ask
    @type price :: number
    @type size :: number
    @type upsert :: {:upsert, side, price, size}
    @type delete :: {:delete, side, price}
    @type change :: upsert | delete
    @type t :: %ChangeSet{
            venue: venue_id,
            symbol: product_symbol,
            changes: [change],
            last_received_at: DateTime.t(),
            last_venue_timestamp: DateTime.t() | nil
          }

    @enforce_keys ~w(venue symbol changes last_received_at)a
    defstruct ~w(venue symbol changes last_received_at last_venue_timestamp)a
  end

  @type venue_id :: Tai.Venues.Adapter.venue_id()
  @type product :: Tai.Venues.Product.t()
  @type product_symbol :: Tai.Venues.Product.symbol()
  @type t :: %OrderBook{
          venue_id: venue_id,
          product_symbol: product_symbol,
          bids: %{(price :: number) => size :: pos_integer},
          asks: %{(price :: number) => size :: pos_integer},
          last_received_at: DateTime.t(),
          last_venue_timestamp: DateTime.t() | nil
        }

  @enforce_keys ~w(venue_id product_symbol bids asks)a
  defstruct ~w(venue_id product_symbol bids asks last_received_at last_venue_timestamp)a

  @doc """
  Start the process that owns the order book for the given venue product.
  """
  @spec start_link(product) :: GenServer.on_start()
  def start_link(product) do
    name = to_name(product.venue_id, product.symbol)

    state = %OrderBook{
      venue_id: product.venue_id,
      product_symbol: product.symbol,
      bids: %{},
      asks: %{}
    }

    GenServer.start_link(__MODULE__, state, name: name)
  end

  @doc """
  Registered process name for a venue/product order book.
  """
  @spec to_name(venue_id, product_symbol) :: atom
  def to_name(venue, symbol), do: :"#{__MODULE__}_#{venue}_#{symbol}"

  @doc """
  Throw away the current book and rebuild it from the change set (snapshot).
  """
  @spec replace(ChangeSet.t()) :: :ok
  def replace(%OrderBook.ChangeSet{} = change_set) do
    change_set.venue
    |> OrderBook.to_name(change_set.symbol)
    |> GenServer.cast({:replace, change_set})
  end

  @doc """
  Apply an incremental change set on top of the current book.
  """
  @spec apply(ChangeSet.t()) :: term
  def apply(%ChangeSet{} = change_set) do
    change_set.venue
    |> OrderBook.to_name(change_set.symbol)
    |> GenServer.cast({:apply, change_set})
  end

  @impl true
  def init(state), do: {:ok, state}

  @impl true
  def handle_cast({:replace, %OrderBook.ChangeSet{} = change_set}, state) do
    # A snapshot starts from empty sides before replaying the changes
    new_state =
      %{state | bids: %{}, asks: %{}}
      |> stamp(change_set)
      |> apply_changes(change_set.changes)

    change_set.venue
    |> Tai.Markets.ProcessQuote.to_name(change_set.symbol)
    |> GenServer.cast({:order_book_snapshot, new_state, change_set})

    {:noreply, new_state}
  end

  @impl true
  def handle_cast({:apply, change_set}, state) do
    new_state =
      state
      |> stamp(change_set)
      |> apply_changes(change_set.changes)

    change_set.venue
    |> Tai.Markets.ProcessQuote.to_name(change_set.symbol)
    |> GenServer.cast({:order_book_apply, new_state, change_set})

    {:noreply, new_state}
  end

  # Record when the change set arrived and the venue's own timestamp.
  defp stamp(book, change_set) do
    %{
      book
      | last_received_at: change_set.last_received_at,
        last_venue_timestamp: change_set.last_venue_timestamp
    }
  end

  defp apply_changes(book, changes), do: Enum.reduce(changes, book, &apply_change/2)

  # One clause per change type & side (Enum.reduce passes the change first).
  defp apply_change({:upsert, :bid, price, size}, book),
    do: %{book | bids: Map.put(book.bids, price, size)}

  defp apply_change({:upsert, :ask, price, size}, book),
    do: %{book | asks: Map.put(book.asks, price, size)}

  defp apply_change({:delete, :bid, price}, book),
    do: %{book | bids: Map.delete(book.bids, price)}

  defp apply_change({:delete, :ask, price}, book),
    do: %{book | asks: Map.delete(book.asks, price)}
end
| 27.277027 | 77 | 0.622244 |
08d11d2955b1120acf81910416ce13147b0765dc | 1,519 | ex | Elixir | clients/tpu/lib/google_api/tpu/v1/model/empty.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/tpu/lib/google_api/tpu/v1/model/empty.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/tpu/lib/google_api/tpu/v1/model/empty.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.TPU.V1.Model.Empty do
  @moduledoc """
  A generic empty message that you can re-use to avoid defining duplicated
  empty messages in your APIs. A typical example is to use it as the request
  or the response type of an API method. For instance:

      service Foo {
        rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
      }

  The JSON representation for `Empty` is empty JSON object `{}`.

  ## Attributes

  """

  use GoogleApi.Gax.ModelBase

  # This model intentionally has no fields.
  @type t :: %__MODULE__{}
end
defimpl Poison.Decoder, for: GoogleApi.TPU.V1.Model.Empty do
  # Delegates JSON decoding to the decode/2 generated by GoogleApi.Gax.ModelBase.
  def decode(value, options) do
    GoogleApi.TPU.V1.Model.Empty.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.TPU.V1.Model.Empty do
  # Delegates JSON encoding to the shared ModelBase encoder.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 30.38 | 76 | 0.737986 |
08d1398f5d11cc14f8c2d0e51ee420d4a7b82746 | 198 | ex | Elixir | lib/hon_graffiti_phoenix/parsers/decorated_string.ex | rjdellecese/hon-graffiti-phoenix | a1ee866ef89f9b8ebb45a644db0b19729a37b07d | [
"MIT"
] | null | null | null | lib/hon_graffiti_phoenix/parsers/decorated_string.ex | rjdellecese/hon-graffiti-phoenix | a1ee866ef89f9b8ebb45a644db0b19729a37b07d | [
"MIT"
] | null | null | null | lib/hon_graffiti_phoenix/parsers/decorated_string.ex | rjdellecese/hon-graffiti-phoenix | a1ee866ef89f9b8ebb45a644db0b19729a37b07d | [
"MIT"
defmodule HonGraffitiPhoenix.Parsers.DecoratedString do
  @moduledoc """
  Provides a struct for strings with hon style markup
  """

  @typedoc "A chunk of graffiti text plus the color it should be rendered in."
  @type t :: %__MODULE__{body: String.t(), color: String.t()}

  # `body` is required; `color` falls back to "white" when not supplied.
  @enforce_keys [:body]
  defstruct [:body, color: "white"]
end
| 22 | 55 | 0.732323 |
08d14ac38a3a7c54683011b23815c7cef1bc0e6b | 1,591 | ex | Elixir | lib/termDirectory_web/controllers/fact_type_controller.ex | nliechti/termDirectory | 267b3025c14e26575c7a9483692e94a7bf29e5fe | [
"MIT"
] | 1 | 2019-03-15T15:40:24.000Z | 2019-03-15T15:40:24.000Z | lib/termDirectory_web/controllers/fact_type_controller.ex | nliechti/termDirectory | 267b3025c14e26575c7a9483692e94a7bf29e5fe | [
"MIT"
] | null | null | null | lib/termDirectory_web/controllers/fact_type_controller.ex | nliechti/termDirectory | 267b3025c14e26575c7a9483692e94a7bf29e5fe | [
"MIT"
defmodule TermDirectoryWeb.FactTypeController do
  @moduledoc false
  use TermDirectoryWeb, :controller

  alias TermDirectory.Modules
  alias TermDirectory.Modules.FactType

  # Unmatched `with` results (e.g. {:error, changeset}) fall through to the
  # fallback controller.
  action_fallback TermDirectoryWeb.FallbackController

  @doc """
  This catches the search url param and performs a search for
  the given search string in the database
  """
  def index(conn, %{"search" => search_string}) do
    fact_types = Modules.search_fact_type(search_string)
    render(conn, "index.json", fact_types: fact_types)
  end

  def index(conn, _params) do
    fact_types = Modules.list_fact_types()
    render(conn, "index.json", fact_types: fact_types)
  end

  def create(conn, fact_type_params) do
    with {:ok, %FactType{} = fact_type} <- Modules.create_fact_type(fact_type_params) do
      conn
      |> put_status(:created)
      |> put_resp_header("location", fact_type_path(conn, :show, fact_type))
      |> render("show.json", fact_type: fact_type)
    end
  end

  def show(conn, %{"id" => id}) do
    fact_type = Modules.get_fact_type!(id)
    render(conn, "show.json", fact_type: fact_type)
  end

  def update(conn, %{"id" => id} = fact_type_params) do
    fact_type = Modules.get_fact_type!(id)

    with {:ok, %FactType{} = updated} <- Modules.update_fact_type(fact_type, fact_type_params) do
      render(conn, "show.json", fact_type: updated)
    end
  end

  def delete(conn, %{"id" => id}) do
    fact_type = Modules.get_fact_type!(id)

    with {:ok, %FactType{}} <- Modules.delete_fact_type(fact_type) do
      send_resp(conn, :no_content, "")
    end
  end
end
| 28.927273 | 99 | 0.691389 |
08d169af44794946370fd00fb84da25169809ff6 | 1,801 | exs | Elixir | apps/app_udpcan/test/test_helper.exs | niclaslind/signalbroker-server | afb80514dcbabe561ac2da42adc08843a15c37c5 | [
"Apache-2.0"
] | 17 | 2020-06-20T11:29:43.000Z | 2022-03-21T05:53:06.000Z | apps/app_udpcan/test/test_helper.exs | niclaslind/signalbroker-server | afb80514dcbabe561ac2da42adc08843a15c37c5 | [
"Apache-2.0"
] | 2 | 2020-07-09T10:22:50.000Z | 2020-09-01T14:46:40.000Z | apps/app_udpcan/test/test_helper.exs | niclaslind/signalbroker-server | afb80514dcbabe561ac2da42adc08843a15c37c5 | [
"Apache-2.0"
] | 3 | 2020-07-17T20:04:36.000Z | 2022-01-24T14:19:46.000Z | # Copyright 2019 Volvo Cars
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# ”License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# “AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Boot ExUnit so the test helper modules below can be used by the suite.
ExUnit.start()
defmodule Helper.UdpClient do
  @moduledoc """
  Simple UDP client for unit testing.
  Parent pid is used to signal a received UDP packet.
  """
  use GenServer

  # Internal server state; `received` accumulates inbound packets, newest first.
  defmodule State, do: defstruct [:parent_pid, :socket, :dest_port, received: []]

  @doc "Opens a UDP socket on `listen_port`; outbound data goes to `dest_port` on loopback."
  def start_link(listen_port, dest_port),
    do: GenServer.start_link(__MODULE__, {listen_port, dest_port, self()})

  @doc "Sends `data` to the configured destination port on 127.0.0.1."
  def send_data(pid, data), do: GenServer.call(pid, {:send, data})

  @impl true
  def init({listen_port, dest_port, parent_pid}) do
    {:ok, socket} = :gen_udp.open(listen_port, [:binary])
    {:ok, %State{parent_pid: parent_pid, socket: socket, dest_port: dest_port}}
  end

  @impl true
  def handle_call({:send, data}, _from, state) do
    # Reply with the actual result (:ok | {:error, reason}) rather than an
    # unconditional :ok, so a failed send is visible to the test.
    reply = :gen_udp.send(state.socket, {127, 0, 0, 1}, state.dest_port, data)
    {:reply, reply, state}
  end

  @impl true
  def handle_info({:udp, _socket, _ip, _port, data}, state) do
    # Notify the test process and remember the packet for later inspection.
    send(state.parent_pid, {:helper_udp, data})
    {:noreply, %State{state | received: [data | state.received]}}
  end
end
| 33.981132 | 81 | 0.7196 |
08d172690bea9895b31a60c6bc0c6f7ef728e9f6 | 1,109 | exs | Elixir | config/config.exs | brainlid/tictac | 2e5c5cd44a147e5b7b754860d187fcfda74cf74a | [
"Apache-2.0"
] | 251 | 2021-04-30T04:46:53.000Z | 2022-03-30T06:39:52.000Z | config/config.exs | brainlid/tictac | 2e5c5cd44a147e5b7b754860d187fcfda74cf74a | [
"Apache-2.0"
] | 3 | 2021-05-06T03:01:39.000Z | 2022-03-29T13:04:16.000Z | config/config.exs | brainlid/tictac | 2e5c5cd44a147e5b7b754860d187fcfda74cf74a | [
"Apache-2.0"
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.

# General application configuration
use Mix.Config

# Configures the endpoint
# NOTE(review): the secret_key_base committed here is acceptable for a demo
# app, but production secrets should come from runtime/environment config.
config :tictac, TictacWeb.Endpoint,
  url: [host: "localhost"],
  secret_key_base: "/03gdZfIExh2op5Xzm1A0YuYp5VlP4nGDUbX6yc2TpR3MY/K/LCk8h2mLjF9PXEC",
  render_errors: [view: TictacWeb.ErrorView, accepts: ~w(html json), layout: false],
  pubsub_server: Tictac.PubSub,
  live_view: [signing_salt: "wxN4jnnC"]

# Track which mix environment this is for since Mix isn't available in
# production releases.
config :tictac, :env, Mix.env()

# Configures Elixir's Logger
config :logger, :console,
  format: "$time $metadata[$level] $message\n",
  metadata: [:request_id]

# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason

# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 33.606061 | 86 | 0.767358 |
08d175df4a1d871a3928cb5640e8e270d269ccb5 | 583 | exs | Elixir | test/types/map_test.exs | bamorim/exchema | 4e11136fae6092e0e64870b455fd6a68cf36603c | [
"Apache-2.0"
] | 21 | 2018-01-06T22:33:20.000Z | 2020-02-07T18:49:22.000Z | test/types/map_test.exs | bamorim/exchema | 4e11136fae6092e0e64870b455fd6a68cf36603c | [
"Apache-2.0"
] | 5 | 2018-02-15T16:39:14.000Z | 2018-11-30T21:06:08.000Z | test/types/map_test.exs | bamorim/exchema | 4e11136fae6092e0e64870b455fd6a68cf36603c | [
"Apache-2.0"
] | 4 | 2018-05-21T21:26:27.000Z | 2019-04-17T16:45:14.000Z | defmodule Exchema.Types.MapTest do
use ExUnit.Case
alias Exchema.Types, as: T
test "it allows only maps" do
a %{}
a %{1 => 2}
r nil
r ""
r 1
end
test "it can check the element type" do
a %{1 => 2}
a %{1 => 2, 3 => 4}
r %{1 => "1"}
r %{"1" => 1}
r %{"1" => "1"}
end
test "allow map without inner type" do
assert Exchema.is?(%{"1" => :a}, T.Map)
end
def a(val) do
assert Exchema.is?(val, {T.Map, {T.Integer, T.Integer}})
end
def r(val) do
refute Exchema.is?(val, {T.Map, {T.Integer, T.Integer}})
end
end
| 17.666667 | 60 | 0.524871 |
08d182da4e6dc0b96952acd05d8f1b4950e68bcc | 765 | ex | Elixir | lib/ueberauth_example_web/router.ex | is2ei/ueberauth_typetalk_example | 545df8f1312f42dfc256c06cf86779c11028e2e6 | [
"MIT"
] | null | null | null | lib/ueberauth_example_web/router.ex | is2ei/ueberauth_typetalk_example | 545df8f1312f42dfc256c06cf86779c11028e2e6 | [
"MIT"
] | 3 | 2020-07-17T15:34:04.000Z | 2021-05-09T22:10:14.000Z | lib/ueberauth_example_web/router.ex | is2ei/ueberauth_typetalk_example | 545df8f1312f42dfc256c06cf86779c11028e2e6 | [
"MIT"
] | null | null | null | defmodule UeberauthExampleWeb.Router do
@moduledoc false
use UeberauthExampleWeb, :router
require Ueberauth
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/auth", UeberauthExampleWeb do
pipe_through [:browser]
get "/:provider", AuthController, :request
get "/:provider/callback", AuthController, :callback
post "/:provider/callback", AuthController, :callback
post "/logout", AuthController, :delete
end
scope "/", UeberauthExampleWeb do
pipe_through :browser # Use the default browser stack
get "/", PageController, :index
end
end
| 22.5 | 57 | 0.70719 |
08d184abb94731570f6a56f62d2bd63d257da533 | 6,935 | exs | Elixir | .credo.exs | oestrich/hcl-elixir | aba949284785f81c4e690755cfec5e557c7bda4d | [
"MIT"
] | 125 | 2020-02-09T17:05:27.000Z | 2022-03-31T03:31:00.000Z | .credo.exs | oestrich/hcl-elixir | aba949284785f81c4e690755cfec5e557c7bda4d | [
"MIT"
] | 19 | 2020-02-20T02:36:38.000Z | 2021-08-03T16:24:19.000Z | .credo.exs | oestrich/hcl-elixir | aba949284785f81c4e690755cfec5e557c7bda4d | [
"MIT"
] | 8 | 2020-04-07T05:06:20.000Z | 2021-11-20T06:43:14.000Z | # This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
%{
#
# You can have as many configs as you like in the `configs:` field.
configs: [
%{
#
# Run any config using `mix credo -C <name>`. If no config name is given
# "default" is used.
#
name: "default",
#
# These are the files included in the analysis:
files: %{
#
# You can give explicit globs or simply directories.
# In the latter case `**/*.{ex,exs}` will be used.
#
included: [
"lib/",
"src/",
"test/",
"web/",
"apps/*/lib/",
"apps/*/src/",
"apps/*/test/",
"apps/*/web/"
],
excluded: [~r"/_build/", ~r"/deps/", ~r"/node_modules/"]
},
#
# Load and configure plugins here:
#
plugins: [],
#
# If you create your own checks, you must specify the source files for
# them here, so they can be loaded by Credo before running the analysis.
#
requires: [],
#
# If you want to enforce a style guide and need a more traditional linting
# experience, you can change `strict` to `true` below:
#
strict: false,
#
# To modify the timeout for parsing files, change this value:
#
parse_timeout: 5000,
#
# If you want to use uncolored output by default, you can change `color`
# to `false` below:
#
color: true,
#
# You can customize the parameters of any check by adding a second element
# to the tuple.
#
# To disable a check put `false` as second element:
#
# {Credo.Check.Design.DuplicatedCode, false}
#
checks: [
#
## Consistency Checks
#
{Credo.Check.Consistency.ExceptionNames, []},
{Credo.Check.Consistency.LineEndings, []},
{Credo.Check.Consistency.ParameterPatternMatching, []},
{Credo.Check.Consistency.SpaceAroundOperators, []},
{Credo.Check.Consistency.SpaceInParentheses, []},
{Credo.Check.Consistency.TabsOrSpaces, []},
#
## Design Checks
#
# You can customize the priority of any check
# Priority values are: `low, normal, high, higher`
#
{Credo.Check.Design.AliasUsage,
[priority: :low, if_nested_deeper_than: 2, if_called_more_often_than: 0]},
# You can also customize the exit_status of each check.
# If you don't want TODO comments to cause `mix credo` to fail, just
# set this value to 0 (zero).
#
{Credo.Check.Design.TagTODO, [exit_status: 2]},
{Credo.Check.Design.TagFIXME, []},
#
## Readability Checks
#
{Credo.Check.Readability.AliasOrder, []},
{Credo.Check.Readability.FunctionNames, []},
{Credo.Check.Readability.LargeNumbers, []},
{Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 120]},
{Credo.Check.Readability.ModuleAttributeNames, []},
{Credo.Check.Readability.ModuleDoc, []},
{Credo.Check.Readability.ModuleNames, []},
{Credo.Check.Readability.ParenthesesInCondition, []},
{Credo.Check.Readability.ParenthesesOnZeroArityDefs, []},
{Credo.Check.Readability.PredicateFunctionNames, []},
{Credo.Check.Readability.PreferImplicitTry, []},
{Credo.Check.Readability.RedundantBlankLines, []},
{Credo.Check.Readability.Semicolons, []},
{Credo.Check.Readability.SpaceAfterCommas, []},
{Credo.Check.Readability.StringSigils, []},
{Credo.Check.Readability.TrailingBlankLine, []},
{Credo.Check.Readability.TrailingWhiteSpace, []},
# TODO: enable by default in Credo 1.1
{Credo.Check.Readability.UnnecessaryAliasExpansion, false},
{Credo.Check.Readability.VariableNames, []},
#
## Refactoring Opportunities
#
{Credo.Check.Refactor.CondStatements, []},
{Credo.Check.Refactor.CyclomaticComplexity, []},
{Credo.Check.Refactor.FunctionArity, []},
{Credo.Check.Refactor.LongQuoteBlocks, []},
{Credo.Check.Refactor.MapInto, false},
{Credo.Check.Refactor.MatchInCondition, []},
{Credo.Check.Refactor.NegatedConditionsInUnless, []},
{Credo.Check.Refactor.NegatedConditionsWithElse, []},
{Credo.Check.Refactor.Nesting, []},
{Credo.Check.Refactor.UnlessWithElse, []},
{Credo.Check.Refactor.WithClauses, []},
#
## Warnings
#
{Credo.Check.Warning.BoolOperationOnSameValues, []},
{Credo.Check.Warning.ExpensiveEmptyEnumCheck, []},
{Credo.Check.Warning.IExPry, []},
{Credo.Check.Warning.IoInspect, []},
{Credo.Check.Warning.LazyLogging, false},
{Credo.Check.Warning.MixEnv, false},
{Credo.Check.Warning.OperationOnSameValues, []},
{Credo.Check.Warning.OperationWithConstantResult, []},
{Credo.Check.Warning.RaiseInsideRescue, []},
{Credo.Check.Warning.UnusedEnumOperation, []},
{Credo.Check.Warning.UnusedFileOperation, []},
{Credo.Check.Warning.UnusedKeywordOperation, []},
{Credo.Check.Warning.UnusedListOperation, []},
{Credo.Check.Warning.UnusedPathOperation, []},
{Credo.Check.Warning.UnusedRegexOperation, []},
{Credo.Check.Warning.UnusedStringOperation, []},
{Credo.Check.Warning.UnusedTupleOperation, []},
#
# Checks scheduled for next check update (opt-in for now, just replace `false` with `[]`)
#
# Controversial and experimental checks (opt-in, just replace `false` with `[]`)
#
{Credo.Check.Consistency.MultiAliasImportRequireUse, false},
{Credo.Check.Consistency.UnusedVariableNames, false},
{Credo.Check.Design.DuplicatedCode, false},
{Credo.Check.Readability.AliasAs, false},
{Credo.Check.Readability.MultiAlias, false},
{Credo.Check.Readability.Specs, false},
{Credo.Check.Readability.SinglePipe, false},
{Credo.Check.Refactor.ABCSize, false},
{Credo.Check.Refactor.AppendSingleItem, false},
{Credo.Check.Refactor.DoubleBooleanNegation, false},
{Credo.Check.Refactor.ModuleDependencies, false},
{Credo.Check.Refactor.NegatedIsNil, false},
{Credo.Check.Refactor.PipeChainStart, false},
{Credo.Check.Refactor.VariableRebinding, false},
{Credo.Check.Warning.MapGetUnsafePass, false},
{Credo.Check.Warning.UnsafeToAtom, false}
#
# Custom checks can be created using `mix credo.gen.check`.
#
]
}
]
}
| 37.896175 | 97 | 0.60894 |
08d19e7eb57413074b947c74b41e02dd584956bc | 3,164 | ex | Elixir | clients/gke_hub/lib/google_api/gke_hub/v1/model/operation.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/gke_hub/lib/google_api/gke_hub/v1/model/operation.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/gke_hub/lib/google_api/gke_hub/v1/model/operation.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.GKEHub.V1.Model.Operation do
@moduledoc """
This resource represents a long-running operation that is the result of a network API call.
## Attributes
* `done` (*type:* `boolean()`, *default:* `nil`) - If the value is `false`, it means the operation is still in progress. If `true`, the operation is completed, and either `error` or `response` is available.
* `error` (*type:* `GoogleApi.GKEHub.V1.Model.GoogleRpcStatus.t`, *default:* `nil`) - The error result of the operation in case of failure or cancellation.
* `metadata` (*type:* `map()`, *default:* `nil`) - Service-specific metadata associated with the operation. It typically contains progress information and common metadata such as create time. Some services might not provide such metadata. Any method that returns a long-running operation should document the metadata type, if any.
* `name` (*type:* `String.t`, *default:* `nil`) - The server-assigned name, which is only unique within the same service that originally returns it. If you use the default HTTP mapping, the `name` should be a resource name ending with `operations/{unique_id}`.
* `response` (*type:* `map()`, *default:* `nil`) - The normal response of the operation in case of success. If the original method returns no data on success, such as `Delete`, the response is `google.protobuf.Empty`. If the original method is standard `Get`/`Create`/`Update`, the response should be the resource. For other methods, the response should have the type `XxxResponse`, where `Xxx` is the original method name. For example, if the original method name is `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:done => boolean() | nil,
:error => GoogleApi.GKEHub.V1.Model.GoogleRpcStatus.t() | nil,
:metadata => map() | nil,
:name => String.t() | nil,
:response => map() | nil
}
field(:done)
field(:error, as: GoogleApi.GKEHub.V1.Model.GoogleRpcStatus)
field(:metadata, type: :map)
field(:name)
field(:response, type: :map)
end
defimpl Poison.Decoder, for: GoogleApi.GKEHub.V1.Model.Operation do
def decode(value, options) do
GoogleApi.GKEHub.V1.Model.Operation.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.GKEHub.V1.Model.Operation do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 53.627119 | 543 | 0.720607 |
08d1b722ad3b34f0c305d03fdfd276265932f7c2 | 75 | ex | Elixir | phone/lib/phone.ex | ejc123/music_phone | 868dae9fc40224e52a63386672bb0ae8cff2f97d | [
"Apache-2.0"
] | 2 | 2021-04-01T18:18:02.000Z | 2021-04-01T18:18:02.000Z | phone/lib/phone.ex | ejc123/music_phone | 868dae9fc40224e52a63386672bb0ae8cff2f97d | [
"Apache-2.0"
] | null | null | null | phone/lib/phone.ex | ejc123/music_phone | 868dae9fc40224e52a63386672bb0ae8cff2f97d | [
"Apache-2.0"
] | null | null | null | defmodule Phone do
@moduledoc """
Documentation for `Phone`.
"""
end
| 12.5 | 28 | 0.653333 |
08d1ce479e9cc72d5cfc64269c75a3874e4acf43 | 883 | exs | Elixir | bench/bench.decoder.exs | aaronrenner/toml-elixir | c2d17a29a4fd31739f04247cc6d3d9054ec27f7e | [
"Apache-2.0"
] | 176 | 2018-07-29T08:29:40.000Z | 2022-03-30T06:20:39.000Z | bench/bench.decoder.exs | aaronrenner/toml-elixir | c2d17a29a4fd31739f04247cc6d3d9054ec27f7e | [
"Apache-2.0"
] | 26 | 2018-08-02T19:00:56.000Z | 2022-03-18T21:29:05.000Z | bench/bench.decoder.exs | aaronrenner/toml-elixir | c2d17a29a4fd31739f04247cc6d3d9054ec27f7e | [
"Apache-2.0"
] | 22 | 2018-08-02T18:39:52.000Z | 2021-12-25T09:53:58.000Z | decode_jobs = %{
"toml" => fn path -> {:ok, _} = Toml.decode_file(path) end,
# Incorrect implementation of 0.5.0 (expected, but fails during parsing)
# "toml_elixir" => fn path -> {:ok, _} = TomlElixir.parse_file(path) end,
# Doesn't support 0.5.0 spec, or incomplete
# "tomlex" => fn path -> %{} = Tomlex.load(File.read!(path)) end,
# "jerry" => fn path -> %{} = Jerry.decode(File.read!(path)) end,
# "etoml" => fn path -> {:ok, _} = :etoml.parse(File.read!(path)) end,
}
inputs = %{
"example.toml" => Path.join([__DIR__, "..", "test", "fixtures", "example.toml"])
}
Benchee.run(decode_jobs,
warmup: 5,
time: 30,
memory_time: 1,
inputs: inputs,
formatters: [
&Benchee.Formatters.HTML.output/1,
&Benchee.Formatters.Console.output/1,
],
formatter_options: [
html: [
file: Path.expand("output/decode.html", __DIR__)
]
]
)
| 29.433333 | 82 | 0.605889 |
08d1d33487e6c80ac9d62f19805b29545b0b89f9 | 1,195 | ex | Elixir | lib/rfxi_web/channels/user_socket.ex | andyl/rfxi | 9007c75693d643555c45a20e9634dd4b3867deba | [
"MIT"
] | 1 | 2021-08-10T14:46:10.000Z | 2021-08-10T14:46:10.000Z | lib/rfxi_web/channels/user_socket.ex | andyl/rfxi | 9007c75693d643555c45a20e9634dd4b3867deba | [
"MIT"
] | 2 | 2021-06-22T14:12:37.000Z | 2021-06-28T05:06:23.000Z | lib/rfxi_web/channels/user_socket.ex | andyl/rfxi | 9007c75693d643555c45a20e9634dd4b3867deba | [
"MIT"
] | null | null | null | defmodule RfxiWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", RfxiWeb.RoomChannel
channel "ping", RfxiWeb.PingChannel
channel "echo", RfxiWeb.EchoChannel
channel "rfx", RfxiWeb.RfxChannel
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
@impl true
def connect(_params, socket, _connect_info) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# RfxiWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
@impl true
def id(_socket), do: nil
end
| 30.641026 | 83 | 0.697908 |
08d20b95365abe87247670be7e47f9bf66d348d9 | 408 | ex | Elixir | test/support/bill_item_test_helpers.ex | aramisf/caravan | add305c70ee1ceb4a3dff9a7ed606a182e6f12d7 | [
"MIT"
] | null | null | null | test/support/bill_item_test_helpers.ex | aramisf/caravan | add305c70ee1ceb4a3dff9a7ed606a182e6f12d7 | [
"MIT"
] | null | null | null | test/support/bill_item_test_helpers.ex | aramisf/caravan | add305c70ee1ceb4a3dff9a7ed606a182e6f12d7 | [
"MIT"
] | null | null | null | defmodule Caravan.BillItemTestHelpers do
import Caravan.BillTestHelpers
alias Caravan.Repo
alias Caravan.BillItem
@endpoint Caravan.Endpoint
def valid_bill_item_attrs do
bill = create_bill
%{bill_id: bill.id, amount: 62, description: "Real valid item"}
end
def create_bill_item(attrs \\ valid_bill_item_attrs) do
Repo.insert!(BillItem.changeset(%BillItem{}, attrs))
end
end
| 20.4 | 67 | 0.75 |
08d22090a53810db1bb719a9493963cb3f58b898 | 14,482 | ex | Elixir | lib/mastani_server/cms/delegates/seeds.ex | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | 1 | 2019-05-07T15:03:54.000Z | 2019-05-07T15:03:54.000Z | lib/mastani_server/cms/delegates/seeds.ex | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | null | null | null | lib/mastani_server/cms/delegates/seeds.ex | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | null | null | null | defmodule MastaniServer.CMS.Delegate.Seeds do
@moduledoc """
seeds data for init, should be called ONLY in new database, like migration
"""
import Helper.Utils, only: [done: 1]
import Ecto.Query, warn: false
@oss_endpoint "https://cps-oss.oss-cn-shanghai.aliyuncs.com"
# import MastaniServer.CMS.Utils.Matcher
# import Helper.Utils, only: [done: 1, map_atom_value: 2]
# import MastaniServer.CMS.Delegate.ArticleCURD, only: [ensure_author_exists: 1]
import ShortMaps
alias Helper.ORM
# alias Helper.QueryBuilder
alias MastaniServer.{Accounts, CMS}
alias MastaniServer.CMS.{Community, Thread, Category}
alias CMS.Delegate.SeedsConfig
# threads
@default_threads SeedsConfig.threads(:default)
@city_threads SeedsConfig.threads(:city, :list)
@home_threads SeedsConfig.threads(:home, :list)
# communities
# done
@pl_communities SeedsConfig.communities(:pl)
@framework_communities SeedsConfig.communities(:framework)
@ui_communities SeedsConfig.communities(:ui)
@editor_communities SeedsConfig.communities(:editor)
@database_communities SeedsConfig.communities(:database)
@devops_communities SeedsConfig.communities(:devops)
@dblockchain_communities SeedsConfig.communities(:blockchain)
# done
@city_communities SeedsConfig.communities(:city)
# categories
@default_categories SeedsConfig.categories(:default)
@doc """
seed communities pragraming languages
"""
def seed_communities(:pl) do
with {:ok, threads} <- seed_threads(:default),
{:ok, bot} <- seed_bot(),
{:ok, categories} <- seed_categories(bot, :default),
{:ok, communities} <- seed_for_communities(bot, :pl) do
threadify_communities(communities, threads.entries)
tagfy_threads(communities, threads.entries, bot)
categorify_communities(communities, categories, :pl)
end
end
@doc """
seed communities for frameworks
"""
def seed_communities(:framework) do
with {:ok, threads} <- seed_threads(:default),
{:ok, bot} <- seed_bot(),
{:ok, _categories} <- seed_categories(bot, :default),
{:ok, communities} <- seed_for_communities(bot, :framework) do
threadify_communities(communities, threads.entries)
tagfy_threads(communities, threads.entries, bot)
# categorify_communities(communities, categories, :other)
end
end
@doc """
seed communities for editors
"""
def seed_communities(:editor) do
with {:ok, threads} <- seed_threads(:default),
{:ok, bot} <- seed_bot(),
{:ok, categories} <- seed_categories(bot, :default),
{:ok, communities} <- seed_for_communities(bot, :editor) do
threadify_communities(communities, threads.entries)
tagfy_threads(communities, threads.entries, bot)
categorify_communities(communities, categories, :other)
end
end
@doc """
seed communities for database
"""
def seed_communities(:database) do
with {:ok, threads} <- seed_threads(:default),
{:ok, bot} <- seed_bot(),
{:ok, _categories} <- seed_categories(bot, :default),
{:ok, communities} <- seed_for_communities(bot, :database) do
threadify_communities(communities, threads.entries)
tagfy_threads(communities, threads.entries, bot)
# categorify_communities(communities, categories, :other)
end
end
@doc """
seed communities for database
"""
def seed_communities(:devops) do
with {:ok, threads} <- seed_threads(:default),
{:ok, bot} <- seed_bot(),
{:ok, categories} <- seed_categories(bot, :default),
{:ok, communities} <- seed_for_communities(bot, :devops) do
threadify_communities(communities, threads.entries)
tagfy_threads(communities, threads.entries, bot)
categorify_communities(communities, categories, :other)
end
end
@doc """
seed communities for database
"""
def seed_communities(:blockchain) do
with {:ok, threads} <- seed_threads(:default),
{:ok, bot} <- seed_bot(),
{:ok, _categories} <- seed_categories(bot, :default),
{:ok, communities} <- seed_for_communities(bot, :blockchain) do
threadify_communities(communities, threads.entries)
tagfy_threads(communities, threads.entries, bot)
# categorify_communities(communities, categories, :other)
end
end
@doc """
seed communities for designs
"""
def seed_communities(:ui) do
with {:ok, threads} <- seed_threads(:default),
{:ok, bot} <- seed_bot(),
{:ok, _categories} <- seed_categories(bot, :default),
{:ok, communities} <- seed_for_communities(bot, :ui) do
threadify_communities(communities, threads.entries)
tagfy_threads(communities, threads.entries, bot)
# categorify_communities(communities, categories, :other)
end
end
@doc """
seed communities for cities
"""
def seed_communities(:city) do
with {:ok, threads} <- seed_threads(:city),
{:ok, bot} <- seed_bot(),
{:ok, categories} <- seed_categories(bot, :default),
{:ok, communities} <- seed_for_communities(bot, :city) do
threadify_communities(communities, threads.entries)
tagfy_threads(communities, threads.entries, bot, :city)
categorify_communities(communities, categories, :city)
end
end
@doc """
seed community for home
"""
def seed_communities(:home) do
with {:error, _} <- ORM.find_by(CMS.Community, %{raw: "home"}),
{:ok, bot} <- seed_bot(),
{:ok, threads} <- seed_threads(:home),
{:ok, categories} <- seed_categories(bot, :default) do
args = %{
title: "coderplanets",
desc: "the most sexy community for developers, ever.",
logo: "#{@oss_endpoint}/icons/cmd/keyboard_logo.png",
raw: "home",
user_id: bot.id
}
{:ok, community} = Community |> ORM.create(args)
threadify_communities(community, threads.entries)
tagfy_threads(community, threads.entries, bot, :home)
categorify_communities(community, categories, :other)
end
end
@doc """
seed default threads like: post, user, wiki, cheetsheet, job ..
"""
def seed_threads(:default) do
case ORM.find_by(CMS.Thread, %{raw: "post"}) do
{:ok, _} ->
{:ok, :pass}
{:error, _} ->
@default_threads
|> Enum.each(fn thread ->
{:ok, _thread} = CMS.create_thread(thread)
end)
end
thread_titles =
@default_threads
|> Enum.reduce([], fn x, acc -> acc ++ [x.title] end)
CMS.Thread
|> where([t], t.raw in ^thread_titles)
|> ORM.paginater(page: 1, size: 30)
|> done()
end
def seed_threads(:city) do
case ORM.find_by(CMS.Thread, %{raw: "post"}) do
{:ok, _} -> {:ok, :pass}
{:error, _} -> seed_threads(:default)
end
{:ok, _thread} = CMS.create_thread(%{title: "group", raw: "group", index: 1})
{:ok, _thread} = CMS.create_thread(%{title: "company", raw: "company", index: 2})
CMS.Thread
|> where([t], t.raw in @city_threads)
|> ORM.paginater(page: 1, size: 10)
|> done()
end
# NOTE: the home threads should be insert after default threads
def seed_threads(:home) do
case ORM.find_by(CMS.Thread, %{raw: "post"}) do
{:ok, _} -> {:ok, :pass}
{:error, _} -> seed_threads(:default)
end
{:ok, _thread} = CMS.create_thread(%{title: "tech", raw: "tech", index: 1})
{:ok, _thread} = CMS.create_thread(%{title: "radar", raw: "radar", index: 2})
{:ok, _thread} = CMS.create_thread(%{title: "share", raw: "share", index: 3})
{:ok, _thread} = CMS.create_thread(%{title: "city", raw: "city", index: 16})
CMS.Thread
|> where([t], t.raw in @home_threads)
|> ORM.paginater(page: 1, size: 10)
|> done()
end
def seed_categories(bot, :default) do
case is_empty_db?(Category) do
true ->
Enum.each(@default_categories, fn cat ->
CMS.create_category(cat, bot)
end)
false ->
"pass"
end
ORM.find_all(Category, %{page: 1, size: 20})
end
@doc """
set list of communities to a spec category
"""
def seed_set_category(communities_names, cat_name) when is_list(communities_names) do
{:ok, category} = ORM.find_by(Category, %{raw: cat_name})
Enum.each(communities_names, fn name ->
{:ok, community} = ORM.find_by(Community, %{raw: name})
{:ok, _} = CMS.set_category(%Community{id: community.id}, %Category{id: category.id})
end)
end
defp seed_bot do
case ORM.find(Accounts.User, 1) do
{:ok, user} ->
{:ok, user}
{:error, _} ->
nickname = "cps_bot_2398614_2018"
avatar = "https://avatars1.githubusercontent.com/u/6184465?s=460&v=4"
Accounts.User |> ORM.findby_or_insert(~m(nickname avatar)a, ~m(nickname avatar)a)
# Accounts.User |> ORM.create(~m(nickname avatar)a)
end
end
# seed raw communities, without thread or categories staff
defp seed_for_communities(bot, :pl) do
with {:error, _} <- ORM.find_by(Community, %{raw: "javascript"}) do
{:ok, _communities} = insert_multi_communities(bot, @pl_communities, :pl)
end
end
defp seed_for_communities(bot, :framework) do
with {:error, _} <- ORM.find_by(Community, %{raw: "react"}) do
{:ok, _communities} = insert_multi_communities(bot, @framework_communities, :framework)
end
end
defp seed_for_communities(bot, :editor) do
with {:error, _} <- ORM.find_by(Community, %{raw: "emacs"}) do
{:ok, _communities} = insert_multi_communities(bot, @editor_communities, :editor)
end
end
defp seed_for_communities(bot, :database) do
with {:error, _} <- ORM.find_by(Community, %{raw: "mysql"}) do
{:ok, _communities} = insert_multi_communities(bot, @database_communities, :database)
end
end
defp seed_for_communities(bot, :devops) do
with {:error, _} <- ORM.find_by(Community, %{raw: "cps-support"}) do
{:ok, _communities} = insert_multi_communities(bot, @devops_communities, :devops)
end
end
defp seed_for_communities(bot, :blockchain) do
with {:error, _} <- ORM.find_by(Community, %{raw: "bitcoin"}) do
{:ok, _communities} = insert_multi_communities(bot, @dblockchain_communities, :blockchain)
end
end
defp seed_for_communities(bot, :ui) do
with {:error, _} <- ORM.find_by(Community, %{raw: "css"}) do
{:ok, _communities} = insert_multi_communities(bot, @ui_communities, :ui)
end
end
defp seed_for_communities(bot, :city) do
with {:error, _} <- ORM.find_by(Community, %{raw: "chengdu"}) do
{:ok, _communities} = insert_multi_communities(bot, @city_communities, :city)
end
end
defp svg_icons do
[
"cps-support",
"beijing",
"shanghai",
"shenzhen",
"hangzhou",
"guangzhou",
"chengdu",
"wuhan",
"xiamen",
"nanjing"
]
end
defp insert_multi_communities(bot, communities, type) do
type = Atom.to_string(type)
communities =
Enum.reduce(communities, [], fn c, acc ->
ext = if Enum.member?(svg_icons(), c), do: "svg", else: "png"
args = %{
title: trans(c),
aka: c,
desc: "#{c} is awesome!",
logo: "#{@oss_endpoint}/icons/#{type}/#{c}.#{ext}",
raw: c,
user_id: bot.id
}
{:ok, community} = ORM.create(Community, args)
acc ++ [community]
end)
{:ok, communities}
end
defp trans("beijing"), do: "北京"
defp trans("shanghai"), do: "上海"
defp trans("shenzhen"), do: "深圳"
defp trans("hangzhou"), do: "杭州"
defp trans("guangzhou"), do: "广州"
defp trans("chengdu"), do: "成都"
defp trans("wuhan"), do: "武汉"
defp trans("xiamen"), do: "厦门"
defp trans("nanjing"), do: "南京"
defp trans(c), do: c
# set threads to given communities
defp threadify_communities(communities, threads) when is_list(communities) do
Enum.each(communities, fn community ->
Enum.each(threads, fn thread ->
{:ok, _} = CMS.set_thread(%Community{id: community.id}, %Thread{id: thread.id})
end)
end)
end
defp threadify_communities(community, threads) do
Enum.each(threads, fn thread ->
# System.halt(0)
{:ok, _} = CMS.set_thread(%Community{id: community.id}, %Thread{id: thread.id})
end)
end
# tagfy only post job repo and video
defp tagfy_threads(communities, _threads, bot, :city) when is_list(communities) do
Enum.each(communities, fn community ->
set_tags(community, :post, bot, :city)
end)
end
defp tagfy_threads(communities, threads, bot) when is_list(communities) do
Enum.each(communities, fn community ->
Enum.each(threads, fn thread ->
set_tags(community, thread, bot)
end)
end)
end
defp tagfy_threads(community, threads, bot, :home) do
Enum.each(threads, fn thread ->
set_tags(community, thread, bot, :home)
end)
end
defp set_tags(%Community{} = community, %Thread{raw: raw}, bot) do
thread = raw |> String.to_atom()
Enum.each(SeedsConfig.tags(thread), fn attr ->
CMS.create_tag(community, thread, attr, bot)
end)
end
defp set_tags(%Community{} = community, :post, bot, :city) do
Enum.each(SeedsConfig.tags(:city, :post), fn attr ->
CMS.create_tag(community, :post, attr, bot)
end)
end
defp set_tags(%Community{} = community, %Thread{raw: raw}, bot, :home) do
thread = raw |> String.to_atom()
Enum.each(SeedsConfig.tags(:home, thread), fn attr ->
CMS.create_tag(community, thread, attr, bot)
end)
end
# set categories to given communities
defp categorify_communities(communities, categories, part)
when is_list(communities) and is_atom(part) do
the_category = categories.entries |> Enum.find(fn cat -> cat.raw == Atom.to_string(part) end)
Enum.each(communities, fn community ->
{:ok, _} = CMS.set_category(%Community{id: community.id}, %Category{id: the_category.id})
end)
end
defp categorify_communities(community, categories, part) when is_atom(part) do
the_category = categories.entries |> Enum.find(fn cat -> cat.raw == Atom.to_string(part) end)
{:ok, _} = CMS.set_category(%Community{id: community.id}, %Category{id: the_category.id})
end
# check is the seeds alreay runed
defp is_empty_db?(queryable) do
{:ok, results} = ORM.find_all(queryable, %{page: 1, size: 20})
results.total_count == 0
end
end
| 31.211207 | 97 | 0.644731 |
08d248216d8a45bca1ae801687c444199dcc835f | 3,398 | ex | Elixir | lib/utility/cache.ex | zestcreative/elixir-utilities-web | 6d7545b61939a038bb277790bb9a9ccd683dd16a | [
"MIT"
] | 25 | 2020-09-16T22:01:53.000Z | 2022-03-12T02:01:01.000Z | lib/utility/cache.ex | zestcreative/elixir-utilities-web | 6d7545b61939a038bb277790bb9a9ccd683dd16a | [
"MIT"
] | 2 | 2021-07-26T20:30:26.000Z | 2022-03-29T19:51:48.000Z | lib/utility/cache.ex | zestcreative/elixir-utilities-web | 6d7545b61939a038bb277790bb9a9ccd683dd16a | [
"MIT"
defmodule Utility.Cache do
  @moduledoc """
  Interface for caching data. This serves as the layer between the real implementations and mock
  implementations, and to hide adapters from application code.
  """

  defmodule Error do
    @moduledoc "Raised by the bang (`!`) variants when the adapter returns `{:error, reason}`."
    defexception [:message, :original_error]
  end

  @callback multi(list(), Keyword.t()) :: {:ok, list(any())} | {:error, any()}
  @callback hash_get(any(), any(), Keyword.t()) :: {:ok, any()} | {:error, any()}
  @callback hash_set(any(), any(), any(), Keyword.t()) :: {:ok, any()} | {:error, any()}
  @callback keys(any(), Keyword.t()) :: {:ok, list(any())} | {:error, any()}
  @callback bust(any(), Keyword.t()) :: {:ok, any()} | {:error, any()}
  @callback flush(Keyword.t()) :: {:ok, any()} | {:error, any()}
  @callback expire(any(), integer(), Keyword.t()) :: {:ok, any()} | {:error, any()}

  # NOTE(review): resolved once at compile time, so the adapter cannot be
  # swapped at runtime; `Application.compile_env/3` would make that freeze
  # explicit — confirm the compile-time resolution is intended.
  @module Application.get_env(:utility, :cache)

  @doc "Runs a batch of cache commands through the configured adapter."
  @spec multi(any(), Keyword.t()) :: {:ok, list(any())} | {:error, any()}
  def multi(term, opts \\ []), do: @module.multi(term, opts)

  @doc "Sets a time-to-live on `term`; TTL units are adapter-defined — TODO confirm."
  # FIX: return type now matches the `expire` callback ({:ok, any()}, not a list).
  @spec expire(any(), integer(), Keyword.t()) :: {:ok, any()} | {:error, any()}
  def expire(term, ttl, opts \\ []), do: @module.expire(term, ttl, opts)

  @doc "Lists keys matching `term`."
  @spec keys(any(), Keyword.t()) :: {:ok, list(any())} | {:error, any()}
  def keys(term, opts \\ []), do: @module.keys(term, opts)

  @doc "Like `keys/2`, but raises `Utility.Cache.Error` on failure."
  # FIX: spec previously declared arity 1; the function takes (term, opts \\ []).
  @spec keys!(any(), Keyword.t()) :: list(any()) | no_return()
  def keys!(term, opts \\ []) do
    case keys(term, opts) do
      {:ok, value} ->
        value

      {:error, error} ->
        raise Error, message: "Error when getting keys", original_error: error
    end
  end

  @doc "Clears the whole cache."
  @spec flush(Keyword.t()) :: {:ok, any()} | {:error, any()}
  def flush(opts \\ []), do: @module.flush(opts)

  @doc "Like `flush/1`, but raises `Utility.Cache.Error` on failure."
  @spec flush!(Keyword.t()) :: any() | no_return()
  def flush!(opts \\ []) do
    case flush(opts) do
      {:ok, value} ->
        value

      {:error, error} ->
        raise Error, message: "Error when flushing", original_error: error
    end
  end

  @doc "Removes `key` from the cache."
  @spec bust(binary(), Keyword.t()) :: {:ok, any()} | {:error, any()}
  def bust(key, opts \\ []), do: @module.bust(key, opts)

  @doc "Like `bust/2`, but raises `Utility.Cache.Error` on failure."
  @spec bust!(binary(), Keyword.t()) :: any() | no_return()
  def bust!(key, opts \\ []) do
    case bust(key, opts) do
      {:ok, value} ->
        value

      {:error, error} ->
        raise Error, message: "Error when busting #{key}", original_error: error
    end
  end

  @doc "Fetches `field` from the hash stored under `key`."
  # FIX: spec now includes the options argument (arity 3 with default).
  @spec hash_get(binary(), binary(), Keyword.t()) :: {:ok, any()} | {:error, any()}
  def hash_get(key, field, opts \\ []), do: @module.hash_get(key, field, opts)

  @doc "Like `hash_get/3`, but raises `Utility.Cache.Error` on failure."
  # FIX: spec now includes the options argument (arity 3 with default).
  @spec hash_get!(binary(), binary(), Keyword.t()) :: any() | no_return()
  def hash_get!(key, field, opts \\ []) do
    case hash_get(key, field, opts) do
      {:ok, value} ->
        value

      {:error, error} ->
        raise Error, message: "Error when retrieving #{key}:#{field}", original_error: error
    end
  end

  @doc "Stores `value` under `field` in the hash at `key`."
  @spec hash_set(binary(), binary(), any(), Keyword.t()) :: {:ok, any()} | {:error, any()}
  def hash_set(key, field, value, options \\ []), do: @module.hash_set(key, field, value, options)

  @doc "Like `hash_set/4`, but raises `Utility.Cache.Error` on failure."
  @spec hash_set!(binary(), binary(), any(), Keyword.t()) :: any() | no_return()
  def hash_set!(key, field, value, options \\ []) do
    case hash_set(key, field, value, options) do
      {:ok, value} ->
        value

      {:error, error} ->
        raise Error,
          message: "Error when setting #{key}:#{field} to #{value}",
          original_error: error
    end
  end
end
| 33.643564 | 98 | 0.568275 |
08d27f1b9985b18bc68ff82acbfebc0fbffeebb2 | 5,772 | ex | Elixir | lib/zen_monitor.ex | LaudateCorpus1/zen_monitor | a01589e2df68ad643974b79a05fc3212587eca53 | [
"MIT"
] | 84 | 2020-04-29T12:15:29.000Z | 2022-02-15T09:52:37.000Z | lib/zen_monitor.ex | LaudateCorpus1/zen_monitor | a01589e2df68ad643974b79a05fc3212587eca53 | [
"MIT"
] | null | null | null | lib/zen_monitor.ex | LaudateCorpus1/zen_monitor | a01589e2df68ad643974b79a05fc3212587eca53 | [
"MIT"
defmodule ZenMonitor do
  @moduledoc """
  ZenMonitor provides efficient monitoring of remote processes and controlled dissemination of
  any resulting `:DOWN` messages.

  This module provides a convenient client interface which aims to be a drop in replacement for
  `Process.monitor/1` and `Process.demonitor/2`

  # Known differences between ZenMonitor and Process

  - `ZenMonitor.demonitor/2` has the same signature as Process.demonitor/2 but does not respect
    the `:info` option.
  - ZenMonitor aims to be efficient over distribution, one of the main strategies for achieving
    this is relying mainly on local monitors and then batching up all changes over a time period
    to be sent as a single message. This design means that additional latency is added to the
    delivery of down messages in pursuit of the goal. Where `Process.monitor/1` on a remote
    process will provide a :DOWN message as soon as possible, `ZenMonitor.monitor/1` on a remote
    process will actually have a number of batching periods to go through before the message
    arrives at the monitoring process, here are all the points that add latency.

    1. When the monitor is enqueued it has to wait until the next sweep happens in the
       `ZenMonitor.Local.Connector` until it will be delivered to the `ZenMonitor.Proxy`.
    1. The monitor arrives at the `ZenMonitor.Proxy`, the process crashes and the ERTS `:DOWN`
       message is delivered. This will be translated into a death_certificate and sent to a
       `ZenMonitor.Proxy.Batcher` for delivery. It will have to wait until the next sweep
       happens for it to be sent back to the `ZenMonitor.Local.Connector` for fan-out.
    1. The dead summary including the death_certificate arrives at the
       `ZenMonitor.Local.Connector` and a down_dispatch is created for it and enqueued with the
       `ZenMonitor.Local`.
    1. The down_dispatch waits in a queue until the `ZenMonitor.Local.Dispatcher` generates
       more demand.
    1. Once demand is generated, `ZenMonitor.Local` will hand off the down_dispatch for actual
       delivery by `ZenMonitor.Local.Dispatcher`.

    * Steps 1 and 3 employ a strategy of batch sizing to prevent the message from growing too
      large. The batch size is controlled by application configuration and is alterable at boot
      and runtime. This means though that Steps 1 and 3 can be delayed by N intervals
      where `N = ceil(items_ahead_of_event / chunk_size)`
    * Step 4 employs a similar batching strategy, a down_dispatch will wait in queue for up to N
      intervals where `N = ceil(items_ahead_of_dispatch / chunk_size)`
  - `ZenMonitor` decorates the reason of the `:DOWN` message. If a remote process goes down
    because of `original_reason`, this will get decorated as `{:zen_monitor, original_reason}`
    when delivered by ZenMonitor. This allows the receiver to differentiate `:DOWN` messages
    originating from `ZenMonitor.monitor/1` and those originating from `Process.monitor/1`.
    This is necessary when operating in mixed mode. It is the responsibility of the receiver to
    unwrap this reason if it requires the `original_reason` for some additional handling of the
    `:DOWN` message.
  """

  # Default module used for gen calls; overridable via application config.
  @gen_module GenServer

  @typedoc """
  `ZenMonitor.destination` are all the types that can be monitored.

  - `pid()` either local or remote
  - `{name, node}` represents a named process on the given node
  - `name :: atom()` is a named process on the local node
  """
  @type destination :: pid() | {name :: atom, node :: node()} | name :: atom()

  ## Delegates

  @doc """
  Delegate to `ZenMonitor.Local.compatibility/1`
  """
  defdelegate compatibility(target), to: ZenMonitor.Local

  @doc """
  Delegate to `ZenMonitor.Local.compatibility_for_node/1`
  """
  defdelegate compatibility_for_node(remote), to: ZenMonitor.Local

  @doc """
  Delegate to `ZenMonitor.Local.Connector.connect/1`
  """
  defdelegate connect(remote), to: ZenMonitor.Local.Connector

  @doc """
  Delegate to `ZenMonitor.Local.demonitor/2`
  """
  defdelegate demonitor(ref, options \\ []), to: ZenMonitor.Local

  @doc """
  Delegate to `ZenMonitor.Local.monitor/1`
  """
  defdelegate monitor(target), to: ZenMonitor.Local

  ## Client

  @doc """
  Get the module to use for gen calls from the Application Environment

  This module only needs to support `GenServer.call/3` and `GenServer.cast/3` functionality, see
  ZenMonitor's `@gen_module` for the default value

  This can be controlled at boot and runtime with the `{:zen_monitor, :gen_module}` setting, see
  `ZenMonitor.gen_module/1` for runtime convenience functionality.
  """
  @spec gen_module() :: atom
  def gen_module do
    Application.get_env(:zen_monitor, :gen_module, @gen_module)
  end

  @doc """
  Put the module to use for gen calls into the Application Environment

  This is a simple convenience function for overwriting the `{:zen_monitor, :gen_module}` setting
  at runtime.
  """
  @spec gen_module(value :: atom) :: :ok
  def gen_module(value) do
    Application.put_env(:zen_monitor, :gen_module, value)
  end

  @doc """
  Get the current monotonic time in milliseconds

  This is a helper because `System.monotonic_time(:millisecond)` is long and error-prone to
  type in multiple call sites.

  See `System.monotonic_time/1` for more information.
  """
  @spec now() :: integer
  def now do
    # FIX: :millisecond — the plural :milliseconds time unit is deprecated.
    System.monotonic_time(:millisecond)
  end

  @doc """
  Find the node for a destination.
  """
  @spec find_node(target :: destination) :: node()
  def find_node(pid) when is_pid(pid), do: node(pid)
  def find_node({_, node}), do: node
  def find_node(_), do: Node.self()
end
| 41.826087 | 98 | 0.717256 |
08d29281518e4c33428d798b33352c6d2a527f4d | 291 | exs | Elixir | year_2021/test/day_14_test.exs | bschmeck/advent_of_code | cbec98019c6c00444e0f4c7e15e01b1ed9ae6145 | [
"MIT"
] | null | null | null | year_2021/test/day_14_test.exs | bschmeck/advent_of_code | cbec98019c6c00444e0f4c7e15e01b1ed9ae6145 | [
"MIT"
] | null | null | null | year_2021/test/day_14_test.exs | bschmeck/advent_of_code | cbec98019c6c00444e0f4c7e15e01b1ed9ae6145 | [
"MIT"
defmodule Day14Test do
  use ExUnit.Case, async: true

  # Expected values come from the shared sample input file.
  test "it computes the difference after 10 steps" do
    result = Day14.part_one(InputTestFile)
    assert result == 1588
  end

  test "it computes the difference after 40 steps" do
    result = Day14.part_two(InputTestFile)
    assert result == 2_188_189_693_529
  end
end
| 24.25 | 61 | 0.749141 |
08d2a136ccde309d2f18f7437bc58ec13dc99591 | 1,260 | ex | Elixir | test/support/conn_case.ex | kadmohardy/calculator | 671e0d6c6c8d7ef7fba75b7f47a76aaeb29eaf7e | [
"MIT"
] | null | null | null | test/support/conn_case.ex | kadmohardy/calculator | 671e0d6c6c8d7ef7fba75b7f47a76aaeb29eaf7e | [
"MIT"
] | null | null | null | test/support/conn_case.ex | kadmohardy/calculator | 671e0d6c6c8d7ef7fba75b7f47a76aaeb29eaf7e | [
"MIT"
defmodule CalculatorWeb.ConnCase do
  @moduledoc """
  This module defines the test case to be used by
  tests that require setting up a connection.

  Such tests rely on `Phoenix.ConnTest` and also
  import other functionality to make it easier
  to build common data structures and query the data layer.

  Finally, if the test case interacts with the database,
  we enable the SQL sandbox, so changes done to the database
  are reverted at the end of every test. If you are using
  PostgreSQL, you can even run database tests asynchronously
  by setting `use CalculatorWeb.ConnCase, async: true`, although
  this option is not recommended for other databases.
  """

  use ExUnit.CaseTemplate

  alias Ecto.Adapters.SQL.Sandbox

  # Code injected into every test module that `use`s this case.
  using do
    quote do
      # Import conveniences for testing with connections
      import Plug.Conn
      import Phoenix.ConnTest
      import CalculatorWeb.ConnCase

      alias CalculatorWeb.Router.Helpers, as: Routes

      # The default endpoint for testing
      @endpoint CalculatorWeb.Endpoint
    end
  end

  # Checks out a sandboxed DB connection per test; for non-async tests the
  # connection is put in shared mode so processes spawned by the test can
  # also use it. Returns a fresh conn in the test context.
  setup tags do
    :ok = Sandbox.checkout(Calculator.Repo)

    unless tags[:async] do
      Sandbox.mode(Calculator.Repo, {:shared, self()})
    end

    {:ok, conn: Phoenix.ConnTest.build_conn()}
  end
end
| 28 | 64 | 0.727778 |
08d2b19160ef9ed7cc93b71604a8289430c634d4 | 1,087 | ex | Elixir | lib/queries/table_cache_hit.ex | blockfi/ecto_psql_extras | 27602dfb9d508b8ad3a904b1962d8a837128b0c7 | [
"MIT"
] | 278 | 2020-08-04T12:38:29.000Z | 2022-03-25T15:14:28.000Z | lib/queries/table_cache_hit.ex | blockfi/ecto_psql_extras | 27602dfb9d508b8ad3a904b1962d8a837128b0c7 | [
"MIT"
] | 21 | 2020-09-27T14:38:21.000Z | 2021-11-08T22:44:46.000Z | lib/queries/table_cache_hit.ex | blockfi/ecto_psql_extras | 27602dfb9d508b8ad3a904b1962d8a837128b0c7 | [
"MIT"
defmodule EctoPSQLExtras.TableCacheHit do
  # Per-table buffer-cache hit statistics, read from pg_statio_user_tables.
  @behaviour EctoPSQLExtras

  # Result columns, in the order the query produces them.
  @columns [
    %{name: :schema, type: :string},
    %{name: :name, type: :string},
    %{name: :buffer_hits, type: :integer},
    %{name: :block_reads, type: :integer},
    %{name: :total_read, type: :integer},
    %{name: :ratio, type: :numeric}
  ]

  # Metadata describing the query: title, default ordering, column types.
  def info do
    %{
      title: "Calculates your cache hit rate for reading tables",
      order_by: [ratio: :desc],
      columns: @columns
    }
  end

  # Raw SQL for the report; the query takes no options, hence the ignored arg.
  def query(_args \\ []) do
    """
    /* ECTO_PSQL_EXTRAS: Calculates your cache hit rate for reading tables */
    SELECT
      schemaname AS schema, relname AS name,
      heap_blks_hit AS buffer_hits,
      heap_blks_read AS block_reads,
      heap_blks_hit + heap_blks_read AS total_read,
      CASE (heap_blks_hit + heap_blks_read)::float
      WHEN 0 THEN NULL
      ELSE (heap_blks_hit / (heap_blks_hit + heap_blks_read)::float)
      END ratio
    FROM
      pg_statio_user_tables
    ORDER BY
      heap_blks_hit / (heap_blks_hit + heap_blks_read + 1)::float DESC;
    """
  end
end
| 27.871795 | 77 | 0.620975 |
08d2b550ac8de17ae5579ef1e099588c1addb367 | 1,125 | exs | Elixir | lib/elixir/test/elixir/kernel/case_test.exs | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | 1 | 2015-11-12T19:23:45.000Z | 2015-11-12T19:23:45.000Z | lib/elixir/test/elixir/kernel/case_test.exs | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/kernel/case_test.exs | guilleiguaran/elixir | 952052869ff7af0e293d2a7160b1aebc68fc46be | [
"Apache-2.0"
] | null | null | null | Code.require_file "../test_helper.exs", __DIR__
defmodule Kernel.CaseTest do
  use ExUnit.Case, async: true

  # `case` used inline with the keyword `do:` syntax.
  test :inline_case do
    assert (case 1, do: (1 -> :ok; 2 -> :wrong)) == :ok
  end

  # NOTE(review): depends on historical Elixir scoping where rebindings inside
  # `case` branches were visible after the expression — confirm against the
  # Elixir version this suite targets before changing `vars_case/2`.
  test :nested_variables do
    assert vars_case(400, 1) == { 400, 1 }
    assert vars_case(401, 1) == { 400, -1 }
    assert vars_case(0, -1) == { 0, -1 }
    assert vars_case(-1, -1) == { 0, 1 }
  end

  # Matching nested tuples, rebinding `right` in an inner case.
  test :nested_vars_match do
    x = { :error, { :ok, :done } }
    assert (case x do
      { :ok, right } ->
        right
      { _left, right } ->
        case right do
          { :ok, right } -> right
        end
    end) == :done
  end

  test :in_operator_outside_case do
    x = 1
    y = 4
    assert x in [1, 2, 3], "in assertion"
    assert not y in [1, 2, 3], "not in assertion"
  end

  # 1.0 is not strictly equal to the integer 1, so it is not a member.
  test :in_with_match do
    refute 1.0 in [1, 2, 3], "not in assertion"
  end

  # Clamps x into [0, 400] and flips vx's sign when clamping happens; the
  # branch-local rebinding here is what :nested_variables exercises.
  defp vars_case(x, vx) do
    case x > 400 do
      true ->
        x = 400
        vx = -vx
      _ ->
        case x < 0 do
          true ->
            x = 0
            vx = -vx
          _ -> nil
        end
    end
    {x, vx}
  end
end
| 20.089286 | 55 | 0.496889 |
08d2d93bc0da3badde601d5b15bbe8b792e3d7cb | 4,710 | ex | Elixir | lib/makeup/styles/html/style.ex | tmbb/makeup | 8bdc3b3de49e06a23fa6977e4c84c3be0f0d1d32 | [
"BSD-2-Clause"
] | 115 | 2017-08-01T15:13:13.000Z | 2020-08-20T13:29:38.000Z | lib/makeup/styles/html/style.ex | tmbb/makeup | 8bdc3b3de49e06a23fa6977e4c84c3be0f0d1d32 | [
"BSD-2-Clause"
] | 24 | 2017-08-01T13:33:47.000Z | 2020-09-25T10:12:58.000Z | lib/makeup/styles/html/style.ex | tmbb/makeup | 8bdc3b3de49e06a23fa6977e4c84c3be0f0d1d32 | [
"BSD-2-Clause"
defmodule Makeup.Styles.HTML.Style do
  @moduledoc """
  The style struct.

  Holds a style's names, its page-level colors and a flat map of
  per-token-type `TokenStyle`s, and renders the whole thing to CSS.
  """

  defstruct long_name: "",
            short_name: "",
            background_color: "#ffffff",
            highlight_color: "#ffffcc",
            styles: []

  alias Makeup.Styles.HTML.TokenStyle
  require Makeup.Token.Utils
  alias Makeup.Token.Utils

  defp handle_inheritance(style_map) do
    # Handles inheritance between styles.
    # This is automatic in Pygments' design, because they use class inheritance for tokens.
    # We don't have class inheritance in elixir, so we must have something else.
    # Here, we use a manually built hierarchy to fake inheritance.
    #
    # In any case, the goal is to have flat tokens at runtime.
    # This function is only called at compile time.
    Enum.reduce(Utils.precedence(), style_map, fn {parent_key, child_keys}, style_map ->
      parent_style = style_map[parent_key]
      Enum.reduce(child_keys, style_map, fn child_key, style_map ->
        child_style = style_map[child_key]
        # Child attributes win; nil/false child values fall back to the parent's.
        Map.put(
          style_map,
          child_key,
          Map.merge(
            parent_style,
            child_style,
            fn _k, v1, v2 -> v2 || v1 end
          )
        )
      end)
    end)
  end

  require EEx

  # Compile-time generated private `render_css/5`: renders the stylesheet for a
  # highlight class from the page colors, the base text style and the
  # per-token styles. Trailing backslashes suppress newlines in the output.
  EEx.function_from_string(
    :defp,
    :render_css,
    """
    .<%= highlight_class %> .hll {background-color: <%= highlight_color %>}
    .<%= highlight_class %> {\
    <%= if token_text.color do %>color: <%= token_text.color %>; <% end %>\
    <%= if token_text.font_style do %>font-style: <%= token_text.font_style %>; <% end %>\
    <%= if token_text.font_weight do %>font-weight: <%= token_text.font_weight %>; <% end %>\
    <%= if token_text.border do %>border: <%= token_text.border %>; <% end %>\
    <%= if token_text.text_decoration do %>text-decoration: <%= token_text.text_decoration %>; <% end %>\
    <%= if background_color do %>background-color: <%= background_color %><% end %>}\
    .<%= highlight_class %> .unselectable {
      -webkit-touch-callout: none;
      -webkit-user-select: none;
      -khtml-user-select: none;
      -moz-user-select: none;
      -ms-user-select: none;
      user-select: none;
    }
    <%= for {css_class, token_style, token_type} <- styles do %>
    .<%= highlight_class %> .<%= css_class %> {\
    <%= if token_style.color do %>color: <%= token_style.color %>; <% end %>\
    <%= if token_style.font_style do %>font-style: <%= token_style.font_style %>; <% end %>\
    <%= if token_style.font_weight do %>font-weight: <%= token_style.font_weight %>; <% end %>\
    <%= if token_style.border do %>border: <%= token_style.border %>; <% end %>\
    <%= if token_style.text_decoration do %>text-decoration: <%= token_style.text_decoration %>; <% end %>\
    <%= if token_style.background_color do %>background-color: <%= token_style.background_color %>; <% end %>\
    } /* :<%= Atom.to_string(token_type) %> */\
    <% end %>
    """,
    [:highlight_class, :highlight_color, :background_color, :token_text, :styles]
  )

  @doc """
  Generate a stylesheet for a style.
  """
  def stylesheet(style, css_class \\ "highlight") do
    # Build sorted {css_class, token_style, token_type} triples for every
    # non-empty token style; :text is the base style, handled separately.
    token_styles =
      style.styles
      |> Map.delete(:text)
      |> Enum.into([])
      |> Enum.map(fn {token_type, token_style} ->
        css_class = Makeup.Token.Utils.css_class_for_token_type(token_type)
        {css_class, token_style, token_type}
      end)
      |> Enum.filter(fn {_, token_style, _} ->
        Makeup.Styles.HTML.TokenStyle.not_empty?(token_style)
      end)
      |> Enum.sort()
    token_text = style.styles[:text]
    render_css(
      css_class,
      style.highlight_color,
      style.background_color,
      token_text,
      token_styles
    )
  end

  @doc """
  Creates a new style.
  Takes care of unspecified token types and inheritance.
  Writes and caches a CSS stylesheet for the style.
  """
  def make_style(options \\ []) do
    short_name = Keyword.fetch!(options, :short_name)
    long_name = Keyword.fetch!(options, :long_name)
    background_color = Keyword.fetch!(options, :background_color)
    highlight_color = Keyword.fetch!(options, :highlight_color)
    incomplete_style_map = Keyword.fetch!(options, :styles)
    # Fill every standard token type with an empty style, parse the supplied
    # style strings, then resolve parent/child inheritance.
    complete_style_map =
      Utils.standard_token_types()
      |> Enum.map(fn k -> {k, ""} end)
      |> Enum.into(%{})
      |> Map.merge(incomplete_style_map)
      |> Enum.map(fn {k, v} -> {k, TokenStyle.from_string(v)} end)
      |> Enum.into(%{})
      |> handle_inheritance
    %__MODULE__{
      long_name: long_name,
      short_name: short_name,
      background_color: background_color,
      highlight_color: highlight_color,
      styles: complete_style_map
    }
  end
end
| 34.130435 | 110 | 0.623779 |
08d32c75a0e06a9e9c9070614745b83fa31b5234 | 2,204 | ex | Elixir | lib/phoenix_live_view/socket.ex | gjacobrobertson/phoenix_live_view | 459fd4939126edeaad673277791d460028bdb102 | [
"MIT"
] | 1 | 2021-03-16T17:01:33.000Z | 2021-03-16T17:01:33.000Z | lib/phoenix_live_view/socket.ex | gjacobrobertson/phoenix_live_view | 459fd4939126edeaad673277791d460028bdb102 | [
"MIT"
] | null | null | null | lib/phoenix_live_view/socket.ex | gjacobrobertson/phoenix_live_view | 459fd4939126edeaad673277791d460028bdb102 | [
"MIT"
defmodule Phoenix.LiveView.Socket.AssignsNotInSocket do
  @moduledoc """
  Struct for socket.assigns while rendering.

  The socket assigns are available directly inside the template
  as LiveEEx `assigns`, such as `@foo` and `@bar`. Any assign access
  should be done using the assigns in the template where proper change
  tracking takes place.
  """

  defstruct [:__assigns__]

  @type t :: %__MODULE__{}

  # Opaque Inspect output so the hidden assigns are never printed.
  defimpl Inspect do
    def inspect(_struct, _opts), do: "#Phoenix.LiveView.Socket.AssignsNotInSocket<>"
  end
end
defmodule Phoenix.LiveView.Socket do
  @moduledoc """
  The LiveView socket for Phoenix Endpoints.
  """
  use Phoenix.Socket

  require Logger

  # `@derive {Inspect, only: ...}` requires Elixir >= 1.8, so the derivation is
  # guarded at compile time; it limits Inspect output to these fields.
  if Version.match?(System.version(), ">= 1.8.0") do
    @derive {Inspect,
             only: [:id, :endpoint, :router, :view, :parent_pid, :root_pid, :assigns, :changed]}
  end

  defstruct id: nil,
            endpoint: nil,
            view: nil,
            root_view: nil,
            parent_pid: nil,
            root_pid: nil,
            router: nil,
            assigns: %{},
            changed: %{},
            private: %{},
            fingerprints: Phoenix.LiveView.Diff.new_fingerprints(),
            redirected: nil,
            host_uri: nil,
            connected?: false

  @type assigns :: map | Phoenix.LiveView.Socket.AssignsNotInSocket.t()
  @type fingerprints :: {nil, map} | {binary, map}

  @type t :: %__MODULE__{
          id: binary(),
          endpoint: module(),
          view: module(),
          root_view: module(),
          parent_pid: nil | pid(),
          root_pid: pid(),
          router: module(),
          assigns: assigns,
          changed: map(),
          private: map(),
          fingerprints: fingerprints,
          redirected: nil | tuple(),
          host_uri: URI.t(),
          connected?: boolean()
        }

  # All LiveView topics are routed to the LiveView channel.
  channel "lv:*", Phoenix.LiveView.Channel

  @doc """
  Connects the Phoenix.Socket for a LiveView client.
  """
  @impl Phoenix.Socket
  def connect(_params, %Phoenix.Socket{} = socket, connect_info) do
    # Stash connect_info so id/1 (and LiveViews) can read session data later.
    {:ok, put_in(socket.private[:connect_info], connect_info)}
  end

  @doc """
  Identifies the Phoenix.Socket for a LiveView client.
  """
  @impl Phoenix.Socket
  def id(socket), do: socket.private.connect_info[:session]["live_socket_id"]
end
| 26.238095 | 96 | 0.627042 |
08d334fb9bc118306d609d66fe256890d15feee0 | 1,868 | exs | Elixir | exercises/simple-linked-list/example.exs | darktef/elixir-exercism | bcaae351486b1405f0a01cd33b4d39555546298e | [
"MIT"
] | null | null | null | exercises/simple-linked-list/example.exs | darktef/elixir-exercism | bcaae351486b1405f0a01cd33b4d39555546298e | [
"MIT"
] | null | null | null | exercises/simple-linked-list/example.exs | darktef/elixir-exercism | bcaae351486b1405f0a01cd33b4d39555546298e | [
"MIT"
defmodule LinkedList do
  @opaque t :: tuple()

  @doc """
  Construct a new LinkedList
  """
  @spec new() :: t
  def new(), do: {}

  @doc """
  Push an item onto a LinkedList
  """
  @spec push(t, any()) :: t
  def push(list, elem), do: {elem, list}

  @doc """
  Calculate the length of a LinkedList
  """
  @spec length(t) :: non_neg_integer()
  def length(list), do: do_length(list, 0)

  # Tail-recursive counter over the {head, tail} cells.
  defp do_length({}, acc), do: acc
  defp do_length({_head, tail}, acc), do: do_length(tail, acc + 1)

  @doc """
  Determine if a LinkedList is empty
  """
  @spec empty?(t) :: boolean()
  def empty?(list), do: list == {}

  @doc """
  Get the value of a head of the LinkedList
  """
  @spec peek(t) :: {:ok, any()} | {:error, :empty_list}
  def peek({value, _rest}), do: {:ok, value}
  def peek({}), do: {:error, :empty_list}

  @doc """
  Get tail of a LinkedList
  """
  @spec tail(t) :: {:ok, t} | {:error, :empty_list}
  def tail({_value, rest}), do: {:ok, rest}
  def tail({}), do: {:error, :empty_list}

  @doc """
  Remove the head from a LinkedList
  """
  @spec pop(t) :: {:ok, any(), t} | {:error, :empty_list}
  def pop({value, rest}), do: {:ok, value, rest}
  def pop({}), do: {:error, :empty_list}

  @doc """
  Construct a LinkedList from a stdlib List
  """
  @spec from_list(list()) :: t
  def from_list(list) do
    # Fold from the right so the first list element ends up at the head.
    list
    |> Enum.reverse()
    |> Enum.reduce(new(), fn elem, acc -> push(acc, elem) end)
  end

  @doc """
  Construct a stdlib List LinkedList from a LinkedList
  """
  @spec to_list(t) :: list()
  def to_list(list) do
    list
    |> collect([])
    |> Enum.reverse()
  end

  defp collect({}, acc), do: acc
  defp collect({head, rest}, acc), do: collect(rest, [head | acc])

  @doc """
  Reverse a LinkedList
  """
  @spec reverse(t) :: t
  def reverse(list), do: do_reverse(list, new())

  def do_reverse({}, acc), do: acc
  def do_reverse({head, rest}, acc), do: do_reverse(rest, push(acc, head))
end
| 21.471264 | 62 | 0.569058 |
08d346fb1b5300917633630107c8d77045787cce | 4,808 | exs | Elixir | test/core_contract_test.exs | aeternity/aepp-sdk-elixir | a001b0eb264665623c9b05de25a71d1f13990679 | [
"0BSD"
] | 19 | 2019-04-16T07:27:53.000Z | 2022-01-22T21:35:02.000Z | test/core_contract_test.exs | aeternity/aepp-sdk-elixir | a001b0eb264665623c9b05de25a71d1f13990679 | [
"0BSD"
] | 131 | 2019-04-05T13:01:37.000Z | 2020-07-09T14:53:34.000Z | test/core_contract_test.exs | aeternity/aepp-sdk-elixir | a001b0eb264665623c9b05de25a71d1f13990679 | [
"0BSD"
defmodule CoreContractTest do
  # Integration tests for AeppSDK.Contract: deployment, on-chain calls,
  # dry-run (static) calls and return-value decoding against a test node.
  use ExUnit.Case

  alias AeppSDK.{Account, Client, Contract, Utils.Keys}

  setup_all do
    # Loads shared helpers and returns the fixture map (client, source_code, ...).
    Code.require_file("test_utils.ex", "test/")
    TestUtils.get_test_data()
  end

  @tag :travis_test
  test "create, call, call static and decode contract", setup_data do
    deploy_result =
      Contract.deploy(
        setup_data.client,
        setup_data.source_code,
        ["42"]
      )

    assert match?({:ok, _}, deploy_result)
    {:ok, %{contract_id: ct_address}} = deploy_result

    on_chain_call_result =
      Contract.call(
        setup_data.client,
        ct_address,
        setup_data.source_code,
        "add_to_number",
        ["33"]
      )

    assert match?({:ok, %{return_value: _, return_type: "ok"}}, on_chain_call_result)
    # On-chain calls are expected to emit at least one log entry.
    refute on_chain_call_result |> elem(1) |> Map.get(:log) |> Enum.empty?()

    static_call_result =
      Contract.call(
        setup_data.client,
        ct_address,
        setup_data.source_code,
        "get_number",
        [],
        fee: 10_000_000_000_000_000
      )

    assert match?({:ok, %{return_value: _, return_type: "ok"}}, static_call_result)
    {:ok, %{return_value: data, return_type: "ok"}} = on_chain_call_result

    # The hard-coded cb_... blob must decode to the same int the call returned.
    assert {:ok, data} ==
             Contract.decode_return_value(
               "int",
               "cb_AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEvrXnzA",
               "ok"
             )

    # Static calls should also work for accounts with (almost) no balance.
    %{public: low_balance_public_key} = low_balance_keypair = Keys.generate_keypair()
    Account.spend(setup_data.client, low_balance_public_key, 1)

    static_call_result_1 =
      Contract.call(
        %Client{setup_data.client | keypair: low_balance_keypair},
        ct_address,
        setup_data.source_code,
        "get_number",
        [],
        fee: 10_000_000_000_000_000
      )

    assert match?({:ok, %{return_value: _, return_type: "ok"}}, static_call_result_1)
    # ...and even for keypairs that don't exist on chain at all.
    non_existing_keypair = Keys.generate_keypair()

    static_call_result_2 =
      Contract.call(
        %Client{setup_data.client | keypair: non_existing_keypair},
        ct_address,
        setup_data.source_code,
        "get_number",
        [],
        fee: 10_000_000_000_000_000
      )

    assert match?({:ok, %{return_value: _, return_type: "ok"}}, static_call_result_2)
  end

  @tag :travis_test
  test "create invalid contract", setup_data do
    # Corrupt the contract source so compilation/deployment must fail.
    invalid_source_code = String.replace(setup_data.source_code, "x : int", "x : list(int)")
    deploy_result = Contract.deploy(setup_data.client, invalid_source_code, ["42"])
    assert match?({:error, _}, deploy_result)
  end

  @tag :travis_test
  test "call non-existent function", setup_data do
    deploy_result =
      Contract.deploy(
        setup_data.client,
        setup_data.source_code,
        ["42"]
      )

    assert match?({:ok, _}, deploy_result)
    {:ok, %{contract_id: ct_address}} = deploy_result

    on_chain_call_result =
      Contract.call(
        setup_data.client,
        ct_address,
        setup_data.source_code,
        "non_existing_function",
        ["33"]
      )

    assert match?({:error, "Undefined function non_existing_function"}, on_chain_call_result)
  end

  @tag :travis_test
  test "call static non-existent function", setup_data do
    deploy_result =
      Contract.deploy(
        setup_data.client,
        setup_data.source_code,
        ["42"]
      )

    assert match?({:ok, _}, deploy_result)
    {:ok, %{contract_id: ct_address}} = deploy_result

    static_call_result =
      Contract.call(
        setup_data.client,
        ct_address,
        setup_data.source_code,
        "non_existing_function",
        ["33"],
        fee: 10_000_000_000_000_000
      )

    assert match?({:error, "Undefined function non_existing_function"}, static_call_result)
  end

  @tag :travis_test
  test "decode data wrong type", setup_data do
    deploy_result =
      Contract.deploy(
        setup_data.client,
        setup_data.source_code,
        ["42"]
      )

    assert match?({:ok, _}, deploy_result)
    {:ok, %{contract_id: ct_address}} = deploy_result

    on_chain_call_result =
      Contract.call(
        setup_data.client,
        ct_address,
        setup_data.source_code,
        "add_to_number",
        ["33"]
      )

    assert match?({:ok, %{return_value: _, return_type: "ok"}}, on_chain_call_result)
    {:ok, %{return_value: _, return_type: "ok"}} = on_chain_call_result

    # Decoding an int payload as list(int) must fail with a badmatch on the raw bytes.
    assert {:error,
            {:badmatch,
             <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
               0, 0, 0,
               75>>}} ==
             Contract.decode_return_value(
               "list(int)",
               "cb_AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEvrXnzA",
               "ok"
             )
  end
end
| 25.849462 | 98 | 0.607529 |
08d35b2d6a5c974be2c57fb19fa53b3290068a6c | 54 | ex | Elixir | Elixir/elixirexamples/lib/json.ex | kujua/erlang-elixir-imperative-bookcompanion | 7bc9f033bacd0f8744ec6bcee3932794d594fe69 | [
"Apache-2.0"
] | 8 | 2016-08-14T12:35:16.000Z | 2021-01-26T04:05:31.000Z | Elixir/elixirexamples/lib/json.ex | kujua/erlang-elixir-imperative-bookcompanion | 7bc9f033bacd0f8744ec6bcee3932794d594fe69 | [
"Apache-2.0"
] | null | null | null | Elixir/elixirexamples/lib/json.ex | kujua/erlang-elixir-imperative-bookcompanion | 7bc9f033bacd0f8744ec6bcee3932794d594fe69 | [
"Apache-2.0"
defmodule Json do
  @moduledoc false

  # Placeholder encoder: currently returns its argument unchanged.
  def to_json(value) do
    value
  end
end
| 9 | 19 | 0.648148 |
08d368fc6043fb24b0e78c33d32d2abd72b17e1f | 738 | ex | Elixir | lib/littlechat/room.ex | petermm/littlechat | b8672165ab5e6efd0d501f291de682a40b37a7b7 | [
"MIT"
] | 166 | 2020-07-15T14:47:19.000Z | 2022-03-25T03:57:35.000Z | lib/littlechat/room.ex | Jurshsmith/littlechat | 50fac2f907abbfcd574d31b4d4bdad7e51302da7 | [
"MIT"
] | 12 | 2020-07-01T23:32:47.000Z | 2021-03-18T21:21:28.000Z | lib/littlechat/room.ex | Jurshsmith/littlechat | 50fac2f907abbfcd574d31b4d4bdad7e51302da7 | [
"MIT"
defmodule Littlechat.Room do
  @moduledoc """
  Schema for creating video chat rooms.
  """

  use Ecto.Schema
  import Ecto.Changeset

  schema "rooms" do
    field :title, :string
    field :slug, :string
    # presumably tracks whether a call is in progress — confirm with callers
    field :active_call, :boolean

    timestamps()
  end

  # All castable fields; only :title and :slug are required below.
  @fields [:title, :slug, :active_call]

  @doc """
  Builds a changeset for a room: requires title and slug, normalizes the
  slug into a URL-friendly form, and enforces slug uniqueness (relies on a
  database unique index).
  """
  def changeset(room, attrs) do
    room
    |> cast(attrs, @fields)
    |> validate_required([:title, :slug])
    |> format_slug()
    |> unique_constraint(:slug)
  end

  # Only runs when :slug actually changed: lowercase, spaces become dashes.
  defp format_slug(%Ecto.Changeset{changes: %{slug: _}} = changeset) do
    changeset
    |> update_change(:slug, fn slug ->
      slug
      |> String.downcase()
      |> String.replace(" ", "-")
    end)
  end

  # No slug change in this changeset — nothing to normalize.
  defp format_slug(changeset), do: changeset
end
| 19.945946 | 71 | 0.628726 |
08d3ad3c5b1e9cf8b7f77b989465091203f27c94 | 1,645 | ex | Elixir | apps/firestorm_data/lib/firestorm_data/commands/follow_thread.ex | CircleCI-Public/firestorm | 9ca2c46a2b2377370347ad94d6003eeb77be38d6 | [
"MIT"
] | 10 | 2017-06-28T08:06:52.000Z | 2022-03-19T17:49:21.000Z | apps/firestorm_data/lib/firestorm_data/commands/follow_thread.ex | CircleCI-Public/firestorm | 9ca2c46a2b2377370347ad94d6003eeb77be38d6 | [
"MIT"
] | null | null | null | apps/firestorm_data/lib/firestorm_data/commands/follow_thread.ex | CircleCI-Public/firestorm | 9ca2c46a2b2377370347ad94d6003eeb77be38d6 | [
"MIT"
] | 2 | 2017-10-21T12:01:02.000Z | 2021-01-29T10:26:22.000Z | defmodule FirestormData.Commands.FollowThread do
@moduledoc """
A command to follow a `Thread`. This marks the `Thread` as followed by the `User`.
"""
use FirestormData.Command
alias FirestormData.Follow
embedded_schema do
field :user_id, :integer
field :thread_id, :integer
end
@required_fields ~w(user_id thread_id)a
@optional_fields ~w()a
def changeset(record, params \\ %{}) do
record
|> cast(params, @required_fields ++ @optional_fields)
|> validate_required(@required_fields)
end
# Imagine we've extracted this to something like `Firestorm.run`
# and it can handle all of our commands.
#
# For now we'll just put them in each command til I figure it out :)
def run(changeset) do
case changeset.valid? do
true ->
%{user_id: user_id, thread_id: thread_id} =
changeset
|> apply_changes
thread = Repo.get(Thread, thread_id)
user = Repo.get(User, user_id)
if Followable.followed_by?(thread, user) do
{:error, Changeset.add_error(changeset, :user_id, "User already follows this thread")}
else
thread
|> Ecto.build_assoc(:follows, %{user_id: user_id})
|> Follow.changeset(%{})
|> Repo.insert
|> handle_result(changeset)
end
false ->
{:error, changeset}
end
end
def handle_result({:ok, follow}, _changeset) do
{:ok, follow.id}
end
def handle_result({:error, changes}, changeset) do
# need to do better than this
{:error, Changeset.add_error(changeset, :user_id, "There was an error", changes.errors)}
end
end
| 27.881356 | 96 | 0.643161 |
08d3adb27485473196c8cf20c86c71c6cdb5241b | 378 | ex | Elixir | lib/photo_gallery/users/user.ex | rayrrr/fameliphotos | cc928abdc6d761d76113067432e9d6d0fcb2507b | [
"MIT"
] | 6 | 2019-10-08T01:30:08.000Z | 2020-10-01T04:49:33.000Z | lib/photo_gallery/users/user.ex | rayrrr/fameliphotos | cc928abdc6d761d76113067432e9d6d0fcb2507b | [
"MIT"
] | null | null | null | lib/photo_gallery/users/user.ex | rayrrr/fameliphotos | cc928abdc6d761d76113067432e9d6d0fcb2507b | [
"MIT"
] | 1 | 2019-11-08T22:58:31.000Z | 2019-11-08T22:58:31.000Z | defmodule PhotoGallery.Users.User do
  @moduledoc """
  Ecto schema for application users (the `users` table).

  Authentication behaviour is provided by Pow, extended with password
  reset and e-mail confirmation support.
  """
  use Ecto.Schema
  use Pow.Ecto.Schema

  use Pow.Extension.Ecto.Schema,
    extensions: [PowResetPassword, PowEmailConfirmation]

  schema "users" do
    # Pow-provided authentication fields (see Pow.Ecto.Schema).
    pow_user_fields()

    timestamps()
  end

  @doc """
  Builds a changeset for creating/updating a user, applying Pow's base
  changeset plus the changesets of the enabled extensions.
  """
  def changeset(user_or_changeset, attrs) do
    user_or_changeset
    |> pow_changeset(attrs)
    |> pow_extension_changeset(attrs)
  end
end
| 19.894737 | 56 | 0.73545 |
08d406f5d8fe183271037f10d8c7996587bdb9b0 | 5,670 | ex | Elixir | lib/brotorift/binary.ex | CDR2003/BrotoriftElixir | 03a5aa83c0745e0ad53c38c24508078af5a8008c | [
"MIT"
] | 2 | 2018-05-30T13:05:55.000Z | 2018-12-12T03:54:05.000Z | lib/brotorift/binary.ex | CDR2003/BrotoriftElixir | 03a5aa83c0745e0ad53c38c24508078af5a8008c | [
"MIT"
] | 1 | 2018-06-11T10:50:13.000Z | 2018-06-11T10:50:13.000Z | lib/brotorift/binary.ex | CDR2003/BrotoriftElixir | 03a5aa83c0745e0ad53c38c24508078af5a8008c | [
"MIT"
defmodule Brotorift.Binary do
  @moduledoc """
  Readers and writers for the primitive types of the Brotorift wire format.

  Every `read_*` function consumes its value from the front of a binary and
  returns `{rest, value}`. Every `write_*` function appends the little-endian
  encoding of a value to the binary `data` and returns the grown binary.
  """

  # -- Readers ---------------------------------------------------------------

  @doc "Reads a boolean stored as one byte; any non-zero byte is `true`."
  def read_bool(buffer) do
    {rest, byte} = read_byte(buffer)
    {rest, byte != 0}
  end

  @doc "Reads an unsigned 8-bit integer."
  def read_byte(buffer) do
    <<value::8-little-unsigned, rest::binary>> = buffer
    {rest, value}
  end

  @doc "Reads a signed 16-bit integer."
  def read_short(buffer) do
    <<value::16-little-signed, rest::binary>> = buffer
    {rest, value}
  end

  @doc "Reads a signed 32-bit integer."
  def read_int(buffer) do
    <<value::32-little-signed, rest::binary>> = buffer
    {rest, value}
  end

  @doc "Reads a signed 64-bit integer."
  def read_long(buffer) do
    <<value::64-little-signed, rest::binary>> = buffer
    {rest, value}
  end

  @doc "Reads an unsigned 16-bit integer."
  def read_ushort(buffer) do
    <<value::16-little-unsigned, rest::binary>> = buffer
    {rest, value}
  end

  @doc "Reads an unsigned 32-bit integer."
  def read_uint(buffer) do
    <<value::32-little-unsigned, rest::binary>> = buffer
    {rest, value}
  end

  @doc "Reads an unsigned 64-bit integer."
  def read_ulong(buffer) do
    <<value::64-little-unsigned, rest::binary>> = buffer
    {rest, value}
  end

  @doc "Reads a 32-bit IEEE-754 float."
  def read_float(buffer) do
    <<value::32-little-float, rest::binary>> = buffer
    {rest, value}
  end

  @doc "Reads a 64-bit IEEE-754 float."
  def read_double(buffer) do
    <<value::64-little-float, rest::binary>> = buffer
    {rest, value}
  end

  @doc "Reads a 32-bit-length-prefixed UTF-8 string."
  def read_string(buffer) do
    <<len::32-little, payload::binary-size(len), rest::binary>> = buffer
    {rest, payload}
  end

  @doc "Reads a Unix timestamp stored as a signed 32-bit integer."
  def read_datetime(buffer) do
    {rest, unix} = read_int(buffer)
    {:ok, datetime} = DateTime.from_unix(unix)
    {rest, datetime}
  end

  @doc "Reads a 32-bit-length-prefixed raw byte buffer."
  def read_byte_buffer(buffer) do
    <<len::32-little, payload::binary-size(len), rest::binary>> = buffer
    {rest, payload}
  end

  @doc "Reads a 32-bit element count followed by that many elements via `reader`."
  def read_list(buffer, reader) do
    <<count::32-little, rest::binary>> = buffer
    read_items(rest, reader, count, [])
  end

  # Accumulates `remaining` elements, reversing once at the end so the
  # returned list keeps wire order.
  defp read_items(buffer, _reader, 0, acc), do: {buffer, Enum.reverse(acc)}

  defp read_items(buffer, reader, remaining, acc) do
    {rest, item} = reader.(buffer)
    read_items(rest, reader, remaining - 1, [item | acc])
  end

  @doc "Reads a list (see `read_list/2`) and converts it to a `MapSet`."
  def read_set(buffer, reader) do
    {rest, items} = read_list(buffer, reader)
    {rest, MapSet.new(items)}
  end

  @doc "Reads a 32-bit pair count followed by that many key/value pairs."
  def read_map(buffer, key_reader, value_reader) do
    <<count::32-little, rest::binary>> = buffer
    read_pairs(rest, key_reader, value_reader, count, %{})
  end

  defp read_pairs(buffer, _key_reader, _value_reader, 0, acc), do: {buffer, acc}

  defp read_pairs(buffer, key_reader, value_reader, remaining, acc) do
    {rest, key} = key_reader.(buffer)
    {rest, value} = value_reader.(rest)
    read_pairs(rest, key_reader, value_reader, remaining - 1, Map.put(acc, key, value))
  end

  @doc "Reads two 32-bit floats as an `{x, y}` tuple."
  def read_vector2(buffer) do
    {rest, x} = read_float(buffer)
    {rest, y} = read_float(rest)
    {rest, {x, y}}
  end

  @doc "Reads three 32-bit floats as an `{x, y, z}` tuple."
  def read_vector3(buffer) do
    {rest, x} = read_float(buffer)
    {rest, y} = read_float(rest)
    {rest, z} = read_float(rest)
    {rest, {x, y, z}}
  end

  @doc "Reads four 32-bit floats as an `{r, g, b, a}` tuple."
  def read_color(buffer) do
    {rest, r} = read_float(buffer)
    {rest, g} = read_float(rest)
    {rest, b} = read_float(rest)
    {rest, a} = read_float(rest)
    {rest, {r, g, b, a}}
  end

  # -- Writers ---------------------------------------------------------------

  @doc "Appends a boolean as one byte (`1` for truthy, `0` otherwise)."
  def write_bool(data, value), do: write_byte(data, if(value, do: 1, else: 0))

  @doc "Appends an unsigned 8-bit integer."
  def write_byte(data, value), do: <<data::binary, value::8-little-unsigned>>

  @doc "Appends a signed 16-bit integer."
  def write_short(data, value), do: <<data::binary, value::16-little-signed>>

  @doc "Appends a signed 32-bit integer."
  def write_int(data, value), do: <<data::binary, value::32-little-signed>>

  @doc "Appends a signed 64-bit integer."
  def write_long(data, value), do: <<data::binary, value::64-little-signed>>

  @doc "Appends an unsigned 16-bit integer."
  def write_ushort(data, value), do: <<data::binary, value::16-little-unsigned>>

  @doc "Appends an unsigned 32-bit integer."
  def write_uint(data, value), do: <<data::binary, value::32-little-unsigned>>

  @doc "Appends an unsigned 64-bit integer."
  def write_ulong(data, value), do: <<data::binary, value::64-little-unsigned>>

  @doc "Appends a 32-bit IEEE-754 float."
  def write_float(data, value), do: <<data::binary, value::32-little-float>>

  @doc "Appends a 64-bit IEEE-754 float."
  def write_double(data, value), do: <<data::binary, value::64-little-float>>

  @doc "Appends a 32-bit byte length followed by the UTF-8 string."
  def write_string(data, string),
    do: <<data::binary, byte_size(string)::32-little, string::binary>>

  @doc "Appends a `DateTime` as its Unix timestamp (signed 32-bit)."
  def write_datetime(data, datetime), do: write_int(data, DateTime.to_unix(datetime))

  @doc "Appends a 32-bit length followed by the raw bytes."
  def write_byte_buffer(data, bytes),
    do: <<data::binary, byte_size(bytes)::32-little, bytes::binary>>

  @doc "Appends a 32-bit element count, then each element via `writer`."
  def write_list(data, items, writer) do
    Enum.reduce(items, <<data::binary, length(items)::32-little>>, fn item, acc ->
      writer.(acc, item)
    end)
  end

  @doc "Appends a `MapSet` using the list encoding (see `write_list/3`)."
  def write_set(data, set, writer), do: write_list(data, MapSet.to_list(set), writer)

  @doc "Appends a 32-bit pair count, then each key and value."
  def write_map(data, map, key_writer, value_writer) do
    Enum.reduce(Map.to_list(map), <<data::binary, map_size(map)::32-little>>, fn {key, value}, acc ->
      acc
      |> key_writer.(key)
      |> value_writer.(value)
    end)
  end

  @doc "Appends an `{x, y}` tuple as two 32-bit floats."
  def write_vector2(data, {x, y}) do
    data
    |> write_float(x)
    |> write_float(y)
  end

  @doc "Appends an `{x, y, z}` tuple as three 32-bit floats."
  def write_vector3(data, {x, y, z}) do
    data
    |> write_float(x)
    |> write_float(y)
    |> write_float(z)
  end

  @doc "Appends an `{r, g, b, a}` tuple as four 32-bit floats."
  def write_color(data, {r, g, b, a}) do
    data
    |> write_float(r)
    |> write_float(g)
    |> write_float(b)
    |> write_float(a)
  end
end
| 24.025424 | 77 | 0.644621 |
08d42c42877bbc117ff774ce8af7a533115dead2 | 776 | exs | Elixir | test/option_parser_test.exs | NickNeck/prove | a3f48f8289f9a56dd57d22c58469227981c8ebb7 | [
"MIT"
] | null | null | null | test/option_parser_test.exs | NickNeck/prove | a3f48f8289f9a56dd57d22c58469227981c8ebb7 | [
"MIT"
] | 2 | 2021-08-01T07:15:41.000Z | 2021-08-09T05:40:30.000Z | test/option_parser_test.exs | hrzndhrn/prove | a3f48f8289f9a56dd57d22c58469227981c8ebb7 | [
"MIT"
] | null | null | null | defmodule OptionParserTest do
  @moduledoc """
  Example tests for `OptionParser.next/2` written with the `Prove`
  one-assertion-per-line helpers.
  """
  use ExUnit.Case
  import Prove

  describe "next" do
    # Shared parser configuration: one strict switch of each supported type.
    def config, do: [strict: [str: :string, int: :integer, bool: :boolean]]

    batch "with strict good options" do
      # A string switch consumes the following argument as its value.
      prove OptionParser.next(["--str", "hello", "..."], config()) ==
              {:ok, :str, "hello", ["..."]}

      prove OptionParser.next(["--int=13", "..."], config()) == {:ok, :int, 13, ["..."]}
      prove OptionParser.next(["--bool=false", "..."], config()) == {:ok, :bool, false, ["..."]}
      # The `--no-` prefix negates a boolean switch.
      prove OptionParser.next(["--no-bool", "..."], config()) == {:ok, :bool, false, ["..."]}
      prove OptionParser.next(["--bool", "..."], config()) == {:ok, :bool, true, ["..."]}
      # A non-switch argument is left for the caller.
      prove OptionParser.next(["..."], config()) == {:error, ["..."]}
    end
  end
end
| 36.952381 | 96 | 0.511598 |
08d46055adcb444f03d26e648f4f7b8a1c089474 | 1,402 | ex | Elixir | combo/phxexibee/lib/phxexibee_web/telemetry.ex | exineris/shp_challenge2021 | 2a5447af681259d0ea699b670cf079bd31315cd8 | [
"Apache-2.0"
] | null | null | null | combo/phxexibee/lib/phxexibee_web/telemetry.ex | exineris/shp_challenge2021 | 2a5447af681259d0ea699b670cf079bd31315cd8 | [
"Apache-2.0"
] | null | null | null | combo/phxexibee/lib/phxexibee_web/telemetry.ex | exineris/shp_challenge2021 | 2a5447af681259d0ea699b670cf079bd31315cd8 | [
"Apache-2.0"
] | null | null | null | defmodule PhxexibeeWeb.Telemetry do
  @moduledoc """
  Telemetry supervisor: starts the periodic measurement poller and
  declares the metrics exposed by this application.
  """
  use Supervisor
  import Telemetry.Metrics

  # Starts the supervisor under the given name.
  def start_link(arg) do
    Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
  end

  @impl true
  def init(_arg) do
    children = [
      # Telemetry poller will execute the given period measurements
      # every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
      {:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
      # Add reporters as children of your supervision tree.
      # {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
    ]

    Supervisor.init(children, strategy: :one_for_one)
  end

  @doc """
  Returns the metric definitions consumed by reporters (e.g. the
  LiveDashboard).
  """
  def metrics do
    [
      # Phoenix Metrics
      summary("phoenix.endpoint.stop.duration",
        unit: {:native, :millisecond}
      ),
      summary("phoenix.router_dispatch.stop.duration",
        tags: [:route],
        unit: {:native, :millisecond}
      ),

      # VM Metrics
      summary("vm.memory.total", unit: {:byte, :kilobyte}),
      summary("vm.total_run_queue_lengths.total"),
      summary("vm.total_run_queue_lengths.cpu"),
      summary("vm.total_run_queue_lengths.io")
    ]
  end

  # Measurements executed by the poller on every tick.
  defp periodic_measurements do
    [
      # A module, function and arguments to be invoked periodically.
      # This function must call :telemetry.execute/3 and a metric must be added above.
      # {PhxexibeeWeb, :count_users, []}
    ]
  end
end
| 28.612245 | 86 | 0.671184 |
08d47e47c110e27d4176ebed2a0859116571a613 | 2,742 | ex | Elixir | lib/straw_hat_review/reviews/reviews.ex | straw-hat-team/straw_hat_review | 342dbbfac0ac96287111babd59b5321efdd8728d | [
"MIT"
] | 11 | 2018-04-09T06:32:02.000Z | 2019-09-11T14:18:21.000Z | lib/straw_hat_review/reviews/reviews.ex | straw-hat-labs/straw_hat_review | 342dbbfac0ac96287111babd59b5321efdd8728d | [
"MIT"
] | 64 | 2018-03-30T06:21:49.000Z | 2019-11-01T13:57:34.000Z | lib/straw_hat_review/reviews/reviews.ex | straw-hat-labs/straw_hat_review | 342dbbfac0ac96287111babd59b5321efdd8728d | [
"MIT"
defmodule StrawHat.Review.Reviews do
  @moduledoc """
  Interactor module that defines all the functionality for Reviews management.
  """

  use StrawHat.Review.Interactor

  alias StrawHat.Review.Review

  @doc """
  Gets the list of reviews.
  """
  @spec get_reviews(Scrivener.Config.t() | keyword()) :: Scrivener.Page.t()
  def get_reviews(pagination \\ []), do: Repo.paginate(Review, pagination)

  @doc """
  Creates a review.
  """
  @spec create_review(Review.review_attrs()) :: {:ok, Review.t()} | {:error, Ecto.Changeset.t()}
  def create_review(review_attrs) do
    %Review{}
    |> Review.changeset(review_attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a review.
  """
  @spec update_review(Review.t(), Review.review_attrs()) ::
          {:ok, Review.t()} | {:error, Ecto.Changeset.t()}
  def update_review(%Review{} = review, review_attrs) do
    review
    |> Review.changeset(review_attrs)
    |> Repo.update()
  end

  @doc """
  Destroys a review.
  """
  @spec destroy_review(Review.t()) :: {:ok, Review.t()} | {:error, Ecto.Changeset.t()}
  def destroy_review(%Review{} = review), do: Repo.delete(review)

  @doc """
  Finds a review by `id`, returning a tagged error when it does not exist.
  """
  # FIX: the specs previously referenced `Integer.t()`, which is not a
  # defined type (`Integer` exports no `t/0`); the built-in `integer()`
  # keeps Dialyzer from reporting an unknown remote type.
  @spec find_review(integer()) :: {:ok, Review.t()} | {:error, Error.t()}
  def find_review(review_id) do
    review_id
    |> get_review()
    |> Response.from_value(
      Error.new(
        "straw_hat_review.review.not_found",
        metadata: [review_id: review_id]
      )
    )
  end

  @doc """
  Gets a review by `id`, with aspects and medias preloaded. Returns `nil`
  when not found.
  """
  @spec get_review(integer()) :: Review.t() | nil | no_return
  def get_review(review_id) do
    query =
      from(
        review in Review,
        where: review.id == ^review_id,
        preload: [aspects: :aspect],
        preload: [:medias]
      )

    Repo.one(query)
  end

  @doc """
  Gets the list of reviews matching the given ids.
  """
  @spec get_review_by_ids([integer()]) :: [Review.t()] | no_return
  def get_review_by_ids(review_ids) do
    query =
      from(
        review in Review,
        where: review.id in ^review_ids,
        preload: [aspects: :aspect],
        preload: [:medias]
      )

    Repo.all(query)
  end

  @doc """
  Gets the reviews for the given ids with their comments preloaded.
  """
  @spec get_comments([integer()]) :: [Review.t()] | no_return
  def get_comments(review_ids) do
    query =
      from(
        review in Review,
        where: review.id in ^review_ids,
        join: comments in assoc(review, :comments),
        preload: [comments: comments]
      )

    Repo.all(query)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking review changes.
  """
  @spec change_review(Review.t()) :: Ecto.Changeset.t()
  def change_review(%Review{} = review) do
    Review.changeset(review, %{})
  end
end
| 24.265487 | 96 | 0.607586 |
08d47f7cebad549a782a48307163001d4217fb02 | 886 | ex | Elixir | clients/connectors/lib/google_api/connectors/v1/metadata.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/connectors/lib/google_api/connectors/v1/metadata.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/connectors/lib/google_api/connectors/v1/metadata.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Connectors.V1 do
  @moduledoc """
  API client metadata for GoogleApi.Connectors.V1.
  """

  # Revision date of the Google discovery document this client tracks.
  @discovery_revision "20211106"

  @doc "Returns the discovery-document revision string for this client."
  def discovery_revision, do: @discovery_revision
end
| 32.814815 | 74 | 0.759594 |
08d49820b91947197828d52cd505cde59c6b2deb | 4,235 | ex | Elixir | apps/toniefy/lib/toniex_web/live/recorder_live.ex | benvp/toniefy.me | 9a432380e43f1c9a15ccd1910357d491d64a1dc0 | [
"MIT"
] | 17 | 2021-04-12T10:01:14.000Z | 2022-02-25T17:30:27.000Z | apps/toniefy/lib/toniex_web/live/recorder_live.ex | benvp/toniefy.me | 9a432380e43f1c9a15ccd1910357d491d64a1dc0 | [
"MIT"
] | 1 | 2022-03-09T09:35:00.000Z | 2022-03-09T11:46:14.000Z | apps/toniefy/lib/toniex_web/live/recorder_live.ex | benvp/toniefy | 9a432380e43f1c9a15ccd1910357d491d64a1dc0 | [
"MIT"
defmodule ToniexWeb.RecorderLive do
  @moduledoc """
  LiveView for starting a new recording from a pasted Spotify URL/URI.

  `mount/3` guards against states in which a recording may not be started
  (unfinished job, running session, no Spotify credential, playlist limit
  reached) and redirects back to the library with a flash message.
  """

  use ToniexWeb, :live_view

  require Logger

  alias Toniex.{Accounts, Recorder, Library}

  # Maximum number of playlists a user may keep in their library.
  @max_playlists 5

  def mount(_params, session, socket) do
    socket = assign_defaults(session, socket)

    has_active_jobs = Recorder.active_jobs(socket.assigns.current_user) |> Enum.count() > 0

    has_recording_sessions =
      Recorder.list_sessions(socket.assigns.current_user)
      |> Enum.count() > 0

    spotify_connected =
      !!Accounts.get_credential_by_provider(socket.assigns.current_user, :spotify)

    max_playlists_reached =
      Library.list_playlists(socket.assigns.current_user) |> Enum.count() >= @max_playlists

    # Branch order matters: unfinished work takes precedence over the
    # missing-Spotify and playlist-limit checks.
    cond do
      spotify_connected && has_active_jobs ->
        socket =
          socket
          |> put_flash(
            :info,
            "Bitte füge deine aktuelle Aufnahme zu deiner Bibliothek hinzu, bevor du eine neue Aufnahme startest."
          )
          |> push_redirect(to: Routes.library_index_path(socket, :index))

        {:ok, socket}

      spotify_connected && has_recording_sessions ->
        socket =
          socket
          |> put_flash(
            :info,
            "Bitte warte bis deine aktuelle Aufnahme beendet wurde bevor du eine neue Aufnahme startest."
          )
          |> push_redirect(to: Routes.library_index_path(socket, :index))

        {:ok, socket}

      !spotify_connected ->
        socket =
          socket
          |> put_flash(
            :info,
            "Bitte verbinde zuerst deinen Spotify account um eine Aufnahme zu starten."
          )
          |> push_redirect(to: Routes.library_index_path(socket, :index))

        {:ok, socket}

      max_playlists_reached ->
        socket =
          socket
          |> put_flash(
            :info,
            "Du hat das Maximum von 5 Playlisten erreicht. Bitte lösche zunächst eine Playlist."
          )
          |> push_redirect(to: Routes.library_index_path(socket, :index))

        {:ok, socket}

      true ->
        {:ok, socket}
    end
  end

  def handle_event("record", %{"recorder" => %{"uri" => uri}}, socket) do
    user = socket.assigns.current_user

    case Recorder.enqueue(user, uri) do
      {:ok, _job} ->
        socket = push_redirect(socket, to: Routes.library_index_path(socket, :index))
        {:noreply, socket}

      {:error, reason} ->
        # FIX: replaced the `IO.inspect(reason)` debugging leftover with a
        # proper Logger call so failures land in the configured log sink.
        Logger.error("Failed to enqueue recording: #{inspect(reason)}")

        socket =
          put_flash(
            socket,
            :error,
            get_error_message(reason)
          )

        {:noreply, socket}
    end
  end

  def handle_params(_params, uri, socket) do
    {:noreply, assign(socket, uri: URI.parse(uri))}
  end

  def render(assigns) do
    ~L"""
    <%= live_patch to: Routes.library_index_path(@socket, :index), class: "mb-4 link link__back" do %>
      <i class="fas fa-arrow-left"></i>
    <% end %>

    <div class="card">
      <h2 class="card__title">Neue Aufnahme starten</h2>
      <div class="card__body">
        <p>Bitte gib eine Spotify URL oder URI in das Textfeld ein. Dies kann der Link zu einem einzelnen Lied, einer Playlist oder einem Album sein.</p>
        <div class="mt-6">
          <%= f = form_for :recorder, "#", [phx_submit: :record] %>
            <%= text_input f, :uri, required: true, class: "input w-full text-xl py-3", placeholder: "spotify:awesome-track" %>
            <p class="mt-2">
              <%= link "Wo finde ich die Spotify URL?", to: Routes.static_path(@socket, "/images/how-to-get-spotify-uri.gif"), target: "_blank", class: "link" %>
            </p>
            <div class="text-right mt-6">
              <%= submit "Aufnahme starten", phx_disable_with: "Aufnahme starten...", class: "btn btn-primary" %>
            </div>
          </form>
        </div>
      </div>
    </div>
    """
  end

  # Maps a failure reason from `Recorder.enqueue/2` to a user-facing
  # (German) flash message.
  defp get_error_message(:invalid_uri), do: "Bitte gib eine gültige Spotify URI ein."

  defp get_error_message(:max_duration_exceeded),
    do: "Das Album oder die Playlist darf maximal 89 Minuten lang sein."

  defp get_error_message(:not_found), do: "Die URI konnte nicht gefunden werden."

  defp get_error_message(_reason),
    do: "Oh nein. Es ist ein Fehler aufgetreten. Bitte versuche es nochmal."
end
| 31.139706 | 163 | 0.598347 |
08d498a3a82e4445f755f41900cad1d90eb48152 | 2,113 | exs | Elixir | config/dev.exs | brunorafa/hello_world | c20af9a5f797a13867a1a5bf4f53cfb420d6377a | [
"MIT"
] | null | null | null | config/dev.exs | brunorafa/hello_world | c20af9a5f797a13867a1a5bf4f53cfb420d6377a | [
"MIT"
] | null | null | null | config/dev.exs | brunorafa/hello_world | c20af9a5f797a13867a1a5bf4f53cfb420d6377a | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :hello_world, HelloWorldWeb.Endpoint,
  http: [port: 4000],
  debug_errors: true,
  code_reloader: true,
  check_origin: false,
  watchers: [
    node: [
      "node_modules/webpack/bin/webpack.js",
      "--mode",
      "development",
      "--watch-stdin",
      cd: Path.expand("../assets", __DIR__)
    ]
  ]

# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
#     mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
#     https: [
#       port: 4001,
#       cipher_suite: :strong,
#       keyfile: "priv/cert/selfsigned_key.pem",
#       certfile: "priv/cert/selfsigned.pem"
#     ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.

# Watch static and templates for browser reloading.
config :hello_world, HelloWorldWeb.Endpoint,
  live_reload: [
    patterns: [
      ~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
      ~r{priv/gettext/.*(po)$},
      ~r{lib/hello_world_web/views/.*(ex)$},
      ~r{lib/hello_world_web/templates/.*(eex)$}
    ]
  ]

# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"

# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20

# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime

# Configure your database.
# NOTE: development-only credentials — production credentials belong in
# the prod secret config / environment variables, never in source control.
config :hello_world, HelloWorld.Repo,
  username: "postgres",
  password: "postgres",
  database: "hello_world_dev",
  hostname: "localhost",
  pool_size: 10
| 27.802632 | 68 | 0.691434 |
08d4e35aeddd006c63d5d935a2d060818cb488a0 | 1,858 | ex | Elixir | clients/gmail/lib/google_api/gmail/v1/model/delegate.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/gmail/lib/google_api/gmail/v1/model/delegate.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/gmail/lib/google_api/gmail/v1/model/delegate.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Gmail.V1.Model.Delegate do
  @moduledoc """
  Settings for a delegate. Delegates can read, send, and delete messages, as
  well as view and add contacts, for the delegator's account. See
  <a href="https://support.google.com/mail/answer/138350">"Set up
  mail delegation"</a> for more information about delegates.

  ## Attributes

  *   `delegateEmail` (*type:* `String.t`, *default:* `nil`) - The email address of the delegate.
  *   `verificationStatus` (*type:* `String.t`, *default:* `nil`) - Indicates whether this address has been verified and can act as a delegate
      for the account. Read-only.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :delegateEmail => String.t(),
          :verificationStatus => String.t()
        }

  # `field/1` is provided by GoogleApi.Gax.ModelBase; presumably it
  # registers the attribute for JSON (de)serialization — see that module.
  field(:delegateEmail)
  field(:verificationStatus)
end
# JSON decoding delegates to the model's generated `decode/2`.
defimpl Poison.Decoder, for: GoogleApi.Gmail.V1.Model.Delegate do
  def decode(value, options) do
    GoogleApi.Gmail.V1.Model.Delegate.decode(value, options)
  end
end

# JSON encoding goes through the shared Gax base implementation.
defimpl Poison.Encoder, for: GoogleApi.Gmail.V1.Model.Delegate do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 34.407407 | 142 | 0.724973 |
08d4f821540e7adb54479bc34a63684da620a732 | 96 | exs | Elixir | config/config.exs | sudokid-software/dart_sass | c6adf3e6fc655826ac96ac211cf900ccf5b904c0 | [
"MIT"
] | null | null | null | config/config.exs | sudokid-software/dart_sass | c6adf3e6fc655826ac96ac211cf900ccf5b904c0 | [
"MIT"
] | null | null | null | config/config.exs | sudokid-software/dart_sass | c6adf3e6fc655826ac96ac211cf900ccf5b904c0 | [
"MIT"
] | null | null | null | import Config
# dart_sass wrapper configuration: `version` pins the Sass release to
# install; `another` appears to be a named profile whose `args` are passed
# to the sass executable — confirm against the dart_sass docs.
config :dart_sass,
  version: "1.39.0",
  another: [
    args: ["--version"]
  ]
| 12 | 23 | 0.583333 |
08d5014e564def4940926aa030fbf420d5d0d4df | 209 | exs | Elixir | config/releases.exs | corybuecker/exlytics | b6bffc682954755d0f2e56c2562bf5654d129e67 | [
"Apache-2.0"
] | null | null | null | config/releases.exs | corybuecker/exlytics | b6bffc682954755d0f2e56c2562bf5654d129e67 | [
"Apache-2.0"
] | 27 | 2020-06-11T08:58:48.000Z | 2022-02-28T23:31:13.000Z | config/releases.exs | corybuecker/exlytics | b6bffc682954755d0f2e56c2562bf5654d129e67 | [
"Apache-2.0"
] | null | null | null | import Config
# Release (runtime) configuration: values are read from environment
# variables when the release boots.
config :exlytics, host: System.get_env("HOST"), port: System.get_env("PORT")

config :exlytics, Exlytics.Data.Repo,
  url: System.get_env("DATABASE_URL"),
  migration_default_prefix: "exlytics"
| 26.125 | 76 | 0.755981 |
08d52786765fb42e7b300db79128fcd48c5a168e | 1,614 | ex | Elixir | clients/android_publisher/lib/google_api/android_publisher/v3/model/uses_permission.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/android_publisher/lib/google_api/android_publisher/v3/model/uses_permission.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/android_publisher/lib/google_api/android_publisher/v3/model/uses_permission.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AndroidPublisher.V3.Model.UsesPermission do
  @moduledoc """
  A permission used by this APK.

  ## Attributes

  *   `maxSdkVersion` (*type:* `integer()`, *default:* `nil`) - Optionally, the maximum SDK version for which the permission is required.
  *   `name` (*type:* `String.t`, *default:* `nil`) - The name of the permission requested.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :maxSdkVersion => integer(),
          :name => String.t()
        }

  # `field/1` is provided by GoogleApi.Gax.ModelBase; presumably it
  # registers the attribute for JSON (de)serialization — see that module.
  field(:maxSdkVersion)
  field(:name)
end
# JSON decoding delegates to the model's generated `decode/2`.
defimpl Poison.Decoder, for: GoogleApi.AndroidPublisher.V3.Model.UsesPermission do
  def decode(value, options) do
    GoogleApi.AndroidPublisher.V3.Model.UsesPermission.decode(value, options)
  end
end

# JSON encoding goes through the shared Gax base implementation.
defimpl Poison.Encoder, for: GoogleApi.AndroidPublisher.V3.Model.UsesPermission do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 32.28 | 137 | 0.728005 |
08d555c4dac7351d8e4c113fe3ccb22488794e3f | 1,414 | ex | Elixir | lib/kayrock/serialize.ex | joshuawscott/kayrock | 614b930d568f7d0655a00e8d0d760e3b33314366 | [
"MIT"
] | 24 | 2019-06-26T22:08:13.000Z | 2022-01-27T00:10:41.000Z | lib/kayrock/serialize.ex | joshuawscott/kayrock | 614b930d568f7d0655a00e8d0d760e3b33314366 | [
"MIT"
] | 10 | 2020-04-10T07:48:53.000Z | 2021-03-26T10:50:25.000Z | lib/kayrock/serialize.ex | joshuawscott/kayrock | 614b930d568f7d0655a00e8d0d760e3b33314366 | [
"MIT"
defmodule Kayrock.Serialize do
  @moduledoc """
  Encoders for the primitive wire types of the Kafka protocol.

  Integers are written big-endian (network order). Variable-length values
  return iodata (a list of binaries) rather than a flat binary, so they can
  be written to a socket without intermediate copies.
  """

  @doc """
  Expands to the list of primitive type atoms accepted by `serialize/2`.
  """
  defmacro primitive_types do
    quote do
      [
        :boolean,
        :int8,
        :int16,
        :int32,
        :int64,
        :string,
        :nullable_string,
        :bytes,
        :nullable_bytes
      ]
    end
  end

  @doc """
  Encodes a single value of the given primitive type as a binary or iodata.
  """
  def serialize(:boolean, flag) do
    # Only the literal `true` encodes as 1; every other value encodes as 0.
    case flag do
      true -> <<1>>
      _ -> <<0>>
    end
  end

  def serialize(:int8, value), do: <<value::8-signed>>
  def serialize(:int16, value), do: <<value::16-signed>>
  def serialize(:int32, value), do: <<value::32-signed>>
  def serialize(:int64, value), do: <<value::64-signed>>

  # STRING: 16-bit byte length followed by the bytes.
  def serialize(:string, value), do: [<<byte_size(value)::16-signed>>, value]

  # NULLABLE_STRING: nil encodes as length -1.
  def serialize(:nullable_string, nil), do: <<-1::16-signed>>
  def serialize(:nullable_string, value), do: serialize(:string, value)

  # BYTES: 32-bit byte length followed by the bytes.
  def serialize(:bytes, value), do: [<<byte_size(value)::32-signed>>, value]
  def serialize(:iodata_bytes, value), do: [<<IO.iodata_length(value)::32-signed>>, value]

  # NULLABLE_BYTES: nil encodes as length -1.
  def serialize(:nullable_bytes, nil), do: <<-1::32-signed>>
  def serialize(:nullable_bytes, value), do: serialize(:bytes, value)

  @doc """
  Encodes a Kafka ARRAY of `type`. `nil` encodes as length -1, an empty
  list as length 0.
  """
  def serialize_array(_type, nil), do: <<-1::32-signed>>
  def serialize_array(_type, []), do: <<0::32-signed>>

  def serialize_array(type, values) when is_list(values) do
    [
      <<length(values)::32-signed>>,
      Enum.map(values, fn value -> serialize(type, value) end)
    ]
  end
end
| 30.73913 | 84 | 0.61174 |
08d55fec09947c922f5193d2803fff9352c57f22 | 522 | ex | Elixir | lib/jumubase_web/views/generators/pdf_generator.ex | richeterre/jumubase-phoenix | 7584f890af117d496971b5284bf9de798e22266f | [
"MIT"
] | 2 | 2019-01-20T07:03:30.000Z | 2019-04-11T10:20:14.000Z | lib/jumubase_web/views/generators/pdf_generator.ex | richeterre/jumubase-phoenix | 7584f890af117d496971b5284bf9de798e22266f | [
"MIT"
] | 6 | 2018-09-20T05:52:14.000Z | 2019-04-23T19:27:39.000Z | lib/jumubase_web/views/generators/pdf_generator.ex | richeterre/jumubase-phoenix | 7584f890af117d496971b5284bf9de798e22266f | [
"MIT"
defmodule JumubaseWeb.PDFGenerator do
  @moduledoc """
  Facade for PDF generation that dispatches to a configurable engine.

  The engine module is read from the `:engine` key of this module's
  application configuration and falls back to
  `JumubaseWeb.PDFGenerator.DefaultEngine`.
  """

  alias JumubaseWeb.PDFGenerator.DefaultEngine

  @doc "Generates jury sheet PDFs for the given performances and round."
  def jury_sheets(performances, round), do: engine().jury_sheets(performances, round)

  @doc "Generates a jury table PDF for the given performances."
  def jury_table(performances), do: engine().jury_table(performances)

  @doc "Generates certificate PDFs for the given performances and contest."
  def certificates(performances, contest), do: engine().certificates(performances, contest)

  # Private helpers

  # Resolves the configured engine module, defaulting when unset.
  defp engine do
    opts = Application.get_env(:jumubase, __MODULE__, [])
    opts[:engine] || DefaultEngine
  end
end
| 22.695652 | 59 | 0.754789 |
08d56c77ddd217e519aae77f8ab43be288436ebf | 2,953 | exs | Elixir | test/floki/html/generated/tokenizer/namedEntities_part43_test.exs | nathanl/floki | 042b3f60f4d9a6218ec85d558d13cc6dac30c587 | [
"MIT"
] | 1,778 | 2015-01-07T14:12:31.000Z | 2022-03-29T22:42:48.000Z | test/floki/html/generated/tokenizer/namedEntities_part43_test.exs | nathanl/floki | 042b3f60f4d9a6218ec85d558d13cc6dac30c587 | [
"MIT"
] | 279 | 2015-01-01T15:54:50.000Z | 2022-03-28T18:06:03.000Z | test/floki/html/generated/tokenizer/namedEntities_part43_test.exs | nathanl/floki | 042b3f60f4d9a6218ec85d558d13cc6dac30c587 | [
"MIT"
] | 166 | 2015-04-24T20:48:02.000Z | 2022-03-28T17:29:05.000Z | defmodule Floki.HTML.Generated.Tokenizer.NamedentitiesPart43Test do
  @moduledoc """
  Generated tokenizer tests for named character references (part 43).
  """
  use ExUnit.Case, async: true

  # NOTE: This file was generated by "mix generate_tokenizer_tests namedEntities.test".
  # html5lib-tests rev: e52ff68cc7113a6ef3687747fa82691079bf9cc5

  alias Floki.HTML.Tokenizer

  # Each test tokenizes one input and compares the resulting token stream
  # with the expected output from the html5lib test suite.
  test "tokenize/1 Named entity: zdot; with a semi-colon" do
    input = "ż"
    output = [["Character", "ż"]]

    result =
      input
      |> Tokenizer.tokenize()
      |> TokenizerTestLoader.tokenization_result()

    assert result.tokens == output
  end

  test "tokenize/1 Named entity: zeetrf; with a semi-colon" do
    input = "ℨ"
    output = [["Character", "ℨ"]]

    result =
      input
      |> Tokenizer.tokenize()
      |> TokenizerTestLoader.tokenization_result()

    assert result.tokens == output
  end

  test "tokenize/1 Named entity: zeta; with a semi-colon" do
    input = "ζ"
    output = [["Character", "ζ"]]

    result =
      input
      |> Tokenizer.tokenize()
      |> TokenizerTestLoader.tokenization_result()

    assert result.tokens == output
  end

  test "tokenize/1 Named entity: zfr; with a semi-colon" do
    input = "𝔷"
    output = [["Character", "𝔷"]]

    result =
      input
      |> Tokenizer.tokenize()
      |> TokenizerTestLoader.tokenization_result()

    assert result.tokens == output
  end

  test "tokenize/1 Named entity: zhcy; with a semi-colon" do
    input = "ж"
    output = [["Character", "ж"]]

    result =
      input
      |> Tokenizer.tokenize()
      |> TokenizerTestLoader.tokenization_result()

    assert result.tokens == output
  end

  test "tokenize/1 Named entity: zigrarr; with a semi-colon" do
    input = "⇝"
    output = [["Character", "⇝"]]

    result =
      input
      |> Tokenizer.tokenize()
      |> TokenizerTestLoader.tokenization_result()

    assert result.tokens == output
  end

  test "tokenize/1 Named entity: zopf; with a semi-colon" do
    input = "𝕫"
    output = [["Character", "𝕫"]]

    result =
      input
      |> Tokenizer.tokenize()
      |> TokenizerTestLoader.tokenization_result()

    assert result.tokens == output
  end

  test "tokenize/1 Named entity: zscr; with a semi-colon" do
    input = "𝓏"
    output = [["Character", "𝓏"]]

    result =
      input
      |> Tokenizer.tokenize()
      |> TokenizerTestLoader.tokenization_result()

    assert result.tokens == output
  end

  # The zwj/zwnj inputs and outputs contain zero-width (invisible)
  # characters in the source; preserved as-is.
  test "tokenize/1 Named entity: zwj; with a semi-colon" do
    input = "‍"
    output = [["Character", ""]]

    result =
      input
      |> Tokenizer.tokenize()
      |> TokenizerTestLoader.tokenization_result()

    assert result.tokens == output
  end

  test "tokenize/1 Named entity: zwnj; with a semi-colon" do
    input = "‌"
    output = [["Character", ""]]

    result =
      input
      |> Tokenizer.tokenize()
      |> TokenizerTestLoader.tokenization_result()

    assert result.tokens == output
  end
end
| 22.891473 | 87 | 0.629191 |
08d56f605545ec49140472b5710af960a6ab68ba | 1,022 | ex | Elixir | lib/cotoami/exceptions.ex | cruatta/cotoami | 29b7ef66c053cf4e381c6ff65d5fd599066ebabe | [
"Apache-2.0"
] | 337 | 2016-11-28T15:46:58.000Z | 2022-03-01T06:21:25.000Z | lib/cotoami/exceptions.ex | cruatta/cotoami | 29b7ef66c053cf4e381c6ff65d5fd599066ebabe | [
"Apache-2.0"
] | 79 | 2017-02-27T05:44:36.000Z | 2021-12-09T00:28:11.000Z | lib/cotoami/exceptions.ex | cruatta/cotoami | 29b7ef66c053cf4e381c6ff65d5fd599066ebabe | [
"Apache-2.0"
defmodule Cotoami.Exceptions do
  @moduledoc """
  Application exceptions. Each exception carries a `plug_status` field so
  that Plug renders it with the matching HTTP status code.
  """

  defmodule NoPermission do
    @moduledoc """
    raised when the amishi has no permission for the operation
    """
    defexception message: "You have no permission to do this operation.", plug_status: 403
  end

  defmodule NotFound do
    @moduledoc """
    raised when the requested entity is not found.
    """
    defexception [:message, plug_status: 404]

    # `target` may be any term; it is rendered into the message via String.Chars.
    def exception(target) do
      %__MODULE__{message: "Not found: #{target}"}
    end
  end

  defmodule InvalidOperation do
    @moduledoc """
    raised when an invalid operation has been requested.
    """
    defexception message: "This operation is not supported.", plug_status: 405
  end

  defmodule DuplicateRepost do
    @moduledoc """
    raised when reposting a coto that has already been posted in the cotonoma
    """
    defexception message: "This coto has already been posted in the cotonoma", plug_status: 405
  end
end
| 24.926829 | 78 | 0.658513 |
08d59338555bdf0ca233effdd362d30e05e93ea9 | 1,649 | exs | Elixir | mix.exs | Yoosuke/shopmap | 5a1892850d2c79b4732e3ee0c40a8b7c7c55d20d | [
"MIT"
] | null | null | null | mix.exs | Yoosuke/shopmap | 5a1892850d2c79b4732e3ee0c40a8b7c7c55d20d | [
"MIT"
] | null | null | null | mix.exs | Yoosuke/shopmap | 5a1892850d2c79b4732e3ee0c40a8b7c7c55d20d | [
"MIT"
defmodule Shopmap.MixProject do
  use Mix.Project

  # Mix project definition. Run `mix help compile.app` and `mix help deps`
  # for details on the individual keys.
  def project do
    [
      app: :shopmap,
      version: "0.1.0",
      elixir: "~> 1.5",
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: [:phoenix, :gettext] ++ Mix.compilers(),
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: deps()
    ]
  end

  # OTP application configuration: entry-point module and extra applications
  # started alongside this one.
  def application do
    [
      mod: {Shopmap.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end

  # Test support modules are compiled only in the :test environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_env), do: ["lib"]

  # Shortcuts such as `mix ecto.setup` to create, migrate and seed the
  # database in one step.
  defp aliases do
    [
      "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      test: ["ecto.create --quiet", "ecto.migrate", "test"]
    ]
  end

  # Project dependencies.
  defp deps do
    [
      {:phoenix, "~> 1.4.0"},
      {:phoenix_pubsub, "~> 1.1"},
      {:phoenix_ecto, "~> 4.0"},
      {:ecto_sql, "~> 3.0"},
      {:postgrex, ">= 0.0.0"},
      {:phoenix_html, "~> 2.11"},
      {:phoenix_live_reload, "~> 1.2", only: :dev},
      {:gettext, "~> 0.11"},
      {:jason, "~> 1.0"},
      {:plug_cowboy, "~> 2.0"}
    ]
  end
end
| 26.174603 | 79 | 0.576713 |
08d594d56615d327ce47fda8e67f04cea4b89624 | 1,632 | exs | Elixir | test/erlef/integrations_test.exs | joaquinalcerro/website | 52dc89c70cd0b42127ab233a4c0d10f626d2b698 | [
"Apache-2.0"
] | 71 | 2019-07-02T18:06:15.000Z | 2022-03-09T15:30:08.000Z | test/erlef/integrations_test.exs | joaquinalcerro/website | 52dc89c70cd0b42127ab233a4c0d10f626d2b698 | [
"Apache-2.0"
] | 157 | 2019-07-02T01:21:16.000Z | 2022-03-30T16:08:12.000Z | test/erlef/integrations_test.exs | joaquinalcerro/website | 52dc89c70cd0b42127ab233a4c0d10f626d2b698 | [
"Apache-2.0"
defmodule Erlef.IntegrationsTest do
  use Erlef.DataCase

  alias Erlef.Integrations

  describe "apps" do
    alias Erlef.Integrations.App

    @audit_opts [audit: %{member_id: Ecto.UUID.generate()}]
    @valid_attrs %{name: "some name"}
    @update_attrs %{name: "some updated name"}
    @invalid_attrs %{name: nil}

    # Inserts an app built from @valid_attrs, with `attrs` taking precedence.
    def app_fixture(attrs \\ %{}) do
      merged = Map.merge(@valid_attrs, Map.new(attrs))
      {:ok, app} = Integrations.create_app(merged, @audit_opts)
      app
    end

    test "list_apps/0 returns all apps" do
      app = app_fixture()
      assert [app] == Integrations.list_apps()
    end

    test "get_app!/1 returns the app with given id" do
      app = app_fixture()
      assert app == Integrations.get_app!(app.id)
    end

    test "create_app/1 with valid data creates a app" do
      assert {:ok, %App{} = created} = Integrations.create_app(@valid_attrs, @audit_opts)
      assert created.name == "some name"
    end

    test "create_app/1 with invalid data returns error changeset" do
      assert {:error, %Ecto.Changeset{}} = Integrations.create_app(@invalid_attrs, @audit_opts)
    end

    test "update_app/2 with valid data updates the app" do
      app = app_fixture()
      assert {:ok, %App{} = updated} = Integrations.update_app(app, @update_attrs, @audit_opts)
      assert updated.name == "some updated name"
    end

    test "update_app/2 with invalid data returns error changeset" do
      app = app_fixture()

      assert {:error, %Ecto.Changeset{}} =
               Integrations.update_app(app, @invalid_attrs, @audit_opts)

      # The stored record must be untouched after the failed update.
      assert app == Integrations.get_app!(app.id)
    end
  end
end
| 28.137931 | 95 | 0.645221 |
08d5c45bc1f473cb9b889aa3afdcb881462735d8 | 973 | ex | Elixir | lib/headers/x_permitted_cross_domain_policies.ex | anotherhale/secure_headers | 04c354351562a2211d4fafa4ef10947cfbfacb14 | [
"Apache-2.0"
] | 44 | 2016-01-14T06:30:37.000Z | 2021-10-21T03:22:55.000Z | lib/headers/x_permitted_cross_domain_policies.ex | anotherhale/plug_secure_headers | 04c354351562a2211d4fafa4ef10947cfbfacb14 | [
"Apache-2.0"
] | 3 | 2017-08-08T17:18:33.000Z | 2018-02-18T06:47:24.000Z | lib/headers/x_permitted_cross_domain_policies.ex | anotherhale/plug_secure_headers | 04c354351562a2211d4fafa4ef10947cfbfacb14 | [
"Apache-2.0"
defmodule SecureHeaders.XPermittedCrossDomainPolicies do
  @moduledoc """
  Validation for the `x-permitted-cross-domain-policies` response header.
  """

  # Case-insensitive whole-string match on the values permitted by Adobe's
  # cross-domain policy specification: all, none, master-only,
  # by-ftp-filename, by-content-type.
  #
  # Bug fix: the previous pattern ended in a stray `|]` alternative with no
  # trailing anchor, so any value beginning with `]` (e.g. "]evil") was
  # accepted as valid. The pattern now anchors the whole string once.
  @valid_header ~r/\A(ALL|NONE|MASTER-ONLY|BY-FTP-FILENAME|BY-CONTENT-TYPE)\z/i

  @error_msg "Invalid configuration for x-permitted-cross-domain-policies"

  @doc """
  Validates the `:x_permitted_cross_domain_policies` entry of
  `options[:config]`.

  Returns `{:ok, options}` when the key is absent or holds a valid header
  value, and `{:error, reason}` for an invalid value or a non-list argument.
  """
  def validate(options) when is_list(options) do
    config = Keyword.get(options, :config, [])

    cond do
      # No x-permitted-cross-domain-policies configuration found - nothing to check.
      not Keyword.has_key?(config, :x_permitted_cross_domain_policies) -> {:ok, options}
      validate_config(config[:x_permitted_cross_domain_policies]) -> {:ok, options}
      true -> {:error, @error_msg}
    end
  end

  def validate(_), do: {:error, @error_msg}

  # Only binaries can be valid header values; anything else is rejected.
  defp validate_config(config) when is_bitstring(config) do
    Regex.match?(@valid_header, config)
  end

  defp validate_config(_), do: false
end
| 31.387097 | 89 | 0.670092 |
08d60486fe843c2bbb06f9ad3499fe25de407bfa | 1,764 | ex | Elixir | clients/service_directory/lib/google_api/service_directory/v1/model/set_iam_policy_request.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/service_directory/lib/google_api/service_directory/v1/model/set_iam_policy_request.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/service_directory/lib/google_api/service_directory/v1/model/set_iam_policy_request.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceDirectory.V1.Model.SetIamPolicyRequest do
  @moduledoc """
  Request message for `SetIamPolicy` method.

  ## Attributes

  *   `policy` (*type:* `GoogleApi.ServiceDirectory.V1.Model.Policy.t`, *default:* `nil`) - REQUIRED: The complete policy to be applied to the `resource`. The size of the policy is limited to a few 10s of KB. An empty policy is a valid policy but certain Cloud Platform services (such as Projects) might reject them.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :policy => GoogleApi.ServiceDirectory.V1.Model.Policy.t() | nil
        }

  # `field/2` is a macro injected by GoogleApi.Gax.ModelBase; `as:` presumably
  # registers Policy as a nested model for (de)serialization — confirm in ModelBase.
  field(:policy, as: GoogleApi.ServiceDirectory.V1.Model.Policy)
end

# Poison decoding is delegated to the `decode/2` that ModelBase generates on the model.
defimpl Poison.Decoder, for: GoogleApi.ServiceDirectory.V1.Model.SetIamPolicyRequest do
  def decode(value, options) do
    GoogleApi.ServiceDirectory.V1.Model.SetIamPolicyRequest.decode(value, options)
  end
end

# Poison encoding goes through the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.ServiceDirectory.V1.Model.SetIamPolicyRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 37.531915 | 316 | 0.754535 |
08d60f5f61677ec94fb5f98a8cf9f7ffec92a3c3 | 1,150 | ex | Elixir | clients/games_configuration/lib/google_api/games_configuration/v1configuration/connection.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/games_configuration/lib/google_api/games_configuration/v1configuration/connection.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/games_configuration/lib/google_api/games_configuration/v1configuration/connection.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.GamesConfiguration.V1configuration.Connection do
  @moduledoc """
  Handle Tesla connections for GoogleApi.GamesConfiguration.V1configuration.
  """

  @type t :: Tesla.Env.client()

  # `use GoogleApi.Gax.Connection` presumably injects the Tesla client plumbing;
  # the options below supply the OAuth scopes, the OTP app used for runtime
  # configuration lookup, and the API base URL — confirm in Gax.Connection.
  use GoogleApi.Gax.Connection,
    scopes: [
      # View and manage your Google Play Developer account
      "https://www.googleapis.com/auth/androidpublisher"
    ],
    otp_app: :google_api_games_configuration,
    base_url: "https://www.googleapis.com/"
end
| 34.848485 | 76 | 0.750435 |
08d610fd71a2c252f30476c76f674f6ec8a09623 | 359 | exs | Elixir | priv/repo/seeds.exs | hugomd/five_hundred | 74d5b251d4da05ff650d786d5627965114f26447 | [
"MIT"
] | 2 | 2021-06-28T07:06:13.000Z | 2021-07-18T01:13:27.000Z | priv/repo/seeds.exs | hugomd/five_hundred | 74d5b251d4da05ff650d786d5627965114f26447 | [
"MIT"
] | 1 | 2021-06-20T07:49:56.000Z | 2021-06-20T07:49:56.000Z | priv/repo/seeds.exs | hugomd/five_hundred | 74d5b251d4da05ff650d786d5627965114f26447 | [
"MIT"
] | 1 | 2021-07-17T10:23:36.000Z | 2021-07-17T10:23:36.000Z | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# FiveHundred.Repo.insert!(%FiveHundred.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.916667 | 61 | 0.713092 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.